
Commit

actions-user committed Nov 10, 2024
2 parents d468c5f + 108069a commit ef35084
Showing 50 changed files with 390 additions and 214 deletions.
13 changes: 12 additions & 1 deletion README.md
@@ -31,7 +31,7 @@ One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4
[MacOS-image]: https://img.shields.io/badge/-MacOS-black?logo=apple
[Linux-image]: https://img.shields.io/badge/-Linux-333?logo=ubuntu

[<img src="https://vercel.com/button" alt="Deploy on Zeabur" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://zeabur.com/button.svg" alt="Deploy on Zeabur" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Open in Gitpod" height="30">](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web) [<img src="https://img.shields.io/badge/BT_Deploy-Install-20a53a" alt="Open in Gitpod" height="30">](https://www.bt.cn/new/download.html)
[<img src="https://vercel.com/button" alt="Deploy on Vercel" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://zeabur.com/button.svg" alt="Deploy on Zeabur" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Open in Gitpod" height="30">](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web) [<img src="https://img.shields.io/badge/BT_Deploy-Install-20a53a" alt="BT Deploy Install" height="30">](https://www.bt.cn/new/download.html) [<img src="https://svgshare.com/i/1AVg.svg" alt="Deploy to Alibaba Cloud" height="30">](https://computenest.aliyun.com/market/service-f1c9b75e59814dc49d52)

[<img src="https://github.com/user-attachments/assets/903482d4-3e87-4134-9af1-f2588fa90659" height="60" width="288" >](https://monica.im/?utm=nxcrp)

@@ -301,6 +301,14 @@ iflytek Api Key.

iflytek Api Secret.

### `CHATGLM_API_KEY` (optional)

ChatGLM Api Key.

### `CHATGLM_URL` (optional)

ChatGLM Api Url.
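
As an illustration only (the config shape below is assumed, not taken from the repository), these two variables would typically be read on the server side roughly like this:

```ts
// Hypothetical sketch of reading the ChatGLM variables documented above.
// The actual server config lives in app/config/server.ts and may differ.
const chatglmConfig = {
  apiKey: process.env.CHATGLM_API_KEY ?? "",
  // Assumed fallback endpoint when CHATGLM_URL is not set.
  baseUrl: process.env.CHATGLM_URL ?? "https://open.bigmodel.cn",
};

const isChatGLMEnabled = chatglmConfig.apiKey.length > 0;
```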

### `HIDE_USER_API_KEY` (optional)

> Default: Empty
@@ -397,6 +405,9 @@ yarn dev

> [简体中文 > 如何部署到私人服务器](./README_CN.md#部署)
### BT Install
> [简体中文 > 如何通过宝塔一键部署](./docs/bt-cn.md)
### Docker (Recommended)

```shell
10 changes: 10 additions & 0 deletions README_CN.md
@@ -184,6 +184,13 @@ ByteDance Api Url.

讯飞星火Api Secret.

### `CHATGLM_API_KEY` (可选)

ChatGLM Api Key.

### `CHATGLM_URL` (可选)

ChatGLM Api Url.


### `HIDE_USER_API_KEY` (可选)
@@ -264,6 +271,9 @@ BASE_URL=https://b.nextweb.fun/api/proxy

## 部署

### 宝塔面板部署
> [简体中文 > 如何通过宝塔一键部署](./docs/bt-cn.md)
### 容器部署 (推荐)

> Docker 版本需要在 20 及其以上,否则会提示找不到镜像。
4 changes: 2 additions & 2 deletions app/api/common.ts
@@ -1,8 +1,8 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "../config/server";
import { OPENAI_BASE_URL, ServiceProvider } from "../constant";
import { isModelAvailableInServer } from "../utils/model";
import { cloudflareAIGatewayUrl } from "../utils/cloudflare";
import { getModelProvider, isModelAvailableInServer } from "../utils/model";

const serverConfig = getServerSideConfig();

@@ -71,7 +71,7 @@ export async function requestOpenai(req: NextRequest) {
.filter((v) => !!v && !v.startsWith("-") && v.includes(modelName))
.forEach((m) => {
const [fullName, displayName] = m.split("=");
const [_, providerName] = fullName.split("@");
const [_, providerName] = getModelProvider(fullName);
if (providerName === "azure" && !displayName) {
const [_, deployId] = (serverConfig?.azureUrl ?? "").split(
"deployments/",
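
In the hunk above, the custom-model spec is now parsed with `getModelProvider` instead of `fullName.split("@")`. Its implementation is not part of this diff; the sketch below is only a plausible reading of what such a helper does, inferred from the destructuring at the call site:

```ts
// Hypothetical sketch of getModelProvider (the real helper lives in app/utils/model.ts).
// It is assumed to split a "model@provider" spec into [modelName, providerName] on the
// last "@", so model names that themselves contain "@" stay intact.
function getModelProvider(fullName: string): [string, string | undefined] {
  const at = fullName.lastIndexOf("@");
  if (at === -1) return [fullName, undefined];
  return [fullName.slice(0, at), fullName.slice(at + 1)];
}

console.log(getModelProvider("gpt-4o@azure")); // ["gpt-4o", "azure"]
console.log(getModelProvider("claude-3-5-sonnet")); // ["claude-3-5-sonnet", undefined]
```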
2 changes: 1 addition & 1 deletion app/client/api.ts
@@ -70,7 +70,7 @@ export interface ChatOptions {
config: LLMConfig;

onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string) => void;
onFinish: (message: string, responseRes: Response) => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
onBeforeTool?: (tool: ChatMessageTool) => void;
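
With this change every platform client reports the `Response` it finished with, so a caller can check the HTTP status alongside the final text. A minimal handler, written against nothing more than the signature above:

```ts
// Sketch of an onFinish consumer using the new second argument.
function handleFinish(message: string, responseRes: Response) {
  if (!responseRes.ok) {
    // Non-2xx statuses now surface here (including the synthetic 400 used for aborts, see below).
    console.warn("[Chat] request finished with HTTP", responseRes.status);
  }
  console.log("[Chat] final message:", message);
}
```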
6 changes: 4 additions & 2 deletions app/client/platforms/alibaba.ts
@@ -143,6 +143,7 @@ export class QwenApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate response to make it look smooth
function animateResponseText() {
@@ -172,7 +173,7 @@
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

@@ -188,6 +189,7 @@
"[Alibaba] request response content type: ",
contentType,
);
responseRes = res;

if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
@@ -254,7 +256,7 @@

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
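
The same pattern recurs in the Baidu, ByteDance, iFlytek and Tencent clients further down: a `responseRes` variable is declared next to the accumulated text, assigned in the stream's `onopen` handler, and handed to `onFinish` together with the finished text. A condensed, self-contained sketch of that shared shape (names mirror the diff; the fetch/SSE plumbing is omitted):

```ts
// Condensed sketch of the pattern shared by the streaming platform clients.
type FinishCallback = (message: string, responseRes: Response) => void;

function makeStreamState(onFinish: FinishCallback) {
  let responseText = "";
  let remainText = "";
  let finished = false;
  let responseRes: Response;

  return {
    // called from the event source's onopen with the raw fetch Response
    onOpen(res: Response) {
      responseRes = res;
    },
    // called for each streamed chunk
    append(chunk: string) {
      remainText += chunk;
    },
    // called once, on stream end / error / abort
    finish() {
      if (!finished) {
        finished = true;
        onFinish(responseText + remainText, responseRes);
      }
    },
  };
}
```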
5 changes: 3 additions & 2 deletions app/client/platforms/anthropic.ts
@@ -317,13 +317,14 @@ export class ClaudeApi implements LLMApi {
};

try {
controller.signal.onabort = () => options.onFinish("");
controller.signal.onabort = () =>
options.onFinish("", new Response(null, { status: 400 }));

const res = await fetch(path, payload);
const resJson = await res.json();

const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
} catch (e) {
console.error("failed to chat", e);
options.onError?.(e as Error);
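
Because `onFinish` now requires a `Response`, the abort path in the Anthropic client passes a synthetic one with status 400 rather than nothing. A small sketch of that contract in isolation, assuming only the lines shown above:

```ts
// Sketch: the abort path still satisfies the (message, Response) contract
// by handing over a synthetic Response, since no real HTTP response exists yet.
const controller = new AbortController();

function onFinish(message: string, responseRes: Response) {
  console.log("finished with status", responseRes.status, "message:", JSON.stringify(message));
}

controller.signal.onabort = () => {
  onFinish("", new Response(null, { status: 400 }));
};

controller.abort(); // logs: finished with status 400 message: ""
```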
7 changes: 4 additions & 3 deletions app/client/platforms/baidu.ts
@@ -162,6 +162,7 @@ export class ErnieApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate response to make it look smooth
function animateResponseText() {
@@ -191,7 +192,7 @@
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

@@ -204,7 +205,7 @@
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log("[Baidu] request response content type: ", contentType);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
@@ -267,7 +268,7 @@

const resJson = await res.json();
const message = resJson?.result;
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
7 changes: 4 additions & 3 deletions app/client/platforms/bytedance.ts
@@ -130,6 +130,7 @@ export class DoubaoApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate response to make it look smooth
function animateResponseText() {
@@ -159,7 +160,7 @@
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

@@ -175,7 +176,7 @@
"[ByteDance] request response content type: ",
contentType,
);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
@@ -241,7 +242,7 @@

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
2 changes: 1 addition & 1 deletion app/client/platforms/glm.ts
@@ -177,7 +177,7 @@ export class ChatGLMApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
2 changes: 1 addition & 1 deletion app/client/platforms/google.ts
@@ -274,7 +274,7 @@ export class GeminiProApi implements LLMApi {
);
}
const message = apiClient.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
7 changes: 4 additions & 3 deletions app/client/platforms/iflytek.ts
@@ -117,6 +117,7 @@ export class SparkApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// Animate response text to make it look smooth
function animateResponseText() {
@@ -143,7 +144,7 @@
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

@@ -156,7 +157,7 @@
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
console.log("[Spark] request response content type: ", contentType);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
@@ -231,7 +232,7 @@

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
2 changes: 1 addition & 1 deletion app/client/platforms/moonshot.ts
@@ -180,7 +180,7 @@ export class MoonshotApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
8 changes: 7 additions & 1 deletion app/client/platforms/openai.ts
@@ -65,6 +65,7 @@ export interface RequestPayload {
frequency_penalty: number;
top_p: number;
max_tokens?: number;
max_completion_tokens?: number;
}

export interface DalleRequestPayload {
@@ -233,6 +234,11 @@ export class ChatGPTApi implements LLMApi {
// Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
};

// o1 models use max_completion_tokens to control the token count (https://platform.openai.com/docs/guides/reasoning#controlling-costs)
if (isO1) {
requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
}

// add max_tokens to vision model
if (visionModel) {
requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
@@ -361,7 +367,7 @@

const resJson = await res.json();
const message = await this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
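
In short, o1-class requests now carry `max_completion_tokens` (per the linked reasoning guide) while vision models keep the existing `max_tokens` floor of 4000. A simplified sketch of that branching, using a reduced payload shape for illustration:

```ts
// Simplified sketch of the token-limit fields; the real requestPayload in
// app/client/platforms/openai.ts carries many more fields.
interface TokenLimitFields {
  max_tokens?: number;
  max_completion_tokens?: number;
}

function applyTokenLimits(
  payload: TokenLimitFields,
  maxTokens: number,
  isO1: boolean,
  visionModel: boolean,
): TokenLimitFields {
  if (isO1) {
    // o1 models are limited via max_completion_tokens
    // https://platform.openai.com/docs/guides/reasoning#controlling-costs
    payload.max_completion_tokens = maxTokens;
  }
  if (visionModel) {
    // vision models keep max_tokens, with the same floor as in the diff above
    payload.max_tokens = Math.max(maxTokens, 4000);
  }
  return payload;
}

console.log(applyTokenLimits({}, 1024, true, false)); // { max_completion_tokens: 1024 }
```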
7 changes: 4 additions & 3 deletions app/client/platforms/tencent.ts
@@ -142,6 +142,7 @@ export class HunyuanApi implements LLMApi {
let responseText = "";
let remainText = "";
let finished = false;
let responseRes: Response;

// animate response to make it look smooth
function animateResponseText() {
@@ -171,7 +172,7 @@
const finish = () => {
if (!finished) {
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, responseRes);
}
};

@@ -187,7 +188,7 @@
"[Tencent] request response content type: ",
contentType,
);

responseRes = res;
if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
return finish();
@@ -253,7 +254,7 @@

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
2 changes: 1 addition & 1 deletion app/client/platforms/xai.ts
@@ -173,7 +173,7 @@ export class XAIApi implements LLMApi {

const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
6 changes: 4 additions & 2 deletions app/components/auth.tsx
@@ -18,6 +18,8 @@ import {
trackSettingsPageGuideToCPaymentClick,
trackAuthorizationPageButtonToCPaymentClick,
} from "../utils/auth-settings-events";
import clsx from "clsx";

const storage = safeLocalStorage();

export function AuthPage() {
@@ -54,7 +56,7 @@ export function AuthPage() {
onClick={() => navigate(Path.Home)}
></IconButton>
</div>
<div className={`no-dark ${styles["auth-logo"]}`}>
<div className={clsx("no-dark", styles["auth-logo"])}>
<BotIcon />
</div>

@@ -163,7 +165,7 @@ function TopBanner() {
onMouseEnter={handleMouseEnter}
onMouseLeave={handleMouseLeave}
>
<div className={`${styles["top-banner-inner"]} no-dark`}>
<div className={clsx(styles["top-banner-inner"], "no-dark")}>
<Logo className={styles["top-banner-logo"]}></Logo>
<span>
{Locale.Auth.TopTips}
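
The template-literal class strings are replaced with `clsx`, which joins its truthy arguments with spaces and drops `undefined` or `false` ones, so a missing CSS-module key can no longer leak the literal string "undefined" into `className`. A tiny illustration, assuming only clsx's documented behaviour:

```ts
import clsx from "clsx";

// Hypothetical CSS-module object for illustration; real hashes come from the build.
const styles: Record<string, string | undefined> = { "auth-logo": "auth-logo_ab12c" };

const withTemplate = `no-dark ${styles["auth-logo"]}`; // "no-dark auth-logo_ab12c"
const withClsx = clsx("no-dark", styles["auth-logo"]); // "no-dark auth-logo_ab12c"

// The difference shows up when a key is missing:
console.log(`no-dark ${styles["missing"]}`); // "no-dark undefined"
console.log(clsx("no-dark", styles["missing"])); // "no-dark"
```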