From d357b45e84eb773c2e0c142d0d849c4f20be2975 Mon Sep 17 00:00:00 2001
From: DDMeaqua
Date: Wed, 30 Oct 2024 19:24:03 +0800
Subject: [PATCH 1/5] feat: [#5714] support GLM
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/api/[provider]/[...path]/route.ts |   3 +
 app/api/auth.ts                       |   3 +
 app/api/glm.ts                        | 129 +++++++++++++++++
 app/client/api.ts                     |  10 ++
 app/client/platforms/glm.ts           | 192 ++++++++++++++++++++++++++
 app/components/settings.tsx           |  41 ++++++
 app/config/server.ts                  |   9 ++
 app/constant.ts                       |  32 +++++
 app/locales/cn.ts                     |  11 ++
 app/locales/en.ts                     |  11 ++
 app/store/access.ts                   |  12 ++
 11 files changed, 453 insertions(+)
 create mode 100644 app/api/glm.ts
 create mode 100644 app/client/platforms/glm.ts

diff --git a/app/api/[provider]/[...path]/route.ts b/app/api/[provider]/[...path]/route.ts
index 5ac248d0c87..78836cc528e 100644
--- a/app/api/[provider]/[...path]/route.ts
+++ b/app/api/[provider]/[...path]/route.ts
@@ -11,6 +11,7 @@ import { handle as moonshotHandler } from "../../moonshot";
 import { handle as stabilityHandler } from "../../stability";
 import { handle as iflytekHandler } from "../../iflytek";
 import { handle as xaiHandler } from "../../xai";
+import { handle as glmHandler } from "../../glm";
 import { handle as proxyHandler } from "../../proxy";
 
 async function handle(
@@ -41,6 +42,8 @@ async function handle(
       return iflytekHandler(req, { params });
     case ApiPath.XAI:
       return xaiHandler(req, { params });
+    case ApiPath.GLM:
+      return glmHandler(req, { params });
     case ApiPath.OpenAI:
       return openaiHandler(req, { params });
     default:
diff --git a/app/api/auth.ts b/app/api/auth.ts
index d4ac66a113b..db920fc283f 100644
--- a/app/api/auth.ts
+++ b/app/api/auth.ts
@@ -95,6 +95,9 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
     case ModelProvider.XAI:
       systemApiKey = serverConfig.xaiApiKey;
       break;
+    case ModelProvider.GLM:
+      systemApiKey = serverConfig.glmApiKey;
+      break;
     case ModelProvider.GPT:
     default:
       if (req.nextUrl.pathname.includes("azure/deployments")) {
diff --git a/app/api/glm.ts b/app/api/glm.ts
new file mode 100644
index 00000000000..d40c4b6a8c2
--- /dev/null
+++ b/app/api/glm.ts
@@ -0,0 +1,129 @@
+import { getServerSideConfig } from "@/app/config/server";
+import {
+  GLM_BASE_URL,
+  ApiPath,
+  ModelProvider,
+  ServiceProvider,
+} from "@/app/constant";
+import { prettyObject } from "@/app/utils/format";
+import { NextRequest, NextResponse } from "next/server";
+import { auth } from "@/app/api/auth";
+import { isModelAvailableInServer } from "@/app/utils/model";
+
+const serverConfig = getServerSideConfig();
+
+export async function handle(
+  req: NextRequest,
+  { params }: { params: { path: string[] } },
+) {
+  console.log("[GLM Route] params ", params);
+
+  if (req.method === "OPTIONS") {
+    return NextResponse.json({ body: "OK" }, { status: 200 });
+  }
+
+  const authResult = auth(req, ModelProvider.GLM);
+  if (authResult.error) {
+    return NextResponse.json(authResult, {
+      status: 401,
+    });
+  }
+
+  try {
+    const response = await request(req);
+    return response;
+  } catch (e) {
+    console.error("[GLM] ", e);
+    return NextResponse.json(prettyObject(e));
+  }
+}
+
+async function request(req: NextRequest) {
+  const controller = new AbortController();
+
+  // strip the ApiPath.GLM prefix; the upstream base url comes from server config
+  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.GLM, "");
+
+  let baseUrl = serverConfig.glmUrl || GLM_BASE_URL;
+
+  if (!baseUrl.startsWith("http")) {
+    baseUrl = `https://${baseUrl}`;
+  }
+
+  if (baseUrl.endsWith("/")) {
+    baseUrl = baseUrl.slice(0, -1);
+  }
+
+  console.log("[Proxy] ", path);
+  console.log("[Base Url]", baseUrl);
+
+  const timeoutId = setTimeout(
+    () => {
+      controller.abort();
+    },
+    10 * 60 * 1000,
+  );
+
+  const fetchUrl = `${baseUrl}${path}`;
+  console.log("[Fetch Url] ", fetchUrl);
+  const fetchOptions: RequestInit = {
+    headers: {
+      "Content-Type": "application/json",
+      Authorization: req.headers.get("Authorization") ?? "",
+    },
+    method: req.method,
+    body: req.body,
+    redirect: "manual",
+    // @ts-ignore
+    duplex: "half",
+    signal: controller.signal,
+  };
+
+  // #1815 try to refuse some request to some models
+  if (serverConfig.customModels && req.body) {
+    try {
+      const clonedBody = await req.text();
+      fetchOptions.body = clonedBody;
+
+      const jsonBody = JSON.parse(clonedBody) as { model?: string };
+
+      // not undefined and is false
+      if (
+        isModelAvailableInServer(
+          serverConfig.customModels,
+          jsonBody?.model as string,
+          ServiceProvider.GLM as string,
+        )
+      ) {
+        return NextResponse.json(
+          {
+            error: true,
+            message: `you are not allowed to use ${jsonBody?.model} model`,
+          },
+          {
+            status: 403,
+          },
+        );
+      }
+    } catch (e) {
+      console.error(`[GLM] filter`, e);
+    }
+  }
+  try {
+    const res = await fetch(fetchUrl, fetchOptions);
+
+    // to prevent browser prompt for credentials
+    const newHeaders = new Headers(res.headers);
+    newHeaders.delete("www-authenticate");
+    // to disable nginx buffering
+    newHeaders.set("X-Accel-Buffering", "no");
+
+    return new Response(res.body, {
+      status: res.status,
+      statusText: res.statusText,
+      headers: newHeaders,
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
diff --git a/app/client/api.ts b/app/client/api.ts
index 4238c2a264b..4082d085c0b 100644
--- a/app/client/api.ts
+++ b/app/client/api.ts
@@ -21,6 +21,7 @@ import { HunyuanApi } from "./platforms/tencent";
 import { MoonshotApi } from "./platforms/moonshot";
 import { SparkApi } from "./platforms/iflytek";
 import { XAIApi } from "./platforms/xai";
+import { GLMApi } from "./platforms/glm";
 
 export const ROLES = ["system", "user", "assistant"] as const;
 export type MessageRole = (typeof ROLES)[number];
@@ -156,6 +157,9 @@ export class ClientApi {
       case ModelProvider.XAI:
         this.llm = new XAIApi();
         break;
+      case ModelProvider.GLM:
+        this.llm = new GLMApi();
+        break;
       default:
         this.llm = new ChatGPTApi();
     }
@@ -244,6 +248,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
   const isMoonshot = modelConfig.providerName === ServiceProvider.Moonshot;
   const isIflytek = modelConfig.providerName === ServiceProvider.Iflytek;
   const isXAI = modelConfig.providerName === ServiceProvider.XAI;
+  const isGLM = modelConfig.providerName === ServiceProvider.GLM;
   const isEnabledAccessControl = accessStore.enabledAccessControl();
   const apiKey = isGoogle
     ? accessStore.googleApiKey
@@ -259,6 +264,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
         ? accessStore.moonshotApiKey
         : isXAI
         ? accessStore.xaiApiKey
+        : isGLM
+        ? accessStore.glmApiKey
         : isIflytek
         ? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
          ?
accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret @@ -274,6 +281,7 @@ export function getHeaders(ignoreHeaders: boolean = false) { isMoonshot, isIflytek, isXAI, + isGLM, apiKey, isEnabledAccessControl, }; @@ -338,6 +346,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi { return new ClientApi(ModelProvider.Iflytek); case ServiceProvider.XAI: return new ClientApi(ModelProvider.XAI); + case ServiceProvider.GLM: + return new ClientApi(ModelProvider.GLM); default: return new ClientApi(ModelProvider.GPT); } diff --git a/app/client/platforms/glm.ts b/app/client/platforms/glm.ts new file mode 100644 index 00000000000..b88272ae175 --- /dev/null +++ b/app/client/platforms/glm.ts @@ -0,0 +1,192 @@ +"use client"; +import { ApiPath, GLM_BASE_URL, GLM, REQUEST_TIMEOUT_MS } from "@/app/constant"; +import { + useAccessStore, + useAppConfig, + useChatStore, + ChatMessageTool, + usePluginStore, +} from "@/app/store"; +import { stream } from "@/app/utils/chat"; +import { + ChatOptions, + getHeaders, + LLMApi, + LLMModel, + SpeechOptions, +} from "../api"; +import { getClientConfig } from "@/app/config/client"; +import { getMessageTextContent } from "@/app/utils"; +import { RequestPayload } from "./openai"; +import { fetch } from "@/app/utils/stream"; + +export class GLMApi implements LLMApi { + private disableListModels = true; + + path(path: string): string { + const accessStore = useAccessStore.getState(); + + let baseUrl = ""; + + if (accessStore.useCustomConfig) { + baseUrl = accessStore.glmUrl; + } + + if (baseUrl.length === 0) { + const isApp = !!getClientConfig()?.isApp; + const apiPath = ApiPath.GLM; + baseUrl = isApp ? GLM_BASE_URL : apiPath; + } + + if (baseUrl.endsWith("/")) { + baseUrl = baseUrl.slice(0, baseUrl.length - 1); + } + if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.GLM)) { + baseUrl = "https://" + baseUrl; + } + + console.log("[Proxy Endpoint] ", baseUrl, path); + + return [baseUrl, path].join("/"); + } + + extractMessage(res: any) { + return res.choices?.at(0)?.message?.content ?? 
""; + } + + speech(options: SpeechOptions): Promise { + throw new Error("Method not implemented."); + } + + async chat(options: ChatOptions) { + const messages: ChatOptions["messages"] = []; + for (const v of options.messages) { + const content = getMessageTextContent(v); + messages.push({ role: v.role, content }); + } + + const modelConfig = { + ...useAppConfig.getState().modelConfig, + ...useChatStore.getState().currentSession().mask.modelConfig, + ...{ + model: options.config.model, + providerName: options.config.providerName, + }, + }; + + const requestPayload: RequestPayload = { + messages, + stream: options.config.stream, + model: modelConfig.model, + temperature: modelConfig.temperature, + presence_penalty: modelConfig.presence_penalty, + frequency_penalty: modelConfig.frequency_penalty, + top_p: modelConfig.top_p, + }; + + console.log("[Request] glm payload: ", requestPayload); + + const shouldStream = !!options.config.stream; + const controller = new AbortController(); + options.onController?.(controller); + + try { + const chatPath = this.path(GLM.ChatPath); + const chatPayload = { + method: "POST", + body: JSON.stringify(requestPayload), + signal: controller.signal, + headers: getHeaders(), + }; + + // make a fetch request + const requestTimeoutId = setTimeout( + () => controller.abort(), + REQUEST_TIMEOUT_MS, + ); + + if (shouldStream) { + const [tools, funcs] = usePluginStore + .getState() + .getAsTools( + useChatStore.getState().currentSession().mask?.plugin || [], + ); + return stream( + chatPath, + requestPayload, + getHeaders(), + tools as any, + funcs, + controller, + // parseSSE + (text: string, runTools: ChatMessageTool[]) => { + // console.log("parseSSE", text, runTools); + const json = JSON.parse(text); + const choices = json.choices as Array<{ + delta: { + content: string; + tool_calls: ChatMessageTool[]; + }; + }>; + const tool_calls = choices[0]?.delta?.tool_calls; + if (tool_calls?.length > 0) { + const index = tool_calls[0]?.index; + const id = tool_calls[0]?.id; + const args = tool_calls[0]?.function?.arguments; + if (id) { + runTools.push({ + id, + type: tool_calls[0]?.type, + function: { + name: tool_calls[0]?.function?.name as string, + arguments: args, + }, + }); + } else { + // @ts-ignore + runTools[index]["function"]["arguments"] += args; + } + } + return choices[0]?.delta?.content; + }, + // processToolMessage, include tool_calls message and tool call results + ( + requestPayload: RequestPayload, + toolCallMessage: any, + toolCallResult: any[], + ) => { + // @ts-ignore + requestPayload?.messages?.splice( + // @ts-ignore + requestPayload?.messages?.length, + 0, + toolCallMessage, + ...toolCallResult, + ); + }, + options, + ); + } else { + const res = await fetch(chatPath, chatPayload); + clearTimeout(requestTimeoutId); + + const resJson = await res.json(); + const message = this.extractMessage(resJson); + options.onFinish(message); + } + } catch (e) { + console.log("[Request] failed to make a chat request", e); + options.onError?.(e as Error); + } + } + async usage() { + return { + used: 0, + total: 0, + }; + } + + async models(): Promise { + return []; + } +} diff --git a/app/components/settings.tsx b/app/components/settings.tsx index 666caece838..e5859e716c3 100644 --- a/app/components/settings.tsx +++ b/app/components/settings.tsx @@ -72,6 +72,7 @@ import { Stability, Iflytek, SAAS_CHAT_URL, + GLM, } from "../constant"; import { Prompt, SearchService, usePromptStore } from "../store/prompt"; import { ErrorBoundary } from "./error"; @@ -1234,6 +1235,45 
@@ export function Settings() { ); + const glmConfigComponent = accessStore.provider === ServiceProvider.GLM && ( + <> + + + accessStore.update( + (access) => (access.glmUrl = e.currentTarget.value), + ) + } + > + + + { + accessStore.update( + (access) => (access.glmApiKey = e.currentTarget.value), + ); + }} + /> + + + ); + const stabilityConfigComponent = accessStore.provider === ServiceProvider.Stability && ( <> @@ -1693,6 +1733,7 @@ export function Settings() { {stabilityConfigComponent} {lflytekConfigComponent} {XAIConfigComponent} + {glmConfigComponent} )} diff --git a/app/config/server.ts b/app/config/server.ts index eac4ba0cf33..b9a68ce4d48 100644 --- a/app/config/server.ts +++ b/app/config/server.ts @@ -75,6 +75,10 @@ declare global { XAI_URL?: string; XAI_API_KEY?: string; + // glm only + GLM_URL?: string; + GLM_API_KEY?: string; + // custom template for preprocessing user input DEFAULT_INPUT_TEMPLATE?: string; } @@ -151,6 +155,7 @@ export const getServerSideConfig = () => { const isMoonshot = !!process.env.MOONSHOT_API_KEY; const isIflytek = !!process.env.IFLYTEK_API_KEY; const isXAI = !!process.env.XAI_API_KEY; + const isGLM = !!process.env.GLM_API_KEY; // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? ""; // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim()); // const randomIndex = Math.floor(Math.random() * apiKeys.length); @@ -217,6 +222,10 @@ export const getServerSideConfig = () => { xaiUrl: process.env.XAI_URL, xaiApiKey: getApiKey(process.env.XAI_API_KEY), + isGLM, + glmUrl: process.env.GLM_URL, + glmApiKey: getApiKey(process.env.GLM_API_KEY), + cloudflareAccountId: process.env.CLOUDFLARE_ACCOUNT_ID, cloudflareKVNamespaceId: process.env.CLOUDFLARE_KV_NAMESPACE_ID, cloudflareKVApiKey: getApiKey(process.env.CLOUDFLARE_KV_API_KEY), diff --git a/app/constant.ts b/app/constant.ts index 9774bb594dd..d58bc3935b3 100644 --- a/app/constant.ts +++ b/app/constant.ts @@ -30,6 +30,8 @@ export const IFLYTEK_BASE_URL = "https://spark-api-open.xf-yun.com"; export const XAI_BASE_URL = "https://api.x.ai"; +export const GLM_BASE_URL = "https://open.bigmodel.cn"; + export const CACHE_URL_PREFIX = "/api/cache"; export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`; @@ -62,6 +64,7 @@ export enum ApiPath { Stability = "/api/stability", Artifacts = "/api/artifacts", XAI = "/api/xai", + GLM = "/api/glm", } export enum SlotID { @@ -115,6 +118,7 @@ export enum ServiceProvider { Stability = "Stability", Iflytek = "Iflytek", XAI = "XAI", + GLM = "GLM", } // Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings @@ -138,6 +142,7 @@ export enum ModelProvider { Moonshot = "Moonshot", Iflytek = "Iflytek", XAI = "XAI", + GLM = "GLM", } export const Stability = { @@ -225,6 +230,11 @@ export const XAI = { ChatPath: "v1/chat/completions", }; +export const GLM = { + ExampleEndpoint: GLM_BASE_URL, + ChatPath: "/api/paas/v4/chat/completions", +}; + export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang // export const DEFAULT_SYSTEM_TEMPLATE = ` // You are ChatGPT, a large language model trained by {{ServiceProvider}}. 
@@ -376,6 +386,17 @@ const iflytekModels = [ const xAIModes = ["grok-beta"]; +const glmModels = [ + "glm-4-plus", + "glm-4-0520", + "glm-4", + "glm-4-air", + "glm-4-airx", + "glm-4-long", + "glm-4-flashx", + "glm-4-flash", +]; + let seq = 1000; // 内置的模型序号生成器从1000开始 export const DEFAULT_MODELS = [ ...openaiModels.map((name) => ({ @@ -499,6 +520,17 @@ export const DEFAULT_MODELS = [ sorted: 11, }, })), + ...glmModels.map((name) => ({ + name, + available: true, + sorted: seq++, + provider: { + id: "glm", + providerName: "GLM", + providerType: "glm", + sorted: 12, + }, + })), ] as const; export const CHAT_PAGE_SIZE = 15; diff --git a/app/locales/cn.ts b/app/locales/cn.ts index 006fc81620d..92aaf62286e 100644 --- a/app/locales/cn.ts +++ b/app/locales/cn.ts @@ -473,6 +473,17 @@ const cn = { SubTitle: "样例:", }, }, + GLM: { + ApiKey: { + Title: "接口密钥", + SubTitle: "使用自定义 GLM API Key", + Placeholder: "GLM API Key", + }, + Endpoint: { + Title: "接口地址", + SubTitle: "样例:", + }, + }, Stability: { ApiKey: { Title: "接口密钥", diff --git a/app/locales/en.ts b/app/locales/en.ts index 7204bd94696..d691925c49c 100644 --- a/app/locales/en.ts +++ b/app/locales/en.ts @@ -457,6 +457,17 @@ const en: LocaleType = { SubTitle: "Example: ", }, }, + GLM: { + ApiKey: { + Title: "GLM API Key", + SubTitle: "Use a custom GLM API Key", + Placeholder: "GLM API Key", + }, + Endpoint: { + Title: "Endpoint Address", + SubTitle: "Example: ", + }, + }, Stability: { ApiKey: { Title: "Stability API Key", diff --git a/app/store/access.ts b/app/store/access.ts index b3d412a2daf..9cc420fdf01 100644 --- a/app/store/access.ts +++ b/app/store/access.ts @@ -14,6 +14,7 @@ import { STABILITY_BASE_URL, IFLYTEK_BASE_URL, XAI_BASE_URL, + GLM_BASE_URL, } from "../constant"; import { getHeaders } from "../client/api"; import { getClientConfig } from "../config/client"; @@ -47,6 +48,8 @@ const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek; const DEFAULT_XAI_URL = isApp ? XAI_BASE_URL : ApiPath.XAI; +const DEFAULT_GLM_URL = isApp ? 
GLM_BASE_URL : ApiPath.GLM; + const DEFAULT_ACCESS_STATE = { accessCode: "", useCustomConfig: false, @@ -108,6 +111,10 @@ const DEFAULT_ACCESS_STATE = { xaiUrl: DEFAULT_XAI_URL, xaiApiKey: "", + // glm + glmUrl: DEFAULT_GLM_URL, + glmApiKey: "", + // server config needCode: true, hideUserApiKey: false, @@ -180,6 +187,10 @@ export const useAccessStore = createPersistStore( return ensure(get(), ["xaiApiKey"]); }, + isValidGLM() { + return ensure(get(), ["glmApiKey"]); + }, + isAuthorized() { this.fetch(); @@ -196,6 +207,7 @@ export const useAccessStore = createPersistStore( this.isValidMoonshot() || this.isValidIflytek() || this.isValidXAI() || + this.isValidGLM() || !this.enabledAccessControl() || (this.enabledAccessControl() && ensure(get(), ["accessCode"])) ); From d3f0a77830073684dd8da25e34d5d8eb0a94ecdb Mon Sep 17 00:00:00 2001 From: DDMeaqua Date: Thu, 31 Oct 2024 11:23:06 +0800 Subject: [PATCH 2/5] chore: update Provider --- app/constant.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/app/constant.ts b/app/constant.ts index d58bc3935b3..b8b25b7ab6a 100644 --- a/app/constant.ts +++ b/app/constant.ts @@ -118,7 +118,7 @@ export enum ServiceProvider { Stability = "Stability", Iflytek = "Iflytek", XAI = "XAI", - GLM = "GLM", + GLM = "ChatGLM", } // Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings @@ -142,7 +142,7 @@ export enum ModelProvider { Moonshot = "Moonshot", Iflytek = "Iflytek", XAI = "XAI", - GLM = "GLM", + GLM = "ChatGLM", } export const Stability = { @@ -525,9 +525,9 @@ export const DEFAULT_MODELS = [ available: true, sorted: seq++, provider: { - id: "glm", - providerName: "GLM", - providerType: "glm", + id: "chatglm", + providerName: "ChatGLM", + providerType: "chatglm", sorted: 12, }, })), From 7a8d557ea37e9b02fc26d8416fc631f4b7adda56 Mon Sep 17 00:00:00 2001 From: DDMeaqua Date: Thu, 31 Oct 2024 11:37:19 +0800 Subject: [PATCH 3/5] =?UTF-8?q?chore:=20=E5=BC=80=E5=90=AF=E6=8F=92?= =?UTF-8?q?=E4=BB=B6?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/utils.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/utils.ts b/app/utils.ts index d8fc46330d1..91f11c0c2e0 100644 --- a/app/utils.ts +++ b/app/utils.ts @@ -278,7 +278,8 @@ export function showPlugins(provider: ServiceProvider, model: string) { if ( provider == ServiceProvider.OpenAI || provider == ServiceProvider.Azure || - provider == ServiceProvider.Moonshot + provider == ServiceProvider.Moonshot || + provider == ServiceProvider.GLM ) { return true; } From afe12c212e51bd2d27c5db5700f881c32a0bd3ba Mon Sep 17 00:00:00 2001 From: DDMeaqua Date: Fri, 1 Nov 2024 13:53:43 +0800 Subject: [PATCH 4/5] chore: update --- app/api/[provider]/[...path]/route.ts | 6 ++--- app/api/auth.ts | 4 ++-- app/api/glm.ts | 8 +++---- app/client/api.ts | 18 +++++++-------- app/client/platforms/glm.ts | 19 ++++++++++------ app/components/settings.tsx | 32 ++++++++++++++------------- app/config/server.ts | 14 ++++++------ app/constant.ts | 16 +++++++------- app/locales/cn.ts | 6 ++--- app/locales/en.ts | 8 +++---- app/store/access.ts | 16 +++++++------- app/utils.ts | 2 +- 12 files changed, 78 insertions(+), 71 deletions(-) diff --git a/app/api/[provider]/[...path]/route.ts b/app/api/[provider]/[...path]/route.ts index 78836cc528e..3017fd37180 100644 --- a/app/api/[provider]/[...path]/route.ts +++ b/app/api/[provider]/[...path]/route.ts @@ -11,7 +11,7 @@ import { handle as moonshotHandler } from "../../moonshot"; 
import { handle as stabilityHandler } from "../../stability"; import { handle as iflytekHandler } from "../../iflytek"; import { handle as xaiHandler } from "../../xai"; -import { handle as glmHandler } from "../../glm"; +import { handle as chatglmHandler } from "../../glm"; import { handle as proxyHandler } from "../../proxy"; async function handle( @@ -42,8 +42,8 @@ async function handle( return iflytekHandler(req, { params }); case ApiPath.XAI: return xaiHandler(req, { params }); - case ApiPath.GLM: - return glmHandler(req, { params }); + case ApiPath.ChatGLM: + return chatglmHandler(req, { params }); case ApiPath.OpenAI: return openaiHandler(req, { params }); default: diff --git a/app/api/auth.ts b/app/api/auth.ts index db920fc283f..6703b64bd15 100644 --- a/app/api/auth.ts +++ b/app/api/auth.ts @@ -95,8 +95,8 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) { case ModelProvider.XAI: systemApiKey = serverConfig.xaiApiKey; break; - case ModelProvider.GLM: - systemApiKey = serverConfig.glmApiKey; + case ModelProvider.ChatGLM: + systemApiKey = serverConfig.chatglmApiKey; break; case ModelProvider.GPT: default: diff --git a/app/api/glm.ts b/app/api/glm.ts index d40c4b6a8c2..ea7a766bd4b 100644 --- a/app/api/glm.ts +++ b/app/api/glm.ts @@ -1,6 +1,6 @@ import { getServerSideConfig } from "@/app/config/server"; import { - GLM_BASE_URL, + CHATGLM_BASE_URL, ApiPath, ModelProvider, ServiceProvider, @@ -42,9 +42,9 @@ async function request(req: NextRequest) { const controller = new AbortController(); // alibaba use base url or just remove the path - let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.GLM, ""); + let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.ChatGLM, ""); - let baseUrl = serverConfig.glmUrl || GLM_BASE_URL; + let baseUrl = serverConfig.chatglmUrl || CHATGLM_BASE_URL; if (!baseUrl.startsWith("http")) { baseUrl = `https://${baseUrl}`; @@ -92,7 +92,7 @@ async function request(req: NextRequest) { isModelAvailableInServer( serverConfig.customModels, jsonBody?.model as string, - ServiceProvider.GLM as string, + ServiceProvider.ChatGLM as string, ) ) { return NextResponse.json( diff --git a/app/client/api.ts b/app/client/api.ts index 4082d085c0b..8fecf841fe3 100644 --- a/app/client/api.ts +++ b/app/client/api.ts @@ -21,7 +21,7 @@ import { HunyuanApi } from "./platforms/tencent"; import { MoonshotApi } from "./platforms/moonshot"; import { SparkApi } from "./platforms/iflytek"; import { XAIApi } from "./platforms/xai"; -import { GLMApi } from "./platforms/glm"; +import { ChatGLMApi } from "./platforms/glm"; export const ROLES = ["system", "user", "assistant"] as const; export type MessageRole = (typeof ROLES)[number]; @@ -157,8 +157,8 @@ export class ClientApi { case ModelProvider.XAI: this.llm = new XAIApi(); break; - case ModelProvider.GLM: - this.llm = new GLMApi(); + case ModelProvider.ChatGLM: + this.llm = new ChatGLMApi(); break; default: this.llm = new ChatGPTApi(); @@ -248,7 +248,7 @@ export function getHeaders(ignoreHeaders: boolean = false) { const isMoonshot = modelConfig.providerName === ServiceProvider.Moonshot; const isIflytek = modelConfig.providerName === ServiceProvider.Iflytek; const isXAI = modelConfig.providerName === ServiceProvider.XAI; - const isGLM = modelConfig.providerName === ServiceProvider.GLM; + const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM; const isEnabledAccessControl = accessStore.enabledAccessControl(); const apiKey = isGoogle ? 
accessStore.googleApiKey @@ -264,8 +264,8 @@ export function getHeaders(ignoreHeaders: boolean = false) { ? accessStore.moonshotApiKey : isXAI ? accessStore.xaiApiKey - : isGLM - ? accessStore.glmApiKey + : isChatGLM + ? accessStore.chatglmApiKey : isIflytek ? accessStore.iflytekApiKey && accessStore.iflytekApiSecret ? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret @@ -281,7 +281,7 @@ export function getHeaders(ignoreHeaders: boolean = false) { isMoonshot, isIflytek, isXAI, - isGLM, + isChatGLM, apiKey, isEnabledAccessControl, }; @@ -346,8 +346,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi { return new ClientApi(ModelProvider.Iflytek); case ServiceProvider.XAI: return new ClientApi(ModelProvider.XAI); - case ServiceProvider.GLM: - return new ClientApi(ModelProvider.GLM); + case ServiceProvider.ChatGLM: + return new ClientApi(ModelProvider.ChatGLM); default: return new ClientApi(ModelProvider.GPT); } diff --git a/app/client/platforms/glm.ts b/app/client/platforms/glm.ts index b88272ae175..10696ee82d9 100644 --- a/app/client/platforms/glm.ts +++ b/app/client/platforms/glm.ts @@ -1,5 +1,10 @@ "use client"; -import { ApiPath, GLM_BASE_URL, GLM, REQUEST_TIMEOUT_MS } from "@/app/constant"; +import { + ApiPath, + CHATGLM_BASE_URL, + ChatGLM, + REQUEST_TIMEOUT_MS, +} from "@/app/constant"; import { useAccessStore, useAppConfig, @@ -20,7 +25,7 @@ import { getMessageTextContent } from "@/app/utils"; import { RequestPayload } from "./openai"; import { fetch } from "@/app/utils/stream"; -export class GLMApi implements LLMApi { +export class ChatGLMApi implements LLMApi { private disableListModels = true; path(path: string): string { @@ -29,19 +34,19 @@ export class GLMApi implements LLMApi { let baseUrl = ""; if (accessStore.useCustomConfig) { - baseUrl = accessStore.glmUrl; + baseUrl = accessStore.chatglmUrl; } if (baseUrl.length === 0) { const isApp = !!getClientConfig()?.isApp; - const apiPath = ApiPath.GLM; - baseUrl = isApp ? GLM_BASE_URL : apiPath; + const apiPath = ApiPath.ChatGLM; + baseUrl = isApp ? 
CHATGLM_BASE_URL : apiPath; } if (baseUrl.endsWith("/")) { baseUrl = baseUrl.slice(0, baseUrl.length - 1); } - if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.GLM)) { + if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.ChatGLM)) { baseUrl = "https://" + baseUrl; } @@ -91,7 +96,7 @@ export class GLMApi implements LLMApi { options.onController?.(controller); try { - const chatPath = this.path(GLM.ChatPath); + const chatPath = this.path(ChatGLM.ChatPath); const chatPayload = { method: "POST", body: JSON.stringify(requestPayload), diff --git a/app/components/settings.tsx b/app/components/settings.tsx index e5859e716c3..e2666b5512c 100644 --- a/app/components/settings.tsx +++ b/app/components/settings.tsx @@ -72,7 +72,7 @@ import { Stability, Iflytek, SAAS_CHAT_URL, - GLM, + ChatGLM, } from "../constant"; import { Prompt, SearchService, usePromptStore } from "../store/prompt"; import { ErrorBoundary } from "./error"; @@ -1235,38 +1235,40 @@ export function Settings() { ); - const glmConfigComponent = accessStore.provider === ServiceProvider.GLM && ( + const chatglmConfigComponent = accessStore.provider === + ServiceProvider.ChatGLM && ( <> accessStore.update( - (access) => (access.glmUrl = e.currentTarget.value), + (access) => (access.chatglmUrl = e.currentTarget.value), ) } > { accessStore.update( - (access) => (access.glmApiKey = e.currentTarget.value), + (access) => (access.chatglmApiKey = e.currentTarget.value), ); }} /> @@ -1733,7 +1735,7 @@ export function Settings() { {stabilityConfigComponent} {lflytekConfigComponent} {XAIConfigComponent} - {glmConfigComponent} + {chatglmConfigComponent} )} diff --git a/app/config/server.ts b/app/config/server.ts index b9a68ce4d48..485f950da03 100644 --- a/app/config/server.ts +++ b/app/config/server.ts @@ -75,9 +75,9 @@ declare global { XAI_URL?: string; XAI_API_KEY?: string; - // glm only - GLM_URL?: string; - GLM_API_KEY?: string; + // chatglm only + CHATGLM_URL?: string; + CHATGLM_API_KEY?: string; // custom template for preprocessing user input DEFAULT_INPUT_TEMPLATE?: string; @@ -155,7 +155,7 @@ export const getServerSideConfig = () => { const isMoonshot = !!process.env.MOONSHOT_API_KEY; const isIflytek = !!process.env.IFLYTEK_API_KEY; const isXAI = !!process.env.XAI_API_KEY; - const isGLM = !!process.env.GLM_API_KEY; + const isChatGLM = !!process.env.CHATGLM_API_KEY; // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? 
""; // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim()); // const randomIndex = Math.floor(Math.random() * apiKeys.length); @@ -222,9 +222,9 @@ export const getServerSideConfig = () => { xaiUrl: process.env.XAI_URL, xaiApiKey: getApiKey(process.env.XAI_API_KEY), - isGLM, - glmUrl: process.env.GLM_URL, - glmApiKey: getApiKey(process.env.GLM_API_KEY), + isChatGLM, + chatglmUrl: process.env.CHATGLM_URL, + chatglmApiKey: getApiKey(process.env.CHATGLM_API_KEY), cloudflareAccountId: process.env.CLOUDFLARE_ACCOUNT_ID, cloudflareKVNamespaceId: process.env.CLOUDFLARE_KV_NAMESPACE_ID, diff --git a/app/constant.ts b/app/constant.ts index b8b25b7ab6a..1a84e5c84f2 100644 --- a/app/constant.ts +++ b/app/constant.ts @@ -30,7 +30,7 @@ export const IFLYTEK_BASE_URL = "https://spark-api-open.xf-yun.com"; export const XAI_BASE_URL = "https://api.x.ai"; -export const GLM_BASE_URL = "https://open.bigmodel.cn"; +export const CHATGLM_BASE_URL = "https://open.bigmodel.cn"; export const CACHE_URL_PREFIX = "/api/cache"; export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`; @@ -64,7 +64,7 @@ export enum ApiPath { Stability = "/api/stability", Artifacts = "/api/artifacts", XAI = "/api/xai", - GLM = "/api/glm", + ChatGLM = "/api/chatglm", } export enum SlotID { @@ -118,7 +118,7 @@ export enum ServiceProvider { Stability = "Stability", Iflytek = "Iflytek", XAI = "XAI", - GLM = "ChatGLM", + ChatGLM = "ChatGLM", } // Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings @@ -142,7 +142,7 @@ export enum ModelProvider { Moonshot = "Moonshot", Iflytek = "Iflytek", XAI = "XAI", - GLM = "ChatGLM", + ChatGLM = "ChatGLM", } export const Stability = { @@ -230,8 +230,8 @@ export const XAI = { ChatPath: "v1/chat/completions", }; -export const GLM = { - ExampleEndpoint: GLM_BASE_URL, +export const ChatGLM = { + ExampleEndpoint: CHATGLM_BASE_URL, ChatPath: "/api/paas/v4/chat/completions", }; @@ -386,7 +386,7 @@ const iflytekModels = [ const xAIModes = ["grok-beta"]; -const glmModels = [ +const chatglmModels = [ "glm-4-plus", "glm-4-0520", "glm-4", @@ -520,7 +520,7 @@ export const DEFAULT_MODELS = [ sorted: 11, }, })), - ...glmModels.map((name) => ({ + ...chatglmModels.map((name) => ({ name, available: true, sorted: seq++, diff --git a/app/locales/cn.ts b/app/locales/cn.ts index 92aaf62286e..9712593c61b 100644 --- a/app/locales/cn.ts +++ b/app/locales/cn.ts @@ -473,11 +473,11 @@ const cn = { SubTitle: "样例:", }, }, - GLM: { + ChatGLM: { ApiKey: { Title: "接口密钥", - SubTitle: "使用自定义 GLM API Key", - Placeholder: "GLM API Key", + SubTitle: "使用自定义 ChatGLM API Key", + Placeholder: "ChatGLM API Key", }, Endpoint: { Title: "接口地址", diff --git a/app/locales/en.ts b/app/locales/en.ts index d691925c49c..ac8d3aed2a3 100644 --- a/app/locales/en.ts +++ b/app/locales/en.ts @@ -457,11 +457,11 @@ const en: LocaleType = { SubTitle: "Example: ", }, }, - GLM: { + ChatGLM: { ApiKey: { - Title: "GLM API Key", - SubTitle: "Use a custom GLM API Key", - Placeholder: "GLM API Key", + Title: "ChatGLM API Key", + SubTitle: "Use a custom ChatGLM API Key", + Placeholder: "ChatGLM API Key", }, Endpoint: { Title: "Endpoint Address", diff --git a/app/store/access.ts b/app/store/access.ts index 9cc420fdf01..3b0e6357bc1 100644 --- a/app/store/access.ts +++ b/app/store/access.ts @@ -14,7 +14,7 @@ import { STABILITY_BASE_URL, IFLYTEK_BASE_URL, XAI_BASE_URL, - GLM_BASE_URL, + CHATGLM_BASE_URL, } from "../constant"; import { getHeaders } from "../client/api"; import { getClientConfig } from "../config/client"; @@ -48,7 +48,7 @@ 
const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek; const DEFAULT_XAI_URL = isApp ? XAI_BASE_URL : ApiPath.XAI; -const DEFAULT_GLM_URL = isApp ? GLM_BASE_URL : ApiPath.GLM; +const DEFAULT_CHATGLM_URL = isApp ? CHATGLM_BASE_URL : ApiPath.ChatGLM; const DEFAULT_ACCESS_STATE = { accessCode: "", @@ -111,9 +111,9 @@ const DEFAULT_ACCESS_STATE = { xaiUrl: DEFAULT_XAI_URL, xaiApiKey: "", - // glm - glmUrl: DEFAULT_GLM_URL, - glmApiKey: "", + // chatglm + chatglmUrl: DEFAULT_CHATGLM_URL, + chatglmApiKey: "", // server config needCode: true, @@ -187,8 +187,8 @@ export const useAccessStore = createPersistStore( return ensure(get(), ["xaiApiKey"]); }, - isValidGLM() { - return ensure(get(), ["glmApiKey"]); + isValidChatGLM() { + return ensure(get(), ["chatglmApiKey"]); }, isAuthorized() { @@ -207,7 +207,7 @@ export const useAccessStore = createPersistStore( this.isValidMoonshot() || this.isValidIflytek() || this.isValidXAI() || - this.isValidGLM() || + this.isValidChatGLM() || !this.enabledAccessControl() || (this.enabledAccessControl() && ensure(get(), ["accessCode"])) ); diff --git a/app/utils.ts b/app/utils.ts index 91f11c0c2e0..c444f8ef422 100644 --- a/app/utils.ts +++ b/app/utils.ts @@ -279,7 +279,7 @@ export function showPlugins(provider: ServiceProvider, model: string) { provider == ServiceProvider.OpenAI || provider == ServiceProvider.Azure || provider == ServiceProvider.Moonshot || - provider == ServiceProvider.GLM + provider == ServiceProvider.ChatGLM ) { return true; } From 4d75b23ed1b41a042e28805e46ad2b5c8111cc3d Mon Sep 17 00:00:00 2001 From: DDMeaqua Date: Fri, 1 Nov 2024 14:15:12 +0800 Subject: [PATCH 5/5] fix: ts error --- app/api/glm.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/api/glm.ts b/app/api/glm.ts index ea7a766bd4b..3625b9f7bf9 100644 --- a/app/api/glm.ts +++ b/app/api/glm.ts @@ -22,7 +22,7 @@ export async function handle( return NextResponse.json({ body: "OK" }, { status: 200 }); } - const authResult = auth(req, ModelProvider.GLM); + const authResult = auth(req, ModelProvider.ChatGLM); if (authResult.error) { return NextResponse.json(authResult, { status: 401,