Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-01-18 16:54:25 +08:00)
feat: support baidu model
This commit is contained in:
parent 2d1f522aaf
commit 785d3748e1
.gitignore (vendored), 2 changed lines
@@ -43,4 +43,4 @@ dev
 .env
 
 *.key
-*.key.pub
+*.key.pub
@@ -73,6 +73,9 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
      case ModelProvider.Claude:
        systemApiKey = serverConfig.anthropicApiKey;
        break;
      case ModelProvider.Ernie:
        systemApiKey = serverConfig.baiduApiKey;
        break;
      case ModelProvider.GPT:
      default:
        if (req.nextUrl.pathname.includes("azure/deployments")) {
app/api/baidu/[...path]/route.ts (new file), 176 added lines
@@ -0,0 +1,176 @@
import { getServerSideConfig } from "@/app/config/server";
import {
  BAIDU_BASE_URL,
  ApiPath,
  ModelProvider,
  BAIDU_OATUH_URL,
  ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelAvailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[Baidu Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const authResult = auth(req, ModelProvider.Ernie);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[Baidu] ", e);
    return NextResponse.json(prettyObject(e));
  }
}

export const GET = handle;
export const POST = handle;

export const runtime = "edge";
export const preferredRegion = [
  "arn1",
  "bom1",
  "cdg1",
  "cle1",
  "cpt1",
  "dub1",
  "fra1",
  "gru1",
  "hnd1",
  "iad1",
  "icn1",
  "kix1",
  "lhr1",
  "pdx1",
  "sfo1",
  "sin1",
  "syd1",
];

async function request(req: NextRequest) {
  const controller = new AbortController();

  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Baidu, "");

  let baseUrl = serverConfig.baiduUrl || BAIDU_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const { access_token } = await getAccessToken();
  const fetchUrl = `${baseUrl}${path}?access_token=${access_token}`;

  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
    },
    method: req.method,
    body: req.body,
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some requests to some models
  if (serverConfig.customModels && req.body) {
    try {
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (
        isModelAvailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.Baidu as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error(`[Baidu] filter`, e);
    }
  }
  console.log("[Baidu request]", fetchOptions.headers, req.method);
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    console.log("[Baidu response]", res.status, " ", res.headers, res.url);
    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}

/**
 * Generate an auth signature (access token) from the AK/SK pair
 * @return access token information
 */
async function getAccessToken(): Promise<{
  access_token: string;
  expires_in: number;
  error?: number;
}> {
  const AK = serverConfig.baiduApiKey;
  const SK = serverConfig.baiduSecretKey;
  const res = await fetch(
    `${BAIDU_OATUH_URL}?grant_type=client_credentials&client_id=${AK}&client_secret=${SK}`,
    {
      method: "POST",
    },
  );
  const resJson = await res.json();
  return resJson;
}
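Worth noting how this proxy assembles the upstream request: the /api/baidu prefix is stripped from the incoming path, the remainder is appended to the configured base URL, and the OAuth access token is attached as a query parameter. A rough sketch of the resulting URL under the default base URL (the model name here is only an example):

    // assuming BAIDU_URL is unset, so BAIDU_BASE_URL applies
    const path = "/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-8k"; // example model
    const fetchUrl = `${BAIDU_BASE_URL}${path}?access_token=${access_token}`;
    // => https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-3.5-8k?access_token=...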
@@ -9,6 +9,8 @@ import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
import { GeminiProApi } from "./platforms/google";
import { ClaudeApi } from "./platforms/anthropic";
import { ErnieApi } from "./platforms/baidu";

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];

@@ -104,6 +106,9 @@ export class ClientApi {
      case ModelProvider.Claude:
        this.llm = new ClaudeApi();
        break;
      case ModelProvider.Ernie:
        this.llm = new ErnieApi();
        break;
      default:
        this.llm = new ChatGPTApi();
    }
app/client/platforms/baidu.ts (new file), 252 added lines
@@ -0,0 +1,252 @@
"use client";
import {
  ApiPath,
  Baidu,
  DEFAULT_API_HOST,
  REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";

import {
  ChatOptions,
  getHeaders,
  LLMApi,
  LLMModel,
  MultimodalContent,
} from "../api";
import Locale from "../../locales";
import {
  EventStreamContentType,
  fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent, isVisionModel } from "@/app/utils";

export interface OpenAIListModelResponse {
  object: string;
  data: Array<{
    id: string;
    object: string;
    root: string;
  }>;
}

interface RequestPayload {
  messages: {
    role: "system" | "user" | "assistant";
    content: string | MultimodalContent[];
  }[];
  stream?: boolean;
  model: string;
  temperature: number;
  presence_penalty: number;
  frequency_penalty: number;
  top_p: number;
  max_tokens?: number;
}

export class ErnieApi implements LLMApi {
  path(path: string): string {
    const accessStore = useAccessStore.getState();

    let baseUrl = "";

    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.baiduUrl;
    }

    if (baseUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      baseUrl = isApp ? DEFAULT_API_HOST + "/api/proxy/baidu" : ApiPath.Baidu;
    }

    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Baidu)) {
      baseUrl = "https://" + baseUrl;
    }

    console.log("[Proxy Endpoint] ", baseUrl, path);

    return [baseUrl, path].join("/");
  }

  extractMessage(res: any) {
    return res.choices?.at(0)?.message?.content ?? "";
  }

  async chat(options: ChatOptions) {
    const visionModel = isVisionModel(options.config.model);
    const messages = options.messages.map((v) => ({
      role: v.role,
      content: visionModel ? v.content : getMessageTextContent(v),
    }));

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };

    const requestPayload: RequestPayload = {
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
      frequency_penalty: modelConfig.frequency_penalty,
      top_p: modelConfig.top_p,
    };

    console.log("[Request] Baidu payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.path(Baidu.ChatPath(modelConfig.model));
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // make a fetch request
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

      if (shouldStream) {
        let responseText = "";
        let remainText = "";
        let finished = false;

        // animate the response to make it look smooth
        function animateResponseText() {
          if (finished || controller.signal.aborted) {
            responseText += remainText;
            console.log("[Response Animation] finished");
            if (responseText?.length === 0) {
              options.onError?.(new Error("empty response from server"));
            }
            return;
          }

          if (remainText.length > 0) {
            const fetchCount = Math.max(1, Math.round(remainText.length / 60));
            const fetchText = remainText.slice(0, fetchCount);
            responseText += fetchText;
            remainText = remainText.slice(fetchCount);
            options.onUpdate?.(responseText, fetchText);
          }

          requestAnimationFrame(animateResponseText);
        }

        // start animation
        animateResponseText();

        const finish = () => {
          if (!finished) {
            finished = true;
            options.onFinish(responseText + remainText);
          }
        };

        controller.signal.onabort = finish;

        fetchEventSource(chatPath, {
          ...chatPayload,
          async onopen(res) {
            clearTimeout(requestTimeoutId);
            const contentType = res.headers.get("content-type");
            console.log("[Baidu] request response content type: ", contentType);

            if (contentType?.startsWith("text/plain")) {
              responseText = await res.clone().text();
              return finish();
            }

            if (
              !res.ok ||
              !res.headers
                .get("content-type")
                ?.startsWith(EventStreamContentType) ||
              res.status !== 200
            ) {
              const responseTexts = [responseText];
              let extraInfo = await res.clone().text();
              try {
                const resJson = await res.clone().json();
                extraInfo = prettyObject(resJson);
              } catch {}

              if (res.status === 401) {
                responseTexts.push(Locale.Error.Unauthorized);
              }

              if (extraInfo) {
                responseTexts.push(extraInfo);
              }

              responseText = responseTexts.join("\n\n");

              return finish();
            }
          },
          onmessage(msg) {
            if (msg.data === "[DONE]" || finished) {
              return finish();
            }
            const text = msg.data;
            try {
              const json = JSON.parse(text);
              const delta = json?.result;
              if (delta) {
                remainText += delta;
              }
            } catch (e) {
              console.error("[Request] parse error", text, msg);
            }
          },
          onclose() {
            finish();
          },
          onerror(e) {
            options.onError?.(e);
            throw e;
          },
          openWhenHidden: true,
        });
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    return [];
  }
}
export { Baidu };
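For orientation, a minimal usage sketch of the new client; the callback and field names follow ChatOptions from ../api, while the model name and message are placeholders, and in the app the class is not constructed directly but wired up through ClientApi(ModelProvider.Ernie) as shown in the app/client/api.ts hunk above:

    // sketch only: example model name and message
    const api = new ErnieApi();
    await api.chat({
      messages: [{ role: "user", content: "hello" }],
      config: { model: "ernie-3.5-8k", stream: true },
      onUpdate: (full, delta) => console.log("[delta]", delta),
      onFinish: (full) => console.log("[done]", full),
      onError: (e) => console.error(e),
    });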
@@ -321,6 +321,8 @@ export function PreviewActions(props: {
      api = new ClientApi(ModelProvider.GeminiPro);
    } else if (config.modelConfig.providerName == ServiceProvider.Anthropic) {
      api = new ClientApi(ModelProvider.Claude);
    } else if (config.modelConfig.providerName == ServiceProvider.Baidu) {
      api = new ClientApi(ModelProvider.Ernie);
    } else {
      api = new ClientApi(ModelProvider.GPT);
    }
@@ -175,6 +175,8 @@ export function useLoadData() {
      api = new ClientApi(ModelProvider.GeminiPro);
    } else if (config.modelConfig.providerName == ServiceProvider.Anthropic) {
      api = new ClientApi(ModelProvider.Claude);
    } else if (config.modelConfig.providerName == ServiceProvider.Baidu) {
      api = new ClientApi(ModelProvider.Ernie);
    } else {
      api = new ClientApi(ModelProvider.GPT);
    }
@@ -53,6 +53,7 @@ import Link from "next/link";
import {
  Anthropic,
  Azure,
  Baidu,
  Google,
  OPENAI_BASE_URL,
  Path,
@@ -1187,6 +1188,67 @@ export function Settings() {
            </ListItem>
          </>
        )}
        {accessStore.provider === ServiceProvider.Baidu && (
          <>
            <ListItem
              title={Locale.Settings.Access.Baidu.Endpoint.Title}
              subTitle={
                Locale.Settings.Access.Anthropic.Endpoint.SubTitle +
                Baidu.ExampleEndpoint
              }
            >
              <input
                type="text"
                value={accessStore.baiduUrl}
                placeholder={Baidu.ExampleEndpoint}
                onChange={(e) =>
                  accessStore.update(
                    (access) =>
                      (access.baiduUrl = e.currentTarget.value),
                  )
                }
              ></input>
            </ListItem>
            <ListItem
              title={Locale.Settings.Access.Baidu.ApiKey.Title}
              subTitle={Locale.Settings.Access.Baidu.ApiKey.SubTitle}
            >
              <PasswordInput
                value={accessStore.baiduApiKey}
                type="text"
                placeholder={
                  Locale.Settings.Access.Baidu.ApiKey.Placeholder
                }
                onChange={(e) => {
                  accessStore.update(
                    (access) =>
                      (access.baiduApiKey = e.currentTarget.value),
                  );
                }}
              />
            </ListItem>
            <ListItem
              title={Locale.Settings.Access.Baidu.SecretKey.Title}
              subTitle={
                Locale.Settings.Access.Baidu.SecretKey.SubTitle
              }
            >
              <PasswordInput
                value={accessStore.baiduSecretKey}
                type="text"
                placeholder={
                  Locale.Settings.Access.Baidu.SecretKey.Placeholder
                }
                onChange={(e) => {
                  accessStore.update(
                    (access) =>
                      (access.baiduSecretKey = e.currentTarget.value),
                  );
                }}
              />
            </ListItem>
          </>
        )}
      </>
    )}
  </>
@@ -35,6 +35,16 @@ declare global {
    // google tag manager
    GTM_ID?: string;

    // anthropic only
    ANTHROPIC_URL?: string;
    ANTHROPIC_API_KEY?: string;
    ANTHROPIC_API_VERSION?: string;

    // baidu only
    BAIDU_URL?: string;
    BAIDU_API_KEY?: string;
    BAIDU_SECRET_KEY?: string;

    // custom template for preprocessing user input
    DEFAULT_INPUT_TEMPLATE?: string;
  }
@@ -92,7 +102,7 @@ export const getServerSideConfig = () => {
  const isAzure = !!process.env.AZURE_URL;
  const isGoogle = !!process.env.GOOGLE_API_KEY;
  const isAnthropic = !!process.env.ANTHROPIC_API_KEY;

  const isBaidu = !!process.env.BAIDU_API_KEY;
  // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
  // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
  // const randomIndex = Math.floor(Math.random() * apiKeys.length);
@@ -124,6 +134,11 @@ export const getServerSideConfig = () => {
    anthropicApiVersion: process.env.ANTHROPIC_API_VERSION,
    anthropicUrl: process.env.ANTHROPIC_URL,

    isBaidu,
    baiduUrl: process.env.BAIDU_URL,
    baiduApiKey: getApiKey(process.env.BAIDU_API_KEY),
    baiduSecretKey: process.env.BAIDU_SECRET_KEY,

    gtmId: process.env.GTM_ID,

    needCode: ACCESS_CODES.size > 0,
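For self-hosted deployments, these map onto three new environment variables; a minimal .env sketch (the values are placeholders, and BAIDU_URL is optional since the proxy route falls back to BAIDU_BASE_URL):

    BAIDU_API_KEY=your-api-key
    BAIDU_SECRET_KEY=your-secret-key
    # optional, defaults to https://aip.baidubce.com
    BAIDU_URL=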
@@ -14,6 +14,10 @@ export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";

export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";

export const BAIDU_BASE_URL = "https://aip.baidubce.com";

export const BAIDU_OATUH_URL = `${BAIDU_BASE_URL}/oauth/2.0/token`;

export enum Path {
  Home = "/",
  Chat = "/chat",
@@ -28,6 +32,7 @@ export enum ApiPath {
  Azure = "/api/azure",
  OpenAI = "/api/openai",
  Anthropic = "/api/anthropic",
  Baidu = "/api/baidu",
}

export enum SlotID {
@@ -71,12 +76,14 @@ export enum ServiceProvider {
  Azure = "Azure",
  Google = "Google",
  Anthropic = "Anthropic",
  Baidu = "Baidu",
}

export enum ModelProvider {
  GPT = "GPT",
  GeminiPro = "GeminiPro",
  Claude = "Claude",
  Ernie = "Ernie",
}

export const Anthropic = {
@@ -104,6 +111,12 @@ export const Google = {
  ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
};

export const Baidu = {
  ExampleEndpoint: "https://aip.baidubce.com",
  ChatPath: (modelName: string) =>
    `/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/${modelName}`,
};

export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
// export const DEFAULT_SYSTEM_TEMPLATE = `
// You are ChatGPT, a large language model trained by {{ServiceProvider}}.
@@ -173,6 +186,16 @@ const anthropicModels = [
  "claude-3-5-sonnet-20240620",
];

const baiduModels = [
  "ernie-4.0-turbo-8k",
  "completions_pro=ernie-4.0-8k",
  "ernie-4.0-8k-preview",
  "completions_adv_pro=ernie-4.0-8k-preview-0518",
  "ernie-4.0-8k-latest",
  "completions=ernie-3.5-8k",
  "ernie-3.5-8k-0205",
];

export const DEFAULT_MODELS = [
  ...openaiModels.map((name) => ({
    name,
@@ -210,6 +233,15 @@ export const DEFAULT_MODELS = [
      providerType: "anthropic",
    },
  })),
  ...baiduModels.map((name) => ({
    name,
    available: true,
    provider: {
      id: "baidu",
      providerName: "Baidu",
      providerType: "baidu",
    },
  })),
] as const;

export const CHAT_PAGE_SIZE = 15;
@@ -347,6 +347,22 @@ const cn = {
        SubTitle: "选择一个特定的 API 版本",
      },
    },
    Baidu: {
      ApiKey: {
        Title: "接口密钥",
        SubTitle: "使用自定义 Baidu API Key 绕过密码访问限制",
        Placeholder: "Baidu API Key",
      },
      SecretKey: {
        Title: "接口密钥",
        SubTitle: "使用自定义 Baidu Secret Key 绕过密码访问限制",
        Placeholder: "Baidu Secret Key",
      },
      Endpoint: {
        Title: "接口地址",
        SubTitle: "样例:",
      },
    },
    CustomModel: {
      Title: "自定义模型名",
      SubTitle: "增加自定义模型可选项,使用英文逗号隔开",
@@ -47,6 +47,11 @@ const DEFAULT_ACCESS_STATE = {
  anthropicApiVersion: "2023-06-01",
  anthropicUrl: "",

  // baidu
  baiduUrl: "",
  baiduApiKey: "",
  baiduSecretKey: "",

  // server config
  needCode: true,
  hideUserApiKey: false,
@@ -83,6 +88,10 @@ export const useAccessStore = createPersistStore(
      return ensure(get(), ["anthropicApiKey"]);
    },

    isValidBaidu() {
      return ensure(get(), ["baiduApiKey", "baiduSecretKey"]);
    },

    isAuthorized() {
      this.fetch();
@@ -92,6 +101,7 @@ export const useAccessStore = createPersistStore(
        this.isValidAzure() ||
        this.isValidGoogle() ||
        this.isValidAnthropic() ||
        this.isValidBaidu() ||
        !this.enabledAccessControl() ||
        (this.enabledAccessControl() && ensure(get(), ["accessCode"]))
      );
@@ -368,6 +368,8 @@ export const useChatStore = createPersistStore(
      api = new ClientApi(ModelProvider.GeminiPro);
    } else if (modelConfig.providerName == ServiceProvider.Anthropic) {
      api = new ClientApi(ModelProvider.Claude);
    } else if (modelConfig.providerName == ServiceProvider.Baidu) {
      api = new ClientApi(ModelProvider.Ernie);
    } else {
      api = new ClientApi(ModelProvider.GPT);
    }
@@ -552,6 +554,8 @@ export const useChatStore = createPersistStore(
      api = new ClientApi(ModelProvider.GeminiPro);
    } else if (modelConfig.providerName == ServiceProvider.Anthropic) {
      api = new ClientApi(ModelProvider.Claude);
    } else if (modelConfig.providerName == ServiceProvider.Baidu) {
      api = new ClientApi(ModelProvider.Ernie);
    } else {
      api = new ClientApi(ModelProvider.GPT);
    }
@@ -24,10 +24,13 @@ export function collectModelTable(
 
   // default models
   models.forEach((m) => {
+    // support name=displayName, e.g. completions_pro=ernie-4.0-8k
+    const [name, displayName] = m.name?.split("=");
     // using <modelName>@<providerId> as fullName
-    modelTable[`${m.name}@${m?.provider?.id}`] = {
+    modelTable[`${name}@${m?.provider?.id}`] = {
       ...m,
-      displayName: m.name, // 'provider' is copied over if it exists
+      name,
+      displayName: displayName || name, // 'provider' is copied over if it exists
     };
   });
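This split is what makes entries such as "completions_pro=ernie-4.0-8k" in the new baiduModels list work: the part before "=" is the model key sent to the API, the part after it is the name shown in the UI. A small illustration (not part of the diff):

    const m = { name: "completions_pro=ernie-4.0-8k", provider: { id: "baidu" } };
    const [name, displayName] = m.name.split("=");
    // name === "completions_pro"     -> used as the model key / request path
    // displayName === "ernie-4.0-8k" -> shown in the UI; falls back to name when no "=" is present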