// ChatGPT-Next-Web — app/client/api.ts
// (page-scrape metadata removed: file-size/line-count chrome and commit timestamps)
import { getClientConfig } from "../config/client";
// 2023-12-23 18:15:30 +00:00
import {
ACCESS_CODE_PREFIX,
Azure,
ModelProvider,
ServiceProvider,
} from "../constant";
import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
// 2023-05-14 15:00:17 +00:00
import { ChatGPTApi } from "./platforms/openai";
// 2023-12-23 18:15:30 +00:00
import { GeminiApi } from "./platforms/google";
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
2023-05-14 15:00:17 +00:00
export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
export type ChatModel = ModelType;
export interface RequestMessage {
2023-05-14 15:00:17 +00:00
role: MessageRole;
content: string;
}
export interface LLMConfig {
model: string;
2023-05-14 15:00:17 +00:00
temperature?: number;
top_p?: number;
2023-05-14 15:00:17 +00:00
stream?: boolean;
presence_penalty?: number;
frequency_penalty?: number;
2023-05-14 15:00:17 +00:00
}
export interface ChatOptions {
messages: RequestMessage[];
2023-05-14 15:00:17 +00:00
config: LLMConfig;
onUpdate?: (message: string, chunk: string) => void;
2023-05-14 15:00:17 +00:00
onFinish: (message: string) => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
2023-05-14 15:00:17 +00:00
}
/** Usage/quota figures reported by a provider. */
export interface LLMUsage {
  /** Amount consumed so far. */
  used: number;
  /** Total quota available. */
  total: number;
}
export interface LLMModel {
name: string;
available: boolean;
2023-12-23 18:15:30 +00:00
provider: LLMModelProvider;
}
export interface LLMModelProvider {
id: string;
providerName: string;
providerType: string;
}
2023-05-14 15:00:17 +00:00
export abstract class LLMApi {
abstract chat(options: ChatOptions): Promise<void>;
abstract usage(): Promise<LLMUsage>;
abstract models(): Promise<LLMModel[]>;
2023-05-14 15:00:17 +00:00
}
// NOTE(review): nothing else in this file references the three types below;
// they look like leftovers from an earlier provider abstraction — confirm
// against the rest of the repo before removing.

/** Known provider identifiers. */
type ProviderName = "openai" | "azure" | "claude" | "palm";

/** Describes a single model offered by a provider. */
interface Model {
  name: string;
  provider: ProviderName;
  /** Context length — presumably in tokens; confirm. */
  ctxlen: number;
}

/** Configuration and capabilities of one chat provider. */
interface ChatProvider {
  name: ProviderName;
  apiConfig: {
    baseUrl: string;
    apiKey: string;
    /** Model used for summarization. */
    summaryModel: Model;
  };
  models: Model[];
  chat: () => void;
  usage: () => void;
}
2023-05-14 15:00:17 +00:00
export class ClientApi {
public llm: LLMApi;
2023-12-23 18:15:30 +00:00
constructor(provider: ModelProvider = ModelProvider.GPT) {
if (provider === ModelProvider.Gemini) {
this.llm = new GeminiApi();
return;
}
2023-05-14 15:00:17 +00:00
this.llm = new ChatGPTApi();
}
config() {}
prompts() {}
masks() {}
2023-05-24 17:04:37 +00:00
async share(messages: ChatMessage[], avatarUrl: string | null = null) {
const msgs = messages
.map((m) => ({
from: m.role === "user" ? "human" : "gpt",
value: m.content,
}))
.concat([
{
from: "human",
value:
2023-12-23 08:48:16 +00:00
"Share from [NextChat]: https://github.com/Yidadaa/ChatGPT-Next-Web",
2023-05-24 17:04:37 +00:00
},
]);
// 敬告二开开发者们,为了开源大模型的发展,请不要修改上述消息,此消息用于后续数据清洗使用
// Please do not modify this message
2023-07-12 15:19:46 +00:00
console.log("[Share]", messages, msgs);
2023-06-15 15:55:18 +00:00
const clientConfig = getClientConfig();
const proxyUrl = "/sharegpt";
const rawUrl = "https://sharegpt.com/api/conversations";
const shareUrl = clientConfig?.isApp ? rawUrl : proxyUrl;
const res = await fetch(shareUrl, {
2023-05-24 17:04:37 +00:00
body: JSON.stringify({
avatarUrl,
items: msgs,
}),
headers: {
"Content-Type": "application/json",
},
method: "POST",
});
const resJson = await res.json();
console.log("[Share]", resJson);
if (resJson.id) {
return `https://shareg.pt/${resJson.id}`;
}
}
2023-05-14 15:00:17 +00:00
}
export function getHeaders() {
const accessStore = useAccessStore.getState();
2023-11-09 18:43:30 +00:00
const headers: Record<string, string> = {
2023-05-14 15:00:17 +00:00
"Content-Type": "application/json",
"x-requested-with": "XMLHttpRequest",
2023-05-14 15:00:17 +00:00
};
2023-12-23 19:05:23 +00:00
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
const isGoogle = modelConfig.model === "gemini";
2023-11-09 18:43:30 +00:00
const isAzure = accessStore.provider === ServiceProvider.Azure;
const authHeader = isAzure ? "api-key" : "Authorization";
2023-12-23 18:15:30 +00:00
const apiKey = isGoogle
? accessStore.googleApiKey
: isAzure
? accessStore.azureApiKey
: accessStore.openaiApiKey;
2023-11-09 18:43:30 +00:00
const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
2023-05-14 15:00:17 +00:00
const validString = (x: string) => x && x.length > 0;
// use user's api key first
2023-11-09 18:43:30 +00:00
if (validString(apiKey)) {
headers[authHeader] = makeBearer(apiKey);
2023-05-14 15:00:17 +00:00
} else if (
accessStore.enabledAccessControl() &&
validString(accessStore.accessCode)
) {
2023-11-09 18:43:30 +00:00
headers[authHeader] = makeBearer(
2023-05-14 15:00:17 +00:00
ACCESS_CODE_PREFIX + accessStore.accessCode,
);
}
return headers;
}