forked from XiaoMo/ChatGPT-Next-Web
commit 9ef680db57
@@ -1,5 +1,6 @@
import {
  DEFAULT_API_HOST,
  DEFAULT_MODELS,
  OpenaiPath,
  REQUEST_TIMEOUT_MS,
} from "@/app/constant";
@@ -23,6 +24,8 @@ export interface OpenAIListModelResponse {
}

export class ChatGPTApi implements LLMApi {
  private disableListModels = true;

  path(path: string): string {
    let openaiUrl = useAccessStore.getState().openaiUrl;
    if (openaiUrl.length === 0) {
@@ -246,6 +249,10 @@ export class ChatGPTApi implements LLMApi {
  }

  async models(): Promise<LLMModel[]> {
    if (this.disableListModels) {
      return DEFAULT_MODELS.slice();
    }

    const res = await fetch(this.path(OpenaiPath.ListModelPath), {
      method: "GET",
      headers: {
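With disableListModels set to true, the client never queries the remote model-list endpoint and instead returns a copy of the compiled-in DEFAULT_MODELS. A minimal sketch of that fallback pattern, with a simplified LLMModel shape and illustrative model entries (auth headers omitted):

// Sketch: fall back to a static model list when remote listing is disabled.
interface LLMModel {
  name: string;
  available: boolean;
}

const DEFAULT_MODELS: readonly LLMModel[] = [
  { name: "gpt-3.5-turbo", available: true },
  { name: "gpt-4", available: true },
];

class ModelProvider {
  private disableListModels = true;

  async models(): Promise<LLMModel[]> {
    if (this.disableListModels) {
      // slice() returns a mutable copy so callers cannot mutate the shared constant
      return DEFAULT_MODELS.slice();
    }
    // Remote path shown for contrast; real requests would also send auth headers.
    const res = await fetch("https://api.openai.com/v1/models");
    const json = await res.json();
    return json.data.map((m: { id: string }) => ({ name: m.id, available: true }));
  }
}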
@@ -710,7 +710,7 @@ export function Chat() {
  };

  const findLastUserIndex = (messageId: string) => {
    // find last user input message and resend
    // find last user input message
    let lastUserMessageIndex: number | null = null;
    for (let i = 0; i < session.messages.length; i += 1) {
      const message = session.messages[i];
@@ -737,17 +737,56 @@ export function Chat() {
  };

  const onResend = (message: ChatMessage) => {
    let content = message.content;
    // when it is resending a message
    // 1. for a user's message, find the next bot response
    // 2. for a bot's message, find the last user's input
    // 3. delete original user input and bot's message
    // 4. resend the user's input

    if (message.role === "assistant" && message.id) {
      const userIndex = findLastUserIndex(message.id);
      if (userIndex) {
        content = session.messages.at(userIndex)?.content ?? content;
    const resendingIndex = session.messages.findIndex(
      (m) => m.id === message.id,
    );

    if (resendingIndex <= 0 || resendingIndex >= session.messages.length) {
      console.error("[Chat] failed to find resending message", message);
      return;
    }

    let userMessage: ChatMessage | undefined;
    let botMessage: ChatMessage | undefined;

    if (message.role === "assistant") {
      // if it is resending a bot's message, find the user input for it
      botMessage = message;
      for (let i = resendingIndex; i >= 0; i -= 1) {
        if (session.messages[i].role === "user") {
          userMessage = session.messages[i];
          break;
        }
      }
    } else if (message.role === "user") {
      // if it is resending a user's input, find the bot's response
      userMessage = message;
      for (let i = resendingIndex; i < session.messages.length; i += 1) {
        if (session.messages[i].role === "assistant") {
          botMessage = session.messages[i];
          break;
        }
      }
    }

    if (userMessage === undefined) {
      console.error("[Chat] failed to resend", message);
      return;
    }

    // delete the original messages
    deleteMessage(userMessage.id);
    deleteMessage(botMessage?.id);

    // resend the message
    setIsLoading(true);
    chatStore.onUserInput(content).then(() => setIsLoading(false));
    chatStore.onUserInput(userMessage.content).then(() => setIsLoading(false));
    inputRef.current?.focus();
  };
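The rewritten onResend pairs the clicked message with its counterpart by scanning outward from its index: backwards to the owning user message when a bot reply is resent, forwards to the bot reply when a user message is resent. A framework-free sketch of that pairing step, with a simplified ChatMessage shape and a hypothetical findResendPair helper:

// Sketch of the resend pairing logic, independent of the React component.
type Role = "user" | "assistant";
interface ChatMessage {
  id: number;
  role: Role;
  content: string;
}

function findResendPair(
  messages: ChatMessage[],
  resendingIndex: number,
): { userMessage?: ChatMessage; botMessage?: ChatMessage } {
  const message = messages[resendingIndex];
  let userMessage: ChatMessage | undefined;
  let botMessage: ChatMessage | undefined;

  if (message.role === "assistant") {
    // resending a bot reply: walk backwards to the user input that produced it
    botMessage = message;
    for (let i = resendingIndex; i >= 0; i -= 1) {
      if (messages[i].role === "user") {
        userMessage = messages[i];
        break;
      }
    }
  } else {
    // resending a user input: walk forwards to the bot reply it produced
    userMessage = message;
    for (let i = resendingIndex; i < messages.length; i += 1) {
      if (messages[i].role === "assistant") {
        botMessage = messages[i];
        break;
      }
    }
  }
  return { userMessage, botMessage };
}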
@@ -28,6 +28,7 @@ import { useAppConfig } from "../store/config";
import { AuthPage } from "./auth";
import { getClientConfig } from "../config/client";
import { api } from "../client/api";
import { useAccessStore } from "../store";

export function Loading(props: { noLogo?: boolean }) {
  return (
@@ -171,6 +172,7 @@ export function Home() {

  useEffect(() => {
    console.log("[Config] got config from build time", getClientConfig());
    useAccessStore.getState().fetch();
  }, []);

  if (!useHasHydrated()) {
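The Home component now pulls the runtime config from the server once on mount, so access settings such as disableGPT4 take effect without a rebuild. A minimal sketch of that on-mount fetch pattern; the /api/config endpoint and the ServerConfig shape are assumptions for illustration:

import { useEffect } from "react";

// Sketch: fetch server-side runtime config once when the app mounts.
interface ServerConfig {
  needCode?: boolean;
  disableGPT4?: boolean;
}

export function useServerConfig(onConfig: (cfg: ServerConfig) => void) {
  useEffect(() => {
    // Endpoint assumed; mirrors the idea of serving runtime config from the backend.
    fetch("/api/config", { method: "POST" })
      .then((res) => res.json() as Promise<ServerConfig>)
      .then(onConfig)
      .catch(console.error);
  }, []); // run once on mount, like the Home effect above
}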
@@ -108,24 +108,4 @@ export const DEFAULT_MODELS = [
    name: "gpt-3.5-turbo-16k-0613",
    available: true,
  },
  {
    name: "qwen-v1", // Tongyi Qianwen (通义千问)
    available: false,
  },
  {
    name: "ernie", // ERNIE Bot (文心一言)
    available: false,
  },
  {
    name: "spark", // iFlytek Spark (讯飞星火)
    available: false,
  },
  {
    name: "llama", // LLaMA
    available: false,
  },
  {
    name: "chatglm", // ChatGLM-6B
    available: false,
  },
] as const;
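Each DEFAULT_MODELS entry carries an available flag, which lets the UI offer only usable models while keeping the full catalogue in one constant. A hedged sketch of how such a flag can be consumed; the selectableModels helper is illustrative, not code from this repository:

// Sketch: derive selectable model names from a catalogue with availability flags.
const DEFAULT_MODELS = [
  { name: "gpt-3.5-turbo", available: true },
  { name: "gpt-4", available: true },
  { name: "qwen-v1", available: false },
  { name: "chatglm", available: false },
] as const;

type ModelName = (typeof DEFAULT_MODELS)[number]["name"];

function selectableModels(): ModelName[] {
  return DEFAULT_MODELS.filter((m) => m.available).map((m) => m.name);
}

console.log(selectableModels()); // ["gpt-3.5-turbo", "gpt-4"]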
@@ -143,8 +143,7 @@ const en: LocaleType = {
  },
  InjectSystemPrompts: {
    Title: "Inject System Prompts",
    SubTitle:
      "Forcefully add a simulated ChatGPT system prompt at the beginning of the message list for every request",
    SubTitle: "Inject a global system prompt for every request",
  },
  InputTemplate: {
    Title: "Input Template",
@@ -1,6 +1,6 @@
import { create } from "zustand";
import { persist } from "zustand/middleware";
import { DEFAULT_API_HOST, StoreKey } from "../constant";
import { DEFAULT_API_HOST, DEFAULT_MODELS, StoreKey } from "../constant";
import { getHeaders } from "../client/api";
import { BOT_HELLO } from "./chat";
import { getClientConfig } from "../config/client";
@@ -11,8 +11,10 @@ export interface AccessControlStore {

  needCode: boolean;
  hideUserApiKey: boolean;
  openaiUrl: string;
  hideBalanceQuery: boolean;
  disableGPT4: boolean;

  openaiUrl: string;

  updateToken: (_: string) => void;
  updateCode: (_: string) => void;
@@ -35,8 +37,10 @@ export const useAccessStore = create<AccessControlStore>()(
    accessCode: "",
    needCode: true,
    hideUserApiKey: false,
    openaiUrl: DEFAULT_OPENAI_URL,
    hideBalanceQuery: false,
    disableGPT4: false,

    openaiUrl: DEFAULT_OPENAI_URL,

    enabledAccessControl() {
      get().fetch();
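For reference, the access store is a zustand store wrapped in the persist middleware. The sketch below shows the minimal shape of that pattern with only a couple of the fields above; the store name and default values are illustrative, not the project's exact ones:

import { create } from "zustand";
import { persist } from "zustand/middleware";

// Minimal sketch of a persisted zustand store holding access settings.
interface AccessControlStore {
  openaiUrl: string;
  disableGPT4: boolean;
  update: (partial: Partial<Pick<AccessControlStore, "openaiUrl" | "disableGPT4">>) => void;
}

const useAccessStore = create<AccessControlStore>()(
  persist(
    (set) => ({
      openaiUrl: "/api/openai/", // illustrative default
      disableGPT4: false,
      update: (partial) => set(() => ({ ...partial })),
    }),
    { name: "access-control" }, // storage key assumed for the sketch
  ),
);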
@@ -75,8 +79,10 @@ export const useAccessStore = create<AccessControlStore>()(
          console.log("[Config] got config from server", res);
          set(() => ({ ...res }));

          if ((res as any).botHello) {
            BOT_HELLO.content = (res as any).botHello;
          if (res.disableGPT4) {
            DEFAULT_MODELS.forEach(
              (m: any) => (m.available = !m.name.startsWith("gpt-4")),
            );
          }
        })
        .catch(() => {
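When the server config reports disableGPT4, every built-in model whose name starts with "gpt-4" is flagged as unavailable in place. A standalone sketch of that marking step; the model list below is illustrative:

// Sketch: flag GPT-4 family models as unavailable when the server disables them.
interface ModelRecord {
  name: string;
  available: boolean;
}

const models: ModelRecord[] = [
  { name: "gpt-3.5-turbo", available: true },
  { name: "gpt-4", available: true },
  { name: "gpt-4-32k", available: true },
];

function applyDisableGPT4(models: ModelRecord[], disableGPT4: boolean): void {
  if (!disableGPT4) return;
  // Mutates the shared list so every consumer sees the restriction.
  models.forEach((m) => (m.available = !m.name.startsWith("gpt-4")));
}

applyDisableGPT4(models, true);
// models now: gpt-3.5-turbo available, gpt-4 and gpt-4-32k unavailable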
@@ -289,7 +289,6 @@ export const useChatStore = create<ChatStore>()(
      const botMessage: ChatMessage = createMessage({
        role: "assistant",
        streaming: true,
        id: userMessage.id! + 1,
        model: modelConfig.model,
      });