feat: close #2580 — only use GPT-3.5 to summarize when not using custom models

This commit is contained in:
Yidadaa 2023-08-28 00:02:52 +08:00
parent ada4e3cdcd
commit 3bd76b9156
2 changed files with 14 additions and 2 deletions

View File

@ -63,6 +63,8 @@ Knowledge cutoff: 2021-09
Current model: {{model}} Current model: {{model}}
Current time: {{time}}`; Current time: {{time}}`;
export const SUMMARIZE_MODEL = "gpt-3.5-turbo";
export const DEFAULT_MODELS = [ export const DEFAULT_MODELS = [
{ {
name: "gpt-4", name: "gpt-4",

View File

@ -11,6 +11,7 @@ import {
DEFAULT_INPUT_TEMPLATE, DEFAULT_INPUT_TEMPLATE,
DEFAULT_SYSTEM_TEMPLATE, DEFAULT_SYSTEM_TEMPLATE,
StoreKey, StoreKey,
SUMMARIZE_MODEL,
} from "../constant"; } from "../constant";
import { api, RequestMessage } from "../client/api"; import { api, RequestMessage } from "../client/api";
import { ChatControllerPool } from "../client/controller"; import { ChatControllerPool } from "../client/controller";
@ -80,6 +81,11 @@ function createEmptySession(): ChatSession {
}; };
} }
/**
 * Choose which model performs summarization for the current session.
 *
 * OpenAI `gpt-*` models are swapped for the cheaper SUMMARIZE_MODEL;
 * any other (custom) model keeps summarizing with itself, since a
 * custom deployment may not expose a gpt-3.5 endpoint at all.
 */
function getSummarizeModel(currentModel: string) {
  // Only the gpt-* family is forced onto the dedicated summarize model.
  if (currentModel.startsWith("gpt")) {
    return SUMMARIZE_MODEL;
  }
  return currentModel;
}
interface ChatStore { interface ChatStore {
sessions: ChatSession[]; sessions: ChatSession[];
currentSessionIndex: number; currentSessionIndex: number;
@ -501,7 +507,7 @@ export const useChatStore = create<ChatStore>()(
api.llm.chat({ api.llm.chat({
messages: topicMessages, messages: topicMessages,
config: { config: {
model: "gpt-3.5-turbo", model: getSummarizeModel(session.mask.modelConfig.model),
}, },
onFinish(message) { onFinish(message) {
get().updateCurrentSession( get().updateCurrentSession(
@ -555,7 +561,11 @@ export const useChatStore = create<ChatStore>()(
date: "", date: "",
}), }),
), ),
config: { ...modelConfig, stream: true, model: "gpt-3.5-turbo" }, config: {
...modelConfig,
stream: true,
model: getSummarizeModel(session.mask.modelConfig.model),
},
onUpdate(message) { onUpdate(message) {
session.memoryPrompt = message; session.memoryPrompt = message;
}, },