Merge pull request #2145 from Yidadaa/bugfix-0626

feat: #2144 improve input template
Yifei Zhang 2023-06-26 13:21:48 +08:00 committed by GitHub
commit 1607640bed
3 changed files with 36 additions and 5 deletions

@@ -54,3 +54,8 @@ export const OpenaiPath = {
};
export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
export const DEFAULT_SYSTEM_TEMPLATE = `
You are ChatGPT, a large language model trained by OpenAI.
Knowledge cutoff: 2021-09
Current model: {{model}}
Current time: {{time}}`;
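
The {{input}}, {{time}}, {{model}}, and {{lang}} markers noted in the comment above are plain string placeholders that get substituted into the template before the prompt is sent. As a rough sketch only (the helper name and replacement loop below are assumptions for illustration, not the repository's fillTemplateWith implementation):

function fillPlaceholders(template: string, vars: Record<string, string>): string {
  let output = template;
  for (const [key, value] of Object.entries(vars)) {
    // replace every occurrence of {{key}}; unknown placeholders are left untouched
    output = output.split(`{{${key}}}`).join(value);
  }
  return output;
}

// e.g. fillPlaceholders(DEFAULT_SYSTEM_TEMPLATE, { model: "gpt-3.5-turbo", time: new Date().toLocaleString() })
// => "You are ChatGPT, ...\nKnowledge cutoff: 2021-09\nCurrent model: gpt-3.5-turbo\nCurrent time: <local time>"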

@@ -7,7 +7,11 @@ import Locale, { getLang } from "../locales";
import { showToast } from "../components/ui-lib";
import { ModelConfig, ModelType, useAppConfig } from "./config";
import { createEmptyMask, Mask } from "./mask";
import { DEFAULT_INPUT_TEMPLATE, StoreKey } from "../constant";
import {
  DEFAULT_INPUT_TEMPLATE,
  DEFAULT_SYSTEM_TEMPLATE,
  StoreKey,
} from "../constant";
import { api, RequestMessage } from "../client/api";
import { ChatControllerPool } from "../client/controller";
import { prettyObject } from "../utils/format";
@@ -279,7 +283,7 @@ export const useChatStore = create<ChatStore>()(
const modelConfig = session.mask.modelConfig;
const userContent = fillTemplateWith(content, modelConfig);
console.log("[User Input] fill with template: ", userContent);
console.log("[User Input] after template: ", userContent);
const userMessage: ChatMessage = createMessage({
  role: "user",
@@ -312,7 +316,6 @@ export const useChatStore = create<ChatStore>()(
});
// make request
console.log("[User Input] ", sendMessages);
api.llm.chat({
  messages: sendMessages,
  config: { ...modelConfig, stream: true },
@@ -391,6 +394,27 @@ export const useChatStore = create<ChatStore>()(
// in-context prompts
const contextPrompts = session.mask.context.slice();
// system prompts, to get close to OpenAI Web ChatGPT
// only injected when the user has not set any context prompts (i.e., no mask context in use)
const shouldInjectSystemPrompts = contextPrompts.length === 0;
const systemPrompts = shouldInjectSystemPrompts
  ? [
      createMessage({
        role: "system",
        content: fillTemplateWith("", {
          ...modelConfig,
          template: DEFAULT_SYSTEM_TEMPLATE,
        }),
      }),
    ]
  : [];
if (shouldInjectSystemPrompts) {
  console.log(
    "[Global System Prompt] ",
    systemPrompts.at(0)?.content ?? "empty",
  );
}
// long term memory
const shouldSendLongTermMemory =
modelConfig.sendMemory &&
@@ -409,6 +433,7 @@ export const useChatStore = create<ChatStore>()(
);
// let's concat the messages to send, including 4 parts:
// 0. system prompt: to get close to OpenAI Web ChatGPT
// 1. long term memory: summarized memory messages
// 2. pre-defined in-context prompts
// 3. short term memory: latest n messages
@@ -435,6 +460,7 @@ export const useChatStore = create<ChatStore>()(
// concat all messages
const recentMessages = [
  ...systemPrompts,
  ...longTermMemoryPrompts,
  ...contextPrompts,
  ...reversedRecentMessages.reverse(),
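
Taken together, the chat.ts changes mean each request now starts with the conditionally injected global system prompt, followed by summarized long-term memory, the mask's in-context prompts, the recent history, and the templated user input. Since shouldInjectSystemPrompts is true only when there are no context prompts, parts 0 and 2 never appear together. A made-up example of the resulting order when no mask context is set (roles and contents below are purely illustrative, not values produced by the code above):

const exampleSendMessages = [
  { role: "system", content: "You are ChatGPT, ...\nCurrent model: gpt-3.5-turbo\nCurrent time: 6/26/2023, 1:21 PM" }, // 0. injected system prompt
  { role: "system", content: "Summary of the earlier conversation ..." }, // 1. long term memory
  // 2. in-context prompts are empty here; if any were present, part 0 would be skipped
  { role: "assistant", content: "Hello! How can I help you today?" }, // 3. short term memory (recent messages)
  { role: "user", content: "What model are you running?" }, //    templated user input
];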

@@ -177,9 +177,9 @@ export const useAppConfig = create<ChatConfigStore>()(
}),
{
  name: StoreKey.Config,
  version: 3.1,
  version: 3.2,
  migrate(persistedState, version) {
    if (version === 3.1) return persistedState as any;
    if (version === 3.2) return persistedState as any;
    const state = persistedState as ChatConfig;
    state.modelConfig.sendMemory = true;
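
Bumping the persisted version from 3.1 to 3.2 forces clients whose stored config was written at 3.1 to pass through migrate() once more, while state already at 3.2 is returned untouched by the early-return guard. As a hypothetical, self-contained sketch of that migration step (the template backfill is an assumption about what a 3.1-to-3.2 migration could do, not the commit's actual body, which is truncated above; import paths follow the hunks above, and we assume modelConfig declares a template field):

import { DEFAULT_INPUT_TEMPLATE } from "../constant";
import type { ChatConfig } from "./config";

// mirrors the zustand persist migrate(persistedState, version) pattern shown above
function migrateConfigSketch(persistedState: unknown, version: number) {
  if (version === 3.2) return persistedState as any; // already current, nothing to do
  const state = persistedState as ChatConfig;
  // hypothetical backfill: give configs written before this change a default input template
  state.modelConfig.template ??= DEFAULT_INPUT_TEMPLATE;
  return state as any;
}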