forked from XiaoMo/ChatGPT-Next-Web
feat: #2144 improve input template
parent 5f7c262759
commit 5f0cda829f
@@ -54,3 +54,8 @@ export const OpenaiPath = {
 };
 
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
+export const DEFAULT_SYSTEM_TEMPLATE = `
+You are ChatGPT, a large language model trained by OpenAI.
+Knowledge cutoff: 2021-09
+Current model: {{model}}
+Current time: {{time}}`;
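
Note: both templates are consumed by the fillTemplateWith helper that appears in the hunks below; its implementation is not part of this diff. The sketch that follows only illustrates how the {{input}} / {{time}} / {{model}} / {{lang}} placeholders could be substituted. The name fillTemplateSketch and its exact behaviour are assumptions for illustration, not code from this commit.

// Minimal sketch (not from this commit) of filling the template placeholders.
// The repo's real helper is fillTemplateWith; this stand-in is named
// differently on purpose and only mirrors the call shape seen in the diff.
function fillTemplateSketch(
  input: string,
  vars: { model: string; template?: string },
): string {
  const template = vars.template ?? "{{input}}"; // DEFAULT_INPUT_TEMPLATE shape
  const values: Record<string, string> = {
    input,
    model: vars.model,
    time: new Date().toLocaleString(),
    lang: "en", // stand-in for the UI language
  };
  // Replace each known {{placeholder}}; unknown ones are left as-is.
  return Object.entries(values).reduce(
    (out, [key, value]) => out.split(`{{${key}}}`).join(value),
    template,
  );
}

// Example: render the new system prompt for a given model.
// fillTemplateSketch("", { model: "gpt-3.5-turbo", template: DEFAULT_SYSTEM_TEMPLATE })
// -> "You are ChatGPT, ... Current model: gpt-3.5-turbo ... Current time: ..."
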
@@ -7,7 +7,11 @@ import Locale, { getLang } from "../locales";
 import { showToast } from "../components/ui-lib";
 import { ModelConfig, ModelType, useAppConfig } from "./config";
 import { createEmptyMask, Mask } from "./mask";
-import { DEFAULT_INPUT_TEMPLATE, StoreKey } from "../constant";
+import {
+  DEFAULT_INPUT_TEMPLATE,
+  DEFAULT_SYSTEM_TEMPLATE,
+  StoreKey,
+} from "../constant";
 import { api, RequestMessage } from "../client/api";
 import { ChatControllerPool } from "../client/controller";
 import { prettyObject } from "../utils/format";
@@ -279,7 +283,7 @@ export const useChatStore = create<ChatStore>()(
         const modelConfig = session.mask.modelConfig;
 
         const userContent = fillTemplateWith(content, modelConfig);
-        console.log("[User Input] fill with template: ", userContent);
+        console.log("[User Input] after template: ", userContent);
 
         const userMessage: ChatMessage = createMessage({
           role: "user",
@@ -312,7 +316,6 @@ export const useChatStore = create<ChatStore>()(
         });
 
         // make request
-        console.log("[User Input] ", sendMessages);
         api.llm.chat({
           messages: sendMessages,
           config: { ...modelConfig, stream: true },
@@ -391,6 +394,27 @@ export const useChatStore = create<ChatStore>()(
         // in-context prompts
         const contextPrompts = session.mask.context.slice();
 
+        // system prompts, to get close to OpenAI Web ChatGPT
+        // only will be injected if user does not use a mask or set none context prompts
+        const shouldInjectSystemPrompts = contextPrompts.length === 0;
+        const systemPrompts = shouldInjectSystemPrompts
+          ? [
+              createMessage({
+                role: "system",
+                content: fillTemplateWith("", {
+                  ...modelConfig,
+                  template: DEFAULT_SYSTEM_TEMPLATE,
+                }),
+              }),
+            ]
+          : [];
+        if (shouldInjectSystemPrompts) {
+          console.log(
+            "[Global System Prompt] ",
+            systemPrompts.at(0)?.content ?? "empty",
+          );
+        }
+
         // long term memory
         const shouldSendLongTermMemory =
           modelConfig.sendMemory &&
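
The rule added above is easy to misread, so spelled out: the global system prompt is injected only when the session's mask contributes no context prompts of its own. A tiny self-contained illustration of that condition follows; the types and sample data are invented for the sketch and are not part of the commit.

// Illustration only: the system prompt is injected iff the mask has no
// context prompts. Types and sample data are made up for this sketch.
type SketchMessage = { role: "system" | "user" | "assistant"; content: string };

function buildSystemPrompts(
  contextPrompts: SketchMessage[],
  systemContent: string,
): SketchMessage[] {
  const shouldInject = contextPrompts.length === 0;
  return shouldInject ? [{ role: "system", content: systemContent }] : [];
}

console.log(buildSystemPrompts([], "global prompt").length); // 1: injected
console.log(
  buildSystemPrompts([{ role: "user", content: "mask prompt" }], "global prompt")
    .length,
); // 0: the mask's own context wins
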
@@ -409,6 +433,7 @@ export const useChatStore = create<ChatStore>()(
         );
 
         // lets concat send messages, including 4 parts:
+        // 0. system prompt: to get close to OpenAI Web ChatGPT
         // 1. long term memory: summarized memory messages
         // 2. pre-defined in-context prompts
         // 3. short term memory: latest n messages
@@ -435,6 +460,7 @@ export const useChatStore = create<ChatStore>()(
 
         // concat all messages
         const recentMessages = [
+          ...systemPrompts,
           ...longTermMemoryPrompts,
           ...contextPrompts,
           ...reversedRecentMessages.reverse(),
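
With ...systemPrompts spread in above, the outgoing list follows the four-part order described by the comments in the previous hunk. A rough illustration with dummy data follows; the array names mirror the diff, but the contents are made up.

// Dummy data only; shows the resulting send order, not real store state.
type SketchMessage = { role: "system" | "user" | "assistant"; content: string };

const systemPrompts: SketchMessage[] = [
  { role: "system", content: "global system prompt (injected: no mask context)" },
];
const longTermMemoryPrompts: SketchMessage[] = [
  { role: "system", content: "summary of earlier turns" },
];
const contextPrompts: SketchMessage[] = []; // empty here, hence the injection above
const reversedRecentMessages: SketchMessage[] = [
  { role: "user", content: "latest question" },
];

const recentMessages: SketchMessage[] = [
  ...systemPrompts, // 0. global system prompt
  ...longTermMemoryPrompts, // 1. long term memory
  ...contextPrompts, // 2. in-context prompts
  ...reversedRecentMessages.reverse(), // 3. short term memory
];
console.log(recentMessages.map((m) => m.role)); // [ "system", "system", "user" ]
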
@@ -177,9 +177,9 @@ export const useAppConfig = create<ChatConfigStore>()(
     }),
     {
       name: StoreKey.Config,
-      version: 3.1,
+      version: 3.2,
       migrate(persistedState, version) {
-        if (version === 3.1) return persistedState as any;
+        if (version === 3.2) return persistedState as any;
 
         const state = persistedState as ChatConfig;
         state.modelConfig.sendMemory = true;
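
The version bump works together with the migrate guard: zustand's persist middleware calls migrate when the stored version does not match the configured one, so raising it to 3.2 makes configs saved under 3.1 or earlier run through the migration body instead of hitting the early return. A reduced sketch of that mechanism follows; the store shape is simplified and only the sendMemory field visible in this hunk is kept.

// Reduced sketch of the persist/version/migrate interaction
// (simplified config shape; all other fields are omitted).
import { create } from "zustand";
import { persist } from "zustand/middleware";

type SketchConfig = { modelConfig: { sendMemory: boolean } };

const useSketchConfig = create<SketchConfig>()(
  persist(
    () => ({ modelConfig: { sendMemory: true } }),
    {
      name: "sketch-config",
      version: 3.2, // raising this marks state stored under 3.1 as outdated
      migrate(persistedState, version) {
        if (version === 3.2) return persistedState as any; // already current
        // anything older falls through and gets the new default re-applied
        const state = persistedState as SketchConfig;
        state.modelConfig.sendMemory = true;
        return state as any;
      },
    },
  ),
);

// Reading the store outside React components:
console.log(useSketchConfig.getState().modelConfig.sendMemory);
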