diff --git a/app/constant.ts b/app/constant.ts
index 95de3562..635fbeae 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -69,13 +69,20 @@ export const OpenaiPath = {
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
 export const DEFAULT_SYSTEM_TEMPLATE = `
 You are ChatGPT, a large language model trained by OpenAI.
-Knowledge cutoff: {{knowledgeCutoff}}
+Knowledge cutoff: {{cutoff}}
 Current model: {{model}}
 Current time: {{time}}
 `;
 
 export const SUMMARIZE_MODEL = "gpt-3.5-turbo";
 
+export const KnowledgeCutOffDate: Record<string, string> = {
+  default: "2021-09",
+  "gpt-3.5-turbo-1106": "2023-04",
+  "gpt-4-1106-preview": "2023-04",
+  "gpt-4-vision-preview": "2023-04",
+};
+
 export const DEFAULT_MODELS = [
   {
     name: "gpt-4",
diff --git a/app/store/chat.ts b/app/store/chat.ts
index 9f73fdf2..95822c19 100644
--- a/app/store/chat.ts
+++ b/app/store/chat.ts
@@ -7,6 +7,7 @@ import { createEmptyMask, Mask } from "./mask";
 import {
   DEFAULT_INPUT_TEMPLATE,
   DEFAULT_SYSTEM_TEMPLATE,
+  KnowledgeCutOffDate,
   StoreKey,
   SUMMARIZE_MODEL,
 } from "../constant";
@@ -116,7 +117,11 @@ function countMessages(msgs: ChatMessage[]) {
 }
 
 function fillTemplateWith(input: string, modelConfig: ModelConfig) {
+  let cutoff =
+    KnowledgeCutOffDate[modelConfig.model] ?? KnowledgeCutOffDate.default;
+
   const vars = {
+    cutoff,
     model: modelConfig.model,
     time: new Date().toLocaleString(),
     lang: getLang(),
@@ -401,26 +406,22 @@ export const useChatStore = createPersistStore(
 
       // system prompts, to get close to OpenAI Web ChatGPT
       const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts;
-      let systemPrompts = shouldInjectSystemPrompts ? [] : [];
-
+      const systemPrompts = shouldInjectSystemPrompts
+        ? [
+            createMessage({
+              role: "system",
+              content: fillTemplateWith("", {
+                ...modelConfig,
+                template: DEFAULT_SYSTEM_TEMPLATE,
+              }),
+            }),
+          ]
+        : [];
       if (shouldInjectSystemPrompts) {
-        const model = modelConfig.model;
-        let systemTemplate = DEFAULT_SYSTEM_TEMPLATE;
-
-        if (model === "gpt-4-1106-preview" || model === "gpt-4-vision-preview") {
-          systemTemplate = systemTemplate.replace("{{knowledgeCutoff}}", "2023-04");
-        } else {
-          systemTemplate = systemTemplate.replace("{{knowledgeCutoff}}", "2021-09");
-        }
-
-        const systemPrompt = createMessage({
-          role: "system",
-          content: fillTemplateWith("", {
-            ...modelConfig,
-            template: systemTemplate,
-          }),
-        });
-        console.log("[Global System Prompt] ", systemPrompt.content);
+        console.log(
+          "[Global System Prompt] ",
+          systemPrompts.at(0)?.content ?? "empty",
+        );
       }
 
       // long term memory
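
For reference, here is a minimal standalone sketch of the lookup the patched `fillTemplateWith` performs; the `resolveCutoff` helper is illustrative only and not part of the patch. Any model name missing from `KnowledgeCutOffDate` falls back to the `default` entry via `??`, so the `{{cutoff}}` placeholder in `DEFAULT_SYSTEM_TEMPLATE` always resolves to a value.

```ts
// Illustrative only: mirrors the new cutoff lookup added to fillTemplateWith.
const KnowledgeCutOffDate: Record<string, string> = {
  default: "2021-09",
  "gpt-3.5-turbo-1106": "2023-04",
  "gpt-4-1106-preview": "2023-04",
  "gpt-4-vision-preview": "2023-04",
};

// Hypothetical helper, not in the diff; shows the ?? fallback behavior.
function resolveCutoff(model: string): string {
  return KnowledgeCutOffDate[model] ?? KnowledgeCutOffDate.default;
}

console.log(resolveCutoff("gpt-4-1106-preview")); // "2023-04"
console.log(resolveCutoff("gpt-4")); // "2021-09" (falls back to default)
```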