// app/store/chat.ts — zustand store for chat sessions (ChatGPT-Next-Web).
import { create } from "zustand";
import { persist } from "zustand/middleware";
import { trimTopic } from "../utils";
import Locale, { getLang } from "../locales";
import { showToast } from "../components/ui-lib";
import { ModelConfig, ModelType, useAppConfig } from "./config";
import { createEmptyMask, Mask } from "./mask";
import {
DEFAULT_INPUT_TEMPLATE,
DEFAULT_SYSTEM_TEMPLATE,
StoreKey,
} from "../constant";
import { api, RequestMessage } from "../client/api";
import { ChatControllerPool } from "../client/controller";
import { prettyObject } from "../utils/format";
import { estimateTokenLength } from "../utils/token";
export type ChatMessage = RequestMessage & {
2023-03-10 18:25:33 +00:00
date: string;
2023-03-11 12:54:24 +00:00
streaming?: boolean;
isError?: boolean;
2023-04-05 19:19:33 +00:00
id?: number;
model?: ModelType;
2023-03-10 18:25:33 +00:00
};
2023-03-09 17:01:40 +00:00
export function createMessage(override: Partial<ChatMessage>): ChatMessage {
2023-04-05 19:19:33 +00:00
return {
id: Date.now(),
date: new Date().toLocaleString(),
role: "user",
content: "",
...override,
};
}
2023-03-19 16:09:30 +00:00
export interface ChatStat {
2023-03-10 18:25:33 +00:00
tokenCount: number;
wordCount: number;
charCount: number;
}
2023-03-19 16:09:30 +00:00
export interface ChatSession {
2023-03-11 17:14:07 +00:00
id: number;
2023-03-10 18:25:33 +00:00
topic: string;
2023-04-24 16:49:27 +00:00
2023-03-10 18:25:33 +00:00
memoryPrompt: string;
messages: ChatMessage[];
2023-03-10 18:25:33 +00:00
stat: ChatStat;
2023-04-25 18:02:46 +00:00
lastUpdate: number;
2023-03-19 15:13:10 +00:00
lastSummarizeIndex: number;
2023-05-20 17:28:09 +00:00
clearContextIndex?: number;
2023-04-22 17:27:15 +00:00
2023-04-24 16:49:27 +00:00
mask: Mask;
2023-03-10 18:25:33 +00:00
}
2023-04-22 17:27:15 +00:00
export const DEFAULT_TOPIC = Locale.Store.DefaultTopic;
export const BOT_HELLO: ChatMessage = createMessage({
2023-04-02 13:56:34 +00:00
role: "assistant",
content: Locale.Store.BotHello,
2023-04-05 19:19:33 +00:00
});
2023-03-10 18:25:33 +00:00
function createEmptySession(): ChatSession {
return {
2023-04-27 17:54:57 +00:00
id: Date.now() + Math.random(),
2023-03-10 18:25:33 +00:00
topic: DEFAULT_TOPIC,
memoryPrompt: "",
2023-04-02 13:56:34 +00:00
messages: [],
2023-03-10 18:25:33 +00:00
stat: {
tokenCount: 0,
wordCount: 0,
charCount: 0,
},
2023-04-25 18:02:46 +00:00
lastUpdate: Date.now(),
2023-03-19 15:13:10 +00:00
lastSummarizeIndex: 0,
2023-04-24 16:49:27 +00:00
mask: createEmptyMask(),
2023-03-10 18:25:33 +00:00
};
2023-03-09 17:01:40 +00:00
}
2023-03-10 18:25:33 +00:00
interface ChatStore {
sessions: ChatSession[];
currentSessionIndex: number;
2023-04-25 18:02:46 +00:00
globalId: number;
2023-04-02 05:42:47 +00:00
clearSessions: () => void;
2023-04-05 17:34:46 +00:00
moveSession: (from: number, to: number) => void;
2023-03-10 18:25:33 +00:00
selectSession: (index: number) => void;
2023-04-25 18:02:46 +00:00
newSession: (mask?: Mask) => void;
2023-05-01 15:21:28 +00:00
deleteSession: (index: number) => void;
2023-03-10 18:25:33 +00:00
currentSession: () => ChatSession;
2023-06-24 15:38:11 +00:00
nextSession: (delta: number) => void;
onNewMessage: (message: ChatMessage) => void;
2023-03-10 18:25:33 +00:00
onUserInput: (content: string) => Promise<void>;
summarizeSession: () => void;
updateStat: (message: ChatMessage) => void;
2023-03-10 18:25:33 +00:00
updateCurrentSession: (updater: (session: ChatSession) => void) => void;
2023-03-11 12:54:24 +00:00
updateMessage: (
sessionIndex: number,
messageIndex: number,
updater: (message?: ChatMessage) => void,
2023-03-11 12:54:24 +00:00
) => void;
resetSession: () => void;
getMessagesWithMemory: () => ChatMessage[];
getMemoryPrompt: () => ChatMessage;
2023-03-11 17:14:07 +00:00
2023-03-19 15:13:10 +00:00
clearAllData: () => void;
2023-03-09 17:01:40 +00:00
}
function countMessages(msgs: ChatMessage[]) {
2023-06-14 16:14:38 +00:00
return msgs.reduce((pre, cur) => pre + estimateTokenLength(cur.content), 0);
2023-03-29 16:02:50 +00:00
}
function fillTemplateWith(input: string, modelConfig: ModelConfig) {
const vars = {
model: modelConfig.model,
time: new Date().toLocaleString(),
lang: getLang(),
input: input,
};
let output = modelConfig.template ?? DEFAULT_INPUT_TEMPLATE;
// must contains {{input}}
const inputVar = "{{input}}";
if (!output.includes(inputVar)) {
output += "\n" + inputVar;
}
Object.entries(vars).forEach(([name, value]) => {
output = output.replaceAll(`{{${name}}}`, value);
});
return output;
}
2023-03-10 18:25:33 +00:00
export const useChatStore = create<ChatStore>()(
persist(
(set, get) => ({
sessions: [createEmptySession()],
currentSessionIndex: 0,
globalId: 0,
clearSessions() {
  // Drop everything and start over with one empty session selected.
  const fresh = createEmptySession();
  set(() => ({
    sessions: [fresh],
    currentSessionIndex: 0,
  }));
},
selectSession(index: number) {
  // Make the session at `index` the active one.
  set({ currentSessionIndex: index });
},
moveSession(from: number, to: number) {
  set((state) => {
    const oldIndex = state.currentSessionIndex;

    // Reinsert the dragged session at its new position.
    const newSessions = [...state.sessions];
    const [moved] = newSessions.splice(from, 1);
    newSessions.splice(to, 0, moved);

    // Keep the selection pointing at the same session object.
    let newIndex = oldIndex === from ? to : oldIndex;
    if (oldIndex > from && oldIndex <= to) {
      newIndex -= 1;
    } else if (oldIndex < from && oldIndex >= to) {
      newIndex += 1;
    }

    return {
      currentSessionIndex: newIndex,
      sessions: newSessions,
    };
  });
},
newSession(mask) {
  const session = createEmptySession();

  // Assign a store-wide sequential id.
  set(() => ({ globalId: get().globalId + 1 }));
  session.id = get().globalId;

  if (mask) {
    // Layer the mask's model config over the global defaults so unset
    // mask fields fall back to the app-wide configuration.
    const globalModelConfig = useAppConfig.getState().modelConfig;
    session.mask = {
      ...mask,
      modelConfig: {
        ...globalModelConfig,
        ...mask.modelConfig,
      },
    };
    session.topic = mask.name;
  }

  // Prepend the new session and select it.
  set((state) => ({
    currentSessionIndex: 0,
    sessions: [session].concat(state.sessions),
  }));
},
nextSession(delta) {
  // Step the selection by `delta`, wrapping around in both directions.
  const count = get().sessions.length;
  const current = get().currentSessionIndex;
  get().selectSession((current + delta + count) % count);
},
deleteSession(index) {
  const deletingLastSession = get().sessions.length === 1;
  const deletedSession = get().sessions.at(index);

  if (!deletedSession) return;

  const sessions = get().sessions.slice();
  sessions.splice(index, 1);

  // Shift the selection left when a preceding session was removed,
  // and clamp to the new last index.
  const currentIndex = get().currentSessionIndex;
  let nextIndex = Math.min(
    currentIndex - Number(index < currentIndex),
    sessions.length - 1,
  );

  // Never leave the list empty: replace the sole deleted session.
  if (deletingLastSession) {
    nextIndex = 0;
    sessions.push(createEmptySession());
  }

  // Snapshot the pre-delete state so the toast can undo the deletion.
  const restoreState = {
    currentSessionIndex: get().currentSessionIndex,
    sessions: get().sessions.slice(),
  };

  set(() => ({
    currentSessionIndex: nextIndex,
    sessions,
  }));

  showToast(
    Locale.Home.DeleteToast,
    {
      text: Locale.Home.Revert,
      onClick() {
        set(() => restoreState);
      },
    },
    5000,
  );
},
currentSession() {
  let index = get().currentSessionIndex;
  const sessions = get().sessions;

  // Clamp an out-of-range index back into bounds before use.
  if (index < 0 || index >= sessions.length) {
    index = Math.min(sessions.length - 1, Math.max(0, index));
    set(() => ({ currentSessionIndex: index }));
  }

  return sessions[index];
},
onNewMessage(message) {
  get().updateCurrentSession((session) => {
    // concat() copies the array so subscribers see a new reference.
    session.messages = session.messages.concat();
    session.lastUpdate = Date.now();
  });
  get().updateStat(message);
  get().summarizeSession();
},
async onUserInput(content) {
const session = get().currentSession();
const modelConfig = session.mask.modelConfig;
const userContent = fillTemplateWith(content, modelConfig);
2023-06-26 05:18:59 +00:00
console.log("[User Input] after template: ", userContent);
const userMessage: ChatMessage = createMessage({
2023-03-10 18:25:33 +00:00
role: "user",
content: userContent,
2023-04-05 19:19:33 +00:00
});
2023-03-10 18:25:33 +00:00
const botMessage: ChatMessage = createMessage({
2023-03-11 12:54:24 +00:00
role: "assistant",
streaming: true,
id: userMessage.id! + 1,
model: modelConfig.model,
2023-04-05 19:19:33 +00:00
});
2023-03-11 12:54:24 +00:00
2023-03-21 16:20:32 +00:00
// get recent messages
const recentMessages = get().getMessagesWithMemory();
const sendMessages = recentMessages.concat(userMessage);
2023-03-26 10:59:09 +00:00
const sessionIndex = get().currentSessionIndex;
const messageIndex = get().currentSession().messages.length + 1;
2023-03-19 16:09:30 +00:00
// save user's and bot's message
2023-03-11 12:54:24 +00:00
get().updateCurrentSession((session) => {
const savedUserMessage = {
...userMessage,
content,
};
session.messages = session.messages.concat([
savedUserMessage,
botMessage,
]);
2023-03-11 12:54:24 +00:00
});
2023-03-26 10:59:09 +00:00
// make request
api.llm.chat({
messages: sendMessages,
config: { ...modelConfig, stream: true },
onUpdate(message) {
botMessage.streaming = true;
2023-05-20 11:58:12 +00:00
if (message) {
botMessage.content = message;
}
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
},
onFinish(message) {
botMessage.streaming = false;
2023-05-20 11:58:12 +00:00
if (message) {
botMessage.content = message;
get().onNewMessage(botMessage);
}
ChatControllerPool.remove(
sessionIndex,
botMessage.id ?? messageIndex,
);
2023-03-11 12:54:24 +00:00
},
onError(error) {
const isAborted = error.message.includes("aborted");
2023-05-20 11:58:12 +00:00
botMessage.content =
"\n\n" +
prettyObject({
error: true,
message: error.message,
});
2023-03-11 17:14:07 +00:00
botMessage.streaming = false;
userMessage.isError = !isAborted;
botMessage.isError = !isAborted;
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
ChatControllerPool.remove(
sessionIndex,
botMessage.id ?? messageIndex,
);
2023-05-20 11:58:12 +00:00
console.error("[Chat] failed ", error);
2023-03-26 10:59:09 +00:00
},
onController(controller) {
// collect controller for stop/retry
ChatControllerPool.addController(
2023-03-26 10:59:09 +00:00
sessionIndex,
2023-04-05 19:19:33 +00:00
botMessage.id ?? messageIndex,
2023-04-18 03:44:15 +00:00
controller,
2023-03-26 10:59:09 +00:00
);
2023-03-11 17:14:07 +00:00
},
2023-03-10 18:25:33 +00:00
});
},
2023-03-19 16:29:09 +00:00
getMemoryPrompt() {
  const session = get().currentSession();

  // Wrap the rolling summary as a system message; empty content when
  // no summary has been produced yet.
  return {
    role: "system",
    content:
      session.memoryPrompt.length > 0
        ? Locale.Store.Prompt.History(session.memoryPrompt)
        : "",
    date: "",
  } as ChatMessage;
},
getMessagesWithMemory() {
  const session = get().currentSession();
  const modelConfig = session.mask.modelConfig;
  const clearContextIndex = session.clearContextIndex ?? 0;
  const messages = session.messages.slice();
  const totalMessageCount = session.messages.length;

  // in-context prompts defined by the mask
  const contextPrompts = session.mask.context.slice();

  // Inject the default system prompt only when the mask defines no
  // context prompts (to get close to OpenAI Web ChatGPT).
  const shouldInjectSystemPrompts = contextPrompts.length === 0;
  const systemPrompts = shouldInjectSystemPrompts
    ? [
        createMessage({
          role: "system",
          content: fillTemplateWith("", {
            ...modelConfig,
            template: DEFAULT_SYSTEM_TEMPLATE,
          }),
        }),
      ]
    : [];
  if (shouldInjectSystemPrompts) {
    console.log(
      "[Global System Prompt] ",
      systemPrompts.at(0)?.content ?? "empty",
    );
  }

  // Long term memory: the summarized history, if enabled and not
  // invalidated by a later context clear.
  const shouldSendLongTermMemory =
    modelConfig.sendMemory &&
    session.memoryPrompt &&
    session.memoryPrompt.length > 0 &&
    session.lastSummarizeIndex > clearContextIndex;
  const longTermMemoryPrompts = shouldSendLongTermMemory
    ? [get().getMemoryPrompt()]
    : [];
  const longTermMemoryStartIndex = session.lastSummarizeIndex;

  // Short term memory: the latest n raw messages.
  const shortTermMemoryStartIndex = Math.max(
    0,
    totalMessageCount - modelConfig.historyMessageCount,
  );

  // Final context, in order:
  // 0. system prompt  1. long term memory  2. in-context prompts
  // 3. short term memory (the newest input is appended by the caller).
  const memoryStartIndex = shouldSendLongTermMemory
    ? Math.min(longTermMemoryStartIndex, shortTermMemoryStartIndex)
    : shortTermMemoryStartIndex;
  // A context clear also hides anything before it.
  const contextStartIndex = Math.max(clearContextIndex, memoryStartIndex);
  const maxTokenThreshold = modelConfig.max_tokens;

  // Walk backwards collecting as many recent messages as fit in the
  // token budget, skipping errored ones.
  const reversedRecentMessages = [];
  for (
    let i = totalMessageCount - 1, tokenCount = 0;
    i >= contextStartIndex && tokenCount < maxTokenThreshold;
    i -= 1
  ) {
    const msg = messages[i];
    if (!msg || msg.isError) continue;
    tokenCount += estimateTokenLength(msg.content);
    reversedRecentMessages.push(msg);
  }

  return [
    ...systemPrompts,
    ...longTermMemoryPrompts,
    ...contextPrompts,
    ...reversedRecentMessages.reverse(),
  ];
},
updateMessage(
  sessionIndex: number,
  messageIndex: number,
  updater: (message?: ChatMessage) => void,
) {
  const sessions = get().sessions;
  // Mutate the target message in place; the updater receives
  // undefined when either index is out of range. Then re-set the
  // sessions array to notify subscribers.
  updater(sessions.at(sessionIndex)?.messages?.at(messageIndex));
  set(() => ({ sessions }));
},
resetSession() {
  // Wipe the current session's history and its rolling summary.
  get().updateCurrentSession((session) => {
    session.messages = [];
    session.memoryPrompt = "";
  });
},
// Maintains two summaries for the current session:
// 1. a short topic used as the session title, once enough was said;
// 2. a rolling "memory prompt" that compresses older history.
summarizeSession() {
  const session = get().currentSession();
  const messages = session.messages;

  // should summarize topic after chating more than 50 words
  const SUMMARIZE_MIN_LEN = 50;
  if (
    session.topic === DEFAULT_TOPIC &&
    countMessages(messages) >= SUMMARIZE_MIN_LEN
  ) {
    const topicMessages = messages.concat(
      createMessage({
        role: "user",
        content: Locale.Store.Prompt.Topic,
      }),
    );
    api.llm.chat({
      messages: topicMessages,
      config: {
        model: "gpt-3.5-turbo",
      },
      onFinish(message) {
        get().updateCurrentSession(
          (session) =>
            (session.topic =
              message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
        );
      },
    });
  }

  const modelConfig = session.mask.modelConfig;

  // Summarize only messages newer than the last summary and any
  // context-clear point, skipping errored messages.
  const summarizeIndex = Math.max(
    session.lastSummarizeIndex,
    session.clearContextIndex ?? 0,
  );
  let toBeSummarizedMsgs = messages
    .filter((msg) => !msg.isError)
    .slice(summarizeIndex);

  const historyMsgLength = countMessages(toBeSummarizedMsgs);

  // BUGFIX: was `historyMsgLength > modelConfig?.max_tokens ?? 4000`,
  // which parses as `(historyMsgLength > modelConfig?.max_tokens) ?? 4000`;
  // a `>` comparison is never nullish, so the 4000 fallback was dead
  // code. Parenthesize so the fallback applies when max_tokens is unset.
  if (historyMsgLength > (modelConfig?.max_tokens ?? 4000)) {
    // Too long to summarize in one go: keep only the most recent
    // historyMessageCount messages.
    const n = toBeSummarizedMsgs.length;
    toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
      Math.max(0, n - modelConfig.historyMessageCount),
    );
  }

  // add memory prompt, so the new summary builds on the old one
  toBeSummarizedMsgs.unshift(get().getMemoryPrompt());

  const lastSummarizeIndex = session.messages.length;

  console.log(
    "[Chat History] ",
    toBeSummarizedMsgs,
    historyMsgLength,
    modelConfig.compressMessageLengthThreshold,
  );

  if (
    historyMsgLength > modelConfig.compressMessageLengthThreshold &&
    modelConfig.sendMemory
  ) {
    api.llm.chat({
      messages: toBeSummarizedMsgs.concat({
        role: "system",
        content: Locale.Store.Prompt.Summarize,
        date: "",
      }),
      config: { ...modelConfig, stream: true },
      onUpdate(message) {
        // Stream partial summaries straight into the session.
        session.memoryPrompt = message;
      },
      onFinish(message) {
        console.log("[Memory] ", message);
        session.lastSummarizeIndex = lastSummarizeIndex;
      },
      onError(err) {
        console.error("[Summarize] ", err);
      },
    });
  }
},
updateStat(message) {
  get().updateCurrentSession((session) => {
    // Only charCount is tracked so far.
    // TODO: should update chat count and word count
    session.stat.charCount += message.content.length;
  });
},
updateCurrentSession(updater) {
  // Apply `updater` to the active session in place, then re-set the
  // sessions array so subscribers are notified.
  const sessions = get().sessions;
  updater(sessions[get().currentSessionIndex]);
  set(() => ({ sessions }));
},
clearAllData() {
  // Wipe every persisted store and reload the app from scratch.
  localStorage.clear();
  location.reload();
},
}),
{
  name: StoreKey.Chat,
  version: 2,
  migrate(persistedState, version) {
    const state = persistedState as any;
    // Deep-copy so the migration never mutates the persisted input.
    const newState = JSON.parse(JSON.stringify(state)) as ChatStore;

    if (version < 2) {
      // v1 -> v2: rebuild every session on top of a fresh mask with
      // memory-related defaults enabled.
      newState.globalId = 0;
      newState.sessions = [];

      for (const oldSession of state.sessions) {
        const newSession = createEmptySession();
        newSession.topic = oldSession.topic;
        newSession.messages = [...oldSession.messages];
        newSession.mask.modelConfig.sendMemory = true;
        newSession.mask.modelConfig.historyMessageCount = 4;
        newSession.mask.modelConfig.compressMessageLengthThreshold = 1000;
        newState.sessions.push(newSession);
      }
    }

    return newState;
  },
},
),
);