GH Action - Upstream Sync 2023-05-15 01:00:59 +00:00
commit 68e27e9513
21 changed files with 527 additions and 593 deletions

View File

@@ -1,49 +1,8 @@
import { createParser } from "eventsource-parser";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { requestOpenai } from "../../common";
async function createStream(res: Response) {
const encoder = new TextEncoder();
const decoder = new TextDecoder();
const stream = new ReadableStream({
async start(controller) {
function onParse(event: any) {
if (event.type === "event") {
const data = event.data;
// https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
if (data === "[DONE]") {
controller.close();
return;
}
try {
const json = JSON.parse(data);
const text = json.choices[0].delta.content;
const queue = encoder.encode(text);
controller.enqueue(queue);
} catch (e) {
controller.error(e);
}
}
}
const parser = createParser(onParse);
for await (const chunk of res.body as any) {
parser.feed(decoder.decode(chunk, { stream: true }));
}
},
});
return stream;
}
function formatResponse(msg: any) {
const jsonMsg = ["```json\n", JSON.stringify(msg, null, " "), "\n```"].join(
"",
);
return new Response(jsonMsg);
}
async function handle(
req: NextRequest,
{ params }: { params: { path: string[] } },
@@ -58,40 +17,10 @@ async function handle(
}
try {
const api = await requestOpenai(req);
const contentType = api.headers.get("Content-Type") ?? "";
// streaming response
if (contentType.includes("stream")) {
const stream = await createStream(api);
const res = new Response(stream);
res.headers.set("Content-Type", contentType);
return res;
}
// try to parse error msg
try {
const mayBeErrorBody = await api.json();
if (mayBeErrorBody.error) {
console.error("[OpenAI Response] ", mayBeErrorBody);
return formatResponse(mayBeErrorBody);
} else {
const res = new Response(JSON.stringify(mayBeErrorBody));
res.headers.set("Content-Type", "application/json");
res.headers.set("Cache-Control", "no-cache");
return res;
}
} catch (e) {
console.error("[OpenAI Parse] ", e);
return formatResponse({
msg: "invalid response from openai server",
error: e,
});
}
return await requestOpenai(req);
} catch (e) {
console.error("[OpenAI] ", e);
return formatResponse(e);
return NextResponse.json(prettyObject(e));
}
}

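With streaming and error handling moved into the client, the route above shrinks to a thin proxy. A minimal sketch of the handler's post-change shape, assembled from the kept lines (the auth check elided by the hunk marker is reduced to a comment):

async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  // ...auth check elided by the hunk above...
  try {
    return await requestOpenai(req);
  } catch (e) {
    console.error("[OpenAI] ", e);
    return NextResponse.json(prettyObject(e));
  }
}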
View File

@@ -1,9 +0,0 @@
import type {
CreateChatCompletionRequest,
CreateChatCompletionResponse,
} from "openai";
export type ChatRequest = CreateChatCompletionRequest;
export type ChatResponse = CreateChatCompletionResponse;
export type Updater<T> = (updater: (value: T) => void) => void;

app/client/api.ts Normal file
View File

@@ -0,0 +1,83 @@
import { ACCESS_CODE_PREFIX } from "../constant";
import { ModelConfig, ModelType, useAccessStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
export const Models = ["gpt-3.5-turbo", "gpt-4"] as const;
export type ChatModel = ModelType;
export interface RequestMessage {
role: MessageRole;
content: string;
}
export interface LLMConfig {
model: string;
temperature?: number;
top_p?: number;
stream?: boolean;
presence_penalty?: number;
frequency_penalty?: number;
}
export interface ChatOptions {
messages: RequestMessage[];
config: LLMConfig;
onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string) => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
}
export interface LLMUsage {
used: number;
total: number;
}
export abstract class LLMApi {
abstract chat(options: ChatOptions): Promise<void>;
abstract usage(): Promise<LLMUsage>;
}
export class ClientApi {
public llm: LLMApi;
constructor() {
this.llm = new ChatGPTApi();
}
config() {}
prompts() {}
masks() {}
}
export const api = new ClientApi();
export function getHeaders() {
const accessStore = useAccessStore.getState();
let headers: Record<string, string> = {
"Content-Type": "application/json",
};
const makeBearer = (token: string) => `Bearer ${token.trim()}`;
const validString = (x: string) => x && x.length > 0;
// use user's api key first
if (validString(accessStore.token)) {
headers.Authorization = makeBearer(accessStore.token);
} else if (
accessStore.enabledAccessControl() &&
validString(accessStore.accessCode)
) {
headers.Authorization = makeBearer(
ACCESS_CODE_PREFIX + accessStore.accessCode,
);
}
return headers;
}

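For reference, a minimal sketch of how a caller drives this interface, using the api singleton defined above (message text and log labels are illustrative):

api.llm.chat({
  messages: [{ role: "user", content: "Hello" }],
  config: { model: "gpt-3.5-turbo", stream: true },
  onUpdate(message, chunk) {
    // fires per streamed delta; `message` is the accumulated text so far
    console.log("[delta]", chunk);
  },
  onFinish(message) {
    console.log("[done]", message);
  },
  onError(err) {
    console.error(err);
  },
});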
app/client/controller.ts Normal file
View File

@@ -0,0 +1,37 @@
// To store message streaming controllers
export const ChatControllerPool = {
controllers: {} as Record<string, AbortController>,
addController(
sessionIndex: number,
messageId: number,
controller: AbortController,
) {
const key = this.key(sessionIndex, messageId);
this.controllers[key] = controller;
return key;
},
stop(sessionIndex: number, messageId: number) {
const key = this.key(sessionIndex, messageId);
const controller = this.controllers[key];
controller?.abort();
},
stopAll() {
Object.values(this.controllers).forEach((v) => v.abort());
},
hasPending() {
return Object.values(this.controllers).length > 0;
},
remove(sessionIndex: number, messageId: number) {
const key = this.key(sessionIndex, messageId);
delete this.controllers[key];
},
key(sessionIndex: number, messageIndex: number) {
return `${sessionIndex},${messageIndex}`;
},
};

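A sketch of the intended wiring, with illustrative indices: the chat store registers a controller when a request starts, and the UI stop button aborts it under the same key:

const controller = new AbortController();
ChatControllerPool.addController(0, 42, controller); // sessionIndex 0, messageId 42
// pass controller.signal to fetch, then from the stop button:
ChatControllerPool.stop(0, 42); // aborts the in-flight request
ChatControllerPool.remove(0, 42); // clean up once the stream settles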
View File

@@ -0,0 +1,192 @@
import { REQUEST_TIMEOUT_MS } from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { ChatOptions, getHeaders, LLMApi, LLMUsage } from "../api";
import Locale from "../../locales";
export class ChatGPTApi implements LLMApi {
public ChatPath = "v1/chat/completions";
public UsagePath = "dashboard/billing/usage";
public SubsPath = "dashboard/billing/subscription";
path(path: string): string {
let openaiUrl = useAccessStore.getState().openaiUrl;
// reassign: slice() returns a new string, so the result must be kept
if (openaiUrl.endsWith("/")) openaiUrl = openaiUrl.slice(0, openaiUrl.length - 1);
return [openaiUrl, path].join("/");
}
extractMessage(res: any) {
return res.choices?.at(0)?.message?.content ?? "";
}
async chat(options: ChatOptions) {
const messages = options.messages.map((v) => ({
role: v.role,
content: v.content,
}));
const modelConfig = {
...useAppConfig.getState().modelConfig,
...useChatStore.getState().currentSession().mask.modelConfig,
...{
model: options.config.model,
},
};
const requestPayload = {
messages,
stream: options.config.stream,
model: modelConfig.model,
temperature: modelConfig.temperature,
presence_penalty: modelConfig.presence_penalty,
};
console.log("[Request] openai payload: ", requestPayload);
const shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
try {
const chatPath = this.path(this.ChatPath);
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
signal: controller.signal,
headers: getHeaders(),
};
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
if (shouldStream) {
let responseText = "";
const finish = () => {
options.onFinish(responseText);
};
const res = await fetch(chatPath, chatPayload);
clearTimeout(requestTimeoutId);
if (res.status === 401) {
responseText += "\n\n" + Locale.Error.Unauthorized;
return finish();
}
if (
!res.ok ||
!res.headers.get("Content-Type")?.includes("stream") ||
!res.body
) {
return options.onError?.(new Error());
}
const reader = res.body.getReader();
const decoder = new TextDecoder("utf-8");
while (true) {
const { done, value } = await reader.read();
if (done) {
return finish();
}
const chunk = decoder.decode(value, { stream: true });
const lines = chunk.split("data: ");
for (const line of lines) {
const text = line.trim();
if (text.startsWith("[DONE]")) {
return finish();
}
if (text.length === 0) continue;
try {
const json = JSON.parse(text);
const delta = json.choices[0].delta.content;
if (delta) {
responseText += delta;
options.onUpdate?.(responseText, delta);
}
} catch (e) {
console.error("[Request] parse error", text, chunk);
}
}
}
} else {
const res = await fetch(chatPath, chatPayload);
clearTimeout(requestTimeoutId);
const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
options.onError?.(e as Error);
}
}
async usage() {
const formatDate = (d: Date) =>
`${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
.getDate()
.toString()
.padStart(2, "0")}`;
const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
const now = new Date();
const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
const startDate = formatDate(startOfMonth);
const endDate = formatDate(new Date(Date.now() + ONE_DAY));
const [used, subs] = await Promise.all([
fetch(
this.path(
`${this.UsagePath}?start_date=${startDate}&end_date=${endDate}`,
),
{
method: "GET",
headers: getHeaders(),
},
),
fetch(this.path(this.SubsPath), {
method: "GET",
headers: getHeaders(),
}),
]);
if (!used.ok || !subs.ok || used.status === 401) {
throw new Error(Locale.Error.Unauthorized);
}
const response = (await used.json()) as {
total_usage?: number;
error?: {
type: string;
message: string;
};
};
const total = (await subs.json()) as {
hard_limit_usd?: number;
};
if (response.error && response.error.type) {
throw Error(response.error.message);
}
if (response.total_usage) {
response.total_usage = Math.round(response.total_usage) / 100;
}
if (total.hard_limit_usd) {
total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
}
return {
used: response.total_usage,
total: total.hard_limit_usd,
} as LLMUsage;
}
}

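The hand-rolled reader above replaces the eventsource-parser dependency dropped from package.json below: each network chunk is split on "data: " and every segment is JSON-parsed. The wire format this assumes, per OpenAI's streaming API (payloads illustrative):

// data: {"choices":[{"delta":{"content":"Hel"}}]}
// data: {"choices":[{"delta":{"content":"lo"}}]}
// data: [DONE]

A delta whose JSON straddles two network chunks will fail JSON.parse; the catch in the loop only logs it and continues, so at worst that fragment is dropped.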
View File

@@ -22,7 +22,7 @@ import BottomIcon from "../icons/bottom.svg";
import StopIcon from "../icons/pause.svg";
import {
Message,
ChatMessage,
SubmitKey,
useChatStore,
BOT_HELLO,
@@ -43,7 +43,7 @@ import {
import dynamic from "next/dynamic";
import { ControllerPool } from "../requests";
import { ChatControllerPool } from "../client/controller";
import { Prompt, usePromptStore } from "../store/prompt";
import Locale from "../locales";
@@ -63,7 +63,7 @@ const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
loading: () => <LoadingIcon />,
});
function exportMessages(messages: Message[], topic: string) {
function exportMessages(messages: ChatMessage[], topic: string) {
const mdText =
`# ${topic}\n\n` +
messages
@@ -331,8 +331,8 @@ export function ChatActions(props: {
}
// stop all responses
const couldStop = ControllerPool.hasPending();
const stopAll = () => ControllerPool.stopAll();
const couldStop = ChatControllerPool.hasPending();
const stopAll = () => ChatControllerPool.stopAll();
return (
<div className={chatStyle["chat-input-actions"]}>
@@ -394,7 +394,7 @@ export function ChatActions(props: {
}
export function Chat() {
type RenderMessage = Message & { preview?: boolean };
type RenderMessage = ChatMessage & { preview?: boolean };
const chatStore = useChatStore();
const [session, sessionIndex] = useChatStore((state) => [
@@ -487,7 +487,7 @@ export function Chat() {
// stop response
const onUserStop = (messageId: number) => {
ControllerPool.stop(sessionIndex, messageId);
ChatControllerPool.stop(sessionIndex, messageId);
};
// check if should send message
@@ -507,7 +507,7 @@ export function Chat() {
e.preventDefault();
}
};
const onRightClick = (e: any, message: Message) => {
const onRightClick = (e: any, message: ChatMessage) => {
// copy to clipboard
if (selectOrCopy(e.currentTarget, message.content)) {
e.preventDefault();

View File

@@ -186,7 +186,7 @@
.chat-item-delete {
position: absolute;
top: 10px;
right: -20px;
right: 0;
transition: all ease 0.3s;
opacity: 0;
cursor: pointer;
@@ -194,7 +194,7 @@
.chat-item:hover > .chat-item-delete {
opacity: 0.5;
right: 10px;
transform: translateX(-10px);
}
.chat-item:hover > .chat-item-delete:hover {

View File

@@ -23,7 +23,6 @@ import {
} from "react-router-dom";
import { SideBar } from "./sidebar";
import { useAppConfig } from "../store/config";
import { useMaskStore } from "../store/mask";
export function Loading(props: { noLogo?: boolean }) {
return (
@@ -91,12 +90,24 @@ const useHasHydrated = () => {
return hasHydrated;
};
const loadAsyncGoogleFont = () => {
const linkEl = document.createElement("link");
linkEl.rel = "stylesheet";
linkEl.href =
"/google-fonts/css2?family=Noto+Sans+SC:wght@300;400;700;900&display=swap";
document.head.appendChild(linkEl);
};
function Screen() {
const config = useAppConfig();
const location = useLocation();
const isHome = location.pathname === Path.Home;
const isMobileScreen = useMobileScreen();
useEffect(() => {
loadAsyncGoogleFont();
}, []);
return (
<div
className={

View File

@@ -13,7 +13,8 @@ import EyeIcon from "../icons/eye.svg";
import CopyIcon from "../icons/copy.svg";
import { DEFAULT_MASK_AVATAR, Mask, useMaskStore } from "../store/mask";
import { Message, ModelConfig, ROLES, useChatStore } from "../store";
import { ChatMessage, ModelConfig, useChatStore } from "../store";
import { ROLES } from "../client/api";
import { Input, List, ListItem, Modal, Popover, Select } from "./ui-lib";
import { Avatar, AvatarPicker } from "./emoji";
import Locale, { AllLangs, Lang } from "../locales";
@@ -22,7 +23,7 @@ import { useNavigate } from "react-router-dom";
import chatStyle from "./chat.module.scss";
import { useState } from "react";
import { downloadAs, readFromFile } from "../utils";
import { Updater } from "../api/openai/typing";
import { Updater } from "../typing";
import { ModelConfigList } from "./model-config";
import { FileName, Path } from "../constant";
import { BUILTIN_MASK_STORE } from "../masks";
@@ -107,8 +108,8 @@ export function MaskConfig(props: {
}
function ContextPromptItem(props: {
prompt: Message;
update: (prompt: Message) => void;
prompt: ChatMessage;
update: (prompt: ChatMessage) => void;
remove: () => void;
}) {
const [focusingInput, setFocusingInput] = useState(false);
@@ -160,12 +161,12 @@ function ContextPromptItem(props: {
}
export function ContextPrompts(props: {
context: Message[];
updateContext: (updater: (context: Message[]) => void) => void;
context: ChatMessage[];
updateContext: (updater: (context: ChatMessage[]) => void) => void;
}) {
const context = props.context;
const addContextPrompt = (prompt: Message) => {
const addContextPrompt = (prompt: ChatMessage) => {
props.updateContext((context) => context.push(prompt));
};
@@ -173,7 +174,7 @@ export function ContextPrompts(props: {
props.updateContext((context) => context.splice(i, 1));
};
const updateContextPrompt = (i: number, prompt: Message) => {
const updateContextPrompt = (i: number, prompt: ChatMessage) => {
props.updateContext((context) => (context[i] = prompt));
};

View File

@@ -40,3 +40,5 @@ export const NARROW_SIDEBAR_WIDTH = 100;
export const ACCESS_CODE_PREFIX = "ak-";
export const LAST_INPUT_KEY = "last-input";
export const REQUEST_TIMEOUT_MS = 60000;

View File

@@ -34,11 +34,6 @@ export default function RootLayout({
<head>
<meta name="version" content={buildConfig.commitId} />
<link rel="manifest" href="/site.webmanifest"></link>
<link rel="preconnect" href="https://fonts.proxy.ustclug.org"></link>
<link
rel="stylesheet"
href="https://fonts.proxy.ustclug.org/css2?family=Noto+Sans+SC:wght@300;400;700;900&display=swap"
></link>
<script src="/serviceWorkerRegister.js" defer></script>
</head>
<body>{children}</body>

View File

@@ -1,285 +0,0 @@
import type { ChatRequest, ChatResponse } from "./api/openai/typing";
import {
Message,
ModelConfig,
ModelType,
useAccessStore,
useAppConfig,
useChatStore,
} from "./store";
import { showToast } from "./components/ui-lib";
import { ACCESS_CODE_PREFIX } from "./constant";
const TIME_OUT_MS = 60000;
const makeRequestParam = (
messages: Message[],
options?: {
stream?: boolean;
overrideModel?: ModelType;
},
): ChatRequest => {
let sendMessages = messages.map((v) => ({
role: v.role,
content: v.content,
}));
const modelConfig = {
...useAppConfig.getState().modelConfig,
...useChatStore.getState().currentSession().mask.modelConfig,
};
// override model config
if (options?.overrideModel) {
modelConfig.model = options.overrideModel;
}
return {
messages: sendMessages,
stream: options?.stream,
model: modelConfig.model,
temperature: modelConfig.temperature,
presence_penalty: modelConfig.presence_penalty,
};
};
export function getHeaders() {
const accessStore = useAccessStore.getState();
let headers: Record<string, string> = {};
const makeBearer = (token: string) => `Bearer ${token.trim()}`;
const validString = (x: string) => x && x.length > 0;
// use user's api key first
if (validString(accessStore.token)) {
headers.Authorization = makeBearer(accessStore.token);
} else if (
accessStore.enabledAccessControl() &&
validString(accessStore.accessCode)
) {
headers.Authorization = makeBearer(
ACCESS_CODE_PREFIX + accessStore.accessCode,
);
}
return headers;
}
export function requestOpenaiClient(path: string) {
const openaiUrl = useAccessStore.getState().openaiUrl;
return (body: any, method = "POST") =>
fetch(openaiUrl + path, {
method,
body: body && JSON.stringify(body),
headers: getHeaders(),
});
}
export async function requestChat(
messages: Message[],
options?: {
model?: ModelType;
},
) {
const req: ChatRequest = makeRequestParam(messages, {
overrideModel: options?.model,
});
const res = await requestOpenaiClient("v1/chat/completions")(req);
try {
const response = (await res.json()) as ChatResponse;
return response;
} catch (error) {
console.error("[Request Chat] ", error, res.body);
}
}
export async function requestUsage() {
const formatDate = (d: Date) =>
`${d.getFullYear()}-${(d.getMonth() + 1).toString().padStart(2, "0")}-${d
.getDate()
.toString()
.padStart(2, "0")}`;
const ONE_DAY = 1 * 24 * 60 * 60 * 1000;
const now = new Date();
const startOfMonth = new Date(now.getFullYear(), now.getMonth(), 1);
const startDate = formatDate(startOfMonth);
const endDate = formatDate(new Date(Date.now() + ONE_DAY));
const [used, subs] = await Promise.all([
requestOpenaiClient(
`dashboard/billing/usage?start_date=${startDate}&end_date=${endDate}`,
)(null, "GET"),
requestOpenaiClient("dashboard/billing/subscription")(null, "GET"),
]);
const response = (await used.json()) as {
total_usage?: number;
error?: {
type: string;
message: string;
};
};
const total = (await subs.json()) as {
hard_limit_usd?: number;
};
if (response.error && response.error.type) {
showToast(response.error.message);
return;
}
if (response.total_usage) {
response.total_usage = Math.round(response.total_usage) / 100;
}
if (total.hard_limit_usd) {
total.hard_limit_usd = Math.round(total.hard_limit_usd * 100) / 100;
}
return {
used: response.total_usage,
subscription: total.hard_limit_usd,
};
}
export async function requestChatStream(
messages: Message[],
options?: {
modelConfig?: ModelConfig;
overrideModel?: ModelType;
onMessage: (message: string, done: boolean) => void;
onError: (error: Error, statusCode?: number) => void;
onController?: (controller: AbortController) => void;
},
) {
const req = makeRequestParam(messages, {
stream: true,
overrideModel: options?.overrideModel,
});
console.log("[Request] ", req);
const controller = new AbortController();
const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);
try {
const openaiUrl = useAccessStore.getState().openaiUrl;
const res = await fetch(openaiUrl + "v1/chat/completions", {
method: "POST",
headers: {
"Content-Type": "application/json",
...getHeaders(),
},
body: JSON.stringify(req),
signal: controller.signal,
});
clearTimeout(reqTimeoutId);
let responseText = "";
const finish = () => {
options?.onMessage(responseText, true);
controller.abort();
};
if (res.ok) {
const reader = res.body?.getReader();
const decoder = new TextDecoder();
options?.onController?.(controller);
while (true) {
const resTimeoutId = setTimeout(() => finish(), TIME_OUT_MS);
const content = await reader?.read();
clearTimeout(resTimeoutId);
if (!content || !content.value) {
break;
}
const text = decoder.decode(content.value, { stream: true });
responseText += text;
const done = content.done;
options?.onMessage(responseText, false);
if (done) {
break;
}
}
finish();
} else if (res.status === 401) {
console.error("Unauthorized");
options?.onError(new Error("Unauthorized"), res.status);
} else {
console.error("Stream Error", res.body);
options?.onError(new Error("Stream Error"), res.status);
}
} catch (err) {
console.error("Network Error", err);
options?.onError(err as Error);
}
}
export async function requestWithPrompt(
messages: Message[],
prompt: string,
options?: {
model?: ModelType;
},
) {
messages = messages.concat([
{
role: "user",
content: prompt,
date: new Date().toLocaleString(),
},
]);
const res = await requestChat(messages, options);
return res?.choices?.at(0)?.message?.content ?? "";
}
// To store message streaming controller
export const ControllerPool = {
controllers: {} as Record<string, AbortController>,
addController(
sessionIndex: number,
messageId: number,
controller: AbortController,
) {
const key = this.key(sessionIndex, messageId);
this.controllers[key] = controller;
return key;
},
stop(sessionIndex: number, messageId: number) {
const key = this.key(sessionIndex, messageId);
const controller = this.controllers[key];
controller?.abort();
},
stopAll() {
Object.values(this.controllers).forEach((v) => v.abort());
},
hasPending() {
return Object.values(this.controllers).length > 0;
},
remove(sessionIndex: number, messageId: number) {
const key = this.key(sessionIndex, messageId);
delete this.controllers[key];
},
key(sessionIndex: number, messageIndex: number) {
return `${sessionIndex},${messageIndex}`;
},
};

View File

@@ -1,7 +1,7 @@
import { create } from "zustand";
import { persist } from "zustand/middleware";
import { StoreKey } from "../constant";
import { getHeaders } from "../requests";
import { getHeaders } from "../client/api";
import { BOT_HELLO } from "./chat";
import { ALL_MODELS } from "./config";

View File

@@ -1,12 +1,6 @@
import { create } from "zustand";
import { persist } from "zustand/middleware";
import { type ChatCompletionResponseMessage } from "openai";
import {
ControllerPool,
requestChatStream,
requestWithPrompt,
} from "../requests";
import { trimTopic } from "../utils";
import Locale from "../locales";
@@ -14,8 +8,11 @@ import { showToast } from "../components/ui-lib";
import { ModelType } from "./config";
import { createEmptyMask, Mask } from "./mask";
import { StoreKey } from "../constant";
import { api, RequestMessage } from "../client/api";
import { ChatControllerPool } from "../client/controller";
import { prettyObject } from "../utils/format";
export type Message = ChatCompletionResponseMessage & {
export type ChatMessage = RequestMessage & {
date: string;
streaming?: boolean;
isError?: boolean;
@@ -23,7 +20,7 @@ export type Message = ChatCompletionResponseMessage & {
model?: ModelType;
};
export function createMessage(override: Partial<Message>): Message {
export function createMessage(override: Partial<ChatMessage>): ChatMessage {
return {
id: Date.now(),
date: new Date().toLocaleString(),
@@ -33,8 +30,6 @@ export function createMessage(override: Partial<Message>): Message {
};
}
export const ROLES: Message["role"][] = ["system", "user", "assistant"];
export interface ChatStat {
tokenCount: number;
wordCount: number;
@@ -47,7 +42,7 @@ export interface ChatSession {
topic: string;
memoryPrompt: string;
messages: Message[];
messages: ChatMessage[];
stat: ChatStat;
lastUpdate: number;
lastSummarizeIndex: number;
@@ -56,7 +51,7 @@ export interface ChatSession {
}
export const DEFAULT_TOPIC = Locale.Store.DefaultTopic;
export const BOT_HELLO: Message = createMessage({
export const BOT_HELLO: ChatMessage = createMessage({
role: "assistant",
content: Locale.Store.BotHello,
});
@@ -88,24 +83,24 @@ interface ChatStore {
newSession: (mask?: Mask) => void;
deleteSession: (index: number) => void;
currentSession: () => ChatSession;
onNewMessage: (message: Message) => void;
onNewMessage: (message: ChatMessage) => void;
onUserInput: (content: string) => Promise<void>;
summarizeSession: () => void;
updateStat: (message: Message) => void;
updateStat: (message: ChatMessage) => void;
updateCurrentSession: (updater: (session: ChatSession) => void) => void;
updateMessage: (
sessionIndex: number,
messageIndex: number,
updater: (message?: Message) => void,
updater: (message?: ChatMessage) => void,
) => void;
resetSession: () => void;
getMessagesWithMemory: () => Message[];
getMemoryPrompt: () => Message;
getMessagesWithMemory: () => ChatMessage[];
getMemoryPrompt: () => ChatMessage;
clearAllData: () => void;
}
function countMessages(msgs: Message[]) {
function countMessages(msgs: ChatMessage[]) {
return msgs.reduce((pre, cur) => pre + cur.content.length, 0);
}
@@ -240,12 +235,12 @@ export const useChatStore = create<ChatStore>()(
const session = get().currentSession();
const modelConfig = session.mask.modelConfig;
const userMessage: Message = createMessage({
const userMessage: ChatMessage = createMessage({
role: "user",
content,
});
const botMessage: Message = createMessage({
const botMessage: ChatMessage = createMessage({
role: "assistant",
streaming: true,
id: userMessage.id! + 1,
@@ -277,45 +272,54 @@ export const useChatStore = create<ChatStore>()(
// make request
console.log("[User Input] ", sendMessages);
requestChatStream(sendMessages, {
onMessage(content, done) {
// stream response
if (done) {
botMessage.streaming = false;
botMessage.content = content;
get().onNewMessage(botMessage);
ControllerPool.remove(
sessionIndex,
botMessage.id ?? messageIndex,
);
} else {
botMessage.content = content;
set(() => ({}));
}
api.llm.chat({
messages: sendMessages,
config: { ...modelConfig, stream: true },
onUpdate(message) {
botMessage.streaming = true;
botMessage.content = message;
set(() => ({}));
},
onError(error, statusCode) {
onFinish(message) {
botMessage.streaming = false;
botMessage.content = message;
get().onNewMessage(botMessage);
ChatControllerPool.remove(
sessionIndex,
botMessage.id ?? messageIndex,
);
set(() => ({}));
},
onError(error) {
const isAborted = error.message.includes("aborted");
if (statusCode === 401) {
botMessage.content = Locale.Error.Unauthorized;
} else if (!isAborted) {
if (
botMessage.content !== Locale.Error.Unauthorized &&
!isAborted
) {
botMessage.content += "\n\n" + Locale.Store.Error;
} else if (botMessage.content.length === 0) {
botMessage.content = prettyObject(error);
}
botMessage.streaming = false;
userMessage.isError = !isAborted;
botMessage.isError = !isAborted;
set(() => ({}));
ControllerPool.remove(sessionIndex, botMessage.id ?? messageIndex);
ChatControllerPool.remove(
sessionIndex,
botMessage.id ?? messageIndex,
);
console.error("[Chat] error ", error);
},
onController(controller) {
// collect controller for stop/retry
ControllerPool.addController(
ChatControllerPool.addController(
sessionIndex,
botMessage.id ?? messageIndex,
controller,
);
},
modelConfig: { ...modelConfig },
});
},
@@ -329,7 +333,7 @@ export const useChatStore = create<ChatStore>()(
? Locale.Store.Prompt.History(session.memoryPrompt)
: "",
date: "",
} as Message;
} as ChatMessage;
},
getMessagesWithMemory() {
@@ -384,7 +388,7 @@ export const useChatStore = create<ChatStore>()(
updateMessage(
sessionIndex: number,
messageIndex: number,
updater: (message?: Message) => void,
updater: (message?: ChatMessage) => void,
) {
const sessions = get().sessions;
const session = sessions.at(sessionIndex);
@@ -403,24 +407,38 @@ export const useChatStore = create<ChatStore>()(
summarizeSession() {
const session = get().currentSession();
// remove error messages if any
const cleanMessages = session.messages.filter((msg) => !msg.isError);
// should summarize topic after chatting more than 50 words
const SUMMARIZE_MIN_LEN = 50;
if (
session.topic === DEFAULT_TOPIC &&
countMessages(session.messages) >= SUMMARIZE_MIN_LEN
countMessages(cleanMessages) >= SUMMARIZE_MIN_LEN
) {
requestWithPrompt(session.messages, Locale.Store.Prompt.Topic, {
model: "gpt-3.5-turbo",
}).then((res) => {
get().updateCurrentSession(
(session) =>
(session.topic = res ? trimTopic(res) : DEFAULT_TOPIC),
);
const topicMessages = cleanMessages.concat(
createMessage({
role: "user",
content: Locale.Store.Prompt.Topic,
}),
);
api.llm.chat({
messages: topicMessages,
config: {
model: "gpt-3.5-turbo",
},
onFinish(message) {
get().updateCurrentSession(
(session) =>
(session.topic =
message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
);
},
});
}
const modelConfig = session.mask.modelConfig;
let toBeSummarizedMsgs = session.messages.slice(
let toBeSummarizedMsgs = cleanMessages.slice(
session.lastSummarizeIndex,
);
@@ -449,26 +467,24 @@ export const useChatStore = create<ChatStore>()(
historyMsgLength > modelConfig.compressMessageLengthThreshold &&
session.mask.modelConfig.sendMemory
) {
requestChatStream(
toBeSummarizedMsgs.concat({
api.llm.chat({
messages: toBeSummarizedMsgs.concat({
role: "system",
content: Locale.Store.Prompt.Summarize,
date: "",
}),
{
overrideModel: "gpt-3.5-turbo",
onMessage(message, done) {
session.memoryPrompt = message;
if (done) {
console.log("[Memory] ", session.memoryPrompt);
session.lastSummarizeIndex = lastSummarizeIndex;
}
},
onError(error) {
console.error("[Summarize] ", error);
},
config: { ...modelConfig, stream: true },
onUpdate(message) {
session.memoryPrompt = message;
},
);
onFinish(message) {
console.log("[Memory] ", message);
session.lastSummarizeIndex = lastSummarizeIndex;
},
onError(err) {
console.error("[Summarize] ", err);
},
});
}
},

View File

@@ -2,7 +2,7 @@ import { create } from "zustand";
import { persist } from "zustand/middleware";
import { BUILTIN_MASKS } from "../masks";
import { getLang, Lang } from "../locales";
import { DEFAULT_TOPIC, Message } from "./chat";
import { DEFAULT_TOPIC, ChatMessage } from "./chat";
import { ModelConfig, ModelType, useAppConfig } from "./config";
import { StoreKey } from "../constant";
@@ -10,7 +10,7 @@ export type Mask = {
id: number;
avatar: string;
name: string;
context: Message[];
context: ChatMessage[];
modelConfig: ModelConfig;
lang: Lang;
builtin: boolean;

View File

@@ -1,7 +1,8 @@
import { create } from "zustand";
import { persist } from "zustand/middleware";
import { FETCH_COMMIT_URL, FETCH_TAG_URL, StoreKey } from "../constant";
import { requestUsage } from "../requests";
import { FETCH_COMMIT_URL, StoreKey } from "../constant";
import { api } from "../client/api";
import { showToast } from "../components/ui-lib";
export interface UpdateStore {
lastUpdate: number;
@@ -73,10 +74,17 @@ export const useUpdateStore = create<UpdateStore>()(
lastUpdateUsage: Date.now(),
}));
const usage = await requestUsage();
try {
const usage = await api.llm.usage();
if (usage) {
set(() => usage);
if (usage) {
set(() => ({
used: usage.used,
subscription: usage.total,
}));
}
} catch (e) {
showToast((e as Error).message);
}
},
}),

app/typing.ts Normal file
View File

@@ -0,0 +1 @@
export type Updater<T> = (updater: (value: T) => void) => void;

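The Updater<T> contract hands callers a draft value to mutate in place, as in updateContext((context) => context.push(prompt)) above. A minimal self-contained sketch (names hypothetical):

function useList(): { items: string[]; updateItems: Updater<string[]> } {
  const items: string[] = [];
  // the updater simply applies the caller's mutation to the draft
  return { items, updateItems: (fn) => fn(items) };
}

const { items, updateItems } = useList();
updateItems((list) => list.push("hello")); // items is now ["hello"]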
app/utils/format.ts Normal file
View File

@@ -0,0 +1,8 @@
export function prettyObject(msg: any) {
const prettyMsg = [
"```json\n",
JSON.stringify(msg, null, " "),
"\n```",
].join("");
return prettyMsg;
}

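For example (deterministic output of the stringify call above, shown as comments):

prettyObject({ error: { message: "Unauthorized" } });
// returns the string:
// ```json
// {
//   "error": {
//     "message": "Unauthorized"
//   }
// }
// ```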
View File

@@ -10,6 +10,10 @@ const nextConfig = {
source: "/api/proxy/:path*",
destination: "https://api.openai.com/:path*",
},
{
source: "/google-fonts/:path*",
destination: "https://fonts.googleapis.com/:path*",
},
];
const apiUrl = process.env.API_URL;

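This rewrite pairs with the loadAsyncGoogleFont helper added to home.tsx above: the stylesheet is now requested same-origin and proxied server-side, e.g.:

// /google-fonts/css2?family=Noto+Sans+SC:wght@300;400;700;900&display=swap
//   → https://fonts.googleapis.com/css2?family=Noto+Sans+SC:wght@300;400;700;900&display=swap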
View File

@@ -17,12 +17,10 @@
"@svgr/webpack": "^6.5.1",
"@vercel/analytics": "^0.1.11",
"emoji-picker-react": "^4.4.7",
"eventsource-parser": "^0.1.0",
"fuse.js": "^6.6.2",
"mermaid": "^10.1.0",
"next": "^13.3.1-canary.8",
"next": "^13.4.2",
"node-fetch": "^3.3.1",
"openai": "^3.2.1",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-markdown": "^8.0.5",

yarn.lock
View File

@@ -1111,10 +1111,10 @@
dependencies:
"@types/react" ">=16.0.0"
"@next/env@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/env/-/env-13.3.1-canary.8.tgz#9f5cf57999e4f4b59ef6407924803a247cc4e451"
integrity sha512-xZfNu7yq3OfiC4rkGuGMcqb25se+ZHRqajSdny8dp+nZzkNSK1SHuNT3W8faI+KGk6dqzO/zAdHR9YrqnQlCAg==
"@next/env@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/env/-/env-13.4.2.tgz#cf3ebfd523a33d8404c1216e02ac8d856a73170e"
integrity sha512-Wqvo7lDeS0KGwtwg9TT9wKQ8raelmUxt+TQKWvG/xKfcmDXNOtCuaszcfCF8JzlBG1q0VhpI6CKaRMbVPMDWgw==
"@next/eslint-plugin-next@13.2.3":
version "13.2.3"
@@ -1123,50 +1123,50 @@
dependencies:
glob "7.1.7"
"@next/swc-darwin-arm64@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-13.3.1-canary.8.tgz#66786ba76d37c210c184739624c6f84eaf2dc52b"
integrity sha512-BLbvhcaSzwuXbREOmJiqAdXVD7Jl9830hDY5ZTTNg7hXqEZgoMg2LxAEmtaaBMVZRfDQjd5bH3QPBV8fbG4UKg==
"@next/swc-darwin-arm64@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-13.4.2.tgz#d0b497df972bd02eee3bc823d6a76c2cc8b733ef"
integrity sha512-6BBlqGu3ewgJflv9iLCwO1v1hqlecaIH2AotpKfVUEzUxuuDNJQZ2a4KLb4MBl8T9/vca1YuWhSqtbF6ZuUJJw==
"@next/swc-darwin-x64@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-13.3.1-canary.8.tgz#289296bd3cc55db7fef42037eb89ce4a6260ba31"
integrity sha512-n4tJKPIvFTZshS1TVWrsqaW7h9VW+BmguO/AlZ3Q3NJ9hWxC5L4lxn2T6CTQ4M30Gf+t5u+dPzYLQ5IDtJFnFQ==
"@next/swc-darwin-x64@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/swc-darwin-x64/-/swc-darwin-x64-13.4.2.tgz#09a800bed8dfe4beec4cbf14092f9c22db24470b"
integrity sha512-iZuYr7ZvGLPjPmfhhMl0ISm+z8EiyLBC1bLyFwGBxkWmPXqdJ60mzuTaDSr5WezDwv0fz32HB7JHmRC6JVHSZg==
"@next/swc-linux-arm64-gnu@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.3.1-canary.8.tgz#dc79e8005849b6482241b460abdce9334665c766"
integrity sha512-AxnsgZ56whwVAeejyEZMk8xc8Vapwzb3Zn0YdZzPCR42WKfkcSkM+AWfq33zUOZnjvCmQBDyfHIo4CURVweR6g==
"@next/swc-linux-arm64-gnu@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.4.2.tgz#b7ade28834564120b0b25ffa0b79d75982d290bc"
integrity sha512-2xVabFtIge6BJTcJrW8YuUnYTuQjh4jEuRuS2mscyNVOj6zUZkom3CQg+egKOoS+zh2rrro66ffSKIS+ztFJTg==
"@next/swc-linux-arm64-musl@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.3.1-canary.8.tgz#f70873add4aad7ced36f760d1640adc008b7dc03"
integrity sha512-zc7rzhtrHMWZ/phvjCNplHGo+ZLembjtluI5J8Xl4iwQQCyZwAtnmQhs37/zkdi6dHZou+wcFBZWRz14awRDBw==
"@next/swc-linux-arm64-musl@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.4.2.tgz#f5420548234d35251630ddaa2e9a7dc32337a887"
integrity sha512-wKRCQ27xCUJx5d6IivfjYGq8oVngqIhlhSAJntgXLt7Uo9sRT/3EppMHqUZRfyuNBTbykEre1s5166z+pvRB5A==
"@next/swc-linux-x64-gnu@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.3.1-canary.8.tgz#fe81b8033628c6cf74e154f2db8c8c7f1593008f"
integrity sha512-vNbFDiuZ9fWmcznlilDbflZLb04evWPUQlyDT7Tqjd964PlSIaaX3tr64pdYjJOljDaqTr2Kbx0YW74mWF/PEw==
"@next/swc-linux-x64-gnu@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.4.2.tgz#0241dc011d73f08df9d9998cffdfcf08d1971520"
integrity sha512-NpCa+UVhhuNeaFVUP1Bftm0uqtvLWq2JTm7+Ta48+2Uqj2mNXrDIvyn1DY/ZEfmW/1yvGBRaUAv9zkMkMRixQA==
"@next/swc-linux-x64-musl@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.3.1-canary.8.tgz#ada4585046a7937f96f2d39fc4aaca12826dde5f"
integrity sha512-/FVBPJEBDZYCNraocRWtd5ObAgNi9VFnzJYGYDYIj4jKkFRWWm/CaWu9A7toQACC/JDy262uPyDPathXT9BAqQ==
"@next/swc-linux-x64-musl@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.4.2.tgz#fd35919e2b64b1c739583145799fefd594ef5d63"
integrity sha512-ZWVC72x0lW4aj44e3khvBrj2oSYj1bD0jESmyah3zG/3DplEy/FOtYkMzbMjHTdDSheso7zH8GIlW6CDQnKhmQ==
"@next/swc-win32-arm64-msvc@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.3.1-canary.8.tgz#21b4f6c4be61845759753df9313bd9bcbb241969"
integrity sha512-8jMwRCeI26yVZLPwG0AjOi4b1yqSeqYmbHA7r+dqiV0OgFdYjnbyHU1FmiKDaC5SnnJN6LWV2Qjer9GDD0Kcuw==
"@next/swc-win32-arm64-msvc@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.4.2.tgz#fa95d2dbb97707c130a868a1bd7e83e64bedf4c6"
integrity sha512-pLT+OWYpzJig5K4VKhLttlIfBcVZfr2+Xbjra0Tjs83NQSkFS+y7xx+YhCwvpEmXYLIvaggj2ONPyjbiigOvHQ==
"@next/swc-win32-ia32-msvc@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.3.1-canary.8.tgz#e23192e1d1b1a32b0eb805363b02360c5b523a77"
integrity sha512-kcYB9iSEikFhv0I9uQDdgQ2lm8i3O8LA+GhnED9e5VtURBwOSwED7c6ZpaRQBYSPgnEA9/xiJVChICE/I7Ig1g==
"@next/swc-win32-ia32-msvc@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.4.2.tgz#31a98e61d3cda92ec2293c50df7cb5280fc63697"
integrity sha512-dhpiksQCyGca4WY0fJyzK3FxMDFoqMb0Cn+uDB+9GYjpU2K5//UGPQlCwiK4JHxuhg8oLMag5Nf3/IPSJNG8jw==
"@next/swc-win32-x64-msvc@13.3.1-canary.8":
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.3.1-canary.8.tgz#a3f29404955cba2193de5e74fd5d9fcfdcb0ab51"
integrity sha512-UKrGHonKVWBNg+HI4J8pXE6Jjjl8GwjhygFau71s8M0+jSy99y5Y+nGH9EmMNWKNvrObukyYvrs6OsAusKdCqw==
"@next/swc-win32-x64-msvc@13.4.2":
version "13.4.2"
resolved "https://registry.npmmirror.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.4.2.tgz#8435ab6087046355f5de07122d3097949e8fab10"
integrity sha512-O7bort1Vld00cu8g0jHZq3cbSTUNMohOEvYqsqE10+yfohhdPHzvzO+ziJRz4Dyyr/fYKREwS7gR4JC0soSOMw==
"@nodelib/fs.scandir@2.1.5":
version "2.1.5"
@@ -1317,10 +1317,10 @@
"@svgr/plugin-jsx" "^6.5.1"
"@svgr/plugin-svgo" "^6.5.1"
"@swc/helpers@0.4.14":
version "0.4.14"
resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.4.14.tgz#1352ac6d95e3617ccb7c1498ff019654f1e12a74"
integrity sha512-4C7nX/dvpzB7za4Ql9K81xK3HPxCpHMgwTZVyf+9JQ6VUbn9jjZVN7/Nkdz/Ugzs2CSjqnL/UPXroiVBVHUWUw==
"@swc/helpers@0.5.1":
version "0.5.1"
resolved "https://registry.npmmirror.com/@swc/helpers/-/helpers-0.5.1.tgz#e9031491aa3f26bfcc974a67f48bd456c8a5357a"
integrity sha512-sJ902EfIzn1Fa+qYmjdQqh8tPsoxyBz+8yBKC2HKUxyezKJFwPGOn7pv4WY6QuQW//ySQi5lJjA/ZT9sNWWNTg==
dependencies:
tslib "^2.4.0"
@@ -1638,11 +1638,6 @@ astral-regex@^2.0.0:
resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31"
integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==
asynckit@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==
available-typed-arrays@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7"
@@ -1653,13 +1648,6 @@ axe-core@^4.6.2:
resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.6.3.tgz#fc0db6fdb65cc7a80ccf85286d91d64ababa3ece"
integrity sha512-/BQzOX780JhsxDnPpH4ZiyrJAzcd8AfzFPkv+89veFSr1rcMjuq2JDCwypKaPeB6ljHp9KjXhPpjgCvQlWYuqg==
axios@^0.26.0:
version "0.26.1"
resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
dependencies:
follow-redirects "^1.14.8"
axobject-query@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-3.1.1.tgz#3b6e5c6d4e43ca7ba51c5babf99d22a9c68485e1"
@@ -1880,13 +1868,6 @@ colorette@^2.0.19:
resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798"
integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==
combined-stream@^1.0.8:
version "1.0.8"
resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==
dependencies:
delayed-stream "~1.0.0"
comma-separated-tokens@^2.0.0:
version "2.0.3"
resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz#4e89c9458acb61bc8fef19f4529973b2392839ee"
@@ -2371,11 +2352,6 @@ delaunator@5:
dependencies:
robust-predicates "^3.0.0"
delayed-stream@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==
dequal@^2.0.0:
version "2.0.3"
resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be"
@@ -2816,11 +2792,6 @@ esutils@^2.0.2:
resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
eventsource-parser@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/eventsource-parser/-/eventsource-parser-0.1.0.tgz#4a6b84751ca8e704040e6f7f50e7d77344fa1b7c"
integrity sha512-M9QjFtEIkwytUarnx113HGmgtk52LSn3jNAtnWKi3V+b9rqSfQeVdLsaD5AG/O4IrGQwmAAHBIsqbmURPTd2rA==
execa@^7.0.0:
version "7.1.1"
resolved "https://registry.yarnpkg.com/execa/-/execa-7.1.1.tgz#3eb3c83d239488e7b409d48e8813b76bb55c9c43"
@@ -2929,11 +2900,6 @@ flatted@^3.1.0:
resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787"
integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==
follow-redirects@^1.14.8:
version "1.15.2"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"
integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==
for-each@^0.3.3:
version "0.3.3"
resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e"
@@ -2941,15 +2907,6 @@ for-each@^0.3.3:
dependencies:
is-callable "^1.1.3"
form-data@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452"
integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==
dependencies:
asynckit "^0.4.0"
combined-stream "^1.0.8"
mime-types "^2.1.12"
format@^0.2.0:
version "0.2.2"
resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b"
@@ -4266,18 +4223,6 @@ micromatch@^4.0.4, micromatch@^4.0.5:
braces "^3.0.2"
picomatch "^2.3.1"
mime-db@1.52.0:
version "1.52.0"
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
mime-types@^2.1.12:
version "2.1.35"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
dependencies:
mime-db "1.52.0"
mimic-fn@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
@@ -4325,27 +4270,28 @@ natural-compare@^1.4.0:
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==
next@^13.3.1-canary.8:
version "13.3.1-canary.8"
resolved "https://registry.yarnpkg.com/next/-/next-13.3.1-canary.8.tgz#f0846e5eada1491884326786a0749d5adc04c24d"
integrity sha512-z4QUgyAN+hSWSEqb4pvGvC3iRktE6NH2DVLU4AvfqNYpzP+prePiJC8HN/cJpFhGW9YbhyRLi5FliDC631OOag==
next@^13.4.2:
version "13.4.2"
resolved "https://registry.npmmirror.com/next/-/next-13.4.2.tgz#972f73a794f2c61729facedc79c49b22bdc89f0c"
integrity sha512-aNFqLs3a3nTGvLWlO9SUhCuMUHVPSFQC0+tDNGAsDXqx+WJDFSbvc233gOJ5H19SBc7nw36A9LwQepOJ2u/8Kg==
dependencies:
"@next/env" "13.3.1-canary.8"
"@swc/helpers" "0.4.14"
"@next/env" "13.4.2"
"@swc/helpers" "0.5.1"
busboy "1.6.0"
caniuse-lite "^1.0.30001406"
postcss "8.4.14"
styled-jsx "5.1.1"
zod "3.21.4"
optionalDependencies:
"@next/swc-darwin-arm64" "13.3.1-canary.8"
"@next/swc-darwin-x64" "13.3.1-canary.8"
"@next/swc-linux-arm64-gnu" "13.3.1-canary.8"
"@next/swc-linux-arm64-musl" "13.3.1-canary.8"
"@next/swc-linux-x64-gnu" "13.3.1-canary.8"
"@next/swc-linux-x64-musl" "13.3.1-canary.8"
"@next/swc-win32-arm64-msvc" "13.3.1-canary.8"
"@next/swc-win32-ia32-msvc" "13.3.1-canary.8"
"@next/swc-win32-x64-msvc" "13.3.1-canary.8"
"@next/swc-darwin-arm64" "13.4.2"
"@next/swc-darwin-x64" "13.4.2"
"@next/swc-linux-arm64-gnu" "13.4.2"
"@next/swc-linux-arm64-musl" "13.4.2"
"@next/swc-linux-x64-gnu" "13.4.2"
"@next/swc-linux-x64-musl" "13.4.2"
"@next/swc-win32-arm64-msvc" "13.4.2"
"@next/swc-win32-ia32-msvc" "13.4.2"
"@next/swc-win32-x64-msvc" "13.4.2"
node-domexception@^1.0.0:
version "1.0.0"
@@ -4488,14 +4434,6 @@ open@^8.4.0:
is-docker "^2.1.1"
is-wsl "^2.2.0"
openai@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/openai/-/openai-3.2.1.tgz#1fa35bdf979cbde8453b43f2dd3a7d401ee40866"
integrity sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A==
dependencies:
axios "^0.26.0"
form-data "^4.0.0"
optionator@^0.9.1:
version "0.9.1"
resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499"
@@ -5647,6 +5585,11 @@ yocto-queue@^0.1.0:
resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
zod@3.21.4:
version "3.21.4"
resolved "https://registry.npmmirror.com/zod/-/zod-3.21.4.tgz#10882231d992519f0a10b5dd58a38c9dabbb64db"
integrity sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==
zustand@^4.3.6:
version "4.3.6"
resolved "https://registry.yarnpkg.com/zustand/-/zustand-4.3.6.tgz#ce7804eb75361af0461a2d0536b65461ec5de86f"