Merge pull request #172 from Yidadaa/prompt

feat: improve prompts ux
Yifei Zhang 2023-03-30 02:01:55 +08:00 committed by GitHub
commit d908099798
18 changed files with 356 additions and 163 deletions


@ -1,6 +1,6 @@
{
"./app/**/*.{js,ts,jsx,tsx,json,html,css,scss,md}": [
"eslint --fix",
"prettier --write"
]
}
"./app/**/*.{js,ts,jsx,tsx,json,html,css,md}": [
"eslint --fix",
"prettier --write"
]
}


@ -78,9 +78,9 @@ This project will be continuously maintained. If you want to keep the code repos
You can star or watch this project or follow the author to get release notifications in time.
## 访问控制 Access Control
## 配置密码 Password
本项目提供有限的权限控制功能,请在环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义控制码:
本项目提供有限的权限控制功能,请在 Vercel 项目控制面板的环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义码:
```
code1,code2,code3
@ -88,7 +88,7 @@ code1,code2,code3
增加或修改该环境变量后,请**重新部署**项目使改动生效。
This project provides limited access control. Please add an environment variable named `CODE` on the environment variables page. The value should be a custom control code separated by comma like this:
This project provides limited access control. Please add an environment variable named `CODE` on the Vercel environment variables page. The value should be one or more passwords separated by commas, like this:
```
code1,code2,code3
@ -96,6 +96,38 @@ code1,code2,code3
After adding or modifying this environment variable, please redeploy the project for the changes to take effect.
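For illustration, here is a minimal sketch of how a server could turn the comma-separated `CODE` value into a set of hashed codes and validate requests against it, in the spirit of the `middleware()` change near the end of this diff. The `getAccessCodes`/`isAuthorized` helpers and the `spark-md5` import are assumptions for this example, not necessarily the project's exact API:

```ts
// Sketch only: parse the CODE environment variable into md5-hashed access codes.
import md5 from "spark-md5"; // assumption: same md5 style as the middleware() check

export function getAccessCodes(): Set<string> {
  const code = process.env.CODE ?? "";
  return new Set(
    code
      .split(",")
      .map((c) => c.trim())
      .filter((c) => c.length > 0)
      .map((c) => md5.hash(c)),
  );
}

// A request passes when no codes are configured, or when the md5 of its
// `access-code` header matches one of the configured codes.
export function isAuthorized(accessCode: string | null, codes: Set<string>): boolean {
  return codes.size === 0 || codes.has(md5.hash(accessCode ?? "").trim());
}
```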
## 环境变量 Environment Variables
### `OPENAI_API_KEY` (required)
OpenAI 密钥。
Your OpenAI API key.
### `CODE` (optional)
访问密码,可选,可以使用逗号隔开多个密码。
Access password. Multiple passwords can be separated by commas.
### `BASE_URL` (optional)
> Default: `api.openai.com`
OpenAI 接口代理 URL。
Override the OpenAI API request base URL.
### `PROTOCOL` (optional)
> Default: `https`
> Values: `http` | `https`
OpenAI 接口协议。
Override the OpenAI API request protocol.
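Put together, a typical deployment sets these variables roughly as follows (the values are placeholders; only `OPENAI_API_KEY` is required):

```
OPENAI_API_KEY=sk-your-key-here
CODE=password1,password2
BASE_URL=api.openai.com
PROTOCOL=https
```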
## 开发 Development
点击下方按钮,开始二次开发:
@ -118,11 +150,11 @@ OPENAI_API_KEY=<your api key here>
2. 执行 `yarn install && yarn dev` 即可。
### 本地部署 Local Deployment
```shell
bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh)
```
### 容器部署 Docker Deployment
```shell
@ -157,6 +189,7 @@ If you would like to contribute your API key, you can email it to the author and
[@hoochanlon](https://github.com/hoochanlon)
### 贡献者 Contributor
[Contributors](https://github.com/Yidadaa/ChatGPT-Next-Web/graphs/contributors)
## LICENSE


@ -1,26 +1,12 @@
import { createParser } from "eventsource-parser";
import { NextRequest } from "next/server";
import { requestOpenai } from "../common";
async function createStream(req: NextRequest) {
const encoder = new TextEncoder();
const decoder = new TextDecoder();
let apiKey = process.env.OPENAI_API_KEY;
const userApiKey = req.headers.get("token");
if (userApiKey) {
apiKey = userApiKey;
console.log("[Stream] using user api key");
}
const res = await fetch("https://api.openai.com/v1/chat/completions", {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
},
method: "POST",
body: req.body,
});
const res = await requestOpenai(req);
const stream = new ReadableStream({
async start(controller) {


@ -1 +0,0 @@
config.ts


@ -1,29 +0,0 @@
import { OpenAIApi, Configuration } from "openai";
import { ChatRequest } from "./typing";
export async function POST(req: Request) {
try {
let apiKey = process.env.OPENAI_API_KEY;
const userApiKey = req.headers.get("token");
if (userApiKey) {
apiKey = userApiKey;
}
const openai = new OpenAIApi(
new Configuration({
apiKey,
})
);
const requestBody = (await req.json()) as ChatRequest;
const completion = await openai!.createChatCompletion({
...requestBody,
});
return new Response(JSON.stringify(completion.data));
} catch (e) {
console.error("[Chat] ", e);
return new Response(JSON.stringify(e));
}
}

app/api/common.ts (new file, 22 lines)

@ -0,0 +1,22 @@
import { NextRequest } from "next/server";
const OPENAI_URL = "api.openai.com";
const DEFAULT_PROTOCOL = "https";
const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL;
const BASE_URL = process.env.BASE_URL ?? OPENAI_URL;
export async function requestOpenai(req: NextRequest) {
const apiKey = req.headers.get("token");
const openaiPath = req.headers.get("path");
console.log("[Proxy] ", openaiPath);
return fetch(`${PROTOCOL}://${BASE_URL}/${openaiPath}`, {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
},
method: req.method,
body: req.body,
});
}

app/api/openai/route.ts (new file, 28 lines)

@ -0,0 +1,28 @@
import { NextRequest, NextResponse } from "next/server";
import { requestOpenai } from "../common";
async function makeRequest(req: NextRequest) {
try {
const res = await requestOpenai(req);
return new Response(res.body);
} catch (e) {
console.error("[OpenAI] ", req.body, e);
return NextResponse.json(
{
error: true,
msg: JSON.stringify(e),
},
{
status: 500,
},
);
}
}
export async function POST(req: NextRequest) {
return makeRequest(req);
}
export async function GET(req: NextRequest) {
return makeRequest(req);
}
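For context, a client call to this new proxy route looks roughly like the sketch below; it mirrors the shape of the `requestOpenaiClient` helper added later in this diff, with the target OpenAI endpoint passed in the `path` header (the request body here is a placeholder; the real client builds a `ChatRequest` and attaches access-code/token headers via `getHeaders()`):

```ts
// Sketch of a client-side call to the /api/openai proxy. The `path` header
// tells app/api/common.ts which OpenAI endpoint to forward the request to.
async function callOpenaiProxy(body: unknown, path = "v1/chat/completions") {
  return fetch("/api/openai", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      path, // e.g. "v1/chat/completions" or "dashboard/billing/credit_grants"
    },
    body: JSON.stringify(body),
  });
}
```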


@ -221,6 +221,14 @@
margin-bottom: 100px;
}
.chat-body-title {
cursor: pointer;
&:hover {
text-decoration: underline;
}
}
.chat-message {
display: flex;
flex-direction: row;


@ -102,7 +102,7 @@ export function ChatList() {
state.currentSessionIndex,
state.selectSession,
state.removeSession,
]
],
);
return (
@ -128,7 +128,7 @@ function useSubmitHandler() {
const shouldSubmit = (e: KeyboardEvent) => {
if (e.key !== "Enter") return false;
return (
(config.submitKey === SubmitKey.AltEnter && e.altKey) ||
(config.submitKey === SubmitKey.CtrlEnter && e.ctrlKey) ||
@ -170,7 +170,10 @@ export function PromptHints(props: {
);
}
export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean }) {
export function Chat(props: {
showSideBar?: () => void;
sideBarShowing?: boolean;
}) {
type RenderMessage = Message & { preview?: boolean };
const chatStore = useChatStore();
@ -190,11 +193,10 @@ export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean
const [promptHints, setPromptHints] = useState<Prompt[]>([]);
const onSearch = useDebouncedCallback(
(text: string) => {
if (chatStore.config.disablePromptHint) return;
setPromptHints(promptStore.search(text));
},
100,
{ leading: true, trailing: true }
{ leading: true, trailing: true },
);
const onPromptSelect = (prompt: Prompt) => {
@ -203,20 +205,31 @@ export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean
inputRef.current?.focus();
};
const scrollInput = () => {
const dom = inputRef.current;
if (!dom) return;
const paddingBottomNum: number = parseInt(
window.getComputedStyle(dom).paddingBottom,
10,
);
dom.scrollTop = dom.scrollHeight - dom.offsetHeight + paddingBottomNum;
};
// only search prompts when user input is short
const SEARCH_TEXT_LIMIT = 30;
const onInput = (text: string) => {
const textareaDom = inputRef.current
if (textareaDom) {
const paddingBottomNum: number = parseInt(window.getComputedStyle(textareaDom).paddingBottom, 10);
textareaDom.scrollTop = textareaDom.scrollHeight - textareaDom.offsetHeight + paddingBottomNum;
}
scrollInput();
setUserInput(text);
const n = text.trim().length;
if (n === 0 || n > SEARCH_TEXT_LIMIT) {
// clear search results
if (n === 0) {
setPromptHints([]);
} else {
onSearch(text);
} else if (!chatStore.config.disablePromptHint && n < SEARCH_TEXT_LIMIT) {
// check if we need to trigger auto-completion
if (text.startsWith("/") && text.length > 1) {
onSearch(text.slice(1));
}
}
};
@ -262,6 +275,7 @@ export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean
chatStore
.onUserInput(messages[i].content)
.then(() => setIsLoading(false));
inputRef.current?.focus();
return;
}
}
@ -285,7 +299,7 @@ export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean
preview: true,
},
]
: []
: [],
)
.concat(
userInput.length > 0
@ -297,7 +311,7 @@ export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean
preview: true,
},
]
: []
: [],
);
// auto scroll
@ -306,7 +320,6 @@ export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean
const dom = latestMessageRef.current;
if (dom && !isIOS() && autoScroll) {
dom.scrollIntoView({
behavior: "smooth",
block: "end",
});
}
@ -320,7 +333,17 @@ export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean
className={styles["window-header-title"]}
onClick={props?.showSideBar}
>
<div className={styles["window-header-main-title"]}>
<div
className={`${styles["window-header-main-title"]} ${styles["chat-body-title"]}`}
onClick={() => {
const newTopic = prompt(Locale.Chat.Rename, session.topic);
if (newTopic && newTopic !== session.topic) {
chatStore.updateCurrentSession(
(session) => (session.topic = newTopic!),
);
}
}}
>
{session.topic}
</div>
<div className={styles["window-header-sub-title"]}>
@ -380,32 +403,33 @@ export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean
</div>
)}
<div className={styles["chat-message-item"]}>
{(!isUser && !(message.preview || message.content.length === 0)) && (
<div className={styles["chat-message-top-actions"]}>
{message.streaming ? (
<div
className={styles["chat-message-top-action"]}
onClick={() => onUserStop(i)}
>
{Locale.Chat.Actions.Stop}
</div>
) : (
<div
className={styles["chat-message-top-action"]}
onClick={() => onResend(i)}
>
{Locale.Chat.Actions.Retry}
</div>
)}
{!isUser &&
!(message.preview || message.content.length === 0) && (
<div className={styles["chat-message-top-actions"]}>
{message.streaming ? (
<div
className={styles["chat-message-top-action"]}
onClick={() => onUserStop(i)}
>
{Locale.Chat.Actions.Stop}
</div>
) : (
<div
className={styles["chat-message-top-action"]}
onClick={() => onResend(i)}
>
{Locale.Chat.Actions.Retry}
</div>
)}
<div
className={styles["chat-message-top-action"]}
onClick={() => copyToClipboard(message.content)}
>
{Locale.Chat.Actions.Copy}
<div
className={styles["chat-message-top-action"]}
onClick={() => copyToClipboard(message.content)}
>
{Locale.Chat.Actions.Copy}
</div>
</div>
</div>
)}
)}
{(message.preview || message.content.length === 0) &&
!isUser ? (
<LoadingIcon />
@ -430,7 +454,7 @@ export function Chat(props: { showSideBar?: () => void, sideBarShowing?: boolean
</div>
);
})}
<div ref={latestMessageRef} style={{ opacity: 0, height: "2em" }}>
<div ref={latestMessageRef} style={{ opacity: 0, height: "3em" }}>
-
</div>
</div>
@ -560,7 +584,7 @@ export function Home() {
state.newSession,
state.currentSessionIndex,
state.removeSession,
]
],
);
const loading = !useHasHydrated();
const [showSideBar, setShowSideBar] = useState(true);
@ -653,7 +677,11 @@ export function Home() {
}}
/>
) : (
<Chat key="chat" showSideBar={() => setShowSideBar(true)} sideBarShowing={showSideBar} />
<Chat
key="chat"
showSideBar={() => setShowSideBar(true)}
sideBarShowing={showSideBar}
/>
)}
</div>
</div>


@ -27,6 +27,7 @@ import { getCurrentCommitId } from "../utils";
import Link from "next/link";
import { UPDATE_URL } from "../constant";
import { SearchService, usePromptStore } from "../store/prompt";
import { requestUsage } from "../requests";
function SettingItem(props: {
title: string;
@ -54,7 +55,7 @@ export function Settings(props: { closeSettings: () => void }) {
state.updateConfig,
state.resetConfig,
state.clearAllData,
]
],
);
const updateStore = useUpdateStore();
@ -70,14 +71,34 @@ export function Settings(props: { closeSettings: () => void }) {
});
}
const [usage, setUsage] = useState<{
granted?: number;
used?: number;
}>();
const [loadingUsage, setLoadingUsage] = useState(false);
function checkUsage() {
setLoadingUsage(true);
requestUsage()
.then((res) =>
setUsage({
granted: res?.total_granted,
used: res?.total_used,
}),
)
.finally(() => {
setLoadingUsage(false);
});
}
useEffect(() => {
checkUpdate();
checkUsage();
}, []);
const accessStore = useAccessStore();
const enabledAccessControl = useMemo(
() => accessStore.enabledAccessControl(),
[]
[],
);
const promptStore = usePromptStore();
@ -179,7 +200,7 @@ export function Settings(props: { closeSettings: () => void }) {
onChange={(e) => {
updateConfig(
(config) =>
(config.submitKey = e.target.value as any as SubmitKey)
(config.submitKey = e.target.value as any as SubmitKey),
);
}}
>
@ -199,7 +220,7 @@ export function Settings(props: { closeSettings: () => void }) {
value={config.theme}
onChange={(e) => {
updateConfig(
(config) => (config.theme = e.target.value as any as Theme)
(config) => (config.theme = e.target.value as any as Theme),
);
}}
>
@ -240,7 +261,7 @@ export function Settings(props: { closeSettings: () => void }) {
onChange={(e) =>
updateConfig(
(config) =>
(config.fontSize = Number.parseInt(e.currentTarget.value))
(config.fontSize = Number.parseInt(e.currentTarget.value)),
)
}
></input>
@ -253,7 +274,7 @@ export function Settings(props: { closeSettings: () => void }) {
checked={config.tightBorder}
onChange={(e) =>
updateConfig(
(config) => (config.tightBorder = e.currentTarget.checked)
(config) => (config.tightBorder = e.currentTarget.checked),
)
}
></input>
@ -271,7 +292,7 @@ export function Settings(props: { closeSettings: () => void }) {
onChange={(e) =>
updateConfig(
(config) =>
(config.disablePromptHint = e.currentTarget.checked)
(config.disablePromptHint = e.currentTarget.checked),
)
}
></input>
@ -281,7 +302,7 @@ export function Settings(props: { closeSettings: () => void }) {
title={Locale.Settings.Prompt.List}
subTitle={Locale.Settings.Prompt.ListCount(
builtinCount,
customCount
customCount,
)}
>
<IconButton
@ -324,6 +345,28 @@ export function Settings(props: { closeSettings: () => void }) {
></input>
</SettingItem>
<SettingItem
title={Locale.Settings.Usage.Title}
subTitle={
loadingUsage
? Locale.Settings.Usage.IsChecking
: Locale.Settings.Usage.SubTitle(
usage?.granted ?? "[?]",
usage?.used ?? "[?]",
)
}
>
{loadingUsage ? (
<div />
) : (
<IconButton
icon={<ResetIcon></ResetIcon>}
text={Locale.Settings.Usage.Check}
onClick={checkUsage}
/>
)}
</SettingItem>
<SettingItem
title={Locale.Settings.HistoryCount.Title}
subTitle={Locale.Settings.HistoryCount.SubTitle}
@ -338,7 +381,7 @@ export function Settings(props: { closeSettings: () => void }) {
onChange={(e) =>
updateConfig(
(config) =>
(config.historyMessageCount = e.target.valueAsNumber)
(config.historyMessageCount = e.target.valueAsNumber),
)
}
></input>
@ -357,7 +400,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.compressMessageLengthThreshold =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
)
}
></input>
@ -370,7 +413,8 @@ export function Settings(props: { closeSettings: () => void }) {
value={config.modelConfig.model}
onChange={(e) => {
updateConfig(
(config) => (config.modelConfig.model = e.currentTarget.value)
(config) =>
(config.modelConfig.model = e.currentTarget.value),
);
}}
>
@ -395,7 +439,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.modelConfig.temperature =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
);
}}
></input>
@ -413,7 +457,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.modelConfig.max_tokens =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
)
}
></input>
@ -432,7 +476,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.modelConfig.presence_penalty =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
);
}}
></input>


@ -18,6 +18,7 @@ const cn = {
Stop: "停止",
Retry: "重试",
},
Rename: "重命名对话",
Typing: "正在输入…",
Input: (submitKey: string) => {
var inputHints = `输入消息,${submitKey} 发送`;
@ -63,6 +64,7 @@ const cn = {
Title: "字体大小",
SubTitle: "聊天内容的字体大小",
},
Update: {
Version: (x: string) => `当前版本:${x}`,
IsLatest: "已是最新版本",
@ -77,7 +79,7 @@ const cn = {
Prompt: {
Disable: {
Title: "禁用提示词自动补全",
SubTitle: "禁用后将无法自动根据输入补全",
SubTitle: "在输入框开头输入 / 即可触发自动补全",
},
List: "自定义提示词列表",
ListCount: (builtin: number, custom: number) =>
@ -97,6 +99,14 @@ const cn = {
SubTitle: "使用自己的 Key 可绕过受控访问限制",
Placeholder: "OpenAI API Key",
},
Usage: {
Title: "账户余额",
SubTitle(granted: any, used: any) {
return `总共 $${granted},已使用 $${used}`;
},
IsChecking: "正在检查…",
Check: "重新检查",
},
AccessCode: {
Title: "访问码",
SubTitle: "现在是受控访问状态",
@ -124,7 +134,7 @@ const cn = {
History: (content: string) =>
"这是 ai 和用户的历史聊天总结作为前情提要:" + content,
Topic:
"直接返回这句话的简要主题,不要解释,如果没有主题,请直接返回“闲聊”",
"使用四到五个字直接返回这句话的简要主题,不要解释、不要标点、不要语气词、不要多余文本,如果没有主题,请直接返回“闲聊”",
Summarize:
"简要总结一下你和用户的对话,用作后续的上下文提示 prompt控制在 50 字以内",
},


@ -20,6 +20,7 @@ const en: LocaleType = {
Stop: "Stop",
Retry: "Retry",
},
Rename: "Rename Chat",
Typing: "Typing…",
Input: (submitKey: string) => {
var inputHints = `Type something and press ${submitKey} to send`;
@ -79,7 +80,7 @@ const en: LocaleType = {
Prompt: {
Disable: {
Title: "Disable auto-completion",
SubTitle: "After disabling, auto-completion will not be available",
SubTitle: "Input / to trigger auto-completion",
},
List: "Prompt List",
ListCount: (builtin: number, custom: number) =>
@ -100,6 +101,14 @@ const en: LocaleType = {
SubTitle: "Use your key to ignore access code limit",
Placeholder: "OpenAI API Key",
},
Usage: {
Title: "Account Balance",
SubTitle(granted: any, used: any) {
return `Total $${granted}, Used $${used}`;
},
IsChecking: "Checking...",
Check: "Check Again",
},
AccessCode: {
Title: "Access Code",
SubTitle: "Access control enabled",
@ -129,7 +138,7 @@ const en: LocaleType = {
"This is a summary of the chat history between the AI and the user as a recap: " +
content,
Topic:
"Provide a brief topic of the sentence without explanation. If there is no topic, return 'Chitchat'.",
"Please generate a four to five word title summarizing our conversation without any lead-in, punctuation, quotation marks, periods, symbols, or additional text. Remove enclosing quotation marks.",
Summarize:
"Summarize our discussion briefly in 50 characters or less to use as a prompt for future context.",
},


@ -19,8 +19,9 @@ const tw: LocaleType = {
Stop: "停止",
Retry: "重試",
},
Rename: "重命名對話",
Typing: "正在輸入…",
Input: (submitKey: string) => {
var inputHints = `輸入訊息後,按下 ${submitKey} 鍵即可發送`;
if (submitKey === String(SubmitKey.Enter)) {
inputHints += "Shift + Enter 鍵換行";
@ -78,7 +79,7 @@ const tw: LocaleType = {
Prompt: {
Disable: {
Title: "停用提示詞自動補全",
SubTitle: "若停用後,將無法自動根據輸入進行補全",
SubTitle: "在輸入框開頭輸入 / 即可觸發自動補全",
},
List: "自定義提示詞列表",
ListCount: (builtin: number, custom: number) =>
@ -98,6 +99,14 @@ const tw: LocaleType = {
SubTitle: "使用自己的 Key 可規避受控訪問限制",
Placeholder: "OpenAI API Key",
},
Usage: {
Title: "帳戶餘額",
SubTitle(granted: any, used: any) {
return `總共 $${granted},已使用 $${used}`;
},
IsChecking: "正在檢查…",
Check: "重新檢查",
},
AccessCode: {
Title: "訪問碼",
SubTitle: "現在是受控訪問狀態",
@ -124,8 +133,7 @@ const tw: LocaleType = {
Prompt: {
History: (content: string) =>
"這是 AI 與用戶的歷史聊天總結,作為前情提要:" + content,
Topic:
"直接返回這句話的簡要主題,無須解釋,若無主題,請直接返回「閒聊」",
Topic: "直接返回這句話的簡要主題,無須解釋,若無主題,請直接返回「閒聊」",
Summarize:
"簡要總結一下你和用戶的對話,作為後續的上下文提示 prompt且字數控制在 50 字以內",
},


@ -1,4 +1,4 @@
import type { ChatRequest, ChatReponse } from "./api/chat/typing";
import type { ChatRequest, ChatReponse } from "./api/openai/typing";
import { filterConfig, Message, ModelConfig, useAccessStore } from "./store";
import Locale from "./locales";
@ -9,7 +9,7 @@ const makeRequestParam = (
options?: {
filterBot?: boolean;
stream?: boolean;
}
},
): ChatRequest => {
let sendMessages = messages.map((v) => ({
role: v.role,
@ -42,19 +42,48 @@ function getHeaders() {
return headers;
}
export function requestOpenaiClient(path: string) {
return (body: any, method = "POST") =>
fetch("/api/openai", {
method,
headers: {
"Content-Type": "application/json",
path,
...getHeaders(),
},
body: body && JSON.stringify(body),
});
}
export async function requestChat(messages: Message[]) {
const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
const res = await fetch("/api/chat", {
method: "POST",
headers: {
"Content-Type": "application/json",
...getHeaders(),
},
body: JSON.stringify(req),
});
const res = await requestOpenaiClient("v1/chat/completions")(req);
return (await res.json()) as ChatReponse;
try {
const response = (await res.json()) as ChatReponse;
return response;
} catch (error) {
console.error("[Request Chat] ", error, res.body);
}
}
export async function requestUsage() {
const res = await requestOpenaiClient("dashboard/billing/credit_grants")(
null,
"GET",
);
try {
const response = (await res.json()) as {
total_available: number;
total_granted: number;
total_used: number;
};
return response;
} catch (error) {
console.error("[Request usage] ", error, res.body);
}
}
export async function requestChatStream(
@ -65,7 +94,7 @@ export async function requestChatStream(
onMessage: (message: string, done: boolean) => void;
onError: (error: Error) => void;
onController?: (controller: AbortController) => void;
}
},
) {
const req = makeRequestParam(messages, {
stream: true,
@ -87,6 +116,7 @@ export async function requestChatStream(
method: "POST",
headers: {
"Content-Type": "application/json",
path: "v1/chat/completions",
...getHeaders(),
},
body: JSON.stringify(req),
@ -129,7 +159,7 @@ export async function requestChatStream(
responseText = Locale.Error.Unauthorized;
finish();
} else {
console.error("Stream Error");
console.error("Stream Error", res.body);
options?.onError(new Error("Stream Error"));
}
} catch (err) {
@ -149,7 +179,7 @@ export async function requestWithPrompt(messages: Message[], prompt: string) {
const res = await requestChat(messages);
return res.choices.at(0)?.message?.content ?? "";
return res?.choices?.at(0)?.message?.content ?? "";
}
// To store message streaming controller
@ -159,7 +189,7 @@ export const ControllerPool = {
addController(
sessionIndex: number,
messageIndex: number,
controller: AbortController
controller: AbortController,
) {
const key = this.key(sessionIndex, messageIndex);
this.controllers[key] = controller;


@ -206,6 +206,10 @@ interface ChatStore {
clearAllData: () => void;
}
function countMessages(msgs: Message[]) {
return msgs.reduce((pre, cur) => pre + cur.content.length, 0);
}
const LOCAL_KEY = "chat-next-web-store";
export const useChatStore = create<ChatStore>()(
@ -393,8 +397,12 @@ export const useChatStore = create<ChatStore>()(
summarizeSession() {
const session = get().currentSession();
if (session.topic === DEFAULT_TOPIC && session.messages.length >= 3) {
// should summarize topic
// should summarize topic after chatting more than 50 words
const SUMMARIZE_MIN_LEN = 50;
if (
session.topic === DEFAULT_TOPIC &&
countMessages(session.messages) >= SUMMARIZE_MIN_LEN
) {
requestWithPrompt(session.messages, Locale.Store.Prompt.Topic).then(
(res) => {
get().updateCurrentSession(
@ -408,10 +416,7 @@ export const useChatStore = create<ChatStore>()(
let toBeSummarizedMsgs = session.messages.slice(
session.lastSummarizeIndex,
);
const historyMsgLength = toBeSummarizedMsgs.reduce(
(pre, cur) => pre + cur.content.length,
0,
);
const historyMsgLength = countMessages(toBeSummarizedMsgs);
if (historyMsgLength > 4000) {
toBeSummarizedMsgs = toBeSummarizedMsgs.slice(


@ -6,7 +6,7 @@ export const config = {
matcher: ["/api/chat", "/api/chat-stream"],
};
export function middleware(req: NextRequest, res: NextResponse) {
export function middleware(req: NextRequest) {
const accessCode = req.headers.get("access-code");
const token = req.headers.get("token");
const hashedCode = md5.hash(accessCode ?? "").trim();
@ -18,14 +18,37 @@ export function middleware(req: NextRequest, res: NextResponse) {
if (ACCESS_CODES.size > 0 && !ACCESS_CODES.has(hashedCode) && !token) {
return NextResponse.json(
{
error: true,
needAccessCode: true,
hint: "Please go settings page and fill your access code.",
msg: "Please go settings page and fill your access code.",
},
{
status: 401,
}
},
);
}
return NextResponse.next();
// inject api key
if (!token) {
const apiKey = process.env.OPENAI_API_KEY;
if (apiKey) {
req.headers.set("token", apiKey);
} else {
return NextResponse.json(
{
error: true,
msg: "Empty Api Key",
},
{
status: 401,
},
);
}
}
return NextResponse.next({
request: {
headers: req.headers,
},
});
}


@ -1,24 +1,13 @@
const CHATGPT_NEXT_WEB_CACHE = "chatgpt-next-web-cache";
self.addEventListener('activate', function (event) {
console.log('ServiceWorker activated.');
self.addEventListener("activate", function (event) {
console.log("ServiceWorker activated.");
});
self.addEventListener('install', function (event) {
self.addEventListener("install", function (event) {
event.waitUntil(
caches.open(CHATGPT_NEXT_WEB_CACHE)
.then(function (cache) {
return cache.addAll([
]);
})
caches.open(CHATGPT_NEXT_WEB_CACHE).then(function (cache) {
return cache.addAll([]);
}),
);
});
self.addEventListener('fetch', function (event) {
event.respondWith(
caches.match(event.request)
.then(function (response) {
return response || fetch(event.request);
})
);
});