Merge branch 'main' into reset

# Conflicts:
#	app/components/settings.tsx
commit 0a60a87c9f by AprilNEA, 2023-04-02 13:45:34 +08:00
44 changed files with 2646 additions and 1124 deletions

View File

@ -1,3 +1,4 @@
{
"extends": "next/core-web-vitals"
"extends": "next/core-web-vitals",
"plugins": ["prettier"]
}

View File

@ -1,6 +1,7 @@
name: Publish Docker image
on:
workflow_dispatch:
release:
types: [published]
@ -9,25 +10,43 @@ jobs:
name: Push Docker image to Docker Hub
runs-on: ubuntu-latest
steps:
- name: Check out the repo
-
name: Check out the repo
uses: actions/checkout@v3
- name: Log in to Docker Hub
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
-
name: Log in to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract metadata (tags, labels) for Docker
-
name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
uses: docker/metadata-action@v4
with:
images: yidadaa/chatgpt-next-web
tags: |
type=raw,value=latest
type=ref,event=tag
- name: Build and push Docker image
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
-
name: Set up QEMU
uses: docker/setup-qemu-action@v2
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
-
name: Build and push Docker image
uses: docker/build-push-action@v4
with:
context: .
platforms: linux/amd64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max

.github/workflows/sync.yml vendored Normal file (29 lines)
View File

@ -0,0 +1,29 @@
name: Upstream Sync
on:
schedule:
- cron: '0 */12 * * *' # every 12 hours
workflow_dispatch: # on button click
jobs:
sync_latest_from_upstream:
name: Sync latest commits from upstream repo
runs-on: ubuntu-latest
steps:
# Step 1: run a standard checkout action, provided by github
- name: Checkout target repo
uses: actions/checkout@v3
# Step 2: run the sync action
- name: Sync upstream changes
id: sync
uses: aormsby/Fork-Sync-With-Upstream-action@v3.4
with:
upstream_sync_repo: Yidadaa/ChatGPT-Next-Web
upstream_sync_branch: main
target_sync_branch: main
target_repo_token: ${{ secrets.GITHUB_TOKEN }} # automatically generated, no need to set
# Set test_mode true to run tests instead of the true action!!
test_mode: false

.gitignore vendored (2 lines)
View File

@ -35,3 +35,5 @@ yarn-error.log*
*.tsbuildinfo
next-env.d.ts
dev
public/prompts.json

.husky/pre-commit Executable file (4 lines)
View File

@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx lint-staged

.lintstagedrc.json Normal file (6 lines)
View File

@ -0,0 +1,6 @@
{
"./app/**/*.{js,ts,jsx,tsx,json,html,css,md}": [
"eslint --fix",
"prettier --write"
]
}

.prettierrc.js Normal file (10 lines)
View File

@ -0,0 +1,10 @@
module.exports = {
printWidth: 80,
tabWidth: 2,
useTabs: false,
semi: true,
singleQuote: false,
trailingComma: 'all',
bracketSpacing: true,
arrowParens: 'always',
};

View File

@ -6,13 +6,9 @@ RUN apk add --no-cache libc6-compat
WORKDIR /app
COPY package.json yarn.lock* package-lock.json* ./
COPY package.json yarn.lock ./
RUN \
if [ -f yarn.lock ]; then yarn --frozen-lockfile; \
elif [ -f package-lock.json ]; then npm ci; \
else echo "Lockfile not found." && exit 1; \
fi
RUN yarn install
FROM base AS builder

View File

@ -7,9 +7,9 @@
One-Click to deploy your own ChatGPT web UI.
[演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N) / [微信群](https://user-images.githubusercontent.com/16968934/227772522-b3ba3713-9206-4c8d-a81f-22300b7c313a.jpg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg)
[演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N) / [QQ 群](https://user-images.githubusercontent.com/16968934/228190818-7dd00845-e9b9-4363-97e5-44c507ac76da.jpeg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) / [Donate](#捐赠-donate-usdt)
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)
[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)
@ -22,6 +22,7 @@ One-Click to deploy your own ChatGPT web UI.
- 在 1 分钟内使用 Vercel **免费一键部署**
- 精心设计的 UI,响应式设计,支持深色模式
- 极快的首屏加载速度(~85kb)
- 海量的内置 prompt 列表,来自[中文](https://github.com/PlexPt/awesome-chatgpt-prompts-zh)和[英文](https://github.com/f/awesome-chatgpt-prompts)
- 自动压缩上下文聊天记录,在节省 Token 的同时支持超长对话
- 一键导出聊天记录,完整的 Markdown 支持
- 拥有自己的域名?好上加好,绑定后即可在任何地方**无障碍**快速访问
@ -31,14 +32,25 @@ One-Click to deploy your own ChatGPT web UI.
- **Deploy for free with one-click** on Vercel in under 1 minute
- Responsive design, and dark mode
- Fast first screen loading speed (~85kb)
- Awesome prompts powered by [awesome-chatgpt-prompts-zh](https://github.com/PlexPt/awesome-chatgpt-prompts-zh) and [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts)
- Automatically compresses chat history to support long conversations while also saving your tokens
- One-click export all chat history with full Markdown support
## 使用
## 开发计划 Roadmap
- System Prompt: pin a user defined prompt as system prompt 为每个对话设置系统 Prompt [#138](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/138)
- User Prompt: user can edit and save custom prompts to prompt list 允许用户自行编辑内置 Prompt 列表
- Self-host Model: support llama, alpaca, ChatGLM, BELLE etc. 支持自部署的大语言模型
- Plugins: support network search, calculator, any other APIs etc. 插件机制,支持联网搜索、计算器、调用其他平台 api [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165)
### 不会开发的功能 Not in Plan
- User login, accounts, cloud sync 用户登陆、账号管理、消息云同步
- UI text customize 界面文字自定义
## 开始使用
1. 准备好你的 [OpenAI API Key](https://platform.openai.com/account/api-keys);
2. 点击右侧按钮开始部署:
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登陆即可,记得在环境变量页填入 API Key
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登陆即可,记得在环境变量页填入 API Key
3. 部署完毕后,即可开始使用;
4. (可选)[绑定自定义域名](https://vercel.com/docs/concepts/projects/domains/add-a-domain):Vercel 分配的域名 DNS 在某些区域被污染了,绑定自定义域名即可直连。
@ -46,7 +58,7 @@ One-Click to deploy your own ChatGPT web UI.
1. Get [OpenAI API Key](https://platform.openai.com/account/api-keys);
2. Click
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web);
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web);
3. Enjoy :)
## 保持更新 Keep Updated
@ -76,9 +88,9 @@ This project will be continuously maintained. If you want to keep the code repos
You can star or watch this project or follow the author to get release notifications in time.
## 访问控制 Access Control
## 配置密码 Password
本项目提供有限的权限控制功能,请在环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义控制码:
本项目提供有限的权限控制功能,请在 Vercel 项目控制面板的环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义码:
```
code1,code2,code3
@ -86,7 +98,7 @@ code1,code2,code3
增加或修改该环境变量后,请**重新部署**项目使改动生效。
This project provides limited access control. Please add an environment variable named `CODE` on the environment variables page. The value should be a custom control code separated by comma like this:
This project provides limited access control. Please add an environment variable named `CODE` on the Vercel environment variables page. The value should be passwords separated by commas, like this:
```
code1,code2,code3
@ -94,6 +106,38 @@ code1,code2,code3
After adding or modifying this environment variable, please redeploy the project for the changes to take effect.
## 环境变量 Environment Variables
### `OPENAI_API_KEY` (required)
OpenAI 密钥。
Your OpenAI API key.
### `CODE` (optional)
访问密码,可选,可以使用逗号隔开多个密码。
Access password, multiple passwords separated by commas.
### `BASE_URL` (optional)
> Default: `api.openai.com`
OpenAI 接口代理 URL。
Override the OpenAI API request base URL.
### `PROTOCOL` (optional)
> Default: `https`
> Values: `http` | `https`
OpenAI 接口协议。
Override the OpenAI API request protocol.
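下面是将上述环境变量与 Docker 部署组合使用的一个示例,仅作演示;An illustrative sketch of combining these variables with the Docker image documented below (the `BASE_URL` host is a placeholder, not a real proxy endpoint):

```shell
# Sketch only: substitute your own key, access codes, and proxy host.
docker run -d -p 3000:3000 \
  -e OPENAI_API_KEY="sk-xxxx" \
  -e CODE="code1,code2,code3" \
  -e BASE_URL="your-proxy.example.com" \
  -e PROTOCOL="https" \
  yidadaa/chatgpt-next-web
```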
## 开发 Development
点击下方按钮,开始二次开发:
@ -117,16 +161,8 @@ OPENAI_API_KEY=<your api key here>
### 本地部署 Local Deployment
请直接询问 ChatGPT使用下列 Prompt
```
如何使用 pm2 和 yarn 部署 nextjs 项目到 ubuntu 服务器上,项目编译命令为 yarn build启动命令为 yarn start启动时需要设置环境变量为 OPENAI_API_KEY端口为 3000使用 ngnix 做反向代理
```
Please ask ChatGPT with prompt:
```
how to deploy nextjs project with pm2 and yarn on my ubuntu server, the build command is `yarn build`, the start command is `yarn start`, the project must start with env var named `OPENAI_API_KEY`, the port is 3000, use ngnix
```shell
bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh)
```
### 容器部署 Docker Deployment
@ -143,15 +179,12 @@ docker run -d -p 3000:3000 -e OPENAI_API_KEY="" -e CODE="" yidadaa/chatgpt-next-
![更多展示 More](./static/more.png)
## 说明 Attention
本项目的演示地址所用的 OpenAI 账户的免费额度将于 2023-04-01 过期,届时将无法通过演示地址在线体验。
如果你想贡献出自己的 API Key可以通过作者主页的邮箱发送给作者并标注过期时间。
The free trial of the OpenAI account used by the demo will expire on April 1, 2023, and the demo will not be available at that time.
If you would like to contribute your API key, you can email it to the author and indicate the expiration date of the API key.
## 捐赠 Donate USDT
> BNB Smart Chain (BEP 20)
```
0x67cD02c7EB62641De576a1fA3EdB32eA0c3ffD89
```
## 鸣谢 Special Thanks
@ -159,13 +192,13 @@ If you would like to contribute your API key, you can email it to the author and
[@mushan0x0](https://github.com/mushan0x0)
[@ClarenceDan](https://github.com/ClarenceDan)
[@zhangjia](https://github.com/zhangjia)
[@hoochanlon](https://github.com/hoochanlon)
### 贡献者 Contributor
[@AprilNEA](https://github.com/AprilNEA)
[@iSource](https://github.com/iSource)
[@iFwu](https://github.com/iFwu)
[Contributors](https://github.com/Yidadaa/ChatGPT-Next-Web/graphs/contributors)
## LICENSE
- [Anti 996 License](https://github.com/kattgu7/Anti-996-License/blob/master/LICENSE_CN_EN)
[Anti 996 License](https://github.com/kattgu7/Anti-996-License/blob/master/LICENSE_CN_EN)

View File

@ -14,3 +14,4 @@ export function getAccessCodes(): Set<string> {
}
export const ACCESS_CODES = getAccessCodes();
export const IS_IN_DOCKER = process.env.DOCKER;

View File

@ -1,26 +1,12 @@
import { createParser } from "eventsource-parser";
import { NextRequest } from "next/server";
import { requestOpenai } from "../common";
async function createStream(req: NextRequest) {
const encoder = new TextEncoder();
const decoder = new TextDecoder();
let apiKey = process.env.OPENAI_API_KEY;
const userApiKey = req.headers.get("token");
if (userApiKey) {
apiKey = userApiKey;
console.log("[Stream] using user api key");
}
const res = await fetch("https://api.openai.com/v1/chat/completions", {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
},
method: "POST",
body: req.body,
});
const res = await requestOpenai(req);
const stream = new ReadableStream({
async start(controller) {

View File

@ -1 +0,0 @@
config.ts

View File

@ -1,29 +0,0 @@
import { OpenAIApi, Configuration } from "openai";
import { ChatRequest } from "./typing";
export async function POST(req: Request) {
try {
let apiKey = process.env.OPENAI_API_KEY;
const userApiKey = req.headers.get("token");
if (userApiKey) {
apiKey = userApiKey;
}
const openai = new OpenAIApi(
new Configuration({
apiKey,
})
);
const requestBody = (await req.json()) as ChatRequest;
const completion = await openai!.createChatCompletion({
...requestBody,
});
return new Response(JSON.stringify(completion.data));
} catch (e) {
console.error("[Chat] ", e);
return new Response(JSON.stringify(e));
}
}

app/api/common.ts Normal file (22 lines)
View File

@ -0,0 +1,22 @@
import { NextRequest } from "next/server";
const OPENAI_URL = "api.openai.com";
const DEFAULT_PROTOCOL = "https";
const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL;
const BASE_URL = process.env.BASE_URL ?? OPENAI_URL;
export async function requestOpenai(req: NextRequest) {
const apiKey = req.headers.get("token");
const openaiPath = req.headers.get("path");
console.log("[Proxy] ", openaiPath);
return fetch(`${PROTOCOL}://${BASE_URL}/${openaiPath}`, {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${apiKey}`,
},
method: req.method,
body: req.body,
});
}

app/api/openai/route.ts Normal file (30 lines)
View File

@ -0,0 +1,30 @@
import { NextRequest, NextResponse } from "next/server";
import { requestOpenai } from "../common";
async function makeRequest(req: NextRequest) {
try {
const api = await requestOpenai(req);
const res = new NextResponse(api.body);
res.headers.set("Content-Type", "application/json");
return res;
} catch (e) {
console.error("[OpenAI] ", req.body, e);
return NextResponse.json(
{
error: true,
msg: JSON.stringify(e),
},
{
status: 500,
},
);
}
}
export async function POST(req: NextRequest) {
return makeRequest(req);
}
export async function GET(req: NextRequest) {
return makeRequest(req);
}

View File

@ -26,13 +26,13 @@
@media only screen and (min-width: 600px) {
.tight-container {
--window-width: 100vw;
--window-height: 100vh;
--window-height: var(--full-height);
--window-content-width: calc(100% - var(--sidebar-width));
@include container();
max-width: 100vw;
max-height: 100vh;
max-height: var(--full-height);
border-radius: 0;
}
@ -74,7 +74,7 @@
position: absolute;
left: -100%;
z-index: 999;
height: 100vh;
height: var(--full-height);
transition: all ease 0.3s;
box-shadow: none;
}
@ -218,7 +218,14 @@
flex: 1;
overflow: auto;
padding: 20px;
margin-bottom: 100px;
}
.chat-body-title {
cursor: pointer;
&:hover {
text-decoration: underline;
}
}
.chat-message {
@ -292,6 +299,7 @@
position: absolute;
right: 20px;
top: -26px;
left: 100px;
transition: all ease 0.3s;
opacity: 0;
pointer-events: none;
@ -302,6 +310,7 @@
.chat-message-top-action {
opacity: 0.5;
color: var(--black);
white-space: nowrap;
cursor: pointer;
&:hover {
@ -332,12 +341,63 @@
}
.chat-input-panel {
position: absolute;
bottom: 20px;
display: flex;
width: 100%;
padding: 20px;
box-sizing: border-box;
flex-direction: column;
}
@mixin single-line {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.prompt-hints {
min-height: 20px;
width: 100%;
max-height: 50vh;
overflow: auto;
display: flex;
flex-direction: column-reverse;
background-color: var(--white);
border: var(--border-in-light);
border-radius: 10px;
margin-bottom: 10px;
box-shadow: var(--shadow);
.prompt-hint {
color: var(--black);
padding: 6px 10px;
animation: slide-in ease 0.3s;
cursor: pointer;
transition: all ease 0.3s;
border: transparent 1px solid;
margin: 4px;
border-radius: 8px;
&:not(:last-child) {
margin-top: 0;
}
.hint-title {
font-size: 12px;
font-weight: bolder;
@include single-line();
}
.hint-content {
font-size: 12px;
@include single-line();
}
&-selected,
&:hover {
border-color: var(--primary);
}
}
}
.chat-input-panel-inner {
@ -354,7 +414,7 @@
background-color: var(--white);
color: var(--black);
font-family: inherit;
padding: 10px 14px;
padding: 10px 14px 50px;
resize: none;
outline: none;
}
@ -375,7 +435,7 @@
position: absolute;
right: 30px;
bottom: 10px;
bottom: 30px;
}
.export-content {

View File

@ -1,6 +1,7 @@
"use client";
import { useState, useRef, useEffect, useLayoutEffect } from "react";
import { useDebouncedCallback } from "use-debounce";
import { IconButton } from "./button";
import styles from "./home.module.scss";
@ -22,12 +23,19 @@ import DownloadIcon from "../icons/download.svg";
import { Message, SubmitKey, useChatStore, ChatSession } from "../store";
import { showModal, showToast } from "./ui-lib";
import { copyToClipboard, downloadAs, isIOS, selectOrCopy } from "../utils";
import {
copyToClipboard,
downloadAs,
isIOS,
isMobileScreen,
selectOrCopy,
} from "../utils";
import Locale from "../locales";
import dynamic from "next/dynamic";
import { REPO_URL } from "../constant";
import { ControllerPool } from "../requests";
import { Prompt, usePromptStore } from "../store/prompt";
export function Loading(props: { noLogo?: boolean }) {
return (
@ -100,7 +108,7 @@ export function ChatList() {
state.currentSessionIndex,
state.selectSession,
state.removeSession,
]
],
);
return (
@ -113,7 +121,7 @@ export function ChatList() {
key={i}
selected={i === selectedIndex}
onClick={() => selectSession(i)}
onDelete={() => removeSession(i)}
onDelete={() => confirm(Locale.Home.DeleteChat) && removeSession(i)}
/>
))}
</div>
@ -124,17 +132,19 @@ function useSubmitHandler() {
const config = useChatStore((state) => state.config);
const submitKey = config.submitKey;
const shouldSubmit = (e: KeyboardEvent) => {
const shouldSubmit = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
if (e.key !== "Enter") return false;
if (e.key === "Enter" && e.nativeEvent.isComposing) return false;
return (
(config.submitKey === SubmitKey.AltEnter && e.altKey) ||
(config.submitKey === SubmitKey.CtrlEnter && e.ctrlKey) ||
(config.submitKey === SubmitKey.ShiftEnter && e.shiftKey) ||
(config.submitKey === SubmitKey.MetaEnter && e.metaKey) ||
(config.submitKey === SubmitKey.Enter &&
!e.altKey &&
!e.ctrlKey &&
!e.shiftKey)
!e.shiftKey &&
!e.metaKey)
);
};
@ -144,25 +154,99 @@ function useSubmitHandler() {
};
}
export function Chat(props: { showSideBar?: () => void }) {
export function PromptHints(props: {
prompts: Prompt[];
onPromptSelect: (prompt: Prompt) => void;
}) {
if (props.prompts.length === 0) return null;
return (
<div className={styles["prompt-hints"]}>
{props.prompts.map((prompt, i) => (
<div
className={styles["prompt-hint"]}
key={prompt.title + i.toString()}
onClick={() => props.onPromptSelect(prompt)}
>
<div className={styles["hint-title"]}>{prompt.title}</div>
<div className={styles["hint-content"]}>{prompt.content}</div>
</div>
))}
</div>
);
}
export function Chat(props: {
showSideBar?: () => void;
sideBarShowing?: boolean;
}) {
type RenderMessage = Message & { preview?: boolean };
const chatStore = useChatStore();
const [session, sessionIndex] = useChatStore((state) => [
state.currentSession(),
state.currentSessionIndex,
]);
const fontSize = useChatStore((state) => state.config.fontSize);
const inputRef = useRef<HTMLTextAreaElement>(null);
const [userInput, setUserInput] = useState("");
const [isLoading, setIsLoading] = useState(false);
const { submitKey, shouldSubmit } = useSubmitHandler();
const onUserInput = useChatStore((state) => state.onUserInput);
// prompt hints
const promptStore = usePromptStore();
const [promptHints, setPromptHints] = useState<Prompt[]>([]);
const onSearch = useDebouncedCallback(
(text: string) => {
setPromptHints(promptStore.search(text));
},
100,
{ leading: true, trailing: true },
);
const onPromptSelect = (prompt: Prompt) => {
setUserInput(prompt.content);
setPromptHints([]);
inputRef.current?.focus();
};
const scrollInput = () => {
const dom = inputRef.current;
if (!dom) return;
const paddingBottomNum: number = parseInt(
window.getComputedStyle(dom).paddingBottom,
10,
);
dom.scrollTop = dom.scrollHeight - dom.offsetHeight + paddingBottomNum;
};
// only search prompts when user input is short
const SEARCH_TEXT_LIMIT = 30;
const onInput = (text: string) => {
scrollInput();
setUserInput(text);
const n = text.trim().length;
// clear search results
if (n === 0) {
setPromptHints([]);
} else if (!chatStore.config.disablePromptHint && n < SEARCH_TEXT_LIMIT) {
// check if need to trigger auto completion
if (text.startsWith("/") && text.length > 1) {
onSearch(text.slice(1));
}
}
};
// submit user input
const onUserSubmit = () => {
if (userInput.length <= 0) return;
setIsLoading(true);
onUserInput(userInput).then(() => setIsLoading(false));
chatStore.onUserInput(userInput).then(() => setIsLoading(false));
setUserInput("");
setPromptHints([]);
inputRef.current?.focus();
};
// stop response
@ -172,7 +256,7 @@ export function Chat(props: { showSideBar?: () => void }) {
};
// check if should send message
const onInputKeyDown = (e: KeyboardEvent) => {
const onInputKeyDown = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
if (shouldSubmit(e)) {
onUserSubmit();
e.preventDefault();
@ -195,7 +279,10 @@ export function Chat(props: { showSideBar?: () => void }) {
for (let i = botIndex; i >= 0; i -= 1) {
if (messages[i].role === "user") {
setIsLoading(true);
onUserInput(messages[i].content).then(() => setIsLoading(false));
chatStore
.onUserInput(messages[i].content)
.then(() => setIsLoading(false));
inputRef.current?.focus();
return;
}
}
@ -203,9 +290,7 @@ export function Chat(props: { showSideBar?: () => void }) {
// for auto-scroll
const latestMessageRef = useRef<HTMLDivElement>(null);
// wont scroll while hovering messages
const [autoScroll, setAutoScroll] = useState(false);
const [autoScroll, setAutoScroll] = useState(true);
// preview messages
const messages = (session.messages as RenderMessage[])
@ -219,7 +304,7 @@ export function Chat(props: { showSideBar?: () => void }) {
preview: true,
},
]
: []
: [],
)
.concat(
userInput.length > 0
@ -231,16 +316,25 @@ export function Chat(props: { showSideBar?: () => void }) {
preview: true,
},
]
: []
: [],
);
// auto scroll
useLayoutEffect(() => {
setTimeout(() => {
const dom = latestMessageRef.current;
if (dom && !isIOS() && autoScroll) {
const inputDom = inputRef.current;
// only scroll when input overlapped message body
let shouldScroll = true;
if (dom && inputDom) {
const domRect = dom.getBoundingClientRect();
const inputRect = inputDom.getBoundingClientRect();
shouldScroll = domRect.top > inputRect.top;
}
if (dom && autoScroll && shouldScroll) {
dom.scrollIntoView({
behavior: "smooth",
block: "end",
});
}
@ -254,7 +348,17 @@ export function Chat(props: { showSideBar?: () => void }) {
className={styles["window-header-title"]}
onClick={props?.showSideBar}
>
<div className={styles["window-header-main-title"]}>
<div
className={`${styles["window-header-main-title"]} ${styles["chat-body-title"]}`}
onClick={() => {
const newTopic = prompt(Locale.Chat.Rename, session.topic);
if (newTopic && newTopic !== session.topic) {
chatStore.updateCurrentSession(
(session) => (session.topic = newTopic!),
);
}
}}
>
{session.topic}
</div>
<div className={styles["window-header-sub-title"]}>
@ -314,7 +418,8 @@ export function Chat(props: { showSideBar?: () => void }) {
</div>
)}
<div className={styles["chat-message-item"]}>
{!isUser && (
{!isUser &&
!(message.preview || message.content.length === 0) && (
<div className={styles["chat-message-top-actions"]}>
{message.streaming ? (
<div
@ -346,7 +451,12 @@ export function Chat(props: { showSideBar?: () => void }) {
) : (
<div
className="markdown-body"
style={{ fontSize: `${fontSize}px` }}
onContextMenu={(e) => onRightClick(e, message)}
onDoubleClickCapture={() => {
if (!isMobileScreen()) return;
setUserInput(message.content);
}}
>
<Markdown content={message.content} />
</div>
@ -363,23 +473,28 @@ export function Chat(props: { showSideBar?: () => void }) {
</div>
);
})}
<div ref={latestMessageRef} style={{ opacity: 0, height: "2em" }}>
<div ref={latestMessageRef} style={{ opacity: 0, height: "1px" }}>
-
</div>
</div>
<div className={styles["chat-input-panel"]}>
<PromptHints prompts={promptHints} onPromptSelect={onPromptSelect} />
<div className={styles["chat-input-panel-inner"]}>
<textarea
ref={inputRef}
className={styles["chat-input"]}
placeholder={Locale.Chat.Input(submitKey)}
rows={3}
onInput={(e) => setUserInput(e.currentTarget.value)}
rows={4}
onInput={(e) => onInput(e.currentTarget.value)}
value={userInput}
onKeyDown={(e) => onInputKeyDown(e as any)}
onKeyDown={onInputKeyDown}
onFocus={() => setAutoScroll(true)}
onBlur={() => setAutoScroll(false)}
autoFocus
onBlur={() => {
setAutoScroll(false);
setTimeout(() => setPromptHints([]), 500);
}}
autoFocus={!props?.sideBarShowing}
/>
<IconButton
icon={<SendWhiteIcon />}
@ -406,9 +521,11 @@ function useSwitchTheme() {
document.body.classList.add("light");
}
const themeColor = getComputedStyle(document.body).getPropertyValue("--theme-color").trim();
const themeColor = getComputedStyle(document.body)
.getPropertyValue("--theme-color")
.trim();
const metaDescription = document.querySelector('meta[name="theme-color"]');
metaDescription?.setAttribute('content', themeColor);
metaDescription?.setAttribute("content", themeColor);
}, [config.theme]);
}
@ -486,7 +603,7 @@ export function Home() {
state.newSession,
state.currentSessionIndex,
state.removeSession,
]
],
);
const loading = !useHasHydrated();
const [showSideBar, setShowSideBar] = useState(true);
@ -504,7 +621,9 @@ export function Home() {
return (
<div
className={`${
config.tightBorder ? styles["tight-container"] : styles.container
config.tightBorder && !isMobileScreen()
? styles["tight-container"]
: styles.container
}`}
>
<div
@ -561,7 +680,10 @@ export function Home() {
<IconButton
icon={<AddIcon />}
text={Locale.Home.NewChat}
onClick={createNewSession}
onClick={() => {
createNewSession();
setShowSideBar(false);
}}
/>
</div>
</div>
@ -576,7 +698,11 @@ export function Home() {
}}
/>
) : (
<Chat key="chat" showSideBar={() => setShowSideBar(true)} />
<Chat
key="chat"
showSideBar={() => setShowSideBar(true)}
sideBarShowing={showSideBar}
/>
)}
</div>
</div>

View File

@ -1,6 +1,7 @@
import ReactMarkdown from "react-markdown";
import "katex/dist/katex.min.css";
import RemarkMath from "remark-math";
import RemarkBreaks from "remark-breaks";
import RehypeKatex from "rehype-katex";
import RemarkGfm from "remark-gfm";
import RehypePrsim from "rehype-prism-plus";
@ -29,7 +30,7 @@ export function PreCode(props: { children: any }) {
export function Markdown(props: { content: string }) {
return (
<ReactMarkdown
remarkPlugins={[RemarkMath, RemarkGfm]}
remarkPlugins={[RemarkMath, RemarkGfm, RemarkBreaks]}
rehypePlugins={[RehypeKatex, [RehypePrsim, { ignoreMissing: true }]]}
components={{
pre: PreCode,

View File

@ -7,8 +7,9 @@ import styles from "./settings.module.scss";
import ResetIcon from "../icons/reload.svg";
import CloseIcon from "../icons/close.svg";
import ClearIcon from "../icons/clear.svg";
import EditIcon from "../icons/edit.svg";
import { List, ListItem, Popover } from "./ui-lib";
import { List, ListItem, Popover, showToast } from "./ui-lib";
import { IconButton } from "./button";
import {
@ -19,12 +20,14 @@ import {
useUpdateStore,
useAccessStore,
} from "../store";
import { Avatar } from "./home";
import { Avatar, PromptHints } from "./home";
import Locale, { changeLang, getLang } from "../locales";
import { getCurrentCommitId } from "../utils";
import Locale, { AllLangs, changeLang, getLang } from "../locales";
import { getCurrentVersion } from "../utils";
import Link from "next/link";
import { UPDATE_URL } from "../constant";
import { SearchService, usePromptStore } from "../store/prompt";
import { requestUsage } from "../requests";
function SettingItem(props: {
title: string;
@ -57,7 +60,7 @@ export function Settings(props: { closeSettings: () => void }) {
const updateStore = useUpdateStore();
const [checkingUpdate, setCheckingUpdate] = useState(false);
const currentId = getCurrentCommitId();
const currentId = getCurrentVersion();
const remoteId = updateStore.remoteId;
const hasNewVersion = currentId !== remoteId;
@ -68,16 +71,40 @@ export function Settings(props: { closeSettings: () => void }) {
});
}
const [usage, setUsage] = useState<{
granted?: number;
used?: number;
}>();
const [loadingUsage, setLoadingUsage] = useState(false);
function checkUsage() {
setLoadingUsage(true);
requestUsage()
.then((res) =>
setUsage({
granted: res?.total_granted,
used: res?.total_used,
}),
)
.finally(() => {
setLoadingUsage(false);
});
}
useEffect(() => {
checkUpdate();
checkUsage();
}, []);
const accessStore = useAccessStore();
const enabledAccessControl = useMemo(
() => accessStore.enabledAccessControl(),
[]
[],
);
const promptStore = usePromptStore();
const builtinCount = SearchService.count.builtin;
const customCount = promptStore.prompts.size ?? 0;
return (
<>
<div className={styles["window-header"]}>
@ -173,7 +200,7 @@ export function Settings(props: { closeSettings: () => void }) {
onChange={(e) => {
updateConfig(
(config) =>
(config.submitKey = e.target.value as any as SubmitKey)
(config.submitKey = e.target.value as any as SubmitKey),
);
}}
>
@ -193,7 +220,7 @@ export function Settings(props: { closeSettings: () => void }) {
value={config.theme}
onChange={(e) => {
updateConfig(
(config) => (config.theme = e.target.value as any as Theme)
(config) => (config.theme = e.target.value as any as Theme),
);
}}
>
@ -206,37 +233,82 @@ export function Settings(props: { closeSettings: () => void }) {
</ListItem>
<SettingItem title={Locale.Settings.Lang.Name}>
<div className="">
<select
value={getLang()}
onChange={(e) => {
changeLang(e.target.value as any);
}}
>
<option value="en" key="en">
{Locale.Settings.Lang.Options.en}
</option>
<option value="cn" key="cn">
{Locale.Settings.Lang.Options.cn}
{AllLangs.map((lang) => (
<option value={lang} key={lang}>
{Locale.Settings.Lang.Options[lang]}
</option>
))}
</select>
</div>
</SettingItem>
<div className="no-mobile">
<SettingItem
title={Locale.Settings.FontSize.Title}
subTitle={Locale.Settings.FontSize.SubTitle}
>
<input
type="range"
title={`${config.fontSize ?? 14}px`}
value={config.fontSize}
min="12"
max="18"
step="1"
onChange={(e) =>
updateConfig(
(config) =>
(config.fontSize = Number.parseInt(e.currentTarget.value)),
)
}
></input>
</SettingItem>
<SettingItem title={Locale.Settings.TightBorder}>
<input
type="checkbox"
checked={config.tightBorder}
onChange={(e) =>
updateConfig(
(config) => (config.tightBorder = e.currentTarget.checked)
(config) => (config.tightBorder = e.currentTarget.checked),
)
}
></input>
</SettingItem>
</div>
</List>
<List>
<SettingItem
title={Locale.Settings.Prompt.Disable.Title}
subTitle={Locale.Settings.Prompt.Disable.SubTitle}
>
<input
type="checkbox"
checked={config.disablePromptHint}
onChange={(e) =>
updateConfig(
(config) =>
(config.disablePromptHint = e.currentTarget.checked),
)
}
></input>
</SettingItem>
<SettingItem
title={Locale.Settings.Prompt.List}
subTitle={Locale.Settings.Prompt.ListCount(
builtinCount,
customCount,
)}
>
<IconButton
icon={<EditIcon />}
text={Locale.Settings.Prompt.Edit}
onClick={() => showToast(Locale.WIP)}
/>
</SettingItem>
</List>
<List>
{enabledAccessControl ? (
@ -271,6 +343,28 @@ export function Settings(props: { closeSettings: () => void }) {
></input>
</SettingItem>
<SettingItem
title={Locale.Settings.Usage.Title}
subTitle={
loadingUsage
? Locale.Settings.Usage.IsChecking
: Locale.Settings.Usage.SubTitle(
usage?.granted ?? "[?]",
usage?.used ?? "[?]",
)
}
>
{loadingUsage ? (
<div />
) : (
<IconButton
icon={<ResetIcon></ResetIcon>}
text={Locale.Settings.Usage.Check}
onClick={checkUsage}
/>
)}
</SettingItem>
<SettingItem
title={Locale.Settings.HistoryCount.Title}
subTitle={Locale.Settings.HistoryCount.SubTitle}
@ -279,13 +373,13 @@ export function Settings(props: { closeSettings: () => void }) {
type="range"
title={config.historyMessageCount.toString()}
value={config.historyMessageCount}
min="2"
min="0"
max="25"
step="2"
onChange={(e) =>
updateConfig(
(config) =>
(config.historyMessageCount = e.target.valueAsNumber)
(config.historyMessageCount = e.target.valueAsNumber),
)
}
></input>
@ -304,7 +398,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.compressMessageLengthThreshold =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
)
}
></input>
@ -317,7 +411,8 @@ export function Settings(props: { closeSettings: () => void }) {
value={config.modelConfig.model}
onChange={(e) => {
updateConfig(
(config) => (config.modelConfig.model = e.currentTarget.value)
(config) =>
(config.modelConfig.model = e.currentTarget.value),
);
}}
>
@ -336,13 +431,13 @@ export function Settings(props: { closeSettings: () => void }) {
type="range"
value={config.modelConfig.temperature.toFixed(1)}
min="0"
max="1"
max="2"
step="0.1"
onChange={(e) => {
updateConfig(
(config) =>
(config.modelConfig.temperature =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
);
}}
></input>
@ -360,7 +455,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.modelConfig.max_tokens =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
)
}
></input>
@ -379,7 +474,7 @@ export function Settings(props: { closeSettings: () => void }) {
updateConfig(
(config) =>
(config.modelConfig.presence_penalty =
e.currentTarget.valueAsNumber)
e.currentTarget.valueAsNumber),
);
}}
></input>

View File

@ -36,7 +36,7 @@ export function ListItem(props: { children: JSX.Element[] }) {
return <div className={styles["list-item"]}>{props.children}</div>;
}
export function List(props: { children: JSX.Element[] }) {
export function List(props: { children: JSX.Element[] | JSX.Element }) {
return <div className={styles.list}>{props.children}</div>;
}

View File

@ -3,3 +3,4 @@ export const REPO = "ChatGPT-Next-Web";
export const REPO_URL = `https://github.com/${OWNER}/${REPO}`;
export const UPDATE_URL = `${REPO_URL}#%E4%BF%9D%E6%8C%81%E6%9B%B4%E6%96%B0-keep-updated`;
export const FETCH_COMMIT_URL = `https://api.github.com/repos/${OWNER}/${REPO}/commits?per_page=1`;
export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`;

app/icons/edit.svg Normal file (1 line)
View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="16" height="16" viewBox="0 0 16 16" fill="none"><defs><rect id="path_0" x="0" y="0" width="16" height="16" /></defs><g opacity="1" transform="translate(0 0) rotate(0 8 8)"><mask id="bg-mask-0" fill="white"><use xlink:href="#path_0"></use></mask><g mask="url(#bg-mask-0)" ><path id="路径 1" style="stroke:#333333; stroke-width:1.3333333333333333; stroke-opacity:1; stroke-dasharray:0 0" transform="translate(10.5 11) rotate(0 1.4166666666666665 1.8333333333333333)" d="M2.83,0L2.83,3C2.83,3.37 2.53,3.67 2.17,3.67L0,3.67 " /><path id="路径 2" style="stroke:#333333; stroke-width:1.3333333333333333; stroke-opacity:1; stroke-dasharray:0 0" transform="translate(2.6666666666666665 1.3333333333333333) rotate(0 5.333333333333333 6.666666666666666)" d="M10.67,4L10.67,0.67C10.67,0.3 10.37,0 10,0L0.67,0C0.3,0 0,0.3 0,0.67L0,12.67C0,13.03 0.3,13.33 0.67,13.33L2.67,13.33 " /><path id="路径 3" style="stroke:#333333; stroke-width:1.3333333333333333; stroke-opacity:1; stroke-dasharray:0 0" transform="translate(5.333333333333333 5.333333333333333) rotate(0 2.333333333333333 0)" d="M0,0L4.67,0 " /><path id="路径 4" style="stroke:#333333; stroke-width:1.3333333333333333; stroke-opacity:1; stroke-dasharray:0 0" transform="translate(7.666666666666666 7.666666666666666) rotate(0 2.833333333333333 3.5)" d="M0,7L5.67,0 " /><path id="路径 5" style="stroke:#333333; stroke-width:1.3333333333333333; stroke-opacity:1; stroke-dasharray:0 0" transform="translate(5.333333333333333 8) rotate(0 1.3333333333333333 0)" d="M0,0L2.67,0 " /></g></g></svg>


View File

@ -3,16 +3,17 @@ import "./styles/globals.scss";
import "./styles/markdown.scss";
import "./styles/prism.scss";
import process from "child_process";
import { ACCESS_CODES } from "./api/access";
import { ACCESS_CODES, IS_IN_DOCKER } from "./api/access";
let COMMIT_ID: string | undefined;
try {
COMMIT_ID = process
// .execSync("git describe --tags --abbrev=0")
.execSync("git rev-parse --short HEAD")
.toString()
.trim();
} catch (e) {
console.error("No git or not from git repo.")
console.error("No git or not from git repo.");
}
export const metadata = {
@ -22,13 +23,13 @@ export const metadata = {
title: "ChatGPT Next Web",
statusBarStyle: "black-translucent",
},
themeColor: "#fafafa"
themeColor: "#fafafa",
};
function Meta() {
const metas = {
version: COMMIT_ID ?? "unknown",
access: ACCESS_CODES.size > 0 ? "enabled" : "disabled",
access: ACCESS_CODES.size > 0 || IS_IN_DOCKER ? "enabled" : "disabled",
};
return (

View File

@ -1,3 +1,5 @@
import { SubmitKey } from "../store/app";
const cn = {
WIP: "该功能仍在开发中……",
Error: {
@ -16,8 +18,15 @@ const cn = {
Stop: "停止",
Retry: "重试",
},
Rename: "重命名对话",
Typing: "正在输入…",
Input: (submitKey: string) => `输入消息,${submitKey} 发送`,
Input: (submitKey: string) => {
var inputHints = `输入消息,${submitKey} 发送`;
if (submitKey === String(SubmitKey.Enter)) {
inputHints += "Shift + Enter 换行";
}
return inputHints;
},
Send: "发送",
},
Export: {
@ -45,11 +54,18 @@ const cn = {
Lang: {
Name: "Language",
Options: {
cn: "中文",
cn: "简体中文",
en: "English",
tw: "繁體中文",
es: "Español",
},
},
Avatar: "头像",
FontSize: {
Title: "字体大小",
SubTitle: "聊天内容的字体大小",
},
Update: {
Version: (x: string) => `当前版本:${x}`,
IsLatest: "已是最新版本",
@ -61,6 +77,16 @@ const cn = {
SendKey: "发送键",
Theme: "主题",
TightBorder: "紧凑边框",
Prompt: {
Disable: {
Title: "禁用提示词自动补全",
SubTitle: "在输入框开头输入 / 即可触发自动补全",
},
List: "自定义提示词列表",
ListCount: (builtin: number, custom: number) =>
`内置 ${builtin} 条,用户定义 ${custom}`,
Edit: "编辑",
},
HistoryCount: {
Title: "附带历史消息数",
SubTitle: "每次请求携带的历史消息数",
@ -74,6 +100,14 @@ const cn = {
SubTitle: "使用自己的 Key 可绕过受控访问限制",
Placeholder: "OpenAI API Key",
},
Usage: {
Title: "账户余额",
SubTitle(granted: any, used: any) {
return `总共 $${granted},已使用 $${used}`;
},
IsChecking: "正在检查…",
Check: "重新检查",
},
AccessCode: {
Title: "访问码",
SubTitle: "现在是受控访问状态",
@ -101,7 +135,7 @@ const cn = {
History: (content: string) =>
"这是 ai 和用户的历史聊天总结作为前情提要:" + content,
Topic:
"直接返回这句话的简要主题,不要解释,如果没有主题,请直接返回“闲聊”",
"使用四到五个字直接返回这句话的简要主题,不要解释、不要标点、不要语气词、不要多余文本,如果没有主题,请直接返回“闲聊”",
Summarize:
"简要总结一下你和用户的对话,用作后续的上下文提示 prompt控制在 50 字以内",
},

View File

@ -1,3 +1,4 @@
import { SubmitKey } from "../store/app";
import type { LocaleType } from "./index";
const en: LocaleType = {
@ -19,9 +20,15 @@ const en: LocaleType = {
Stop: "Stop",
Retry: "Retry",
},
Rename: "Rename Chat",
Typing: "Typing…",
Input: (submitKey: string) =>
`Type something and press ${submitKey} to send`,
Input: (submitKey: string) => {
var inputHints = `Type something and press ${submitKey} to send`;
if (submitKey === String(SubmitKey.Enter)) {
inputHints += ", press Shift + Enter to newline";
}
return inputHints;
},
Send: "Send",
},
Export: {
@ -47,13 +54,19 @@ const en: LocaleType = {
Close: "Close",
},
Lang: {
Name: "语言",
Name: "Language",
Options: {
cn: "中文",
cn: "简体中文",
en: "English",
tw: "繁體中文",
es: "Español",
},
},
Avatar: "Avatar",
FontSize: {
Title: "Font Size",
SubTitle: "Adjust font size of chat content",
},
Update: {
Version: (x: string) => `Version: ${x}`,
IsLatest: "Latest version",
@ -65,6 +78,16 @@ const en: LocaleType = {
SendKey: "Send Key",
Theme: "Theme",
TightBorder: "Tight Border",
Prompt: {
Disable: {
Title: "Disable auto-completion",
SubTitle: "Input / to trigger auto-completion",
},
List: "Prompt List",
ListCount: (builtin: number, custom: number) =>
`${builtin} built-in, ${custom} user-defined`,
Edit: "Edit",
},
HistoryCount: {
Title: "Attached Messages Count",
SubTitle: "Number of sent messages attached per request",
@ -79,6 +102,14 @@ const en: LocaleType = {
SubTitle: "Use your key to ignore access code limit",
Placeholder: "OpenAI API Key",
},
Usage: {
Title: "Account Balance",
SubTitle(granted: any, used: any) {
return `Total $${granted}, Used $${used}`;
},
IsChecking: "Checking...",
Check: "Check Again",
},
AccessCode: {
Title: "Access Code",
SubTitle: "Access control enabled",
@ -108,7 +139,7 @@ const en: LocaleType = {
"This is a summary of the chat history between the AI and the user as a recap: " +
content,
Topic:
"Provide a brief topic of the sentence without explanation. If there is no topic, return 'Chitchat'.",
"Please generate a four to five word title summarizing our conversation without any lead-in, punctuation, quotation marks, periods, symbols, or additional text. Remove enclosing quotation marks.",
Summarize:
"Summarize our discussion briefly in 50 characters or less to use as a prompt for future context.",
},

app/locales/es.ts Normal file (156 lines)
View File

@ -0,0 +1,156 @@
import { SubmitKey } from "../store/app";
import type { LocaleType } from "./index";
const es: LocaleType = {
WIP: "En construcción...",
Error: {
Unauthorized:
"Acceso no autorizado, por favor ingrese el código de acceso en la página de configuración.",
},
ChatItem: {
ChatItemCount: (count: number) => `${count} mensajes`,
},
Chat: {
SubTitle: (count: number) => `${count} mensajes con ChatGPT`,
Actions: {
ChatList: "Ir a la lista de chats",
CompressedHistory: "Historial de memoria comprimido",
Export: "Exportar todos los mensajes como Markdown",
Copy: "Copiar",
Stop: "Detener",
Retry: "Reintentar",
},
Rename: "Renombrar chat",
Typing: "Escribiendo...",
Input: (submitKey: string) => {
var inputHints = `Escribe algo y presiona ${submitKey} para enviar`;
if (submitKey === String(SubmitKey.Enter)) {
inputHints += ", presiona Shift + Enter para nueva línea";
}
return inputHints;
},
Send: "Enviar",
},
Export: {
Title: "Todos los mensajes",
Copy: "Copiar todo",
Download: "Descargar",
},
Memory: {
Title: "Historial de memoria",
EmptyContent: "Aún no hay nada.",
Copy: "Copiar todo",
},
Home: {
NewChat: "Nuevo chat",
DeleteChat: "¿Confirmar eliminación de la conversación seleccionada?",
},
Settings: {
Title: "Configuración",
SubTitle: "Todas las configuraciones",
Actions: {
ClearAll: "Borrar todos los datos",
ResetAll: "Restablecer todas las configuraciones",
Close: "Cerrar",
},
Lang: {
Name: "Language",
Options: {
cn: "简体中文",
en: "Inglés",
tw: "繁體中文",
es: "Español",
},
},
Avatar: "Avatar",
FontSize: {
Title: "Tamaño de fuente",
SubTitle: "Ajustar el tamaño de fuente del contenido del chat",
},
Update: {
Version: (x: string) => `Versión: ${x}`,
IsLatest: "Última versión",
CheckUpdate: "Buscar actualizaciones",
IsChecking: "Buscando actualizaciones...",
FoundUpdate: (x: string) => `Se encontró una nueva versión: ${x}`,
GoToUpdate: "Actualizar",
},
SendKey: "Tecla de envío",
Theme: "Tema",
TightBorder: "Borde ajustado",
Prompt: {
Disable: {
Title: "Desactivar autocompletado",
SubTitle: "Escribe / para activar el autocompletado",
},
List: "Lista de autocompletado",
ListCount: (builtin: number, custom: number) =>
`${builtin} incorporado, ${custom} definido por el usuario`,
Edit: "Editar",
},
HistoryCount: {
Title: "Cantidad de mensajes adjuntos",
SubTitle: "Número de mensajes enviados adjuntos por solicitud",
},
CompressThreshold: {
Title: "Umbral de compresión de historial",
SubTitle:
"Se comprimirán los mensajes si la longitud de los mensajes no comprimidos supera el valor",
},
Token: {
Title: "Clave de API",
SubTitle: "Utiliza tu clave para ignorar el límite de código de acceso",
Placeholder: "Clave de la API de OpenAI",
},
Usage: {
Title: "Saldo de la cuenta",
SubTitle(granted: any, used: any) {
return `Total $${granted}, Usado $${used}`;
},
IsChecking: "Comprobando...",
Check: "Comprobar de nuevo",
},
AccessCode: {
Title: "Código de acceso",
SubTitle: "Control de acceso habilitado",
Placeholder: "Necesita código de acceso",
},
Model: "Modelo",
Temperature: {
Title: "Temperatura",
SubTitle: "Un valor mayor genera una salida más aleatoria",
},
MaxTokens: {
Title: "Máximo de tokens",
SubTitle: "Longitud máxima de tokens de entrada y tokens generados",
},
PresencePenlty: {
Title: "Penalización de presencia",
SubTitle:
"Un valor mayor aumenta la probabilidad de hablar sobre nuevos temas",
},
},
Store: {
DefaultTopic: "Nueva conversación",
BotHello: "¡Hola! ¿Cómo puedo ayudarte hoy?",
Error: "Algo salió mal, por favor intenta nuevamente más tarde.",
Prompt: {
History: (content: string) =>
"Este es un resumen del historial del chat entre la IA y el usuario como recapitulación: " +
content,
Topic:
"Por favor, genera un título de cuatro a cinco palabras que resuma nuestra conversación sin ningún inicio, puntuación, comillas, puntos, símbolos o texto adicional. Elimina las comillas que lo envuelven.",
Summarize:
"Resuma nuestra discusión brevemente en 50 caracteres o menos para usarlo como un recordatorio para futuros contextos.",
},
ConfirmClearAll:
"¿Confirmar para borrar todos los datos de chat y configuración?",
},
Copy: {
Success: "Copiado al portapapeles",
Failed:
"La copia falló, por favor concede permiso para acceder al portapapeles",
},
};
export default es;

View File

@ -1,53 +1,60 @@
import CN from './cn'
import EN from './en'
import CN from "./cn";
import EN from "./en";
import TW from "./tw";
import ES from "./es";
export type { LocaleType } from './cn'
export type { LocaleType } from "./cn";
type Lang = 'en' | 'cn'
export const AllLangs = ["en", "cn", "tw", "es"] as const;
type Lang = (typeof AllLangs)[number];
const LANG_KEY = 'lang'
const LANG_KEY = "lang";
function getItem(key: string) {
try {
return localStorage.getItem(key)
return localStorage.getItem(key);
} catch {
return null
return null;
}
}
function setItem(key: string, value: string) {
try {
localStorage.setItem(key, value)
localStorage.setItem(key, value);
} catch {}
}
function getLanguage() {
try {
return navigator.language.toLowerCase()
return navigator.language.toLowerCase();
} catch {
return 'cn'
return "cn";
}
}
export function getLang(): Lang {
const savedLang = getItem(LANG_KEY)
const savedLang = getItem(LANG_KEY);
if (['en', 'cn'].includes(savedLang ?? '')) {
return savedLang as Lang
if (AllLangs.includes((savedLang ?? "") as Lang)) {
return savedLang as Lang;
}
const lang = getLanguage()
const lang = getLanguage();
if (lang.includes('zh') || lang.includes('cn')) {
return 'cn'
if (lang.includes("zh") || lang.includes("cn")) {
return "cn";
} else if (lang.includes("tw")) {
return "tw";
} else if (lang.includes("es")) {
return "es";
} else {
return 'en'
return "en";
}
}
export function changeLang(lang: Lang) {
setItem(LANG_KEY, lang)
location.reload()
setItem(LANG_KEY, lang);
location.reload();
}
export default { en: EN, cn: CN }[getLang()]
export default { en: EN, cn: CN, tw: TW, es: ES }[getLang()];

app/locales/tw.ts Normal file (149 lines)
View File

@ -0,0 +1,149 @@
import { SubmitKey } from "../store/app";
import type { LocaleType } from "./index";
const tw: LocaleType = {
WIP: "該功能仍在開發中……",
Error: {
Unauthorized: "目前您的狀態是未授權,請前往設定頁面填寫授權碼。",
},
ChatItem: {
ChatItemCount: (count: number) => `${count} 條對話`,
},
Chat: {
SubTitle: (count: number) => `您已經與 ChatGPT 進行了 ${count} 條對話`,
Actions: {
ChatList: "查看消息列表",
CompressedHistory: "查看壓縮後的歷史 Prompt",
Export: "匯出聊天紀錄",
Copy: "複製",
Stop: "停止",
Retry: "重試",
},
Rename: "重命名對話",
Typing: "正在輸入…",
Input: (submitKey: string) => {
var inputHints = `輸入訊息後,按下 ${submitKey} 鍵即可發送`;
if (submitKey === String(SubmitKey.Enter)) {
inputHints += "Shift + Enter 鍵換行";
}
return inputHints;
},
Send: "發送",
},
Export: {
Title: "匯出聊天記錄為 Markdown",
Copy: "複製全部",
Download: "下載檔案",
},
Memory: {
Title: "上下文記憶 Prompt",
EmptyContent: "尚未記憶",
Copy: "複製全部",
},
Home: {
NewChat: "新的對話",
DeleteChat: "確定要刪除選取的對話嗎?",
},
Settings: {
Title: "設定",
SubTitle: "設定選項",
Actions: {
ClearAll: "清除所有數據",
ResetAll: "重置所有設定",
Close: "關閉",
},
Lang: {
Name: "Language",
Options: {
cn: "简体中文",
en: "English",
tw: "繁體中文",
es: "Español",
},
},
Avatar: "大頭貼",
FontSize: {
Title: "字型大小",
SubTitle: "聊天內容的字型大小",
},
Update: {
Version: (x: string) => `當前版本:${x}`,
IsLatest: "已是最新版本",
CheckUpdate: "檢查更新",
IsChecking: "正在檢查更新...",
FoundUpdate: (x: string) => `發現新版本:${x}`,
GoToUpdate: "前往更新",
},
SendKey: "發送鍵",
Theme: "主題",
TightBorder: "緊湊邊框",
Prompt: {
Disable: {
Title: "停用提示詞自動補全",
SubTitle: "在輸入框開頭輸入 / 即可觸發自動補全",
},
List: "自定義提示詞列表",
ListCount: (builtin: number, custom: number) =>
`內置 ${builtin} 條,用戶定義 ${custom}`,
Edit: "編輯",
},
HistoryCount: {
Title: "附帶歷史訊息數",
SubTitle: "每次請求附帶的歷史訊息數",
},
CompressThreshold: {
Title: "歷史訊息長度壓縮閾值",
SubTitle: "當未壓縮的歷史訊息超過該值時,將進行壓縮",
},
Token: {
Title: "API Key",
SubTitle: "使用自己的 Key 可規避受控訪問限制",
Placeholder: "OpenAI API Key",
},
Usage: {
Title: "帳戶餘額",
SubTitle(granted: any, used: any) {
return `總共 $${granted},已使用 $${used}`;
},
IsChecking: "正在檢查…",
Check: "重新檢查",
},
AccessCode: {
Title: "訪問碼",
SubTitle: "現在是受控訪問狀態",
Placeholder: "請輸入訪問碼",
},
Model: "模型 (model)",
Temperature: {
Title: "隨機性 (temperature)",
SubTitle: "值越大,回復越隨機",
},
MaxTokens: {
Title: "單次回復限制 (max_tokens)",
SubTitle: "單次交互所用的最大 Token 數",
},
PresencePenlty: {
Title: "話題新穎度 (presence_penalty)",
SubTitle: "值越大,越有可能擴展到新話題",
},
},
Store: {
DefaultTopic: "新的對話",
BotHello: "請問需要我的協助嗎?",
Error: "出錯了,請稍後再嘗試",
Prompt: {
History: (content: string) =>
"這是 AI 與用戶的歷史聊天總結,作為前情提要:" + content,
Topic: "直接返回這句話的簡要主題,無須解釋,若無主題,請直接返回「閒聊」",
Summarize:
"簡要總結一下你和用戶的對話,作為後續的上下文提示 prompt且字數控制在 50 字以內",
},
ConfirmClearAll: "確認清除所有對話、設定數據?",
},
Copy: {
Success: "已複製到剪貼簿中",
Failed: "複製失敗,請賦予剪貼簿權限",
},
};
export default tw;

View File

@ -1,7 +1,11 @@
import type { ChatRequest, ChatReponse } from "./api/chat/typing";
import type { ChatRequest, ChatReponse } from "./api/openai/typing";
import { filterConfig, Message, ModelConfig, useAccessStore } from "./store";
import Locale from "./locales";
if (!Array.prototype.at) {
require("array.prototype.at/auto");
}
const TIME_OUT_MS = 30000;
const makeRequestParam = (
@ -9,7 +13,7 @@ const makeRequestParam = (
options?: {
filterBot?: boolean;
stream?: boolean;
}
},
): ChatRequest => {
let sendMessages = messages.map((v) => ({
role: v.role,
@ -42,19 +46,47 @@ function getHeaders() {
return headers;
}
export function requestOpenaiClient(path: string) {
return (body: any, method = "POST") =>
fetch("/api/openai", {
method,
headers: {
"Content-Type": "application/json",
path,
...getHeaders(),
},
body: body && JSON.stringify(body),
});
}
export async function requestChat(messages: Message[]) {
const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
const res = await fetch("/api/chat", {
method: "POST",
headers: {
"Content-Type": "application/json",
...getHeaders(),
},
body: JSON.stringify(req),
});
const res = await requestOpenaiClient("v1/chat/completions")(req);
return (await res.json()) as ChatReponse;
try {
const response = (await res.json()) as ChatReponse;
return response;
} catch (error) {
console.error("[Request Chat] ", error, res.body);
}
}
export async function requestUsage() {
const res = await requestOpenaiClient(
"dashboard/billing/credit_grants?_vercel_no_cache=1",
)(null, "GET");
try {
const response = (await res.json()) as {
total_available: number;
total_granted: number;
total_used: number;
};
return response;
} catch (error) {
console.error("[Request usage] ", error, res.body);
}
}
export async function requestChatStream(
@ -65,7 +97,7 @@ export async function requestChatStream(
onMessage: (message: string, done: boolean) => void;
onError: (error: Error) => void;
onController?: (controller: AbortController) => void;
}
},
) {
const req = makeRequestParam(messages, {
stream: true,
@ -87,6 +119,7 @@ export async function requestChatStream(
method: "POST",
headers: {
"Content-Type": "application/json",
path: "v1/chat/completions",
...getHeaders(),
},
body: JSON.stringify(req),
@ -129,7 +162,7 @@ export async function requestChatStream(
responseText = Locale.Error.Unauthorized;
finish();
} else {
console.error("Stream Error");
console.error("Stream Error", res.body);
options?.onError(new Error("Stream Error"));
}
} catch (err) {
@ -149,7 +182,7 @@ export async function requestWithPrompt(messages: Message[], prompt: string) {
const res = await requestChat(messages);
return res.choices.at(0)?.message?.content ?? "";
return res?.choices?.at(0)?.message?.content ?? "";
}
// To store message streaming controller
@ -159,7 +192,7 @@ export const ControllerPool = {
addController(
sessionIndex: number,
messageIndex: number,
controller: AbortController
controller: AbortController,
) {
const key = this.key(sessionIndex, messageIndex);
this.controllers[key] = controller;

View File

@ -11,6 +11,10 @@ import { trimTopic } from "../utils";
import Locale from "../locales";
if (!Array.prototype.at) {
require("array.prototype.at/auto");
}
export type Message = ChatCompletionResponseMessage & {
date: string;
streaming?: boolean;
@ -21,6 +25,7 @@ export enum SubmitKey {
CtrlEnter = "Ctrl + Enter",
ShiftEnter = "Shift + Enter",
AltEnter = "Alt + Enter",
MetaEnter = "Meta + Enter",
}
export enum Theme {
@ -30,15 +35,17 @@ export enum Theme {
}
export interface ChatConfig {
maxToken?: number;
historyMessageCount: number; // -1 means all
compressMessageLengthThreshold: number;
sendBotMessages: boolean; // send bot's message or not
submitKey: SubmitKey;
avatar: string;
fontSize: number;
theme: Theme;
tightBorder: boolean;
disablePromptHint: boolean;
modelConfig: {
model: string;
temperature: number;
@ -86,7 +93,9 @@ export function isValidNumber(x: number, min: number, max: number) {
return typeof x === "number" && x <= max && x >= min;
}
export function filterConfig(config: ModelConfig): Partial<ModelConfig> {
export function filterConfig(oldConfig: ModelConfig): Partial<ModelConfig> {
const config = Object.assign({}, oldConfig);
const validator: {
[k in keyof ModelConfig]: (x: ModelConfig[keyof ModelConfig]) => boolean;
} = {
@ -100,7 +109,7 @@ export function filterConfig(config: ModelConfig): Partial<ModelConfig> {
return isValidNumber(x as number, -2, 2);
},
temperature(x) {
return isValidNumber(x as number, 0, 1);
return isValidNumber(x as number, 0, 2);
},
};
@ -120,9 +129,12 @@ const DEFAULT_CONFIG: ChatConfig = {
sendBotMessages: true as boolean,
submitKey: SubmitKey.CtrlEnter as SubmitKey,
avatar: "1f603",
fontSize: 14,
theme: Theme.Auto as Theme,
tightBorder: false,
disablePromptHint: false,
modelConfig: {
model: "gpt-3.5-turbo",
temperature: 1,
@ -190,7 +202,7 @@ interface ChatStore {
updateMessage: (
sessionIndex: number,
messageIndex: number,
updater: (message?: Message) => void
updater: (message?: Message) => void,
) => void;
getMessagesWithMemory: () => Message[];
getMemoryPrompt: () => Message;
@ -201,6 +213,10 @@ interface ChatStore {
clearAllData: () => void;
}
function countMessages(msgs: Message[]) {
return msgs.reduce((pre, cur) => pre + cur.content.length, 0);
}
const LOCAL_KEY = "chat-next-web-store";
export const useChatStore = create<ChatStore>()(
@ -345,7 +361,7 @@ export const useChatStore = create<ChatStore>()(
ControllerPool.addController(
sessionIndex,
messageIndex,
controller
controller,
);
},
filterBot: !get().config.sendBotMessages,
@ -368,7 +384,7 @@ export const useChatStore = create<ChatStore>()(
const config = get().config;
const n = session.messages.length;
const recentMessages = session.messages.slice(
n - config.historyMessageCount
n - config.historyMessageCount,
);
const memoryPrompt = get().getMemoryPrompt();
@ -383,7 +399,7 @@ export const useChatStore = create<ChatStore>()(
updateMessage(
sessionIndex: number,
messageIndex: number,
updater: (message?: Message) => void
updater: (message?: Message) => void,
) {
const sessions = get().sessions;
const session = sessions.at(sessionIndex);
@ -395,29 +411,30 @@ export const useChatStore = create<ChatStore>()(
summarizeSession() {
const session = get().currentSession();
if (session.topic === DEFAULT_TOPIC && session.messages.length >= 3) {
// should summarize topic
// should summarize topic after chatting more than 50 words
const SUMMARIZE_MIN_LEN = 50;
if (
session.topic === DEFAULT_TOPIC &&
countMessages(session.messages) >= SUMMARIZE_MIN_LEN
) {
requestWithPrompt(session.messages, Locale.Store.Prompt.Topic).then(
(res) => {
get().updateCurrentSession(
(session) => (session.topic = trimTopic(res))
(session) => (session.topic = trimTopic(res)),
);
}
},
);
}
const config = get().config;
let toBeSummarizedMsgs = session.messages.slice(
session.lastSummarizeIndex
);
const historyMsgLength = toBeSummarizedMsgs.reduce(
(pre, cur) => pre + cur.content.length,
0
session.lastSummarizeIndex,
);
const historyMsgLength = countMessages(toBeSummarizedMsgs);
if (historyMsgLength > 4000) {
toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
-config.historyMessageCount
-config.historyMessageCount,
);
}
@ -430,7 +447,7 @@ export const useChatStore = create<ChatStore>()(
"[Chat History] ",
toBeSummarizedMsgs,
historyMsgLength,
config.compressMessageLengthThreshold
config.compressMessageLengthThreshold,
);
if (historyMsgLength > config.compressMessageLengthThreshold) {
@ -452,7 +469,7 @@ export const useChatStore = create<ChatStore>()(
onError(error) {
console.error("[Summarize] ", error);
},
}
},
);
}
},
@ -481,6 +498,6 @@ export const useChatStore = create<ChatStore>()(
{
name: LOCAL_KEY,
version: 1,
}
)
},
),
);

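Note: alongside the temperature validator now accepting the full 0 to 2 range, topic summarization is gated on total content length rather than message count. A minimal sketch of the new gate, assuming a Message with a string content field and passing the default topic in explicitly:

// Sketch of the summarize gate introduced above (Message shape assumed).
interface Message {
  role: string;
  content: string;
}

function countMessages(msgs: Message[]) {
  return msgs.reduce((pre, cur) => pre + cur.content.length, 0);
}

const SUMMARIZE_MIN_LEN = 50;

function shouldSummarizeTopic(topic: string, msgs: Message[], defaultTopic: string) {
  // only summarize while the session still has the default topic
  // and at least ~50 characters have been exchanged
  return topic === defaultTopic && countMessages(msgs) >= SUMMARIZE_MIN_LEN;
}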
117 app/store/prompt.ts Normal file
View File

@ -0,0 +1,117 @@
import { create } from "zustand";
import { persist } from "zustand/middleware";
import Fuse from "fuse.js";
export interface Prompt {
id?: number;
title: string;
content: string;
}
export interface PromptStore {
latestId: number;
prompts: Map<number, Prompt>;
add: (prompt: Prompt) => number;
remove: (id: number) => void;
search: (text: string) => Prompt[];
}
export const PROMPT_KEY = "prompt-store";
export const SearchService = {
ready: false,
engine: new Fuse<Prompt>([], { keys: ["title"] }),
count: {
builtin: 0,
},
init(prompts: Prompt[]) {
if (this.ready) {
return;
}
this.engine.setCollection(prompts);
this.ready = true;
},
remove(id: number) {
this.engine.remove((doc) => doc.id === id);
},
add(prompt: Prompt) {
this.engine.add(prompt);
},
search(text: string) {
const results = this.engine.search(text);
return results.map((v) => v.item);
},
};
export const usePromptStore = create<PromptStore>()(
persist(
(set, get) => ({
latestId: 0,
prompts: new Map(),
add(prompt) {
const prompts = get().prompts;
prompt.id = get().latestId + 1;
prompts.set(prompt.id, prompt);
set(() => ({
latestId: prompt.id!,
prompts: prompts,
}));
return prompt.id!;
},
remove(id) {
const prompts = get().prompts;
prompts.delete(id);
SearchService.remove(id);
set(() => ({
prompts,
}));
},
search(text) {
return SearchService.search(text) as Prompt[];
},
}),
{
name: PROMPT_KEY,
version: 1,
onRehydrateStorage(state) {
const PROMPT_URL = "./prompts.json";
type PromptList = Array<[string, string]>;
fetch(PROMPT_URL)
.then((res) => res.json())
.then((res) => {
const builtinPrompts = [res.en, res.cn]
.map((promptList: PromptList) => {
return promptList.map(
([title, content]) =>
({
title,
content,
} as Prompt),
);
})
.concat([...(state?.prompts?.values() ?? [])]);
const allPromptsForSearch = builtinPrompts.reduce(
(pre, cur) => pre.concat(cur),
[],
);
SearchService.count.builtin = res.en.length + res.cn.length;
SearchService.init(allPromptsForSearch);
});
},
},
),
);

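A short usage sketch for the new prompt store and its Fuse.js-backed search. The prompt title and content below are hypothetical; usePromptStore and SearchService are the exports shown above:

// Note: in the store above, add() persists the prompt but does not index it,
// so this sketch registers it with SearchService explicitly.
const prompt = { title: "Linux Terminal", content: "Act as a linux terminal." };
const store = usePromptStore.getState();

const id = store.add(prompt);          // stores in the persisted Map, returns the new id
SearchService.add({ ...prompt, id });  // make it searchable

const hits = store.search("terminal"); // fuzzy match on title
console.log(hits.map((p) => p.title));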
View File

@ -1,7 +1,7 @@
import { create } from "zustand";
import { persist } from "zustand/middleware";
import { FETCH_COMMIT_URL } from "../constant";
import { getCurrentCommitId } from "../utils";
import { FETCH_COMMIT_URL, FETCH_TAG_URL } from "../constant";
import { getCurrentVersion } from "../utils";
export interface UpdateStore {
lastUpdate: number;
@ -19,16 +19,17 @@ export const useUpdateStore = create<UpdateStore>()(
remoteId: "",
async getLatestCommitId(force = false) {
const overOneHour = Date.now() - get().lastUpdate > 3600 * 1000;
const shouldFetch = force || overOneHour;
const overTenMins = Date.now() - get().lastUpdate > 10 * 60 * 1000;
const shouldFetch = force || overTenMins;
if (!shouldFetch) {
return getCurrentCommitId();
return getCurrentVersion();
}
try {
// const data = await (await fetch(FETCH_TAG_URL)).json();
// const remoteId = data[0].name as string;
const data = await (await fetch(FETCH_COMMIT_URL)).json();
const sha = data[0].sha as string;
const remoteId = sha.substring(0, 7);
const remoteId = (data[0].sha as string).substring(0, 7);
set(() => ({
lastUpdate: Date.now(),
remoteId,
@ -37,13 +38,13 @@ export const useUpdateStore = create<UpdateStore>()(
return remoteId;
} catch (error) {
console.error("[Fetch Upstream Commit Id]", error);
return getCurrentCommitId();
return getCurrentVersion();
}
},
}),
{
name: UPDATE_KEY,
version: 1,
}
)
},
),
);

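The update check now refetches at most once every ten minutes instead of hourly. A minimal sketch of the throttle, assuming lastUpdate is the millisecond timestamp persisted by the store:

// Skip the network call unless forced or the cached id is stale.
const TEN_MINUTES = 10 * 60 * 1000;

function shouldFetchRemoteId(lastUpdate: number, force = false): boolean {
  const overTenMins = Date.now() - lastUpdate > TEN_MINUTES;
  return force || overTenMins;
}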
View File

@ -53,12 +53,13 @@
--sidebar-width: 300px;
--window-content-width: calc(100% - var(--sidebar-width));
--message-max-width: 80%;
--full-height: 100%;
}
@media only screen and (max-width: 600px) {
:root {
--window-width: 100vw;
--window-height: 100vh;
--window-height: var(--full-height);
--sidebar-width: 100vw;
--window-content-width: var(--window-width);
--message-max-width: 100%;
@ -74,13 +75,16 @@
@include dark;
}
}
html {
height: var(--full-height);
}
body {
background-color: var(--gray);
color: var(--black);
margin: 0;
padding: 0;
height: 100vh;
height: var(--full-height);
width: 100vw;
display: flex;
justify-content: center;
@ -119,6 +123,11 @@ select {
cursor: pointer;
background-color: var(--white);
color: var(--black);
text-align: center;
}
input {
text-align: center;
}
input[type="checkbox"] {
@ -196,7 +205,7 @@ div.math {
position: fixed;
top: 0;
left: 0;
height: 100vh;
height: var(--full-height);
width: 100vw;
background-color: rgba($color: #000000, $alpha: 0.5);
display: flex;

View File

@ -839,21 +839,20 @@
.markdown-body .highlight pre,
.markdown-body pre {
padding: 16px;
padding: 16px 16px 8px 16px;
overflow: auto;
font-size: 85%;
line-height: 1.45;
background-color: var(--color-canvas-subtle);
border-radius: 6px;
}
.markdown-body pre code,
.markdown-body pre tt {
display: inline;
max-width: auto;
display: inline-block;
max-width: 100%;
padding: 0;
margin: 0;
overflow: visible;
overflow-x: scroll;
line-height: inherit;
word-wrap: normal;
background-color: transparent;

View File

@ -120,33 +120,3 @@
cursor: help;
}
}
// @mixin light {
// .markdown-body pre[class*="language-"] {
// filter: invert(1) hue-rotate(50deg) brightness(1.3);
// }
// }
// @mixin dark {
// .markdown-body pre[class*="language-"] {
// filter: none;
// }
// }
// :root {
// @include light();
// }
// .light {
// @include light();
// }
// .dark {
// @include dark();
// }
// @media (prefers-color-scheme: dark) {
// :root {
// @include dark();
// }
// }

View File

@ -2,15 +2,7 @@ import { showToast } from "./components/ui-lib";
import Locale from "./locales";
export function trimTopic(topic: string) {
const s = topic.split("");
let lastChar = s.at(-1); // get the last character of s
let pattern = /[,。!?、]/; // regex matching Chinese punctuation marks
while (lastChar && pattern.test(lastChar!)) {
s.pop();
lastChar = s.at(-1);
}
return s.join("");
return topic.replace(/[,。!?、,.!?]*$/, "");
}
export function copyToClipboard(text: string) {
@ -28,7 +20,7 @@ export function downloadAs(text: string, filename: string) {
const element = document.createElement("a");
element.setAttribute(
"href",
"data:text/plain;charset=utf-8," + encodeURIComponent(text)
"data:text/plain;charset=utf-8," + encodeURIComponent(text),
);
element.setAttribute("download", filename);
@ -45,6 +37,10 @@ export function isIOS() {
return /iphone|ipad|ipod/.test(userAgent);
}
export function isMobileScreen() {
return window.innerWidth <= 600;
}
export function selectOrCopy(el: HTMLElement, content: string) {
const currentSelection = window.getSelection();
@ -61,7 +57,7 @@ export function queryMeta(key: string, defaultValue?: string): string {
let ret: string;
if (document) {
const meta = document.head.querySelector(
`meta[name='${key}']`
`meta[name='${key}']`,
) as HTMLMetaElement;
ret = meta?.content ?? "";
} else {
@ -72,7 +68,7 @@ export function queryMeta(key: string, defaultValue?: string): string {
}
let currentId: string;
export function getCurrentCommitId() {
export function getCurrentVersion() {
if (currentId) {
return currentId;
}

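The simplified trimTopic strips only a trailing run of Chinese or English punctuation, so punctuation inside the topic is preserved. A few illustrative calls (inputs are hypothetical):

trimTopic("新的聊天,。!"); // "新的聊天"
trimTopic("New Chat?!");     // "New Chat"
trimTopic("v2.0 notes");     // "v2.0 notes" (inner punctuation kept)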
View File

@ -3,10 +3,10 @@ import { ACCESS_CODES } from "./app/api/access";
import md5 from "spark-md5";
export const config = {
matcher: ["/api/chat", "/api/chat-stream"],
matcher: ["/api/openai", "/api/chat-stream"],
};
export function middleware(req: NextRequest, res: NextResponse) {
export function middleware(req: NextRequest) {
const accessCode = req.headers.get("access-code");
const token = req.headers.get("token");
const hashedCode = md5.hash(accessCode ?? "").trim();
@ -18,14 +18,40 @@ export function middleware(req: NextRequest, res: NextResponse) {
if (ACCESS_CODES.size > 0 && !ACCESS_CODES.has(hashedCode) && !token) {
return NextResponse.json(
{
error: true,
needAccessCode: true,
hint: "Please go settings page and fill your access code.",
msg: "Please go settings page and fill your access code.",
},
{
status: 401,
}
},
);
}
return NextResponse.next();
// inject api key
if (!token) {
const apiKey = process.env.OPENAI_API_KEY;
if (apiKey) {
console.log("[Auth] set system token");
req.headers.set("token", apiKey);
} else {
return NextResponse.json(
{
error: true,
msg: "Empty Api Key",
},
{
status: 401,
},
);
}
} else {
console.log("[Auth] set user token");
}
return NextResponse.next({
request: {
headers: req.headers,
},
});
}

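On the client side, requests to the matched routes are expected to carry either an access-code header (md5-hashed and checked against ACCESS_CODES server-side) or a user token; when only an access code is given, the middleware injects OPENAI_API_KEY as the token. A hypothetical client call, with placeholder values and an illustrative body shape only:

const userAccessCode = "my-access-code"; // placeholder
const messages = [{ role: "user", content: "hello" }]; // placeholder payload

await fetch("/api/chat-stream", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "access-code": userAccessCode, // plain code; hashed and checked server-side
    // "token": userApiKey,        // optional: a user-supplied OpenAI key
  },
  body: JSON.stringify({ messages }),
});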
View File

@ -4,36 +4,50 @@
"private": false,
"license": "Anti 996",
"scripts": {
"dev": "next dev",
"build": "next build",
"dev": "yarn fetch && next dev",
"build": "yarn fetch && next build",
"start": "next start",
"lint": "next lint"
"lint": "next lint",
"fetch": "node ./scripts/fetch-prompts.mjs",
"prepare": "husky install"
},
"dependencies": {
"@svgr/webpack": "^6.5.1",
"@types/node": "^18.14.6",
"@types/react": "^18.0.28",
"@types/react-dom": "^18.0.11",
"@types/react-katex": "^3.0.0",
"@types/spark-md5": "^3.0.2",
"@vercel/analytics": "^0.1.11",
"cross-env": "^7.0.3",
"emoji-picker-react": "^4.4.7",
"eslint": "8.35.0",
"eslint-config-next": "13.2.3",
"eventsource-parser": "^0.1.0",
"fuse.js": "^6.6.2",
"next": "^13.2.3",
"node-fetch": "^3.3.1",
"openai": "^3.2.1",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-markdown": "^8.0.5",
"rehype-katex": "^6.0.2",
"rehype-prism-plus": "^1.5.1",
"remark-breaks": "^3.0.2",
"remark-gfm": "^3.0.1",
"remark-math": "^5.1.1",
"sass": "^1.59.2",
"spark-md5": "^3.0.2",
"typescript": "4.9.5",
"use-debounce": "^9.0.3",
"zustand": "^4.3.6"
},
"devDependencies": {
"@types/node": "^18.14.6",
"@types/react": "^18.0.28",
"@types/react-dom": "^18.0.11",
"@types/react-katex": "^3.0.0",
"@types/spark-md5": "^3.0.2",
"array.prototype.at": "^1.1.1",
"cross-env": "^7.0.3",
"eslint": "^8.36.0",
"eslint-config-next": "13.2.3",
"eslint-config-prettier": "^8.8.0",
"eslint-plugin-prettier": "^4.2.1",
"husky": "^8.0.0",
"lint-staged": "^13.2.0",
"prettier": "^2.8.7",
"typescript": "4.9.5"
}
}

4 public/robots.txt Normal file
View File

@ -0,0 +1,4 @@
User-agent: *
Disallow: /
User-agent: vitals.vercel-insights.com
Allow: /

View File

@ -1,24 +1,13 @@
const CHATGPT_NEXT_WEB_CACHE = "chatgpt-next-web-cache";
self.addEventListener('activate', function (event) {
console.log('ServiceWorker activated.');
self.addEventListener("activate", function (event) {
console.log("ServiceWorker activated.");
});
self.addEventListener('install', function (event) {
self.addEventListener("install", function (event) {
event.waitUntil(
caches.open(CHATGPT_NEXT_WEB_CACHE)
.then(function (cache) {
return cache.addAll([
]);
})
);
});
self.addEventListener('fetch', function (event) {
event.respondWith(
caches.match(event.request)
.then(function (response) {
return response || fetch(event.request);
})
caches.open(CHATGPT_NEXT_WEB_CACHE).then(function (cache) {
return cache.addAll([]);
}),
);
});

53 scripts/fetch-prompts.mjs Normal file
View File

@ -0,0 +1,53 @@
import fetch from "node-fetch";
import fs from "fs/promises";
const RAW_CN_URL =
"https://raw.githubusercontent.com/PlexPt/awesome-chatgpt-prompts-zh/main/prompts-zh.json";
const CN_URL =
"https://cdn.jsdelivr.net/gh/PlexPt/awesome-chatgpt-prompts-zh@main/prompts-zh.json";
const RAW_EN_URL =
"https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv";
const EN_URL =
"https://cdn.jsdelivr.net/gh/f/awesome-chatgpt-prompts@main/prompts.csv";
const FILE = "./public/prompts.json";
async function fetchCN() {
console.log("[Fetch] fetching cn prompts...");
try {
const raw = await (await fetch(CN_URL)).json();
return raw.map((v) => [v.act, v.prompt]);
} catch (error) {
console.error("[Fetch] failed to fetch cn prompts", error);
return [];
}
}
async function fetchEN() {
console.log("[Fetch] fetching en prompts...");
try {
const raw = await (await fetch(EN_URL)).text();
return raw
.split("\n")
.slice(1)
.map((v) => v.split('","').map((v) => v.replace('"', "")));
} catch (error) {
console.error("[Fetch] failed to fetch en prompts", error);
return [];
}
}
async function main() {
Promise.all([fetchCN(), fetchEN()])
.then(([cn, en]) => {
fs.writeFile(FILE, JSON.stringify({ cn, en }));
})
.catch((e) => {
console.error("[Fetch] failed to fetch prompts");
fs.writeFile(FILE, JSON.stringify({ cn: [], en: [] }));
})
.finally(() => {
console.log("[Fetch] saved to " + FILE);
});
}
main();

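The script writes public/prompts.json in the shape consumed by onRehydrateStorage in app/store/prompt.ts. A minimal sketch of that payload type:

// Expected shape of public/prompts.json: two lists of [title, content] pairs.
type PromptList = Array<[title: string, content: string]>;

interface PromptsFile {
  cn: PromptList; // from awesome-chatgpt-prompts-zh
  en: PromptList; // parsed from the awesome-chatgpt-prompts CSV
}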
64 scripts/setup.sh Normal file
View File

@ -0,0 +1,64 @@
#!/bin/bash
# Check if running on a supported system
case "$(uname -s)" in
Linux)
if [[ -f "/etc/lsb-release" ]]; then
. /etc/lsb-release
if [[ "$DISTRIB_ID" != "Ubuntu" ]]; then
echo "This script only works on Ubuntu, not $DISTRIB_ID."
exit 1
fi
else
if [[ ! "$(cat /etc/*-release | grep '^ID=')" =~ ^(ID=\"ubuntu\")|(ID=\"centos\")|(ID=\"arch\")$ ]]; then
echo "Unsupported Linux distribution."
exit 1
fi
fi
;;
Darwin)
echo "Running on MacOS."
;;
*)
echo "Unsupported operating system."
exit 1
;;
esac
# Check if needed dependencies are installed and install if necessary
if ! command -v node >/dev/null || ! command -v git >/dev/null || ! command -v yarn >/dev/null; then
case "$(uname -s)" in
Linux)
if [[ "$(cat /etc/*-release | grep '^ID=')" = "ID=\"ubuntu\"" ]]; then
sudo apt-get update
sudo apt-get -y install nodejs git yarn
elif [[ "$(cat /etc/*-release | grep '^ID=')" = "ID=\"centos\"" ]]; then
sudo yum -y install epel-release
sudo yum -y install nodejs git yarn
elif [[ "$(cat /etc/*-release | grep '^ID=')" = "ID=\"arch\"" ]]; then
sudo pacman -Syu -y
sudo pacman -S -y nodejs git yarn
else
echo "Unsupported Linux distribution"
exit 1
fi
;;
Darwin)
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
brew install node git yarn
;;
esac
fi
# Clone the repository and install dependencies
git clone https://github.com/Yidadaa/ChatGPT-Next-Web
cd ChatGPT-Next-Web
yarn install
# Prompt user for environment variables
read -p "Enter OPENAI_API_KEY: " OPENAI_API_KEY
read -p "Enter CODE: " CODE
read -p "Enter PORT: " PORT
# Build and run the project using the environment variables
OPENAI_API_KEY=$OPENAI_API_KEY CODE=$CODE PORT=$PORT yarn build && OPENAI_API_KEY=$OPENAI_API_KEY CODE=$CODE PORT=$PORT yarn start

1951 yarn.lock

File diff suppressed because it is too large