diff --git a/.eslintrc.json b/.eslintrc.json index bffb357a..d229e86f 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,3 +1,4 @@ { - "extends": "next/core-web-vitals" + "extends": "next/core-web-vitals", + "plugins": ["prettier"] } diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 5b2520eb..a7a29644 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -1,6 +1,7 @@ name: Publish Docker image on: + workflow_dispatch: release: types: [published] @@ -9,25 +10,43 @@ jobs: name: Push Docker image to Docker Hub runs-on: ubuntu-latest steps: - - name: Check out the repo + - + name: Check out the repo uses: actions/checkout@v3 - - - name: Log in to Docker Hub - uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 + - + name: Log in to Docker Hub + uses: docker/login-action@v2 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - - name: Extract metadata (tags, labels) for Docker + - + name: Extract metadata (tags, labels) for Docker id: meta - uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + uses: docker/metadata-action@v4 with: images: yidadaa/chatgpt-next-web + tags: | + type=raw,value=latest + type=ref,event=tag - - name: Build and push Docker image - uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc + - + name: Set up QEMU + uses: docker/setup-qemu-action@v2 + + - + name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - + name: Build and push Docker image + uses: docker/build-push-action@v4 with: context: . + platforms: linux/amd64 push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml new file mode 100644 index 00000000..1c9dc413 --- /dev/null +++ b/.github/workflows/sync.yml @@ -0,0 +1,29 @@ +name: Upstream Sync + +on: + schedule: + - cron: '0 */12 * * *' # every 12 hours + workflow_dispatch: # on button click + +jobs: + sync_latest_from_upstream: + name: Sync latest commits from upstream repo + runs-on: ubuntu-latest + + steps: + # Step 1: run a standard checkout action, provided by github + - name: Checkout target repo + uses: actions/checkout@v3 + + # Step 2: run the sync action + - name: Sync upstream changes + id: sync + uses: aormsby/Fork-Sync-With-Upstream-action@v3.4 + with: + upstream_sync_repo: Yidadaa/ChatGPT-Next-Web + upstream_sync_branch: main + target_sync_branch: main + target_repo_token: ${{ secrets.GITHUB_TOKEN }} # automatically generated, no need to set + + # Set test_mode true to run tests instead of the true action!! + test_mode: false diff --git a/.gitignore b/.gitignore index 354be805..0a3e52af 100644 --- a/.gitignore +++ b/.gitignore @@ -34,4 +34,6 @@ yarn-error.log* # typescript *.tsbuildinfo next-env.d.ts -dev \ No newline at end of file +dev + +public/prompts.json \ No newline at end of file diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100755 index 00000000..0312b760 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/usr/bin/env sh +. 
"$(dirname -- "$0")/_/husky.sh" + +npx lint-staged \ No newline at end of file diff --git a/.lintstagedrc.json b/.lintstagedrc.json new file mode 100644 index 00000000..58784bad --- /dev/null +++ b/.lintstagedrc.json @@ -0,0 +1,6 @@ +{ + "./app/**/*.{js,ts,jsx,tsx,json,html,css,md}": [ + "eslint --fix", + "prettier --write" + ] +} diff --git a/.prettierrc.js b/.prettierrc.js new file mode 100644 index 00000000..95cc75ff --- /dev/null +++ b/.prettierrc.js @@ -0,0 +1,10 @@ +module.exports = { + printWidth: 80, + tabWidth: 2, + useTabs: false, + semi: true, + singleQuote: false, + trailingComma: 'all', + bracketSpacing: true, + arrowParens: 'always', +}; diff --git a/Dockerfile b/Dockerfile index 314d6c36..6f7547b2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,13 +6,9 @@ RUN apk add --no-cache libc6-compat WORKDIR /app -COPY package.json yarn.lock* package-lock.json* ./ +COPY package.json yarn.lock ./ -RUN \ - if [ -f yarn.lock ]; then yarn --frozen-lockfile; \ - elif [ -f package-lock.json ]; then npm ci; \ - else echo "Lockfile not found." && exit 1; \ - fi +RUN yarn install FROM base AS builder diff --git a/README.md b/README.md index ca08c2c6..d026e61a 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,9 @@ One-Click to deploy your own ChatGPT web UI. -[演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N) / [微信群](https://user-images.githubusercontent.com/16968934/227772522-b3ba3713-9206-4c8d-a81f-22300b7c313a.jpg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) +[演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N) / [QQ 群](https://user-images.githubusercontent.com/16968934/228190818-7dd00845-e9b9-4363-97e5-44c507ac76da.jpeg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) / [Donate](#捐赠-donate-usdt) -[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web) +[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web) [![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web) @@ -22,6 +22,7 @@ One-Click to deploy your own ChatGPT web UI. - 在 1 分钟内使用 Vercel **免费一键部署** - 精心设计的 UI,响应式设计,支持深色模式 - 极快的首屏加载速度(~85kb) +- 海量的内置 prompt 列表,来自[中文](https://github.com/PlexPt/awesome-chatgpt-prompts-zh)和[英文](https://github.com/f/awesome-chatgpt-prompts) - 自动压缩上下文聊天记录,在节省 Token 的同时支持超长对话 - 一键导出聊天记录,完整的 Markdown 支持 - 拥有自己的域名?好上加好,绑定后即可在任何地方**无障碍**快速访问 @@ -31,14 +32,25 @@ One-Click to deploy your own ChatGPT web UI. 
- **Deploy for free with one-click** on Vercel in under 1 minute
- Responsive design, and dark mode
- Fast first screen loading speed (~85kb)
+- Awesome prompts powered by [awesome-chatgpt-prompts-zh](https://github.com/PlexPt/awesome-chatgpt-prompts-zh) and [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts)
- Automatically compresses chat history to support long conversations while also saving your tokens
- One-click export all chat history with full Markdown support

-## 使用
+## 开发计划 Roadmap
+- System Prompt: pin a user defined prompt as system prompt 为每个对话设置系统 Prompt [#138](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/138)
+- User Prompt: user can edit and save custom prompts to prompt list 允许用户自行编辑内置 Prompt 列表
+- Self-host Model: support llama, alpaca, ChatGLM, BELLE etc. 支持自部署的大语言模型
+- Plugins: support network search, calculator, any other APIs etc. 插件机制,支持联网搜索、计算器、调用其他平台 api [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165)
+
+### 不会开发的功能 Not in Plan
+- User login, accounts, cloud sync 用户登陆、账号管理、消息云同步
+- UI text customization 界面文字自定义
+
+## 开始使用

1. 准备好你的 [OpenAI API Key](https://platform.openai.com/account/api-keys);
2. 点击右侧按钮开始部署:
- [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登陆即可,记得在环境变量页填入 API Key;
+ [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web),直接使用 Github 账号登陆即可,记得在环境变量页填入 API Key;
3. 部署完毕后,即可开始使用;
4. (可选)[绑定自定义域名](https://vercel.com/docs/concepts/projects/domains/add-a-domain):Vercel 分配的域名 DNS 在某些区域被污染了,绑定自定义域名即可直连。

@@ -46,7 +58,7 @@
1. Get [OpenAI API Key](https://platform.openai.com/account/api-keys);
2. Click
- [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web);
+ [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web);
3. Enjoy :)

## 保持更新 Keep Updated

@@ -76,9 +88,9 @@ This project will be continuously maintained. If you want to keep the code repos

You can star or watch this project or follow author to get release notifications in time.

-## 访问控制 Access Control
+## 配置密码 Password

-本项目提供有限的权限控制功能,请在环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义控制码:
+本项目提供有限的权限控制功能,请在 Vercel 项目控制面板的环境变量页增加名为 `CODE` 的环境变量,值为用英文逗号分隔的自定义密码:

```
code1,code2,code3
@@ -86,7 +98,7 @@ code1,code2,code3

增加或修改该环境变量后,请**重新部署**项目使改动生效。

-This project provides limited access control. Please add an environment variable named `CODE` on the environment variables page. The value should be a custom control code separated by comma like this:
+This project provides limited access control. Please add an environment variable named `CODE` on the Vercel environment variables page. The value should be passwords separated by comma like this:

```
code1,code2,code3
@@ -94,6 +106,38 @@ code1,code2,code3

After adding or modifying this environment variable, please redeploy the project for the changes to take effect.

+## 环境变量 Environment Variables
+
+### `OPENAI_API_KEY` (required)
+
+OpenAI 密钥。
+
+Your OpenAI API key.
+
+### `CODE` (optional)
+
+访问密码,可选,可以使用逗号隔开多个密码。
+
+Access passwords, separated by comma.
+
+### `BASE_URL` (optional)
+
+> Default: `api.openai.com`
+
+OpenAI 接口代理 URL。
+
+Override the OpenAI API request base URL.
+
+### `PROTOCOL` (optional)
+
+> Default: `https`
+
+> Values: `http` | `https`
+
+OpenAI 接口协议。
+
+Override the OpenAI API request protocol.
+
## 开发 Development

点击下方按钮,开始二次开发:

@@ -117,16 +161,8 @@ OPENAI_API_KEY=

### 本地部署 Local Deployment

-请直接询问 ChatGPT,使用下列 Prompt:
-
-```
-如何使用 pm2 和 yarn 部署 nextjs 项目到 ubuntu 服务器上,项目编译命令为 yarn build,启动命令为 yarn start,启动时需要设置环境变量为 OPENAI_API_KEY,端口为 3000,使用 ngnix 做反向代理
-```
-
-Please ask ChatGPT with prompt:
-
-```
-how to deploy nextjs project with pm2 and yarn on my ubuntu server, the build command is `yarn build`, the start command is `yarn start`, the project must start with env var named `OPENAI_API_KEY`, the port is 3000, use ngnix
+```shell
+bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh)
```

### 容器部署 Docker Deployment

@@ -143,15 +179,12 @@ docker run -d -p 3000:3000 -e OPENAI_API_KEY="" -e CODE="" yidadaa/chatgpt-next-

![更多展示 More](./static/more.png)

-## 说明 Attention
-本项目的演示地址所用的 OpenAI 账户的免费额度将于 2023-04-01 过期,届时将无法通过演示地址在线体验。
-
-如果你想贡献出自己的 API Key,可以通过作者主页的邮箱发送给作者,并标注过期时间。
-
-The free trial of the OpenAI account used by the demo will expire on April 1, 2023, and the demo will not be available at that time.
-
-If you would like to contribute your API key, you can email it to the author and indicate the expiration date of the API key.
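For reference alongside the `CODE` documentation above: a minimal sketch of how a comma-separated access-code string of this form can be parsed into a set of accepted codes. The helper name `parseAccessCodes` is hypothetical; the project's real logic is `getAccessCodes()` in `app/api/access.ts`, which appears later in this diff, so this is only an illustration of the idea.

```ts
// Illustrative sketch only, not the project's implementation (see app/api/access.ts).
// Example: CODE="code1,code2,code3" yields Set { "code1", "code2", "code3" }.
function parseAccessCodes(raw?: string): Set<string> {
  return new Set(
    (raw ?? "")
      .split(",")
      .map((code) => code.trim())
      .filter((code) => code.length > 0),
  );
}

const ACCESS_CODES = parseAccessCodes(process.env.CODE);
```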
+## 捐赠 Donate USDT +> BNB Smart Chain (BEP 20) +``` +0x67cD02c7EB62641De576a1fA3EdB32eA0c3ffD89 +``` ## 鸣谢 Special Thanks @@ -159,13 +192,13 @@ If you would like to contribute your API key, you can email it to the author and [@mushan0x0](https://github.com/mushan0x0) [@ClarenceDan](https://github.com/ClarenceDan) +[@zhangjia](https://github.com/zhangjia) +[@hoochanlon](https://github.com/hoochanlon) ### 贡献者 Contributor -[@AprilNEA](https://github.com/AprilNEA) -[@iSource](https://github.com/iSource) -[@iFwu](https://github.com/iFwu) +[Contributors](https://github.com/Yidadaa/ChatGPT-Next-Web/graphs/contributors) ## LICENSE -- [Anti 996 License](https://github.com/kattgu7/Anti-996-License/blob/master/LICENSE_CN_EN) +[Anti 996 License](https://github.com/kattgu7/Anti-996-License/blob/master/LICENSE_CN_EN) diff --git a/app/api/access.ts b/app/api/access.ts index 13ada214..d3e4c9cf 100644 --- a/app/api/access.ts +++ b/app/api/access.ts @@ -14,3 +14,4 @@ export function getAccessCodes(): Set { } export const ACCESS_CODES = getAccessCodes(); +export const IS_IN_DOCKER = process.env.DOCKER; diff --git a/app/api/chat-stream/route.ts b/app/api/chat-stream/route.ts index ad40c6be..e7bdfc5f 100644 --- a/app/api/chat-stream/route.ts +++ b/app/api/chat-stream/route.ts @@ -1,26 +1,12 @@ import { createParser } from "eventsource-parser"; import { NextRequest } from "next/server"; +import { requestOpenai } from "../common"; async function createStream(req: NextRequest) { const encoder = new TextEncoder(); const decoder = new TextDecoder(); - let apiKey = process.env.OPENAI_API_KEY; - - const userApiKey = req.headers.get("token"); - if (userApiKey) { - apiKey = userApiKey; - console.log("[Stream] using user api key"); - } - - const res = await fetch("https://api.openai.com/v1/chat/completions", { - headers: { - "Content-Type": "application/json", - Authorization: `Bearer ${apiKey}`, - }, - method: "POST", - body: req.body, - }); + const res = await requestOpenai(req); const stream = new ReadableStream({ async start(controller) { diff --git a/app/api/chat/.gitignore b/app/api/chat/.gitignore deleted file mode 100644 index 1b8afd08..00000000 --- a/app/api/chat/.gitignore +++ /dev/null @@ -1 +0,0 @@ -config.ts \ No newline at end of file diff --git a/app/api/chat/route.ts b/app/api/chat/route.ts deleted file mode 100644 index 18c7db14..00000000 --- a/app/api/chat/route.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { OpenAIApi, Configuration } from "openai"; -import { ChatRequest } from "./typing"; - -export async function POST(req: Request) { - try { - let apiKey = process.env.OPENAI_API_KEY; - - const userApiKey = req.headers.get("token"); - if (userApiKey) { - apiKey = userApiKey; - } - - const openai = new OpenAIApi( - new Configuration({ - apiKey, - }) - ); - - const requestBody = (await req.json()) as ChatRequest; - const completion = await openai!.createChatCompletion({ - ...requestBody, - }); - - return new Response(JSON.stringify(completion.data)); - } catch (e) { - console.error("[Chat] ", e); - return new Response(JSON.stringify(e)); - } -} diff --git a/app/api/common.ts b/app/api/common.ts new file mode 100644 index 00000000..842eeaca --- /dev/null +++ b/app/api/common.ts @@ -0,0 +1,22 @@ +import { NextRequest } from "next/server"; + +const OPENAI_URL = "api.openai.com"; +const DEFAULT_PROTOCOL = "https"; +const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL; +const BASE_URL = process.env.BASE_URL ?? 
OPENAI_URL; + +export async function requestOpenai(req: NextRequest) { + const apiKey = req.headers.get("token"); + const openaiPath = req.headers.get("path"); + + console.log("[Proxy] ", openaiPath); + + return fetch(`${PROTOCOL}://${BASE_URL}/${openaiPath}`, { + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${apiKey}`, + }, + method: req.method, + body: req.body, + }); +} diff --git a/app/api/openai/route.ts b/app/api/openai/route.ts new file mode 100644 index 00000000..cc51dbfc --- /dev/null +++ b/app/api/openai/route.ts @@ -0,0 +1,30 @@ +import { NextRequest, NextResponse } from "next/server"; +import { requestOpenai } from "../common"; + +async function makeRequest(req: NextRequest) { + try { + const api = await requestOpenai(req); + const res = new NextResponse(api.body); + res.headers.set("Content-Type", "application/json"); + return res; + } catch (e) { + console.error("[OpenAI] ", req.body, e); + return NextResponse.json( + { + error: true, + msg: JSON.stringify(e), + }, + { + status: 500, + }, + ); + } +} + +export async function POST(req: NextRequest) { + return makeRequest(req); +} + +export async function GET(req: NextRequest) { + return makeRequest(req); +} diff --git a/app/api/chat/typing.ts b/app/api/openai/typing.ts similarity index 100% rename from app/api/chat/typing.ts rename to app/api/openai/typing.ts diff --git a/app/components/home.module.scss b/app/components/home.module.scss index 730c05ef..764805d8 100644 --- a/app/components/home.module.scss +++ b/app/components/home.module.scss @@ -26,13 +26,13 @@ @media only screen and (min-width: 600px) { .tight-container { --window-width: 100vw; - --window-height: 100vh; + --window-height: var(--full-height); --window-content-width: calc(100% - var(--sidebar-width)); @include container(); max-width: 100vw; - max-height: 100vh; + max-height: var(--full-height); border-radius: 0; } @@ -74,7 +74,7 @@ position: absolute; left: -100%; z-index: 999; - height: 100vh; + height: var(--full-height); transition: all ease 0.3s; box-shadow: none; } @@ -218,7 +218,14 @@ flex: 1; overflow: auto; padding: 20px; - margin-bottom: 100px; +} + +.chat-body-title { + cursor: pointer; + + &:hover { + text-decoration: underline; + } } .chat-message { @@ -292,6 +299,7 @@ position: absolute; right: 20px; top: -26px; + left: 100px; transition: all ease 0.3s; opacity: 0; pointer-events: none; @@ -302,6 +310,7 @@ .chat-message-top-action { opacity: 0.5; color: var(--black); + white-space: nowrap; cursor: pointer; &:hover { @@ -332,12 +341,63 @@ } .chat-input-panel { - position: absolute; - bottom: 20px; - display: flex; width: 100%; padding: 20px; box-sizing: border-box; + flex-direction: column; +} + +@mixin single-line { + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + +.prompt-hints { + min-height: 20px; + width: 100%; + max-height: 50vh; + overflow: auto; + display: flex; + flex-direction: column-reverse; + + background-color: var(--white); + border: var(--border-in-light); + border-radius: 10px; + margin-bottom: 10px; + box-shadow: var(--shadow); + + .prompt-hint { + color: var(--black); + padding: 6px 10px; + animation: slide-in ease 0.3s; + cursor: pointer; + transition: all ease 0.3s; + border: transparent 1px solid; + margin: 4px; + border-radius: 8px; + + &:not(:last-child) { + margin-top: 0; + } + + .hint-title { + font-size: 12px; + font-weight: bolder; + + @include single-line(); + } + .hint-content { + font-size: 12px; + + @include single-line(); + } + + &-selected, + &:hover { + 
border-color: var(--primary); + } + } } .chat-input-panel-inner { @@ -354,7 +414,7 @@ background-color: var(--white); color: var(--black); font-family: inherit; - padding: 10px 14px; + padding: 10px 14px 50px; resize: none; outline: none; } @@ -375,7 +435,7 @@ position: absolute; right: 30px; - bottom: 10px; + bottom: 30px; } .export-content { diff --git a/app/components/home.tsx b/app/components/home.tsx index 2526f232..6f744cd9 100644 --- a/app/components/home.tsx +++ b/app/components/home.tsx @@ -1,6 +1,7 @@ "use client"; import { useState, useRef, useEffect, useLayoutEffect } from "react"; +import { useDebouncedCallback } from "use-debounce"; import { IconButton } from "./button"; import styles from "./home.module.scss"; @@ -22,12 +23,19 @@ import DownloadIcon from "../icons/download.svg"; import { Message, SubmitKey, useChatStore, ChatSession } from "../store"; import { showModal, showToast } from "./ui-lib"; -import { copyToClipboard, downloadAs, isIOS, selectOrCopy } from "../utils"; +import { + copyToClipboard, + downloadAs, + isIOS, + isMobileScreen, + selectOrCopy, +} from "../utils"; import Locale from "../locales"; import dynamic from "next/dynamic"; import { REPO_URL } from "../constant"; import { ControllerPool } from "../requests"; +import { Prompt, usePromptStore } from "../store/prompt"; export function Loading(props: { noLogo?: boolean }) { return ( @@ -100,7 +108,7 @@ export function ChatList() { state.currentSessionIndex, state.selectSession, state.removeSession, - ] + ], ); return ( @@ -113,7 +121,7 @@ export function ChatList() { key={i} selected={i === selectedIndex} onClick={() => selectSession(i)} - onDelete={() => removeSession(i)} + onDelete={() => confirm(Locale.Home.DeleteChat) && removeSession(i)} /> ))} @@ -124,17 +132,19 @@ function useSubmitHandler() { const config = useChatStore((state) => state.config); const submitKey = config.submitKey; - const shouldSubmit = (e: KeyboardEvent) => { + const shouldSubmit = (e: React.KeyboardEvent) => { if (e.key !== "Enter") return false; - + if (e.key === "Enter" && e.nativeEvent.isComposing) return false; return ( (config.submitKey === SubmitKey.AltEnter && e.altKey) || (config.submitKey === SubmitKey.CtrlEnter && e.ctrlKey) || (config.submitKey === SubmitKey.ShiftEnter && e.shiftKey) || + (config.submitKey === SubmitKey.MetaEnter && e.metaKey) || (config.submitKey === SubmitKey.Enter && !e.altKey && !e.ctrlKey && - !e.shiftKey) + !e.shiftKey && + !e.metaKey) ); }; @@ -144,25 +154,99 @@ function useSubmitHandler() { }; } -export function Chat(props: { showSideBar?: () => void }) { +export function PromptHints(props: { + prompts: Prompt[]; + onPromptSelect: (prompt: Prompt) => void; +}) { + if (props.prompts.length === 0) return null; + + return ( +
+      <div className={styles["prompt-hints"]}>
+        {props.prompts.map((prompt, i) => (
+          <div
+            className={styles["prompt-hint"]}
+            key={prompt.title + i.toString()}
+            onClick={() => props.onPromptSelect(prompt)}
+          >
+            <div className={styles["hint-title"]}>{prompt.title}</div>
+            <div className={styles["hint-content"]}>{prompt.content}</div>
+          </div>
+        ))}
+      </div>
+ ); +} + +export function Chat(props: { + showSideBar?: () => void; + sideBarShowing?: boolean; +}) { type RenderMessage = Message & { preview?: boolean }; + const chatStore = useChatStore(); const [session, sessionIndex] = useChatStore((state) => [ state.currentSession(), state.currentSessionIndex, ]); + const fontSize = useChatStore((state) => state.config.fontSize); + + const inputRef = useRef(null); const [userInput, setUserInput] = useState(""); const [isLoading, setIsLoading] = useState(false); const { submitKey, shouldSubmit } = useSubmitHandler(); - const onUserInput = useChatStore((state) => state.onUserInput); + // prompt hints + const promptStore = usePromptStore(); + const [promptHints, setPromptHints] = useState([]); + const onSearch = useDebouncedCallback( + (text: string) => { + setPromptHints(promptStore.search(text)); + }, + 100, + { leading: true, trailing: true }, + ); + + const onPromptSelect = (prompt: Prompt) => { + setUserInput(prompt.content); + setPromptHints([]); + inputRef.current?.focus(); + }; + + const scrollInput = () => { + const dom = inputRef.current; + if (!dom) return; + const paddingBottomNum: number = parseInt( + window.getComputedStyle(dom).paddingBottom, + 10, + ); + dom.scrollTop = dom.scrollHeight - dom.offsetHeight + paddingBottomNum; + }; + + // only search prompts when user input is short + const SEARCH_TEXT_LIMIT = 30; + const onInput = (text: string) => { + scrollInput(); + setUserInput(text); + const n = text.trim().length; + + // clear search results + if (n === 0) { + setPromptHints([]); + } else if (!chatStore.config.disablePromptHint && n < SEARCH_TEXT_LIMIT) { + // check if need to trigger auto completion + if (text.startsWith("/") && text.length > 1) { + onSearch(text.slice(1)); + } + } + }; // submit user input const onUserSubmit = () => { if (userInput.length <= 0) return; setIsLoading(true); - onUserInput(userInput).then(() => setIsLoading(false)); + chatStore.onUserInput(userInput).then(() => setIsLoading(false)); setUserInput(""); + setPromptHints([]); + inputRef.current?.focus(); }; // stop response @@ -172,7 +256,7 @@ export function Chat(props: { showSideBar?: () => void }) { }; // check if should send message - const onInputKeyDown = (e: KeyboardEvent) => { + const onInputKeyDown = (e: React.KeyboardEvent) => { if (shouldSubmit(e)) { onUserSubmit(); e.preventDefault(); @@ -195,7 +279,10 @@ export function Chat(props: { showSideBar?: () => void }) { for (let i = botIndex; i >= 0; i -= 1) { if (messages[i].role === "user") { setIsLoading(true); - onUserInput(messages[i].content).then(() => setIsLoading(false)); + chatStore + .onUserInput(messages[i].content) + .then(() => setIsLoading(false)); + inputRef.current?.focus(); return; } } @@ -203,9 +290,7 @@ export function Chat(props: { showSideBar?: () => void }) { // for auto-scroll const latestMessageRef = useRef(null); - - // wont scroll while hovering messages - const [autoScroll, setAutoScroll] = useState(false); + const [autoScroll, setAutoScroll] = useState(true); // preview messages const messages = (session.messages as RenderMessage[]) @@ -219,7 +304,7 @@ export function Chat(props: { showSideBar?: () => void }) { preview: true, }, ] - : [] + : [], ) .concat( userInput.length > 0 @@ -231,16 +316,25 @@ export function Chat(props: { showSideBar?: () => void }) { preview: true, }, ] - : [] + : [], ); // auto scroll useLayoutEffect(() => { setTimeout(() => { const dom = latestMessageRef.current; - if (dom && !isIOS() && autoScroll) { + const inputDom = inputRef.current; + + // 
only scroll when input overlaped message body + let shouldScroll = true; + if (dom && inputDom) { + const domRect = dom.getBoundingClientRect(); + const inputRect = inputDom.getBoundingClientRect(); + shouldScroll = domRect.top > inputRect.top; + } + + if (dom && autoScroll && shouldScroll) { dom.scrollIntoView({ - behavior: "smooth", block: "end", }); } @@ -254,7 +348,17 @@ export function Chat(props: { showSideBar?: () => void }) { className={styles["window-header-title"]} onClick={props?.showSideBar} > -
+
{ + const newTopic = prompt(Locale.Chat.Rename, session.topic); + if (newTopic && newTopic !== session.topic) { + chatStore.updateCurrentSession( + (session) => (session.topic = newTopic!), + ); + } + }} + > {session.topic}
@@ -314,39 +418,45 @@ export function Chat(props: { showSideBar?: () => void }) {
)}
- {!isUser && ( -
- {message.streaming ? ( -
onUserStop(i)} - > - {Locale.Chat.Actions.Stop} -
- ) : ( -
onResend(i)} - > - {Locale.Chat.Actions.Retry} -
- )} + {!isUser && + !(message.preview || message.content.length === 0) && ( +
+ {message.streaming ? ( +
onUserStop(i)} + > + {Locale.Chat.Actions.Stop} +
+ ) : ( +
onResend(i)} + > + {Locale.Chat.Actions.Retry} +
+ )} -
copyToClipboard(message.content)} - > - {Locale.Chat.Actions.Copy} +
copyToClipboard(message.content)} + > + {Locale.Chat.Actions.Copy} +
-
- )} + )} {(message.preview || message.content.length === 0) && !isUser ? ( ) : (
onRightClick(e, message)} + onDoubleClickCapture={() => { + if (!isMobileScreen()) return; + setUserInput(message.content); + }} >
@@ -363,23 +473,28 @@ export function Chat(props: { showSideBar?: () => void }) {
); })} -
+
-
+
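A note on the prompt-hint wiring above: the component imports `Prompt` and `usePromptStore` from `../store/prompt`, debounces `promptStore.search()` by 100 ms, and only triggers it for short inputs that start with `/`. The store itself is not part of this excerpt, so the sketch below is an assumed shape plus a naive substring matcher, not the project's actual implementation.

```ts
// Assumed shape of the prompt store relied on by PromptHints and onSearch above.
// The real store lives in app/store/prompt.ts, which this diff does not show.
export interface Prompt {
  title: string;
  content: string;
}

export interface PromptStore {
  prompts: Prompt[];
  search: (text: string) => Prompt[];
}

// Naive stand-in matcher: case-insensitive substring match on title or content.
export function searchPrompts(prompts: Prompt[], text: string): Prompt[] {
  const needle = text.toLowerCase();
  return prompts.filter(
    (p) =>
      p.title.toLowerCase().includes(needle) ||
      p.content.toLowerCase().includes(needle),
  );
}
```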