forked from XiaoMo/ChatGPT-Next-Web
Merge pull request #1222 from Yidadaa/proxy-api
refactor: merge /api/chat-stream to /api/openai
This commit is contained in: commit 5f8fc3d155
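In short, the dedicated `/api/chat-stream` and old `/api/openai` proxy routes are replaced by a single catch-all `/api/openai/[...path]` route, and the client now puts the OpenAI path in the URL and its key or access code in a standard `Authorization` header. A minimal sketch of the new client-side call, based on the `app/requests.ts` changes below (the function name, model, and payload are illustrative assumptions, not part of the commit):

```ts
// Sketch only: the OpenAI path is part of the URL and the key or access code
// travels as a bearer token; the payload below is illustrative.
async function chatCompletions(keyOrAccessCode: string) {
  return fetch("/api/openai/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${keyOrAccessCode.trim()}`,
    },
    body: JSON.stringify({
      model: "gpt-3.5-turbo", // illustrative model name
      messages: [{ role: "user", content: "hello" }],
      stream: true,
    }),
  });
}
```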
README.md (12 changes)
@@ -13,7 +13,6 @@ One-Click to deploy well-designed ChatGPT web UI on Vercel.

[演示](https://chat-gpt-next-web.vercel.app/) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [QQ 群](https://user-images.githubusercontent.com/16968934/234462588-e8eff256-f5ca-46ef-8f5f-d7db6d28735a.jpg) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg)

[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)

[![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)

@@ -50,6 +49,7 @@ One-Click to deploy well-designed ChatGPT web UI on Vercel.

- UI text customize

## What's New

- 🚀 v2.0 is released, now you can create prompt templates, turn your ideas into reality! Read this: [ChatGPT Prompt Engineering Tips: Zero, One and Few Shot Prompting](https://www.allabtai.com/prompt-engineering-tips-zero-one-and-few-shot-prompting/).

## 主要功能

@@ -80,10 +80,9 @@ One-Click to deploy well-designed ChatGPT web UI on Vercel.

- 用户登录、账号管理、消息云同步

## 最新动态

- 🚀 v2.0 已经发布,现在你可以使用面具功能快速创建预制对话了! 了解更多: [ChatGPT 提示词高阶技能:零次、一次和少样本提示](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/138)。

## Get Started

> [简体中文 > 如何开始使用](./README_CN.md#开始使用)

@@ -163,6 +162,12 @@ Override openai api request base url.

Specify OpenAI organization ID.

### `HIDE_USER_API_KEY` (optional)

> Default: Empty

If you do not want users to input their own API key, set this environment variable to 1.
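For reference, a minimal sketch of how this flag reaches the UI in this commit: the server maps the environment variable to `hideUserApiKey`, exposes it through the `/api/config` route changed later in this diff, and the settings page hides the API key input when it is true. The helper name and response typing below are assumptions for illustration:

```ts
// Sketch only: fetch the public runtime config the server exposes at /api/config.
async function fetchClientConfig() {
  const res = await fetch("/api/config", { method: "POST" });
  return (await res.json()) as { needCode: boolean; hideUserApiKey: boolean };
}
```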
## Development

> [简体中文 > 如何进行二次开发](./README_CN.md#开发)

@@ -249,7 +254,6 @@ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh)

[@WingCH](https://github.com/WingCH)
[@jtung4](https://github.com/jtung4)

### Contributor

[Contributors](https://github.com/Yidadaa/ChatGPT-Next-Web/graphs/contributors)
README_CN.md (12 changes)
@@ -33,6 +33,7 @@

- 在 Vercel 重新选择并部署,[请查看详细教程](./docs/vercel-cn.md#如何新建项目)。

### 打开自动更新

> 如果你遇到了 Upstream Sync 执行错误,请手动 Sync Fork 一次!

当你 fork 项目之后,由于 Github 的限制,需要手动去你 fork 后的项目的 Actions 页面启用 Workflows,并启用 Upstream Sync Action,启用之后即可开启每小时定时自动更新:

@@ -89,6 +90,10 @@ OpenAI 接口代理 URL,如果你手动配置了 openai 接口代理,请填

指定 OpenAI 中的组织 ID。

### `HIDE_USER_API_KEY` (可选)

如果你不想让用户自行填入 API Key,将此环境变量设置为 1 即可。

## 开发

> 强烈不建议在本地进行开发或者部署,由于一些技术原因,很难在本地配置好 OpenAI API 代理,除非你能保证可以直连 OpenAI 服务器。

@@ -106,15 +111,16 @@ OPENAI_API_KEY=<your api key here>

### 本地开发

1. 安装 nodejs 18 和 yarn,具体细节请询问 ChatGPT;
2. 执行 `yarn install && yarn dev` 即可。⚠️注意:此命令仅用于本地开发,不要用于部署!
2. 执行 `yarn install && yarn dev` 即可。⚠️ 注意:此命令仅用于本地开发,不要用于部署!
3. 如果你想本地部署,请使用 `yarn install && yarn start` 命令,你可以配合 pm2 来守护进程,防止被杀死,详情询问 ChatGPT。

## 部署

### 容器部署 (推荐)

> Docker 版本需要在 20 及其以上,否则会提示找不到镜像。

> ⚠️注意:docker 版本在大多数时间都会落后最新的版本 1 到 2 天,所以部署后会持续出现“存在更新”的提示,属于正常现象。
> ⚠️ 注意:docker 版本在大多数时间都会落后最新的版本 1 到 2 天,所以部署后会持续出现“存在更新”的提示,属于正常现象。

```shell
docker pull yidadaa/chatgpt-next-web

@@ -146,7 +152,7 @@ docker run -d -p 3000:3000 \

bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh)
```

⚠️注意:如果你安装过程中遇到了问题,请使用 docker 部署。
⚠️ 注意:如果你安装过程中遇到了问题,请使用 docker 部署。

## 鸣谢
app/api/auth.ts (new file, 70 lines)
@@ -0,0 +1,70 @@
import { NextRequest } from "next/server";
import { getServerSideConfig } from "../config/server";
import md5 from "spark-md5";

const serverConfig = getServerSideConfig();

function getIP(req: NextRequest) {
  let ip = req.ip ?? req.headers.get("x-real-ip");
  const forwardedFor = req.headers.get("x-forwarded-for");

  if (!ip && forwardedFor) {
    ip = forwardedFor.split(",").at(0) ?? "";
  }

  return ip;
}

function parseApiKey(bearToken: string) {
  const token = bearToken.trim().replaceAll("Bearer ", "").trim();
  const isOpenAiKey = token.startsWith("sk-");

  return {
    accessCode: isOpenAiKey ? "" : token,
    apiKey: isOpenAiKey ? token : "",
  };
}

export function auth(req: NextRequest) {
  const authToken = req.headers.get("Authorization") ?? "";

  // check if it is openai api key or user token
  const { accessCode, apiKey: token } = parseApiKey(authToken);

  const hashedCode = md5.hash(accessCode ?? "").trim();

  console.log("[Auth] allowed hashed codes: ", [...serverConfig.codes]);
  console.log("[Auth] got access code:", accessCode);
  console.log("[Auth] hashed access code:", hashedCode);
  console.log("[User IP] ", getIP(req));
  console.log("[Time] ", new Date().toLocaleString());

  if (serverConfig.needCode && !serverConfig.codes.has(hashedCode) && !token) {
    return {
      error: true,
      needAccessCode: true,
      msg: "Please go settings page and fill your access code.",
    };
  }

  // if user does not provide an api key, inject system api key
  if (!token) {
    const apiKey = serverConfig.apiKey;
    if (apiKey) {
      console.log("[Auth] use system api key");
      req.headers.set("Authorization", `Bearer ${apiKey}`);
    } else {
      console.log("[Auth] admin did not provide an api key");
      return {
        error: true,
        msg: "Empty Api Key",
      };
    }
  } else {
    console.log("[Auth] use user api key");
  }

  return {
    error: false,
  };
}
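Worth noting about the file above: a single `Authorization` bearer value carries either a real OpenAI key or an access code, and `parseApiKey` tells them apart by the `sk-` prefix. Hypothetical header values, shown for illustration only:

```ts
// Hypothetical values only, showing how parseApiKey would split them.
const userKeyHeader = { Authorization: "Bearer sk-abc123" }; // -> { apiKey: "sk-abc123", accessCode: "" }
const accessCodeHeader = { Authorization: "Bearer my-access-code" }; // -> { apiKey: "", accessCode: "my-access-code" }
```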
@@ -1,62 +0,0 @@
import { createParser } from "eventsource-parser";
import { NextRequest } from "next/server";
import { requestOpenai } from "../common";

async function createStream(req: NextRequest) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  const res = await requestOpenai(req);

  const contentType = res.headers.get("Content-Type") ?? "";
  if (!contentType.includes("stream")) {
    const content = await (
      await res.text()
    ).replace(/provided:.*. You/, "provided: ***. You");
    console.log("[Stream] error ", content);
    return "```json\n" + content + "```";
  }

  const stream = new ReadableStream({
    async start(controller) {
      function onParse(event: any) {
        if (event.type === "event") {
          const data = event.data;
          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
          if (data === "[DONE]") {
            controller.close();
            return;
          }
          try {
            const json = JSON.parse(data);
            const text = json.choices[0].delta.content;
            const queue = encoder.encode(text);
            controller.enqueue(queue);
          } catch (e) {
            controller.error(e);
          }
        }
      }

      const parser = createParser(onParse);
      for await (const chunk of res.body as any) {
        parser.feed(decoder.decode(chunk, { stream: true }));
      }
    },
  });
  return stream;
}

export async function POST(req: NextRequest) {
  try {
    const stream = await createStream(req);
    return new Response(stream);
  } catch (error) {
    console.error("[Chat Stream]", error);
    return new Response(
      ["```json\n", JSON.stringify(error, null, " "), "\n```"].join(""),
    );
  }
}

export const runtime = "edge";
@@ -6,8 +6,11 @@ const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL;
const BASE_URL = process.env.BASE_URL ?? OPENAI_URL;

export async function requestOpenai(req: NextRequest) {
  const apiKey = req.headers.get("token");
  const openaiPath = req.headers.get("path");
  const authValue = req.headers.get("Authorization") ?? "";
  const openaiPath = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
    "/api/openai/",
    "",
  );
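  // Note: the expression above strips the route prefix. For a hypothetical request to
  // /api/openai/v1/chat/completions?stream=true, openaiPath becomes
  // "v1/chat/completions?stream=true", which is appended to baseUrl below.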

  let baseUrl = BASE_URL;

@@ -22,10 +25,14 @@ export async function requestOpenai(req: NextRequest) {
    console.log("[Org ID]", process.env.OPENAI_ORG_ID);
  }
  if (!authValue || !authValue.startsWith("Bearer sk-")) {
    console.error("[OpenAI Request] invalid api key provided", authValue);
  }

  return fetch(`${baseUrl}/${openaiPath}`, {
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
      Authorization: authValue,
      ...(process.env.OPENAI_ORG_ID && {
        "OpenAI-Organization": process.env.OPENAI_ORG_ID,
      }),
@@ -8,16 +8,15 @@ const serverConfig = getServerSideConfig();
// 警告!不要在这里写入任何敏感信息!
const DANGER_CONFIG = {
  needCode: serverConfig.needCode,
  hideUserApiKey: serverConfig.hideUserApiKey,
};

declare global {
  type DangerConfig = typeof DANGER_CONFIG;
}

export async function POST(req: NextRequest) {
  return NextResponse.json({
    needCode: serverConfig.needCode,
  });
export async function POST() {
  return NextResponse.json(DANGER_CONFIG);
}

export const runtime = "edge";
app/api/openai/[...path]/route.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
import { createParser } from "eventsource-parser";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { requestOpenai } from "../../common";

async function createStream(res: Response) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  const stream = new ReadableStream({
    async start(controller) {
      function onParse(event: any) {
        if (event.type === "event") {
          const data = event.data;
          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
          if (data === "[DONE]") {
            controller.close();
            return;
          }
          try {
            const json = JSON.parse(data);
            const text = json.choices[0].delta.content;
            const queue = encoder.encode(text);
            controller.enqueue(queue);
          } catch (e) {
            controller.error(e);
          }
        }
      }

      const parser = createParser(onParse);
      for await (const chunk of res.body as any) {
        parser.feed(decoder.decode(chunk, { stream: true }));
      }
    },
  });
  return stream;
}

function formatResponse(msg: any) {
  const jsonMsg = ["```json\n", JSON.stringify(msg, null, " "), "\n```"].join(
    "",
  );
  return new Response(jsonMsg);
}

async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[OpenAI Route] params ", params);

  const authResult = auth(req);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const api = await requestOpenai(req);

    const contentType = api.headers.get("Content-Type") ?? "";

    // streaming response
    if (contentType.includes("stream")) {
      const stream = await createStream(api);
      const res = new Response(stream);
      res.headers.set("Content-Type", contentType);
      return res;
    }

    // try to parse error msg
    try {
      const mayBeErrorBody = await api.json();
      if (mayBeErrorBody.error) {
        console.error("[OpenAI Response] ", mayBeErrorBody);
        return formatResponse(mayBeErrorBody);
      } else {
        const res = new Response(JSON.stringify(mayBeErrorBody));
        res.headers.set("Content-Type", "application/json");
        res.headers.set("Cache-Control", "no-cache");
        return res;
      }
    } catch (e) {
      console.error("[OpenAI Parse] ", e);
      return formatResponse({
        msg: "invalid response from openai server",
        error: e,
      });
    }
  } catch (e) {
    console.error("[OpenAI] ", e);
    return formatResponse(e);
  }
}

export const GET = handle;
export const POST = handle;

export const runtime = "edge";
@@ -1,33 +0,0 @@
import { NextRequest, NextResponse } from "next/server";
import { requestOpenai } from "../common";

async function makeRequest(req: NextRequest) {
  try {
    const api = await requestOpenai(req);
    const res = new NextResponse(api.body);
    res.headers.set("Content-Type", "application/json");
    res.headers.set("Cache-Control", "no-cache");
    return res;
  } catch (e) {
    console.error("[OpenAI] ", req.body, e);
    return NextResponse.json(
      {
        error: true,
        msg: JSON.stringify(e),
      },
      {
        status: 500,
      },
    );
  }
}

export async function POST(req: NextRequest) {
  return makeRequest(req);
}

export async function GET(req: NextRequest) {
  return makeRequest(req);
}

export const runtime = "edge";
@@ -183,6 +183,19 @@ function UserPromptModal(props: { onClose?: () => void }) {
  );
}

function formatVersionDate(t: string) {
  const d = new Date(+t);
  const year = d.getUTCFullYear();
  const month = d.getUTCMonth() + 1;
  const day = d.getUTCDate();

  return [
    year.toString(),
    month.toString().padStart(2, "0"),
    day.toString().padStart(2, "0"),
  ].join("");
}
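// Worked example (hypothetical input): versions are now git commit timestamps in
// milliseconds, so formatVersionDate("1683000000000") returns "20230502" (2023-05-02 UTC).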

export function Settings() {
  const navigate = useNavigate();
  const [showEmojiPicker, setShowEmojiPicker] = useState(false);

@@ -193,8 +206,8 @@ export function Settings() {

  const updateStore = useUpdateStore();
  const [checkingUpdate, setCheckingUpdate] = useState(false);
  const currentVersion = updateStore.version;
  const remoteId = updateStore.remoteVersion;
  const currentVersion = formatVersionDate(updateStore.version);
  const remoteId = formatVersionDate(updateStore.remoteVersion);
  const hasNewVersion = currentVersion !== remoteId;

  function checkUpdate(force = false) {

@@ -202,6 +215,15 @@
    updateStore.getLatestVersion(force).then(() => {
      setCheckingUpdate(false);
    });

    console.log(
      "[Update] local version ",
      new Date(+updateStore.version).toLocaleString(),
    );
    console.log(
      "[Update] remote version ",
      new Date(+updateStore.remoteVersion).toLocaleString(),
    );
  }

  const usage = {

@@ -466,19 +488,21 @@
            <></>
          )}

          <ListItem
            title={Locale.Settings.Token.Title}
            subTitle={Locale.Settings.Token.SubTitle}
          >
            <PasswordInput
              value={accessStore.token}
              type="text"
              placeholder={Locale.Settings.Token.Placeholder}
              onChange={(e) => {
                accessStore.updateToken(e.currentTarget.value);
              }}
            />
          </ListItem>
          {!accessStore.hideUserApiKey ? (
            <ListItem
              title={Locale.Settings.Token.Title}
              subTitle={Locale.Settings.Token.SubTitle}
            >
              <PasswordInput
                value={accessStore.token}
                type="text"
                placeholder={Locale.Settings.Token.Placeholder}
                onChange={(e) => {
                  accessStore.updateToken(e.currentTarget.value);
                }}
              />
            </ListItem>
          ) : null}

          <ListItem
            title={Locale.Settings.Usage.Title}
@@ -163,6 +163,7 @@ export function SideBar(props: { className?: string }) {
          onClick={() => {
            if (config.dontShowMaskSplashScreen) {
              chatStore.newSession();
              navigate(Path.Chat);
            } else {
              navigate(Path.NewChat);
            }
@@ -1,13 +1,10 @@
const COMMIT_ID: string = (() => {
  try {
    const childProcess = require("child_process");
    return (
      childProcess
        // .execSync("git describe --tags --abbrev=0")
        .execSync("git rev-parse --short HEAD")
        .toString()
        .trim()
    );
    return childProcess
      .execSync('git log -1 --format="%at000" --date=unix')
      .toString()
      .trim();
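    // Note: git's %at is the commit time in unix seconds; the hard-coded "000" suffix
    // turns it into a millisecond-style string that new Date(+t) can parse on the client.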
  } catch (e) {
    console.error("[Build Config] No git or not from git repo.");
    return "unknown";
@@ -7,6 +7,7 @@ declare global {
      CODE?: string;
      PROXY_URL?: string;
      VERCEL?: string;
      HIDE_USER_API_KEY?: string; // disable user's api key input
    }
  }
}

@@ -38,5 +39,6 @@ export const getServerSideConfig = () => {
    needCode: ACCESS_CODES.size > 0,
    proxyUrl: process.env.PROXY_URL,
    isVercel: !!process.env.VERCEL,
    hideUserApiKey: !!process.env.HIDE_USER_API_KEY,
  };
};
@@ -44,29 +44,33 @@ const makeRequestParam = (

function getHeaders() {
  const accessStore = useAccessStore.getState();
  let headers: Record<string, string> = {};
  const headers = {
    Authorization: "",
  };

  if (accessStore.enabledAccessControl()) {
    headers["access-code"] = accessStore.accessCode;
  }
  const makeBearer = (token: string) => `Bearer ${token.trim()}`;
  const validString = (x: string) => x && x.length > 0;

  if (accessStore.token && accessStore.token.length > 0) {
    headers["token"] = accessStore.token;
  // use user's api key first
  if (validString(accessStore.token)) {
    headers.Authorization = makeBearer(accessStore.token);
  } else if (
    accessStore.enabledAccessControl() &&
    validString(accessStore.accessCode)
  ) {
    headers.Authorization = makeBearer(accessStore.accessCode);
  }

  return headers;
}

export function requestOpenaiClient(path: string) {
  const openaiUrl = useAccessStore.getState().openaiUrl;
  return (body: any, method = "POST") =>
    fetch("/api/openai", {
    fetch(openaiUrl + path, {
      method,
      headers: {
        "Content-Type": "application/json",
        path,
        ...getHeaders(),
      },
      body: body && JSON.stringify(body),
      headers: getHeaders(),
    });
}

@@ -161,16 +165,17 @@ export async function requestChatStream(
  const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);

  try {
    const res = await fetch("/api/chat-stream", {
    const openaiUrl = useAccessStore.getState().openaiUrl;
    const res = await fetch(openaiUrl + "v1/chat/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        path: "v1/chat/completions",
        ...getHeaders(),
      },
      body: JSON.stringify(req),
      signal: controller.signal,
    });

    clearTimeout(reqTimeoutId);

    let responseText = "";
@@ -1,12 +1,15 @@
import { create } from "zustand";
import { persist } from "zustand/middleware";
import { StoreKey } from "../constant";
import { BOT_HELLO } from "./chat";

export interface AccessControlStore {
  accessCode: string;
  token: string;

  needCode: boolean;
  hideUserApiKey: boolean;
  openaiUrl: string;

  updateToken: (_: string) => void;
  updateCode: (_: string) => void;

@@ -23,16 +26,19 @@ export const useAccessStore = create<AccessControlStore>()(
      token: "",
      accessCode: "",
      needCode: true,
      hideUserApiKey: false,
      openaiUrl: "/api/openai/",

      enabledAccessControl() {
        get().fetch();

        return get().needCode;
      },
      updateCode(code: string) {
        set((state) => ({ accessCode: code }));
        set(() => ({ accessCode: code }));
      },
      updateToken(token: string) {
        set((state) => ({ token }));
        set(() => ({ token }));
      },
      isAuthorized() {
        // has token or has code or disabled access control

@@ -51,6 +57,10 @@ export const useAccessStore = create<AccessControlStore>()(
          .then((res: DangerConfig) => {
            console.log("[Config] got config from server", res);
            set(() => ({ ...res }));

            if ((res as any).botHello) {
              BOT_HELLO.content = (res as any).botHello;
            }
          })
          .catch(() => {
            console.error("[Config] failed to fetch config");
@@ -53,10 +53,9 @@ export const useUpdateStore = create<UpdateStore>()(
        }));

        try {
          // const data = await (await fetch(FETCH_TAG_URL)).json();
          // const remoteId = data[0].name as string;
          const data = await (await fetch(FETCH_COMMIT_URL)).json();
          const remoteId = (data[0].sha as string).substring(0, 7);
          const remoteCommitTime = data[0].commit.committer.date;
          const remoteId = new Date(remoteCommitTime).getTime().toString();
          set(() => ({
            remoteVersion: remoteId,
          }));
@@ -1,72 +0,0 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "./app/config/server";
import md5 from "spark-md5";

export const config = {
  matcher: ["/api/openai", "/api/chat-stream"],
};

const serverConfig = getServerSideConfig();

function getIP(req: NextRequest) {
  let ip = req.ip ?? req.headers.get("x-real-ip");
  const forwardedFor = req.headers.get("x-forwarded-for");

  if (!ip && forwardedFor) {
    ip = forwardedFor.split(",").at(0) ?? "";
  }

  return ip;
}

export function middleware(req: NextRequest) {
  const accessCode = req.headers.get("access-code");
  const token = req.headers.get("token");
  const hashedCode = md5.hash(accessCode ?? "").trim();

  console.log("[Auth] allowed hashed codes: ", [...serverConfig.codes]);
  console.log("[Auth] got access code:", accessCode);
  console.log("[Auth] hashed access code:", hashedCode);
  console.log("[User IP] ", getIP(req));
  console.log("[Time] ", new Date().toLocaleString());

  if (serverConfig.needCode && !serverConfig.codes.has(hashedCode) && !token) {
    return NextResponse.json(
      {
        error: true,
        needAccessCode: true,
        msg: "Please go settings page and fill your access code.",
      },
      {
        status: 401,
      },
    );
  }

  // inject api key
  if (!token) {
    const apiKey = serverConfig.apiKey;
    if (apiKey) {
      console.log("[Auth] set system token");
      req.headers.set("token", apiKey);
    } else {
      return NextResponse.json(
        {
          error: true,
          msg: "Empty Api Key",
        },
        {
          status: 401,
        },
      );
    }
  } else {
    console.log("[Auth] set user token");
  }

  return NextResponse.next({
    request: {
      headers: req.headers,
    },
  });
}
@@ -1,18 +0,0 @@
/** @type {import('next').NextConfig} */

const nextConfig = {
  experimental: {
    appDir: true,
  },
  webpack(config) {
    config.module.rules.push({
      test: /\.svg$/,
      use: ["@svgr/webpack"],
    });

    return config;
  },
  output: "standalone",
};

module.exports = nextConfig;
next.config.mjs (new file, 32 lines)
@@ -0,0 +1,32 @@
/** @type {import('next').NextConfig} */

const nextConfig = {
  experimental: {
    appDir: true,
  },
  async rewrites() {
    const ret = [];

    const apiUrl = process.env.API_URL;
    if (apiUrl) {
      console.log("[Next] using api url ", apiUrl);
      ret.push({
        source: "/api/:path*",
        destination: `${apiUrl}/:path*`,
      });
    }

    return ret;
  },
  webpack(config) {
    config.module.rules.push({
      test: /\.svg$/,
      use: ["@svgr/webpack"],
    });

    return config;
  },
  output: "standalone",
};

export default nextConfig;
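The new `rewrites()` block means that when `API_URL` is set, Next.js forwards every `/api/:path*` request to that backend rather than handling it locally. A hypothetical mapping, with an illustrative host name:

```ts
// Assuming API_URL=https://backend.example.com (hypothetical):
//   /api/openai/v1/chat/completions  ->  https://backend.example.com/openai/v1/chat/completions
//   /api/config                      ->  https://backend.example.com/config
```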