forked from XiaoMo/ChatGPT-Next-Web
refactor: merge /api/chat-stream to /api/openai
parent e0053d57f7
commit ef5b7ce853
@@ -1,62 +0,0 @@
-import { createParser } from "eventsource-parser";
-import { NextRequest } from "next/server";
-import { requestOpenai } from "../common";
-
-async function createStream(req: NextRequest) {
-  const encoder = new TextEncoder();
-  const decoder = new TextDecoder();
-
-  const res = await requestOpenai(req);
-
-  const contentType = res.headers.get("Content-Type") ?? "";
-  if (!contentType.includes("stream")) {
-    const content = await (
-      await res.text()
-    ).replace(/provided:.*. You/, "provided: ***. You");
-    console.log("[Stream] error ", content);
-    return "```json\n" + content + "```";
-  }
-
-  const stream = new ReadableStream({
-    async start(controller) {
-      function onParse(event: any) {
-        if (event.type === "event") {
-          const data = event.data;
-          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
-          if (data === "[DONE]") {
-            controller.close();
-            return;
-          }
-          try {
-            const json = JSON.parse(data);
-            const text = json.choices[0].delta.content;
-            const queue = encoder.encode(text);
-            controller.enqueue(queue);
-          } catch (e) {
-            controller.error(e);
-          }
-        }
-      }
-
-      const parser = createParser(onParse);
-      for await (const chunk of res.body as any) {
-        parser.feed(decoder.decode(chunk, { stream: true }));
-      }
-    },
-  });
-  return stream;
-}
-
-export async function POST(req: NextRequest) {
-  try {
-    const stream = await createStream(req);
-    return new Response(stream);
-  } catch (error) {
-    console.error("[Chat Stream]", error);
-    return new Response(
-      ["```json\n", JSON.stringify(error, null, " "), "\n```"].join(""),
-    );
-  }
-}
-
-export const runtime = "edge";
@@ -1,24 +1,82 @@
+import { createParser } from "eventsource-parser";
 import { NextRequest, NextResponse } from "next/server";
 import { requestOpenai } from "../common";
 
+async function createStream(res: Response) {
+  const encoder = new TextEncoder();
+  const decoder = new TextDecoder();
+
+  const stream = new ReadableStream({
+    async start(controller) {
+      function onParse(event: any) {
+        if (event.type === "event") {
+          const data = event.data;
+          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
+          if (data === "[DONE]") {
+            controller.close();
+            return;
+          }
+          try {
+            const json = JSON.parse(data);
+            const text = json.choices[0].delta.content;
+            const queue = encoder.encode(text);
+            controller.enqueue(queue);
+          } catch (e) {
+            controller.error(e);
+          }
+        }
+      }
+
+      const parser = createParser(onParse);
+      for await (const chunk of res.body as any) {
+        parser.feed(decoder.decode(chunk, { stream: true }));
+      }
+    },
+  });
+  return stream;
+}
+
+function formatResponse(msg: any) {
+  const jsonMsg = ["```json\n", JSON.stringify(msg, null, " "), "\n```"].join(
+    "",
+  );
+  return new Response(jsonMsg);
+}
+
 async function makeRequest(req: NextRequest) {
   try {
     const api = await requestOpenai(req);
-    const res = new NextResponse(api.body);
-    res.headers.set("Content-Type", "application/json");
-    res.headers.set("Cache-Control", "no-cache");
-    return res;
+
+    const contentType = api.headers.get("Content-Type") ?? "";
+
+    // streaming response
+    if (contentType.includes("stream")) {
+      const stream = await createStream(api);
+      return new Response(stream);
+    }
+
+    // try to parse error msg
+    try {
+      const mayBeErrorBody = await api.json();
+      if (mayBeErrorBody.error) {
+        console.error("[OpenAI Response] ", mayBeErrorBody);
+        return formatResponse(mayBeErrorBody);
+      } else {
+        const res = new Response(JSON.stringify(mayBeErrorBody));
+        res.headers.set("Content-Type", "application/json");
+        res.headers.set("Cache-Control", "no-cache");
+        return res;
+      }
+    } catch (e) {
+      console.error("[OpenAI Parse] ", e);
+      return formatResponse({
+        msg: "invalid response from openai server",
+        error: e,
+      });
+    }
   } catch (e) {
-    console.error("[OpenAI] ", req.body, e);
-    return NextResponse.json(
-      {
-        error: true,
-        msg: JSON.stringify(e),
-      },
-      {
-        status: 500,
-      },
-    );
+    console.error("[OpenAI] ", e);
+    return formatResponse(e);
   }
 }
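For context, here is a minimal client-side sketch of consuming the merged endpoint. The "/api/openai" path and the plain-text chunk stream (or fenced JSON error message) come from the route above; the helper name, request body, and reader loop are illustrative assumptions, not the project's actual requestChatStream implementation.

// Sketch only: readChatText is a hypothetical helper, not code from this commit.
async function readChatText(payload: unknown, onText: (chunk: string) => void) {
  const res = await fetch("/api/openai", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
  });

  // The route above returns the delta text as a raw byte stream,
  // so the client only needs to decode and append each chunk.
  const reader = res.body?.getReader();
  if (!reader) return;
  const decoder = new TextDecoder();

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    onText(decoder.decode(value, { stream: true }));
  }
}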
@@ -161,7 +161,7 @@ export async function requestChatStream(
   const reqTimeoutId = setTimeout(() => controller.abort(), TIME_OUT_MS);
 
   try {
-    const res = await fetch("/api/chat-stream", {
+    const res = await fetch("/api/openai", {
       method: "POST",
       headers: {
         "Content-Type": "application/json",
@@ -3,7 +3,7 @@ import { getServerSideConfig } from "./app/config/server";
 import md5 from "spark-md5";
 
 export const config = {
-  matcher: ["/api/openai", "/api/chat-stream"],
+  matcher: ["/api/openai"],
 };
 
 const serverConfig = getServerSideConfig();
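With the chat-stream route gone, the matcher only guards the single /api/openai route. A rough, hypothetical illustration of the shape such a matcher-gated middleware takes (the project's actual access check is not part of this diff):

// Hypothetical shape only; the real middleware body is not shown in this commit.
import { NextRequest, NextResponse } from "next/server";

export const config = {
  matcher: ["/api/openai"],
};

export function middleware(req: NextRequest) {
  // Reject calls without a credential before they reach the OpenAI proxy route.
  if (!req.headers.get("token")) {
    return NextResponse.json({ error: true, msg: "empty token" }, { status: 401 });
  }
  return NextResponse.next();
}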