import { createParser } from "eventsource-parser";
import { NextRequest } from "next/server";
// Builds a ReadableStream that relays OpenAI's SSE chat completion stream
// to the client as plain-text chunks.
async function createStream(req: NextRequest) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  // Prefer a user-supplied key from the "token" header, falling back to the
  // server-side OPENAI_API_KEY environment variable.
  let apiKey = process.env.OPENAI_API_KEY;

  const userApiKey = req.headers.get("token");
  if (userApiKey) {
    apiKey = userApiKey;
    console.log("[Stream] using user api key");
  }
  // Forward the client's request body verbatim to the OpenAI chat completions API.
  const res = await fetch("https://api.openai.com/v1/chat/completions", {
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    method: "POST",
    body: req.body,
  });
  const stream = new ReadableStream({
    async start(controller) {
      // Called by the SSE parser for every complete event in the upstream stream.
      function onParse(event: any) {
        if (event.type === "event") {
          const data = event.data;
          // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
          if (data === "[DONE]") {
            controller.close();
            return;
          }
          try {
            // Each event carries a JSON chunk; forward only the delta text.
            const json = JSON.parse(data);
            const text = json.choices[0].delta.content;
            const queue = encoder.encode(text);
            controller.enqueue(queue);
          } catch (e) {
            controller.error(e);
          }
        }
      }

      // Pipe the upstream response bytes through the SSE parser chunk by chunk.
      const parser = createParser(onParse);
      for await (const chunk of res.body as any) {
        parser.feed(decoder.decode(chunk));
      }
    },
  });

  return stream;
}
// Edge route handler: forwards the chat request upstream and streams the reply back.
export async function POST(req: NextRequest) {
  try {
    const stream = await createStream(req);
    return new Response(stream);
  } catch (error) {
    console.error("[Chat Stream]", error);
    // Return an explicit error response instead of falling through with undefined.
    return new Response(null, { status: 500 });
  }
}
// Run this route on the Edge runtime so streamed responses are supported.
export const config = {
  runtime: "edge",
};
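
/*
 * Usage sketch (illustrative only, not part of the original file): a browser
 * client could consume the stream produced by this route roughly as follows.
 * The route path "/api/chat-stream" and the request payload shape are
 * assumptions; since the handler forwards req.body verbatim to OpenAI, the
 * client must send a valid chat completion payload with `stream: true`.
 *
 *   const res = await fetch("/api/chat-stream", {
 *     method: "POST",
 *     headers: { "Content-Type": "application/json" },
 *     body: JSON.stringify({
 *       model: "gpt-3.5-turbo",
 *       stream: true,
 *       messages: [{ role: "user", content: "Hello" }],
 *     }),
 *   });
 *
 *   const reader = res.body!.getReader();
 *   const decoder = new TextDecoder();
 *   for (;;) {
 *     const { done, value } = await reader.read();
 *     if (done) break;
 *     // Each chunk is a plain-text token fragment enqueued by createStream.
 *     console.log(decoder.decode(value, { stream: true }));
 *   }
 */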