feat: handle non-stream response

Yidadaa 2023-05-18 02:04:12 +08:00
parent 736c66f46a
commit 5f2745c32a


@@ -86,27 +86,39 @@ export class ChatGPTApi implements LLMApi {
           ...chatPayload,
           async onopen(res) {
             clearTimeout(requestTimeoutId);
+            const contentType = res.headers.get("content-type");
+            console.log(
+              "[OpenAI] request response content type: ",
+              contentType,
+            );
+
+            if (contentType?.startsWith("text/plain")) {
+              responseText = await res.clone().text();
+              return finish();
+            }
+
             if (
               !res.ok ||
               res.headers.get("content-type") !== EventStreamContentType ||
               res.status !== 200
             ) {
-              let extraInfo = { error: undefined };
+              const responseTexts = [responseText];
+              let extraInfo = await res.clone().text();
               try {
-                extraInfo = await res.clone().json();
+                const resJson = await res.clone().json();
+                extraInfo = prettyObject(resJson);
               } catch {}
 
               if (res.status === 401) {
-                if (responseText.length > 0) {
-                  responseText += "\n\n";
-                }
-                responseText += Locale.Error.Unauthorized;
+                responseTexts.push(Locale.Error.Unauthorized);
               }
 
-              if (extraInfo.error) {
-                responseText += "\n\n" + prettyObject(extraInfo);
+              if (extraInfo) {
+                responseTexts.push(extraInfo);
               }
 
+              responseText = responseTexts.join("\n\n");
+
               return finish();
             }
           },
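
For context, a minimal standalone sketch (not part of this commit) of the same content-type branching against a plain fetch-style Response. The function name handleNonStreamResponse, the onFinish callback, and the plain "Unauthorized" string (standing in for Locale.Error.Unauthorized) are hypothetical, used only for illustration.

// Sketch only: mirrors the onopen logic above, outside the fetchEventSource
// callback. `onFinish` is a hypothetical callback, not part of ChatGPTApi.
async function handleNonStreamResponse(
  res: Response,
  onFinish: (text: string) => void,
): Promise<boolean> {
  const contentType = res.headers.get("content-type");

  // Non-stream case: the server answered with plain text, so read the body
  // once and finish immediately instead of waiting for SSE events.
  if (contentType?.startsWith("text/plain")) {
    onFinish(await res.clone().text());
    return true;
  }

  // Error case: prefer pretty-printed JSON if the body parses, otherwise the
  // raw body text, and add an "Unauthorized" hint for 401 responses.
  if (!res.ok || contentType !== "text/event-stream" || res.status !== 200) {
    const parts: string[] = [];
    let extraInfo = await res.clone().text();
    try {
      extraInfo = JSON.stringify(await res.clone().json(), null, 2);
    } catch {}
    if (res.status === 401) {
      parts.push("Unauthorized");
    }
    if (extraInfo) {
      parts.push(extraInfo);
    }
    onFinish(parts.join("\n\n"));
    return true;
  }

  // Otherwise the caller should continue with normal SSE streaming.
  return false;
}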