diff --git a/Dockerfile b/Dockerfile
index 7755b1a5..21adff9b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -50,6 +50,8 @@ CMD if [ -n "$PROXY_URL" ]; then \
     echo "remote_dns_subnet 224" >> $conf; \
     echo "tcp_read_time_out 15000" >> $conf; \
     echo "tcp_connect_time_out 8000" >> $conf; \
+    echo "localnet 127.0.0.0/255.0.0.0" >> $conf; \
+    echo "localnet ::1/128" >> $conf; \
     echo "[ProxyList]" >> $conf; \
     echo "$protocol $host $port" >> $conf; \
     cat /etc/proxychains.conf; \
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index cc8b5b6f..63587440 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -3,7 +3,10 @@ import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 
 import { ChatOptions, getHeaders, LLMApi, LLMUsage } from "../api";
 import Locale from "../../locales";
-import { fetchEventSource } from "@microsoft/fetch-event-source";
+import {
+  EventStreamContentType,
+  fetchEventSource,
+} from "@microsoft/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 
 export class ChatGPTApi implements LLMApi {
@@ -79,6 +82,13 @@ export class ChatGPTApi implements LLMApi {
         ...chatPayload,
         async onopen(res) {
           clearTimeout(requestTimeoutId);
+          if (
+            res.ok &&
+            res.headers.get("content-type") !== EventStreamContentType
+          ) {
+            responseText += await res.clone().json();
+            return finish();
+          }
           if (res.status === 401) {
             let extraInfo = { error: undefined };
             try {
diff --git a/app/components/model-config.tsx b/app/components/model-config.tsx
index fbc96d4d..0392621d 100644
--- a/app/components/model-config.tsx
+++ b/app/components/model-config.tsx
@@ -68,8 +68,8 @@ export function ModelConfigList(props: {
         >
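
For context on the Dockerfile change: proxychains treats `localnet` entries as destinations that should be connected to directly rather than through the proxy, so loopback traffic inside the container no longer gets routed via `$PROXY_URL`. With the two added lines, the tail of the generated /etc/proxychains.conf would look roughly like this (the final `socks5 10.0.0.1 1080` line is a hypothetical example assuming `PROXY_URL=socks5://10.0.0.1:1080`; only the lines visible in the hunk above are shown):

```
remote_dns_subnet 224
tcp_read_time_out 15000
tcp_connect_time_out 8000
localnet 127.0.0.0/255.0.0.0
localnet ::1/128
[ProxyList]
socks5 10.0.0.1 1080
```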
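For the openai.ts change, here is a minimal TypeScript sketch of the new `onopen` guard in isolation, not the project's actual code: `chatPath`, `chatPayload`, `responseText`, and `finish` stand in for variables defined elsewhere in `ChatGPTApi.chat()` and are stubbed here for illustration.

```ts
import {
  EventStreamContentType,
  fetchEventSource,
} from "@microsoft/fetch-event-source";

// Sketch only: `chatPath` and `chatPayload` are hypothetical stand-ins for
// the request path and payload built inside ChatGPTApi.chat().
async function requestWithFallback(chatPath: string, chatPayload: RequestInit) {
  let responseText = "";
  const finish = () => console.log("[done]", responseText);

  await fetchEventSource(chatPath, {
    ...chatPayload,
    async onopen(res) {
      // The server may answer a streaming request with plain JSON instead of
      // text/event-stream (e.g. a non-streaming completion or error body).
      // Detect that case, read the body once, and finish early instead of
      // waiting for SSE events that will never arrive.
      if (
        res.ok &&
        res.headers.get("content-type") !== EventStreamContentType
      ) {
        responseText += JSON.stringify(await res.clone().json());
        return finish();
      }
      // Otherwise fall through and let onmessage consume the SSE stream.
    },
    onmessage(msg) {
      responseText += msg.data;
    },
    onclose: finish,
  });
}
```

The `res.clone()` call matters because the response body can only be read once; cloning leaves the original untouched in case other handlers still need it.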