From 4169431f2c5d78345de7704dda4872d7d5e7790f Mon Sep 17 00:00:00 2001
From: Fred Liang
Date: Sun, 24 Dec 2023 03:05:23 +0800
Subject: [PATCH] fix: fix add api auth

---
 app/api/google/[...path]/route.ts | 5 ++---
 app/client/api.ts                 | 4 ++--
 app/client/platforms/google.ts    | 8 +++-----
 3 files changed, 7 insertions(+), 10 deletions(-)

diff --git a/app/api/google/[...path]/route.ts b/app/api/google/[...path]/route.ts
index 5b19740a..ec5d6552 100644
--- a/app/api/google/[...path]/route.ts
+++ b/app/api/google/[...path]/route.ts
@@ -43,9 +43,8 @@ async function handle(
     10 * 60 * 1000,
   );
 
-  const fetchUrl = `${baseUrl}/${path}?key=${req.nextUrl.searchParams.get(
-    "key",
-  )}`;
+  const key = req.nextUrl.searchParams.get("key") ?? serverConfig.googleApiKey;
+  const fetchUrl = `${baseUrl}/${path}?key=${key}`;
 
   const fetchOptions: RequestInit = {
     headers: {
diff --git a/app/client/api.ts b/app/client/api.ts
index 163e5fc5..3215f45b 100644
--- a/app/client/api.ts
+++ b/app/client/api.ts
@@ -145,8 +145,8 @@ export function getHeaders() {
     "Content-Type": "application/json",
     "x-requested-with": "XMLHttpRequest",
   };
-
-  const isGoogle = accessStore.provider === ServiceProvider.Google;
+  const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
+  const isGoogle = modelConfig.model === "gemini";
   const isAzure = accessStore.provider === ServiceProvider.Azure;
   const authHeader = isAzure ? "api-key" : "Authorization";
   const apiKey = isGoogle
diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index 90584571..b44def99 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -8,6 +8,7 @@ import {
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
 import Locale from "../../locales";
+import { getServerSideConfig } from "@/app/config/server";
 export class GeminiApi implements LLMApi {
   extractMessage(res: any) {
     console.log("[Response] gemini response: ", res);
@@ -30,8 +31,6 @@ export class GeminiApi implements LLMApi {
         model: options.config.model,
       },
     };
-    const accessStore = useAccessStore.getState();
-
     const requestPayload = {
       contents: messages,
       // stream: options.config.stream,
@@ -44,7 +43,7 @@
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
 
-    console.log("[Request] openai payload: ", requestPayload);
+    console.log("[Request] google payload: ", requestPayload);
 
     // todo: support stream later
     const shouldStream = false;
@@ -52,8 +51,7 @@
     options.onController?.(controller);
 
     try {
-      const chatPath =
-        this.path(Google.ChatPath) + `?key=${accessStore.googleApiKey}`;
+      const chatPath = this.path(Google.ChatPath);
       const chatPayload = {
         method: "POST",
         body: JSON.stringify(requestPayload),