fix: fix gemini issue when using app (#4013)

* chore: update path

* fix: fix google auth logic

* fix: not using header authorization for google api

* chore: revert to allow stream
fred-bf, 2024-02-07 13:17:11 +08:00, committed by GitHub
parent 9d5801fb5f
commit bca74241e6
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
16 changed files with 102 additions and 59 deletions

app/client/api.ts

@@ -144,10 +144,10 @@ export function getHeaders() {
   const headers: Record<string, string> = {
     "Content-Type": "application/json",
     "x-requested-with": "XMLHttpRequest",
-    "Accept": "application/json",
+    Accept: "application/json",
   };
   const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
-  const isGoogle = modelConfig.model === "gemini-pro";
+  const isGoogle = modelConfig.model.startsWith("gemini");
   const isAzure = accessStore.provider === ServiceProvider.Azure;
   const authHeader = isAzure ? "api-key" : "Authorization";
   const apiKey = isGoogle
@@ -155,20 +155,23 @@ export function getHeaders() {
     : isAzure
     ? accessStore.azureApiKey
     : accessStore.openaiApiKey;
+  const clientConfig = getClientConfig();
   const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
   const validString = (x: string) => x && x.length > 0;
-  // use user's api key first
-  if (validString(apiKey)) {
-    headers[authHeader] = makeBearer(apiKey);
-  } else if (
-    accessStore.enabledAccessControl() &&
-    validString(accessStore.accessCode)
-  ) {
-    headers[authHeader] = makeBearer(
-      ACCESS_CODE_PREFIX + accessStore.accessCode,
-    );
+  // when using google api in app, not set auth header
+  if (!(isGoogle && clientConfig?.isApp)) {
+    // use user's api key first
+    if (validString(apiKey)) {
+      headers[authHeader] = makeBearer(apiKey);
+    } else if (
+      accessStore.enabledAccessControl() &&
+      validString(accessStore.accessCode)
+    ) {
+      headers[authHeader] = makeBearer(
+        ACCESS_CODE_PREFIX + accessStore.accessCode,
+      );
+    }
   }

   return headers;
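Net effect of this hunk: when a Gemini model runs inside the desktop app, getHeaders() attaches no auth header at all; the Google key travels in the request URL instead (see the google.ts change below). A minimal standalone sketch of the resulting logic, with the store lookups inlined as parameters (names here are illustrative, not the app's real API):

// Sketch only: store lookups replaced by plain parameters.
function buildAuthHeaders(opts: {
  model: string;
  isApp: boolean; // true for the Tauri desktop build
  isAzure: boolean;
  apiKey: string;
}): Record<string, string> {
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    Accept: "application/json",
  };
  const isGoogle = opts.model.startsWith("gemini");
  // Gemini + app build: the key is sent as a `?key=` query parameter,
  // so no auth header is set here.
  if (isGoogle && opts.isApp) return headers;
  const authHeader = opts.isAzure ? "api-key" : "Authorization";
  if (opts.apiKey.trim().length > 0) {
    headers[authHeader] = opts.isAzure
      ? opts.apiKey.trim()
      : `Bearer ${opts.apiKey.trim()}`;
  }
  return headers;
}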

app/client/platforms/google.ts

@@ -1,15 +1,8 @@
 import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
 import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import {
-  EventStreamContentType,
-  fetchEventSource,
-} from "@fortaine/fetch-event-source";
-import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import Locale from "../../locales";
-import { getServerSideConfig } from "@/app/config/server";
-import de from "@/app/locales/de";
+import { DEFAULT_API_HOST } from "@/app/constant";
 export class GeminiProApi implements LLMApi {
   extractMessage(res: any) {
     console.log("[Response] gemini-pro response: ", res);
@@ -21,7 +14,7 @@ export class GeminiProApi implements LLMApi {
     );
   }
   async chat(options: ChatOptions): Promise<void> {
-    const apiClient = this;
+    // const apiClient = this;
     const messages = options.messages.map((v) => ({
       role: v.role.replace("assistant", "model").replace("system", "user"),
       parts: [{ text: v.content }],
@@ -79,20 +72,31 @@ export class GeminiProApi implements LLMApi {
       ],
     };

-    console.log("[Request] google payload: ", requestPayload);
+    const isApp = !!getClientConfig()?.isApp;

     const shouldStream = !!options.config.stream;
     const controller = new AbortController();
     options.onController?.(controller);
+    const accessStore = useAccessStore.getState();
     try {
-      const chatPath = this.path(Google.ChatPath);
+      let chatPath = this.path(Google.ChatPath);
+
+      // let baseUrl = accessStore.googleUrl;
+
+      chatPath = isApp
+        ? DEFAULT_API_HOST +
+          "/api/proxy/google/" +
+          Google.ChatPath +
+          `?key=${accessStore.googleApiKey}`
+        : chatPath;
+
       const chatPayload = {
         method: "POST",
         body: JSON.stringify(requestPayload),
         signal: controller.signal,
         headers: getHeaders(),
       };
+
+      console.log("[Request] google chatPath: ", chatPath, isApp);

       // make a fetch request
       const requestTimeoutId = setTimeout(
         () => controller.abort(),
@@ -134,6 +138,8 @@ export class GeminiProApi implements LLMApi {
         // start animaion
         animateResponseText();

+        console.log("[Proxy Endpoint] ", streamChatPath);
+
         fetch(streamChatPath, chatPayload)
           .then((response) => {
             const reader = response?.body?.getReader();
@@ -187,9 +193,7 @@ export class GeminiProApi implements LLMApi {
       } else {
         const res = await fetch(chatPath, chatPayload);
         clearTimeout(requestTimeoutId);
         const resJson = await res.json();
         if (resJson?.promptFeedback?.blockReason) {
           // being blocked
           options.onError?.(
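To see what the new routing produces, here is a hedged sketch of the URL the app build ends up fetching. The Google.ChatPath value is assumed from the surrounding code and may differ:

// Sketch of the app-build chat path (constant values assumed for illustration).
const DEFAULT_API_HOST = "https://api.nextchat.dev";
const GOOGLE_CHAT_PATH = "v1beta/models/gemini-pro:generateContent"; // assumed value of Google.ChatPath

function resolveGeminiChatPath(isApp: boolean, localPath: string, googleApiKey: string): string {
  return isApp
    ? DEFAULT_API_HOST +
        "/api/proxy/google/" +
        GOOGLE_CHAT_PATH +
        `?key=${googleApiKey}`
    : localPath; // web build keeps the relative /api/google/... route
}

// resolveGeminiChatPath(true, "/api/google/v1beta/...", "AIza...") =>
// "https://api.nextchat.dev/api/proxy/google/v1beta/models/gemini-pro:generateContent?key=AIza..."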

app/client/platforms/openai.ts

@@ -1,3 +1,4 @@
+"use client";
 import {
   ApiPath,
   DEFAULT_API_HOST,
@@ -45,7 +46,9 @@ export class ChatGPTApi implements LLMApi {
     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
+      baseUrl = isApp
+        ? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
+        : ApiPath.OpenAI;
     }

     if (baseUrl.endsWith("/")) {
@@ -59,6 +62,8 @@ export class ChatGPTApi implements LLMApi {
       path = makeAzurePath(path, accessStore.azureApiVersion);
     }

+    console.log("[Proxy Endpoint] ", baseUrl, path);
+
     return [baseUrl, path].join("/");
   }
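A small sketch of the resulting endpoint resolution for the OpenAI client. The ApiPath.OpenAI value is assumed to be "/api/openai", not confirmed by this diff:

// Sketch of baseUrl selection after this change (values assumed for illustration).
const DEFAULT_API_HOST = "https://api.nextchat.dev";
const API_PATH_OPENAI = "/api/openai"; // assumed value of ApiPath.OpenAI

function resolveOpenAIBaseUrl(configuredUrl: string, isApp: boolean): string {
  let baseUrl = configuredUrl;
  if (baseUrl.length === 0) {
    // app build goes through the hosted proxy; web build stays relative
    baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + API_PATH_OPENAI : API_PATH_OPENAI;
  }
  if (baseUrl.endsWith("/")) baseUrl = baseUrl.slice(0, -1);
  return baseUrl;
}

// resolveOpenAIBaseUrl("", true)  => "https://api.nextchat.dev/proxy/api/openai"
// resolveOpenAIBaseUrl("", false) => "/api/openai"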

app/components/exporter.tsx

@@ -307,7 +307,7 @@ export function PreviewActions(props: {
     setShouldExport(false);

     var api: ClientApi;
-    if (config.modelConfig.model === "gemini-pro") {
+    if (config.modelConfig.model.startsWith("gemini")) {
       api = new ClientApi(ModelProvider.GeminiPro);
     } else {
       api = new ClientApi(ModelProvider.GPT);
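The same `startsWith("gemini")` switch recurs in home.tsx, chat.ts (twice), and as a guard in model-config.tsx below; a shared helper would express it once. A sketch, not code from this commit (import path assumed):

import { ClientApi, ModelProvider } from "../client/api"; // path assumed

// Hypothetical helper capturing the repeated provider switch.
export function clientApiFor(model: string): ClientApi {
  return model.startsWith("gemini")
    ? new ClientApi(ModelProvider.GeminiPro)
    : new ClientApi(ModelProvider.GPT);
}

// usage: const api = clientApiFor(config.modelConfig.model);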

app/components/home.tsx

@@ -171,7 +171,7 @@ export function useLoadData() {
   const config = useAppConfig();

   var api: ClientApi;
-  if (config.modelConfig.model === "gemini-pro") {
+  if (config.modelConfig.model.startsWith("gemini")) {
     api = new ClientApi(ModelProvider.GeminiPro);
   } else {
     api = new ClientApi(ModelProvider.GPT);

app/components/model-config.tsx

@@ -92,7 +92,7 @@ export function ModelConfigList(props: {
       ></input>
     </ListItem>
-    {props.modelConfig.model === "gemini-pro" ? null : (
+    {props.modelConfig.model.startsWith("gemini") ? null : (
       <>
         <ListItem
           title={Locale.Settings.PresencePenalty.Title}

app/constant.ts

@@ -8,8 +8,10 @@ export const FETCH_COMMIT_URL = `https://api.github.com/repos/${OWNER}/${REPO}/c
 export const FETCH_TAG_URL = `https://api.github.com/repos/${OWNER}/${REPO}/tags?per_page=1`;
 export const RUNTIME_CONFIG_DOM = "danger-runtime-config";

-export const DEFAULT_CORS_HOST = "https://a.nextweb.fun";
-export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
+// export const DEFAULT_CORS_HOST = "https://api.nextchat.dev";
+// export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
+export const DEFAULT_API_HOST = "https://api.nextchat.dev";

 export const OPENAI_BASE_URL = "https://api.openai.com";
 export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";

app/store/access.ts

@@ -12,7 +12,9 @@ import { ensure } from "../utils/clone";

 let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

 const DEFAULT_OPENAI_URL =
-  getClientConfig()?.buildMode === "export" ? DEFAULT_API_HOST : ApiPath.OpenAI;
+  getClientConfig()?.buildMode === "export"
+    ? DEFAULT_API_HOST + "/api/proxy/openai"
+    : ApiPath.OpenAI;

 const DEFAULT_ACCESS_STATE = {
   accessCode: "",

app/store/chat.ts

@@ -316,7 +316,7 @@ export const useChatStore = createPersistStore(
       });

       var api: ClientApi;
-      if (modelConfig.model === "gemini-pro") {
+      if (modelConfig.model.startsWith("gemini")) {
         api = new ClientApi(ModelProvider.GeminiPro);
       } else {
         api = new ClientApi(ModelProvider.GPT);
@@ -501,7 +501,7 @@ export const useChatStore = createPersistStore(
       const modelConfig = session.mask.modelConfig;

       var api: ClientApi;
-      if (modelConfig.model === "gemini-pro") {
+      if (modelConfig.model.startsWith("gemini")) {
         api = new ClientApi(ModelProvider.GeminiPro);
       } else {
         api = new ClientApi(ModelProvider.GPT);

app/utils/cors.ts

@@ -1,8 +1,8 @@
 import { getClientConfig } from "../config/client";
-import { ApiPath, DEFAULT_CORS_HOST } from "../constant";
+import { ApiPath, DEFAULT_API_HOST } from "../constant";

 export function corsPath(path: string) {
-  const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_CORS_HOST}` : "";
+  const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";

   if (!path.startsWith("/")) {
     path = "/" + path;

next.config.mjs

@@ -64,8 +64,17 @@ if (mode !== "export") {
   nextConfig.rewrites = async () => {
     const ret = [
+      // adjust for previous verison directly using "/api/proxy/" as proxy base route
       {
-        source: "/api/proxy/:path*",
+        source: "/api/proxy/v1/:path*",
+        destination: "https://api.openai.com/v1/:path*",
+      },
+      {
+        source: "/api/proxy/google/:path*",
+        destination: "https://generativelanguage.googleapis.com/:path*",
+      },
+      {
+        source: "/api/proxy/openai/:path*",
         destination: "https://api.openai.com/:path*",
       },
       {
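The rewrites now fan out by upstream provider, with the /api/proxy/v1/ rule kept so clients still hitting the previous proxy base route keep working. The same mapping expressed as a lookup, for illustration only (not the app's code):

// Illustration of the new rewrite fan-out (mirrors the rules above).
const PROXY_REWRITES: Array<{ source: RegExp; destination: string }> = [
  // back-compat for clients still using "/api/proxy/v1/..." directly
  { source: /^\/api\/proxy\/v1\/(.*)$/, destination: "https://api.openai.com/v1/$1" },
  { source: /^\/api\/proxy\/google\/(.*)$/, destination: "https://generativelanguage.googleapis.com/$1" },
  { source: /^\/api\/proxy\/openai\/(.*)$/, destination: "https://api.openai.com/$1" },
];

function applyRewrite(path: string): string | undefined {
  for (const { source, destination } of PROXY_REWRITES) {
    if (source.test(path)) return path.replace(source, destination);
  }
  return undefined; // no proxy rule matched; fall through to other routes
}

// applyRewrite("/api/proxy/google/v1beta/models/gemini-pro:generateContent")
//   => "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent"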

package.json

@@ -1,5 +1,5 @@
 {
-  "name": "chatgpt-next-web",
+  "name": "nextchat",
   "private": false,
   "license": "mit",
   "scripts": {

scripts/setup.sh

@@ -54,7 +54,7 @@ if ! command -v node >/dev/null || ! command -v git >/dev/null || ! command -v y
 fi

 # Clone the repository and install dependencies
-git clone https://github.com/Yidadaa/ChatGPT-Next-Web
+git clone https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web
 cd ChatGPT-Next-Web
 yarn install

src-tauri/Cargo.lock (generated)

@@ -431,17 +431,6 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

-[[package]]
-name = "chatgpt-next-web"
-version = "0.1.0"
-dependencies = [
- "serde",
- "serde_json",
- "tauri",
- "tauri-build",
- "tauri-plugin-window-state",
-]
-
 [[package]]
 name = "chrono"
 version = "0.4.24"
@@ -1824,6 +1813,17 @@ version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"

+[[package]]
+name = "nextchat"
+version = "0.1.0"
+dependencies = [
+ "serde",
+ "serde_json",
+ "tauri",
+ "tauri-build",
+ "tauri-plugin-window-state",
+]
+
 [[package]]
 name = "nix"
 version = "0.26.4"

src-tauri/Cargo.toml

@@ -1,11 +1,11 @@
 [package]
-name = "chatgpt-next-web"
+name = "nextchat"
 version = "0.1.0"
 description = "A cross platform app for LLM ChatBot."
 authors = ["Yidadaa"]
 license = "mit"
 repository = ""
-default-run = "chatgpt-next-web"
+default-run = "nextchat"
 edition = "2021"
 rust-version = "1.60"
@@ -17,11 +17,29 @@ tauri-build = { version = "1.3.0", features = [] }
 [dependencies]
 serde_json = "1.0"
 serde = { version = "1.0", features = ["derive"] }
-tauri = { version = "1.3.0", features = ["notification-all", "fs-all", "clipboard-all", "dialog-all", "shell-open", "updater", "window-close", "window-hide", "window-maximize", "window-minimize", "window-set-icon", "window-set-ignore-cursor-events", "window-set-resizable", "window-show", "window-start-dragging", "window-unmaximize", "window-unminimize"] }
+tauri = { version = "1.3.0", features = [
+    "notification-all",
+    "fs-all",
+    "clipboard-all",
+    "dialog-all",
+    "shell-open",
+    "updater",
+    "window-close",
+    "window-hide",
+    "window-maximize",
+    "window-minimize",
+    "window-set-icon",
+    "window-set-ignore-cursor-events",
+    "window-set-resizable",
+    "window-show",
+    "window-start-dragging",
+    "window-unmaximize",
+    "window-unminimize",
+] }
 tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }

 [features]
 # this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
 # If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes.
 # DO NOT REMOVE!!
-custom-protocol = [ "tauri/custom-protocol" ]
+custom-protocol = ["tauri/custom-protocol"]

src-tauri/tauri.conf.json

@@ -91,7 +91,7 @@
   "updater": {
     "active": true,
     "endpoints": [
-      "https://github.com/Yidadaa/ChatGPT-Next-Web/releases/latest/download/latest.json"
+      "https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/releases/latest/download/latest.json"
     ],
     "dialog": false,
     "windows": {