forked from XiaoMo/ChatGPT-Next-Web
fix: fix type errors
parent ae0d68c27e
commit 45798f993d
@@ -323,6 +323,11 @@ export class ChatGPTApi implements LLMApi {
     return chatModels.map((m) => ({
       name: m.id,
       available: true,
+      provider: {
+        id: "openai",
+        providerName: "OpenAI",
+        providerType: "openai",
+      },
     }));
   }
 }
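The added provider block is what resolves the type error in this hunk: each entry returned by the models listing now carries the provider metadata the model type expects. Below is a minimal TypeScript sketch of the assumed entry shape; ProviderInfo, ModelEntry, and toOpenAIModelEntry are hypothetical names standing in for the repo's own types, only the field names and literal values come from the diff.

// Hypothetical stand-ins for the repo's model types, mirroring the fields above.
interface ProviderInfo {
  id: string;
  providerName: string;
  providerType: string;
}

interface ModelEntry {
  name: string;
  available: boolean;
  provider: ProviderInfo;
}

// Maps a raw model id the same way the patched mapping does.
function toOpenAIModelEntry(id: string): ModelEntry {
  return {
    name: id,
    available: true,
    provider: {
      id: "openai",
      providerName: "OpenAI",
      providerType: "openai",
    },
  };
}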
@@ -584,6 +584,7 @@ export function Settings() {
   const accessStore = useAccessStore();
   const shouldHideBalanceQuery = useMemo(() => {
     const isOpenAiUrl = accessStore.openaiUrl.includes(OPENAI_BASE_URL);
+
     return (
       accessStore.hideBalanceQuery ||
       isOpenAiUrl ||
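Only a blank line is added in this hunk, but the surrounding useMemo is the balance-query gate. The sketch below restates the visible part of that predicate as a standalone function; shouldHideBalanceQuery as a free function and the OPENAI_BASE_URL value are assumptions, and the real memo continues with further operands beyond the context shown (note the trailing ||).

// Assumed value of the imported constant.
const OPENAI_BASE_URL = "https://api.openai.com";

// Hide the balance query when the user opted out or when the configured
// endpoint is the official OpenAI URL.
function shouldHideBalanceQuery(access: {
  hideBalanceQuery: boolean;
  openaiUrl: string;
}): boolean {
  const isOpenAiUrl = access.openaiUrl.includes(OPENAI_BASE_URL);
  return access.hideBalanceQuery || isOpenAiUrl;
}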
@@ -319,6 +319,24 @@ const en: LocaleType = {
         Title: "Custom Models",
         SubTitle: "Custom model options, seperated by comma",
       },
+      Google: {
+        ApiKey: {
+          Title: "API Key",
+          SubTitle:
+            "Bypass password access restrictions using a custom Google AI Studio API Key",
+          Placeholder: "Google AI Studio API Key",
+        },
+
+        Endpoint: {
+          Title: "Endpoint Address",
+          SubTitle: "Example:",
+        },
+
+        ApiVerion: {
+          Title: "API Version (gemini api version)",
+          SubTitle: "Select a specific part version",
+        },
+      },
     },
 
     Model: "Model",
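The new Google block only adds UI strings. As a consumption sketch, the snippet below mirrors the added ApiKey keys in a standalone object and reads them the way a settings row presumably would; the GoogleAccessLocale name is hypothetical, and the real values live in the en locale file above.

// Hypothetical standalone slice of the new section.
const GoogleAccessLocale = {
  ApiKey: {
    Title: "API Key",
    SubTitle:
      "Bypass password access restrictions using a custom Google AI Studio API Key",
    Placeholder: "Google AI Studio API Key",
  },
} as const;

// e.g. a settings input could be labelled and hinted with:
const label: string = GoogleAccessLocale.ApiKey.Title;
const placeholder: string = GoogleAccessLocale.ApiKey.Placeholder;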
@@ -443,8 +461,8 @@ const en: LocaleType = {
   },
   Exporter: {
     Description: {
-      Title: "Only messages after clearing the context will be displayed"
+      Title: "Only messages after clearing the context will be displayed",
     },
     Model: "Model",
     Messages: "Messages",
     Topic: "Topic",
@@ -1,8 +1,16 @@
-import { FETCH_COMMIT_URL, FETCH_TAG_URL, StoreKey } from "../constant";
+import {
+  FETCH_COMMIT_URL,
+  FETCH_TAG_URL,
+  ModelProvider,
+  StoreKey,
+} from "../constant";
 import { getClientConfig } from "../config/client";
 import { createPersistStore } from "../utils/store";
 import ChatGptIcon from "../icons/chatgpt.png";
 import Locale from "../locales";
+import { use } from "react";
+import { useAppConfig } from ".";
+import { ClientApi } from "../client/api";
 
 const ONE_MINUTE = 60 * 1000;
 const isApp = !!getClientConfig()?.isApp;
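ModelProvider is newly imported here so that updateUsage (further down) can pick a concrete client. A short sketch of the assumed enum shape; only the GPT member is confirmed by this diff, any other members are an assumption.

// Assumed shape of the ModelProvider enum from ../constant.
enum ModelProvider {
  GPT = "GPT",
  // ...further providers (e.g. a Gemini entry) would be declared here
}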
@@ -126,6 +134,7 @@ export const useUpdateStore = createPersistStore(
     },
 
     async updateUsage(force = false) {
+      // only support openai for now
       const overOneMinute = Date.now() - get().lastUpdateUsage >= ONE_MINUTE;
       if (!overOneMinute && !force) return;
 
@@ -134,6 +143,7 @@ export const useUpdateStore = createPersistStore(
       }));
 
       try {
+        const api = new ClientApi(ModelProvider.GPT);
         const usage = await api.llm.usage();
 
         if (usage) {
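The pattern introduced here is to instantiate a ClientApi for a specific provider and go through its llm interface. A minimal sketch of that pattern, assuming llm.usage() resolves to a used/total pair (the exact return type is not shown in this diff); the import paths are the ones visible above.

import { ClientApi } from "../client/api";
import { ModelProvider } from "../constant";

// Only OpenAI usage is queried for now, mirroring the comment added above.
async function fetchOpenAIUsage() {
  const api = new ClientApi(ModelProvider.GPT);
  // assumed result shape: { used: number; total: number } | undefined
  const usage = await api.llm.usage();
  return usage;
}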
@@ -4,7 +4,15 @@ export function collectModelTable(
   models: readonly LLMModel[],
   customModels: string,
 ) {
-  const modelTable: { [key: string]: LLMModel } = {};
+  const modelTable: Record<
+    string,
+    {
+      available: boolean;
+      name: string;
+      displayName: string;
+      provider: LLMModel["provider"];
+    }
+  > = {};
 
   // default models
   models.forEach(
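The old index signature typed every table entry as a full LLMModel, while collectModelTable also stores a displayName per entry, which is presumably what tripped the type checker; the inline Record value type spells the entry shape out. A self-contained sketch with a stubbed LLMModel (the real type lives in the client code), where filling displayName from the model's own name is the presumed default for built-in models.

// Stub standing in for the repo's LLMModel type.
interface LLMModel {
  name: string;
  available: boolean;
  provider: { id: string; providerName: string; providerType: string };
}

// The widened table type from the hunk above.
type ModelTable = Record<
  string,
  {
    available: boolean;
    name: string;
    displayName: string;
    provider: LLMModel["provider"];
  }
>;

function buildDefaultTable(models: readonly LLMModel[]): ModelTable {
  const table: ModelTable = {};
  models.forEach((m) => {
    table[m.name] = {
      name: m.name,
      available: m.available,
      displayName: m.name, // built-in models reuse their own name here
      provider: m.provider,
    };
  });
  return table;
}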