forked from XiaoMo/ChatGPT-Next-Web
feat: #2330 disable /list/models
This commit is contained in:
parent c00a63e4c3
commit 15e063e1b5
@@ -1,5 +1,6 @@
 import {
   DEFAULT_API_HOST,
+  DEFAULT_MODELS,
   OpenaiPath,
   REQUEST_TIMEOUT_MS,
 } from "@/app/constant";
@@ -23,6 +24,8 @@ export interface OpenAIListModelResponse {
 }
 
 export class ChatGPTApi implements LLMApi {
+  private disableListModels = true;
+
   path(path: string): string {
     let openaiUrl = useAccessStore.getState().openaiUrl;
     if (openaiUrl.length === 0) {
@@ -246,6 +249,10 @@ export class ChatGPTApi implements LLMApi {
   }
 
   async models(): Promise<LLMModel[]> {
+    if (this.disableListModels) {
+      return DEFAULT_MODELS.slice();
+    }
+
     const res = await fetch(this.path(OpenaiPath.ListModelPath), {
       method: "GET",
       headers: {
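The hunks above touch the OpenAI client (app/client/platforms/openai.ts in this repo's layout). With disableListModels hard-coded to true, models() never calls the model-list endpoint and simply returns a copy of the built-in list. Below is a standalone sketch of that behavior; the LLMModel stand-in, the sample model names, and the fallback fetch are assumptions for illustration, not part of the diff.

// Local stand-ins for the app's type and constant (assumed shapes).
interface LLMModel {
  name: string;
  available: boolean;
}

const DEFAULT_MODELS: LLMModel[] = [
  { name: "gpt-3.5-turbo", available: true },
  { name: "gpt-4", available: true },
];

class ChatGPTApiSketch {
  private disableListModels = true;

  async models(): Promise<LLMModel[]> {
    // Short-circuit: hand back a copy of the static list instead of
    // hitting the OpenAI model-list endpoint.
    if (this.disableListModels) {
      return DEFAULT_MODELS.slice();
    }

    // Roughly what the truncated hunk continues into: fetch the list
    // and map the returned ids onto LLMModel entries.
    const res = await fetch("https://api.openai.com/v1/models", {
      method: "GET",
    });
    const json = (await res.json()) as { data: { id: string }[] };
    return json.data.map((m) => ({ name: m.id, available: true }));
  }
}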
@@ -28,6 +28,7 @@ import { useAppConfig } from "../store/config";
 import { AuthPage } from "./auth";
 import { getClientConfig } from "../config/client";
 import { api } from "../client/api";
+import { useAccessStore } from "../store";
 
 export function Loading(props: { noLogo?: boolean }) {
   return (
@@ -171,6 +172,7 @@ export function Home() {
 
   useEffect(() => {
     console.log("[Config] got config from build time", getClientConfig());
+    useAccessStore.getState().fetch();
   }, []);
 
   if (!useHasHydrated()) {
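These hunks are in the Home component (app/components/home.tsx). The added line pulls the server-side config once on mount; calling useAccessStore.getState().fetch() instead of the hook selector avoids subscribing the component to store updates. A minimal sketch of the pattern with a hypothetical zustand store (the store shape and endpoint are illustrative, not lifted from the app):

import { create } from "zustand";

// Hypothetical access store: only the pieces needed for the sketch.
interface AccessSketch {
  disableGPT4: boolean;
  fetch: () => void;
}

const useAccessStoreSketch = create<AccessSketch>()((set) => ({
  disableGPT4: false,
  fetch: () => {
    // Load the server config and merge it into the store state.
    fetch("/api/config", { method: "post" })
      .then((res) => res.json())
      .then((res) => set(() => ({ ...res })))
      .catch(console.error);
  },
}));

// Inside a React component, run once on mount without subscribing:
//   useEffect(() => {
//     useAccessStoreSketch.getState().fetch();
//   }, []);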
@@ -108,24 +108,4 @@ export const DEFAULT_MODELS = [
     name: "gpt-3.5-turbo-16k-0613",
     available: true,
   },
-  {
-    name: "qwen-v1", // 通义千问
-    available: false,
-  },
-  {
-    name: "ernie", // 文心一言
-    available: false,
-  },
-  {
-    name: "spark", // 讯飞星火
-    available: false,
-  },
-  {
-    name: "llama", // llama
-    available: false,
-  },
-  {
-    name: "chatglm", // chatglm-6b
-    available: false,
-  },
 ] as const;
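This hunk trims DEFAULT_MODELS in app/constant.ts: the placeholder entries for non-OpenAI models (qwen-v1, ernie, spark, llama, chatglm) are removed, leaving only OpenAI models. Because the array is declared `as const`, its entries are readonly at the type level, which is why other code mutates `available` through an `any` cast. A small illustrative sketch; the list below is a subset of the remaining array and the helper is hypothetical:

const DEFAULT_MODELS = [
  { name: "gpt-3.5-turbo", available: true },
  { name: "gpt-3.5-turbo-16k-0613", available: true },
  { name: "gpt-4", available: true },
] as const;

// `as const` narrows each name to a string literal, so a union of the
// known model names can be derived straight from the array.
type ModelName = (typeof DEFAULT_MODELS)[number]["name"];

// Hypothetical helper: list only the names currently flagged available.
function availableModels(): ModelName[] {
  return DEFAULT_MODELS.filter((m) => m.available).map((m) => m.name);
}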
@@ -1,6 +1,6 @@
 import { create } from "zustand";
 import { persist } from "zustand/middleware";
-import { DEFAULT_API_HOST, StoreKey } from "../constant";
+import { DEFAULT_API_HOST, DEFAULT_MODELS, StoreKey } from "../constant";
 import { getHeaders } from "../client/api";
 import { BOT_HELLO } from "./chat";
 import { getClientConfig } from "../config/client";
@@ -11,8 +11,10 @@ export interface AccessControlStore {
 
   needCode: boolean;
   hideUserApiKey: boolean;
-  openaiUrl: string;
   hideBalanceQuery: boolean;
+  disableGPT4: boolean;
+
+  openaiUrl: string;
 
   updateToken: (_: string) => void;
   updateCode: (_: string) => void;
@@ -35,8 +37,10 @@ export const useAccessStore = create<AccessControlStore>()(
       accessCode: "",
       needCode: true,
       hideUserApiKey: false,
-      openaiUrl: DEFAULT_OPENAI_URL,
      hideBalanceQuery: false,
+      disableGPT4: false,
+
+      openaiUrl: DEFAULT_OPENAI_URL,
 
      enabledAccessControl() {
        get().fetch();
@@ -75,8 +79,10 @@ export const useAccessStore = create<AccessControlStore>()(
           console.log("[Config] got config from server", res);
           set(() => ({ ...res }));
 
-          if ((res as any).botHello) {
-            BOT_HELLO.content = (res as any).botHello;
+          if (res.disableGPT4) {
+            DEFAULT_MODELS.forEach(
+              (m: any) => (m.available = !m.name.startsWith("gpt-4")),
+            );
           }
         })
         .catch(() => {
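The remaining hunks are in the access store (app/store/access.ts): DEFAULT_MODELS is now imported, openaiUrl is regrouped below a new disableGPT4 flag, and the server-config fetch marks every gpt-4* model unavailable when the server sets disableGPT4 (the botHello override that previously sat there appears to be dropped by the same hunk). A standalone sketch of the availability toggle, with local stand-ins for DEFAULT_MODELS and the response type; the names are assumed:

// Stand-ins for the app's constant and config type.
const DEFAULT_MODELS = [
  { name: "gpt-3.5-turbo", available: true },
  { name: "gpt-4", available: true },
  { name: "gpt-4-32k", available: true },
] as const;

interface DangerConfig {
  disableGPT4?: boolean;
}

function applyServerConfig(res: DangerConfig) {
  if (res.disableGPT4) {
    // The entries are typed readonly because of `as const`, so the
    // original code casts to `any` before mutating `available` at runtime.
    DEFAULT_MODELS.forEach(
      (m: any) => (m.available = !m.name.startsWith("gpt-4")),
    );
  }
}

applyServerConfig({ disableGPT4: true });
// Afterwards every gpt-4* entry reports available === false.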