Merge pull request #3230 from Yidadaa/bugfix-1112

Commit 22b6987249, authored by Yifei Zhang on 2023-11-12 00:52:21 +08:00 and committed by GitHub.
5 changed files with 31 additions and 21 deletions


@@ -216,9 +216,9 @@ If you want to disable parse settings from url, set this to 1.
 ### `CUSTOM_MODELS` (optional)
 
 > Default: Empty
-> Example: `+llama,+claude-2,-gpt-3.5-turbo` means add `llama, claude-2` to model list, and remove `gpt-3.5-turbo` from list.
+> Example: `+llama,+claude-2,-gpt-3.5-turbo,gpt-4-1106-preview:gpt-4-turbo` means add `llama, claude-2` to model list, and remove `gpt-3.5-turbo` from list, and display `gpt-4-1106-preview` as `gpt-4-turbo`.
 
-To control custom models, use `+` to add a custom model, use `-` to hide a model, separated by comma.
+To control custom models, use `+` to add a custom model, use `-` to hide a model, use `name:displayName` to customize model name, separated by comma.
 
 ## Requirements

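For illustration, a minimal TypeScript sketch of the three entry kinds documented above; the helper name `describeCustomModelEntry` and its return shape are assumptions for this example, not part of the project:

```ts
// Sketch only: interpret one comma-separated CUSTOM_MODELS entry per the rules above.
function describeCustomModelEntry(entry: string) {
  const available = !entry.startsWith("-"); // "-model" hides the model
  const raw = /^[+-]/.test(entry) ? entry.slice(1) : entry;
  const [name, displayName] = raw.split(":"); // "name:displayName" renames it in the UI
  return { name, displayName: displayName ?? name, available };
}

// describeCustomModelEntry("+llama")
//   -> { name: "llama", displayName: "llama", available: true }
// describeCustomModelEntry("-gpt-3.5-turbo")
//   -> { name: "gpt-3.5-turbo", displayName: "gpt-3.5-turbo", available: false }
// describeCustomModelEntry("gpt-4-1106-preview:gpt-4-turbo")
//   -> { name: "gpt-4-1106-preview", displayName: "gpt-4-turbo", available: true }
```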

@@ -122,9 +122,9 @@ Azure Api 版本,你可以在这里找到:[Azure 文档](https://learn.micro
 ### `CUSTOM_MODELS` (可选)
 
-> 示例:`+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo` 表示增加 `qwen-7b-chat` 和 `glm-6b` 到模型列表,而从列表中删除 `gpt-3.5-turbo`。
+> 示例:`+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo,gpt-4-1106-preview:gpt-4-turbo` 表示增加 `qwen-7b-chat` 和 `glm-6b` 到模型列表,而从列表中删除 `gpt-3.5-turbo`,并将 `gpt-4-1106-preview` 模型名字展示为 `gpt-4-turbo`。
 
-用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,用英文逗号隔开。
+用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名:展示名` 来自定义模型的展示名,用英文逗号隔开。
 
 ## 开发


@@ -81,7 +81,7 @@ export async function requestOpenai(req: NextRequest) {
       const jsonBody = JSON.parse(clonedBody) as { model?: string };
 
       // not undefined and is false
-      if (modelTable[jsonBody?.model ?? ""] === false) {
+      if (modelTable[jsonBody?.model ?? ""].available === false) {
         return NextResponse.json(
           {
             error: true,

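One thing worth noting about the check above: it reads `.available` directly off the table entry, so it assumes the requested model name is present in the table. A hedged sketch of an equivalent rejection check that also tolerates unknown names; the `ModelTable` alias and `shouldReject` helper are illustrative, not something this commit adds:

```ts
// Sketch only: reject when the requested model is known and explicitly marked unavailable.
type ModelTable = Record<string, { name: string; displayName: string; available: boolean }>;

function shouldReject(modelTable: ModelTable, requestedModel: string | undefined): boolean {
  // Optional chaining keeps an unknown model name from throwing on the lookup.
  return modelTable[requestedModel ?? ""]?.available === false;
}
```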

@@ -433,7 +433,7 @@ export function ChatActions(props: {
   const currentModel = chatStore.currentSession().mask.modelConfig.model;
   const allModels = useAllModels();
   const models = useMemo(
-    () => allModels.filter((m) => m.available).map((m) => m.name),
+    () => allModels.filter((m) => m.available),
     [allModels],
   );
   const [showModelSelector, setShowModelSelector] = useState(false);
@@ -441,9 +441,9 @@ export function ChatActions(props: {
   useEffect(() => {
     // if current model is not available
     // switch to first available model
-    const isUnavaliableModel = !models.includes(currentModel);
+    const isUnavaliableModel = !models.some((m) => m.name === currentModel);
     if (isUnavaliableModel && models.length > 0) {
-      const nextModel = models[0] as ModelType;
+      const nextModel = models[0].name as ModelType;
       chatStore.updateCurrentSession(
         (session) => (session.mask.modelConfig.model = nextModel),
       );
@@ -531,8 +531,8 @@ export function ChatActions(props: {
         <Selector
           defaultSelectedValue={currentModel}
           items={models.map((m) => ({
-            title: m,
-            value: m,
+            title: m.displayName,
+            value: m.name,
           }))}
           onClose={() => setShowModelSelector(false)}
           onSelection={(s) => {

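After this change `models` holds full model objects rather than plain name strings; a small sketch of the shape the component relies on, with field names inferred from the diff (the `SelectableModel` alias and `toSelectorItem` helper are illustrative, not in the codebase):

```ts
// Assumed shape, inferred from the fields the component reads above.
type SelectableModel = {
  name: string; // stored in the session config and sent to the API
  displayName: string; // shown as the title in the model selector
  available: boolean; // filtered on before the list is built
};

// The <Selector /> items pair the display name with the real model name.
const toSelectorItem = (m: SelectableModel) => ({
  title: m.displayName,
  value: m.name,
});
```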

@@ -4,21 +4,34 @@ export function collectModelTable(
   models: readonly LLMModel[],
   customModels: string,
 ) {
-  const modelTable: Record<string, boolean> = {};
+  const modelTable: Record<
+    string,
+    { available: boolean; name: string; displayName: string }
+  > = {};
   // default models
-  models.forEach((m) => (modelTable[m.name] = m.available));
+  models.forEach(
+    (m) =>
+      (modelTable[m.name] = {
+        ...m,
+        displayName: m.name,
+      }),
+  );
 
   // server custom models
   customModels
     .split(",")
     .filter((v) => !!v && v.length > 0)
     .map((m) => {
-      if (m.startsWith("+")) {
-        modelTable[m.slice(1)] = true;
-      } else if (m.startsWith("-")) {
-        modelTable[m.slice(1)] = false;
-      } else modelTable[m] = true;
+      const available = !m.startsWith("-");
+      const nameConfig =
+        m.startsWith("+") || m.startsWith("-") ? m.slice(1) : m;
+      const [name, displayName] = nameConfig.split(":");
+      modelTable[name] = {
+        name,
+        displayName: displayName || name,
+        available,
+      };
     });
 
   return modelTable;
 }
@@ -31,10 +44,7 @@ export function collectModels(
   customModels: string,
 ) {
   const modelTable = collectModelTable(models, customModels);
-  const allModels = Object.keys(modelTable).map((m) => ({
-    name: m,
-    available: modelTable[m],
-  }));
+  const allModels = Object.values(modelTable);
 
   return allModels;
 }
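Finally, a short usage sketch of the reworked helpers, assuming an `LLMModel` with at least `name` and `available` fields; the sample model names and the `CUSTOM_MODELS` string are illustrative:

```ts
// Illustrative input: two built-in models plus a server-side CUSTOM_MODELS string.
const builtIn = [
  { name: "gpt-3.5-turbo", available: true },
  { name: "gpt-4", available: true },
];
const custom = "-gpt-3.5-turbo,+llama,gpt-4:gpt-4-classic";

const table = collectModelTable(builtIn, custom);
// table["gpt-3.5-turbo"] -> { name: "gpt-3.5-turbo", displayName: "gpt-3.5-turbo", available: false }
// table["llama"]         -> { name: "llama", displayName: "llama", available: true }
// table["gpt-4"]         -> { name: "gpt-4", displayName: "gpt-4-classic", available: true }

const allModels = collectModels(builtIn, custom);
// allModels is Object.values(table): one entry per model, each carrying name, displayName, and available.
```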