Merge pull request #2241 from Yidadaa/bugfix-0704

feat: add top p config
Yifei Zhang authored 2023-07-04 00:42:41 +08:00, committed by GitHub
commit 78ed24dbf6
7 changed files with 39 additions and 5 deletions

app/client/platforms/openai.ts

@@ -50,6 +50,7 @@ export class ChatGPTApi implements LLMApi {
       temperature: modelConfig.temperature,
       presence_penalty: modelConfig.presence_penalty,
       frequency_penalty: modelConfig.frequency_penalty,
+      top_p: modelConfig.top_p,
     };

     console.log("[Request] openai payload: ", requestPayload);

app/components/chat.tsx

@@ -888,7 +888,8 @@ export function Chat() {
             const showActions =
               !isUser &&
               i > 0 &&
-              !(message.preview || message.content.length === 0);
+              !(message.preview || message.content.length === 0) &&
+              i >= context.length; // do not show actions for context prompts
             const showTyping = message.preview || message.streaming;

             const shouldShowClearContextDivider = i === clearContextIndex - 1;
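
The new guard works because the rendered list places the mask's context prompts ahead of the session's own messages, so indices below context.length belong to context prompts. A reduced sketch of that index math (the message and list shapes are assumptions):

    interface RenderedMessage { isUser?: boolean; preview?: boolean; content: string; }

    // Assumed layout: context prompts first, then real session messages.
    function showActionsAt(
      context: RenderedMessage[],
      rendered: RenderedMessage[],
      i: number,
    ): boolean {
      const m = rendered[i];
      return (
        !m.isUser &&
        i > 0 &&
        !(m.preview || m.content.length === 0) &&
        i >= context.length // context prompts get no action bar
      );
    }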

app/components/model-config.tsx

@@ -48,6 +48,25 @@ export function ModelConfigList(props: {
           }}
         ></InputRange>
       </ListItem>
+      <ListItem
+        title={Locale.Settings.TopP.Title}
+        subTitle={Locale.Settings.TopP.SubTitle}
+      >
+        <InputRange
+          value={(props.modelConfig.top_p ?? 1).toFixed(1)}
+          min="0"
+          max="1"
+          step="0.1"
+          onChange={(e) => {
+            props.updateConfig(
+              (config) =>
+                (config.top_p = ModalConfigValidator.top_p(
+                  e.currentTarget.valueAsNumber,
+                )),
+            );
+          }}
+        ></InputRange>
+      </ListItem>
       <ListItem
         title={Locale.Settings.MaxTokens.Title}
         subTitle={Locale.Settings.MaxTokens.SubTitle}
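
Every slider change funnels through ModalConfigValidator.top_p before it reaches the store, and the updater callback receives a draft config that it mutates in place. A reduced sketch of that contract (the copy-and-commit semantics are an assumption; the real store is zustand-backed):

    let currentConfig = { temperature: 0.5, top_p: 1 }; // assumed defaults

    function updateConfig(updater: (config: typeof currentConfig) => void): void {
      const draft = { ...currentConfig }; // shallow copy, then commit
      updater(draft);                     // e.g. draft.top_p = ModalConfigValidator.top_p(0.9)
      currentConfig = draft;
    }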

app/locales/cn.ts

@@ -214,6 +214,10 @@ const cn = {
       Title: "随机性 (temperature)",
       SubTitle: "值越大,回复越随机",
     },
+    TopP: {
+      Title: "核采样 (top_p)", // "Nucleus sampling (top_p)"
+      SubTitle: "与随机性类似,但不要和随机性一起更改", // "Similar to temperature; do not change both at once"
+    },
     MaxTokens: {
       Title: "单次回复限制 (max_tokens)",
       SubTitle: "单次交互所用的最大 Token 数",

app/locales/en.ts

@@ -215,6 +215,10 @@ const en: LocaleType = {
       Title: "Temperature",
       SubTitle: "A larger value makes the more random output",
     },
+    TopP: {
+      Title: "Top P",
+      SubTitle: "Do not alter this value together with temperature",
+    },
     MaxTokens: {
       Title: "Max Tokens",
       SubTitle: "Maximum length of input tokens and generated tokens",

@@ -249,7 +253,7 @@ const en: LocaleType = {
     },
     Context: {
       Toast: (x: any) => `With ${x} contextual prompts`,
-      Edit: "Contextual and Memory Prompts",
+      Edit: "Current Chat Settings",
       Add: "Add a Prompt",
       Clear: "Context Cleared",
       Revert: "Revert",

app/store/config.ts

@@ -33,6 +33,7 @@ export const DEFAULT_CONFIG = {
   modelConfig: {
     model: "gpt-3.5-turbo" as ModelType,
     temperature: 0.5,
+    top_p: 1,
     max_tokens: 2000,
     presence_penalty: 0,
     frequency_penalty: 0,
@@ -158,6 +159,9 @@ export const ModalConfigValidator = {
   temperature(x: number) {
     return limitNumber(x, 0, 1, 1);
   },
+  top_p(x: number) {
+    return limitNumber(x, 0, 1, 1);
+  },
 };

 export const useAppConfig = create<ChatConfigStore>()(
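
Both validators clamp through limitNumber. A sketch of that helper's assumed behavior (value, bounds, and a fallback for non-numeric input):

    function limitNumber(x: number, min: number, max: number, defaultValue: number): number {
      if (typeof x !== "number" || isNaN(x)) return defaultValue; // guard bad input
      return Math.min(max, Math.max(min, x)); // clamp into [min, max]
    }

    // limitNumber(1.4, 0, 1, 1) === 1
    // limitNumber(-0.2, 0, 1, 1) === 0
    // limitNumber(NaN, 0, 1, 1) === 1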
@@ -177,15 +181,16 @@ export const useAppConfig = create<ChatConfigStore>()(
     }),
     {
       name: StoreKey.Config,
-      version: 3.2,
+      version: 3.3,

       migrate(persistedState, version) {
-        if (version === 3.2) return persistedState as any;
+        if (version === 3.3) return persistedState as any;

         const state = persistedState as ChatConfig;
         state.modelConfig.sendMemory = true;
         state.modelConfig.historyMessageCount = 4;
         state.modelConfig.compressMessageLengthThreshold = 1000;
         state.modelConfig.frequency_penalty = 0;
+        state.modelConfig.top_p = 1;
         state.modelConfig.template = DEFAULT_INPUT_TEMPLATE;
         state.dontShowMaskSplashScreen = false;
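
The version bump is what triggers the backfill: a config persisted by an older client fails the early-return check, so the assignments run and the new field picks up its default. A hypothetical direct call to illustrate:

    // Hypothetical v3.2 state saved before top_p existed.
    const persisted = { modelConfig: { temperature: 0.5, frequency_penalty: 0 } };
    const migrated = migrate(persisted, 3.2) as ChatConfig; // 3.2 !== 3.3, so migration runs
    console.log(migrated.modelConfig.top_p); // -> 1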

app/store/prompt.ts

@@ -127,7 +127,7 @@ export const usePromptStore = create<PromptStore>()(
     search(text) {
       if (text.length === 0) {
         // return all prompts
-        return SearchService.allPrompts.concat([...get().getUserPrompts()]);
+        return get().getUserPrompts().concat(SearchService.builtinPrompts);
       }
       return SearchService.search(text) as Prompt[];
     },
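
Behaviorally, the rewritten empty-query branch lists user-defined prompts ahead of the built-in ones instead of the reverse. A self-contained sketch of the new ordering (the Prompt shape and search index are assumptions):

    interface Prompt { id: string; title: string; content: string; }

    function searchPrompts(
      text: string,
      userPrompts: Prompt[],
      builtinPrompts: Prompt[],
      index: (q: string) => Prompt[], // assumed full-text index, cf. SearchService.search
    ): Prompt[] {
      if (text.length === 0) {
        return userPrompts.concat(builtinPrompts); // user prompts first
      }
      return index(text);
    }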