feat: won't send max_tokens

Yifei Zhang 2023-11-09 20:45:25 +08:00 committed by GitHub
parent 3b3ebda34b
commit fd2f441e02


@@ -70,7 +70,8 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: modelConfig.presence_penalty,
       frequency_penalty: modelConfig.frequency_penalty,
       top_p: modelConfig.top_p,
-      max_tokens: Math.max(modelConfig.max_tokens, 1024),
+      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
+      // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
     console.log("[Request] openai payload: ", requestPayload);