+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/app/requests.ts b/app/requests.ts
index 3cb838e6..9159f1cf 100644
--- a/app/requests.ts
+++ b/app/requests.ts
@@ -1,5 +1,11 @@
import type { ChatRequest, ChatResponse } from "./api/openai/typing";
-import { Message, ModelConfig, useAccessStore, useChatStore } from "./store";
+import {
+ Message,
+ ModelConfig,
+ ModelType,
+ useAccessStore,
+ useChatStore,
+} from "./store";
import { showToast } from "./components/ui-lib";
const TIME_OUT_MS = 60000;
@@ -9,6 +15,7 @@ const makeRequestParam = (
options?: {
filterBot?: boolean;
stream?: boolean;
+ model?: ModelType;
},
): ChatRequest => {
let sendMessages = messages.map((v) => ({
@@ -26,6 +33,11 @@ const makeRequestParam = (
// @ts-expect-error
delete modelConfig.max_tokens;
+ // override model config
+ if (options?.model) {
+ modelConfig.model = options.model;
+ }
+
return {
messages: sendMessages,
stream: options?.stream,
@@ -50,7 +62,7 @@ function getHeaders() {
export function requestOpenaiClient(path: string) {
return (body: any, method = "POST") =>
- fetch("/api/openai?_vercel_no_cache=1", {
+ fetch("/api/openai", {
method,
headers: {
"Content-Type": "application/json",
@@ -61,8 +73,16 @@ export function requestOpenaiClient(path: string) {
});
}
-export async function requestChat(messages: Message[]) {
- const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
+export async function requestChat(
+ messages: Message[],
+ options?: {
+ model?: ModelType;
+ },
+) {
+ const req: ChatRequest = makeRequestParam(messages, {
+ filterBot: true,
+ model: options?.model,
+ });
const res = await requestOpenaiClient("v1/chat/completions")(req);
@@ -204,7 +224,13 @@ export async function requestChatStream(
}
}
-export async function requestWithPrompt(messages: Message[], prompt: string) {
+export async function requestWithPrompt(
+ messages: Message[],
+ prompt: string,
+ options?: {
+ model?: ModelType;
+ },
+) {
messages = messages.concat([
{
role: "user",
@@ -213,7 +239,7 @@ export async function requestWithPrompt(messages: Message[], prompt: string) {
},
]);
- const res = await requestChat(messages);
+ const res = await requestChat(messages, options);
return res?.choices?.at(0)?.message?.content ?? "";
}
diff --git a/app/store/app.ts b/app/store/app.ts
index 8d875fee..fe2a07da 100644
--- a/app/store/app.ts
+++ b/app/store/app.ts
@@ -17,6 +17,7 @@ export type Message = ChatCompletionResponseMessage & {
streaming?: boolean;
isError?: boolean;
id?: number;
+ model?: ModelType;
};
export function createMessage(override: Partial<Message>): Message {
@@ -58,7 +59,7 @@ export interface ChatConfig {
disablePromptHint: boolean;
modelConfig: {
- model: string;
+ model: ModelType;
temperature: number;
max_tokens: number;
presence_penalty: number;
@@ -96,7 +97,9 @@ export const ALL_MODELS = [
name: "gpt-3.5-turbo-0301",
available: true,
},
-];
+] as const;
+
+export type ModelType = (typeof ALL_MODELS)[number]["name"];
export function limitNumber(
x: number,
@@ -119,7 +122,7 @@ export function limitModel(name: string) {
export const ModalConfigValidator = {
model(x: string) {
- return limitModel(x);
+ return limitModel(x) as ModelType;
},
max_tokens(x: number) {
return limitNumber(x, 0, 32000, 2000);
@@ -387,6 +390,7 @@ export const useChatStore = create<ChatStore>()(
role: "assistant",
streaming: true,
id: userMessage.id! + 1,
+ model: get().config.modelConfig.model,
});
// get recent messages
@@ -531,14 +535,14 @@ export const useChatStore = create<ChatStore>()(
session.topic === DEFAULT_TOPIC &&
countMessages(session.messages) >= SUMMARIZE_MIN_LEN
) {
- requestWithPrompt(session.messages, Locale.Store.Prompt.Topic).then(
- (res) => {
- get().updateCurrentSession(
- (session) =>
- (session.topic = res ? trimTopic(res) : DEFAULT_TOPIC),
- );
- },
- );
+ requestWithPrompt(session.messages, Locale.Store.Prompt.Topic, {
+ model: "gpt-3.5-turbo",
+ }).then((res) => {
+ get().updateCurrentSession(
+ (session) =>
+ (session.topic = res ? trimTopic(res) : DEFAULT_TOPIC),
+ );
+ });
}
const config = get().config;
diff --git a/app/utils.ts b/app/utils.ts
index 0e4a8eae..9d6e9062 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -1,4 +1,5 @@
import { EmojiStyle } from "emoji-picker-react";
+import { useEffect, useState } from "react";
import { showToast } from "./components/ui-lib";
import Locale from "./locales";
@@ -47,6 +48,23 @@ export function isIOS() {
return /iphone|ipad|ipod/.test(userAgent);
}
+export function useMobileScreen() {
+ const [isMobileScreen_, setIsMobileScreen] = useState(false);
+ useEffect(() => {
+ const onResize = () => {
+ setIsMobileScreen(isMobileScreen());
+ };
+
+ window.addEventListener("resize", onResize);
+
+ return () => {
+ window.removeEventListener("resize", onResize);
+ };
+ }, []);
+
+ return isMobileScreen_;
+}
+
export function isMobileScreen() {
return window.innerWidth <= 600;
}