diff --git a/src/CHATGPT_API_WEB_VERSION.ts b/src/CHATGPT_API_WEB_VERSION.ts
index ec86328..a7d5d37 100644
--- a/src/CHATGPT_API_WEB_VERSION.ts
+++ b/src/CHATGPT_API_WEB_VERSION.ts
@@ -1,3 +1,3 @@
-const CHATGPT_API_WEB_VERSION = "v1.3.0";
+const CHATGPT_API_WEB_VERSION = "v1.4.0";
 export default CHATGPT_API_WEB_VERSION;
diff --git a/src/app.tsx b/src/app.tsx
index 9aa7ff8..f97be86 100644
--- a/src/app.tsx
+++ b/src/app.tsx
@@ -27,6 +27,11 @@ export interface ChatStore {
   model: string;
   responseModelName: string;
   cost: number;
+  temperature: number;
+  top_p: number;
+  presence_penalty: number;
+  frequency_penalty: number;
+  develop_mode: boolean;
 }
 
 const _defaultAPIEndpoint = "https://api.openai.com/v1/chat/completions";
@@ -35,7 +40,9 @@ const newChatStore = (
   systemMessageContent = "Follow my instructions carefully",
   apiEndpoint = _defaultAPIEndpoint,
   streamMode = true,
-  model = "gpt-3.5-turbo-0613"
+  model = "gpt-3.5-turbo-0613",
+  temperature = 1.0,
+  dev = false
 ): ChatStore => {
   return {
     chatgpt_api_web_version: CHATGPT_API_WEB_VERSION,
@@ -51,6 +58,11 @@ const newChatStore = (
     model: getDefaultParams("model", model),
     responseModelName: "",
     cost: 0,
+    temperature: getDefaultParams("temp", temperature),
+    top_p: 1,
+    presence_penalty: 0,
+    frequency_penalty: 0,
+    develop_mode: getDefaultParams("dev", dev),
   };
 };
diff --git a/src/chatbox.tsx b/src/chatbox.tsx
index ceebe02..47c93e9 100644
--- a/src/chatbox.tsx
+++ b/src/chatbox.tsx
@@ -32,6 +32,19 @@ export default function ChatBOX(props: {
   const client = new ChatGPT(chatStore.apiKey);
 
+  const update_total_tokens = () => {
+    // manually estimate token
+    client.total_tokens = calculate_token_length(
+      chatStore.systemMessageContent
+    );
+    for (const msg of chatStore.history
+      .filter(({ hide }) => !hide)
+      .slice(chatStore.postBeginIndex)) {
+      client.total_tokens += msg.token;
+    }
+    chatStore.totalTokens = client.total_tokens;
+  };
+
   const _completeWithStreamMode = async (response: Response) => {
     chatStore.streamMode = true;
     // call api, return reponse text
@@ -121,14 +134,7 @@ export default function ChatBOX(props: {
     // manually copy status from client to chatStore
     chatStore.maxTokens = client.max_tokens;
     chatStore.tokenMargin = client.tokens_margin;
-    // manually estimate token
-    client.total_tokens = 0;
-    for (const msg of chatStore.history
-      .filter(({ hide }) => !hide)
-      .slice(chatStore.postBeginIndex)) {
-      client.total_tokens += msg.token;
-    }
-    chatStore.totalTokens = client.total_tokens;
+    update_total_tokens();
     setChatStore({ ...chatStore });
     setGeneratingMessage("");
     setShowGenerating(false);
@@ -407,6 +413,42 @@ export default function ChatBOX(props: {
       >
         Send
       </button>
+      {chatStore.develop_mode && (
+      )}
+      {chatStore.develop_mode && (
+      )}
   );
diff --git a/src/getDefaultParam.ts b/src/getDefaultParam.ts
index 789bdb7..2f5ff89 100644
--- a/src/getDefaultParam.ts
+++ b/src/getDefaultParam.ts
@@ -7,10 +7,12 @@ function getDefaultParams(param: any, val: any) {
   if (typeof val === "string") {
     return get ?? val;
   } else if (typeof val === "number") {
-    return parseInt(get ?? `${val}`);
+    return parseFloat(get ?? `${val}`);
   } else if (typeof val === "boolean") {
     if (get === "stream") return true;
     if (get === "fetch") return false;
+    if (get === "true") return true;
+    if (get === "false") return false;
     return val;
   }
 }
diff --git a/src/message.tsx b/src/message.tsx
index b3e15e5..7c38f07 100644
--- a/src/message.tsx
+++ b/src/message.tsx
@@ -51,19 +51,59 @@ export default function Message(props: Props) {
         chat.role === "assistant" ? "justify-start" : "justify-end"
       }`}
     >
-      {chat.hide
-        ? chat.content.split("\n")[0].slice(0, 16) + "... (deleted)"
-        : chat.content}
+      {chat.hide
+        ? chat.content.split("\n")[0].slice(0, 16) + "... (deleted)"
+        : chat.content}
+      )}
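
The updated getDefaultParams seeds the new temperature and develop_mode defaults, so both can be overridden from the page URL. Below is a minimal standalone sketch of that resolution logic, assuming the raw value is read from the query string (the lookup source is not part of this diff, and the existing "stream"/"fetch" special cases are omitted); the function name and URL values are illustrative only.

// Standalone sketch (not the project's module): mirrors the updated
// getDefaultParams branches, assuming the raw value comes from the
// page's query string, e.g. https://example.com/?temp=0.7&dev=true
function resolveParam(param: string, val: string | number | boolean) {
  const get = new URLSearchParams(window.location.search).get(param);
  if (typeof val === "string") return get ?? val;
  // parseFloat (instead of parseInt) so fractional values like "0.7" survive
  if (typeof val === "number") return parseFloat(get ?? `${val}`);
  // the new explicit "true"/"false" branches for boolean flags such as dev
  if (get === "true") return true;
  if (get === "false") return false;
  return val;
}

// With ?temp=0.7&dev=true present:
//   resolveParam("temp", 1.0)  -> 0.7
//   resolveParam("dev", false) -> true
// Without those query parameters, the supplied defaults (1.0, false) are kept.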