From 3151fb84771796c07b5a0560d19d16cac0e318b0 Mon Sep 17 00:00:00 2001 From: heimoshuiyu Date: Tue, 21 Jan 2025 05:31:37 +0800 Subject: [PATCH 1/5] add options to enable/disable presence/frequency penalty --- src/components/Settings.tsx | 24 ++++++++++++++++++++++++ src/components/navbar.tsx | 13 +++++++++---- src/types/chatstore.ts | 2 ++ src/types/newChatstore.ts | 4 ++++ 4 files changed, 39 insertions(+), 4 deletions(-) diff --git a/src/components/Settings.tsx b/src/components/Settings.tsx index c0ea76d..5ba5982 100644 --- a/src/components/Settings.tsx +++ b/src/components/Settings.tsx @@ -406,6 +406,30 @@ const Number = (props: { }} /> )} + + {props.field === "presence_penalty" && ( + { + const newChatStore = { ...chatStore }; + newChatStore.presence_penalty_enabled = + !newChatStore.presence_penalty_enabled; + setChatStore({ ...newChatStore }); + }} + /> + )} + + {props.field === "frequency_penalty" && ( + { + const newChatStore = { ...chatStore }; + newChatStore.frequency_penalty_enabled = + !newChatStore.frequency_penalty_enabled; + setChatStore({ ...newChatStore }); + }} + /> + )} { Price:{" "} - {models[chatStore.model]?.price?.prompt * 1000 * 1000}$ - / 1M input tokens + {models[chatStore.model]?.price?.prompt * + 1000 * + 1000}$ / 1M input tokens - Total: {getTotalCost().toFixed(2)}$ + Total: {getTotalCost().toFixed( + 2 + )}$ @@ -112,7 +115,9 @@ const Navbar: React.FC = () => { - {chatStore.postBeginIndex} / {chatStore.history.length} + { + chatStore.postBeginIndex + } / {chatStore.history.length} diff --git a/src/types/chatstore.ts b/src/types/chatstore.ts index e3c4f4c..599a451 100644 --- a/src/types/chatstore.ts +++ b/src/types/chatstore.ts @@ -26,7 +26,9 @@ export interface ChatStore { top_p: number; top_p_enabled: boolean; presence_penalty: number; + presence_penalty_enabled: boolean; frequency_penalty: number; + frequency_penalty_enabled: boolean; develop_mode: boolean; whisper_api: string; whisper_key: string; diff --git a/src/types/newChatstore.ts b/src/types/newChatstore.ts index 53350e5..db6e7e5 100644 --- a/src/types/newChatstore.ts +++ b/src/types/newChatstore.ts @@ -18,7 +18,9 @@ interface NewChatStoreOptions { top_p?: number; top_p_enabled?: boolean; presence_penalty?: number; + presence_penalty_enabled?: boolean; frequency_penalty?: number; + frequency_penalty_enabled?: boolean; dev?: boolean; whisper_api?: string; whisper_key?: string; @@ -67,7 +69,9 @@ export const newChatStore = (options: NewChatStoreOptions): ChatStore => { top_p: options.top_p ?? 1, top_p_enabled: options.top_p_enabled ?? false, presence_penalty: options.presence_penalty ?? 0, + presence_penalty_enabled: options.presence_penalty_enabled ?? false, frequency_penalty: options.frequency_penalty ?? 0, + frequency_penalty_enabled: options.frequency_penalty_enabled ?? false, develop_mode: getDefaultParams("dev", options.dev ?? 
false), whisper_api: getDefaultParams( "whisper-api", From edcdc70a2b7c143901cb4000e69436c308de1118 Mon Sep 17 00:00:00 2001 From: heimoshuiyu Date: Tue, 21 Jan 2025 05:35:05 +0800 Subject: [PATCH 2/5] fix: enable/disable penalty in chatgpt.ts --- src/chatgpt.ts | 14 ++++++++++++-- src/pages/Chatbox.tsx | 2 ++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/chatgpt.ts b/src/chatgpt.ts index d41705d..67cb57e 100644 --- a/src/chatgpt.ts +++ b/src/chatgpt.ts @@ -162,7 +162,9 @@ class Chat { top_p: number; enable_top_p: boolean; presence_penalty: number; + presence_penalty_enabled: boolean; frequency_penalty: number; + frequency_penalty_enabled: boolean; json_mode: boolean; constructor( @@ -181,7 +183,9 @@ class Chat { top_p = 1, enable_top_p = false, presence_penalty = 0, + presence_penalty_enabled = false, frequency_penalty = 0, + frequency_penalty_enabled = false, json_mode = false, } = {} ) { @@ -201,7 +205,9 @@ class Chat { this.top_p = top_p; this.enable_top_p = enable_top_p; this.presence_penalty = presence_penalty; + this.presence_penalty_enabled = presence_penalty_enabled this.frequency_penalty = frequency_penalty; + this.frequency_penalty_enabled = frequency_penalty_enabled; this.json_mode = json_mode; } @@ -239,8 +245,6 @@ class Chat { model: this.model, messages, stream, - presence_penalty: this.presence_penalty, - frequency_penalty: this.frequency_penalty, }; if (stream) { body["stream_options"] = { @@ -256,6 +260,12 @@ class Chat { if (this.enable_max_gen_tokens) { body["max_tokens"] = this.max_gen_tokens; } + if (this.presence_penalty_enabled) { + body["presence_penalty"] = this.presence_penalty; + } + if (this.frequency_penalty_enabled) { + body["frequency_penalty"] = this.frequency_penalty; + } if (this.json_mode) { body["response_format"] = { type: "json_object", diff --git a/src/pages/Chatbox.tsx b/src/pages/Chatbox.tsx index 9a4bb29..20fec3e 100644 --- a/src/pages/Chatbox.tsx +++ b/src/pages/Chatbox.tsx @@ -238,7 +238,9 @@ export default function ChatBOX() { client.top_p = chatStore.top_p; client.enable_top_p = chatStore.top_p_enabled; client.frequency_penalty = chatStore.frequency_penalty; + client.frequency_penalty_enabled = chatStore.frequency_penalty_enabled; client.presence_penalty = chatStore.presence_penalty; + client.presence_penalty_enabled = chatStore.presence_penalty_enabled; client.json_mode = chatStore.json_mode; client.messages = chatStore.history // only copy non hidden message From 6b78308bb548b7e3566dffb1e32aabb6a8b7cc6e Mon Sep 17 00:00:00 2001 From: heimoshuiyu Date: Tue, 21 Jan 2025 09:51:39 +0800 Subject: [PATCH 3/5] feat: add maxTokens option to newChatStore --- src/chatgpt.ts | 2 +- src/types/newChatstore.ts | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/chatgpt.ts b/src/chatgpt.ts index 67cb57e..e1b04cf 100644 --- a/src/chatgpt.ts +++ b/src/chatgpt.ts @@ -205,7 +205,7 @@ class Chat { this.top_p = top_p; this.enable_top_p = enable_top_p; this.presence_penalty = presence_penalty; - this.presence_penalty_enabled = presence_penalty_enabled + this.presence_penalty_enabled = presence_penalty_enabled; this.frequency_penalty = frequency_penalty; this.frequency_penalty_enabled = frequency_penalty_enabled; this.json_mode = json_mode; diff --git a/src/types/newChatstore.ts b/src/types/newChatstore.ts index db6e7e5..0501152 100644 --- a/src/types/newChatstore.ts +++ b/src/types/newChatstore.ts @@ -35,6 +35,7 @@ interface NewChatStoreOptions { image_gen_key?: string; json_mode?: boolean; logprobs?: boolean; + 
maxTokens?: number; } export const newChatStore = (options: NewChatStoreOptions): ChatStore => { @@ -52,7 +53,9 @@ export const newChatStore = (options: NewChatStoreOptions): ChatStore => { maxTokens: getDefaultParams( "max", models[getDefaultParams("model", options.model ?? DefaultModel)] - ?.maxToken ?? 2048 + ?.maxToken ?? + options.maxTokens ?? + 2048 ), maxGenTokens: 2048, maxGenTokens_enabled: false, From 7aee52d5a2d7c7b9edc0499fff5b1cbd60db5455 Mon Sep 17 00:00:00 2001 From: heimoshuiyu Date: Wed, 22 Jan 2025 18:46:37 +0800 Subject: [PATCH 4/5] save reasoning_content --- src/chatgpt.ts | 2 ++ src/components/ImageGenDrawer.tsx | 1 + src/pages/AddToolMsg.tsx | 1 + src/pages/Chatbox.tsx | 20 ++++++++++++++++++-- src/types/chatstore.ts | 1 + 5 files changed, 23 insertions(+), 2 deletions(-) diff --git a/src/chatgpt.ts b/src/chatgpt.ts index e1b04cf..035f8d7 100644 --- a/src/chatgpt.ts +++ b/src/chatgpt.ts @@ -22,6 +22,7 @@ export interface ToolCall { export interface Message { role: "system" | "user" | "assistant" | "tool"; content: string | MessageDetail[]; + reasoning_content?: string | null; name?: "example_user" | "example_assistant"; tool_calls?: ToolCall[]; tool_call_id?: string; @@ -30,6 +31,7 @@ export interface Message { interface Delta { role?: string; content?: string; + reasoning_content?: string; tool_calls?: ToolCall[]; } diff --git a/src/components/ImageGenDrawer.tsx b/src/components/ImageGenDrawer.tsx index 0414c6b..0efc2de 100644 --- a/src/components/ImageGenDrawer.tsx +++ b/src/components/ImageGenDrawer.tsx @@ -208,6 +208,7 @@ export function ImageGenDrawer({ disableFactor }: Props) { audio: null, logprobs: null, response_model_name: imageGenModel, + reasoning_content: null, }); setChatStore({ ...chatStore }); diff --git a/src/pages/AddToolMsg.tsx b/src/pages/AddToolMsg.tsx index 71aec42..9ade051 100644 --- a/src/pages/AddToolMsg.tsx +++ b/src/pages/AddToolMsg.tsx @@ -74,6 +74,7 @@ const AddToolMsg = (props: { audio: null, logprobs: null, response_model_name: null, + reasoning_content: null, }); setChatStore({ ...chatStore }); setNewToolCallID(""); diff --git a/src/pages/Chatbox.tsx b/src/pages/Chatbox.tsx index 20fec3e..3de25ab 100644 --- a/src/pages/Chatbox.tsx +++ b/src/pages/Chatbox.tsx @@ -82,6 +82,7 @@ export default function ChatBOX() { ): Promise => { let responseTokenCount = 0; const allChunkMessage: string[] = []; + const allReasoningContentChunk: string[] = []; const allChunkTool: ToolCall[] = []; setShowGenerating(true); const logprobs: Logprobs = { @@ -110,7 +111,13 @@ export default function ChatBOX() { console.log(c?.delta?.content, logprob); } - allChunkMessage.push(c?.delta?.content ?? ""); + if (c?.delta?.content) { + allChunkMessage.push(c?.delta?.content ?? ""); + } + if (c?.delta?.reasoning_content) { + allReasoningContentChunk.push(c?.delta?.reasoning_content ?? ""); + } + const tool_calls = c?.delta?.tool_calls; if (tool_calls) { for (const tool_call of tool_calls) { @@ -142,7 +149,12 @@ export default function ChatBOX() { } } setGeneratingMessage( - allChunkMessage.join("") + + (allReasoningContentChunk.length + ? 
"----------\nreasoning:\n" + + allReasoningContentChunk.join("") + + "\n----------\n" + : "") + + allChunkMessage.join("") + allChunkTool.map((tool) => { return `Tool Call ID: ${tool.id}\nType: ${tool.type}\nFunction: ${tool.function.name}\nArguments: ${tool.function.arguments}`; }) @@ -150,11 +162,13 @@ export default function ChatBOX() { } setShowGenerating(false); const content = allChunkMessage.join(""); + const reasoning_content = allReasoningContentChunk.join(""); console.log("save logprobs", logprobs); const newMsg: ChatStoreMessage = { role: "assistant", content, + reasoning_content, hide: false, token: responseTokenCount, example: false, @@ -211,6 +225,7 @@ export default function ChatBOX() { audio: null, logprobs: data.choices[0]?.logprobs, response_model_name: data.model, + reasoning_content: data.choices[0]?.message?.reasoning_content ?? null, }); setShowGenerating(false); @@ -375,6 +390,7 @@ export default function ChatBOX() { audio: null, logprobs: null, response_model_name: null, + reasoning_content: null, }); // manually calculate token length diff --git a/src/types/chatstore.ts b/src/types/chatstore.ts index 599a451..387986b 100644 --- a/src/types/chatstore.ts +++ b/src/types/chatstore.ts @@ -74,6 +74,7 @@ export interface ChatStoreMessage { role: "system" | "user" | "assistant" | "tool"; content: string | MessageDetail[]; + reasoning_content: string | null; name?: "example_user" | "example_assistant"; tool_calls?: ToolCall[]; tool_call_id?: string; From 0b3610935b0bf72647c7f4b104ef5e98027efc85 Mon Sep 17 00:00:00 2001 From: heimoshuiyu Date: Wed, 22 Jan 2025 19:19:48 +0800 Subject: [PATCH 5/5] fix: chatStore total_tokens count with reasoning --- src/pages/Chatbox.tsx | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/pages/Chatbox.tsx b/src/pages/Chatbox.tsx index 3de25ab..9f40b6b 100644 --- a/src/pages/Chatbox.tsx +++ b/src/pages/Chatbox.tsx @@ -80,7 +80,7 @@ export default function ChatBOX() { const _completeWithStreamMode = async ( response: Response ): Promise => { - let responseTokenCount = 0; + let responseTokenCount = 0; // including reasoning content and normal content const allChunkMessage: string[] = []; const allReasoningContentChunk: string[] = []; const allChunkTool: ToolCall[] = []; @@ -170,7 +170,9 @@ export default function ChatBOX() { content, reasoning_content, hide: false, - token: responseTokenCount, + token: + responseTokenCount - + (usage?.completion_tokens_details?.reasoning_tokens ?? 0), example: false, audio: null, logprobs, @@ -219,8 +221,10 @@ export default function ChatBOX() { content: msg.content, tool_calls: msg.tool_calls, hide: false, - token: - data.usage.completion_tokens ?? calculate_token_length(msg.content), + token: data.usage?.completion_tokens_details + ? data.usage.completion_tokens - + data.usage.completion_tokens_details.reasoning_tokens + : (data.usage.completion_tokens ?? calculate_token_length(msg.content)), example: false, audio: null, logprobs: data.choices[0]?.logprobs,