diff --git a/src/addImage.tsx b/src/addImage.tsx
index a05c9fe..48cfd3e 100644
--- a/src/addImage.tsx
+++ b/src/addImage.tsx
@@ -263,6 +263,7 @@ export function AddImage({
       example: false,
       audio: null,
       logprobs: null,
+      response_model_name: imageGenModel,
     });
     setChatStore({ ...chatStore });
diff --git a/src/message.tsx b/src/message.tsx
index 5535631..509981d 100644
--- a/src/message.tsx
+++ b/src/message.tsx
@@ -181,6 +181,14 @@ export default function Message(props: Props) {
           />
         )}
+        {chat.response_model_name && (
+          <>
+
+            {chat.response_model_name}
+
+
+          </>
+        )}
         {showEdit && (
diff --git a/src/pages/AddToolMsg.tsx b/src/pages/AddToolMsg.tsx
index a04a3e4..ac76c4e 100644
--- a/src/pages/AddToolMsg.tsx
+++ b/src/pages/AddToolMsg.tsx
@@ -74,6 +74,7 @@ const AddToolMsg = (props: {
       example: false,
       audio: null,
       logprobs: null,
+      response_model_name: null,
     });
     setChatStore({ ...chatStore });
     setNewToolCallID("");
diff --git a/src/pages/App.tsx b/src/pages/App.tsx
index 4a7322b..4356ebc 100644
--- a/src/pages/App.tsx
+++ b/src/pages/App.tsx
@@ -34,7 +34,6 @@ export function App() {
   if (ret.maxGenTokens === undefined) ret.maxGenTokens = 2048;
   if (ret.maxGenTokens_enabled === undefined) ret.maxGenTokens_enabled = true;
   if (ret.model === undefined) ret.model = DefaultModel;
-  if (ret.responseModelName === undefined) ret.responseModelName = "";
   if (ret.toolsString === undefined) ret.toolsString = "";
   if (ret.chatgpt_api_web_version === undefined)
     // this is from old version becasue it is undefined,
diff --git a/src/pages/Chatbox.tsx b/src/pages/Chatbox.tsx
index a323093..4059cc8 100644
--- a/src/pages/Chatbox.tsx
+++ b/src/pages/Chatbox.tsx
@@ -87,8 +87,9 @@ export default function ChatBOX(props: {
     const logprobs: Logprobs = {
       content: [],
     };
+    let response_model_name : string | null = null;
     for await (const i of client.processStreamResponse(response)) {
-      chatStore.responseModelName = i.model;
+      response_model_name = i.model;
       responseTokenCount += 1;
       const c = i.choices[0];
@@ -148,17 +149,17 @@ export default function ChatBOX(props: {

     // estimate cost
     let cost = 0;
-    if (chatStore.responseModelName) {
+    if (response_model_name) {
       cost +=
         responseTokenCount *
-        (models[chatStore.responseModelName]?.price?.completion ?? 0);
+        (models[response_model_name]?.price?.completion ?? 0);
       let sum = 0;
       for (const msg of chatStore.history
         .filter(({ hide }) => !hide)
         .slice(chatStore.postBeginIndex)) {
         sum += msg.token;
       }
-      cost += sum * (models[chatStore.responseModelName]?.price?.prompt ?? 0);
+      cost += sum * (models[response_model_name]?.price?.prompt ?? 0);
     }

     console.log("cost", cost);
@@ -174,6 +175,7 @@ export default function ChatBOX(props: {
       example: false,
       audio: null,
       logprobs,
+      response_model_name,
     };
     if (allChunkTool.length > 0) newMsg.tool_calls = allChunkTool;
@@ -188,7 +190,6 @@ export default function ChatBOX(props: {

   const _completeWithFetchMode = async (response: Response) => {
     const data = (await response.json()) as FetchResponse;
-    chatStore.responseModelName = data.model ?? "";
     if (data.model) {
       let cost = 0;
       cost +=
@@ -228,6 +229,7 @@ export default function ChatBOX(props: {
       example: false,
       audio: null,
       logprobs: data.choices[0]?.logprobs,
+      response_model_name: data.model,
     });
     setShowGenerating(false);
   };
@@ -311,7 +313,6 @@ export default function ChatBOX(props: {
       console.log("empty message");
       return;
     }
-    if (call_complete) chatStore.responseModelName = "";

     let content: string | MessageDetail[] = inputMsg;
     if (images.length > 0) {
@@ -328,6 +329,7 @@ export default function ChatBOX(props: {
       example: false,
       audio: null,
       logprobs: null,
+      response_model_name: null,
     });

     // manually calculate token length
@@ -619,11 +621,6 @@ export default function ChatBOX(props: {
             )}

-            {chatStore.responseModelName && (
-              <>
-                {Tr("Generated by")} {chatStore.responseModelName}
-              </>
-            )}
             {chatStore.postBeginIndex !== 0 && (
               <>
@@ -754,6 +751,7 @@ export default function ChatBOX(props: {
                 example: false,
                 audio: null,
                 logprobs: null,
+                response_model_name: null,
               });
               setInputMsg("");
               setChatStore({ ...chatStore });
diff --git a/src/types/chatstore.ts b/src/types/chatstore.ts
index 06567fd..da9f8a0 100644
--- a/src/types/chatstore.ts
+++ b/src/types/chatstore.ts
@@ -20,7 +20,6 @@ export interface ChatStore {
   apiEndpoint: string;
   streamMode: boolean;
   model: string;
-  responseModelName: string;
   cost: number;
   temperature: number;
   temperature_enabled: boolean;
@@ -69,4 +68,5 @@ export interface ChatStoreMessage extends Message {
   example: boolean;
   audio: Blob | null;
   logprobs: Logprobs | null;
+  response_model_name: string | null;
 }
diff --git a/src/types/newChatstore.ts b/src/types/newChatstore.ts
index dd5a598..3a6bae5 100644
--- a/src/types/newChatstore.ts
+++ b/src/types/newChatstore.ts
@@ -55,7 +55,6 @@ export const newChatStore = (options: NewChatStoreOptions): ChatStore => {
     ),
     streamMode: getDefaultParams("mode", options.streamMode ?? true),
     model: getDefaultParams("model", options.model ?? DefaultModel),
-    responseModelName: "",
     cost: 0,
     temperature: getDefaultParams("temp", options.temperature ?? 0.7),
     temperature_enabled: true,