store response_model_name by message

This commit is contained in:
2024-12-08 16:19:04 +08:00
parent 6aca74a7b4
commit e7c26560bb
7 changed files with 20 additions and 14 deletions

View File

@@ -263,6 +263,7 @@ export function AddImage({
example: false, example: false,
audio: null, audio: null,
logprobs: null, logprobs: null,
response_model_name: imageGenModel,
}); });
setChatStore({ ...chatStore }); setChatStore({ ...chatStore });

View File

@@ -181,6 +181,14 @@ export default function Message(props: Props) {
/> />
)} )}
<TTSPlay chat={chat} /> <TTSPlay chat={chat} />
{chat.response_model_name && (
<>
<span className="opacity-50">
{chat.response_model_name}
</span>
<hr />
</>
)}
</div> </div>
</div> </div>
{showEdit && ( {showEdit && (

View File

@@ -74,6 +74,7 @@ const AddToolMsg = (props: {
example: false, example: false,
audio: null, audio: null,
logprobs: null, logprobs: null,
response_model_name: null,
}); });
setChatStore({ ...chatStore }); setChatStore({ ...chatStore });
setNewToolCallID(""); setNewToolCallID("");

View File

@@ -34,7 +34,6 @@ export function App() {
if (ret.maxGenTokens === undefined) ret.maxGenTokens = 2048; if (ret.maxGenTokens === undefined) ret.maxGenTokens = 2048;
if (ret.maxGenTokens_enabled === undefined) ret.maxGenTokens_enabled = true; if (ret.maxGenTokens_enabled === undefined) ret.maxGenTokens_enabled = true;
if (ret.model === undefined) ret.model = DefaultModel; if (ret.model === undefined) ret.model = DefaultModel;
if (ret.responseModelName === undefined) ret.responseModelName = "";
if (ret.toolsString === undefined) ret.toolsString = ""; if (ret.toolsString === undefined) ret.toolsString = "";
if (ret.chatgpt_api_web_version === undefined) if (ret.chatgpt_api_web_version === undefined)
// this is from old version because it is undefined, // this is from old version because it is undefined,

View File

@@ -87,8 +87,9 @@ export default function ChatBOX(props: {
const logprobs: Logprobs = { const logprobs: Logprobs = {
content: [], content: [],
}; };
let response_model_name : string | null = null;
for await (const i of client.processStreamResponse(response)) { for await (const i of client.processStreamResponse(response)) {
chatStore.responseModelName = i.model; response_model_name = i.model;
responseTokenCount += 1; responseTokenCount += 1;
const c = i.choices[0]; const c = i.choices[0];
@@ -148,17 +149,17 @@ export default function ChatBOX(props: {
// estimate cost // estimate cost
let cost = 0; let cost = 0;
if (chatStore.responseModelName) { if (response_model_name) {
cost += cost +=
responseTokenCount * responseTokenCount *
(models[chatStore.responseModelName]?.price?.completion ?? 0); (models[response_model_name]?.price?.completion ?? 0);
let sum = 0; let sum = 0;
for (const msg of chatStore.history for (const msg of chatStore.history
.filter(({ hide }) => !hide) .filter(({ hide }) => !hide)
.slice(chatStore.postBeginIndex)) { .slice(chatStore.postBeginIndex)) {
sum += msg.token; sum += msg.token;
} }
cost += sum * (models[chatStore.responseModelName]?.price?.prompt ?? 0); cost += sum * (models[response_model_name]?.price?.prompt ?? 0);
} }
console.log("cost", cost); console.log("cost", cost);
@@ -174,6 +175,7 @@ export default function ChatBOX(props: {
example: false, example: false,
audio: null, audio: null,
logprobs, logprobs,
response_model_name,
}; };
if (allChunkTool.length > 0) newMsg.tool_calls = allChunkTool; if (allChunkTool.length > 0) newMsg.tool_calls = allChunkTool;
@@ -188,7 +190,6 @@ export default function ChatBOX(props: {
const _completeWithFetchMode = async (response: Response) => { const _completeWithFetchMode = async (response: Response) => {
const data = (await response.json()) as FetchResponse; const data = (await response.json()) as FetchResponse;
chatStore.responseModelName = data.model ?? "";
if (data.model) { if (data.model) {
let cost = 0; let cost = 0;
cost += cost +=
@@ -228,6 +229,7 @@ export default function ChatBOX(props: {
example: false, example: false,
audio: null, audio: null,
logprobs: data.choices[0]?.logprobs, logprobs: data.choices[0]?.logprobs,
response_model_name: data.model,
}); });
setShowGenerating(false); setShowGenerating(false);
}; };
@@ -311,7 +313,6 @@ export default function ChatBOX(props: {
console.log("empty message"); console.log("empty message");
return; return;
} }
if (call_complete) chatStore.responseModelName = "";
let content: string | MessageDetail[] = inputMsg; let content: string | MessageDetail[] = inputMsg;
if (images.length > 0) { if (images.length > 0) {
@@ -328,6 +329,7 @@ export default function ChatBOX(props: {
example: false, example: false,
audio: null, audio: null,
logprobs: null, logprobs: null,
response_model_name: null,
}); });
// manually calculate token length // manually calculate token length
@@ -619,11 +621,6 @@ export default function ChatBOX(props: {
)} )}
</p> </p>
<p className="p-2 my-2 text-center opacity-50 dark:text-white"> <p className="p-2 my-2 text-center opacity-50 dark:text-white">
{chatStore.responseModelName && (
<>
{Tr("Generated by")} {chatStore.responseModelName}
</>
)}
{chatStore.postBeginIndex !== 0 && ( {chatStore.postBeginIndex !== 0 && (
<> <>
<br /> <br />
@@ -754,6 +751,7 @@ export default function ChatBOX(props: {
example: false, example: false,
audio: null, audio: null,
logprobs: null, logprobs: null,
response_model_name: null,
}); });
setInputMsg(""); setInputMsg("");
setChatStore({ ...chatStore }); setChatStore({ ...chatStore });

View File

@@ -20,7 +20,6 @@ export interface ChatStore {
apiEndpoint: string; apiEndpoint: string;
streamMode: boolean; streamMode: boolean;
model: string; model: string;
responseModelName: string;
cost: number; cost: number;
temperature: number; temperature: number;
temperature_enabled: boolean; temperature_enabled: boolean;
@@ -69,4 +68,5 @@ export interface ChatStoreMessage extends Message {
example: boolean; example: boolean;
audio: Blob | null; audio: Blob | null;
logprobs: Logprobs | null; logprobs: Logprobs | null;
response_model_name: string | null;
} }

View File

@@ -55,7 +55,6 @@ export const newChatStore = (options: NewChatStoreOptions): ChatStore => {
), ),
streamMode: getDefaultParams("mode", options.streamMode ?? true), streamMode: getDefaultParams("mode", options.streamMode ?? true),
model: getDefaultParams("model", options.model ?? DefaultModel), model: getDefaultParams("model", options.model ?? DefaultModel),
responseModelName: "",
cost: 0, cost: 0,
temperature: getDefaultParams("temp", options.temperature ?? 0.7), temperature: getDefaultParams("temp", options.temperature ?? 0.7),
temperature_enabled: true, temperature_enabled: true,