From f257d9e5b34c856ca3db0db0641ff7957853fd11 Mon Sep 17 00:00:00 2001
From: heimoshuiyu
Date: Sun, 8 Dec 2024 17:18:07 +0800
Subject: [PATCH] calculate streaming response cost

---
 src/chatgpt.ts            | 12 ++++++++++++
 src/pages/Chatbox.tsx     | 16 ++++++++++++++++
 src/types/newChatstore.ts | 10 +++++-----
 3 files changed, 33 insertions(+), 5 deletions(-)

diff --git a/src/chatgpt.ts b/src/chatgpt.ts
index 986e8e1..217906b 100644
--- a/src/chatgpt.ts
+++ b/src/chatgpt.ts
@@ -49,6 +49,12 @@ interface LogprobsContent {
   logprob: number;
 }
 
+export interface StreamingUsage {
+  prompt_tokens: number;
+  completion_tokens: number;
+  total_tokens: number;
+}
+
 export interface StreamingResponseChunk {
   id: string;
   object: string;
@@ -56,6 +62,7 @@ export interface StreamingResponseChunk {
   model: string;
   system_fingerprint: string;
   choices: Choices[];
+  usage: null | StreamingUsage;
 }
 export const getMessageText = (message: Message): string => {
   if (typeof message.content === "string") {
@@ -224,6 +231,11 @@ class Chat {
       presence_penalty: this.presence_penalty,
       frequency_penalty: this.frequency_penalty,
     };
+    if (stream) {
+      body["stream_options"] = {
+        include_usage: true,
+      };
+    }
     if (this.enable_temperature) {
       body["temperature"] = this.temperature;
     }
diff --git a/src/pages/Chatbox.tsx b/src/pages/Chatbox.tsx
index f63bf33..fe34fa7 100644
--- a/src/pages/Chatbox.tsx
+++ b/src/pages/Chatbox.tsx
@@ -18,6 +18,7 @@ import ChatGPT, {
   MessageDetail,
   ToolCall,
   Logprobs,
+  StreamingUsage,
 } from "@/chatgpt";
 import {
   ChatStore,
@@ -88,9 +89,13 @@ export default function ChatBOX(props: {
       content: [],
     };
     let response_model_name: string | null = null;
+    let usage: StreamingUsage | null = null;
     for await (const i of client.processStreamResponse(response)) {
       response_model_name = i.model;
       responseTokenCount += 1;
+      if (i.usage) {
+        usage = i.usage;
+      }
 
       const c = i.choices[0];
 
@@ -160,6 +165,17 @@ export default function ChatBOX(props: {
         sum += msg.token;
       }
       cost += sum * (models[response_model_name]?.price?.prompt ?? 0);
+      if (usage) {
+        // use the response usage if exists
+        cost = 0;
+        cost +=
+          (usage.prompt_tokens ?? 0) *
+          (models[response_model_name]?.price?.prompt ?? 0);
+        cost +=
+          (usage.completion_tokens ?? 0) *
+          models[response_model_name]?.price?.completion;
+        console.log("usage", usage, "cost", cost);
+      }
     }
 
     console.log("cost", cost);
diff --git a/src/types/newChatstore.ts b/src/types/newChatstore.ts
index 53350e5..6c00a63 100644
--- a/src/types/newChatstore.ts
+++ b/src/types/newChatstore.ts
@@ -40,7 +40,7 @@ export const newChatStore = (options: NewChatStoreOptions): ChatStore => {
     chatgpt_api_web_version: CHATGPT_API_WEB_VERSION,
     systemMessageContent: getDefaultParams(
       "sys",
-      options.systemMessageContent ?? ""
+      options.systemMessageContent ?? "",
     ),
     toolsString: options.toolsString ?? "",
     history: [],
@@ -50,14 +50,14 @@ export const newChatStore = (options: NewChatStoreOptions): ChatStore => {
     maxTokens: getDefaultParams(
       "max",
       models[getDefaultParams("model", options.model ?? DefaultModel)]
-        ?.maxToken ?? 2048
+        ?.maxToken ?? 2048,
     ),
     maxGenTokens: 2048,
     maxGenTokens_enabled: false,
     apiKey: getDefaultParams("key", options.apiKey ?? ""),
     apiEndpoint: getDefaultParams(
       "api",
-      options.apiEndpoint ?? DefaultAPIEndpoint
+      options.apiEndpoint ?? DefaultAPIEndpoint,
     ),
     streamMode: getDefaultParams("mode", options.streamMode ?? true),
     model: getDefaultParams("model", options.model ?? DefaultModel),
@@ -71,12 +71,12 @@ export const newChatStore = (options: NewChatStoreOptions): ChatStore => {
     develop_mode: getDefaultParams("dev", options.dev ?? false),
     whisper_api: getDefaultParams(
       "whisper-api",
-      options.whisper_api ?? "https://api.openai.com/v1/audio/transcriptions"
+      options.whisper_api ?? "https://api.openai.com/v1/audio/transcriptions",
     ),
     whisper_key: getDefaultParams("whisper-key", options.whisper_key ?? ""),
     tts_api: getDefaultParams(
       "tts-api",
-      options.tts_api ?? "https://api.openai.com/v1/audio/speech"
+      options.tts_api ?? "https://api.openai.com/v1/audio/speech",
     ),
     tts_key: getDefaultParams("tts-key", options.tts_key ?? ""),
     tts_voice: options.tts_voice ?? "alloy",
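
Note on the mechanism this patch relies on: with stream_options: { include_usage: true }, the OpenAI streaming API reports usage: null on regular chunks and sends one final chunk whose choices array is empty and whose usage field carries the token totals for the whole request, which is why StreamingResponseChunk.usage is typed null | StreamingUsage and captured inside the stream loop. Below is a minimal TypeScript sketch of the resulting cost calculation in Chatbox.tsx; the ModelPrice shape, the costFromUsage helper, and the ?? 0 fallback on the completion rate are illustrative additions, not part of the patch.

// Mirrors the StreamingUsage interface added in src/chatgpt.ts above.
interface StreamingUsage {
  prompt_tokens: number;
  completion_tokens: number;
  total_tokens: number;
}

// Illustrative per-token price entry; the real shape lives in the project's
// models table (models[name]?.price) and may differ.
interface ModelPrice {
  prompt: number; // USD per prompt token
  completion: number; // USD per completion token
}

// Recompute the cost from the server-reported usage instead of the local
// per-chunk token estimate, as the Chatbox.tsx hunk does. The fallback on
// price?.completion is an added safeguard against an undefined rate producing
// NaN; the patch itself multiplies by price?.completion directly.
function costFromUsage(usage: StreamingUsage, price?: ModelPrice): number {
  const promptCost = (usage.prompt_tokens ?? 0) * (price?.prompt ?? 0);
  const completionCost =
    (usage.completion_tokens ?? 0) * (price?.completion ?? 0);
  return promptCost + completionCost;
}

// Example with made-up rates of $0.15 / $0.60 per million tokens:
// 1200 * 0.15e-6 + 350 * 0.6e-6 = 0.00018 + 0.00021 = 0.00039 USD.
console.log(
  costFromUsage(
    { prompt_tokens: 1200, completion_tokens: 350, total_tokens: 1550 },
    { prompt: 0.15 / 1_000_000, completion: 0.6 / 1_000_000 },
  ),
);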