import { useContext, useRef } from "react";
import { useEffect, useState } from "react";
import { langCodeContext, tr, Tr } from "@/translate";
import { addTotalCost } from "@/utils/totalCost";
import ChatGPT, {
  calculate_token_length,
  FetchResponse,
  Message as MessageType,
  MessageDetail,
  ToolCall,
  Logprobs,
  Usage,
} from "@/chatgpt";
import { ChatStoreMessage } from "../types/chatstore";
import Message from "@/components/MessageBubble";
import { models } from "@/types/models";
import { ImageUploadDrawer } from "@/components/ImageUploadDrawer";
import { autoHeight } from "@/utils/textAreaHelp";
import VersionHint from "@/components/VersionHint";
import WhisperButton from "@/components/WhisperButton";
import { Button } from "@/components/ui/button";
import { ChatInput } from "@/components/ui/chat/chat-input";
import {
  ChatBubble,
  ChatBubbleMessage,
  ChatBubbleAction,
  ChatBubbleActionWrapper,
} from "@/components/ui/chat/chat-bubble";
import { ChatMessageList } from "@/components/ui/chat/chat-message-list";
import {
  ArrowDownToDotIcon,
  CornerDownLeftIcon,
  CornerLeftUpIcon,
  CornerRightUpIcon,
  InfoIcon,
  ScissorsIcon,
} from "lucide-react";
import { Switch } from "@/components/ui/switch";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { AppChatStoreContext, AppContext } from "./App";
import { ImageGenDrawer } from "@/components/ImageGenDrawer";

/**
 * Build an assistant message from the chunks streamed so far.
 *
 * Used to persist a partial answer when the user aborts generation
 * mid-stream, so nothing already received is lost.
 */
const createMessageFromCurrentBuffer = (
  chunkMessages: string[],
  reasoningChunks: string[],
  tools: ToolCall[],
  response_count: number
): ChatStoreMessage => {
  return {
    role: "assistant",
    content: chunkMessages.join(""),
    reasoning_content: reasoningChunks.join(""),
    tool_calls: tools.length > 0 ? tools : undefined,
    // Defaults for the remaining required ChatStoreMessage fields.
    hide: false,
    // Estimated locally; no usage report is available for an aborted stream.
    token: calculate_token_length(
      chunkMessages.join("") + reasoningChunks.join("")
    ),
    example: false,
    audio: null,
    logprobs: null,
    response_model_name: null,
    usage: null,
    response_count,
  };
};

/**
 * Main chat panel: renders the message history, streams completions from the
 * configured endpoint, tracks token usage/cost, and handles user input
 * (text, images, and tool calls).
 */
export default function ChatBOX() {
  const { db, selectedChatIndex, setSelectedChatIndex, handleNewChatStore } =
    useContext(AppContext);
  const { langCode, setLangCode } = useContext(langCodeContext);
  const { chatStore, setChatStore } = useContext(AppChatStoreContext); // prevent error

  const [inputMsg, setInputMsg] = useState("");
  // NOTE(review): generic arguments were stripped from this chunk; the
  // element type is reconstructed from how `images` is used below.
  const [images, setImages] = useState<MessageDetail[]>([]);
  const [showAddImage, setShowAddImage] = useState(false);
  const [showGenImage, setShowGenImage] = useState(false);
  const [showGenerating, setShowGenerating] = useState(false);
  const [generatingMessage, setGeneratingMessage] = useState("");
  const [showRetry, setShowRetry] = useState(false);

  // "Follow" (auto-scroll to the newest message) is persisted in
  // localStorage and defaults to true when never set.
  const [follow, _setFollow] = useState(
    (localStorage.getItem("follow") ?? "true") === "true"
  );
  const setFollow = (follow: boolean) => {
    console.log("set follow", follow);
    localStorage.setItem("follow", follow.toString());
    _setFollow(follow);
  };

  const messagesEndRef = useRef<HTMLDivElement | null>(null);
  // Keep the view pinned to the bottom while content arrives.
  useEffect(() => {
    if (follow) {
      messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
    }
  }, [showRetry, showGenerating, generatingMessage, chatStore]);

  const client = new ChatGPT(chatStore.apiKey);

  /**
   * Consume a `text/event-stream` completion response chunk by chunk,
   * mirroring progress into `generatingMessage`, and assemble the final
   * assistant message (content, reasoning, tool calls, logprobs, usage).
   *
   * On abort, the partial buffer is saved to history before rethrowing.
   */
  const _completeWithStreamMode = async (
    response: Response,
    signal: AbortSignal
  ): Promise<ChatStoreMessage> => {
    // Counts every streamed chunk (reasoning + normal content).
    let responseTokenCount = 0;
    const allChunkMessage: string[] = [];
    const allReasoningContentChunk: string[] = [];
    const allChunkTool: ToolCall[] = [];
    setShowGenerating(true);
    const logprobs: Logprobs = {
      content: [],
    };
    let response_model_name: string | null = null;
    let usage: Usage | null = null;
    try {
      for await (const i of client.processStreamResponse(response, signal)) {
        if (signal?.aborted) break;
        response_model_name = i.model;
        responseTokenCount += 1;
        if (i.usage) {
          usage = i.usage;
        }
        const c = i.choices[0];
        // Skip keep-alive chunks with no choice (e.g. Azure).
        if (!c) continue;

        const logprob = c?.logprobs?.content[0]?.logprob;
        if (logprob !== undefined) {
          logprobs.content.push({
            token: c?.delta?.content ?? "",
            logprob,
          });
          console.log(c?.delta?.content, logprob);
        }

        if (c?.delta?.content) {
          allChunkMessage.push(c?.delta?.content ?? "");
        }
        if (c?.delta?.reasoning_content) {
          allReasoningContentChunk.push(c?.delta?.reasoning_content ?? "");
        }

        const tool_calls = c?.delta?.tool_calls;
        if (tool_calls) {
          for (const tool_call of tool_calls) {
            // A chunk carrying an id starts a new tool call.
            if (tool_call.id) {
              allChunkTool.push({
                id: tool_call.id,
                type: tool_call.type,
                index: tool_call.index,
                function: {
                  name: tool_call.function.name,
                  arguments: "",
                },
              });
              continue;
            }
            // Subsequent chunks append argument fragments to the call
            // with the matching index.
            const tool = allChunkTool.find(
              (tool) => tool.index === tool_call.index
            );
            if (!tool) {
              console.log("tool (by index) not found", tool_call.index);
              continue;
            }
            tool.function.arguments += tool_call.function.arguments;
          }
        }

        // Live progress shown in the UI while streaming.
        // (allChunkTool is stringified via the implicit comma join.)
        setGeneratingMessage(
          (allReasoningContentChunk.length
            ? "----------\nreasoning:\n" +
              allReasoningContentChunk.join("") +
              "\n----------\n"
            : "") +
            allChunkMessage.join("") +
            allChunkTool.map((tool) => {
              return `Tool Call ID: ${tool.id}\nType: ${tool.type}\nFunction: ${tool.function.name}\nArguments: ${tool.function.arguments}`;
            }) +
            "\n" +
            responseTokenCount +
            " response count"
        );
      }
    } catch (e: any) {
      // On user abort, persist whatever has been streamed so far.
      if (e.name === "AbortError") {
        if (allChunkMessage.length > 0 || allReasoningContentChunk.length > 0) {
          const partialMsg = createMessageFromCurrentBuffer(
            allChunkMessage,
            allReasoningContentChunk,
            allChunkTool,
            responseTokenCount
          );
          chatStore.history.push(partialMsg);
          setChatStore({ ...chatStore });
        }
      }
      // Rethrow every error (including aborts) for the caller to handle.
      throw e;
    } finally {
      setShowGenerating(false);
      setGeneratingMessage("");
    }

    const content = allChunkMessage.join("");
    const reasoning_content = allReasoningContentChunk.join("");
    console.log("save logprobs", logprobs);

    // Manually copy status from client to chatStore.
    chatStore.maxTokens = client.max_tokens;
    chatStore.tokenMargin = client.tokens_margin;
    setGeneratingMessage("");
    setShowGenerating(false);

    // Build a usage record: start from a local estimate, then overlay
    // whatever the API actually reported (many providers only send usage
    // when explicitly requested).
    const prompt_tokens = chatStore.history
      .filter(({ hide }) => !hide)
      .slice(chatStore.postBeginIndex, -1)
      .reduce((acc, msg) => acc + msg.token, 0);
    const ret: Usage = {
      prompt_tokens: prompt_tokens,
      completion_tokens: responseTokenCount,
      total_tokens: prompt_tokens + responseTokenCount,
      response_model_name: response_model_name,
      prompt_tokens_details: null,
      completion_tokens_details: null,
    };
    if (usage) {
      ret.prompt_tokens = usage.prompt_tokens ?? prompt_tokens;
      ret.completion_tokens = usage.completion_tokens ?? responseTokenCount;
      ret.total_tokens =
        usage.total_tokens ?? prompt_tokens + responseTokenCount;
      ret.prompt_tokens_details = usage.prompt_tokens_details ?? null;
      ret.completion_tokens_details = usage.completion_tokens_details ?? null;
    }

    const newMsg: ChatStoreMessage = {
      role: "assistant",
      content,
      reasoning_content,
      hide: false,
      // Visible-answer tokens only: subtract reasoning tokens when reported.
      token:
        responseTokenCount -
        (usage?.completion_tokens_details?.reasoning_tokens ?? 0),
      example: false,
      audio: null,
      logprobs,
      response_model_name,
      // BUGFIX: was `usage` (null when the API reports none), which made
      // complete() hit its "usage is null" panic and drop the response.
      // `ret` is the merged estimate/report computed above for this purpose.
      usage: ret,
      response_count: responseTokenCount,
    };
    if (allChunkTool.length > 0) newMsg.tool_calls = allChunkTool;
    return newMsg;
  };

  /**
   * Consume a non-streaming `application/json` completion response and
   * assemble the final assistant message.
   */
  const _completeWithFetchMode = async (
    response: Response
  ): Promise<ChatStoreMessage> => {
    const data = (await response.json()) as FetchResponse;
    const msg = client.processFetchResponse(data);
    setShowGenerating(false);
    const usage: Usage = {
      prompt_tokens: data.usage.prompt_tokens ?? 0,
      completion_tokens: data.usage.completion_tokens ?? 0,
      total_tokens: data.usage.total_tokens ?? 0,
      response_model_name: data.model ?? null,
      prompt_tokens_details: data.usage.prompt_tokens_details ?? null,
      completion_tokens_details: data.usage.completion_tokens_details ?? null,
    };
    const ret: ChatStoreMessage = {
      role: "assistant",
      content: msg.content,
      tool_calls: msg.tool_calls,
      hide: false,
      // Visible-answer tokens: reasoning tokens are billed but not shown.
      token: data.usage?.completion_tokens_details
        ? data.usage.completion_tokens -
          data.usage.completion_tokens_details.reasoning_tokens
        : (data.usage.completion_tokens ?? calculate_token_length(msg.content)),
      example: false,
      audio: null,
      logprobs: data.choices[0]?.logprobs,
      response_model_name: data.model,
      reasoning_content: data.choices[0]?.message?.reasoning_content ?? null,
      usage,
    };
    return ret;
  };

  /**
   * Wrap the actual complete API: sync configuration onto the client, fire
   * the request, dispatch on the response content type, then update history,
   * token estimates, and accumulated cost.
   */
  const complete = async () => {
    // Manually copy status from chatStore to client.
    client.apiEndpoint = chatStore.apiEndpoint;
    client.sysMessageContent = chatStore.systemMessageContent;
    client.toolsString = chatStore.toolsString;
    client.tokens_margin = chatStore.tokenMargin;
    client.temperature = chatStore.temperature;
    client.enable_temperature = chatStore.temperature_enabled;
    client.top_p = chatStore.top_p;
    client.enable_top_p = chatStore.top_p_enabled;
    client.frequency_penalty = chatStore.frequency_penalty;
    client.frequency_penalty_enabled = chatStore.frequency_penalty_enabled;
    client.presence_penalty = chatStore.presence_penalty;
    client.presence_penalty_enabled = chatStore.presence_penalty_enabled;
    client.json_mode = chatStore.json_mode;
    client.messages = chatStore.history
      // Only post non-hidden messages.
      .filter(({ hide }) => !hide)
      .slice(chatStore.postBeginIndex)
      // Only copy the attributes the API needs.
      .map(({ content, role, example, tool_call_id, tool_calls }) => {
        const ret: MessageType = {
          content,
          role,
          tool_calls,
        };
        if (example) {
          // Few-shot examples are posted as named system messages.
          ret.name =
            ret.role === "assistant" ? "example_assistant" : "example_user";
          ret.role = "system";
        }
        if (tool_call_id) ret.tool_call_id = tool_call_id;
        return ret;
      });
    client.model = chatStore.model;
    client.max_tokens = chatStore.maxTokens;
    client.max_gen_tokens = chatStore.maxGenTokens;
    client.enable_max_gen_tokens = chatStore.maxGenTokens_enabled;

    const created_at = new Date();
    try {
      setShowGenerating(true);
      abortControllerRef.current = new AbortController();
      const response = await client._fetch(
        chatStore.streamMode,
        chatStore.logprobs,
        abortControllerRef.current.signal
      );
      const responsed_at = new Date();
      const contentType = response.headers.get("content-type");
      let cs: ChatStoreMessage;
      if (contentType?.startsWith("text/event-stream")) {
        cs = await _completeWithStreamMode(
          response,
          abortControllerRef.current.signal
        );
      } else if (contentType?.startsWith("application/json")) {
        cs = await _completeWithFetchMode(response);
      } else {
        throw new Error(`unknown response content type ${contentType}`);
      }
      const usage = cs.usage;
      if (!usage) {
        throw new Error("panic: usage is null");
      }
      const completed_at = new Date();
      cs.created_at = created_at.toISOString();
      cs.responsed_at = responsed_at.toISOString();
      cs.completed_at = completed_at.toISOString();
      chatStore.history.push(cs);
      console.log("new chatStore", cs);

      // Manually copy status from client to chatStore.
      chatStore.maxTokens = client.max_tokens;
      chatStore.tokenMargin = client.tokens_margin;
      chatStore.totalTokens = client.total_tokens;

      // Estimate the user's input message tokens from reported prompt tokens
      // minus everything that precedes it.
      const aboveTokens = chatStore.history
        .filter(({ hide }) => !hide)
        .slice(chatStore.postBeginIndex, -2)
        .reduce((acc, msg) => acc + msg.token, 0);
      const userMessage = chatStore.history
        .filter(({ hide }) => !hide)
        .slice(-2, -1)[0];
      if (userMessage) {
        userMessage.token = usage.prompt_tokens - aboveTokens;
        console.log("estimate user message token", userMessage.token);
      }

      // [TODO]
      // Calculate cost from the per-model price table.
      if (usage.response_model_name) {
        let cost = 0;
        const price = models[usage.response_model_name]?.price;
        if (usage.prompt_tokens_details) {
          // Cached prompt tokens may be billed at a discounted rate;
          // fall back to the normal prompt price when no cached price exists.
          const cached_prompt_tokens =
            usage.prompt_tokens_details.cached_tokens ?? 0;
          const uncached_prompt_tokens =
            usage.prompt_tokens - cached_prompt_tokens;
          const prompt_price = price?.prompt ?? 0;
          const cached_price = price?.cached_prompt ?? prompt_price;
          cost +=
            cached_prompt_tokens * cached_price +
            uncached_prompt_tokens * prompt_price;
        } else {
          cost += usage.prompt_tokens * (price?.prompt ?? 0);
        }
        cost += usage.completion_tokens * (price?.completion ?? 0);
        addTotalCost(cost);
        chatStore.cost += cost;
        console.log("cost", cost);
      }

      setShowRetry(false);
      setChatStore({ ...chatStore });
    } catch (error: any) {
      if (error.name === "AbortError") {
        // Partial content (if any) was already saved by the stream handler.
        console.log("abort complete");
        return;
      }
      setShowRetry(true);
      alert(error);
    } finally {
      setShowGenerating(false);
      setSelectedChatIndex(selectedChatIndex);
    }
  };

  /**
   * Handle the "send" button / ctrl+Enter: push the user message (text
   * and/or images) onto history and optionally kick off a completion.
   */
  const send = async (msg = "", call_complete = true) => {
    setTimeout(() => {
      messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
    }, 0);
    const inputMsg = msg.trim();
    if (!inputMsg && images.length === 0) {
      console.log("empty message");
      return;
    }

    // Content is plain text, images only, or a text part plus images.
    let content: string | MessageDetail[] = inputMsg;
    if (images.length > 0) {
      content = inputMsg
        ? [{ type: "text", text: inputMsg }, ...images]
        : images;
    }
    chatStore.history.push({
      role: "user",
      content,
      hide: false,
      token: calculate_token_length(inputMsg),
      example: false,
      audio: null,
      logprobs: null,
      response_model_name: null,
      reasoning_content: null,
      usage: null,
    });
    // Manually maintain the running total; image tokens are counted here
    // but not on the individual message.
    chatStore.totalTokens +=
      calculate_token_length(inputMsg) + calculate_token_length(images);
    client.total_tokens = chatStore.totalTokens;
    setChatStore({ ...chatStore });
    setInputMsg("");
    setImages([]);
    if (call_complete) {
      await complete();
    }
  };

  const userInputRef = useRef<HTMLTextAreaElement | null>(null);
  const abortControllerRef = useRef(new AbortController());

  return (
    <>
{/* NOTE(review): the JSX element tags in this section appear to have been
    stripped by a lossy extraction — only expression containers, text
    children, and scattered attribute fragments remain. Recover the markup
    from version control before editing; the content below is preserved
    verbatim. */}
{chatStore.history.length === 0 && ( This is a new chat session, start by typing a message
Settings button located at the top right corner can be used to change the settings of this chat
'New' button located at the top left corner can be used to create a new chat
All chat history and settings are stored in the local browser
)} {chatStore.systemMessageContent.trim() && (
System Prompt
setShowSettings(true)} // TODO: add a button to show settings > {chatStore.systemMessageContent}
} onClick={() => { chatStore.systemMessageContent = ""; chatStore.toolsString = ""; chatStore.history = []; setChatStore({ ...chatStore }); }} />
)} {chatStore.history.map((_, messageIndex) => ( ))} {showGenerating && ( {generatingMessage} )}

{chatStore.history.length > 0 && !showGenerating && ( )} {chatStore.history.length > 0 && !showGenerating && ( )} {chatStore.develop_mode && chatStore.history.length > 0 && ( )}

{chatStore.postBeginIndex !== 0 && (

Chat History Notice Info: chat history is too long, forget messages:{" "} {chatStore.postBeginIndex}

)} {showRetry && (

)}
{images.length > 0 && (
{images.map((image, index) => (
{image.type === "image_url" && ( )}
))}
)}
{generatingMessage && (
)}
{ setInputMsg(event.target.value); autoHeight(event.target); }} onKeyPress={(event: any) => { if (event.ctrlKey && event.code === "Enter") { send(event.target.value, true); setInputMsg(""); event.target.value = ""; autoHeight(event.target); return; } autoHeight(event.target); setInputMsg(event.target.value); }} className="min-h-12 resize-none rounded-lg bg-background border-0 p-3 shadow-none focus-visible:ring-0" />
); }