// NOTE(review): this file appears to have passed through an HTML/angle-bracket
// stripper. All JSX element tags and all generic type parameters ("<...>") have
// been deleted (e.g. `Promise>`, bare `StateUpdater`, `useState([])`,
// `createRef()`, and the entire tag-less render tree inside `return (...)`).
// The code below is the surviving token stream, reformatted, with comments
// added. Recover the JSX and generics from version control — do not attempt to
// rebuild them from this residue.
import { Tr, langCodeContext, LANG_OPTIONS } from "./translate";
import structuredClone from "@ungap/structured-clone";
import { createRef } from "preact";
import { StateUpdater, useEffect, useState } from "preact/hooks";
import {
  ChatStore,
  ChatStoreMessage,
  STORAGE_NAME_TEMPLATE,
  STORAGE_NAME_TEMPLATE_API,
  STORAGE_NAME_TEMPLATE_API_IMAGE_GEN,
  STORAGE_NAME_TEMPLATE_API_TTS,
  STORAGE_NAME_TEMPLATE_API_WHISPER,
  STORAGE_NAME_TEMPLATE_TOOLS,
  TemplateAPI,
  TemplateTools,
  addTotalCost,
  getTotalCost,
} from "./app";
import ChatGPT, {
  calculate_token_length,
  ChunkMessage,
  FetchResponse,
  Message as MessageType,
  MessageDetail,
  ToolCall,
  Logprobs,
} from "./chatgpt";
import Message from "./message";
import models from "./models";
import Settings from "./settings";
import getDefaultParams from "./getDefaultParam";
import { AddImage } from "./addImage";
import { ListAPIs } from "./listAPIs";
import { ListToolsTempaltes } from "./listToolsTemplates";
import { autoHeight } from "./textarea";
import Search from "./search";
import { IDBPDatabase } from "idb";
import {
  MagnifyingGlassIcon,
  CubeIcon,
  BanknotesIcon,
  DocumentTextIcon,
  ChatBubbleLeftEllipsisIcon,
  ScissorsIcon,
  SwatchIcon,
  SparklesIcon,
} from "@heroicons/react/24/outline";

// A saved chat-session snapshot: a full ChatStore plus a human-readable name
// under which it is stored in localStorage (STORAGE_NAME_TEMPLATE).
export interface TemplateChatStore extends ChatStore {
  name: string;
}

/**
 * Main chat view: renders the message history for the selected chat, the
 * input box, and all send/stream/retry UI, and performs the chat-completion
 * requests via the ChatGPT client.
 *
 * NOTE(review): generic parameters were stripped from the props type —
 * `db` was presumably `Promise<IDBPDatabase>` and `setSelectedChatIndex`
 * presumably `StateUpdater<number>`; confirm against version control.
 */
export default function ChatBOX(props: {
  db: Promise>;
  chatStore: ChatStore;
  setChatStore: (cs: ChatStore) => void;
  selectedChatIndex: number;
  setSelectedChatIndex: StateUpdater;
}) {
  const { chatStore, setChatStore } = props;
  // prevent error
  if (chatStore === undefined) return;

  // --- UI state -----------------------------------------------------------
  // NOTE(review): useState generic arguments were stripped; element types
  // below are inferred from usage (e.g. `images` holds MessageDetail entries
  // pushed by the AddImage flow — confirm).
  const [inputMsg, setInputMsg] = useState("");
  const [images, setImages] = useState([]);
  const [showAddImage, setShowAddImage] = useState(false);
  const [showGenerating, setShowGenerating] = useState(false);
  const [generatingMessage, setGeneratingMessage] = useState("");
  const [showRetry, setShowRetry] = useState(false);
  const [isRecording, setIsRecording] = useState("Mic");
  const [showAddToolMsg, setShowAddToolMsg] = useState(false);
  const [newToolCallID, setNewToolCallID] = useState("");
  const [newToolContent, setNewToolContent] = useState("");
  const [showSearch, setShowSearch] = useState(false);

  // "follow" = auto-scroll to the newest message; persisted in localStorage
  // and defaulting to true on first run.
  let default_follow = localStorage.getItem("follow");
  if (default_follow === null) {
    default_follow = "true";
  }
  const [follow, _setFollow] = useState(default_follow === "true");
  const mediaRef = createRef();
  const setFollow = (follow: boolean) => {
    console.log("set follow", follow);
    localStorage.setItem("follow", follow.toString());
    _setFollow(follow);
  };
  const messagesEndRef = createRef();
  // Scroll the sentinel element into view whenever generation output changes,
  // but only while "follow" is on.
  useEffect(() => {
    if (follow) {
      messagesEndRef.current.scrollIntoView({ behavior: "smooth" });
    }
  }, [showRetry, showGenerating, generatingMessage]);

  // Fresh API client per render; its fields are copied from/to chatStore
  // around every request (see complete()).
  const client = new ChatGPT(chatStore.apiKey);

  // Re-estimate the running token total: system prompt plus every visible
  // (non-hidden) message from postBeginIndex onward.
  const update_total_tokens = () => {
    // manually estimate token
    client.total_tokens = calculate_token_length(
      chatStore.systemMessageContent
    );
    for (const msg of chatStore.history
      .filter(({ hide }) => !hide)
      .slice(chatStore.postBeginIndex)) {
      client.total_tokens += msg.token;
    }
    chatStore.totalTokens = client.total_tokens;
  };

  // Consume a streaming (SSE) completion response: accumulate content chunks,
  // assemble tool_calls by index, collect logprobs, estimate cost, and push
  // the finished assistant message into chatStore.history.
  const _completeWithStreamMode = async (response: Response) => {
    let responseTokenCount = 0; // one token per received chunk (estimate)
    const allChunkMessage: string[] = [];
    const allChunkTool: ToolCall[] = [];
    setShowGenerating(true);
    const logprobs: Logprobs = {
      content: [],
    };
    for await (const i of client.processStreamResponse(response)) {
      chatStore.responseModelName = i.model;
      responseTokenCount += 1;
      const c = i.choices[0];
      // skip if choice is empty (e.g. azure)
      if (!c) continue;
      const logprob = c?.logprobs?.content[0]?.logprob;
      if (logprob !== undefined) {
        logprobs.content.push({
          token: c?.delta?.content ?? "",
          logprob,
        });
        console.log(c?.delta?.content, logprob);
      }
      allChunkMessage.push(c?.delta?.content ?? "");
      const tool_calls = c?.delta?.tool_calls;
      if (tool_calls) {
        for (const tool_call of tool_calls) {
          // init
          // A chunk carrying an id starts a new tool call; later chunks for
          // the same call carry only `index` + argument fragments.
          if (tool_call.id) {
            allChunkTool.push({
              id: tool_call.id,
              type: tool_call.type,
              index: tool_call.index,
              function: {
                name: tool_call.function.name,
                arguments: "",
              },
            });
            continue;
          }
          // update tool call arguments
          const tool = allChunkTool.find(
            (tool) => tool.index === tool_call.index
          );
          if (!tool) {
            console.log("tool (by index) not found", tool_call.index);
            continue;
          }
          tool.function.arguments += tool_call.function.arguments;
        }
      }
      // Live preview: text so far plus a textual dump of any tool calls.
      setGeneratingMessage(
        allChunkMessage.join("") +
          allChunkTool.map((tool) => {
            return `Tool Call ID: ${tool.id}\nType: ${tool.type}\nFunction: ${tool.function.name}\nArguments: ${tool.function.arguments}`;
          })
      );
    }
    setShowGenerating(false);
    const content = allChunkMessage.join("");
    // estimate cost
    // Streaming responses carry no usage block, so cost is estimated from
    // chunk count (completion) and the visible history tokens (prompt).
    let cost = 0;
    if (chatStore.responseModelName) {
      cost +=
        responseTokenCount *
        (models[chatStore.responseModelName]?.price?.completion ?? 0);
      let sum = 0;
      for (const msg of chatStore.history
        .filter(({ hide }) => !hide)
        .slice(chatStore.postBeginIndex)) {
        sum += msg.token;
      }
      cost += sum * (models[chatStore.responseModelName]?.price?.prompt ?? 0);
    }
    console.log("cost", cost);
    chatStore.cost += cost;
    addTotalCost(cost);
    console.log("save logprobs", logprobs);
    const newMsg: ChatStoreMessage = {
      role: "assistant",
      content,
      hide: false,
      token: responseTokenCount,
      example: false,
      audio: null,
      logprobs,
    };
    if (allChunkTool.length > 0) newMsg.tool_calls = allChunkTool;
    chatStore.history.push(newMsg);
    // manually copy status from client to chatStore
    chatStore.maxTokens = client.max_tokens;
    chatStore.tokenMargin = client.tokens_margin;
    update_total_tokens();
    setChatStore({ ...chatStore });
    setGeneratingMessage("");
    setShowGenerating(false);
  };

  // Consume a non-streaming (JSON) completion response: record exact cost
  // from the usage block, back-fill the latest user message's token count
  // from prompt_tokens, and push the assistant message.
  const _completeWithFetchMode = async (response: Response) => {
    const data = (await response.json()) as FetchResponse;
    chatStore.responseModelName = data.model ?? "";
    if (data.model) {
      let cost = 0;
      cost +=
        (data.usage.prompt_tokens ?? 0) *
        (models[data.model]?.price?.prompt ?? 0);
      cost +=
        (data.usage.completion_tokens ?? 0) *
        (models[data.model]?.price?.completion ?? 0);
      chatStore.cost += cost;
      addTotalCost(cost);
    }
    const msg = client.processFetchResponse(data);
    // estimate user's input message token
    // Sum tokens of all visible messages except the last (the user's new
    // message); the difference from prompt_tokens is that message's cost.
    let aboveToken = 0;
    for (const msg of chatStore.history
      .filter(({ hide }) => !hide)
      .slice(chatStore.postBeginIndex, -1)) {
      aboveToken += msg.token;
    }
    if (data.usage.prompt_tokens) {
      const userMessageToken = data.usage.prompt_tokens - aboveToken;
      console.log("set user message token");
      if (chatStore.history.filter((msg) => !msg.hide).length > 0) {
        chatStore.history.filter((msg) => !msg.hide).slice(-1)[0].token =
          userMessageToken;
      }
    }
    chatStore.history.push({
      role: "assistant",
      content: msg.content,
      tool_calls: msg.tool_calls,
      hide: false,
      token:
        data.usage.completion_tokens ?? calculate_token_length(msg.content),
      example: false,
      audio: null,
      logprobs: data.choices[0]?.logprobs,
    });
    setShowGenerating(false);
  };

  // wrap the actual complete api
  // Copies request parameters chatStore -> client, posts the visible history,
  // then dispatches on the response content-type to the stream or fetch
  // handler above. On failure, surfaces the error and enables "retry".
  const complete = async () => {
    // manually copy status from chatStore to client
    client.apiEndpoint = chatStore.apiEndpoint;
    client.sysMessageContent = chatStore.systemMessageContent;
    client.toolsString = chatStore.toolsString;
    client.tokens_margin = chatStore.tokenMargin;
    client.temperature = chatStore.temperature;
    client.enable_temperature = chatStore.temperature_enabled;
    client.top_p = chatStore.top_p;
    client.enable_top_p = chatStore.top_p_enabled;
    client.frequency_penalty = chatStore.frequency_penalty;
    client.presence_penalty = chatStore.presence_penalty;
    client.json_mode = chatStore.json_mode;
    client.messages = chatStore.history
      // only copy non hidden message
      .filter(({ hide }) => !hide)
      .slice(chatStore.postBeginIndex)
      // only copy content and role attribute to client for posting
      .map(({ content, role, example, tool_call_id, tool_calls }) => {
        const ret: MessageType = {
          content,
          role,
          tool_calls,
        };
        // Few-shot example messages are posted as named system messages.
        if (example) {
          ret.name =
            ret.role === "assistant" ? "example_assistant" : "example_user";
          ret.role = "system";
        }
        if (tool_call_id) ret.tool_call_id = tool_call_id;
        return ret;
      });
    client.model = chatStore.model;
    client.max_tokens = chatStore.maxTokens;
    client.max_gen_tokens = chatStore.maxGenTokens;
    client.enable_max_gen_tokens = chatStore.maxGenTokens_enabled;
    try {
      setShowGenerating(true);
      const response = await client._fetch(
        chatStore.streamMode,
        chatStore.logprobs
      );
      const contentType = response.headers.get("content-type");
      if (contentType?.startsWith("text/event-stream")) {
        await _completeWithStreamMode(response);
      } else if (contentType?.startsWith("application/json")) {
        await _completeWithFetchMode(response);
      } else {
        // NOTE(review): throws a bare string, not an Error — callers below
        // only alert() it, but consider `throw new Error(...)`.
        throw `unknown response content type ${contentType}`;
      }
      // manually copy status from client to chatStore
      chatStore.maxTokens = client.max_tokens;
      chatStore.tokenMargin = client.tokens_margin;
      chatStore.totalTokens = client.total_tokens;
      console.log("postBeginIndex", chatStore.postBeginIndex);
      setShowRetry(false);
      setChatStore({ ...chatStore });
    } catch (error) {
      setShowRetry(true);
      alert(error);
    } finally {
      setShowGenerating(false);
      // Re-select the current chat to trigger persistence/refresh upstream.
      props.setSelectedChatIndex(props.selectedChatIndex);
    }
  };

  // when user click the "send" button or ctrl+Enter in the textarea
  // Pushes the user message (text, images, or both) into history, updates the
  // token estimate, clears the input, and optionally kicks off complete().
  const send = async (msg = "", call_complete = true) => {
    const inputMsg = msg.trim();
    if (!inputMsg && images.length === 0) {
      console.log("empty message");
      return;
    }
    if (call_complete) chatStore.responseModelName = "";
    let content: string | MessageDetail[] = inputMsg;
    if (images.length > 0) {
      content = images;
    }
    if (images.length > 0 && inputMsg.trim()) {
      content = [{ type: "text", text: inputMsg }, ...images];
    }
    chatStore.history.push({
      role: "user",
      content,
      hide: false,
      token: calculate_token_length(inputMsg.trim()),
      example: false,
      audio: null,
      logprobs: null,
    });
    // manually calculate token length
    chatStore.totalTokens +=
      calculate_token_length(inputMsg.trim()) + calculate_token_length(images);
    client.total_tokens = chatStore.totalTokens;
    setChatStore({ ...chatStore });
    setInputMsg("");
    setImages([]);
    if (call_complete) {
      await complete();
    }
  };

  const [showSettings, setShowSettings] = useState(false);

  // --- localStorage-backed template lists ---------------------------------
  // Each list is read once on mount; the matching setter below writes through
  // to localStorage before updating state.
  const [templates, _setTemplates] = useState(
    JSON.parse(
      localStorage.getItem(STORAGE_NAME_TEMPLATE) || "[]"
    ) as TemplateChatStore[]
  );
  const [templateAPIs, _setTemplateAPIs] = useState(
    JSON.parse(
      localStorage.getItem(STORAGE_NAME_TEMPLATE_API) || "[]"
    ) as TemplateAPI[]
  );
  const [templateAPIsWhisper, _setTemplateAPIsWhisper] = useState(
    JSON.parse(
      localStorage.getItem(STORAGE_NAME_TEMPLATE_API_WHISPER) || "[]"
    ) as TemplateAPI[]
  );
  const [templateAPIsTTS, _setTemplateAPIsTTS] = useState(
    JSON.parse(
      localStorage.getItem(STORAGE_NAME_TEMPLATE_API_TTS) || "[]"
    ) as TemplateAPI[]
  );
  const [templateAPIsImageGen, _setTemplateAPIsImageGen] = useState(
    JSON.parse(
      localStorage.getItem(STORAGE_NAME_TEMPLATE_API_IMAGE_GEN) || "[]"
    ) as TemplateAPI[]
  );
  const [toolsTemplates, _setToolsTemplates] = useState(
    JSON.parse(
      localStorage.getItem(STORAGE_NAME_TEMPLATE_TOOLS) || "[]"
    ) as TemplateTools[]
  );
  const setTemplates = (templates: TemplateChatStore[]) => {
    localStorage.setItem(STORAGE_NAME_TEMPLATE, JSON.stringify(templates));
    _setTemplates(templates);
  };
  const setTemplateAPIs = (templateAPIs: TemplateAPI[]) => {
    localStorage.setItem(
      STORAGE_NAME_TEMPLATE_API,
      JSON.stringify(templateAPIs)
    );
    _setTemplateAPIs(templateAPIs);
  };
  const setTemplateAPIsWhisper = (templateAPIWhisper: TemplateAPI[]) => {
    localStorage.setItem(
      STORAGE_NAME_TEMPLATE_API_WHISPER,
      JSON.stringify(templateAPIWhisper)
    );
    _setTemplateAPIsWhisper(templateAPIWhisper);
  };
  const setTemplateAPIsTTS = (templateAPITTS: TemplateAPI[]) => {
    localStorage.setItem(
      STORAGE_NAME_TEMPLATE_API_TTS,
      JSON.stringify(templateAPITTS)
    );
    _setTemplateAPIsTTS(templateAPITTS);
  };
  const setTemplateAPIsImageGen = (templateAPIImageGen: TemplateAPI[]) => {
    localStorage.setItem(
      STORAGE_NAME_TEMPLATE_API_IMAGE_GEN,
      JSON.stringify(templateAPIImageGen)
    );
    _setTemplateAPIsImageGen(templateAPIImageGen);
  };
  const setTemplateTools = (templateTools: TemplateTools[]) => {
    localStorage.setItem(
      STORAGE_NAME_TEMPLATE_TOOLS,
      JSON.stringify(templateTools)
    );
    _setToolsTemplates(templateTools);
  };
  const userInputRef = createRef();

  // NOTE(review): everything from here to the end of the function is the
  // render tree with ALL JSX tags stripped by the mangling described at the
  // top of the file. The surviving expression containers, event-handler
  // bodies, and text nodes are preserved verbatim below; the element
  // structure is unrecoverable from this residue and must be restored from
  // version control.
  return (
    {showSettings && ( )} {showSearch && ( )} {/*
    setShowSettings(true)} >
    {" "} {" "} {chatStore.toolsString.trim() && ( )}
    {chatStore.model}{" "} Tokens:{" "} {chatStore.totalTokens}/{chatStore.maxTokens} {" "} {Tr("Cut")}:{" "} {chatStore.postBeginIndex}/ {chatStore.history.filter(({ hide }) => !hide).length} {" "} {" "} {Tr("Cost")}:{" "} ${chatStore.cost.toFixed(4)}
    */}
    {!chatStore.apiKey && (

    {Tr("Please click above to set")} (OpenAI) API KEY

    )} {!chatStore.apiEndpoint && (

    {Tr("Please click above to set")} API Endpoint

    )} {templateAPIs.length > 0 && ( )} {templateAPIsWhisper.length > 0 && ( )} {templateAPIsTTS.length > 0 && ( )} {templateAPIsImageGen.length > 0 && ( )} {toolsTemplates.length > 0 && ( )} {chatStore.history.filter((msg) => !msg.example).length == 0 && (

    {Tr("Saved prompt templates")}

    {templates.map((t, index) => (
    {
      const newChatStore: ChatStore = structuredClone(t);
      // @ts-ignore
      delete newChatStore.name;
      // Fall back to the current chat's (or URL-default) credentials for any
      // endpoint/key the template left empty.
      if (!newChatStore.apiEndpoint) {
        newChatStore.apiEndpoint = getDefaultParams(
          "api",
          chatStore.apiEndpoint
        );
      }
      if (!newChatStore.apiKey) {
        newChatStore.apiKey = getDefaultParams(
          "key",
          chatStore.apiKey
        );
      }
      if (!newChatStore.whisper_api) {
        newChatStore.whisper_api = getDefaultParams(
          "whisper-api",
          chatStore.whisper_api
        );
      }
      if (!newChatStore.whisper_key) {
        newChatStore.whisper_key = getDefaultParams(
          "whisper-key",
          chatStore.whisper_key
        );
      }
      if (!newChatStore.tts_api) {
        newChatStore.tts_api = getDefaultParams(
          "tts-api",
          chatStore.tts_api
        );
      }
      if (!newChatStore.tts_key) {
        newChatStore.tts_key = getDefaultParams(
          "tts-key",
          chatStore.tts_key
        );
      }
      if (!newChatStore.image_gen_api) {
        newChatStore.image_gen_api = getDefaultParams(
          "image-gen-api",
          chatStore.image_gen_api
        );
      }
      if (!newChatStore.image_gen_key) {
        newChatStore.image_gen_key = getDefaultParams(
          "image-gen-key",
          chatStore.image_gen_key
        );
      }
      newChatStore.cost = 0;
      // manage undefined value because of version update
      newChatStore.toolsString = newChatStore.toolsString || "";
      setChatStore({ ...newChatStore });
    }} > {t.name}
    ))}
    )} {chatStore.history.length === 0 && (

    {Tr("No chat history here")}
    ⚙{Tr("Model")}: {chatStore.model}
    ⬆{Tr("Click above to change the settings of this chat")}
    ↖{Tr("Click the conor to create a new chat")}
    ⚠ {Tr( "All chat history and settings are stored in the local browser" )}

    )} {chatStore.systemMessageContent.trim() && (
    Prompt
    setShowSettings(true)} > {chatStore.systemMessageContent}
    )} {chatStore.history.map((_, messageIndex) => ( ))} {showGenerating && (

    {generatingMessage || Tr("Generating...")} ...

    )}

    {chatStore.history.length > 0 && ( )} {chatStore.develop_mode && chatStore.history.length > 0 && ( )}

    {chatStore.responseModelName && ( <> {Tr("Generated by")} {chatStore.responseModelName} )} {chatStore.postBeginIndex !== 0 && ( <>
    {Tr("Info: chat history is too long, forget messages")}:{" "} {chatStore.postBeginIndex} )}

    {chatStore.chatgpt_api_web_version < "v1.3.0" && (


    {Tr("Warning: current chatStore version")}:{" "} {chatStore.chatgpt_api_web_version} {"< v1.3.0"}
    v1.3.0 引入与旧版不兼容的消息裁切算法。继续使用旧版可能会导致消息裁切过多或过少(表现为失去上下文或输出不完整)。
    请在左上角创建新会话:)

    )} {chatStore.chatgpt_api_web_version < "v1.4.0" && (


    {Tr("Warning: current chatStore version")}:{" "} {chatStore.chatgpt_api_web_version} {"< v1.4.0"}
    v1.4.0 增加了更多参数,继续使用旧版可能因参数确实导致未定义的行为
    请在左上角创建新会话:)

    )} {chatStore.chatgpt_api_web_version < "v1.6.0" && (


    提示:当前会话版本 {chatStore.chatgpt_api_web_version} {Tr("Warning: current chatStore version")}:{" "} {chatStore.chatgpt_api_web_version} {"< v1.6.0"} 。
    v1.6.0 开始保存会话模板时会将 apiKey 和 apiEndpoint 设置为空,继续使用旧版可能在保存读取模板时出现问题
    请在左上角创建新会话:)

    )} {showRetry && (

    )}
    {images.length > 0 && (
    {images.map((image, index) => (
    {image.type === "image_url" && ( )}
    ))}
    )} {generatingMessage && ( { setFollow(!follow); }} > )}
    {showAddImage && ( )} {chatStore.whisper_api && chatStore.whisper_key && (chatStore.whisper_key || chatStore.apiKey) && ( )} {chatStore.develop_mode && ( )} {chatStore.develop_mode && ( )} {chatStore.develop_mode && ( )} {showAddToolMsg && (
    { setShowAddToolMsg(false); }} >
    { event.stopPropagation(); }} >

    Add Tool Message


    setNewToolCallID(event.target.value) } />
    )}
  );
}