// ChatBOX: top-level chat UI component for chatgpt-api-web.
// Holds per-chat UI state (input box, generating/retry flags, saved templates),
// drives completion requests through the ChatGPT client in both SSE-stream and
// plain-fetch modes, and does token/cost accounting against the `models` table.
// NOTE(review): this file appears damaged by whatever extracted it — the code is
// collapsed onto a few physical lines (so the `//` comments embedded mid-line
// comment out everything after them on that line), and the JSX element tags in
// the render body below have been stripped. All original bytes are preserved;
// only standalone comment lines were added at lexically safe seams.
import { Tr, langCodeContext, LANG_OPTIONS } from "./translate"; import structuredClone from "@ungap/structured-clone"; import { createRef } from "preact"; import { StateUpdater, useEffect, useState } from "preact/hooks"; import { ChatStore, STORAGE_NAME_TEMPLATE, STORAGE_NAME_TEMPLATE_API, TemplateAPI, addTotalCost, } from "./app"; import ChatGPT, { calculate_token_length, ChunkMessage, FetchResponse, MessageDetail, } from "./chatgpt"; import Message from "./message"; import models from "./models"; import Settings from "./settings"; import getDefaultParams from "./getDefaultParam"; export interface TemplateChatStore extends ChatStore { name: string; } export default function ChatBOX(props: { chatStore: ChatStore; setChatStore: (cs: ChatStore) => void; selectedChatIndex: number; setSelectedChatIndex: StateUpdater; }) { const { chatStore, setChatStore } = props; // prevent error if (chatStore === undefined) return
// Guard above bails out when no chat is selected. NOTE(review): the bare
// `return` ends its line, so ASI terminates it; the leading `;` below is an
// empty statement. Below: UI state hooks, auto-scroll effect, the ChatGPT
// client, a manual token re-estimator, and the stream-mode completion handler.
; const [inputMsg, setInputMsg] = useState(""); const [showGenerating, setShowGenerating] = useState(false); const [generatingMessage, setGeneratingMessage] = useState(""); const [showRetry, setShowRetry] = useState(false); const [isRecording, setIsRecording] = useState("Mic"); const mediaRef = createRef(); const messagesEndRef = createRef(); useEffect(() => { messagesEndRef.current.scrollIntoView({ behavior: "smooth" }); }, [showRetry, showGenerating, generatingMessage]); const client = new ChatGPT(chatStore.apiKey); const update_total_tokens = () => { // manually estimate token client.total_tokens = calculate_token_length( chatStore.systemMessageContent ); for (const msg of chatStore.history .filter(({ hide }) => !hide) .slice(chatStore.postBeginIndex)) { client.total_tokens += msg.token; } chatStore.totalTokens = client.total_tokens; }; const _completeWithStreamMode = async (response: Response) => { let responseTokenCount = 0; chatStore.streamMode = true; const allChunkMessage: string[] = []; setShowGenerating(true); for await (const i of client.processStreamResponse(response)) { chatStore.responseModelName = i.model; responseTokenCount += 1; allChunkMessage.push(i.choices[0].delta.content ?? ""); setGeneratingMessage(allChunkMessage.join("")); } setShowGenerating(false); const content = allChunkMessage.join(""); // estimate cost let cost = 0; if (chatStore.responseModelName) { cost += responseTokenCount * (models[chatStore.responseModelName]?.price?.completion ?? 0); let sum = 0; for (const msg of chatStore.history .filter(({ hide }) => !hide) .slice(chatStore.postBeginIndex)) { sum += msg.token; } cost += sum * (models[chatStore.responseModelName]?.price?.prompt ?? 
/* Stream-mode cost estimate: completion cost = chunk count * completion price;
   prompt cost = sum of visible (non-hidden, post-cut) history tokens * prompt
   price. NOTE(review): one chunk is counted as one token — approximate.
   Next: fetch-mode handler, which trusts the API-reported `usage` counts and
   back-fills the last user message's token count from prompt_tokens. */
0); } console.log("cost", cost); chatStore.cost += cost; addTotalCost(cost); chatStore.history.push({ role: "assistant", content, hide: false, token: responseTokenCount, example: false, }); // manually copy status from client to chatStore chatStore.maxTokens = client.max_tokens; chatStore.tokenMargin = client.tokens_margin; update_total_tokens(); setChatStore({ ...chatStore }); setGeneratingMessage(""); setShowGenerating(false); }; const _completeWithFetchMode = async (response: Response) => { chatStore.streamMode = false; const data = (await response.json()) as FetchResponse; chatStore.responseModelName = data.model ?? ""; if (data.model) { let cost = 0; cost += (data.usage.prompt_tokens ?? 0) * (models[data.model]?.price?.prompt ?? 0); cost += (data.usage.completion_tokens ?? 0) * (models[data.model]?.price?.completion ?? 0); chatStore.cost += cost; addTotalCost(cost); } const content = client.processFetchResponse(data); // estimate user's input message token let aboveToken = 0; for (const msg of chatStore.history .filter(({ hide }) => !hide) .slice(chatStore.postBeginIndex, -1)) { aboveToken += msg.token; } if (data.usage.prompt_tokens) { const userMessageToken = data.usage.prompt_tokens - aboveToken; console.log("set user message token"); if (chatStore.history.filter((msg) => !msg.hide).length > 0) { chatStore.history.filter((msg) => !msg.hide).slice(-1)[0].token = userMessageToken; } } chatStore.history.push({ role: "assistant", content, hide: false, token: data.usage.completion_tokens ?? 
/* Fetch mode: prefer the API-reported completion token count, falling back to
   a local estimate of the assistant content. Next: `complete()` copies config
   from chatStore into the client, posts the visible history, and dispatches on
   the response content-type (event-stream vs JSON). */
calculate_token_length(content), example: false, }); setShowGenerating(false); }; // wrap the actuall complete api const complete = async () => { // manually copy status from chatStore to client client.apiEndpoint = chatStore.apiEndpoint; client.sysMessageContent = chatStore.systemMessageContent; client.tokens_margin = chatStore.tokenMargin; client.temperature = chatStore.temperature; client.enable_temperature = chatStore.temperature_enabled; client.top_p = chatStore.top_p; client.enable_top_p = chatStore.top_p_enabled; client.frequency_penalty = chatStore.frequency_penalty; client.presence_penalty = chatStore.presence_penalty; client.messages = chatStore.history // only copy non hidden message .filter(({ hide }) => !hide) .slice(chatStore.postBeginIndex) // only copy content and role attribute to client for posting .map(({ content, role, example }) => { if (example) { return { content, role: "system", name: role === "assistant" ? "example_assistant" : "example_user", }; } return { content, role, }; }); client.model = chatStore.model; client.max_tokens = chatStore.maxTokens; client.max_gen_tokens = chatStore.maxGenTokens; client.enable_max_gen_tokens = chatStore.maxGenTokens_enabled; try { setShowGenerating(true); const response = await client._fetch(chatStore.streamMode); const contentType = response.headers.get("content-type"); if (contentType?.startsWith("text/event-stream")) { await _completeWithStreamMode(response); } else if (contentType?.startsWith("application/json")) { await _completeWithFetchMode(response); } else { throw `unknown response content type ${contentType}`; } // manually copy status from client to chatStore chatStore.maxTokens = client.max_tokens; chatStore.tokenMargin = client.tokens_margin; chatStore.totalTokens = client.total_tokens; console.log("postBeginIndex", chatStore.postBeginIndex); setShowRetry(false); setChatStore({ ...chatStore }); } catch (error) { setShowRetry(true); alert(error); } finally { setShowGenerating(false); 
// finally: presumably forces a re-render of the selected chat after the
// request settles (success or failure) — TODO confirm against setter semantics.
// Next: `send()` pushes the user message (NOTE(review): its local `inputMsg`
// shadows the state variable of the same name), then template persistence
// helpers backed by localStorage.
props.setSelectedChatIndex(props.selectedChatIndex); } }; // when user click the "send" button or ctrl+Enter in the textarea const send = async (msg = "") => { const inputMsg = msg.trim(); if (!inputMsg) { console.log("empty message"); return; } chatStore.responseModelName = ""; chatStore.history.push({ role: "user", content: inputMsg.trim(), hide: false, token: calculate_token_length(inputMsg.trim()), example: false, }); // manually calculate token length chatStore.totalTokens += client.calculate_token_length(inputMsg.trim()); client.total_tokens += client.calculate_token_length(inputMsg.trim()); setChatStore({ ...chatStore }); setInputMsg(""); await complete(); }; const [showSettings, setShowSettings] = useState(false); const [templates, _setTemplates] = useState( JSON.parse( localStorage.getItem(STORAGE_NAME_TEMPLATE) || "[]" ) as TemplateChatStore[] ); const [templateAPIs, _setTemplateAPIs] = useState( JSON.parse( localStorage.getItem(STORAGE_NAME_TEMPLATE_API) || "[]" ) as TemplateAPI[] ); const setTemplates = (templates: TemplateChatStore[]) => { localStorage.setItem(STORAGE_NAME_TEMPLATE, JSON.stringify(templates)); _setTemplates(templates); }; const setTemplateAPIs = (templateAPIs: TemplateAPI[]) => { localStorage.setItem( STORAGE_NAME_TEMPLATE_API, JSON.stringify(templateAPIs) ); _setTemplateAPIs(templateAPIs); }; const [images, setImages] = useState([]); return (
/* NOTE(review): everything below is the component's render body, but the JSX
   element tags appear to have been stripped by whatever produced this file —
   only text nodes, attribute/handler expressions and {…} interpolations remain
   (e.g. the `{showSettings && ( )}` below has lost its child element, and the
   `<>` fragments further down are unclosed). The original markup cannot be
   reconstructed from this file; do not edit or reformat this region without
   recovering it from version control. Kept byte-identical. */
{showSettings && ( )}
setShowSettings(true)} >
{" "}
{chatStore.model}{" "} Tokens:{" "} {chatStore.totalTokens}/{chatStore.maxTokens} {" "} {Tr("Cut")}:{" "} {chatStore.postBeginIndex}/ {chatStore.history.filter(({ hide }) => !hide).length} {" "} {" "} {Tr("Cost")}:{" "} ${chatStore.cost.toFixed(4)}
{!chatStore.apiKey && (

{Tr("Please click above to set")} (OpenAI) API KEY

)} {!chatStore.apiEndpoint && (

{Tr("Please click above to set")} API Endpoint

)} {templateAPIs.length > 0 && (chatStore.develop_mode || chatStore.history.filter((msg) => !msg.example).length == 0 || !chatStore.apiEndpoint || !chatStore.apiKey) && (

{Tr("Saved API templates")}


{templateAPIs.map((t, index) => (
{ chatStore.apiEndpoint = t.endpoint; chatStore.apiKey = t.key; setChatStore({ ...chatStore }); }} > {t.name}
))}
)} {chatStore.history.filter((msg) => !msg.example).length == 0 && (

{Tr("Saved prompt templates")}


{templates.map((t, index) => (
{ const newChatStore: ChatStore = structuredClone(t); // @ts-ignore delete newChatStore.name; if (!newChatStore.apiEndpoint) { newChatStore.apiEndpoint = getDefaultParams( "api", chatStore.apiEndpoint ); } if (!newChatStore.apiKey) { newChatStore.apiKey = getDefaultParams( "key", chatStore.apiKey ); } newChatStore.cost = 0; setChatStore({ ...newChatStore }); }} > {t.name}
))}
)} {chatStore.history.length === 0 && (

{Tr("No chat history here")}
⚙{Tr("Model")}: {chatStore.model}
⬆{Tr("Click above to change the settings of this chat")}
↖{Tr("Click the conor to create a new chat")}
⚠ {Tr( "All chat history and settings are stored in the local browser" )}
⚠{Tr("Documents and source code are avaliable here")}:{" "} github.com/heimoshuiyu/chatgpt-api-web

)} {chatStore.history.map((_, messageIndex) => ( ))} {showGenerating && (

{generatingMessage || Tr("Generating...")} ...

)}

{chatStore.history.length > 0 && ( )} {chatStore.develop_mode && chatStore.history.length > 0 && ( )}

{chatStore.responseModelName && ( <> {Tr("Generated by")} {chatStore.responseModelName} )} {chatStore.postBeginIndex !== 0 && ( <>
{Tr("Info: chat history is too long, forget messages")}:{" "} {chatStore.postBeginIndex} )}

{chatStore.chatgpt_api_web_version < "v1.3.0" && (


{Tr("Warning: current chatStore version")}:{" "} {chatStore.chatgpt_api_web_version} {"< v1.3.0"}
v1.3.0 引入与旧版不兼容的消息裁切算法。继续使用旧版可能会导致消息裁切过多或过少(表现为失去上下文或输出不完整)。
请在左上角创建新会话:)

)} {chatStore.chatgpt_api_web_version < "v1.4.0" && (


{Tr("Warning: current chatStore version")}:{" "} {chatStore.chatgpt_api_web_version} {"< v1.4.0"}
v1.4.0 增加了更多参数,继续使用旧版可能因参数确实导致未定义的行为
请在左上角创建新会话:)

)} {chatStore.chatgpt_api_web_version < "v1.6.0" && (


提示:当前会话版本 {chatStore.chatgpt_api_web_version} {Tr("Warning: current chatStore version")}:{" "} {chatStore.chatgpt_api_web_version} {"< v1.6.0"} 。
v1.6.0 开始保存会话模板时会将 apiKey 和 apiEndpoint 设置为空,继续使用旧版可能在保存读取模板时出现问题
请在左上角创建新会话:)

)} {showRetry && (

)}
{chatStore.whisper_api && (chatStore.whisper_key || chatStore.apiKey) && ( )} {chatStore.develop_mode && ( )} {chatStore.develop_mode && ( )}
); }