record each message's token count and hide status; calculate postBeginIndex based on token counts

2023-03-31 04:16:23 +08:00
parent bdfe03699f
commit 26f9632f41
4 changed files with 154 additions and 48 deletions

View File

@@ -1,3 +1,3 @@
const CHATGPT_API_WEB_VERSION = "v1.2.2";
const CHATGPT_API_WEB_VERSION = "v1.3.0";
export default CHATGPT_API_WEB_VERSION;

View File

@@ -1,17 +1,22 @@
import { useEffect, useState } from "preact/hooks";
import "./global.css";
import { Message } from "./chatgpt";
import { calculate_token_length, Message } from "./chatgpt";
import getDefaultParams from "./getDefaultParam";
import ChatBOX from "./chatbox";
import { options } from "./settings";
import CHATGPT_API_WEB_VERSION from './CHATGPT_API_WEB_VERSION'
import CHATGPT_API_WEB_VERSION from "./CHATGPT_API_WEB_VERSION";
export interface ChatStoreMessage extends Message {
hide: boolean;
token: number;
}
export interface ChatStore {
chatgpt_api_web_version: string;
systemMessageContent: string;
history: Message[];
history: ChatStoreMessage[];
postBeginIndex: number;
tokenMargin: number;
totalTokens: number;
@@ -86,7 +91,14 @@ export function App() {
if (ret.model === undefined) ret.model = "gpt-3.5-turbo";
if (ret.responseModelName === undefined) ret.responseModelName = "";
if (ret.chatgpt_api_web_version === undefined)
ret.chatgpt_api_web_version = CHATGPT_API_WEB_VERSION;
// this record is from an old version because the field is undefined,
// so it cannot be higher than v1.3.0
ret.chatgpt_api_web_version = "v1.2.2";
for (const message of ret.history) {
if (message.hide === undefined) message.hide = false;
if (message.token === undefined)
message.token = calculate_token_length(message.content);
}
return ret;
};
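
A minimal standalone sketch of the upgrade step in the hunk above, assuming only the Message type and the calculate_token_length helper that the diff already imports from ./chatgpt; the upgradeMessage name is hypothetical and not part of the commit:

import { calculate_token_length, Message } from "./chatgpt";

// Shape each stored history entry has from v1.3.0 on.
interface ChatStoreMessage extends Message {
  hide: boolean; // hidden messages are kept locally but not posted
  token: number; // cached token count of the message content
}

// Back-fill the two new fields on an entry saved by an older version,
// where both fields are still undefined.
function upgradeMessage(
  message: Message & Partial<ChatStoreMessage>
): ChatStoreMessage {
  return {
    ...message,
    hide: message.hide ?? false,
    token: message.token ?? calculate_token_length(message.content),
  };
}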

View File

@@ -1,7 +1,11 @@
import { createRef } from "preact";
import { StateUpdater, useEffect, useState } from "preact/hooks";
import type { ChatStore } from "./app";
import ChatGPT, { ChunkMessage, FetchResponse } from "./chatgpt";
import ChatGPT, {
calculate_token_length,
ChunkMessage,
FetchResponse,
} from "./chatgpt";
import Message from "./message";
import Settings from "./settings";
@@ -77,17 +81,16 @@ export default function ChatBOX(props: {
setShowGenerating(false);
// console.log("push to history", allChunkMessage);
const content = allChunkMessage.join("");
chatStore.history.push({
role: "assistant",
content: allChunkMessage.join(""),
content,
hide: false,
token: calculate_token_length(content),
});
// manually copy status from client to chatStore
chatStore.maxTokens = client.max_tokens;
chatStore.tokenMargin = client.tokens_margin;
chatStore.totalTokens =
client.total_tokens +
39 +
client.calculate_token_length(allChunkMessage.join(""));
setChatStore({ ...chatStore });
setGeneratingMessage("");
setShowGenerating(false);
@@ -100,7 +103,12 @@ export default function ChatBOX(props: {
const data = (await response.json()) as FetchResponse;
chatStore.responseModelName = data.model ?? "";
const content = client.processFetchResponse(data);
chatStore.history.push({ role: "assistant", content });
chatStore.history.push({
role: "assistant",
content,
hide: false,
token: data.usage.completion_tokens ?? calculate_token_length(content),
});
setShowGenerating(false);
};
@@ -109,11 +117,35 @@ export default function ChatBOX(props: {
// manually copy status from chatStore to client
client.apiEndpoint = chatStore.apiEndpoint;
client.sysMessageContent = chatStore.systemMessageContent;
client.messages = chatStore.history.slice(chatStore.postBeginIndex);
client.tokens_margin = chatStore.tokenMargin;
client.messages = chatStore.history
.slice(chatStore.postBeginIndex)
// only copy non hidden message
.filter(({ hide }) => !hide)
// only copy content and role attribute to client for posting
.map(({ content, role }) => {
return {
content,
role,
};
});
client.model = chatStore.model;
client.max_tokens = chatStore.maxTokens;
// try forget message before sending request
client.forgetSomeMessages();
// todo move code
const max = chatStore.maxTokens - chatStore.tokenMargin;
let sum = 0;
chatStore.postBeginIndex = chatStore.history.filter(
({ hide }) => !hide
).length;
for (const msg of chatStore.history.slice().reverse()) {
sum += msg.token;
if (sum > max) break;
chatStore.postBeginIndex -= 1;
}
chatStore.postBeginIndex =
chatStore.postBeginIndex < 0 ? 0 : chatStore.postBeginIndex;
try {
setShowGenerating(true);
const response = await client._fetch(chatStore.streamMode);
@@ -129,11 +161,21 @@ export default function ChatBOX(props: {
chatStore.maxTokens = client.max_tokens;
chatStore.tokenMargin = client.tokens_margin;
chatStore.totalTokens = client.total_tokens;
// when total tokens > max tokens - margin tokens:
// ChatGPT will "forget" some historical messages,
// so client.messages.length will be less than chatStore.history.length
// todo move code
const max = chatStore.maxTokens - chatStore.tokenMargin;
let sum = 0;
chatStore.postBeginIndex = chatStore.history.filter(
({ hide }) => !hide
).length;
for (const msg of chatStore.history.slice().reverse()) {
sum += msg.token;
if (sum > max) break;
chatStore.postBeginIndex -= 1;
}
chatStore.postBeginIndex =
chatStore.history.length - client.messages.length;
chatStore.postBeginIndex < 0 ? 0 : chatStore.postBeginIndex;
console.log("postBeginIndex", chatStore.postBeginIndex);
setChatStore({ ...chatStore });
} catch (error) {
@@ -153,7 +195,12 @@ export default function ChatBOX(props: {
return;
}
chatStore.responseModelName = "";
chatStore.history.push({ role: "user", content: inputMsg.trim() });
chatStore.history.push({
role: "user",
content: inputMsg.trim(),
hide: false,
token: calculate_token_length(inputMsg.trim()),
});
// manually calculate token length
chatStore.totalTokens += client.calculate_token_length(inputMsg.trim());
client.total_tokens += client.calculate_token_length(inputMsg.trim());
@@ -191,7 +238,9 @@ export default function ChatBOX(props: {
Tokens: {chatStore.totalTokens} / {chatStore.maxTokens}
</span>{" "}
<span>{chatStore.model}</span>{" "}
<span>Messages: {chatStore.history.length}</span>{" "}
<span>
Messages: {chatStore.history.filter(({ hide }) => !hide).length}
</span>{" "}
<span>Cut: {chatStore.postBeginIndex}</span>
</div>
</p>
@@ -252,6 +301,23 @@ export default function ChatBOX(props: {
{chatStore.responseModelName && (
<p className="p-2 my-2 text-center opacity-50 dark:text-white">
Generated by {chatStore.responseModelName}
{chatStore.postBeginIndex !== 0 && (
<>
<br />
The first {chatStore.postBeginIndex} messages are cut and will not be sent
</>
)}
{chatStore.chatgpt_api_web_version < "v1.3.0" && (
<>
<br />
This session was created by an older version ({chatStore.chatgpt_api_web_version}),
<br />
so token counts recorded before v1.3.0 are estimates and may be inaccurate
<br />
</>
)}
</p>
)}
{showRetry && (
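
The postBeginIndex recalculation above appears three times in this commit and is marked "// todo move code"; one way it could be factored out, sketched here with a hypothetical computePostBeginIndex name and mirroring the committed loop (note that, as committed, hidden messages still add their token counts to the running sum):

// Walk the history from newest to oldest and keep messages while they fit
// in the token budget (maxTokens - tokenMargin); everything older is "cut".
function computePostBeginIndex(
  history: { hide: boolean; token: number }[],
  maxTokens: number,
  tokenMargin: number
): number {
  const max = maxTokens - tokenMargin;
  let sum = 0;
  // Start at the count of visible messages and step back once for every
  // message that still fits within the budget.
  let postBeginIndex = history.filter(({ hide }) => !hide).length;
  for (const msg of history.slice().reverse()) {
    sum += msg.token;
    if (sum > max) break;
    postBeginIndex -= 1;
  }
  return postBeginIndex < 0 ? 0 : postBeginIndex;
}

Each duplicated block would then reduce to chatStore.postBeginIndex = computePostBeginIndex(chatStore.history, chatStore.maxTokens, chatStore.tokenMargin);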

View File

@@ -15,28 +15,51 @@ export default function Message(props: Props) {
chat.role === "user" ? "left-0" : "right-0"
}`}
onClick={() => {
if (
confirm(
`Are you sure to delete this message?\n${chat.content.slice(
0,
39
)}...`
)
) {
chatStore.history.splice(messageIndex, 1);
chatStore.postBeginIndex = Math.max(chatStore.postBeginIndex - 1, 0);
chatStore.totalTokens = Math.max(
0,
chatStore.totalTokens - calculate_token_length(chat.content)
);
setChatStore({ ...chatStore });
chatStore.history[messageIndex].hide =
!chatStore.history[messageIndex].hide;
// todo move code
const max = chatStore.maxTokens - chatStore.tokenMargin;
let sum = 0;
chatStore.postBeginIndex = chatStore.history.filter(
({ hide }) => !hide
).length;
for (const msg of chatStore.history.slice().reverse()) {
sum += msg.token;
if (sum > max) break;
chatStore.postBeginIndex -= 1;
}
chatStore.postBeginIndex =
chatStore.postBeginIndex < 0 ? 0 : chatStore.postBeginIndex;
//chatStore.totalTokens =
chatStore.totalTokens = 0;
for (const i of chatStore.history
.filter(({ hide }) => !hide)
.slice(chatStore.postBeginIndex)
.map(({ token }) => token)) {
chatStore.totalTokens += i;
}
setChatStore({ ...chatStore });
}}
>
🗑
</button>
);
return (
<>
{chatStore.postBeginIndex !== 0 &&
!chatStore.history[messageIndex].hide &&
chatStore.postBeginIndex ===
chatStore.history.slice(0, messageIndex).filter(({ hide }) => !hide)
.length && (
<div className="flex items-center relative justify-center">
<hr className="w-full h-px my-4 border-0 bg-slate-800 dark:bg-white" />
<span className="absolute px-3 bg-slate-800 text-white rounded p-1 dark:bg-white dark:text-black">
Above messages are "forgotten"
</span>
</div>
)}
<div
className={`flex ${
chat.role === "assistant" ? "justify-start" : "justify-end"
@@ -47,11 +70,16 @@ export default function Message(props: Props) {
chat.role === "assistant"
? "bg-white dark:bg-gray-700 dark:text-white"
: "bg-green-400"
}`}
} ${chat.hide ? "opacity-50" : ""}`}
>
<p className="message-content">{chat.content}</p>
<p className="message-content">
{chat.hide
? chat.content.split("\n")[0].slice(0, 16) + "... (deleted)"
: chat.content}
</p>
<DeleteIcon />
</div>
</div>
</>
);
}
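
The hide toggle above also rebuilds chatStore.totalTokens by accumulating the cached per-message counts; the same calculation as a small helper, again only a sketch (the computeTotalTokens name is not in the codebase):

// Sum the cached token counts of the messages that will actually be posted:
// visible messages at or after postBeginIndex.
function computeTotalTokens(
  history: { hide: boolean; token: number }[],
  postBeginIndex: number
): number {
  return history
    .filter(({ hide }) => !hide)
    .slice(postBeginIndex)
    .reduce((sum, { token }) => sum + token, 0);
}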