calculate tokens and forget some messages

2023-03-28 21:12:34 +08:00
parent 26a66d112b
commit 7c34379ecb
3 changed files with 19 additions and 6 deletions

@@ -102,6 +102,8 @@ export default function ChatBOX(props: {
client.apiEndpoint = chatStore.apiEndpoint;
client.sysMessageContent = chatStore.systemMessageContent;
client.messages = chatStore.history.slice(chatStore.postBeginIndex);
// try to forget some messages before sending the request
client.forgetSomeMessages();
try {
setShowGenerating(true);
const response = await client._fetch(chatStore.streamMode);
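
The call to client.forgetSomeMessages() is new in this commit, but the method's implementation is not part of this diff. A minimal sketch of the likely idea, dropping the oldest history entries until the estimated prompt size fits a budget, is shown below; the message shape, the maxTokens parameter, and the standalone-function form are all assumptions made for illustration, and only calculate_token_length comes from this commit.

// Hypothetical sketch only, not the real client method from this commit.
interface ChatMessage {
  role: "user" | "assistant" | "system";
  content: string;
}

function forgetOldMessages(
  messages: ChatMessage[],
  totalTokens: number,
  maxTokens: number
): { messages: ChatMessage[]; totalTokens: number } {
  // Drop the oldest messages until the running token estimate fits the budget,
  // always keeping at least one message to send.
  while (messages.length > 1 && totalTokens > maxTokens) {
    const dropped = messages.shift();
    if (dropped) {
      totalTokens -= calculate_token_length(dropped.content);
    }
  }
  return { messages, totalTokens: Math.max(0, totalTokens) };
}
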
@@ -140,6 +142,9 @@ export default function ChatBOX(props: {
return;
}
chatStore.history.push({ role: "user", content: inputMsg.trim() });
// manually calculate token length
chatStore.totalTokens += client.calculate_token_length(inputMsg.trim());
client.total_tokens += client.calculate_token_length(inputMsg.trim());
setChatStore({ ...chatStore });
setInputMsg("");
await complete();

@@ -25,6 +25,14 @@ export interface FetchResponse {
index: number | undefined;
}[];
}
// https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
export function calculate_token_length(content: string): number {
const totalCount = content.length;
const chineseCount = content.match(/[\u00ff-\uffff]|\S+/g)?.length ?? 0;
const englishCount = totalCount - chineseCount;
const tokenLength = englishCount / 4 + (chineseCount * 4) / 3;
return ~~tokenLength;
}
class Chat {
OPENAI_API_KEY: string;
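
The new exported helper follows the linked OpenAI help article's rough rule of about 4 characters per English token, and treats each character in the \u00ff-\uffff range as roughly 4/3 of a token. Note that the regex matches both single non-ASCII characters and whitespace-delimited words, so chineseCount is really a mixed count and the result is only a rough estimate, not a real tokenizer. A quick worked example of what the function returns, computed by hand from the formula above:

// "hello world" has 11 characters; the regex matches 2 word tokens, so
// englishCount = 11 - 2 = 9 and the estimate is ~~(9 / 4 + 2 * 4 / 3) = ~~4.91... = 4.
console.log(calculate_token_length("hello world")); // 4

// "你好世界" has 4 characters, each matched individually as non-ASCII, so
// englishCount = 0 and the estimate is ~~(0 / 4 + 4 * 4 / 3) = ~~5.33... = 5.
console.log(calculate_token_length("你好世界")); // 5

// `~~x` is a double bitwise NOT: it truncates toward zero, like Math.trunc(x).

Moving the heuristic out of the Chat class (the next hunk keeps the old method as a thin wrapper) lets other modules, such as the Message component below, reuse it without holding a client instance.
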
@@ -114,13 +122,8 @@ class Chat {
return this._fetch(true);
}
// https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
calculate_token_length(content: string): number {
const totalCount = content.length;
const chineseCount = content.match(/[\u00ff-\uffff]|\S+/g)?.length ?? 0;
const englishCount = totalCount - chineseCount;
const tokenLength = englishCount / 4 + (chineseCount * 4) / 3;
return ~~tokenLength;
return calculate_token_length(content);
}
user(...messages: string[]) {

@@ -1,4 +1,5 @@
import { ChatStore } from "./app";
import { calculate_token_length } from "./chatgpt";
interface Props {
messageIndex: number;
@@ -24,6 +25,10 @@ export default function Message(props: Props) {
) {
chatStore.history.splice(messageIndex, 1);
chatStore.postBeginIndex = Math.max(chatStore.postBeginIndex - 1, 0);
chatStore.totalTokens = Math.max(
0,
chatStore.totalTokens - calculate_token_length(chat.content)
);
setChatStore({ ...chatStore });
}
}}
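
Taken together, the three changes keep chatStore.totalTokens as a running estimate: it grows when a user message is pushed and shrinks, clamped at zero, when a message is deleted from the history. A small self-contained sketch of that bookkeeping, assuming a simplified store shape (MiniStore is an illustrative stand-in; the real ChatStore has more fields than shown here):

// Simplified sketch of the bookkeeping this commit adds.
interface MiniStore {
  history: { role: string; content: string }[];
  totalTokens: number;
}

function pushUserMessage(store: MiniStore, content: string): void {
  store.history.push({ role: "user", content });
  store.totalTokens += calculate_token_length(content);
}

function deleteMessage(store: MiniStore, index: number): void {
  const [removed] = store.history.splice(index, 1);
  if (removed) {
    // Clamp at zero, mirroring the Math.max(0, ...) guard in the Message component.
    store.totalTokens = Math.max(
      0,
      store.totalTokens - calculate_token_length(removed.content)
    );
  }
}

// Example flow: the estimate rises and falls with the history.
const store: MiniStore = { history: [], totalTokens: 0 };
pushUserMessage(store, "hello world"); // totalTokens becomes 4 with the heuristic above
deleteMessage(store, 0); // back to 0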