change to stream API mode

2023-03-15 02:07:21 +08:00
parent 333a1c34b2
commit a68e4b9dbd
2 changed files with 99 additions and 21 deletions
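
For context: with stream: true set in the request body, the chat completions endpoint answers with server-sent events rather than a single JSON document. Each event arrives as a "data:" line carrying one JSON chunk, and the stream ends with a literal "data: [DONE]" sentinel. A hypothetical transcript (values illustrative; only the fields the new ChunkMessage interface below declares are shown):

data: {"choices":[{"delta":{"role":"assistant","content":""}}]}
data: {"choices":[{"delta":{"content":"Hel"}}]}
data: {"choices":[{"delta":{"content":"lo"}}]}
data: [DONE]

The two diffs below wire this up: the ./chatgpt module gains a streaming _fetch(), and the app component parses the chunks incrementally into the UI.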

View File

@@ -1,7 +1,7 @@
import { useEffect, useState } from "preact/hooks";
import "./global.css";
-import ChatGPT, { Message } from "./chatgpt";
+import ChatGPT, { Message, ChunkMessage } from "./chatgpt";
import { createRef } from "preact";
export interface ChatStore {
@@ -79,6 +79,7 @@ export function App() {
const [inputMsg, setInputMsg] = useState("");
const [showGenerating, setShowGenerating] = useState(false);
+  const [generatingMessage, setGeneratingMessage] = useState("");
const client = new ChatGPT(chatStore.apiKey);
@@ -89,8 +90,65 @@ export function App() {
client.messages = chatStore.history.slice(chatStore.postBeginIndex);
// call the API and return the response text
-const response = await client.complete();
-chatStore.history.push({ role: "assistant", content: response });
+const response = await client.completeWithStream();
+console.log("response", response);
+const reader = response.body?.getReader();
+const allChunkMessage: string[] = [];
+await new ReadableStream({
+  async start(controller) {
+    while (true) {
+      let responseDone = false;
+      const state = await reader?.read();
+      const done = state?.done;
+      const value = state?.value;
+      if (done) break;
+      const text = new TextDecoder().decode(value);
+      // console.log("text:", text);
+      const lines = text
+        .trim()
+        .split("\n")
+        .map((line) => line.trim())
+        .filter((i) => {
+          if (!i) return false;
+          if (i === "data: [DONE]") {
+            responseDone = true;
+            return false;
+          }
+          return true;
+        });
+      console.log("lines", lines);
+      const jsons: ChunkMessage[] = lines
+        .map((line) => {
+          return JSON.parse(line.trim().slice("data: ".length));
+        })
+        .filter((i) => i);
+      // console.log("jsons", jsons);
+      const chunkText = jsons
+        .map((j) => j.choices[0].delta.content ?? "")
+        .join("");
+      // console.log("chunk text", chunkText);
+      allChunkMessage.push(chunkText);
+      setGeneratingMessage(allChunkMessage.join(""));
+      if (responseDone) break;
+    }
+    // console.log("push to history", allChunkMessage);
+    chatStore.history.push({
+      role: "assistant",
+      content: allChunkMessage.join(""),
+    });
+    // manually copy status from client to chatStore
+    chatStore.maxTokens = client.max_tokens;
+    chatStore.tokenMargin = client.tokens_margin;
+    chatStore.totalTokens =
+      client.total_tokens +
+      39 +
+      client.calculate_token_length(allChunkMessage.join(""));
+    setChatStore({ ...chatStore });
+    setGeneratingMessage("");
+    setShowGenerating(false);
+  },
+});
// manually copy status from client to chatStore
chatStore.maxTokens = client.max_tokens;
@@ -112,8 +170,6 @@ export function App() {
await _complete();
} catch (error) {
alert(error);
-} finally {
-  setShowGenerating(false);
}
};
@@ -304,7 +360,10 @@ export function App() {
);
})}
{showGenerating && (
<p className="animate-pulse">Generating... please wait...</p>
<p className="p-2 my-2 animate-pulse">
{generatingMessage ? generatingMessage : "生成中,保持网络稳定喵"}
...
</p>
)}
</div>
<div className="flex justify-between">

View File

@@ -3,6 +3,12 @@ export interface Message {
content: string;
}
+export interface ChunkMessage {
+  choices: {
+    delta: { role: "assistant" | undefined; content: string | undefined };
+  }[];
+}
class Chat {
OPENAI_API_KEY: string;
messages: Message[];
@@ -33,6 +39,24 @@ class Chat {
this.apiEndpoint = apiEndPoint;
}
+_fetch() {
+  return fetch(this.apiEndpoint, {
+    method: "POST",
+    headers: {
+      Authorization: `Bearer ${this.OPENAI_API_KEY}`,
+      "Content-Type": "application/json",
+    },
+    body: JSON.stringify({
+      model: "gpt-3.5-turbo",
+      messages: [
+        { role: "system", content: this.sysMessageContent },
+        ...this.messages,
+      ],
+      stream: true,
+    }),
+  });
+}
async fetch(): Promise<{
id: string;
object: string;
@@ -49,21 +73,8 @@ class Chat {
index: number | undefined;
}[];
}> {
-const resp = await fetch(this.apiEndpoint, {
-  method: "POST",
-  headers: {
-    Authorization: `Bearer ${this.OPENAI_API_KEY}`,
-    "Content-Type": "application/json",
-  },
-  body: JSON.stringify({
-    model: "gpt-3.5-turbo",
-    messages: [
-      { role: "system", content: this.sysMessageContent },
-      ...this.messages,
-    ],
-  }),
-}).then((resp) => resp.json());
-return resp;
+const resp = await this._fetch();
+return await resp.json();
}
async say(content: string): Promise<string> {
@@ -90,6 +101,14 @@ class Chat {
);
}
+completeWithStream() {
+  this.total_tokens = this.messages
+    .map((msg) => this.calculate_token_length(msg.content) + 20)
+    .reduce((a, v) => a + v);
+  return this._fetch();
+}
// https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
calculate_token_length(content: string): number {
const totalCount = content.length;
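
calculate_token_length is truncated above, but per the linked OpenAI article it derives an estimate from the character count. Under that assumption, a hypothetical usage sketch of the new streaming path (the API key and message contents are placeholders; ChatGPT is the default export imported by the app):

const client = new ChatGPT("sk-placeholder");
client.messages = [
  { role: "user", content: "Hello" },
  { role: "assistant", content: "Hi! How can I help?" },
];
// completeWithStream() estimates total_tokens as the sum over messages of
// (calculate_token_length(content) + 20) before kicking off the request;
// the returned promise resolves to a Response whose body is the SSE stream
// that the app's _complete() parses chunk by chunk.
const response = await client.completeWithStream();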