use stream mode depending on content-type

2023-03-24 13:14:34 +08:00
parent 0148465e34
commit 99d3c69647
2 changed files with 37 additions and 28 deletions


@@ -1,6 +1,6 @@
 import { useState } from "preact/hooks";
 import type { ChatStore } from "./app";
-import ChatGPT, { ChunkMessage } from "./chatgpt";
+import ChatGPT, { ChunkMessage, FetchResponse } from "./chatgpt";
 import Message from "./message";
 import Settings from "./settings";
@@ -18,9 +18,8 @@ export default function ChatBOX(props: {
   const client = new ChatGPT(chatStore.apiKey);
-  const _completeWithStreamMode = async () => {
+  const _completeWithStreamMode = async (response: Response) => {
     // call api, return response text
-    const response = await client.completeWithSteam();
     console.log("response", response);
     const reader = response.body?.getReader();
     const allChunkMessage: string[] = [];
@@ -83,10 +82,10 @@ export default function ChatBOX(props: {
}); });
}; };
const _completeWithFetchMode = async () => { const _completeWithFetchMode = async (response: Response) => {
// call api, return reponse text const data = (await response.json()) as FetchResponse;
const response = await client.complete(); const content = client.processFetchResponse(data);
chatStore.history.push({ role: "assistant", content: response }); chatStore.history.push({ role: "assistant", content });
setShowGenerating(false); setShowGenerating(false);
}; };
@@ -98,10 +97,14 @@ export default function ChatBOX(props: {
     client.messages = chatStore.history.slice(chatStore.postBeginIndex);
     try {
       setShowGenerating(true);
-      if (chatStore.streamMode) {
-        await _completeWithStreamMode();
+      const response = await client._fetch(chatStore.streamMode);
+      const contentType = response.headers.get("content-type");
+      if (contentType === "text/event-stream") {
+        await _completeWithStreamMode(response);
+      } else if (contentType === "application/json") {
+        await _completeWithFetchMode(response);
       } else {
-        await _completeWithFetchMode();
+        throw `unknown response content type ${contentType}`;
       }
       // manually copy status from client to chatStore
       chatStore.maxTokens = client.max_tokens;
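
Side note on the dispatch introduced above: branching on the Content-Type header lets one request path serve both modes. A minimal standalone sketch of the same pattern, assuming only the standard Response type (the handler names onStream/onJson are illustrative, not from this repo):

async function dispatchByContentType(
  response: Response,
  onStream: (r: Response) => Promise<void>,
  onJson: (r: Response) => Promise<void>
): Promise<void> {
  // "text/event-stream" marks a server-sent-events body;
  // "application/json" marks a complete, non-streaming payload.
  const contentType = response.headers.get("content-type");
  if (contentType === "text/event-stream") {
    await onStream(response);
  } else if (contentType === "application/json") {
    await onJson(response);
  } else {
    throw new Error(`unknown response content type ${contentType}`);
  }
}

One caveat: the strict equality used here (and in the hunk above) misses headers that carry parameters such as "application/json; charset=utf-8"; a contentType?.startsWith(...) check would be more tolerant.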


@@ -9,6 +9,23 @@ export interface ChunkMessage {
   }[];
 }
+export interface FetchResponse {
+  id: string;
+  object: string;
+  created: number;
+  model: string;
+  usage: {
+    prompt_tokens: number | undefined;
+    completion_tokens: number | undefined;
+    total_tokens: number | undefined;
+  };
+  choices: {
+    message: Message | undefined;
+    finish_reason: "stop" | "length";
+    index: number | undefined;
+  }[];
+}
 class Chat {
   OPENAI_API_KEY: string;
   messages: Message[];
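
The new FetchResponse interface mirrors the JSON body of a non-streaming chat completion. An illustrative value of that shape (all field contents invented for the example, and assuming this module's Message type is a { role, content } record):

const example: FetchResponse = {
  id: "chatcmpl-abc123", // invented id
  object: "chat.completion",
  created: 1679638474,
  model: "gpt-3.5-turbo",
  usage: { prompt_tokens: 12, completion_tokens: 8, total_tokens: 20 },
  choices: [
    {
      message: { role: "assistant", content: "Hello!" },
      finish_reason: "stop",
      index: 0,
    },
  ],
};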
@@ -57,22 +74,7 @@ class Chat {
     });
   }
-  async fetch(): Promise<{
-    id: string;
-    object: string;
-    created: number;
-    model: string;
-    usage: {
-      prompt_tokens: number | undefined;
-      completion_tokens: number | undefined;
-      total_tokens: number | undefined;
-    };
-    choices: {
-      message: Message | undefined;
-      finish_reason: "stop" | "length";
-      index: number | undefined;
-    }[];
-  }> {
+  async fetch(): Promise<FetchResponse> {
     const resp = await this._fetch();
     return await resp.json();
   }
@@ -83,8 +85,7 @@ class Chat {
     return this.messages.slice(-1)[0].content;
   }
-  async complete(): Promise<string> {
-    const resp = await this.fetch();
+  processFetchResponse(resp: FetchResponse): string {
     this.total_tokens = resp?.usage?.total_tokens ?? 0;
     if (resp?.choices[0]?.message) {
       this.messages.push(resp?.choices[0]?.message);
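
With the parsing split out, both call sites reuse processFetchResponse: the component's fetch-mode handler feeds it response.json(), and complete() (re-added in the next hunk) feeds it this.fetch(). A hedged usage sketch, with a placeholder API key:

const client = new ChatGPT("sk-placeholder"); // constructor as used in the component file
// Path 1: the class helper fetches and parses in one call.
const viaComplete = await client.complete();
// Path 2: fetch separately, then hand the parsed JSON over.
const resp = await client._fetch(false); // streamMode = false
const data = (await resp.json()) as FetchResponse;
const viaProcess = client.processFetchResponse(data);

Per the method body above, both paths push the assistant reply into client.messages and update total_tokens as a side effect.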
@@ -101,6 +102,11 @@ class Chat {
     );
   }
+  async complete(): Promise<string> {
+    const resp = await this.fetch();
+    return this.processFetchResponse(resp);
+  }
   completeWithSteam() {
     this.total_tokens = this.messages
       .map((msg) => this.calculate_token_length(msg.content) + 20)
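
For completeness, the stream path that completeWithSteam and _completeWithStreamMode set up reads the text/event-stream body chunk by chunk. A hedged sketch of that consumption loop, following the standard SSE "data: ..." framing rather than this repo's exact parsing (the full ChunkMessage fields are not shown in this diff, so a hypothetical StreamChunk stands in):

// Hypothetical chunk shape following OpenAI's streaming format.
interface StreamChunk {
  choices: { delta?: { content?: string } }[];
}

async function readStream(response: Response): Promise<string> {
  const reader = response.body?.getReader();
  if (!reader) throw new Error("response has no body");
  const decoder = new TextDecoder();
  const allChunkMessage: string[] = [];
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    // Simplification: assumes each read() delivers whole "data: ..." lines;
    // robust code must buffer partial lines across reads.
    for (const line of decoder.decode(value, { stream: true }).split("\n")) {
      if (!line.startsWith("data: ")) continue;
      const payload = line.slice("data: ".length);
      if (payload === "[DONE]") return allChunkMessage.join("");
      const chunk = JSON.parse(payload) as StreamChunk;
      allChunkMessage.push(chunk.choices[0]?.delta?.content ?? "");
    }
  }
  return allChunkMessage.join("");
}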