From 99d3c69647efd7561497948749ab8d853a4e66d8 Mon Sep 17 00:00:00 2001
From: heimoshuiyu
Date: Fri, 24 Mar 2023 13:14:34 +0800
Subject: [PATCH] use stream mode depend on content-type

---
 src/chatbox.tsx | 23 +++++++++++++----------
 src/chatgpt.ts  | 42 ++++++++++++++++++++++++------------------
 2 files changed, 37 insertions(+), 28 deletions(-)

diff --git a/src/chatbox.tsx b/src/chatbox.tsx
index 17c9a43..934a223 100644
--- a/src/chatbox.tsx
+++ b/src/chatbox.tsx
@@ -1,6 +1,6 @@
 import { useState } from "preact/hooks";
 import type { ChatStore } from "./app";
-import ChatGPT, { ChunkMessage } from "./chatgpt";
+import ChatGPT, { ChunkMessage, FetchResponse } from "./chatgpt";
 import Message from "./message";
 import Settings from "./settings";
 
@@ -18,9 +18,8 @@ export default function ChatBOX(props: {
 
   const client = new ChatGPT(chatStore.apiKey);
 
-  const _completeWithStreamMode = async () => {
+  const _completeWithStreamMode = async (response: Response) => {
     // call api, return reponse text
-    const response = await client.completeWithSteam();
     console.log("response", response);
     const reader = response.body?.getReader();
     const allChunkMessage: string[] = [];
@@ -83,10 +82,10 @@
     });
   };
 
-  const _completeWithFetchMode = async () => {
-    // call api, return reponse text
-    const response = await client.complete();
-    chatStore.history.push({ role: "assistant", content: response });
+  const _completeWithFetchMode = async (response: Response) => {
+    const data = (await response.json()) as FetchResponse;
+    const content = client.processFetchResponse(data);
+    chatStore.history.push({ role: "assistant", content });
     setShowGenerating(false);
   };
 
@@ -98,10 +97,14 @@
     client.messages = chatStore.history.slice(chatStore.postBeginIndex);
     try {
      setShowGenerating(true);
-      if (chatStore.streamMode) {
-        await _completeWithStreamMode();
+      const response = await client._fetch(chatStore.streamMode);
+      const contentType = response.headers.get("content-type");
+      if (contentType === "text/event-stream") {
+        await _completeWithStreamMode(response);
+      } else if (contentType === "application/json") {
+        await _completeWithFetchMode(response);
       } else {
-        await _completeWithFetchMode();
+        throw `unknown response content type ${contentType}`;
       }
       // manually copy status from client to chatStore
       chatStore.maxTokens = client.max_tokens;
diff --git a/src/chatgpt.ts b/src/chatgpt.ts
index 8673f50..589fba5 100644
--- a/src/chatgpt.ts
+++ b/src/chatgpt.ts
@@ -9,6 +9,23 @@
   }[];
 }
 
+export interface FetchResponse {
+  id: string;
+  object: string;
+  created: number;
+  model: string;
+  usage: {
+    prompt_tokens: number | undefined;
+    completion_tokens: number | undefined;
+    total_tokens: number | undefined;
+  };
+  choices: {
+    message: Message | undefined;
+    finish_reason: "stop" | "length";
+    index: number | undefined;
+  }[];
+}
+
 class Chat {
   OPENAI_API_KEY: string;
   messages: Message[];
@@ -57,22 +74,7 @@
     });
   }
 
-  async fetch(): Promise<{
-    id: string;
-    object: string;
-    created: number;
-    model: string;
-    usage: {
-      prompt_tokens: number | undefined;
-      completion_tokens: number | undefined;
-      total_tokens: number | undefined;
-    };
-    choices: {
-      message: Message | undefined;
-      finish_reason: "stop" | "length";
-      index: number | undefined;
-    }[];
-  }> {
+  async fetch(): Promise<FetchResponse> {
     const resp = await this._fetch();
     return await resp.json();
   }
@@ -83,8 +85,7 @@
     return this.messages.slice(-1)[0].content;
   }
 
-  async complete(): Promise<string> {
-    const resp = await this.fetch();
+  processFetchResponse(resp: FetchResponse): string {
     this.total_tokens = resp?.usage?.total_tokens ?? 0;
     if (resp?.choices[0]?.message) {
      this.messages.push(resp?.choices[0]?.message);
@@ -101,6 +102,11 @@
     );
   }
 
+  async complete(): Promise<string> {
+    const resp = await this.fetch();
+    return this.processFetchResponse(resp);
+  }
+
   completeWithSteam() {
     this.total_tokens = this.messages
       .map((msg) => this.calculate_token_length(msg.content) + 20)
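
For reviewers, a minimal standalone sketch of the dispatch pattern this patch adopts: pick stream handling or plain JSON handling from the response's content-type header rather than from a client-side flag. The sketch is not part of the patch; the endpoint URL, request body, and function name are placeholders, the SSE branch returns the raw event text without parsing "data:" lines, and it matches the header by prefix where the patch compares it for exact equality.

// Illustrative only: dispatch on Content-Type instead of a streamMode flag.
async function completeByContentType(url: string, body: unknown): Promise<string> {
  const response = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  const contentType = response.headers.get("content-type");

  if (contentType?.startsWith("text/event-stream")) {
    // Stream mode: read SSE chunks as they arrive.
    const reader = response.body?.getReader();
    const decoder = new TextDecoder();
    const parts: string[] = [];
    if (reader) {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        parts.push(decoder.decode(value, { stream: true }));
      }
    }
    return parts.join(""); // raw "data: ..." lines; SSE parsing omitted for brevity
  } else if (contentType?.startsWith("application/json")) {
    // Fetch mode: the whole completion arrives as a single JSON document.
    const data = await response.json();
    return data?.choices?.[0]?.message?.content ?? "";
  }
  throw new Error(`unknown response content type ${contentType}`);
}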