show response model name

This commit is contained in:
2023-03-30 13:39:19 +08:00
parent faac2303df
commit 07885c681c
3 changed files with 18 additions and 4 deletions

View File

@@ -17,6 +17,7 @@ export interface ChatStore {
apiEndpoint: string;
streamMode: boolean;
model: string;
responseModelName: string;
}
const _defaultAPIEndpoint = "https://api.openai.com/v1/chat/completions";
@@ -38,6 +39,7 @@ const newChatStore = (
apiEndpoint: getDefaultParams("api", apiEndpoint),
streamMode: getDefaultParams("mode", streamMode),
model: getDefaultParams("model", model),
responseModelName: "",
};
};
@@ -78,6 +80,7 @@ export function App() {
const ret = JSON.parse(val) as ChatStore;
// handle read from old version chatstore
if (ret.model === undefined) ret.model = "gpt-3.5-turbo";
if (ret.responseModelName === undefined) ret.responseModelName = "";
return ret;
};
@@ -107,7 +110,7 @@ export function App() {
chatStore.apiKey,
chatStore.systemMessageContent,
chatStore.apiEndpoint,
chatStore.streamMode,
chatStore.streamMode
)
)
);

View File

@@ -28,6 +28,7 @@ export default function ChatBOX(props: {
const client = new ChatGPT(chatStore.apiKey);
const _completeWithStreamMode = async (response: Response) => {
chatStore.streamMode = true;
// call api, return response text
console.log("response", response);
const reader = response.body?.getReader();
@@ -60,7 +61,10 @@ export default function ChatBOX(props: {
return JSON.parse(line.trim().slice("data: ".length));
})
.filter((i) => i);
// console.log("jsons", jsons);
console.log("jsons", jsons);
for (const { model } of jsons) {
if (model) chatStore.responseModelName = model;
}
const chunkText = jsons
.map((j) => j.choices[0].delta.content ?? "")
.join("");
@@ -92,7 +96,9 @@ export default function ChatBOX(props: {
};
const _completeWithFetchMode = async (response: Response) => {
chatStore.streamMode = false;
const data = (await response.json()) as FetchResponse;
chatStore.responseModelName = data.model ?? "";
const content = client.processFetchResponse(data);
chatStore.history.push({ role: "assistant", content });
setShowGenerating(false);
@@ -113,10 +119,8 @@ export default function ChatBOX(props: {
const response = await client._fetch(chatStore.streamMode);
const contentType = response.headers.get("content-type");
if (contentType === "text/event-stream") {
chatStore.streamMode = true;
await _completeWithStreamMode(response);
} else if (contentType === "application/json") {
chatStore.streamMode = false;
await _completeWithFetchMode(response);
} else {
throw `unknown response content type ${contentType}`;
@@ -148,6 +152,7 @@ export default function ChatBOX(props: {
console.log("empty message");
return;
}
chatStore.responseModelName = "";
chatStore.history.push({ role: "user", content: inputMsg.trim() });
// manually calculate token length
chatStore.totalTokens += client.calculate_token_length(inputMsg.trim());
@@ -243,6 +248,11 @@ export default function ChatBOX(props: {
...
</p>
)}
{chatStore.responseModelName && (
<p className="p-2 my-2 text-center opacity-50 dark:text-white">
Generated by {chatStore.responseModelName}
</p>
)}
{showRetry && (
<p className="text-right p-2 my-2 dark:text-white">
<button

View File

@@ -4,6 +4,7 @@ export interface Message {
}
export interface ChunkMessage {
model: string;
choices: {
delta: { role: "assitant" | undefined; content: string | undefined };
}[];