From 05f57f29e523a13c8249f69b3b5ac8687e3be727 Mon Sep 17 00:00:00 2001
From: heimoshuiyu
Date: Fri, 31 Mar 2023 04:50:49 +0800
Subject: [PATCH] show cost
---
src/app.tsx | 7 +++++--
src/chatbox.tsx | 13 +++++++++++--
src/models.ts | 36 ++++++++++++++++++++++++++++++++++++
src/settings.tsx | 22 ++++++++++------------
4 files changed, 62 insertions(+), 16 deletions(-)
create mode 100644 src/models.ts
diff --git a/src/app.tsx b/src/app.tsx
index 47c3154..462345c 100644
--- a/src/app.tsx
+++ b/src/app.tsx
@@ -4,7 +4,7 @@ import "./global.css";
import { calculate_token_length, Message } from "./chatgpt";
import getDefaultParams from "./getDefaultParam";
import ChatBOX from "./chatbox";
-import { options } from "./settings";
+import models from "./models";
import CHATGPT_API_WEB_VERSION from "./CHATGPT_API_WEB_VERSION";
@@ -26,6 +26,7 @@ export interface ChatStore {
streamMode: boolean;
model: string;
responseModelName: string;
+ cost: number;
}
const _defaultAPIEndpoint = "https://api.openai.com/v1/chat/completions";
@@ -43,12 +44,13 @@ const newChatStore = (
postBeginIndex: 0,
tokenMargin: 1024,
totalTokens: 0,
- maxTokens: options[getDefaultParams("model", model)],
+ maxTokens: models[getDefaultParams("model", model)].maxToken,
apiKey: getDefaultParams("key", apiKey),
apiEndpoint: getDefaultParams("api", apiEndpoint),
streamMode: getDefaultParams("mode", streamMode),
model: getDefaultParams("model", model),
responseModelName: "",
+ cost: 0,
};
};
@@ -99,6 +101,7 @@ export function App() {
if (message.token === undefined)
message.token = calculate_token_length(message.content);
}
+ if (ret.cost === undefined) ret.cost = 0;
return ret;
};
diff --git a/src/chatbox.tsx b/src/chatbox.tsx
index fa52423..585b9a0 100644
--- a/src/chatbox.tsx
+++ b/src/chatbox.tsx
@@ -7,6 +7,7 @@ import ChatGPT, {
FetchResponse,
} from "./chatgpt";
import Message from "./message";
+import models from "./models";
import Settings from "./settings";
export default function ChatBOX(props: {
@@ -102,6 +103,13 @@ export default function ChatBOX(props: {
chatStore.streamMode = false;
const data = (await response.json()) as FetchResponse;
chatStore.responseModelName = data.model ?? "";
+ if (data.model) {
+ chatStore.cost +=
+ (data.usage.prompt_tokens ?? 0) * models[data.model].price.prompt;
+ chatStore.cost +=
+ (data.usage.completion_tokens ?? 0) *
+ models[data.model].price.completion;
+ }
const content = client.processFetchResponse(data);
chatStore.history.push({
role: "assistant",
@@ -239,9 +247,10 @@ export default function ChatBOX(props: {
{" "}
{chatStore.model}{" "}
- Messages: {chatStore.history.filter(({ hide }) => !hide).length}
+ Msg: {chatStore.history.filter(({ hide }) => !hide).length}
{" "}
- Cut: {chatStore.postBeginIndex}
+ Cut: {chatStore.postBeginIndex}{" "}
+ ${chatStore.cost.toFixed(4)}
diff --git a/src/models.ts b/src/models.ts
new file mode 100644
index 0000000..ada18ff
--- /dev/null
+++ b/src/models.ts
@@ -0,0 +1,36 @@
+interface Model {
+ maxToken: number;
+ price: {
+ prompt: number;
+ completion: number;
+ };
+}
+
+const models: Record
= {
+ "gpt-3.5-turbo": {
+ maxToken: 4096,
+ price: { prompt: 0.002 / 1000, completion: 0.002 / 1000 },
+ },
+ "gpt-3.5-turbo-0301": {
+ maxToken: 4096,
+ price: { prompt: 0.002 / 1000, completion: 0.002 / 1000 },
+ },
+ "gpt-4": {
+ maxToken: 8192,
+ price: { prompt: 0.03 / 1000, completion: 0.06 / 1000 },
+ },
+ "gpt-4-0314": {
+ maxToken: 8192,
+ price: { prompt: 0.03 / 1000, completion: 0.06 / 1000 },
+ },
+ "gpt-4-32k": {
+ maxToken: 8192,
+ price: { prompt: 0.03 / 1000, completion: 0.06 / 1000 },
+ },
+ "gpt-4-32k-0314": {
+ maxToken: 8192,
+ price: { prompt: 0.06 / 1000, completion: 0.12 / 1000 },
+ },
+};
+
+export default models;
diff --git a/src/settings.tsx b/src/settings.tsx
index 4ef9b80..ed23bc9 100644
--- a/src/settings.tsx
+++ b/src/settings.tsx
@@ -1,6 +1,7 @@
import { createRef } from "preact";
import { StateUpdater } from "preact/hooks";
import { ChatStore } from "./app";
+import models from "./models";
const Help = (props: { children: any; help: string }) => {
return (
@@ -18,16 +19,6 @@ const Help = (props: { children: any; help: string }) => {
);
};
-// model and their max token
-export const options: Record<string, number> = {
- "gpt-3.5-turbo": 4096,
- "gpt-3.5-turbo-0301": 4096,
- "gpt-4": 8192,
- "gpt-4-0314": 8192,
- "gpt-4-32k": 32768,
- "gpt-4-32k-0314": 32768,
-};
-
const SelectModel = (props: {
chatStore: ChatStore;
setChatStore: (cs: ChatStore) => void;
@@ -42,11 +33,11 @@ const SelectModel = (props: {
onChange={(event: any) => {
const model = event.target.value as string;
props.chatStore.model = model;
- props.chatStore.maxTokens = options[model];
+ props.chatStore.maxTokens = models[model].maxToken;
props.setChatStore({ ...props.chatStore });
}}
>
- {Object.keys(options).map((opt) => (
+ {Object.keys(models).map((opt) => (
))}
@@ -149,6 +140,9 @@ export default (props: {
Settings
+
+ Total cost in this section ${props.chatStore.cost.toFixed(4)}
+