diff --git a/src/app.tsx b/src/app.tsx
index b726880..1b41cb8 100644
--- a/src/app.tsx
+++ b/src/app.tsx
@@ -15,6 +15,7 @@ export interface ChatStore {
   apiKey: string;
   apiEndpoint: string;
   streamMode: boolean;
+  model: string;
 }
 
 const _defaultAPIEndpoint = "https://api.openai.com/v1/chat/completions";
@@ -22,7 +23,8 @@ const newChatStore = (
   apiKey = "",
   systemMessageContent = "你是一个有用的人工智能助理,根据我的提问和要求回答我的问题",
   apiEndpoint = _defaultAPIEndpoint,
-  streamMode = true
+  streamMode = true,
+  model = "gpt-3.5-turbo"
 ): ChatStore => {
   return {
     systemMessageContent: getDefaultParams("sys", systemMessageContent),
@@ -34,6 +36,7 @@ const newChatStore = (
     apiKey: getDefaultParams("key", apiKey),
     apiEndpoint: getDefaultParams("api", apiEndpoint),
     streamMode: getDefaultParams("mode", streamMode),
+    model: getDefaultParams("model", model),
   };
 };
 
@@ -71,7 +74,10 @@ export function App() {
     const key = `${STORAGE_NAME}-${index}`;
     const val = localStorage.getItem(key);
     if (val === null) return newChatStore();
-    return JSON.parse(val) as ChatStore;
+    const ret = JSON.parse(val) as ChatStore;
+    // handle read from old version chatstore
+    if (ret.model === undefined) ret.model = "gpt-3.5-turbo";
+    return ret;
   };
 
   const [chatStore, _setChatStore] = useState(
diff --git a/src/chatbox.tsx b/src/chatbox.tsx
index 29a44b2..1f1183d 100644
--- a/src/chatbox.tsx
+++ b/src/chatbox.tsx
@@ -102,6 +103,7 @@ export default function ChatBOX(props: {
     client.apiEndpoint = chatStore.apiEndpoint;
     client.sysMessageContent = chatStore.systemMessageContent;
     client.messages = chatStore.history.slice(chatStore.postBeginIndex);
+    client.model = chatStore.model;
     // try forget message before sending request
     client.forgetSomeMessages();
     try {
@@ -198,6 +199,8 @@ export default function ChatBOX(props: {
             暂无历史对话记录
             <br />
+            ⚙Model: {chatStore.model}
+            <br />
             ⚙Key: {chatStore.apiKey}
             <br />
             ⚙Endpoint: {chatStore.apiEndpoint}
             <br />
diff --git a/src/chatgpt.ts b/src/chatgpt.ts
index 02c918e..73cc5e5 100644
--- a/src/chatgpt.ts
+++ b/src/chatgpt.ts
@@ -42,6 +42,7 @@ class Chat {
   max_tokens: number;
   tokens_margin: number;
   apiEndpoint: string;
+  model: string;
 
   constructor(
     OPENAI_API_KEY: string | undefined,
@@ -50,6 +51,7 @@ class Chat {
       max_tokens = 4096,
       tokens_margin = 1024,
      apiEndPoint = "https://api.openai.com/v1/chat/completions",
+      model = "gpt-3.5-turbo",
     } = {}
   ) {
     if (OPENAI_API_KEY === undefined) {
@@ -62,6 +64,7 @@ class Chat {
     this.tokens_margin = tokens_margin;
     this.sysMessageContent = systemMessage;
     this.apiEndpoint = apiEndPoint;
+    this.model = model;
   }
 
   _fetch(stream = false) {
@@ -72,7 +75,7 @@ class Chat {
         "Content-Type": "application/json",
       },
       body: JSON.stringify({
-        model: "gpt-3.5-turbo",
+        model: this.model,
         messages: [
           { role: "system", content: this.sysMessageContent },
           ...this.messages,
diff --git a/src/settings.tsx b/src/settings.tsx
index df516b0..f5c0263 100644
--- a/src/settings.tsx
+++ b/src/settings.tsx
@@ -17,6 +17,41 @@ const Help = (props: { children: any; help: string }) => {
   );
 };
 
+const SelectModel = (props: {
+  chatStore: ChatStore;
+  setChatStore: (cs: ChatStore) => void;
+  help: string;
+}) => {
+  // model and their max token
+ const options: Record