support gpt-4

This commit is contained in:
2023-03-29 12:45:59 +08:00
parent 1158fdca38
commit 2d7edeb5b0
4 changed files with 58 additions and 5 deletions

View File

@@ -15,6 +15,7 @@ export interface ChatStore {
apiKey: string;
apiEndpoint: string;
streamMode: boolean;
model: string;
}
const _defaultAPIEndpoint = "https://api.openai.com/v1/chat/completions";
@@ -22,7 +23,8 @@ const newChatStore = (
apiKey = "",
systemMessageContent = "你是一个有用的人工智能助理,根据我的提问和要求回答我的问题",
apiEndpoint = _defaultAPIEndpoint,
streamMode = true
streamMode = true,
model = "gpt-3.5-turbo"
): ChatStore => {
return {
systemMessageContent: getDefaultParams("sys", systemMessageContent),
@@ -34,6 +36,7 @@ const newChatStore = (
apiKey: getDefaultParams("key", apiKey),
apiEndpoint: getDefaultParams("api", apiEndpoint),
streamMode: getDefaultParams("mode", streamMode),
model: getDefaultParams("model", model),
};
};
@@ -71,7 +74,10 @@ export function App() {
const key = `${STORAGE_NAME}-${index}`;
const val = localStorage.getItem(key);
if (val === null) return newChatStore();
return JSON.parse(val) as ChatStore;
const ret = JSON.parse(val) as ChatStore;
// handle read from old version chatstore
if (ret.model === undefined) ret.model = "gpt-3.5-turbo";
return ret;
};
const [chatStore, _setChatStore] = useState(

View File

@@ -102,6 +102,7 @@ export default function ChatBOX(props: {
client.apiEndpoint = chatStore.apiEndpoint;
client.sysMessageContent = chatStore.systemMessageContent;
client.messages = chatStore.history.slice(chatStore.postBeginIndex);
client.model = chatStore.model;
// try forget message before sending request
client.forgetSomeMessages();
try {
@@ -198,6 +199,8 @@ export default function ChatBOX(props: {
<p className="break-all opacity-60 p-6 rounded bg-white my-3 text-left dark:text-black">
<br />
Model: {chatStore.model}
<br />
Key: {chatStore.apiKey}
<br />
Endpoint: {chatStore.apiEndpoint}

View File

@@ -42,6 +42,7 @@ class Chat {
max_tokens: number;
tokens_margin: number;
apiEndpoint: string;
model: string;
constructor(
OPENAI_API_KEY: string | undefined,
@@ -50,6 +51,7 @@ class Chat {
max_tokens = 4096,
tokens_margin = 1024,
apiEndPoint = "https://api.openai.com/v1/chat/completions",
model = "gpt-3.5-turbo",
} = {}
) {
if (OPENAI_API_KEY === undefined) {
@@ -62,6 +64,7 @@ class Chat {
this.tokens_margin = tokens_margin;
this.sysMessageContent = systemMessage;
this.apiEndpoint = apiEndPoint;
this.model = model;
}
_fetch(stream = false) {
@@ -72,7 +75,7 @@ class Chat {
"Content-Type": "application/json",
},
body: JSON.stringify({
model: "gpt-3.5-turbo",
model: this.model,
messages: [
{ role: "system", content: this.sysMessageContent },
...this.messages,

View File

@@ -17,6 +17,41 @@ const Help = (props: { children: any; help: string }) => {
);
};
/**
 * Dropdown selector for the OpenAI chat model.
 *
 * Selecting a model also updates `chatStore.maxTokens` to that model's
 * context-window size (from the `options` table below) so token budgeting
 * elsewhere in the app stays consistent with the chosen model.
 */
const SelectModel = (props: {
  chatStore: ChatStore;
  setChatStore: (cs: ChatStore) => void;
  help: string;
}) => {
  // Selectable model name -> maximum context tokens for that model.
  const options: Record<string, number> = {
    "gpt-3.5-turbo": 4096,
    "gpt-3.5-turbo-0301": 4096,
    "gpt-4": 8192,
    "gpt-4-0314": 8192,
    "gpt-4-32k": 32768,
    "gpt-4-32k-0314": 32768,
  };
  return (
    <Help help={props.help}>
      <label className="m-2 p-2">Model</label>
      <select
        className="m-2 p-2"
        value={props.chatStore.model}
        onChange={(event: any) => {
          const model = event.target.value as string;
          // Build a fresh store object instead of mutating props.chatStore
          // in place: mutating a prop before spreading is an anti-pattern
          // that can mask updates from the renderer's change detection.
          props.setChatStore({
            ...props.chatStore,
            model,
            maxTokens: options[model],
          });
        }}
      >
        {Object.keys(options).map((opt) => (
          // `key` is required for correct list reconciliation when
          // rendering elements from an array.
          <option value={opt} key={opt}>
            {opt}
          </option>
        ))}
      </select>
    </Help>
  );
};
const Input = (props: {
chatStore: ChatStore;
setChatStore: (cs: ChatStore) => void;
@@ -101,7 +136,9 @@ export default (props: {
props.chatStore.apiKey
)}&api=${encodeURIComponent(props.chatStore.apiEndpoint)}&mode=${
props.chatStore.streamMode ? "stream" : "fetch"
}&sys=${encodeURIComponent(props.chatStore.systemMessageContent)}`;
}&model=${props.chatStore.model}&sys=${encodeURIComponent(
props.chatStore.systemMessageContent
)}`;
return (
<div className="left-0 top-0 overflow-scroll flex justify-center absolute w-screen h-full bg-black bg-opacity-50 z-10">
<div className="m-2 p-2 bg-white rounded-lg h-fit">
@@ -128,9 +165,13 @@ export default (props: {
help="流模式,使用 stream mode 将可以动态看到生成内容,但无法准确计算 token 数量,在 token 数量过多时可能会裁切过多或过少历史消息"
{...props}
/>
<SelectModel
help="模型,默认 3.5。不同模型性能和定价也不同,请参考 API 文档。"
{...props}
/>
<Number
field="maxTokens"
help="最大 token 数量,这个详情参考 OPENAI API 文档"
help="最大 token 数量。如果使用非gpt-3.5模型请手动修改上限。gpt-4 & gpt-4-0314: 8192。gpt-4-32k & gpt-4-32k-0314: 32768"
readOnly={false}
{...props}
/>