add param max_gen_tokens

This commit is contained in:
2023-11-08 16:16:15 +08:00
parent 9142665585
commit ed090136ac
4 changed files with 43 additions and 1 deletion

View File

@@ -29,6 +29,8 @@ export interface ChatStore {
tokenMargin: number;
totalTokens: number;
maxTokens: number;
maxGenTokens: number;
maxGenTokens_enabled: boolean;
apiKey: string;
apiEndpoint: string;
streamMode: boolean;
@@ -75,6 +77,8 @@ export const newChatStore = (
tokenMargin: 1024,
totalTokens: 0,
maxTokens: models[getDefaultParams("model", model)]?.maxToken ?? 4096,
maxGenTokens: 2048,
maxGenTokens_enabled: true,
apiKey: getDefaultParams("key", apiKey),
apiEndpoint: getDefaultParams("api", apiEndpoint),
streamMode: getDefaultParams("mode", streamMode),
@@ -165,6 +169,8 @@ export function App() {
const ret: ChatStore = await (await db).get(STORAGE_NAME, index);
if (ret === null || ret === undefined) return newChatStore();
// handle read from old version chatstore
if (ret.maxGenTokens === undefined) ret.maxGenTokens = 2048;
if (ret.maxGenTokens_enabled === undefined) ret.maxGenTokens_enabled = true;
if (ret.model === undefined) ret.model = "gpt-3.5-turbo";
if (ret.responseModelName === undefined) ret.responseModelName = "";
if (ret.chatgpt_api_web_version === undefined)

View File

@@ -184,6 +184,8 @@ export default function ChatBOX(props: {
});
client.model = chatStore.model;
client.max_tokens = chatStore.maxTokens;
client.max_gen_tokens = chatStore.maxGenTokens;
client.enable_max_gen_tokens = chatStore.maxGenTokens_enabled;
try {
setShowGenerating(true);

View File

@@ -63,6 +63,8 @@ class Chat {
sysMessageContent: string;
total_tokens: number;
max_tokens: number;
max_gen_tokens: number;
enable_max_gen_tokens: boolean;
tokens_margin: number;
apiEndpoint: string;
model: string;
@@ -78,6 +80,8 @@ class Chat {
{
systemMessage = "",
max_tokens = 4096,
max_gen_tokens = 2048,
enable_max_gen_tokens = true,
tokens_margin = 1024,
apiEndPoint = "https://api.openai.com/v1/chat/completions",
model = "gpt-3.5-turbo",
@@ -96,6 +100,8 @@ class Chat {
this.messages = [];
this.total_tokens = calculate_token_length(systemMessage);
this.max_tokens = max_tokens;
this.max_gen_tokens = max_gen_tokens;
this.enable_max_gen_tokens = enable_max_gen_tokens;
this.tokens_margin = tokens_margin;
this.sysMessageContent = systemMessage;
this.apiEndpoint = apiEndPoint;
@@ -151,6 +157,9 @@ class Chat {
if (this.enable_top_p) {
body["top_p"] = this.top_p;
}
if (this.enable_max_gen_tokens) {
body["max_tokens"] = this.max_gen_tokens;
}
return fetch(this.apiEndpoint, {
method: "POST",

View File

@@ -188,6 +188,7 @@ const Number = (props: {
field:
| "totalTokens"
| "maxTokens"
| "maxGenTokens"
| "tokenMargin"
| "postBeginIndex"
| "presence_penalty"
@@ -197,9 +198,27 @@ const Number = (props: {
}) => {
return (
<Help help={props.help}>
<span>
<label className="m-2 p-2">{props.field}</label>
{props.field === "maxGenTokens" && (
<input
type="checkbox"
checked={props.chatStore.maxGenTokens_enabled}
onChange={() => {
const newChatStore = { ...props.chatStore };
newChatStore.maxGenTokens_enabled =
!newChatStore.maxGenTokens_enabled;
props.setChatStore({ ...newChatStore });
}}
/>
)}
</span>
<input
readOnly={props.readOnly}
disabled={
props.field === "maxGenTokens" &&
!props.chatStore.maxGenTokens_enabled
}
type="number"
className="m-2 p-2 border rounded focus w-28"
value={props.chatStore[props.field]}
@@ -384,6 +403,12 @@ export default (props: {
readOnly={false}
{...props}
/>
<Number
field="maxGenTokens"
help="最大生成 Tokens 数量"
readOnly={false}
{...props}
/>
<Number
field="tokenMargin"
help="当 totalTokens > maxTokens - tokenMargin 时会触发历史消息裁切chatgpt会“忘记”一部分对话中的消息但所有历史消息仍然保存在本地"