Compare commits: 7ded1c8522...e1ef16015d

Commits (1): e1ef16015d

README.md (21)
@@ -1,11 +1,9 @@
 > 前排提示:滥用 API 或在不支持的地区调用 API 有被封号的风险 <https://github.com/zhayujie/chatgpt-on-wechat/issues/423>
->
-> 建议自行搭建代理中转 API 请求,然后更改对话设置中的 API Endpoint 参数使用中转
->
-> 具体反向代理搭建教程请参阅此 [>>Wiki页面<<](https://github.com/heimoshuiyu/chatgpt-api-web/wiki)
 
 # ChatGPT API WEB
 
+> 灵车东西,做着玩儿的
+
 一个简单的网页,调用 OPENAI ChatGPT 进行对话。
 
 
@@ -15,14 +13,7 @@
 - API 调用速度更快更稳定
 - 对话记录、API 密钥等使用浏览器的 localStorage 保存在本地
 - 可删除对话消息
-- 可以导入/导出整个历史对话记录
-- 可以设置 system message (参见官方 [API 文档](https://platform.openai.com/docs/guides/chat)) 例如:
-- > 你是一个有用的有用的人工智能助理
-- > You are a helpful assistant
-- > 你是一个专业英语翻译,把我说的话翻译成英语,为了保持通顺连贯可以适当修改内容。
-- > 根据我的描述给出适用于 Stable Diffusion 的 prompt 和 negative prompt,用英文回答,要求尽量长一些。
-- > 根据我的要求撰写并修改商业文案
-- > ~~你是一个猫娘,你要用猫娘的语气说话~~
+- 可以设置 system message (如:"你是一个猫娘" 或 "你是一个有用的助理" 或 "将我的话翻译成英语",参见官方 [API 文档](https://platform.openai.com/docs/guides/chat))
 - 可以为不同对话设置不同 APIKEY
 - 小(整个网页 30k 左右)
 - 可以设置不同的 API Endpoint(方便墙内人士使用反向代理转发 API 请求)
@@ -39,12 +30,6 @@
 - 从 [release](https://github.com/heimoshuiyu/chatgpt-api-web/releases) 下载网页文件,或在 [github pages](https://heimoshuiyu.github.io/chatgpt-api-web/) 按 `ctrl+s` 保存网页,然后双击打开
 - 自行编译构建网页
 
-### 默认参数继承
-
-新建会话将会使用 URL 中设置的默认参数。
-
-如果 URL 没有设置该参数,则使用 **目前选中的会话** 的参数
-
 ### 更改默认参数
 
 - `key`: OPENAI API KEY 默认为空
package.json

@@ -12,6 +12,7 @@
 "autoprefixer": "^10.4.14",
 "postcss": "^8.4.21",
 "preact": "^10.11.3",
+"preact-markdown": "^2.1.0",
 "sakura.css": "^1.4.1",
 "tailwindcss": "^3.2.7"
 },
src/CHATGPT_API_WEB_VERSION (file deleted)

@@ -1,3 +0,0 @@
-const CHATGPT_API_WEB_VERSION = "v1.3.0";
-
-export default CHATGPT_API_WEB_VERSION;
src/app.tsx (120)

@@ -1,22 +1,13 @@
 import { useEffect, useState } from "preact/hooks";
 import "./global.css";
 
-import { calculate_token_length, Message } from "./chatgpt";
+import { Message } from "./chatgpt";
 import getDefaultParams from "./getDefaultParam";
 import ChatBOX from "./chatbox";
-import models from "./models";
 
-import CHATGPT_API_WEB_VERSION from "./CHATGPT_API_WEB_VERSION";
-
-export interface ChatStoreMessage extends Message {
-hide: boolean;
-token: number;
-}
-
 export interface ChatStore {
-chatgpt_api_web_version: string;
 systemMessageContent: string;
-history: ChatStoreMessage[];
+history: Message[];
 postBeginIndex: number;
 tokenMargin: number;
 totalTokens: number;
@@ -24,33 +15,25 @@ export interface ChatStore {
 apiKey: string;
 apiEndpoint: string;
 streamMode: boolean;
-model: string;
-responseModelName: string;
-cost: number;
 }
 
 const _defaultAPIEndpoint = "https://api.openai.com/v1/chat/completions";
 const newChatStore = (
 apiKey = "",
-systemMessageContent = "Follow my instructions carefully",
+systemMessageContent = "你是一个有用的人工智能助理",
 apiEndpoint = _defaultAPIEndpoint,
-streamMode = true,
-model = "gpt-3.5-turbo-0613"
+streamMode = true
 ): ChatStore => {
 return {
-chatgpt_api_web_version: CHATGPT_API_WEB_VERSION,
 systemMessageContent: getDefaultParams("sys", systemMessageContent),
 history: [],
 postBeginIndex: 0,
 tokenMargin: 1024,
 totalTokens: 0,
-maxTokens: models[getDefaultParams("model", model)]?.maxToken ?? 4096,
+maxTokens: 4096,
 apiKey: getDefaultParams("key", apiKey),
 apiEndpoint: getDefaultParams("api", apiEndpoint),
 streamMode: getDefaultParams("mode", streamMode),
-model: getDefaultParams("model", model),
-responseModelName: "",
-cost: 0,
 };
 };
 
@@ -88,57 +71,31 @@ export function App() {
 const key = `${STORAGE_NAME}-${index}`;
 const val = localStorage.getItem(key);
 if (val === null) return newChatStore();
-const ret = JSON.parse(val) as ChatStore;
-// handle read from old version chatstore
-if (ret.model === undefined) ret.model = "gpt-3.5-turbo";
-if (ret.responseModelName === undefined) ret.responseModelName = "";
-if (ret.chatgpt_api_web_version === undefined)
-// this is from old version becasue it is undefined,
-// so no higher than v1.3.0
-ret.chatgpt_api_web_version = "v1.2.2";
-for (const message of ret.history) {
-if (message.hide === undefined) message.hide = false;
-if (message.token === undefined)
-message.token = calculate_token_length(message.content);
-}
-if (ret.cost === undefined) ret.cost = 0;
-return ret;
+return JSON.parse(val) as ChatStore;
 };
 
 const [chatStore, _setChatStore] = useState(
 getChatStoreByIndex(selectedChatIndex)
 );
-const setChatStore = (chatStore: ChatStore) => {
+const setChatStore = (cs: ChatStore) => {
 console.log("saved chat", selectedChatIndex, chatStore);
 localStorage.setItem(
 `${STORAGE_NAME}-${selectedChatIndex}`,
-JSON.stringify(chatStore)
+JSON.stringify(cs)
 );
-console.log("recalculate postBeginIndex");
-const max = chatStore.maxTokens - chatStore.tokenMargin;
-let sum = 0;
-chatStore.postBeginIndex = chatStore.history.filter(
-({ hide }) => !hide
-).length;
-for (const msg of chatStore.history
-.filter(({ hide }) => !hide)
-.slice()
-.reverse()) {
-if (sum + msg.token > max) break;
-sum += msg.token;
-chatStore.postBeginIndex -= 1;
-}
-chatStore.postBeginIndex =
-chatStore.postBeginIndex < 0 ? 0 : chatStore.postBeginIndex;
-
-_setChatStore(chatStore);
+_setChatStore(cs);
 };
 useEffect(() => {
 _setChatStore(getChatStoreByIndex(selectedChatIndex));
 }, [selectedChatIndex]);
 
-const handleNewChatStore = () => {
+return (
+<div className="flex text-sm h-screen bg-slate-200 dark:bg-slate-800 dark:text-white">
+<div className="flex flex-col h-full p-2 border-r-indigo-500 border-2 dark:border-slate-800 dark:border-r-indigo-500 dark:text-black">
+<div className="grow overflow-scroll">
+<button
+className="bg-violet-300 p-1 rounded hover:bg-violet-400"
+onClick={() => {
 const max = Math.max(...allChatStoreIndexes);
 const next = max + 1;
 console.log("save next chat", next);
@@ -149,43 +106,14 @@ export function App() {
 chatStore.apiKey,
 chatStore.systemMessageContent,
 chatStore.apiEndpoint,
-chatStore.streamMode,
-chatStore.model
+chatStore.streamMode
 )
 )
 );
 allChatStoreIndexes.push(next);
 setAllChatStoreIndexes([...allChatStoreIndexes]);
 setSelectedChatIndex(next);
-};
-
-// if there are any params in URL, create a new chatStore
-useEffect(() => {
-const api = getDefaultParams("api", "");
-const key = getDefaultParams("key", "");
-const sys = getDefaultParams("sys", "");
-const mode = getDefaultParams("mode", "");
-const model = getDefaultParams("model", "");
-// only create new chatStore if the params in URL are NOT
-// equal to the current selected chatStore
-if (
-(api && api !== chatStore.apiEndpoint) ||
-(key && key !== chatStore.apiKey) ||
-(sys && sys !== chatStore.systemMessageContent) ||
-(mode && mode !== (chatStore.streamMode ? "stream" : "fetch")) ||
-(model && model !== chatStore.model)
-) {
-handleNewChatStore();
-}
-}, []);
-
-return (
-<div className="flex text-sm h-full bg-slate-200 dark:bg-slate-800 dark:text-white">
-<div className="flex flex-col h-full p-2 border-r-indigo-500 border-2 dark:border-slate-800 dark:border-r-indigo-500 dark:text-black">
-<div className="grow overflow-scroll">
-<button
-className="bg-violet-300 p-1 rounded hover:bg-violet-400"
-onClick={handleNewChatStore}
+}}
 >
 NEW
 </button>
@@ -198,7 +126,7 @@ export function App() {
 return (
 <li>
 <button
-className={`w-full my-1 p-1 rounded hover:bg-blue-500 ${
+className={`w-full my-1 p-1 rounded hover:bg-blue-300 ${
 i === selectedChatIndex ? "bg-blue-500" : "bg-blue-200"
 }`}
 onClick={() => {
@@ -230,8 +158,7 @@ export function App() {
 chatStore.apiKey,
 chatStore.systemMessageContent,
 chatStore.apiEndpoint,
-chatStore.streamMode,
-chatStore.model
+chatStore.streamMode
 )
 );
 }
@@ -248,12 +175,7 @@ export function App() {
 DEL
 </button>
 </div>
-<ChatBOX
-chatStore={chatStore}
-setChatStore={setChatStore}
-selectedChatIndex={selectedChatIndex}
-setSelectedChatIndex={setSelectedChatIndex}
-/>
+<ChatBOX chatStore={chatStore} setChatStore={setChatStore} />
 </div>
 );
 }
src/chatbox.tsx (197)

@@ -1,20 +1,13 @@
 import { createRef } from "preact";
-import { StateUpdater, useEffect, useState } from "preact/hooks";
+import { useEffect, useState } from "preact/hooks";
 import type { ChatStore } from "./app";
-import ChatGPT, {
-calculate_token_length,
-ChunkMessage,
-FetchResponse,
-} from "./chatgpt";
+import ChatGPT, { ChunkMessage, FetchResponse } from "./chatgpt";
 import Message from "./message";
-import models from "./models";
 import Settings from "./settings";
 
 export default function ChatBOX(props: {
 chatStore: ChatStore;
 setChatStore: (cs: ChatStore) => void;
-selectedChatIndex: number;
-setSelectedChatIndex: StateUpdater<number>;
 }) {
 const { chatStore, setChatStore } = props;
 // prevent error
@@ -33,7 +26,6 @@ export default function ChatBOX(props: {
 const client = new ChatGPT(chatStore.apiKey);
 
 const _completeWithStreamMode = async (response: Response) => {
-chatStore.streamMode = true;
 // call api, return reponse text
 console.log("response", response);
 const reader = response.body?.getReader();
@@ -66,10 +58,7 @@ export default function ChatBOX(props: {
 return JSON.parse(line.trim().slice("data: ".length));
 })
 .filter((i) => i);
-console.log("jsons", jsons);
-for (const { model } of jsons) {
-if (model) chatStore.responseModelName = model;
-}
+// console.log("jsons", jsons);
 const chunkText = jsons
 .map((j) => j.choices[0].delta.content ?? "")
 .join("");
@@ -82,39 +71,17 @@ export default function ChatBOX(props: {
 setShowGenerating(false);
 
 // console.log("push to history", allChunkMessage);
-const content = allChunkMessage.join("");
-const token = calculate_token_length(content);
-// estimate cost
-if (chatStore.responseModelName) {
-chatStore.cost +=
-token *
-(models[chatStore.responseModelName]?.price?.completion ?? 0);
-let sum = 0;
-for (const msg of chatStore.history
-.filter(({ hide }) => !hide)
-.slice(chatStore.postBeginIndex)) {
-sum += msg.token;
-}
-chatStore.cost +=
-sum * (models[chatStore.responseModelName]?.price?.prompt ?? 0);
-}
 chatStore.history.push({
 role: "assistant",
-content,
-hide: false,
-token,
+content: allChunkMessage.join(""),
 });
 // manually copy status from client to chatStore
 chatStore.maxTokens = client.max_tokens;
 chatStore.tokenMargin = client.tokens_margin;
-// manually estimate token
-client.total_tokens = 0;
-for (const msg of chatStore.history
-.filter(({ hide }) => !hide)
-.slice(chatStore.postBeginIndex)) {
-client.total_tokens += msg.token;
-}
-chatStore.totalTokens = client.total_tokens;
+chatStore.totalTokens =
+client.total_tokens +
+39 +
+client.calculate_token_length(allChunkMessage.join(""));
 setChatStore({ ...chatStore });
 setGeneratingMessage("");
 setShowGenerating(false);
@@ -123,38 +90,9 @@ export default function ChatBOX(props: {
 };
 
 const _completeWithFetchMode = async (response: Response) => {
-chatStore.streamMode = false;
 const data = (await response.json()) as FetchResponse;
-chatStore.responseModelName = data.model ?? "";
-if (data.model) {
-chatStore.cost +=
-(data.usage.prompt_tokens ?? 0) *
-(models[data.model]?.price?.prompt ?? 0);
-chatStore.cost +=
-(data.usage.completion_tokens ?? 0) *
-(models[data.model]?.price?.completion ?? 0);
-}
 const content = client.processFetchResponse(data);
-// estimate user's input message token
-let aboveToken = 0;
-for (const msg of chatStore.history
-.filter(({ hide }) => !hide)
-.slice(chatStore.postBeginIndex, -1)) {
-aboveToken += msg.token;
-}
-if (data.usage.prompt_tokens) {
-const userMessageToken = data.usage.prompt_tokens - aboveToken;
-console.log("set user message token");
-chatStore.history.slice(-1)[0].token = userMessageToken;
-}
-
-chatStore.history.push({
-role: "assistant",
-content,
-hide: false,
-token: data.usage.completion_tokens ?? calculate_token_length(content),
-});
+chatStore.history.push({ role: "assistant", content });
 setShowGenerating(false);
 };
 
@@ -163,26 +101,12 @@ export default function ChatBOX(props: {
 // manually copy status from chatStore to client
 client.apiEndpoint = chatStore.apiEndpoint;
 client.sysMessageContent = chatStore.systemMessageContent;
-client.tokens_margin = chatStore.tokenMargin;
-client.messages = chatStore.history
-// only copy non hidden message
-.filter(({ hide }) => !hide)
-.slice(chatStore.postBeginIndex)
-// only copy content and role attribute to client for posting
-.map(({ content, role }) => {
-return {
-content,
-role,
-};
-});
-client.model = chatStore.model;
-client.max_tokens = chatStore.maxTokens;
-
+client.messages = chatStore.history.slice(chatStore.postBeginIndex);
 try {
 setShowGenerating(true);
 const response = await client._fetch(chatStore.streamMode);
 const contentType = response.headers.get("content-type");
-if (contentType?.startsWith("text/event-stream")) {
+if (contentType === "text/event-stream") {
 await _completeWithStreamMode(response);
 } else if (contentType === "application/json") {
 await _completeWithFetchMode(response);
@@ -193,7 +117,11 @@ export default function ChatBOX(props: {
 chatStore.maxTokens = client.max_tokens;
 chatStore.tokenMargin = client.tokens_margin;
 chatStore.totalTokens = client.total_tokens;
+// when total token > max token - margin token:
+// ChatGPT will "forgot" some historical message
+// so client.message.length will be less than chatStore.history.length
+chatStore.postBeginIndex =
+chatStore.history.length - client.messages.length;
 console.log("postBeginIndex", chatStore.postBeginIndex);
 setChatStore({ ...chatStore });
 } catch (error) {
@@ -201,7 +129,6 @@ export default function ChatBOX(props: {
 alert(error);
 } finally {
 setShowGenerating(false);
-props.setSelectedChatIndex(props.selectedChatIndex);
 }
 };
 
@@ -212,16 +139,7 @@ export default function ChatBOX(props: {
 console.log("empty message");
 return;
 }
-chatStore.responseModelName = "";
-chatStore.history.push({
-role: "user",
-content: inputMsg.trim(),
-hide: false,
-token: calculate_token_length(inputMsg.trim()),
-});
-// manually calculate token length
-chatStore.totalTokens += client.calculate_token_length(inputMsg.trim());
-client.total_tokens += client.calculate_token_length(inputMsg.trim());
+chatStore.history.push({ role: "user", content: inputMsg.trim() });
 setChatStore({ ...chatStore });
 setInputMsg("");
 await complete();
@@ -235,10 +153,9 @@ export default function ChatBOX(props: {
 setChatStore={setChatStore}
 show={showSettings}
 setShow={setShowSettings}
-selectedChatStoreIndex={props.selectedChatIndex}
 />
 <p
-className="cursor-pointer rounded bg-cyan-300 dark:text-white p-1 dark:bg-cyan-800"
+className="cursor-pointer dark:text-white"
 onClick={() => setShowSettings(true)}
 >
 <div>
@@ -252,24 +169,11 @@ export default function ChatBOX(props: {
 </button>
 </div>
 <div className="text-xs">
-<span className="underline">{chatStore.model}</span>{" "}
-<span>
-Tokens:{" "}
-<span className="underline">
-{chatStore.totalTokens}/{chatStore.maxTokens}
-</span>
-</span>{" "}
-<span>
-Cut:{" "}
-<span className="underline">
-{chatStore.postBeginIndex}/
-{chatStore.history.filter(({ hide }) => !hide).length}
-</span>{" "}
-</span>{" "}
-<span>
-Cost:{" "}
-<span className="underline">${chatStore.cost.toFixed(4)}</span>
-</span>
+<span>Total: {chatStore.totalTokens}</span>{" "}
+<span>Max: {chatStore.maxTokens}</span>{" "}
+<span>Margin: {chatStore.tokenMargin}</span>{" "}
+<span>Message: {chatStore.history.length}</span>{" "}
+<span>Cut: {chatStore.postBeginIndex}</span>
 </div>
 </p>
 <div className="grow overflow-scroll">
@@ -284,33 +188,8 @@ export default function ChatBOX(props: {
 </p>
 )}
 {chatStore.history.length === 0 && (
-<p className="break-all opacity-60 p-6 rounded bg-white my-3 text-left dark:text-black">
+<p className="opacity-60 p-6 rounded bg-white my-3 text-left dark:text-black">
 暂无历史对话记录
-<br />
-⚙Model: {chatStore.model}
-<br />
-⚙Key: {chatStore.apiKey}
-<br />
-⚙Endpoint: {chatStore.apiEndpoint}
-<br />
-⬆点击上方更改此对话的参数(请勿泄漏)
-<br />
-↖点击左上角 NEW 新建对话
-<br />
-请注意,使用 ChatGPT API
-的生成文本质量和速度会受到会话上下文的影响,同时历史上下文过长会被裁切。API
-会根据发送的上下文总量进行计费,因此建议您为不相关的问题或者不需要上文的问题创建新的对话,以避免不必要的计费。
-<br />
-⚠所有历史对话与参数储存在浏览器本地
-<br />
-⚠详细文档与源代码:{" "}
-<a
-className="underline"
-href="https://github.com/heimoshuiyu/chatgpt-api-web"
-target="_blank"
->
-github.com/heimoshuiyu/chatgpt-api-web
-</a>
 </p>
 )}
 {chatStore.history.map((_, messageIndex) => (
@@ -321,33 +200,13 @@ export default function ChatBOX(props: {
 />
 ))}
 {showGenerating && (
-<p className="p-2 my-2 animate-pulse dark:text-white message-content">
-{generatingMessage || "生成中,最长可能需要一分钟,请保持网络稳定"}
+<p className="p-2 my-2 animate-pulse dark:text-white">
+{generatingMessage
+? generatingMessage.split("\n").map((line) => <p>{line}</p>)
+: "生成中,请保持网络稳定"}
 ...
 </p>
 )}
-<p className="p-2 my-2 text-center opacity-50 dark:text-white">
-{chatStore.responseModelName && (
-<>Generated by {chatStore.responseModelName}</>
-)}
-{chatStore.postBeginIndex !== 0 && (
-<>
-<br />
-提示:会话过长,已裁切前 {chatStore.postBeginIndex} 条消息
-</>
-)}
-</p>
-{chatStore.chatgpt_api_web_version < "v1.3.0" && (
-<p className="p-2 my-2 text-center dark:text-white">
-<br />
-提示:当前会话版本 {chatStore.chatgpt_api_web_version}。
-<br />
-v1.3.0
-引入与旧版不兼容的消息裁切算法。继续使用旧版可能会导致消息裁切过多或过少(表现为失去上下文或输出不完整)。
-<br />
-请在左上角创建新会话:)
-</p>
-)}
 {showRetry && (
 <p className="text-right p-2 my-2 dark:text-white">
 <button
src/chatgpt

@@ -4,14 +4,12 @@ export interface Message {
 }
 
 export interface ChunkMessage {
-model: string;
 choices: {
 delta: { role: "assitant" | undefined; content: string | undefined };
 }[];
 }
 
 export interface FetchResponse {
-error?: any;
 id: string;
 object: string;
 created: number;
@@ -27,14 +25,6 @@ export interface FetchResponse {
 index: number | undefined;
 }[];
 }
-// https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
-export function calculate_token_length(content: string): number {
-const totalCount = content.length;
-const chineseCount = content.match(/[\u00ff-\uffff]|\S+/g)?.length ?? 0;
-const englishCount = totalCount - chineseCount;
-const tokenLength = englishCount / 4 + (chineseCount * 4) / 3;
-return ~~tokenLength;
-}
 
 class Chat {
 OPENAI_API_KEY: string;
@@ -44,7 +34,6 @@ class Chat {
 max_tokens: number;
 tokens_margin: number;
 apiEndpoint: string;
-model: string;
 
 constructor(
 OPENAI_API_KEY: string | undefined,
@@ -53,7 +42,6 @@ class Chat {
 max_tokens = 4096,
 tokens_margin = 1024,
 apiEndPoint = "https://api.openai.com/v1/chat/completions",
-model = "gpt-3.5-turbo",
 } = {}
 ) {
 if (OPENAI_API_KEY === undefined) {
@@ -66,7 +54,6 @@ class Chat {
 this.tokens_margin = tokens_margin;
 this.sysMessageContent = systemMessage;
 this.apiEndpoint = apiEndPoint;
-this.model = model;
 }
 
 _fetch(stream = false) {
@@ -77,7 +64,7 @@ class Chat {
 "Content-Type": "application/json",
 },
 body: JSON.stringify({
-model: this.model,
+model: "gpt-3.5-turbo",
 messages: [
 { role: "system", content: this.sysMessageContent },
 ...this.messages,
@@ -89,11 +76,7 @@
 
 async fetch(): Promise<FetchResponse> {
 const resp = await this._fetch();
-const j = await resp.json();
-if (j.error !== undefined) {
-throw JSON.stringify(j.error);
-}
-return j;
+return await resp.json();
 }
 
 async say(content: string): Promise<string> {
@@ -103,9 +86,6 @@ class Chat {
 }
 
 processFetchResponse(resp: FetchResponse): string {
-if (resp.error !== undefined) {
-throw JSON.stringify(resp.error);
-}
 this.total_tokens = resp?.usage?.total_tokens ?? 0;
 if (resp?.choices[0]?.message) {
 this.messages.push(resp?.choices[0]?.message);
@@ -134,8 +114,13 @@ class Chat {
 return this._fetch(true);
 }
 
+// https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
 calculate_token_length(content: string): number {
-return calculate_token_length(content);
+const totalCount = content.length;
+const chineseCount = content.match(/[\u00ff-\uffff]|\S+/g)?.length ?? 0;
+const englishCount = totalCount - chineseCount;
+const tokenLength = englishCount / 4 + (chineseCount * 4) / 3;
+return ~~tokenLength;
 }
 
 user(...messages: string[]) {
src/global.css

@@ -2,12 +2,6 @@
 @tailwind components;
 @tailwind utilities;
 
-html,
-body,
-#app {
-height: 100%;
-}
-
 /* Hide scrollbar for webkit based browsers */
 ::-webkit-scrollbar {
 display: none;
src/message

@@ -1,5 +1,10 @@
+import Markdown from "preact-markdown";
 import { ChatStore } from "./app";
-import { calculate_token_length } from "./chatgpt";
 
+const Pre: React.FC<any> = ({ children, props }) => (
+<div class="rounded p-1 bg-black text-white" {...props}>{children}</div>
+);
+const Code: React.FC<any> = ({ children }) => <code className="overflow-scroll break-keep">{children}</code>;
+
 interface Props {
 messageIndex: number;
@@ -15,37 +20,28 @@ export default function Message(props: Props) {
 chat.role === "user" ? "left-0" : "right-0"
 }`}
 onClick={() => {
-chatStore.history[messageIndex].hide =
-!chatStore.history[messageIndex].hide;
-//chatStore.totalTokens =
-chatStore.totalTokens = 0;
-for (const i of chatStore.history
-.filter(({ hide }) => !hide)
-.slice(chatStore.postBeginIndex)
-.map(({ token }) => token)) {
-chatStore.totalTokens += i;
-}
+if (
+confirm(
+`Are you sure to delete this message?\n${chat.content.slice(
+0,
+39
+)}...`
+)
+) {
+chatStore.history.splice(messageIndex, 1);
+chatStore.postBeginIndex = Math.max(chatStore.postBeginIndex - 1, 0);
 setChatStore({ ...chatStore });
+}
 }}
 >
 🗑️
 </button>
 );
+const codeMatches = chat.content.match(/(```([\s\S]*?)```$)/);
+const AnyMarkdown = Markdown as any;
+console.log("codeMatches", codeMatches);
+if (codeMatches) console.log("matches", codeMatches[0]);
 return (
-<>
-{chatStore.postBeginIndex !== 0 &&
-!chatStore.history[messageIndex].hide &&
-chatStore.postBeginIndex ===
-chatStore.history.slice(0, messageIndex).filter(({ hide }) => !hide)
-.length && (
-<div className="flex items-center relative justify-center">
-<hr className="w-full h-px my-4 border-0 bg-slate-800 dark:bg-white" />
-<span className="absolute px-3 bg-slate-800 text-white rounded p-1 dark:bg-white dark:text-black">
-Above messages are "forgotten"
-</span>
-</div>
-)}
 <div
 className={`flex ${
 chat.role === "assistant" ? "justify-start" : "justify-end"
@@ -56,16 +52,21 @@ export default function Message(props: Props) {
 chat.role === "assistant"
 ? "bg-white dark:bg-gray-700 dark:text-white"
 : "bg-green-400"
-} ${chat.hide ? "opacity-50" : ""}`}
+}`}
 >
 <p className="message-content">
-{chat.hide
-? chat.content.split("\n")[0].slice(0, 16) + "... (deleted)"
-: chat.content}
+<AnyMarkdown
+markdown={chat.content}
+markupOpts={{
+components: {
+code: Code,
+pre: Pre,
+},
+}}
+/>
 </p>
 <DeleteIcon />
 </div>
 </div>
-</>
 );
 }
src/models (file deleted)

@@ -1,56 +0,0 @@
-interface Model {
-maxToken: number;
-price: {
-prompt: number;
-completion: number;
-};
-}
-
-const models: Record<string, Model> = {
-"gpt-3.5-turbo": {
-maxToken: 4096,
-price: { prompt: 0.0015 / 1000, completion: 0.002 / 1000 },
-},
-"gpt-3.5-turbo-16k": {
-maxToken: 16384,
-price: { prompt: 0.0003 / 1000, completion: 0.004 / 1000 },
-},
-"gpt-3.5-turbo-0613": {
-maxToken: 4096,
-price: { prompt: 0.0015 / 1000, completion: 0.002 / 1000 },
-},
-"gpt-3.5-turbo-16k-0613": {
-maxToken: 16384,
-price: { prompt: 0.0003 / 1000, completion: 0.004 / 1000 },
-},
-"gpt-3.5-turbo-0301": {
-maxToken: 4096,
-price: { prompt: 0.0015 / 1000, completion: 0.002 / 1000 },
-},
-"gpt-4": {
-maxToken: 8192,
-price: { prompt: 0.03 / 1000, completion: 0.06 / 1000 },
-},
-"gpt-4-0613": {
-maxToken: 8192,
-price: { prompt: 0.03 / 1000, completion: 0.06 / 1000 },
-},
-"gpt-4-32k": {
-maxToken: 8192,
-price: { prompt: 0.06 / 1000, completion: 0.12 / 1000 },
-},
-"gpt-4-32k-0613": {
-maxToken: 8192,
-price: { prompt: 0.06 / 1000, completion: 0.12 / 1000 },
-},
-"gpt-4-0314": {
-maxToken: 8192,
-price: { prompt: 0.03 / 1000, completion: 0.06 / 1000 },
-},
-"gpt-4-32k-0314": {
-maxToken: 8192,
-price: { prompt: 0.06 / 1000, completion: 0.12 / 1000 },
-},
-};
-
-export default models;
src/settings.tsx (121)

@@ -1,7 +1,5 @@
-import { createRef } from "preact";
 import { StateUpdater } from "preact/hooks";
 import { ChatStore } from "./app";
-import models from "./models";
 
 const Help = (props: { children: any; help: string }) => {
 return (
@@ -19,32 +17,6 @@ const Help = (props: { children: any; help: string }) => {
 );
 };
 
-const SelectModel = (props: {
-chatStore: ChatStore;
-setChatStore: (cs: ChatStore) => void;
-help: string;
-}) => {
-return (
-<Help help={props.help}>
-<label className="m-2 p-2">Model</label>
-<select
-className="m-2 p-2"
-value={props.chatStore.model}
-onChange={(event: any) => {
-const model = event.target.value as string;
-props.chatStore.model = model;
-props.chatStore.maxTokens = models[model].maxToken;
-props.setChatStore({ ...props.chatStore });
-}}
->
-{Object.keys(models).map((opt) => (
-<option value={opt}>{opt}</option>
-))}
-</select>
-</Help>
-);
-};
-
 const Input = (props: {
 chatStore: ChatStore;
 setChatStore: (cs: ChatStore) => void;
@@ -118,7 +90,6 @@ export default (props: {
 setChatStore: (cs: ChatStore) => void;
 show: boolean;
 setShow: StateUpdater<boolean>;
-selectedChatStoreIndex: number;
 }) => {
 if (!props.show) return <div></div>;
 const link =
@@ -130,19 +101,12 @@ export default (props: {
 props.chatStore.apiKey
 )}&api=${encodeURIComponent(props.chatStore.apiEndpoint)}&mode=${
 props.chatStore.streamMode ? "stream" : "fetch"
-}&model=${props.chatStore.model}&sys=${encodeURIComponent(
-props.chatStore.systemMessageContent
-)}`;
-
-const importFileRef = createRef();
+}&sys=${encodeURIComponent(props.chatStore.systemMessageContent)}`;
 return (
-<div className="left-0 top-0 overflow-scroll flex justify-center absolute w-screen h-full bg-black bg-opacity-50 z-10">
+<div className="left-0 top-0 overflow-scroll flex justify-center absolute w-screen h-screen bg-black bg-opacity-50 z-10">
 <div className="m-2 p-2 bg-white rounded-lg h-fit">
 <h3 className="text-xl">Settings</h3>
 <hr />
-<p className="m-2 p-2">
-Total cost in this section ${props.chatStore.cost.toFixed(4)}
-</p>
 <div className="box">
 <Input
 field="systemMessageContent"
@@ -164,13 +128,9 @@ export default (props: {
 help="流模式,使用 stream mode 将可以动态看到生成内容,但无法准确计算 token 数量,在 token 数量过多时可能会裁切过多或过少历史消息"
 {...props}
 />
-<SelectModel
-help="模型,默认 3.5。不同模型性能和定价也不同,请参考 API 文档。(GPT-4 模型处于内测阶段,需要向 OPENAI 申请, 请确保您有访问权限)"
-{...props}
-/>
 <Number
 field="maxTokens"
-help="最大 token 数量。如果使用非gpt-3.5模型,请手动修改上限。gpt-4 & gpt-4-0314: 8192。gpt-4-32k & gpt-4-32k-0314: 32768"
+help="最大 token 数量,这个详情参考 OPENAI API 文档"
 readOnly={false}
 {...props}
 />
@@ -192,81 +152,6 @@ export default (props: {
 readOnly={true}
 {...props}
 />
-<p className="flex justify-evenly">
-<button
-className="p-2 m-2 rounded bg-amber-500"
-onClick={() => {
-let dataStr =
-"data:text/json;charset=utf-8," +
-encodeURIComponent(
-JSON.stringify(props.chatStore, null, "\t")
-);
-let downloadAnchorNode = document.createElement("a");
-downloadAnchorNode.setAttribute("href", dataStr);
-downloadAnchorNode.setAttribute(
-"download",
-`chatgpt-api-web-${props.selectedChatStoreIndex}.json`
-);
-document.body.appendChild(downloadAnchorNode); // required for firefox
-downloadAnchorNode.click();
-downloadAnchorNode.remove();
-}}
->
-Export
-</button>
-<button
-className="p-2 m-2 rounded bg-amber-500"
-onClick={() => {
-if (
-!confirm(
-"This will OVERWRITE the current chat history! Continue?"
-)
-)
-return;
-console.log("importFileRef", importFileRef);
-importFileRef.current.click();
-}}
->
-Import
-</button>
-<input
-className="hidden"
-ref={importFileRef}
-type="file"
-onChange={() => {
-const file = importFileRef.current.files[0];
-console.log("file to import", file);
-if (!file || file.type !== "application/json") {
-alert("Please select a json file");
-return;
-}
-const reader = new FileReader();
-reader.onload = () => {
-console.log("import content", reader.result);
-if (!reader) {
-alert("Empty file");
-return;
-}
-try {
-const newChatStore: ChatStore = JSON.parse(
-reader.result as string
-);
-if (!newChatStore.chatgpt_api_web_version) {
-throw "This is not an exported chatgpt-api-web chatstore file. The key 'chatgpt_api_web_version' is missing!";
-}
-props.setChatStore({ ...newChatStore });
-} catch (e) {
-alert(`Import error on parsing json: ${e}`);
-}
-};
-reader.readAsText(file);
-}}
-/>
-</p>
-<p className="text-center m-2 p-2">
-chatgpt-api-web ChatStore Version{" "}
-{props.chatStore.chatgpt_api_web_version}
-</p>
 </div>
 <hr />
 <div className="flex justify-between">
yarn.lock (18)

@@ -804,6 +804,11 @@ lru-cache@^5.1.1:
 dependencies:
 yallist "^3.0.2"
 
+marked@^4.0.10:
+version "4.3.0"
+resolved "https://registry.npmmirror.com/marked/-/marked-4.3.0.tgz#796362821b019f734054582038b116481b456cf3"
+integrity sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==
+
 merge2@^1.3.0:
 version "1.4.1"
 resolved "https://registry.npmmirror.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
@@ -925,6 +930,19 @@ postcss@^8.0.9, postcss@^8.4.21:
 picocolors "^1.0.0"
 source-map-js "^1.0.2"
 
+preact-markdown@^2.1.0:
+version "2.1.0"
+resolved "https://registry.npmmirror.com/preact-markdown/-/preact-markdown-2.1.0.tgz#c271cdd084b8854778f7d8e3640bbe9a7ea6ba4d"
+integrity sha512-6c2hfarjLFkVDNa1hUKytXID6wl6yilZnGb2y83xKXnfk5SpXYAwhJc+JENgffAcNALWggqvX/ezlk8/8qJsuA==
+dependencies:
+marked "^4.0.10"
+preact-markup "^2.1.1"
+
+preact-markup@^2.1.1:
+version "2.1.1"
+resolved "https://registry.npmmirror.com/preact-markup/-/preact-markup-2.1.1.tgz#0451e7eed1dac732d7194c34a7f16ff45a2cfdd7"
+integrity sha512-8JL2p36mzK8XkspOyhBxUSPjYwMxDM0L5BWBZWxsZMVW8WsGQrYQDgVuDKkRspt2hwrle+Cxr/053hpc9BJwfw==
+
 preact@^10.11.3:
 version "10.13.1"
 resolved "https://registry.npmmirror.com/preact/-/preact-10.13.1.tgz#d220bd8771b8fa197680d4917f3cefc5eed88720"