14 Commits

9 changed files with 181 additions and 87 deletions

View File

@@ -10,9 +10,10 @@
Compared with the official ChatGPT:
- Chat history is stored locally in the browser's localStorage
- API calls are faster and more stable
- Chat history, API keys, etc. are stored locally in the browser's localStorage (see the sketch after this list)
- Individual chat messages can be deleted
- A system message can be set (e.g. "You are a catgirl"; see the official [API docs](https://platform.openai.com/docs/guides/chat))
- A system message can be set (e.g. "You are a catgirl", "You are a helpful assistant", or "Translate what I say into English"; see the official [API docs](https://platform.openai.com/docs/guides/chat))
- A different API key can be set for each conversation
- Small (the whole page is about 30 KB)
- A custom API Endpoint can be set (handy for users behind the firewall, who can forward API requests through a reverse proxy)
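
A minimal sketch of the kind of localStorage persistence mentioned in the list above; the key name and stored shape here are illustrative assumptions, not this repo's actual storage code:

```typescript
// Minimal sketch (hypothetical key name and shape, not this repo's code):
// keep the conversation and API key entirely inside the browser.
interface StoredChat {
  apiKey: string;
  history: { role: "system" | "user" | "assistant"; content: string }[];
}

const STORAGE_KEY = "chatgpt-api-web-demo"; // hypothetical key name

function saveChat(chat: StoredChat): void {
  localStorage.setItem(STORAGE_KEY, JSON.stringify(chat));
}

function loadChat(): StoredChat | null {
  const raw = localStorage.getItem(STORAGE_KEY);
  return raw ? (JSON.parse(raw) as StoredChat) : null;
}
```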
@@ -36,7 +37,7 @@
- `api`: the API Endpoint, defaults to `https://api.openai.com/v1/chat/completions`
- `mode`: `fetch` or `stream` mode; in stream mode the API response is shown as it streams in, but the token count cannot be known and is only estimated, so when the token count grows large, too many or too few history messages may be trimmed
For example `http://localhost:1234/?key=xxxx`: newly created conversations will use this default key
For example `http://localhost:1234/?key=xxxx&api=xxxx`: **newly created** conversations will use this default key and API Endpoint
The parameters above apply to a single conversation and can be changed at any time at the top of the page (a sketch of reading these defaults follows below)
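
A minimal sketch of how these URL defaults could be read and applied to a newly created conversation (the names here are assumptions, not this repo's actual code):

```typescript
// Minimal sketch, assuming only the key / api / mode query parameters
// documented above; defaults fall back to the values listed in this README.
const params = new URLSearchParams(window.location.search);

const conversationDefaults = {
  apiKey: params.get("key") ?? "",
  apiEndpoint:
    params.get("api") ?? "https://api.openai.com/v1/chat/completions",
  streamMode: params.get("mode") === "stream",
};

// e.g. http://localhost:1234/?key=xxxx&api=xxxx&mode=stream
console.log("defaults for new conversations", conversationDefaults);
```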

View File

@@ -12,6 +12,7 @@
"autoprefixer": "^10.4.14",
"postcss": "^8.4.21",
"preact": "^10.11.3",
"preact-markdown": "^2.1.0",
"sakura.css": "^1.4.1",
"tailwindcss": "^3.2.7"
},

View File

@@ -91,7 +91,7 @@ export function App() {
return (
<div className="flex text-sm h-screen bg-slate-200 dark:bg-slate-800 dark:text-white">
<div className="flex flex-col h-full p-4 border-r-indigo-500 border-2 dark:border-slate-800 dark:border-r-indigo-500 dark:text-black">
<div className="flex flex-col h-full p-2 border-r-indigo-500 border-2 dark:border-slate-800 dark:border-r-indigo-500 dark:text-black">
<div className="grow overflow-scroll">
<button
className="bg-violet-300 p-1 rounded hover:bg-violet-400"
@@ -164,7 +164,8 @@ export function App() {
}
// find next selected chat index
const next = newAllChatStoreIndexes[0];
const next =
newAllChatStoreIndexes[newAllChatStoreIndexes.length - 1];
console.log("next is", next);
setSelectedChatIndex(next);

View File

@@ -1,6 +1,8 @@
import { useState } from "preact/hooks";
import { createRef } from "preact";
import { useEffect, useState } from "preact/hooks";
import type { ChatStore } from "./app";
import ChatGPT, { ChunkMessage } from "./chatgpt";
import ChatGPT, { ChunkMessage, FetchResponse } from "./chatgpt";
import Message from "./message";
import Settings from "./settings";
export default function ChatBOX(props: {
@@ -13,12 +15,18 @@ export default function ChatBOX(props: {
const [inputMsg, setInputMsg] = useState("");
const [showGenerating, setShowGenerating] = useState(false);
const [generatingMessage, setGeneratingMessage] = useState("");
const [showRetry, setShowRetry] = useState(false);
const messagesEndRef = createRef();
useEffect(() => {
console.log("ref", messagesEndRef);
messagesEndRef.current.scrollIntoView({ behavior: "smooth" });
}, [showRetry, showGenerating, generatingMessage]);
const client = new ChatGPT(chatStore.apiKey);
const _completeWithStreamMode = async () => {
const _completeWithStreamMode = async (response: Response) => {
// call api, return response text
const response = await client.completeWithSteam();
console.log("response", response);
const reader = response.body?.getReader();
const allChunkMessage: string[] = [];
@@ -81,10 +89,10 @@ export default function ChatBOX(props: {
});
};
const _completeWithFetchMode = async () => {
// call api, return response text
const response = await client.complete();
chatStore.history.push({ role: "assistant", content: response });
const _completeWithFetchMode = async (response: Response) => {
const data = (await response.json()) as FetchResponse;
const content = client.processFetchResponse(data);
chatStore.history.push({ role: "assistant", content });
setShowGenerating(false);
};
@@ -96,10 +104,14 @@ export default function ChatBOX(props: {
client.messages = chatStore.history.slice(chatStore.postBeginIndex);
try {
setShowGenerating(true);
if (chatStore.streamMode) {
await _completeWithStreamMode();
const response = await client._fetch(chatStore.streamMode);
const contentType = response.headers.get("content-type");
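// stream replies arrive as Server-Sent Events (text/event-stream); non-stream replies are plain JSON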
if (contentType === "text/event-stream") {
await _completeWithStreamMode(response);
} else if (contentType === "application/json") {
await _completeWithFetchMode(response);
} else {
await _completeWithFetchMode();
throw `unknown response content type ${contentType}`;
}
// manually copy status from client to chatStore
chatStore.maxTokens = client.max_tokens;
@@ -113,6 +125,7 @@ export default function ChatBOX(props: {
console.log("postBeginIndex", chatStore.postBeginIndex);
setChatStore({ ...chatStore });
} catch (error) {
setShowRetry(true);
alert(error);
} finally {
setShowGenerating(false);
@@ -130,12 +143,11 @@ export default function ChatBOX(props: {
setChatStore({ ...chatStore });
setInputMsg("");
await complete();
setChatStore({ ...chatStore });
};
const [showSettings, setShowSettings] = useState(false);
return (
<div className="grow flex flex-col p-4 dark:text-black">
<div className="grow flex flex-col p-2 dark:text-black">
<Settings
chatStore={chatStore}
setChatStore={setChatStore}
@@ -160,9 +172,7 @@ export default function ChatBOX(props: {
<span>Total: {chatStore.totalTokens}</span>{" "}
<span>Max: {chatStore.maxTokens}</span>{" "}
<span>Margin: {chatStore.tokenMargin}</span>{" "}
<span>
Message: {chatStore.history.length - chatStore.postBeginIndex}
</span>{" "}
<span>Message: {chatStore.history.length}</span>{" "}
<span>Cut: {chatStore.postBeginIndex}</span>
</div>
</p>
@@ -182,51 +192,13 @@ export default function ChatBOX(props: {
</p>
)}
{chatStore.history.map((chat, i) => {
const pClassName =
chat.role === "assistant"
? "p-2 rounded relative bg-white my-2 text-left dark:bg-gray-700 dark:text-white"
: "p-2 rounded relative bg-green-400 my-2 text-right";
const iconClassName =
chat.role === "user"
? "absolute bottom-0 left-0"
: "absolute bottom-0 right-0";
const DeleteIcon = () => (
<button
className={iconClassName}
onClick={() => {
if (
confirm(
`Are you sure you want to delete this message?\n${chat.content.slice(
0,
39
)}...`
)
) {
chatStore.history.splice(i, 1);
chatStore.postBeginIndex = Math.max(
chatStore.postBeginIndex - 1,
0
);
setChatStore({ ...chatStore });
}
}}
>
🗑
</button>
);
return (
<p className={pClassName}>
{chat.content
.split("\n")
.filter((line) => line)
.map((line) => (
<p className="my-1">{line}</p>
))}
<DeleteIcon />
</p>
);
})}
{chatStore.history.map((_, messageIndex) => (
<Message
chatStore={chatStore}
setChatStore={setChatStore}
messageIndex={messageIndex}
/>
))}
{showGenerating && (
<p className="p-2 my-2 animate-pulse dark:text-white">
{generatingMessage
@@ -235,9 +207,24 @@ export default function ChatBOX(props: {
...
</p>
)}
{showRetry && (
<p className="text-right p-2 my-2 dark:text-white">
<button
className="p-1 rounded bg-rose-500"
onClick={async () => {
setShowRetry(false);
await complete();
}}
>
Retry
</button>
</p>
)}
<div ref={messagesEndRef}></div>
</div>
<div className="flex justify-between">
<textarea
rows={Math.min(10, (inputMsg.match(/\n/g) || []).length + 2)}
value={inputMsg}
onChange={(event: any) => setInputMsg(event.target.value)}
onKeyPress={(event: any) => {

View File

@@ -9,6 +9,23 @@ export interface ChunkMessage {
}[];
}
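// Shape of a non-stream chat/completions JSON response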
export interface FetchResponse {
id: string;
object: string;
created: number;
model: string;
usage: {
prompt_tokens: number | undefined;
completion_tokens: number | undefined;
total_tokens: number | undefined;
};
choices: {
message: Message | undefined;
finish_reason: "stop" | "length";
index: number | undefined;
}[];
}
class Chat {
OPENAI_API_KEY: string;
messages: Message[];
@@ -57,22 +74,7 @@ class Chat {
});
}
async fetch(): Promise<{
id: string;
object: string;
created: number;
model: string;
usage: {
prompt_tokens: number | undefined;
completion_tokens: number | undefined;
total_tokens: number | undefined;
};
choices: {
message: Message | undefined;
finish_reason: "stop" | "length";
index: number | undefined;
}[];
}> {
async fetch(): Promise<FetchResponse> {
const resp = await this._fetch();
return await resp.json();
}
@@ -83,8 +85,7 @@ class Chat {
return this.messages.slice(-1)[0].content;
}
async complete(): Promise<string> {
const resp = await this.fetch();
processFetchResponse(resp: FetchResponse): string {
this.total_tokens = resp?.usage?.total_tokens ?? 0;
if (resp?.choices[0]?.message) {
this.messages.push(resp?.choices[0]?.message);
@@ -101,6 +102,11 @@ class Chat {
);
}
async complete(): Promise<string> {
const resp = await this.fetch();
return this.processFetchResponse(resp);
}
completeWithSteam() {
this.total_tokens = this.messages
.map((msg) => this.calculate_token_length(msg.content) + 20)

View File

@@ -21,3 +21,7 @@
body::-webkit-scrollbar {
display: none;
}
p.message-content {
white-space: pre-wrap;
}

src/message.tsx (new file, 72 lines)
View File

@@ -0,0 +1,72 @@
import Markdown from "preact-markdown";
import { ChatStore } from "./app";
const Pre: React.FC<any> = ({ children, ...props }) => (
<div class="rounded p-1 bg-black text-white" {...props}>{children}</div>
);
const Code: React.FC<any> = ({ children }) => <code className="overflow-scroll break-keep">{children}</code>;
interface Props {
messageIndex: number;
chatStore: ChatStore;
setChatStore: (cs: ChatStore) => void;
}
export default function Message(props: Props) {
const { chatStore, messageIndex, setChatStore } = props;
const chat = chatStore.history[messageIndex];
const DeleteIcon = () => (
<button
className={`absolute bottom-0 ${
chat.role === "user" ? "left-0" : "right-0"
}`}
onClick={() => {
if (
confirm(
`Are you sure you want to delete this message?\n${chat.content.slice(
0,
39
)}...`
)
) {
chatStore.history.splice(messageIndex, 1);
chatStore.postBeginIndex = Math.max(chatStore.postBeginIndex - 1, 0);
setChatStore({ ...chatStore });
}
}}
>
🗑
</button>
);
const codeMatches = chat.content.match(/(```([\s\S]*?)```$)/);
const AnyMarkdown = Markdown as any;
console.log("codeMatches", codeMatches);
if (codeMatches) console.log("matches", codeMatches[0]);
return (
<div
className={`flex ${
chat.role === "assistant" ? "justify-start" : "justify-end"
}`}
>
<div
className={`relative w-fit p-2 rounded my-2 ${
chat.role === "assistant"
? "bg-white dark:bg-gray-700 dark:text-white"
: "bg-green-400"
}`}
>
<p className="message-content">
<AnyMarkdown
markdown={chat.content}
markupOpts={{
components: {
code: Code,
pre: Pre,
},
}}
/>
</p>
<DeleteIcon />
</div>
</div>
);
}

View File

@@ -110,13 +110,17 @@ export default (props: {
<div className="box">
<Input
field="systemMessageContent"
help="系统消息用于指示ChatGPT的角色和一些前置条件"
help="系统消息用于指示ChatGPT的角色和一些前置条件,例如“你是一个有帮助的人工智能助理”,或者“你是一个专业英语翻译,把我的话全部翻译成英语”,详情参考 OPEAN AI API 文档"
{...props}
/>
<Input
field="apiKey"
help="OPEN AI API 密钥,请勿泄漏此密钥"
{...props}
/>
<Input field="apiKey" help="OPEN AI API 密钥" {...props} />
<Input
field="apiEndpoint"
help="API 端点,方便在不支持的地区使用反向代理服务"
help="API 端点,方便在不支持的地区使用反向代理服务,默认为 https://api.openai.com/v1/chat/completions"
{...props}
/>
<Choice
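
For context on the `systemMessageContent` field above: the system message is sent as the first entry of the `messages` array in a chat/completions request. A minimal sketch using the generic OpenAI request shape (not this repo's client code):

```typescript
// Minimal sketch (generic OpenAI chat/completions request, not this repo's
// ChatGPT client): the settings' system message becomes the first message.
async function sendWithSystemMessage(apiKey: string, userText: string) {
  const resp = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      model: "gpt-3.5-turbo",
      messages: [
        {
          role: "system",
          content:
            "You are a professional English translator; translate everything I say into English.",
        },
        { role: "user", content: userText },
      ],
    }),
  });
  const data = await resp.json();
  return data.choices?.[0]?.message?.content as string | undefined;
}
```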

View File

@@ -804,6 +804,11 @@ lru-cache@^5.1.1:
dependencies:
yallist "^3.0.2"
marked@^4.0.10:
version "4.3.0"
resolved "https://registry.npmmirror.com/marked/-/marked-4.3.0.tgz#796362821b019f734054582038b116481b456cf3"
integrity sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==
merge2@^1.3.0:
version "1.4.1"
resolved "https://registry.npmmirror.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
@@ -925,6 +930,19 @@ postcss@^8.0.9, postcss@^8.4.21:
picocolors "^1.0.0"
source-map-js "^1.0.2"
preact-markdown@^2.1.0:
version "2.1.0"
resolved "https://registry.npmmirror.com/preact-markdown/-/preact-markdown-2.1.0.tgz#c271cdd084b8854778f7d8e3640bbe9a7ea6ba4d"
integrity sha512-6c2hfarjLFkVDNa1hUKytXID6wl6yilZnGb2y83xKXnfk5SpXYAwhJc+JENgffAcNALWggqvX/ezlk8/8qJsuA==
dependencies:
marked "^4.0.10"
preact-markup "^2.1.1"
preact-markup@^2.1.1:
version "2.1.1"
resolved "https://registry.npmmirror.com/preact-markup/-/preact-markup-2.1.1.tgz#0451e7eed1dac732d7194c34a7f16ff45a2cfdd7"
integrity sha512-8JL2p36mzK8XkspOyhBxUSPjYwMxDM0L5BWBZWxsZMVW8WsGQrYQDgVuDKkRspt2hwrle+Cxr/053hpc9BJwfw==
preact@^10.11.3:
version "10.13.1"
resolved "https://registry.npmmirror.com/preact/-/preact-10.13.1.tgz#d220bd8771b8fa197680d4917f3cefc5eed88720"