14 Commits

Author SHA1 Message Date
f0c16a3cd1 localStorage follow
All checks were successful
Build static content / build (push) Successful in 3m8s
2024-03-30 11:40:34 +08:00
f54b192616 add follow scroll option 2024-03-30 11:37:19 +08:00
b20de667a4 fix: set logprobs to default false 2024-03-16 18:32:39 +08:00
a76cf224f6 new button w-full
All checks were successful
Build static content / build (push) Successful in 5m8s
2024-03-16 15:15:31 +08:00
943cb5f392 add gitea action
All checks were successful
Build static content / build (push) Successful in 4m30s
2024-03-07 02:08:22 +08:00
74b60b4e95 fix: logprobs in vision model not permit 2024-02-24 09:47:44 +08:00
24aba9ae07 fix: old version logprobs to false 2024-02-23 19:46:09 +08:00
4b1f81f72b fix: build 2024-02-23 19:04:41 +08:00
2224d2e5ed fix: old version logprobs to true 2024-02-23 19:02:08 +08:00
c9c51a85cf support logprobs 2024-02-23 19:00:20 +08:00
d01d7c747b change default model to gpt-3.5-turbo-0125 2024-02-20 15:28:43 +08:00
159d0615c9 fix: new chatStore of param with old chatStore 2024-02-18 22:31:41 +08:00
e8650e2c7e add gpt-3.5-turbo-0125 2024-02-10 15:28:41 +08:00
7f20e9b35f change default model to gpt-3.5-turbo 2024-02-10 11:48:04 +08:00
9 changed files with 221 additions and 76 deletions

View File

@@ -0,0 +1,27 @@
name: Build static content

on:
  # Runs on pushes targeting the default branch
  push:
    branches: ["master"]
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Use Node.js 18.x
        uses: actions/setup-node@v3
        with:
          node-version: 18.x
          cache: 'npm'
      - run: npm install
      - run: npm run build
      - name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: dist-files
          path: './dist/'

View File

@@ -246,6 +246,7 @@ export function AddImage({
token: 65,
example: false,
audio: null,
logprobs: null,
});
setChatStore({ ...chatStore });

View File

@@ -2,10 +2,10 @@ import { IDBPDatabase, openDB } from "idb";
import { useEffect, useState } from "preact/hooks";
import "./global.css";
import { calculate_token_length, Message } from "./chatgpt";
import { calculate_token_length, Logprobs, Message } from "./chatgpt";
import getDefaultParams from "./getDefaultParam";
import ChatBOX from "./chatbox";
import models from "./models";
import models, { defaultModel } from "./models";
import { Tr, langCodeContext, LANG_OPTIONS } from "./translate";
import CHATGPT_API_WEB_VERSION from "./CHATGPT_API_WEB_VERSION";
@@ -15,6 +15,7 @@ export interface ChatStoreMessage extends Message {
token: number;
example: boolean;
audio: Blob | null;
logprobs: Logprobs | null;
}
export interface TemplateAPI {
@@ -63,6 +64,7 @@ export interface ChatStore {
image_gen_api: string;
image_gen_key: string;
json_mode: boolean;
logprobs: boolean;
}
const _defaultAPIEndpoint = "https://api.openai.com/v1/chat/completions";
@@ -71,7 +73,7 @@ export const newChatStore = (
systemMessageContent = "",
apiEndpoint = _defaultAPIEndpoint,
streamMode = true,
model = "gpt-3.5-turbo-1106",
model = defaultModel,
temperature = 0.7,
dev = false,
whisper_api = "https://api.openai.com/v1/audio/transcriptions",
@@ -84,7 +86,8 @@ export const newChatStore = (
toolsString = "",
image_gen_api = "https://api.openai.com/v1/images/generations",
image_gen_key = "",
json_mode = false
json_mode = false,
logprobs = false
): ChatStore => {
return {
chatgpt_api_web_version: CHATGPT_API_WEB_VERSION,
@@ -94,7 +97,10 @@ export const newChatStore = (
postBeginIndex: 0,
tokenMargin: 1024,
totalTokens: 0,
maxTokens: getDefaultParams("max", models[getDefaultParams("model", model)]?.maxToken ?? 2048),
maxTokens: getDefaultParams(
"max",
models[getDefaultParams("model", model)]?.maxToken ?? 2048
),
maxGenTokens: 2048,
maxGenTokens_enabled: true,
apiKey: getDefaultParams("key", apiKey),
@@ -121,6 +127,7 @@ export const newChatStore = (
image_gen_key: image_gen_key,
json_mode: json_mode,
tts_format: tts_format,
logprobs,
};
};
@@ -197,7 +204,7 @@ export function App() {
// handle read from old version chatstore
if (ret.maxGenTokens === undefined) ret.maxGenTokens = 2048;
if (ret.maxGenTokens_enabled === undefined) ret.maxGenTokens_enabled = true;
if (ret.model === undefined) ret.model = "gpt-3.5-turbo";
if (ret.model === undefined) ret.model = defaultModel;
if (ret.responseModelName === undefined) ret.responseModelName = "";
if (ret.toolsString === undefined) ret.toolsString = "";
if (ret.chatgpt_api_web_version === undefined)
@@ -259,7 +266,7 @@ export function App() {
[]
);
const handleNewChatStore = async () => {
const handleNewChatStoreWithOldOne = async (chatStore: ChatStore) => {
const newKey = await (
await db
).add(
@@ -282,12 +289,16 @@ export function App() {
chatStore.toolsString,
chatStore.image_gen_api,
chatStore.image_gen_key,
chatStore.json_mode
chatStore.json_mode,
false // logprobs default to false
)
);
setSelectedChatIndex(newKey as number);
setAllChatStoreIndexes(await (await db).getAllKeys(STORAGE_NAME));
};
const handleNewChatStore = async () => {
return handleNewChatStoreWithOldOne(chatStore);
};
// if there are any params in URL, create a new chatStore
useEffect(() => {
@@ -299,7 +310,7 @@ export function App() {
const mode = getDefaultParams("mode", "");
const model = getDefaultParams("model", "");
const max = getDefaultParams("max", 0);
console.log('max is', max, 'chatStore.max is', chatStore.maxTokens)
console.log("max is", max, "chatStore.max is", chatStore.maxTokens);
// only create new chatStore if the params in URL are NOT
// equal to the current selected chatStore
if (
@@ -310,8 +321,8 @@ export function App() {
(model && model !== chatStore.model) ||
(max !== 0 && max !== chatStore.maxTokens)
) {
console.log('create new chatStore because of params in URL')
handleNewChatStore();
console.log("create new chatStore because of params in URL");
handleNewChatStoreWithOldOne(chatStore);
}
await db;
const allidx = await (await db).getAllKeys(STORAGE_NAME);
@@ -328,7 +339,7 @@ export function App() {
<div className="flex flex-col h-full p-2 border-r-indigo-500 border-2 dark:border-slate-800 dark:border-r-indigo-500 dark:text-black">
<div className="grow overflow-scroll">
<button
className="bg-violet-300 p-1 rounded hover:bg-violet-400"
className="w-full bg-violet-300 p-1 rounded hover:bg-violet-400"
onClick={handleNewChatStore}
>
{Tr("NEW")}
@@ -342,8 +353,9 @@ export function App() {
return (
<li>
<button
className={`w-full my-1 p-1 rounded hover:bg-blue-500 ${i === selectedChatIndex ? "bg-blue-500" : "bg-blue-200"
}`}
className={`w-full my-1 p-1 rounded hover:bg-blue-500 ${
i === selectedChatIndex ? "bg-blue-500" : "bg-blue-200"
}`}
onClick={() => {
setSelectedChatIndex(i);
}}

View File

@@ -22,6 +22,7 @@ import ChatGPT, {
Message as MessageType,
MessageDetail,
ToolCall,
Logprobs,
} from "./chatgpt";
import Message from "./message";
import models from "./models";
@@ -55,11 +56,24 @@ export default function ChatBOX(props: {
const [showAddToolMsg, setShowAddToolMsg] = useState(false);
const [newToolCallID, setNewToolCallID] = useState("");
const [newToolContent, setNewToolContent] = useState("");
let default_follow = localStorage.getItem("follow");
if (default_follow === null) {
default_follow = "true";
}
const [follow, _setFollow] = useState(default_follow === "true");
const mediaRef = createRef();
const setFollow = (follow: boolean) => {
console.log("set follow", follow);
localStorage.setItem("follow", follow.toString());
_setFollow(follow);
};
const messagesEndRef = createRef();
useEffect(() => {
messagesEndRef.current.scrollIntoView({ behavior: "smooth" });
if (follow) {
messagesEndRef.current.scrollIntoView({ behavior: "smooth" });
}
}, [showRetry, showGenerating, generatingMessage]);
const client = new ChatGPT(chatStore.apiKey);
@@ -82,15 +96,29 @@ export default function ChatBOX(props: {
const allChunkMessage: string[] = [];
const allChunkTool: ToolCall[] = [];
setShowGenerating(true);
const logprobs: Logprobs = {
content: [],
};
for await (const i of client.processStreamResponse(response)) {
chatStore.responseModelName = i.model;
responseTokenCount += 1;
// skip if choice is empty (e.g. azure)
if (!i.choices[0]) continue;
const c = i.choices[0];
allChunkMessage.push(i.choices[0].delta.content ?? "");
const tool_calls = i.choices[0].delta.tool_calls;
// skip if choice is empty (e.g. azure)
if (!c) continue;
const logprob = c?.logprobs?.content[0]?.logprob;
if (logprob !== undefined) {
logprobs.content.push({
token: c.delta.content ?? "",
logprob,
});
console.log(c.delta.content, logprob);
}
allChunkMessage.push(c.delta.content ?? "");
const tool_calls = c.delta.tool_calls;
if (tool_calls) {
for (const tool_call of tool_calls) {
// init
@@ -149,6 +177,7 @@ export default function ChatBOX(props: {
chatStore.cost += cost;
addTotalCost(cost);
console.log("save logprobs", logprobs);
const newMsg: ChatStoreMessage = {
role: "assistant",
content,
@@ -156,6 +185,7 @@ export default function ChatBOX(props: {
token: responseTokenCount,
example: false,
audio: null,
logprobs,
};
if (allChunkTool.length > 0) newMsg.tool_calls = allChunkTool;
@@ -210,6 +240,7 @@ export default function ChatBOX(props: {
data.usage.completion_tokens ?? calculate_token_length(msg.content),
example: false,
audio: null,
logprobs: data.choices[0]?.logprobs,
});
setShowGenerating(false);
};
@@ -257,7 +288,10 @@ export default function ChatBOX(props: {
try {
setShowGenerating(true);
const response = await client._fetch(chatStore.streamMode);
const response = await client._fetch(
chatStore.streamMode,
chatStore.logprobs
);
const contentType = response.headers.get("content-type");
if (contentType?.startsWith("text/event-stream")) {
await _completeWithStreamMode(response);
@@ -306,6 +340,7 @@ export default function ChatBOX(props: {
token: calculate_token_length(inputMsg.trim()),
example: false,
audio: null,
logprobs: null,
});
// manually calculate token length
@@ -782,6 +817,18 @@ export default function ChatBOX(props: {
</div>
)}
{generatingMessage && (
<span
class="p-2 m-2 rounded bg-white dark:text-black dark:bg-white dark:bg-opacity-50 dark:text-black dark:bg-opacity-50"
style={{ textAlign: "right" }}
onClick={() => {
setFollow(!follow);
}}
>
<label>Follow</label>
<input type="checkbox" checked={follow} />
</span>
)}
<div className="flex justify-between">
{(chatStore.model.match("vision") ||
(chatStore.image_gen_api && chatStore.image_gen_key)) && (
@@ -972,6 +1019,7 @@ export default function ChatBOX(props: {
hide: false,
example: false,
audio: null,
logprobs: null,
});
update_total_tokens();
setInputMsg("");
@@ -1066,6 +1114,7 @@ export default function ChatBOX(props: {
hide: false,
example: false,
audio: null,
logprobs: null,
});
update_total_tokens();
setChatStore({ ...chatStore });

View File

@@ -35,6 +35,16 @@ interface Choices {
index: number;
delta: Delta;
finish_reason: string | null;
logprobs: Logprobs | null;
}
export interface Logprobs {
content: LogprobsContent[];
}
interface LogprobsContent {
token: string;
logprob: number;
}
export interface StreamingResponseChunk {
@@ -85,6 +95,7 @@ export interface FetchResponse {
message: Message | undefined;
finish_reason: "stop" | "length";
index: number | undefined;
logprobs: Logprobs | null;
}[];
}
@@ -174,7 +185,7 @@ class Chat {
this.json_mode = json_mode;
}
_fetch(stream = false) {
_fetch(stream = false, logprobs = false) {
// perform role type check
let hasNonSystemMessage = false;
for (const msg of this.messages) {
@@ -225,6 +236,9 @@ class Chat {
type: "json_object",
};
}
if (logprobs) {
body["logprobs"] = true;
}
// parse toolsString to function call format
const ts = this.toolsString.trim();
@@ -253,15 +267,6 @@ class Chat {
});
}
async fetch(): Promise<FetchResponse> {
const resp = await this._fetch();
const j = await resp.json();
if (j.error !== undefined) {
throw JSON.stringify(j.error);
}
return j;
}
async *processStreamResponse(resp: Response) {
const reader = resp?.body?.pipeThrough(new TextDecoderStream()).getReader();
if (reader === undefined) {

14
src/logprob.tsx Normal file
View File

@@ -0,0 +1,14 @@
/**
 * Map a token log-probability to an inline CSS color for visualization.
 *
 * The probability `exp(logprob)` is converted to a percentage and blended
 * linearly from red (0%, very unlikely token) to green (100%, certain token).
 *
 * @param logprob - Natural-log probability of a token; valid values are <= 0.
 * @returns A CSS `rgb(r, g, 0)` color string.
 */
const logprobToColor = (logprob: number): string => {
  // Convert the logprob to a percentage. Clamp defensively so an
  // out-of-range input can never produce a negative color channel.
  const percent = Math.min(100, Math.max(0, Math.exp(logprob) * 100));
  // Linear red→green blend: red RGB is (255, 0, 0), green is (0, 255, 0).
  const red = Math.round(255 * (1 - percent / 100));
  const green = Math.round(255 * (percent / 100));
  return `rgb(${red}, ${green}, 0)`;
};

export default logprobToColor;

View File

@@ -9,6 +9,7 @@ import { MessageDetail } from "./messageDetail";
import { MessageToolCall } from "./messageToolCall";
import { MessageToolResp } from "./messageToolResp";
import { EditMessage } from "./editMessage";
import logprobToColor from "./logprob";
export const isVailedJSON = (str: string): boolean => {
try {
@@ -32,6 +33,7 @@ export default function Message(props: Props) {
const [showEdit, setShowEdit] = useState(false);
const [showCopiedHint, setShowCopiedHint] = useState(false);
const [renderMarkdown, setRenderWorkdown] = useState(false);
const [renderColor, setRenderColor] = useState(false);
const DeleteIcon = () => (
<button
onClick={() => {
@@ -125,7 +127,21 @@ export default function Message(props: Props) {
{
// only show when content is string or list of message
// this check is used to avoid rendering tool call
chat.content && getMessageText(chat)
chat.content &&
(chat.logprobs && renderColor
? chat.logprobs.content
.filter((c) => c.token)
.map((c) => (
<div
style={{
color: logprobToColor(c.logprob),
display: "inline",
}}
>
{c.token}
</div>
))
: getMessageText(chat))
}
</div>
)}
@@ -200,6 +216,10 @@ export default function Message(props: Props) {
<label className="dark:text-white">{Tr("render")}</label>
<input type="checkbox" checked={renderMarkdown} />
</span>
<span onClick={(event: any) => setRenderColor(!renderColor)}>
<label className="dark:text-white">{Tr("color")}</label>
<input type="checkbox" checked={renderColor} />
</span>
</div>
)}
</div>

View File

@@ -7,6 +7,10 @@ interface Model {
}
const models: Record<string, Model> = {
"gpt-3.5-turbo-0125": {
maxToken: 16385,
price: { prompt: 0.0005 / 1000, completion: 0.0015 / 1000 },
},
"gpt-3.5-turbo-1106": {
maxToken: 16385,
price: { prompt: 0.001 / 1000, completion: 0.002 / 1000 },
@@ -69,4 +73,6 @@ const models: Record<string, Model> = {
},
};
export const defaultModel = "gpt-3.5-turbo-0125";
export default models;

View File

@@ -47,45 +47,54 @@ const SelectModel = (props: {
setChatStore: (cs: ChatStore) => void;
help: string;
}) => {
let shouldIUseCustomModel: boolean = true
let shouldIUseCustomModel: boolean = true;
for (const model in models) {
if (props.chatStore.model === model) {
shouldIUseCustomModel = false
shouldIUseCustomModel = false;
}
}
const [useCustomModel, setUseCustomModel] = useState(shouldIUseCustomModel);
return (
<Help help={props.help}>
<label className="m-2 p-2">Model</label>
<span onClick={() => {
setUseCustomModel(!useCustomModel);
}} className="m-2 p-2">
<span
onClick={() => {
setUseCustomModel(!useCustomModel);
}}
className="m-2 p-2"
>
<label>{Tr("Custom")}</label>
<input className="" type="checkbox" checked={useCustomModel} />
</span>
{
useCustomModel ?
<input
className="m-2 p-2 border rounded focus w-32 md:w-fit"
value={props.chatStore.model} onChange={(event: any) => {
const model = event.target.value as string;
props.chatStore.model = model;
props.setChatStore({ ...props.chatStore });
}} /> : <select
className="m-2 p-2"
value={props.chatStore.model}
onChange={(event: any) => {
const model = event.target.value as string;
props.chatStore.model = model;
props.chatStore.maxTokens = getDefaultParams('max', models[model].maxToken);
props.setChatStore({ ...props.chatStore });
}}
>
{Object.keys(models).map((opt) => (
<option value={opt}>{opt}</option>
))}
</select>
}
{useCustomModel ? (
<input
className="m-2 p-2 border rounded focus w-32 md:w-fit"
value={props.chatStore.model}
onChange={(event: any) => {
const model = event.target.value as string;
props.chatStore.model = model;
props.setChatStore({ ...props.chatStore });
}}
/>
) : (
<select
className="m-2 p-2"
value={props.chatStore.model}
onChange={(event: any) => {
const model = event.target.value as string;
props.chatStore.model = model;
props.chatStore.maxTokens = getDefaultParams(
"max",
models[model].maxToken
);
props.setChatStore({ ...props.chatStore });
}}
>
{Object.keys(models).map((opt) => (
<option value={opt}>{opt}</option>
))}
</select>
)}
</Help>
);
};
@@ -118,14 +127,14 @@ const Input = (props: {
chatStore: ChatStore;
setChatStore: (cs: ChatStore) => void;
field:
| "apiKey"
| "apiEndpoint"
| "whisper_api"
| "whisper_key"
| "tts_api"
| "tts_key"
| "image_gen_api"
| "image_gen_key";
| "apiKey"
| "apiEndpoint"
| "whisper_api"
| "whisper_key"
| "tts_api"
| "tts_key"
| "image_gen_api"
| "image_gen_key";
help: string;
}) => {
const [hideInput, setHideInput] = useState(true);
@@ -225,13 +234,13 @@ const Number = (props: {
chatStore: ChatStore;
setChatStore: (cs: ChatStore) => void;
field:
| "totalTokens"
| "maxTokens"
| "maxGenTokens"
| "tokenMargin"
| "postBeginIndex"
| "presence_penalty"
| "frequency_penalty";
| "totalTokens"
| "maxTokens"
| "maxGenTokens"
| "tokenMargin"
| "postBeginIndex"
| "presence_penalty"
| "frequency_penalty";
readOnly: boolean;
help: string;
}) => {
@@ -275,7 +284,7 @@ const Number = (props: {
const Choice = (props: {
chatStore: ChatStore;
setChatStore: (cs: ChatStore) => void;
field: "streamMode" | "develop_mode" | "json_mode";
field: "streamMode" | "develop_mode" | "json_mode" | "logprobs";
help: string;
}) => {
return (
@@ -319,7 +328,8 @@ export default (props: {
location.pathname +
`?key=${encodeURIComponent(
props.chatStore.apiKey
)}&api=${encodeURIComponent(props.chatStore.apiEndpoint)}&mode=${props.chatStore.streamMode ? "stream" : "fetch"
)}&api=${encodeURIComponent(props.chatStore.apiEndpoint)}&mode=${
props.chatStore.streamMode ? "stream" : "fetch"
}&model=${props.chatStore.model}&sys=${encodeURIComponent(
props.chatStore.systemMessageContent
)}`;
@@ -467,6 +477,7 @@ export default (props: {
help="流模式,使用 stream mode 将可以动态看到生成内容,但无法准确计算 token 数量,在 token 数量过多时可能会裁切过多或过少历史消息"
{...props}
/>
<Choice field="logprobs" help="返回每个Token的概率" {...props} />
<Choice
field="develop_mode"
help="开发者模式,开启后会显示更多选项及功能"