add support for tool calls (function calling)

2023-11-09 17:04:27 +08:00
parent 4b7d601840
commit 81660d563f
5 changed files with 73 additions and 14 deletions
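For context, a minimal sketch of what the new toolsString setting is expected to hold, judging from the parsing added in the API client below: a JSON array of function definitions in the OpenAI function-calling style. The function name and schema here are invented for illustration.

// Hypothetical toolsString value (a JSON list of function definitions):
const toolsString = `[
  {
    "name": "get_current_weather",
    "description": "Get the current weather for a given city",
    "parameters": {
      "type": "object",
      "properties": { "city": { "type": "string" } },
      "required": ["city"]
    }
  }
]`;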

View File

@@ -21,9 +21,11 @@ export interface TemplateAPI {
key: string;
endpoint: string;
}
export interface ChatStore {
chatgpt_api_web_version: string;
systemMessageContent: string;
toolsString: string;
history: ChatStoreMessage[];
postBeginIndex: number;
tokenMargin: number;
@@ -67,11 +69,13 @@ export const newChatStore = (
tts_api = "",
tts_key = "",
tts_speed = 1.0,
tts_speed_enabled = false
tts_speed_enabled = false,
toolsString = ""
): ChatStore => {
return {
chatgpt_api_web_version: CHATGPT_API_WEB_VERSION,
systemMessageContent: getDefaultParams("sys", systemMessageContent),
toolsString,
history: [],
postBeginIndex: 0,
tokenMargin: 1024,
@@ -173,6 +177,7 @@ export function App() {
if (ret.maxGenTokens_enabled === undefined) ret.maxGenTokens_enabled = true;
if (ret.model === undefined) ret.model = "gpt-3.5-turbo";
if (ret.responseModelName === undefined) ret.responseModelName = "";
if (ret.toolsString === undefined) ret.toolsString = "";
if (ret.chatgpt_api_web_version === undefined)
// this is from an old version because it is undefined,
// so it is no higher than v1.3.0

View File

@@ -127,7 +127,7 @@ export default function ChatBOX(props: {
chatStore.cost += cost;
addTotalCost(cost);
}
const content = client.processFetchResponse(data);
const msg = client.processFetchResponse(data);
// estimate user's input message token
let aboveToken = 0;
@@ -147,9 +147,11 @@ export default function ChatBOX(props: {
chatStore.history.push({
role: "assistant",
content,
content: msg.content,
tool_calls: msg.tool_calls,
hide: false,
token: data.usage.completion_tokens ?? calculate_token_length(content),
token:
data.usage.completion_tokens ?? calculate_token_length(msg.content),
example: false,
});
setShowGenerating(false);
@@ -160,6 +162,7 @@ export default function ChatBOX(props: {
// manually copy status from chatStore to client
client.apiEndpoint = chatStore.apiEndpoint;
client.sysMessageContent = chatStore.systemMessageContent;
client.toolsString = chatStore.toolsString;
client.tokens_margin = chatStore.tokenMargin;
client.temperature = chatStore.temperature;
client.enable_temperature = chatStore.temperature_enabled;

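With processFetchResponse() now returning a whole message instead of a plain string (see the API client changes below), the assistant entry pushed into chatStore.history carries tool_calls alongside content. A sketch of such an entry, with an invented id, arguments, and token count:

// Example assistant entry in chatStore.history after a tool-call response:
// {
//   role: "assistant",
//   content: "",
//   tool_calls: [
//     {
//       id: "call_abc123",
//       type: "function",
//       function: { name: "get_current_weather", arguments: "{\"city\":\"Beijing\"}" },
//     },
//   ],
//   hide: false,
//   token: 15,
//   example: false,
// }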
View File

@@ -12,6 +12,11 @@ export interface Message {
role: "system" | "user" | "assistant" | "function";
content: string | MessageDetail[];
name?: "example_user" | "example_assistant";
tool_calls?: {
id: string;
type: string;
function: any;
}[];
}
export const getMessageText = (message: Message): string => {
if (typeof message.content === "string") {
@@ -78,6 +83,7 @@ class Chat {
OPENAI_API_KEY: string;
messages: Message[];
sysMessageContent: string;
toolsString: string;
total_tokens: number;
max_tokens: number;
max_gen_tokens: number;
@@ -96,6 +102,7 @@ class Chat {
OPENAI_API_KEY: string | undefined,
{
systemMessage = "",
toolsString = "",
max_tokens = 4096,
max_gen_tokens = 2048,
enable_max_gen_tokens = true,
@@ -121,6 +128,7 @@ class Chat {
this.enable_max_gen_tokens = enable_max_gen_tokens;
this.tokens_margin = tokens_margin;
this.sysMessageContent = systemMessage;
this.toolsString = toolsString;
this.apiEndpoint = apiEndPoint;
this.model = model;
this.temperature = temperature;
@@ -178,6 +186,25 @@ class Chat {
body["max_tokens"] = this.max_gen_tokens;
}
// parse toolsString to function call format
const ts = this.toolsString.trim();
if (ts) {
try {
const fcList: any[] = JSON.parse(ts);
body["tools"] = fcList.map((fc) => {
return {
type: "function",
function: fc,
};
});
} catch (e) {
console.log("toolsString parse error");
throw (
"Function call toolsString parse error, not a valied json list: " + e
);
}
}
return fetch(this.apiEndpoint, {
method: "POST",
headers: {
@@ -234,7 +261,7 @@ class Chat {
}
}
processFetchResponse(resp: FetchResponse): string {
processFetchResponse(resp: FetchResponse): Message {
if (resp.error !== undefined) {
throw JSON.stringify(resp.error);
}
@@ -249,15 +276,19 @@
       this.forgetSomeMessages();
     }
-    return (
-      (resp?.choices[0]?.message?.content as string) ??
-      `Error: ${JSON.stringify(resp)}`
-    );
-  }
+    let content = "";
+    if (
+      !resp.choices[0]?.message?.content &&
+      !resp.choices[0]?.message?.tool_calls
+    ) {
+      content = `Unparsed response: ${JSON.stringify(resp)}`;
+    }

-  async complete(): Promise<string> {
-    const resp = await this.fetch();
-    return this.processFetchResponse(resp);
+    return {
+      role: "assistant",
+      content,
+      tool_calls: resp?.choices[0]?.message?.tool_calls,
+    };
   }

   completeWithSteam() {

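On the request side, fetch() now parses toolsString and forwards it as the tools field of the request body; an invalid value throws before the request is sent. A standalone sketch of that mapping, using the hypothetical toolsString from the top of this commit:

// Equivalent standalone version of the new tools handling in fetch():
const ts = toolsString.trim();
const body: Record<string, any> = {}; // model, messages, etc. omitted here
if (ts) {
  body["tools"] = (JSON.parse(ts) as any[]).map((fc) => ({
    type: "function",
    function: fc,
  }));
}
// body.tools is then e.g.
// [{ type: "function", function: { name: "get_current_weather", ... } }]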
View File

@@ -288,6 +288,21 @@ export default function Message(props: Props) {
: "bg-green-400"
} ${chat.hide ? "opacity-50" : ""}`}
>
{chat.tool_calls && chat.hide ? (
<div className="message-content">Tool Call</div>
) : (
<div className="message-content">
<div>
{chat.tool_calls?.map((tool_call) => (
<div className="bg-blue-300 dark:bg-blue-800 p-1 rounded">
<strong>Tool Call ID: {tool_call?.id}</strong>
<p>Type: {tool_call?.type}</p>
<p>Function: {JSON.stringify(tool_call?.function)}</p>
</div>
))}
</div>
</div>
)}
<p className={renderMarkdown ? "" : "message-content"}>
{typeof chat.content !== "string" ? (
// render for multiple messages

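Given the tool call sketched earlier, the block added above renders each entry as its id, type, and stringified function, roughly:

Tool Call ID: call_abc123
Type: function
Function: {"name":"get_current_weather","arguments":"{\"city\":\"Beijing\"}"}

When the message is hidden, the block collapses to the plain "Tool Call" label instead.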
View File

@@ -60,7 +60,7 @@ const SelectModel = (props: {
const LongInput = (props: {
chatStore: ChatStore;
setChatStore: (cs: ChatStore) => void;
field: "systemMessageContent";
field: "systemMessageContent" | "toolsString";
help: string;
}) => {
return (
@@ -373,6 +373,11 @@ export default (props: {
help="系统消息用于指示ChatGPT的角色和一些前置条件例如“你是一个有帮助的人工智能助理”或者“你是一个专业英语翻译把我的话全部翻译成英语”详情参考 OPEAN AI API 文档"
{...props}
/>
<LongInput
field="toolsString"
help="function call tools, should be valied json format in list"
{...props}
/>
<Input
field="apiKey"
help="OPEN AI API 密钥,请勿泄漏此密钥"