Merge remote-tracking branch 'shanghai/master'

This commit is contained in:
2023-10-24 00:54:01 +08:00
6 changed files with 236 additions and 266 deletions

180
README.md
View File

@@ -1,6 +1,99 @@
# API文档
# openai-api-route 文档
本文档提供了使用该负载均衡 API 的方法和端点的详细说明。
这是一个 OpenAI API 负载均衡的简易工具,使用 golang 原生 reverse proxy 方法转发请求到 OpenAI 上游
功能包括:
- 更改 Authorization 验证头
- 多种负载均衡策略
- 记录完整的请求内容、IP 地址、响应时间以及 GPT 回复文本
- 上游返回错误时发送 飞书 或 Matrix 消息通知
本文档详细介绍了如何使用负载均衡和能力 API 的方法和端点。
## 部署方法
### 编译
以下是编译和运行该负载均衡 API 的步骤:
1. 首先,确保您已经安装了 golang 和 gcc。
2. 克隆本仓库到您的本地机器上。
3. 打开终端,并进入到仓库目录中。
4. 在终端中执行以下命令来编译代码:
```
make
```
这将会编译代码并生成可执行文件。
5. 编译成功后,您可以直接运行以下命令来启动负载均衡 API
```
./openai-api-route
```
默认情况下,API 将会在本地的 8888 端口进行监听。
如果您希望使用不同的监听地址,可以使用 `-addr` 参数来指定,例如:
```
./openai-api-route -addr 0.0.0.0:8080
```
这将会将监听地址设置为 0.0.0.0:8080。
6. 如果数据库不存在,系统会自动创建一个名为 `db.sqlite` 的数据库文件。
如果您希望使用不同的数据库地址,可以使用 `-database` 参数来指定,例如:
```
./openai-api-route -database /path/to/database.db
```
这将会将数据库地址设置为 `/path/to/database.db`。
7. 现在,您已经成功编译并运行了负载均衡和能力 API。您可以根据需要添加上游、管理上游并使用 API 进行相关操作。
### 运行
以下是运行命令的用法:
```
Usage of ./openai-api-route:
-add
添加一个 OpenAI 上游
-addr string
监听地址(默认为 ":8888"
-database string
数据库地址(默认为 "./db.sqlite"
-endpoint string
OpenAI API 基地址(默认为 "https://api.openai.com/v1"
-list
列出所有上游
-noauth
不检查传入的授权头
-sk string
OpenAI API 密钥(sk-xxxxx)
```
您可以直接运行 `./openai-api-route` 命令,如果数据库不存在,系统会自动创建。
### 上游管理
您可以使用以下命令添加一个上游:
```bash
./openai-api-route -add -sk sk-xxxxx -endpoint https://api.openai.com/v1
```
您也可以使用 `/admin/upstreams` 的 HTTP 接口进行控制。
另外,您还可以直接编辑数据库中的 `openai_upstreams` 表。
## 身份验证
@@ -8,89 +101,12 @@
1. 从请求头中获取`Authorization`字段的值。
2. 检查`Authorization`字段的值是否以`"Bearer"`开头。
- 如果不是,则返回错误信息:"authorization header should start with 'Bearer'"HTTP状态码403
- 如果不是,则返回错误信息:"authorization header should start with 'Bearer'"HTTP 状态码 403
3. 去除`Authorization`字段值开头的`"Bearer"`和前后的空格。
4. 将剩余的值与预先设置的身份验证配置进行比较。
- 如果不匹配,则返回错误信息:"wrong authorization header"HTTP状态码403
- 如果不匹配,则返回错误信息:"wrong authorization header"HTTP 状态码 403
5. 如果身份验证通过,则返回`nil`。
## 上游管理
### 获取所有上游
- URL: `/admin/upstreams`
- 方法: GET
- 权限要求: 需要进行身份验证
- 返回数据类型: JSON
- 请求示例:
```bash
curl -X GET -H "Authorization: Bearer access_token" http://localhost:8080/admin/upstreams
```
- 返回示例:
```json
[
{
"ID": 1,
"SK": "sk_value",
"Endpoint": "endpoint_value"
},
{
"ID": 2,
"SK": "sk_value",
"Endpoint": "endpoint_value"
}
]
```
### 创建新的上游
- URL: `/admin/upstreams`
- 方法: POST
- 权限要求: 需要进行身份验证
- 请求数据类型: JSON
- 请求示例:
```bash
curl -X POST -H "Authorization: Bearer access_token" -H "Content-Type: application/json" -d '{"SK": "sk_value", "Endpoint": "endpoint_value"}' http://localhost:8080/admin/upstreams
```
- 返回数据类型: JSON
- 返回示例:
```json
{
"message": "success"
}
```
### 删除指定ID的上游
- URL: `/admin/upstreams/:id`
- 方法: DELETE
- 权限要求: 需要进行身份验证
- 返回数据类型: JSON
- 请求示例:
```bash
curl -X DELETE -H "Authorization: Bearer access_token" http://localhost:8080/admin/upstreams/1
```
- 返回示例:
```json
{
"message": "success"
}
```
### 更新指定ID的上游
- URL: `/admin/upstreams/:id`
- 方法: PUT
- 权限要求: 需要进行身份验证
- 请求数据类型: JSON
- 请求示例:
```bash
curl -X PUT -H "Authorization: Bearer access_token" -H "Content-Type: application/json" -d '{"SK": "sk_value", "Endpoint": "endpoint_value"}' http://localhost:8080/admin/upstreams/1
```
- 返回数据类型: JSON
- 返回示例:
```json
{
"message": "success"
}
```
没什么好说的,直接操作数据库 `openai_upstreams` 表,改动立即生效

View File

@@ -20,6 +20,7 @@ func handleAuth(c *gin.Context) error {
authorization = strings.Trim(authorization[len("Bearer"):], " ")
log.Println("Received authorization", authorization)
if authorization != authConfig.Value {
err = errors.New("wrong authorization header")
c.AbortWithError(403, err)

18
cors.go
View File

@@ -1,18 +0,0 @@
package main
import (
"github.com/gin-gonic/gin"
)
// handleCORS is a gin middleware that attaches permissive CORS headers to
// every response and short-circuits CORS preflight (OPTIONS) requests with
// an empty 200 reply.
func handleCORS(c *gin.Context) {
	header := c.Writer.Header()
	header.Set("Access-Control-Allow-Origin", "*")
	header.Set("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE, PATCH")
	header.Set("Access-Control-Allow-Headers", "Origin, Authorization, Content-Type")
	// A preflight request needs no body; answer it immediately so it never
	// reaches the proxy handlers.
	if c.Request.Method == "OPTIONS" {
		c.AbortWithStatus(200)
	}
}

263
main.go
View File

@@ -2,6 +2,7 @@ package main
import (
"bytes"
"encoding/json"
"errors"
"flag"
"fmt"
@@ -10,12 +11,10 @@ import (
"net/http"
"net/http/httputil"
"net/url"
"strconv"
"strings"
"time"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"github.com/penglongli/gin-metrics/ginmetrics"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
@@ -69,9 +68,9 @@ func main() {
if *listMode {
result := make([]OPENAI_UPSTREAM, 0)
db.Find(&result)
fmt.Println("SK\tEndpoint\tSuccess\tFailed\tLast Success Time")
fmt.Println("SK\tEndpoint")
for _, upstream := range result {
fmt.Println(upstream.SK, upstream.Endpoint, upstream.SuccessCount, upstream.FailedCount, upstream.LastCallSuccessTime)
fmt.Println(upstream.SK, upstream.Endpoint)
}
return
}
@@ -97,13 +96,30 @@ func main() {
})
// CORS handler
engine.Use(handleCORS)
engine.OPTIONS("/v1/*any", func(ctx *gin.Context) {
header := ctx.Writer.Header()
header.Set("Access-Control-Allow-Origin", "*")
header.Set("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE, PATCH")
header.Set("Access-Control-Allow-Headers", "Origin, Authorization, Content-Type")
ctx.AbortWithStatus(200)
})
// get authorization config from db
db.Take(&authConfig, "key = ?", "authorization")
engine.POST("/v1/*any", func(c *gin.Context) {
trackID := uuid.New()
record := Record{
IP: c.ClientIP(),
CreatedAt: time.Now(),
Authorization: c.Request.Header.Get("Authorization"),
}
defer func() {
if err := recover(); err != nil {
log.Println("Error:", err)
c.AbortWithError(500, fmt.Errorf("%s", err))
}
}()
// check authorization header
if !*noauth {
if handleAuth(c) != nil {
@@ -144,6 +160,8 @@ func main() {
return
}
record.UpstreamID = upstream.ID
// reverse proxy
remote, err := url.Parse(upstream.Endpoint)
if err != nil {
@@ -164,7 +182,7 @@ func main() {
}
// record chat message from user
go recordUserMessage(c, db, trackID, body)
record.Body = string(body)
out.Body = io.NopCloser(bytes.NewReader(body))
@@ -174,24 +192,35 @@ func main() {
out.URL.Path = in.URL.Path
out.Header = http.Header{}
out.Header.Set("Host", remote.Host)
out.Header.Set("Authorization", "Bearer "+upstream.SK)
if upstream.SK == "asis" {
out.Header.Set("Authorization", c.Request.Header.Get("Authorization"))
} else {
out.Header.Set("Authorization", "Bearer "+upstream.SK)
}
out.Header.Set("Content-Type", c.Request.Header.Get("Content-Type"))
}
var buf bytes.Buffer
var contentType string
proxy.ModifyResponse = func(r *http.Response) error {
record.Status = r.StatusCode
r.Header.Del("Access-Control-Allow-Origin")
r.Header.Del("Access-Control-Allow-Methods")
r.Header.Del("Access-Control-Allow-Headers")
r.Header.Set("Access-Control-Allow-Origin", "*")
r.Header.Set("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE, PATCH")
r.Header.Set("Access-Control-Allow-Headers", "Origin, Authorization, Content-Type")
if r.StatusCode != 200 {
body, err := io.ReadAll(r.Body)
if err != nil {
return errors.New("failed to read response from upstream " + err.Error())
record.Response = "failed to read response from upstream " + err.Error()
return errors.New(record.Response)
}
return fmt.Errorf("upstream return '%s' with '%s'", r.Status, string(body))
record.Response = fmt.Sprintf("openai-api-route upstream return '%s' with '%s'", r.Status, string(body))
record.Status = r.StatusCode
return fmt.Errorf(record.Response)
}
// count success
go db.Model(&upstream).Updates(map[string]interface{}{
"success_count": gorm.Expr("success_count + ?", 1),
"last_call_success_time": time.Now(),
})
r.Body = io.NopCloser(io.TeeReader(r.Body, &buf))
contentType = r.Header.Get("content-type")
return nil
@@ -199,142 +228,98 @@ func main() {
proxy.ErrorHandler = func(w http.ResponseWriter, r *http.Request, err error) {
log.Println("Error", err, upstream.SK, upstream.Endpoint)
log.Println("debug", r)
// abort to error handle
c.AbortWithError(502, err)
// send notification
upstreams := []OPENAI_UPSTREAM{}
db.Find(&upstreams)
upstreamDescriptions := make([]string, 0)
for _, upstream := range upstreams {
upstreamDescriptions = append(upstreamDescriptions, fmt.Sprintf("ID: %d, %s: %s 成功次数: %d, 失败次数: %d, 最后成功调用: %s",
upstream.ID, upstream.SK, upstream.Endpoint, upstream.SuccessCount, upstream.FailedCount, upstream.LastCallSuccessTime,
))
log.Println("response is", r.Response)
if record.Status == 0 {
record.Status = 502
}
content := fmt.Sprintf("[%s] OpenAI 转发出错 ID: %d... 密钥: [%s] 上游: [%s] 错误: %s\n---\n%s",
c.ClientIP(),
upstream.ID, upstream.SK[:10], upstream.Endpoint, err.Error(),
strings.Join(upstreamDescriptions, "\n"),
)
go sendMatrixMessage(content)
if err.Error() != "context canceled" && r.Response.StatusCode != 400 {
// count failed
go db.Model(&upstream).Update("failed_count", gorm.Expr("failed_count + ?", 1))
go sendFeishuMessage(content)
if record.Response == "" {
record.Response = err.Error()
}
if r.Response != nil {
record.Status = r.Response.StatusCode
}
log.Println("response is", r.Response)
}
proxy.ServeHTTP(c.Writer, c.Request)
func() {
defer func() {
if err := recover(); err != nil {
log.Println("Panic recover :", err)
}
}()
proxy.ServeHTTP(c.Writer, c.Request)
}()
resp, err := io.ReadAll(io.NopCloser(&buf))
if err != nil {
log.Println("Failed to read from response tee buffer", err)
record.Response = "failed to read response from upstream " + err.Error()
log.Println(record.Response)
} else {
// record response
// stream mode
if strings.HasPrefix(contentType, "text/event-stream") {
for _, line := range strings.Split(string(resp), "\n") {
chunk := StreamModeChunk{}
line = strings.TrimPrefix(line, "data:")
line = strings.TrimSpace(line)
if line == "" {
continue
}
err := json.Unmarshal([]byte(line), &chunk)
if err != nil {
log.Println(err)
continue
}
if len(chunk.Choices) == 0 {
continue
}
record.Response += chunk.Choices[0].Delta.Content
}
} else if strings.HasPrefix(contentType, "application/json") {
var fetchResp FetchModeResponse
err := json.Unmarshal(resp, &fetchResp)
if err != nil {
log.Println("Error parsing fetch response:", err)
return
}
if !strings.HasPrefix(fetchResp.Model, "gpt-") {
log.Println("Not GPT model, skip recording response:", fetchResp.Model)
return
}
if len(fetchResp.Choices) == 0 {
log.Println("Error: fetch response choice length is 0")
return
}
record.Response = fetchResp.Choices[0].Message.Content
} else {
log.Println("Unknown content type", contentType)
}
}
if len(record.Body) > 1024*512 {
record.Body = ""
}
log.Println("Record result:", record.Status, record.Response)
record.ElapsedTime = time.Now().Sub(record.CreatedAt)
if db.Create(&record).Error != nil {
log.Println("Error to save record:", record)
}
if record.Status != 200 && record.Response != "context canceled" {
errMessage := fmt.Sprintf("IP: %s request %s error %d with %s", record.IP, upstream.Endpoint, record.Status, record.Response)
go sendFeishuMessage(errMessage)
go sendMatrixMessage(errMessage)
}
go recordAssistantResponse(contentType, db, trackID, resp)
})
// ---------------------------------
// admin APIs
engine.POST("/admin/login", func(c *gin.Context) {
// check authorization headers
if handleAuth(c) != nil {
return
}
c.JSON(200, gin.H{
"message": "success",
})
})
engine.GET("/admin/upstreams", func(c *gin.Context) {
// check authorization headers
if handleAuth(c) != nil {
return
}
upstreams := make([]OPENAI_UPSTREAM, 0)
db.Find(&upstreams)
c.JSON(200, upstreams)
})
engine.POST("/admin/upstreams", func(c *gin.Context) {
// check authorization headers
if handleAuth(c) != nil {
return
}
newUpstream := OPENAI_UPSTREAM{}
err := c.BindJSON(&newUpstream)
if err != nil {
c.AbortWithError(502, errors.New("can't parse OPENAI_UPSTREAM object"))
return
}
if newUpstream.SK == "" || newUpstream.Endpoint == "" {
c.AbortWithError(403, errors.New("can't create new OPENAI_UPSTREAM with empty sk or endpoint"))
return
}
log.Println("Saveing new OPENAI_UPSTREAM", newUpstream)
err = db.Create(&newUpstream).Error
if err != nil {
c.AbortWithError(403, err)
return
}
})
engine.DELETE("/admin/upstreams/:id", func(ctx *gin.Context) {
// check authorization headers
if handleAuth(ctx) != nil {
return
}
id, err := strconv.Atoi(ctx.Param("id"))
if err != nil {
ctx.AbortWithError(502, err)
return
}
upstream := OPENAI_UPSTREAM{}
upstream.ID = uint(id)
db.Delete(&upstream)
ctx.JSON(200, gin.H{
"message": "success",
})
})
engine.PUT("/admin/upstreams/:id", func(c *gin.Context) {
// check authorization headers
if handleAuth(c) != nil {
return
}
upstream := OPENAI_UPSTREAM{}
err := c.BindJSON(&upstream)
if err != nil {
c.AbortWithError(502, errors.New("can't parse OPENAI_UPSTREAM object"))
return
}
if upstream.SK == "" || upstream.Endpoint == "" {
c.AbortWithError(403, errors.New("can't create new OPENAI_UPSTREAM with empty sk or endpoint"))
return
}
id, err := strconv.Atoi(c.Param("id"))
if err != nil {
c.AbortWithError(502, err)
return
}
upstream.ID = uint(id)
log.Println("Saveing new OPENAI_UPSTREAM", upstream)
err = db.Create(&upstream).Error
if err != nil {
c.AbortWithError(403, err)
return
}
c.JSON(200, gin.H{
"message": "success",
})
})
engine.GET("/admin/request_records", func(c *gin.Context) {
// check authorization headers
if handleAuth(c) != nil {
return
}
requestRecords := []Record{}
err := db.Order("id desc").Limit(100).Find(&requestRecords).Error
if err != nil {
c.AbortWithError(502, err)
return
}
c.JSON(200, requestRecords)
})
engine.Run(*listenAddr)
}

View File

@@ -6,30 +6,20 @@ import (
"strings"
"time"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"gorm.io/gorm"
)
type Record struct {
ID uuid.UUID `gorm:"type:uuid"`
CreatedAt time.Time
IP string
Body string
Response string
}
func recordUserMessage(c *gin.Context, db *gorm.DB, trackID uuid.UUID, body []byte) {
bodyStr := string(body)
requestRecord := Record{
Body: bodyStr,
ID: trackID,
IP: c.ClientIP(),
}
err := db.Create(&requestRecord).Error
if err != nil {
log.Println("Error record request:", err)
}
ID int64 `gorm:"primaryKey,autoIncrement"`
CreatedAt time.Time
IP string
Body string `gorm:"serializer:json"`
Response string
ElapsedTime time.Duration
Status int
UpstreamID uint
Authorization string
}
type StreamModeChunk struct {
@@ -62,7 +52,7 @@ type FetchModeUsage struct {
TotalTokens int64 `json:"total_tokens"`
}
func recordAssistantResponse(contentType string, db *gorm.DB, trackID uuid.UUID, body []byte) {
func recordAssistantResponse(contentType string, db *gorm.DB, trackID uuid.UUID, body []byte, elapsedTime time.Duration) {
result := ""
// stream mode
if strings.HasPrefix(contentType, "text/event-stream") {
@@ -113,6 +103,7 @@ func recordAssistantResponse(contentType string, db *gorm.DB, trackID uuid.UUID,
return
}
record.Response = result
record.ElapsedTime = elapsedTime
if db.Save(&record).Error != nil {
log.Println("Error to save record:", record)
return

View File

@@ -1,17 +1,12 @@
package main
import (
"time"
"gorm.io/gorm"
)
// one openai upstream contain a pair of key and endpoint
type OPENAI_UPSTREAM struct {
gorm.Model
SK string `gorm:"index:idx_sk_endpoint,unique"` // key
Endpoint string `gorm:"index:idx_sk_endpoint,unique"` // endpoint
SuccessCount int64
FailedCount int64
LastCallSuccessTime time.Time
SK string `gorm:"index:idx_sk_endpoint,unique"` // key
Endpoint string `gorm:"index:idx_sk_endpoint,unique"` // endpoint
}