Compare commits: 043fb3db85...1a1226568f (6 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 1a1226568f | |
| | 1e770db740 | |
| | de1f9c1e94 | |
| | f5dc8147e6 | |
| | c6cf75d2f6 | |
| | 8342e47e4b | |

**README.md** (93 changed lines)

````diff
@@ -1,4 +1,13 @@
-# API Documentation
+# openai-api-route Documentation
 
+This is a simple OpenAI API load-balancing tool. It forwards requests to OpenAI upstreams using golang's native reverse proxy.
+
+Features include:
+
+- Rewriting the Authorization header
+- Multiple load-balancing strategies
+- Recording the complete request body, IP address, response time, and GPT reply text
+- Sending a Feishu or Matrix notification when an upstream returns an error
+
 This document describes in detail how to use the methods and endpoints of the load-balancing and capability API.
 
@@ -22,7 +31,7 @@
 
 This will compile the code and produce an executable.
 
-5. Once the build succeeds, you can run the following command directly to start the load-balancing and capability API:
+5. Once the build succeeds, you can run the following command directly to start the load-balancing API:
 
 ```
 ./openai-api-route
@@ -85,6 +94,7 @@ Usage of ./openai-api-route:
 You can also manage this through the `/admin/upstreams` HTTP interface.
 
+Alternatively, you can edit the `openai_upstreams` table in the database directly.
 
 ## Authentication
 
 ### Authentication middleware flow
@@ -99,81 +109,4 @@ Usage of ./openai-api-route:
 
 ## Upstream Management
 
-### Get all upstreams
-
-- URL: `/admin/upstreams`
-- Method: GET
-- Permission: authentication required
-- Response type: JSON
-- Request example:
-```bash
-curl -X GET -H "Authorization: Bearer access_token" http://localhost:8080/admin/upstreams
-```
-- Response example:
-```json
-[
-  {
-    "ID": 1,
-    "SK": "sk_value",
-    "Endpoint": "endpoint_value"
-  },
-  {
-    "ID": 2,
-    "SK": "sk_value",
-    "Endpoint": "endpoint_value"
-  }
-]
-```
-
-### Create a new upstream
-
-- URL: `/admin/upstreams`
-- Method: POST
-- Permission: authentication required
-- Request body type: JSON
-- Request example:
-```bash
-curl -X POST -H "Authorization: Bearer access_token" -H "Content-Type: application/json" -d '{"SK": "sk_value", "Endpoint": "endpoint_value"}' http://localhost:8080/admin/upstreams
-```
-- Response type: JSON
-- Response example:
-```json
-{
-  "message": "success"
-}
-```
-
-### Delete the upstream with the given ID
-
-- URL: `/admin/upstreams/:id`
-- Method: DELETE
-- Permission: authentication required
-- Response type: JSON
-- Request example:
-```bash
-curl -X DELETE -H "Authorization: Bearer access_token" http://localhost:8080/admin/upstreams/1
-```
-- Response example:
-```json
-{
-  "message": "success"
-}
-```
-
-### Update the upstream with the given ID
-
-- URL: `/admin/upstreams/:id`
-- Method: PUT
-- Permission: authentication required
-- Request body type: JSON
-- Request example:
-```bash
-curl -X PUT -H "Authorization: Bearer access_token" -H "Content-Type: application/json" -d '{"SK": "sk_value", "Endpoint": "endpoint_value"}' http://localhost:8080/admin/upstreams/1
-```
-- Response type: JSON
-- Response example:
-```json
-{
-  "message": "success"
-}
-```
+Nothing much to it: operate on the `openai_upstreams` table in the database directly; changes take effect immediately.
````
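
The README above names the core mechanism only briefly: golang's native reverse proxy plus an Authorization rewrite. As a rough, self-contained sketch of that idea (not the repository's actual code; the upstream URL and the `sk-placeholder` key are stand-ins), the forwarding could look like this:

```go
package main

import (
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
)

func main() {
	// Hypothetical upstream; the real project loads key/endpoint pairs
	// from the openai_upstreams table instead of hard-coding them.
	target, err := url.Parse("https://api.openai.com")
	if err != nil {
		log.Fatal(err)
	}

	proxy := &httputil.ReverseProxy{
		Director: func(req *http.Request) {
			// Point the request at the chosen upstream.
			req.URL.Scheme = target.Scheme
			req.URL.Host = target.Host
			req.Host = target.Host
			// Replace the client's Authorization header with the
			// upstream's own key (the "rewrite Authorization" feature).
			req.Header.Set("Authorization", "Bearer sk-placeholder")
		},
	}

	// Forward everything under /v1/ to the upstream.
	http.Handle("/v1/", proxy)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```

The real service layers gin routing, per-request upstream selection, and record keeping on top of this, which is where the `proxy.ErrorHandler` hook seen in the main.go diff below comes in.
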

**auth.go** (2 changed lines)

```diff
@@ -21,7 +21,7 @@ func handleAuth(c *gin.Context) error {
 	authorization = strings.Trim(authorization[len("Bearer"):], " ")
 	log.Println("Received authorization", authorization)
 
-	if authConfig.Value != "asis" && authorization != authConfig.Value {
+	if authorization != authConfig.Value {
 		err = errors.New("wrong authorization header")
 		c.AbortWithError(403, err)
 		return err
```
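
The hunk above shows only the middle of `handleAuth`; how the check is mounted as middleware is not part of this diff. The following is therefore just an assumed wiring for illustration: an error-returning bearer-token check in the shape of `handleAuth` adapted into a `gin.HandlerFunc`, with `expectedToken` standing in for `authConfig.Value`.

```go
package main

import (
	"errors"
	"strings"

	"github.com/gin-gonic/gin"
)

// expectedToken stands in for authConfig.Value in the real code.
var expectedToken = "access_token"

// checkBearer mirrors the shape of handleAuth in the hunk above:
// trim the "Bearer" prefix, then compare against the configured value.
func checkBearer(c *gin.Context) error {
	authorization := c.Request.Header.Get("Authorization")
	authorization = strings.Trim(strings.TrimPrefix(authorization, "Bearer"), " ")
	if authorization != expectedToken {
		err := errors.New("wrong authorization header")
		c.AbortWithError(403, err)
		return err
	}
	return nil
}

// asMiddleware adapts an error-returning check into a gin.HandlerFunc.
func asMiddleware(check func(*gin.Context) error) gin.HandlerFunc {
	return func(c *gin.Context) {
		if check(c) != nil {
			return // the check has already aborted with 403
		}
		c.Next()
	}
}

func main() {
	engine := gin.Default()
	engine.Use(asMiddleware(checkBearer))
	engine.GET("/ping", func(c *gin.Context) { c.String(200, "pong") })
	engine.Run(":8080")
}
```
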

**main.go** (43 changed lines)

```diff
@@ -67,9 +67,9 @@ func main() {
 	if *listMode {
 		result := make([]OPENAI_UPSTREAM, 0)
 		db.Find(&result)
-		fmt.Println("SK\tEndpoint\tSuccess\tFailed\tLast Success Time")
+		fmt.Println("SK\tEndpoint")
 		for _, upstream := range result {
-			fmt.Println(upstream.SK, upstream.Endpoint, upstream.SuccessCount, upstream.FailedCount, upstream.LastCallSuccessTime)
+			fmt.Println(upstream.SK, upstream.Endpoint)
 		}
 		return
 	}
@@ -103,8 +103,9 @@ func main() {
 
 	engine.POST("/v1/*any", func(c *gin.Context) {
 		record := Record{
-			IP:        c.ClientIP(),
-			CreatedAt: time.Now(),
+			IP:            c.ClientIP(),
+			CreatedAt:     time.Now(),
+			Authorization: c.Request.Header.Get("Authorization"),
 		}
 		defer func() {
 			if err := recover(); err != nil {
@@ -209,7 +210,8 @@ func main() {
 				record.Response = "failed to read response from upstream " + err.Error()
 				return errors.New(record.Response)
 			}
-			record.Response = fmt.Sprintf("upstream return '%s' with '%s'", r.Status, string(body))
+			record.Response = fmt.Sprintf("openai-api-route upstream return '%s' with '%s'", r.Status, string(body))
+			record.Status = r.StatusCode
 			return fmt.Errorf(record.Response)
 		}
 		// count success
@@ -220,29 +222,23 @@ func main() {
 		proxy.ErrorHandler = func(w http.ResponseWriter, r *http.Request, err error) {
 			log.Println("Error", err, upstream.SK, upstream.Endpoint)
 
+			log.Println("debug", r)
+
 			// abort to error handle
 			c.AbortWithError(502, err)
 
-			// send notification
-			upstreams := []OPENAI_UPSTREAM{}
-			db.Find(&upstreams)
-			upstreamDescriptions := make([]string, 0)
-			for _, upstream := range upstreams {
-				upstreamDescriptions = append(upstreamDescriptions, fmt.Sprintf("ID: %d, %s: %s 成功次数: %d, 失败次数: %d, 最后成功调用: %s",
-					upstream.ID, upstream.SK, upstream.Endpoint, upstream.SuccessCount, upstream.FailedCount, upstream.LastCallSuccessTime,
-				))
+			log.Println("response is", r.Response)
+
+			if record.Status == 0 {
+				record.Status = 502
 			}
-			content := fmt.Sprintf("[%s] OpenAI 转发出错 ID: %d... 密钥: [%s] 上游: [%s] 错误: %s\n---\n%s",
-				c.ClientIP(),
-				upstream.ID, upstream.SK, upstream.Endpoint, err.Error(),
-				strings.Join(upstreamDescriptions, "\n"),
-			)
-			go sendMatrixMessage(content)
-			if err.Error() != "context canceled" && r.Response.StatusCode != 400 {
-				go sendFeishuMessage(content)
+			if record.Response == "" {
+				record.Response = err.Error()
 			}
+			if r.Response != nil {
+				record.Status = r.Response.StatusCode
+			}
 
-			log.Println("response is", r.Response)
 		}
 
 		func() {
@@ -300,7 +296,6 @@ func main() {
 				record.Response = fetchResp.Choices[0].Message.Content
 			} else {
 				log.Println("Unknown content type", contentType)
-				return
 			}
 		}
 
@@ -308,7 +303,7 @@ func main() {
 			record.Body = ""
 		}
 
-		log.Println("Record result:", record.Response)
+		log.Println("Record result:", record.Status, record.Response)
 		record.ElapsedTime = time.Now().Sub(record.CreatedAt)
 		if db.Create(&record).Error != nil {
 			log.Println("Error to save record:", record)
```
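
The main.go hunks touch two hooks of the standard `httputil.ReverseProxy`. The block that formats "upstream return ... with ..." and returns an error looks like a `ModifyResponse` callback (its error is what `ErrorHandler` then receives), although the enclosing lines are not in the diff. A stripped-down sketch of that two-hook shape, with the record keeping and Feishu/Matrix notifications reduced to log lines and the upstream URL used only as a placeholder:

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
)

func main() {
	// Placeholder upstream; the real service picks one per request.
	target, err := url.Parse("https://api.openai.com")
	if err != nil {
		log.Fatal(err)
	}
	proxy := httputil.NewSingleHostReverseProxy(target)

	// Inspect the upstream response; returning an error here hands the
	// request over to ErrorHandler.
	proxy.ModifyResponse = func(r *http.Response) error {
		if r.StatusCode >= 400 {
			body, readErr := io.ReadAll(r.Body)
			if readErr != nil {
				return fmt.Errorf("failed to read response from upstream: %w", readErr)
			}
			return fmt.Errorf("upstream returned %q with %q", r.Status, string(body))
		}
		return nil
	}

	// Transport failures and ModifyResponse errors both land here.
	proxy.ErrorHandler = func(w http.ResponseWriter, r *http.Request, err error) {
		// The real handler fills in the Record (status, response text)
		// and may send a Feishu or Matrix notification at this point.
		log.Println("proxy error:", err)
		w.WriteHeader(http.StatusBadGateway)
	}

	log.Fatal(http.ListenAndServe(":8080", proxy))
}
```

Keeping the status and response bookkeeping inside these two hooks is presumably what lets the handler save one `Record` per request whether the upstream succeeded, failed in transit, or answered with an error status.
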

**record.go** (17 changed lines)

```diff
@@ -11,14 +11,15 @@ import (
 )
 
 type Record struct {
-	ID          int64 `gorm:"primaryKey,autoIncrement"`
-	CreatedAt   time.Time
-	IP          string
-	Body        string `gorm:"serializer:json"`
-	Response    string
-	ElapsedTime time.Duration
-	Status      int
-	UpstreamID  uint
+	ID            int64 `gorm:"primaryKey,autoIncrement"`
+	CreatedAt     time.Time
+	IP            string
+	Body          string `gorm:"serializer:json"`
+	Response      string
+	ElapsedTime   time.Duration
+	Status        int
+	UpstreamID    uint
+	Authorization string
 }
 
 type StreamModeChunk struct {
```
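
For context on the `Authorization` column added to `Record`: a minimal gorm sketch of writing one such row. The struct follows the diff; the sqlite driver, database file name, and field values are assumptions for the example.

```go
package main

import (
	"log"
	"time"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Record mirrors the struct in record.go after this change.
type Record struct {
	ID            int64 `gorm:"primaryKey,autoIncrement"`
	CreatedAt     time.Time
	IP            string
	Body          string `gorm:"serializer:json"`
	Response      string
	ElapsedTime   time.Duration
	Status        int
	UpstreamID    uint
	Authorization string
}

func main() {
	db, err := gorm.Open(sqlite.Open("records.db"), &gorm.Config{})
	if err != nil {
		log.Fatal(err)
	}
	if err := db.AutoMigrate(&Record{}); err != nil {
		log.Fatal(err)
	}

	start := time.Now()
	rec := Record{
		CreatedAt:     start,
		IP:            "127.0.0.1",
		Response:      "example reply",
		Status:        200,
		Authorization: "Bearer access_token",
	}
	rec.ElapsedTime = time.Since(start)
	if err := db.Create(&rec).Error; err != nil {
		log.Println("failed to save record:", rec)
	}
}
```

Because `CreatedAt` is set when the request arrives, `ElapsedTime` can be computed just before the record is saved, matching the `time.Now().Sub(record.CreatedAt)` line in main.go.
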
```diff
@@ -1,17 +1,12 @@
 package main
 
 import (
-	"time"
-
 	"gorm.io/gorm"
 )
 
 // one openai upstream contain a pair of key and endpoint
 type OPENAI_UPSTREAM struct {
 	gorm.Model
-	SK                  string `gorm:"index:idx_sk_endpoint,unique"` // key
-	Endpoint            string `gorm:"index:idx_sk_endpoint,unique"` // endpoint
-	SuccessCount        int64
-	FailedCount         int64
-	LastCallSuccessTime time.Time
+	SK       string `gorm:"index:idx_sk_endpoint,unique"` // key
+	Endpoint string `gorm:"index:idx_sk_endpoint,unique"` // endpoint
 }
```
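
With the counters gone, an `OPENAI_UPSTREAM` row is just a key/endpoint pair, and the README now points users at the `openai_upstreams` table directly. A hedged sketch of the slimmed-down model plus one possible selection strategy (a random pick per request, which is only an illustration; the repository's actual balancing policy is not shown in this diff):

```go
package main

import (
	"log"
	"math/rand"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// OPENAI_UPSTREAM mirrors the struct after this change: just a key and
// an endpoint, unique as a pair.
type OPENAI_UPSTREAM struct {
	gorm.Model
	SK       string `gorm:"index:idx_sk_endpoint,unique"` // key
	Endpoint string `gorm:"index:idx_sk_endpoint,unique"` // endpoint
}

func main() {
	db, err := gorm.Open(sqlite.Open("upstreams.db"), &gorm.Config{})
	if err != nil {
		log.Fatal(err)
	}
	if err := db.AutoMigrate(&OPENAI_UPSTREAM{}); err != nil {
		log.Fatal(err)
	}

	// Rows added here, or edited by hand in the table, are visible to
	// the next db.Find call.
	upstreams := []OPENAI_UPSTREAM{}
	db.Find(&upstreams)
	if len(upstreams) == 0 {
		log.Fatal("no upstreams configured")
	}

	// One possible strategy: pick an upstream at random per request.
	chosen := upstreams[rand.Intn(len(upstreams))]
	log.Println("forwarding via", chosen.Endpoint)
}
```

If upstreams are re-read from the database on each lookup, as the `db.Find` calls in this diff do, hand-edited rows take effect without a restart, which matches the README's note that changes take effect immediately.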