fix: replicate response format

2024-01-23 15:43:48 +08:00
parent b1a9d6b685
commit 33f341026f
2 changed files with 13 additions and 8 deletions

View File

@@ -19,6 +19,7 @@ var replicate_model_url_template = "https://api.replicate.com/v1/models/%s/predi
 func processReplicateRequest(c *gin.Context, upstream *OPENAI_UPSTREAM, record *Record, shouldResponse bool) error {
 	err := _processReplicateRequest(c, upstream, record, shouldResponse)
 	if shouldResponse {
+		sendCORSHeaders(c)
 		if err != nil {
 			c.AbortWithError(502, err)
 		}
@@ -212,14 +213,16 @@ func _processReplicateRequest(c *gin.Context, upstream *OPENAI_UPSTREAM, record
 				break
 			}
+			sendCORSHeaders(c)
 			// create OpenAI response chunk
-			c.SSEvent("", &OpenAIChatResponse{
-				ID:    chunkObj.Event,
+			c.SSEvent("", &OpenAIChatResponseChunk{
+				ID:    "",
 				Model: outResponse.Model,
-				Choices: []OpenAIChatResponseChoice{
+				Choices: []OpenAIChatResponseChunkChoice{
 					{
 						Index: indexCount,
-						Message: OpenAIChatMessage{
+						Delta: OpenAIChatMessage{
 							Role:    "assistant",
 							Content: chunkObj.Data,
 						},
@@ -229,13 +232,14 @@ func _processReplicateRequest(c *gin.Context, upstream *OPENAI_UPSTREAM, record
 			c.Writer.Flush()
 			indexCount += 1
 		}
-		c.SSEvent("", &OpenAIChatResponse{
+		sendCORSHeaders(c)
+		c.SSEvent("", &OpenAIChatResponseChunk{
 			ID:    "",
 			Model: outResponse.Model,
-			Choices: []OpenAIChatResponseChoice{
+			Choices: []OpenAIChatResponseChunkChoice{
 				{
 					Index: indexCount,
-					Message: OpenAIChatMessage{
+					Delta: OpenAIChatMessage{
 						Role:    "assistant",
 						Content: "",
 					},
@@ -315,6 +319,7 @@ func _processReplicateRequest(c *gin.Context, upstream *OPENAI_UPSTREAM, record
 		record.Status = 200
 		// gin return
+		sendCORSHeaders(c)
 		c.JSON(200, openAIResult)
 	}
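The hunks above call sendCORSHeaders(c) before every response write, but the helper itself is not part of this change. As a rough sketch only, a gin helper of this kind typically looks like the following; the function body and the exact header values are assumptions, not the repository's actual implementation:

package main

import "github.com/gin-gonic/gin"

// sendCORSHeaders is defined elsewhere in this repository; this sketch only
// illustrates the usual shape of such a helper. The header values below are
// placeholders and may differ from the real code.
func sendCORSHeaders(c *gin.Context) {
	c.Header("Access-Control-Allow-Origin", "*")
	c.Header("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
	c.Header("Access-Control-Allow-Headers", "Content-Type, Authorization")
}

Calling such a helper before c.SSEvent / c.JSON matters because headers can no longer be changed once the response body has started streaming.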

View File

@@ -176,5 +176,5 @@ type OpenAIChatResponseChunk struct {
 type OpenAIChatResponseChunkChoice struct {
 	Index        int64             `json:"index"`
 	Delta        OpenAIChatMessage `json:"delta"`
-	FinishReason *string           `json:"finish_reason"`
+	FinishReason string            `json:"finish_reason"`
 }
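After this change the streaming path emits OpenAIChatResponseChunk objects whose choices carry a delta instead of a message, matching the OpenAI streaming response shape. A minimal sketch of how such a chunk serializes, assuming the OpenAIChatMessage fields and the chunk struct's json tags (neither is shown in this diff) and using a made-up model name:

package main

import (
	"encoding/json"
	"fmt"
)

// OpenAIChatMessage is not shown in this diff; Role/Content are assumed from
// the fields set in the handler above.
type OpenAIChatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// Post-change definition from the second file of this diff.
type OpenAIChatResponseChunkChoice struct {
	Index        int64             `json:"index"`
	Delta        OpenAIChatMessage `json:"delta"`
	FinishReason string            `json:"finish_reason"`
}

// Only the fields the handler sets are sketched here; the json tags are assumptions.
type OpenAIChatResponseChunk struct {
	ID      string                          `json:"id"`
	Model   string                          `json:"model"`
	Choices []OpenAIChatResponseChunkChoice `json:"choices"`
}

func main() {
	chunk := OpenAIChatResponseChunk{
		ID:    "",
		Model: "example/model", // placeholder model name
		Choices: []OpenAIChatResponseChunkChoice{
			{
				Index: 0,
				Delta: OpenAIChatMessage{Role: "assistant", Content: "Hello"},
			},
		},
	}
	out, _ := json.Marshal(chunk)
	fmt.Println(string(out))
	// {"id":"","model":"example/model","choices":[{"index":0,"delta":{"role":"assistant","content":"Hello"},"finish_reason":""}]}
}

Note that with FinishReason declared as string rather than *string, the field always serializes as a JSON string (for example ""), never as null.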