From 61c6d2002b0dd1961faf6b38e86f1201f6df368c Mon Sep 17 00:00:00 2001 From: "Laisky.Cai" Date: Tue, 19 Dec 2023 02:21:12 +0000 Subject: [PATCH] fix: openai response should contain `model` - Update model attributes in `claudeHandler` for `relay-claude.go` - Implement model type for fullTextResponse in `relay-gemini.go` - Add new `Model` field to `OpenAITextResponse` struct in `relay.go` --- controller/relay-claude.go | 1 + controller/relay-gemini.go | 1 + controller/relay.go | 1 + 3 files changed, 3 insertions(+) diff --git a/controller/relay-claude.go b/controller/relay-claude.go index 1b72b47d..ca7a701a 100644 --- a/controller/relay-claude.go +++ b/controller/relay-claude.go @@ -204,6 +204,7 @@ func claudeHandler(c *gin.Context, resp *http.Response, promptTokens int, model }, nil } fullTextResponse := responseClaude2OpenAI(&claudeResponse) + fullTextResponse.Model = model completionTokens := countTokenText(claudeResponse.Completion, model) usage := Usage{ PromptTokens: promptTokens, diff --git a/controller/relay-gemini.go b/controller/relay-gemini.go index 2458458e..523018de 100644 --- a/controller/relay-gemini.go +++ b/controller/relay-gemini.go @@ -287,6 +287,7 @@ func geminiChatHandler(c *gin.Context, resp *http.Response, promptTokens int, mo }, nil } fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse) + fullTextResponse.Model = model completionTokens := countTokenText(geminiResponse.GetResponseText(), model) usage := Usage{ PromptTokens: promptTokens, diff --git a/controller/relay.go b/controller/relay.go index 15021997..b7906d08 100644 --- a/controller/relay.go +++ b/controller/relay.go @@ -206,6 +206,7 @@ type OpenAITextResponseChoice struct { type OpenAITextResponse struct { Id string `json:"id"` + Model string `json:"model,omitempty"` Object string `json:"object"` Created int64 `json:"created"` Choices []OpenAITextResponseChoice `json:"choices"`