diff --git a/controller/relay-ali.go b/controller/relay-ali.go
index 7968bfb6..df1cc084 100644
--- a/controller/relay-ali.go
+++ b/controller/relay-ali.go
@@ -310,6 +310,7 @@ func aliHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode
 		}, nil
 	}
 	fullTextResponse := responseAli2OpenAI(&aliResponse)
+	fullTextResponse.Model = "qwen"
 	jsonResponse, err := json.Marshal(fullTextResponse)
 	if err != nil {
 		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
diff --git a/controller/relay-baidu.go b/controller/relay-baidu.go
index c75ec09a..dca30da1 100644
--- a/controller/relay-baidu.go
+++ b/controller/relay-baidu.go
@@ -255,6 +255,7 @@ func baiduHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCo
 		}, nil
 	}
 	fullTextResponse := responseBaidu2OpenAI(&baiduResponse)
+	fullTextResponse.Model = "ernie-bot"
 	jsonResponse, err := json.Marshal(fullTextResponse)
 	if err != nil {
 		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
diff --git a/controller/relay-claude.go b/controller/relay-claude.go
index 1b72b47d..ca7a701a 100644
--- a/controller/relay-claude.go
+++ b/controller/relay-claude.go
@@ -204,6 +204,7 @@ func claudeHandler(c *gin.Context, resp *http.Response, promptTokens int, model
 		}, nil
 	}
 	fullTextResponse := responseClaude2OpenAI(&claudeResponse)
+	fullTextResponse.Model = model
 	completionTokens := countTokenText(claudeResponse.Completion, model)
 	usage := Usage{
 		PromptTokens:     promptTokens,
diff --git a/controller/relay-gemini.go b/controller/relay-gemini.go
index 2458458e..523018de 100644
--- a/controller/relay-gemini.go
+++ b/controller/relay-gemini.go
@@ -287,6 +287,7 @@ func geminiChatHandler(c *gin.Context, resp *http.Response, promptTokens int, mo
 		}, nil
 	}
 	fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse)
+	fullTextResponse.Model = model
 	completionTokens := countTokenText(geminiResponse.GetResponseText(), model)
 	usage := Usage{
 		PromptTokens:     promptTokens,
diff --git a/controller/relay-palm.go b/controller/relay-palm.go
index 2bd0bcd8..0c1c8af6 100644
--- a/controller/relay-palm.go
+++ b/controller/relay-palm.go
@@ -187,6 +187,7 @@ func palmHandler(c *gin.Context, resp *http.Response, promptTokens int, model st
 		}, nil
 	}
 	fullTextResponse := responsePaLM2OpenAI(&palmResponse)
+	fullTextResponse.Model = model
 	completionTokens := countTokenText(palmResponse.Candidates[0].Content, model)
 	usage := Usage{
 		PromptTokens:     promptTokens,
diff --git a/controller/relay-tencent.go b/controller/relay-tencent.go
index f66bf38f..5930ae89 100644
--- a/controller/relay-tencent.go
+++ b/controller/relay-tencent.go
@@ -237,6 +237,7 @@ func tencentHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatus
 		}, nil
 	}
 	fullTextResponse := responseTencent2OpenAI(&TencentResponse)
+	fullTextResponse.Model = "hunyuan"
 	jsonResponse, err := json.Marshal(fullTextResponse)
 	if err != nil {
 		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
diff --git a/controller/relay-zhipu.go b/controller/relay-zhipu.go
index 2e345ab5..cb5a78cf 100644
--- a/controller/relay-zhipu.go
+++ b/controller/relay-zhipu.go
@@ -290,6 +290,7 @@ func zhipuHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCo
 		}, nil
 	}
 	fullTextResponse := responseZhipu2OpenAI(&zhipuResponse)
+	fullTextResponse.Model = "chatglm"
 	jsonResponse, err := json.Marshal(fullTextResponse)
 	if err != nil {
 		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
diff --git a/controller/relay.go b/controller/relay.go
index 15021997..b7906d08 100644
--- a/controller/relay.go
+++ b/controller/relay.go
@@ -206,6 +206,7 @@ type OpenAITextResponseChoice struct {
 
 type OpenAITextResponse struct {
 	Id      string                     `json:"id"`
+	Model   string                     `json:"model,omitempty"`
 	Object  string                     `json:"object"`
 	Created int64                      `json:"created"`
 	Choices []OpenAITextResponseChoice `json:"choices"`
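
For illustration, a minimal self-contained sketch of what the struct change means for serialization (the type is trimmed to the fields visible in the relay.go hunk; ids and timestamps are invented): once a handler assigns Model, as aliHandler does with "qwen", the marshaled JSON gains a "model" field, while the omitempty tag keeps the output of any code path that never sets it unchanged.

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of OpenAITextResponse from controller/relay.go;
// the real type also carries Choices and Usage.
type OpenAITextResponse struct {
	Id      string `json:"id"`
	Model   string `json:"model,omitempty"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
}

func main() {
	// A handler that sets Model, e.g. aliHandler assigning "qwen":
	withModel := OpenAITextResponse{Id: "chatcmpl-1", Model: "qwen", Object: "chat.completion", Created: 1700000000}
	b, _ := json.Marshal(withModel)
	fmt.Println(string(b)) // {"id":"chatcmpl-1","model":"qwen","object":"chat.completion","created":1700000000}

	// With omitempty, a response whose Model is never set serializes exactly as before:
	withoutModel := OpenAITextResponse{Id: "chatcmpl-2", Object: "chat.completion", Created: 1700000000}
	b, _ = json.Marshal(withoutModel)
	fmt.Println(string(b)) // {"id":"chatcmpl-2","object":"chat.completion","created":1700000000}
}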