chore: set model name in response for models

This commit is contained in:
JustSong 2023-12-24 16:57:45 +08:00
parent 61c6d2002b
commit 1d7080bfa4
5 changed files with 5 additions and 0 deletions

View File

@ -303,6 +303,7 @@ func aliHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode
}, nil
}
fullTextResponse := responseAli2OpenAI(&aliResponse)
fullTextResponse.Model = "qwen"
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil

View File

@ -255,6 +255,7 @@ func baiduHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCo
}, nil
}
fullTextResponse := responseBaidu2OpenAI(&baiduResponse)
fullTextResponse.Model = "ernie-bot"
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil

View File

@ -187,6 +187,7 @@ func palmHandler(c *gin.Context, resp *http.Response, promptTokens int, model st
}, nil
}
fullTextResponse := responsePaLM2OpenAI(&palmResponse)
fullTextResponse.Model = model
completionTokens := countTokenText(palmResponse.Candidates[0].Content, model)
usage := Usage{
PromptTokens: promptTokens,

View File

@ -237,6 +237,7 @@ func tencentHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatus
}, nil
}
fullTextResponse := responseTencent2OpenAI(&TencentResponse)
fullTextResponse.Model = "hunyuan"
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil

View File

@ -290,6 +290,7 @@ func zhipuHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCo
}, nil
}
fullTextResponse := responseZhipu2OpenAI(&zhipuResponse)
fullTextResponse.Model = "chatglm"
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil