From d72ebbda0ba014a3d633ff418a99b10c859f6d9d Mon Sep 17 00:00:00 2001
From: hongsheng
Date: Thu, 25 Jan 2024 04:54:14 +0800
Subject: [PATCH] Zhipu V4: stream responses carry the accurate model name
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 relay/channel/zhipu_v4/main.go | 14 +++++++-------
 relay/controller/util.go       |  2 +-
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/relay/channel/zhipu_v4/main.go b/relay/channel/zhipu_v4/main.go
index 46e766b8..baa7f77b 100644
--- a/relay/channel/zhipu_v4/main.go
+++ b/relay/channel/zhipu_v4/main.go
@@ -99,7 +99,7 @@ func ConvertRequest(request openai.GeneralOpenAIRequest) *Request {
 	}
 }
 
-func StreamResponseZhipuV42OpenAI(zhipuResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse) {
+func StreamResponseZhipuV42OpenAI(zhipuResponse *StreamResponse, reqModel string) *openai.ChatCompletionsStreamResponse {
 	var choice openai.ChatCompletionsStreamResponseChoice
 	choice.Delta.Content = zhipuResponse.Choices[0].Delta.Content
 	choice.Delta.Role = zhipuResponse.Choices[0].Delta.Role
@@ -110,18 +110,18 @@ func StreamResponseZhipuV42OpenAI(zhipuResponse *StreamResponse) (*openai.ChatCo
 		Id:      zhipuResponse.Id,
 		Object:  "chat.completion.chunk",
 		Created: zhipuResponse.Created,
-		Model:   "glm-4",
+		Model:   reqModel,
 		Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
 	}
 	return &response
 }
 
-func LastStreamResponseZhipuV42OpenAI(zhipuResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *openai.Usage) {
-	response := StreamResponseZhipuV42OpenAI(zhipuResponse)
+func LastStreamResponseZhipuV42OpenAI(zhipuResponse *StreamResponse, reqModel string) (*openai.ChatCompletionsStreamResponse, *openai.Usage) {
+	response := StreamResponseZhipuV42OpenAI(zhipuResponse, reqModel)
 	return response, &zhipuResponse.Usage
 }
 
-func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage) {
+func StreamHandler(c *gin.Context, resp *http.Response, reqModel string) (*openai.ErrorWithStatusCode, *openai.Usage) {
 	var usage *openai.Usage
 	scanner := bufio.NewScanner(resp.Body)
 	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
@@ -168,9 +168,9 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
 		}
 		var response *openai.ChatCompletionsStreamResponse
 		if strings.Contains(data, "prompt_tokens") {
-			response, usage = LastStreamResponseZhipuV42OpenAI(&streamResponse)
+			response, usage = LastStreamResponseZhipuV42OpenAI(&streamResponse, reqModel)
 		} else {
-			response = StreamResponseZhipuV42OpenAI(&streamResponse)
+			response = StreamResponseZhipuV42OpenAI(&streamResponse, reqModel)
 		}
 		jsonResponse, err := json.Marshal(response)
 		if err != nil {
diff --git a/relay/controller/util.go b/relay/controller/util.go
index 618c2f87..f8789a30 100644
--- a/relay/controller/util.go
+++ b/relay/controller/util.go
@@ -301,7 +301,7 @@ func DoResponse(c *gin.Context, textRequest *openai.GeneralOpenAIRequest, resp *
 		}
 	case constant.APITypeZhipu_v4:
 		if isStream {
-			err, usage = zhipu_v4.StreamHandler(c, resp)
+			err, usage = zhipu_v4.StreamHandler(c, resp, textRequest.Model)
 		} else {
 			err, usage = zhipu_v4.Handler(c, resp)
 		}
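
For reference, a minimal, self-contained Go sketch of the idea behind this patch. The types and helper below are simplified, hypothetical stand-ins (not the repository's actual openai package or function signatures); they only illustrate why the requested model name is threaded through to the converted stream chunk: clients that asked for, say, glm-3-turbo now see that name echoed in every SSE chunk instead of a hardcoded "glm-4".

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-ins for the relay's response types; names here are illustrative only.
type StreamDelta struct {
	Role    string `json:"role,omitempty"`
	Content string `json:"content,omitempty"`
}

type StreamChoice struct {
	Index int         `json:"index"`
	Delta StreamDelta `json:"delta"`
}

type StreamChunk struct {
	Id      string         `json:"id"`
	Object  string         `json:"object"`
	Created int64          `json:"created"`
	Model   string         `json:"model"`
	Choices []StreamChoice `json:"choices"`
}

// buildChunk mirrors the patched converter: the chunk's Model field comes from
// the caller's request (reqModel) rather than being fixed to "glm-4".
func buildChunk(id string, created int64, reqModel, content string) *StreamChunk {
	return &StreamChunk{
		Id:      id,
		Object:  "chat.completion.chunk",
		Created: created,
		Model:   reqModel, // previously always "glm-4"
		Choices: []StreamChoice{{Delta: StreamDelta{Role: "assistant", Content: content}}},
	}
}

func main() {
	// A request for "glm-3-turbo" is echoed back accurately in the streamed chunk.
	chunk := buildChunk("chatcmpl-123", 1706123654, "glm-3-turbo", "Hello")
	b, _ := json.Marshal(chunk)
	fmt.Printf("data: %s\n\n", b)
}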