Update relay-ali.go: improve stream mode, add web search support

Tongyi Qianwen supports incremental output in stream mode, so there is no longer any need to strip the previous response's prefix from every chunk. In testing, qwen-max performs well with web search enabled, so search has been turned on; if other models have problems with it, this can be changed to enable search for qwen-max only.
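For context, a minimal sketch (not part of the commit) of the behavior this change relies on: without incremental output, each stream event carries the full text generated so far, so the handler had to trim off the previously sent prefix; with incremental_output enabled, the API returns only the new suffix and each chunk can be forwarded as the delta directly. The chunk values below are made up for illustration.

package main

import (
	"fmt"
	"strings"
)

// toDelta mimics what the old handler did with strings.TrimPrefix:
// turn a cumulative chunk (the full text so far) into the delta to forward.
func toDelta(full, lastSent string) string {
	return strings.TrimPrefix(full, lastSent)
}

func main() {
	// Cumulative chunks, as returned when incremental output is off.
	cumulative := []string{"Hello", "Hello, wor", "Hello, world"}
	last := ""
	for _, chunk := range cumulative {
		fmt.Printf("delta: %q\n", toDelta(chunk, last))
		last = chunk
	}
	// With incremental_output=true the API is expected to return the deltas
	// themselves ("Hello", ", wor", "ld"), so no trimming is needed.
}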
moondie 2023-12-22 03:33:18 +08:00 committed by GitHub
parent b7fcb319da
commit 6718afa273


@@ -95,12 +95,14 @@ func requestOpenAI2Ali(request GeneralOpenAIRequest) *AliChatRequest {
Input: AliInput{
Messages: messages,
},
//Parameters: AliParameters{ // ChatGPT's parameters are not compatible with Ali's
Parameters: AliParameters{ // ChatGPT's parameters are not compatible with Ali's
// TopP: request.TopP,
// TopK: 50,
// //Seed: 0,
// //EnableSearch: false,
//},
EnableSearch: true,
IncrementalOutput: request.Stream, // incremental chunks are only needed when streaming
},
}
}
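For reference, the two fields set above presumably serialize to DashScope's enable_search and incremental_output request parameters. A sketch of how AliParameters might be declared elsewhere in relay-ali.go (the declaration is not part of this diff, so field names and JSON tags here are assumptions):

// Assumed declaration in the same package; only EnableSearch and
// IncrementalOutput matter for this change.
type AliParameters struct {
	TopP              float64 `json:"top_p,omitempty"`
	TopK              int     `json:"top_k,omitempty"`
	Seed              uint64  `json:"seed,omitempty"`
	EnableSearch      bool    `json:"enable_search,omitempty"`
	IncrementalOutput bool    `json:"incremental_output,omitempty"`
}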
@@ -202,7 +204,7 @@ func streamResponseAli2OpenAI(aliResponse *AliChatResponse) *ChatCompletionsStre
Id: aliResponse.RequestId,
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Model: "ernie-bot",
Model: "qwen",
Choices: []ChatCompletionsStreamResponseChoice{choice},
}
return &response
@@ -240,7 +242,7 @@ func aliStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStat
stopChan <- true
}()
setEventStreamHeaders(c)
lastResponseText := ""
//lastResponseText := ""
c.Stream(func(w io.Writer) bool {
select {
case data := <-dataChan:
@@ -256,8 +258,8 @@ func aliStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStat
usage.TotalTokens = aliResponse.Usage.InputTokens + aliResponse.Usage.OutputTokens
}
response := streamResponseAli2OpenAI(&aliResponse)
response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
lastResponseText = aliResponse.Output.Text
//response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
//lastResponseText = aliResponse.Output.Text
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())