feat: initial function call support for xunfei
Parent: 06a3fc5421
Commit: f89ae5ad58
@@ -26,7 +26,11 @@ import (
 func requestOpenAI2Xunfei(request model.GeneralOpenAIRequest, xunfeiAppId string, domain string) *ChatRequest {
 	messages := make([]Message, 0, len(request.Messages))
+	var lastToolCalls []model.Tool
 	for _, message := range request.Messages {
+		if message.ToolCalls != nil {
+			lastToolCalls = message.ToolCalls
+		}
 		messages = append(messages, Message{
 			Role:    message.Role,
 			Content: message.StringContent(),
@@ -39,9 +43,33 @@ func requestOpenAI2Xunfei(request model.GeneralOpenAIRequest, xunfeiAppId string
 	xunfeiRequest.Parameter.Chat.TopK = request.N
 	xunfeiRequest.Parameter.Chat.MaxTokens = request.MaxTokens
 	xunfeiRequest.Payload.Message.Text = messages
+	if len(lastToolCalls) != 0 {
+		for _, toolCall := range lastToolCalls {
+			xunfeiRequest.Payload.Functions.Text = append(xunfeiRequest.Payload.Functions.Text, toolCall.Function)
+		}
+	}
+
 	return &xunfeiRequest
 }
 
+func getToolCalls(response *ChatResponse) []model.Tool {
+	var toolCalls []model.Tool
+	if len(response.Payload.Choices.Text) == 0 {
+		return toolCalls
+	}
+	item := response.Payload.Choices.Text[0]
+	if item.FunctionCall == nil {
+		return toolCalls
+	}
+	toolCall := model.Tool{
+		Id:       fmt.Sprintf("call_%s", helper.GetUUID()),
+		Type:     "function",
+		Function: *item.FunctionCall,
+	}
+	toolCalls = append(toolCalls, toolCall)
+	return toolCalls
+}
+
 func responseXunfei2OpenAI(response *ChatResponse) *openai.TextResponse {
 	if len(response.Payload.Choices.Text) == 0 {
 		response.Payload.Choices.Text = []ChatResponseTextItem{
@@ -55,6 +83,7 @@ func responseXunfei2OpenAI(response *ChatResponse) *openai.TextResponse {
 		Message: model.Message{
 			Role:    "assistant",
 			Content: response.Payload.Choices.Text[0].Content,
+			ToolCalls: getToolCalls(response),
 		},
 		FinishReason: constant.StopFinishReason,
 	}
@@ -78,6 +107,7 @@ func streamResponseXunfei2OpenAI(xunfeiResponse *ChatResponse) *openai.ChatCompl
 	}
 	var choice openai.ChatCompletionsStreamResponseChoice
 	choice.Delta.Content = xunfeiResponse.Payload.Choices.Text[0].Content
+	choice.Delta.ToolCalls = getToolCalls(xunfeiResponse)
 	if xunfeiResponse.Payload.Choices.Status == 2 {
 		choice.FinishReason = &constant.StopFinishReason
 	}
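For context, the hunks above wire tool calls through the Spark (xunfei) relay in both directions: tool calls carried on the most recent request message are copied into the outgoing payload's functions list, and a function_call returned by Spark is wrapped back into an OpenAI-style tool call on the response. The sketch below is a minimal, self-contained illustration of that mapping only; the local Function and Tool types, the fixed "call_example" id, and the "get_weather" example are simplified stand-ins, not the repository's actual model package. The hunks below then add the matching fields to the request and response payload types.

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-ins for the repository's model.Function and model.Tool.
type Function struct {
	Name      string `json:"name"`
	Arguments string `json:"arguments,omitempty"`
}

type Tool struct {
	Id       string   `json:"id"`
	Type     string   `json:"type"`
	Function Function `json:"function"`
}

// requestFunctions mirrors the request-side change: the tool calls remembered
// from the last message become the functions.text array in the Spark payload.
func requestFunctions(lastToolCalls []Tool) []Function {
	var fns []Function
	for _, tc := range lastToolCalls {
		fns = append(fns, tc.Function)
	}
	return fns
}

// responseToolCalls mirrors getToolCalls above: a non-nil function_call from
// Spark is wrapped into a single OpenAI-style tool call.
func responseToolCalls(fc *Function) []Tool {
	if fc == nil {
		return nil
	}
	return []Tool{{
		Id:       "call_example", // the real code generates fmt.Sprintf("call_%s", helper.GetUUID())
		Type:     "function",
		Function: *fc,
	}}
}

func main() {
	tools := []Tool{{Id: "call_1", Type: "function", Function: Function{Name: "get_weather"}}}
	req, _ := json.Marshal(requestFunctions(tools))
	fmt.Println(string(req)) // [{"name":"get_weather"}]

	resp, _ := json.Marshal(responseToolCalls(&Function{Name: "get_weather", Arguments: `{"city":"Beijing"}`}))
	fmt.Println(string(resp)) // a single tool call wrapping the returned function_call
}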
@@ -26,6 +26,9 @@ type ChatRequest struct {
 		Message struct {
 			Text []Message `json:"text"`
 		} `json:"message"`
+		Functions struct {
+			Text []model.Function `json:"text,omitempty"`
+		} `json:"functions"`
 	} `json:"payload"`
 }
 
@@ -33,6 +36,8 @@ type ChatResponseTextItem struct {
 	Content string `json:"content"`
 	Role    string `json:"role"`
 	Index   int    `json:"index"`
+	ContentType  string          `json:"content_type"`
+	FunctionCall *model.Function `json:"function_call"`
 }
 
 type ChatResponse struct {
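One detail of the new request-side field worth seeing in isolation: the inner text slice carries omitempty, but the enclosing functions object does not, so a request without tool calls still serializes an empty functions object. The standalone sketch below (unrelated payload fields and the repository's model package are omitted) demonstrates that encoding/json behavior; whether Spark cares about the empty object is not verified here.

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed stand-in for the new functions field on the Spark request payload.
type Function struct {
	Name string `json:"name"`
}

type Payload struct {
	Functions struct {
		Text []Function `json:"text,omitempty"`
	} `json:"functions"`
}

func main() {
	var withTools Payload
	withTools.Functions.Text = []Function{{Name: "get_weather"}}
	a, _ := json.Marshal(withTools)
	fmt.Println(string(a)) // {"functions":{"text":[{"name":"get_weather"}]}}

	var noTools Payload
	b, _ := json.Marshal(noTools)
	fmt.Println(string(b)) // {"functions":{}} -- omitempty drops "text", but the enclosing object is always emitted
}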