🐛 fix: hunyuan model name change (#100)

Buer 2024-03-11 16:16:22 +08:00 committed by GitHub
parent 27738aa3c7
commit b70378148e
5 changed files with 48 additions and 40 deletions

View File

@@ -152,6 +152,11 @@ func init() {
 	// ¥0.1 / 1k tokens // https://cloud.tencent.com/document/product/1729/97731#e0e6be58-60c8-469f-bdeb-6c264ce3b4d0
 	"hunyuan": {[]float64{7.143, 7.143}, ChannelTypeTencent},
+	// https://cloud.tencent.com/document/product/1729/97731#e0e6be58-60c8-469f-bdeb-6c264ce3b4d0
+	// ¥0.01 / 1k tokens
+	"ChatStd": {[]float64{0.7143, 0.7143}, ChannelTypeTencent},
+	// ¥0.1 / 1k tokens
+	"ChatPro": {[]float64{7.143, 7.143}, ChannelTypeTencent},
 	"Baichuan2-Turbo": {[]float64{0.5715, 0.5715}, ChannelTypeBaichuan}, // ¥0.008 / 1k tokens
 	"Baichuan2-Turbo-192k": {[]float64{1.143, 1.143}, ChannelTypeBaichuan}, // ¥0.016 / 1k tokens

View File

@@ -131,40 +131,45 @@ func (p *BaiduProvider) convertToChatOpenai(response *BaiduChatResponse, request
 }
 
 func convertFromChatOpenai(request *types.ChatCompletionRequest) *BaiduChatRequest {
-	messages := make([]BaiduMessage, 0, len(request.Messages))
+	baiduChatRequest := &BaiduChatRequest{
+		Messages:        make([]BaiduMessage, 0, len(request.Messages)),
+		Temperature:     request.Temperature,
+		Stream:          request.Stream,
+		TopP:            request.TopP,
+		PenaltyScore:    request.FrequencyPenalty,
+		Stop:            request.Stop,
+		MaxOutputTokens: request.MaxTokens,
+	}
+
+	if request.ResponseFormat != nil {
+		baiduChatRequest.ResponseFormat = request.ResponseFormat.Type
+	}
+
 	for _, message := range request.Messages {
 		if message.Role == types.ChatMessageRoleSystem {
-			messages = append(messages, BaiduMessage{
-				Role:    types.ChatMessageRoleUser,
-				Content: message.StringContent(),
-			})
-			messages = append(messages, BaiduMessage{
-				Role:    types.ChatMessageRoleAssistant,
-				Content: "Okay",
-			})
+			baiduChatRequest.System = message.StringContent()
+			continue
 		} else if message.Role == types.ChatMessageRoleFunction {
-			messages = append(messages, BaiduMessage{
-				Role:    types.ChatMessageRoleAssistant,
-				Content: "Okay",
-			})
-			messages = append(messages, BaiduMessage{
+			baiduChatRequest.Messages = append(baiduChatRequest.Messages, BaiduMessage{
+				Role: types.ChatMessageRoleAssistant,
+				FunctionCall: &types.ChatCompletionToolCallsFunction{
+					Name:      *message.Name,
+					Arguments: "{}",
+				},
+			})
+
+			baiduChatRequest.Messages = append(baiduChatRequest.Messages, BaiduMessage{
 				Role:    types.ChatMessageRoleUser,
 				Content: "这是函数调用返回的内容,请回答之前的问题:\n" + message.StringContent(),
 			})
 		} else {
-			messages = append(messages, BaiduMessage{
+			baiduChatRequest.Messages = append(baiduChatRequest.Messages, BaiduMessage{
 				Role:    message.Role,
 				Content: message.StringContent(),
 			})
 		}
 	}
 
-	baiduChatRequest := &BaiduChatRequest{
-		Messages:    messages,
-		Temperature: request.Temperature,
-		Stream:      request.Stream,
-	}
-
 	if request.Tools != nil {
 		functions := make([]*types.ChatCompletionFunction, 0, len(request.Tools))
 		for _, tool := range request.Tools {
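
Note: with this change the system prompt is no longer rewritten into a synthetic user/assistant exchange; it is lifted into the request's dedicated system field and skipped in the message list. A self-contained sketch of that pattern, using simplified stand-in types rather than the repo's own:

package main

import "fmt"

type msg struct{ Role, Content string }

type chatRequest struct {
	System   string
	Messages []msg
}

func convert(in []msg) chatRequest {
	out := chatRequest{Messages: make([]msg, 0, len(in))}
	for _, m := range in {
		if m.Role == "system" {
			// Lift the system prompt into the dedicated field and skip it
			// in the ordinary message list, mirroring the diff above.
			out.System = m.Content
			continue
		}
		out.Messages = append(out.Messages, m)
	}
	return out
}

func main() {
	r := convert([]msg{
		{Role: "system", Content: "You are a helpful assistant."},
		{Role: "user", Content: "Hello"},
	})
	fmt.Printf("%+v\n", r)
}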

View File

@@ -15,14 +15,22 @@ type BaiduAccessToken struct {
 }
 
 type BaiduMessage struct {
 	Role string `json:"role"`
-	Content string `json:"content"`
+	Content string `json:"content,omitempty"`
+	FunctionCall *types.ChatCompletionToolCallsFunction `json:"function_call,omitempty"`
 }
 
 type BaiduChatRequest struct {
 	Messages []BaiduMessage `json:"messages"`
 	Functions []*types.ChatCompletionFunction `json:"functions,omitempty"`
 	Temperature float64 `json:"temperature,omitempty"`
+	TopP float64 `json:"top_p,omitempty"`
+	PenaltyScore float64 `json:"penalty_score,omitempty"`
 	Stream bool `json:"stream"`
+	System string `json:"system,omitempty"`
+	Stop []string `json:"stop,omitempty"`
+	DisableSearch bool `json:"disable_search,omitempty"`
+	MaxOutputTokens int `json:"max_output_tokens,omitempty"`
+	ResponseFormat string `json:"response_format,omitempty"`
 	UserId string `json:"user_id,omitempty"`
 }
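
Note: the new optional fields all carry omitempty, so parameters left at their zero value are simply dropped from the serialized Baidu request body. A quick illustration with a simplified stand-in struct (not the repo's own):

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-in for BaiduChatRequest, keeping only a few of the tags
// added in this commit.
type chatRequest struct {
	Messages        []string `json:"messages"`
	Temperature     float64  `json:"temperature,omitempty"`
	TopP            float64  `json:"top_p,omitempty"`
	System          string   `json:"system,omitempty"`
	Stop            []string `json:"stop,omitempty"`
	MaxOutputTokens int      `json:"max_output_tokens,omitempty"`
	ResponseFormat  string   `json:"response_format,omitempty"`
	Stream          bool     `json:"stream"`
}

func main() {
	body, _ := json.Marshal(chatRequest{
		Messages: []string{"hi"},
		System:   "You are a helpful assistant.",
	})
	fmt.Println(string(body))
	// {"messages":["hi"],"system":"You are a helpful assistant.","stream":false}
}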

View File

@@ -75,6 +75,7 @@ func (p *TencentProvider) getChatRequest(request *types.ChatCompletionRequest) (
 	// Get the request headers
 	headers := p.GetRequestHeaders()
 	headers["Authorization"] = sign
+	headers["X-TC-Action"] = request.Model
 	if request.Stream {
 		headers["Accept"] = "text/event-stream"
 	}

@@ -125,17 +126,6 @@ func convertFromChatOpenai(request *types.ChatCompletionRequest) *TencentChatReq
 	messages := make([]TencentMessage, 0, len(request.Messages))
 	for i := 0; i < len(request.Messages); i++ {
 		message := request.Messages[i]
-		if message.Role == "system" {
-			messages = append(messages, TencentMessage{
-				Role: "user",
-				Content: message.StringContent(),
-			})
-			messages = append(messages, TencentMessage{
-				Role: "assistant",
-				Content: "Okay",
-			})
-			continue
-		}
 		messages = append(messages, TencentMessage{
 			Content: message.StringContent(),
 			Role: message.Role,
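
Note: the added header forwards the model name as the Tencent API action, so "ChatStd" and "ChatPro" appear to select the corresponding hunyuan endpoints documented at the URL above. A self-contained sketch of that header logic, with placeholder names and header values that are assumptions rather than the provider's exact API:

package main

import "fmt"

// buildHeaders mirrors the pattern in the diff above with placeholder inputs:
// the Authorization value comes from the Tencent signing step, and the model
// name doubles as the X-TC-Action value.
func buildHeaders(sign, model string, stream bool) map[string]string {
	h := map[string]string{
		"Content-Type":  "application/json",
		"Authorization": sign,
		"X-TC-Action":   model, // e.g. "ChatStd" or "ChatPro"
	}
	if stream {
		h["Accept"] = "text/event-stream"
	}
	return h
}

func main() {
	fmt.Println(buildHeaders("<tencent-signature>", "ChatPro", true))
}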

View File

@@ -122,8 +122,8 @@ const typeConfig = {
 	},
 	23: {
 		input: {
-			models: ['hunyuan'],
-			test_model: 'hunyuan'
+			models: ['ChatStd', 'ChatPro'],
+			test_model: 'ChatStd'
 		},
 		prompt: {
 			key: '按照如下格式输入:AppId|SecretId|SecretKey'