🐛 fix: hunyuan model name change (#100)
parent 27738aa3c7
commit b70378148e
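In short: the Tencent hunyuan channel's model IDs are renamed from the single `hunyuan` entry to the API's action names `ChatStd` and `ChatPro` (with per-model pricing), the request builder now sends the model name as the `X-TC-Action` header, and the Baidu provider's request mapping is reworked along the way (native `system` and `function_call` support plus additional request options).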
```diff
@@ -152,6 +152,11 @@ func init() {
-	// ¥0.1 / 1k tokens // https://cloud.tencent.com/document/product/1729/97731#e0e6be58-60c8-469f-bdeb-6c264ce3b4d0
-	"hunyuan": {[]float64{7.143, 7.143}, ChannelTypeTencent},
+	// https://cloud.tencent.com/document/product/1729/97731#e0e6be58-60c8-469f-bdeb-6c264ce3b4d0
+	// ¥0.01 / 1k tokens
+	"ChatStd": {[]float64{0.7143, 0.7143}, ChannelTypeTencent},
+	//¥0.1 / 1k tokens
+	"ChatPro": {[]float64{7.143, 7.143}, ChannelTypeTencent},
 
 	"Baichuan2-Turbo":      {[]float64{0.5715, 0.5715}, ChannelTypeBaichuan}, // ¥0.008 / 1k tokens
 	"Baichuan2-Turbo-192k": {[]float64{1.143, 1.143}, ChannelTypeBaichuan},   // ¥0.016 / 1k tokens
```
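The two values per entry are presumably the input and output multipliers. Assuming the upstream one-api convention that a ratio of 1 corresponds to $0.002 per 1k tokens at a fixed 7:1 CNY:USD rate, each ratio is the CNY price per 1k tokens divided by 0.014. A quick sketch to check the arithmetic (the helper name is ours, not from the repo):

```go
package main

import "fmt"

// cnyPer1kToRatio is a hypothetical helper illustrating how the ratios above
// could be derived, assuming ratio 1 == $0.002 / 1k tokens and a fixed
// 7:1 CNY:USD exchange rate, i.e. ratio = CNY price per 1k tokens / 0.014.
func cnyPer1kToRatio(cnyPer1k float64) float64 {
	return cnyPer1k / 0.014
}

func main() {
	fmt.Printf("ChatStd: %.4f\n", cnyPer1kToRatio(0.01)) // 0.7143, matches the table
	fmt.Printf("ChatPro: %.3f\n", cnyPer1kToRatio(0.1))  // 7.143, matches the table
}
```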
```diff
@@ -131,40 +131,45 @@ func (p *BaiduProvider) convertToChatOpenai(response *BaiduChatResponse, request
 }
 
 func convertFromChatOpenai(request *types.ChatCompletionRequest) *BaiduChatRequest {
-	messages := make([]BaiduMessage, 0, len(request.Messages))
+	baiduChatRequest := &BaiduChatRequest{
+		Messages:        make([]BaiduMessage, 0, len(request.Messages)),
+		Temperature:     request.Temperature,
+		Stream:          request.Stream,
+		TopP:            request.TopP,
+		PenaltyScore:    request.FrequencyPenalty,
+		Stop:            request.Stop,
+		MaxOutputTokens: request.MaxTokens,
+	}
+
+	if request.ResponseFormat != nil {
+		baiduChatRequest.ResponseFormat = request.ResponseFormat.Type
+	}
 
 	for _, message := range request.Messages {
 		if message.Role == types.ChatMessageRoleSystem {
-			messages = append(messages, BaiduMessage{
-				Role:    types.ChatMessageRoleUser,
-				Content: message.StringContent(),
-			})
-			messages = append(messages, BaiduMessage{
-				Role:    types.ChatMessageRoleAssistant,
-				Content: "Okay",
-			})
+			baiduChatRequest.System = message.StringContent()
 			continue
 		} else if message.Role == types.ChatMessageRoleFunction {
-			messages = append(messages, BaiduMessage{
-				Role:    types.ChatMessageRoleAssistant,
-				Content: "Okay",
+			baiduChatRequest.Messages = append(baiduChatRequest.Messages, BaiduMessage{
+				Role: types.ChatMessageRoleAssistant,
+				FunctionCall: &types.ChatCompletionToolCallsFunction{
+					Name:      *message.Name,
+					Arguments: "{}",
+				},
 			})
-			messages = append(messages, BaiduMessage{
+			baiduChatRequest.Messages = append(baiduChatRequest.Messages, BaiduMessage{
 				Role:    types.ChatMessageRoleUser,
 				Content: "这是函数调用返回的内容,请回答之前的问题:\n" + message.StringContent(),
 			})
 		} else {
-			messages = append(messages, BaiduMessage{
+			baiduChatRequest.Messages = append(baiduChatRequest.Messages, BaiduMessage{
 				Role:    message.Role,
 				Content: message.StringContent(),
 			})
 		}
 	}
 
-	baiduChatRequest := &BaiduChatRequest{
-		Messages:    messages,
-		Temperature: request.Temperature,
-		Stream:      request.Stream,
-	}
-
 	if request.Tools != nil {
 		functions := make([]*types.ChatCompletionFunction, 0, len(request.Tools))
 		for _, tool := range request.Tools {
```
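The rewritten converter builds the `BaiduChatRequest` up front and forwards more OpenAI parameters (top_p, penalty score, stop, max tokens, response format). More importantly, a system message is now mapped to ERNIE's dedicated `system` field instead of being simulated as a fake user/assistant exchange, and a function-role message becomes an assistant turn carrying a `function_call` followed by a user turn whose Chinese prefix reads, roughly, "Here is the content returned by the function call; please answer the earlier question:". A minimal, self-contained sketch of the system-message part of that mapping (simplified stand-in types, not the repo's):

```go
package main

import "fmt"

// msg is a simplified stand-in for the real message types, just to
// illustrate the new mapping.
type msg struct{ Role, Content string }

// mapMessages mirrors the rewritten loop above: a system message is lifted
// into a request-level system field instead of being padded into the
// history as a fake user/assistant pair.
func mapMessages(in []msg) (system string, out []msg) {
	for _, m := range in {
		if m.Role == "system" {
			system = m.Content
			continue
		}
		out = append(out, m)
	}
	return system, out
}

func main() {
	system, out := mapMessages([]msg{
		{Role: "system", Content: "You are a helpful assistant."},
		{Role: "user", Content: "Hi"},
	})
	fmt.Printf("system=%q remaining=%d\n", system, len(out))
	// system="You are a helpful assistant." remaining=1
}
```

Lifting the system prompt into a request-level field matches how ERNIE's API expects it and keeps the message history free of synthetic turns.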
```diff
@@ -14,16 +14,24 @@ type BaiduAccessToken struct {
 }
 
 type BaiduMessage struct {
-	Role    string `json:"role"`
-	Content string `json:"content"`
+	Role         string                                 `json:"role"`
+	Content      string                                 `json:"content,omitempty"`
+	FunctionCall *types.ChatCompletionToolCallsFunction `json:"function_call,omitempty"`
 }
 
 type BaiduChatRequest struct {
-	Messages    []BaiduMessage                  `json:"messages"`
-	Functions   []*types.ChatCompletionFunction `json:"functions,omitempty"`
-	Temperature float64                         `json:"temperature,omitempty"`
-	Stream      bool                            `json:"stream"`
-	UserId      string                          `json:"user_id,omitempty"`
+	Messages        []BaiduMessage                  `json:"messages"`
+	Functions       []*types.ChatCompletionFunction `json:"functions,omitempty"`
+	Temperature     float64                         `json:"temperature,omitempty"`
+	TopP            float64                         `json:"top_p,omitempty"`
+	PenaltyScore    float64                         `json:"penalty_score,omitempty"`
+	Stream          bool                            `json:"stream"`
+	System          string                          `json:"system,omitempty"`
+	Stop            []string                        `json:"stop,omitempty"`
+	DisableSearch   bool                            `json:"disable_search,omitempty"`
+	MaxOutputTokens int                             `json:"max_output_tokens,omitempty"`
+	ResponseFormat  string                          `json:"response_format,omitempty"`
+	UserId          string                          `json:"user_id,omitempty"`
 }
 
 type BaiduChatResponse struct {
```
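Most of the new fields are tagged `omitempty`, so options the caller never set are dropped from the JSON payload entirely. One caveat worth knowing: for numeric fields such as `temperature`, `omitempty` also drops an explicit zero. A reduced sketch:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// req is a reduced version of BaiduChatRequest, just to show the
// omitempty behavior on the wire.
type req struct {
	Temperature float64 `json:"temperature,omitempty"`
	TopP        float64 `json:"top_p,omitempty"`
	Stream      bool    `json:"stream"`
}

func main() {
	b, _ := json.Marshal(req{Temperature: 0, TopP: 0.9, Stream: true})
	// temperature is omitted because omitempty drops zero values —
	// note this also hides a deliberately chosen temperature of 0.
	fmt.Println(string(b)) // {"top_p":0.9,"stream":true}
}
```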
```diff
@@ -75,6 +75,7 @@ func (p *TencentProvider) getChatRequest(request *types.ChatCompletionRequest) (
 	// Get the request headers
 	headers := p.GetRequestHeaders()
 	headers["Authorization"] = sign
+	headers["X-TC-Action"] = request.Model
 	if request.Stream {
 		headers["Accept"] = "text/event-stream"
 	}
```
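Tencent Cloud's API 3.0 dispatches a request to an operation via the `X-TC-Action` header, which is presumably what motivates the rename: `ChatStd` and `ChatPro` are the hunyuan API's action names, while `hunyuan` is not, so the model ID can now be passed straight through as the action. A minimal sketch of the resulting header set (placeholder values, not the repo's code):

```go
package main

import "fmt"

func main() {
	// Hypothetical values: the signature string would come from the
	// provider's TC3-HMAC-SHA256 signing step.
	headers := map[string]string{
		"Authorization": "TC3-HMAC-SHA256 ...", // placeholder for the computed signature
		"X-TC-Action":   "ChatStd",             // the model name doubles as the API action
		"Accept":        "text/event-stream",   // set only for streaming requests
	}
	fmt.Println(headers["X-TC-Action"])
}
```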
```diff
@@ -125,17 +126,6 @@ func convertFromChatOpenai(request *types.ChatCompletionRequest) *TencentChatReq
 	messages := make([]TencentMessage, 0, len(request.Messages))
 	for i := 0; i < len(request.Messages); i++ {
 		message := request.Messages[i]
-		if message.Role == "system" {
-			messages = append(messages, TencentMessage{
-				Role:    "user",
-				Content: message.StringContent(),
-			})
-			messages = append(messages, TencentMessage{
-				Role:    "assistant",
-				Content: "Okay",
-			})
-			continue
-		}
 		messages = append(messages, TencentMessage{
 			Content: message.StringContent(),
 			Role:    message.Role,
```
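Alongside the rename, the Tencent converter drops its special-casing of system messages: they are no longer rewritten into a simulated user/assistant pair and now fall through to the generic mapping with their original role.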
```diff
@@ -122,8 +122,8 @@ const typeConfig = {
   },
   23: {
     input: {
-      models: ['hunyuan'],
-      test_model: 'hunyuan'
+      models: ['ChatStd', 'ChatPro'],
+      test_model: 'ChatStd'
     },
     prompt: {
      key: '按照如下格式输入:AppId|SecretId|SecretKey'
```
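On the frontend, channel type 23 (Tencent) now advertises `ChatStd` and `ChatPro` and uses `ChatStd` as the connectivity-test model. The Chinese prompt string means "Enter in the following format: AppId|SecretId|SecretKey".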