diff --git a/common/constants.go b/common/constants.go
index 2a3cf446..28911a9f 100644
--- a/common/constants.go
+++ b/common/constants.go
@@ -178,49 +178,51 @@ const (
 	ChannelTypeStabilityAI = 37
 	ChannelTypeCoze        = 38
 	ChannelTypeOllama      = 39
+	ChannelTypeHunyuan     = 40
 )
 
 var ChannelBaseURLs = []string{
-	"", // 0
-	"https://api.openai.com", // 1
-	"https://oa.api2d.net", // 2
-	"", // 3
-	"https://api.closeai-proxy.xyz", // 4
-	"https://api.openai-sb.com", // 5
-	"https://api.openaimax.com", // 6
-	"https://api.ohmygpt.com", // 7
-	"", // 8
-	"https://api.caipacity.com", // 9
-	"https://api.aiproxy.io", // 10
-	"", // 11
-	"https://api.api2gpt.com", // 12
-	"https://api.aigc2d.com", // 13
-	"https://api.anthropic.com", // 14
-	"https://aip.baidubce.com", // 15
-	"https://open.bigmodel.cn", // 16
-	"https://dashscope.aliyuncs.com", // 17
-	"", // 18
-	"https://ai.360.cn", // 19
-	"https://openrouter.ai/api", // 20
-	"https://api.aiproxy.io", // 21
-	"https://fastgpt.run/api/openapi", // 22
-	"https://hunyuan.cloud.tencent.com", //23
-	"", //24
-	"", //25
-	"https://api.baichuan-ai.com", //26
-	"https://api.minimax.chat/v1", //27
-	"https://api.deepseek.com", //28
-	"https://api.moonshot.cn", //29
-	"https://api.mistral.ai", //30
-	"https://api.groq.com/openai", //31
-	"", //32
-	"https://api.lingyiwanwu.com", //33
-	"", //34
-	"", //35
-	"https://api.cohere.ai/v1", //36
-	"https://api.stability.ai/v2beta", //37
-	"https://api.coze.com/open_api", //38
-	"", //39
+	"", // 0
+	"https://api.openai.com", // 1
+	"https://oa.api2d.net", // 2
+	"", // 3
+	"https://api.closeai-proxy.xyz", // 4
+	"https://api.openai-sb.com", // 5
+	"https://api.openaimax.com", // 6
+	"https://api.ohmygpt.com", // 7
+	"", // 8
+	"https://api.caipacity.com", // 9
+	"https://api.aiproxy.io", // 10
+	"", // 11
+	"https://api.api2gpt.com", // 12
+	"https://api.aigc2d.com", // 13
+	"https://api.anthropic.com", // 14
+	"https://aip.baidubce.com", // 15
+	"https://open.bigmodel.cn", // 16
+	"https://dashscope.aliyuncs.com", // 17
+	"", // 18
+	"https://ai.360.cn", // 19
+	"https://openrouter.ai/api", // 20
+	"https://api.aiproxy.io", // 21
+	"https://fastgpt.run/api/openapi", // 22
+	"https://hunyuan.cloud.tencent.com", //23
+	"", //24
+	"", //25
+	"https://api.baichuan-ai.com", //26
+	"https://api.minimax.chat/v1", //27
+	"https://api.deepseek.com", //28
+	"https://api.moonshot.cn", //29
+	"https://api.mistral.ai", //30
+	"https://api.groq.com/openai", //31
+	"", //32
+	"https://api.lingyiwanwu.com", //33
+	"", //34
+	"", //35
+	"https://api.cohere.ai/v1", //36
+	"https://api.stability.ai/v2beta", //37
+	"https://api.coze.com/open_api", //38
+	"", //39
+	"https://hunyuan.tencentcloudapi.com", //40
 }
 
 const (
diff --git a/model/price.go b/model/price.go
index 211a8a87..c1f42123 100644
--- a/model/price.go
+++ b/model/price.go
@@ -313,6 +313,12 @@ func GetDefaultPrice() []*Price {
 		"sd3-turbo":          {[]float64{20, 20}, common.ChannelTypeStabilityAI}, // 0.03
 		"stable-image-core":  {[]float64{15, 15}, common.ChannelTypeStabilityAI},
+
+		// hunyuan
+		"hunyuan-lite":          {[]float64{0, 0}, common.ChannelTypeHunyuan},
+		"hunyuan-standard":      {[]float64{0.3214, 0.3571}, common.ChannelTypeHunyuan},
+		"hunyuan-standard-256K": {[]float64{1.0714, 4.2857}, common.ChannelTypeHunyuan},
+		"hunyuan-pro":           {[]float64{2.1429, 7.1429}, common.ChannelTypeHunyuan},
 	}
 
 	var prices []*Price
diff --git a/providers/hunyuan/base.go b/providers/hunyuan/base.go
new file mode 100644
index 00000000..3c16d56d
--- /dev/null
+++ b/providers/hunyuan/base.go
@@ -0,0 +1,79 @@
+package hunyuan
+
+import (
+	"encoding/json"
+	"errors"
+	"net/http"
+	"one-api/common/requester"
+	"one-api/model"
+	"one-api/providers/base"
+	"one-api/types"
+	"strings"
+)
+
+type HunyuanProviderFactory struct{}
+
+// Create a HunyuanProvider
+func (f HunyuanProviderFactory) Create(channel *model.Channel) base.ProviderInterface {
+	return &HunyuanProvider{
+		BaseProvider: base.BaseProvider{
+			Config:    getConfig(),
+			Channel:   channel,
+			Requester: requester.NewHTTPRequester(*channel.Proxy, requestErrorHandle),
+		},
+	}
+}
+
+type HunyuanProvider struct {
+	base.BaseProvider
+}
+
+func getConfig() base.ProviderConfig {
+	return base.ProviderConfig{
+		BaseURL:         "https://hunyuan.tencentcloudapi.com",
+		ChatCompletions: "ChatCompletions",
+	}
+}
+
+// Handle request errors
+func requestErrorHandle(resp *http.Response) *types.OpenAIError {
+	hunyuanError := &HunyuanResponseError{}
+	err := json.NewDecoder(resp.Body).Decode(hunyuanError)
+	if err != nil {
+		return nil
+	}
+
+	return errorHandle(hunyuanError)
+}
+
+// Convert a Hunyuan error into an OpenAI error
+func errorHandle(hunyuanError *HunyuanResponseError) *types.OpenAIError {
+	if hunyuanError.Error == nil {
+		return nil
+	}
+	return &types.OpenAIError{
+		Message: hunyuanError.Error.Message,
+		Type:    "hunyuan_error",
+		Code:    hunyuanError.Error.Code,
+	}
+}
+
+// Get request headers
+func (p *HunyuanProvider) GetRequestHeaders() (headers map[string]string) {
+	headers = make(map[string]string)
+	p.CommonRequestHeaders(headers)
+
+	return headers
+}
+
+func (p *HunyuanProvider) parseHunyuanConfig(config string) (secretId string, secretKey string, err error) {
+	parts := strings.Split(config, "|")
+	if len(parts) != 2 {
+		err = errors.New("invalid hunyuan config")
+		return
+	}
+
+	secretId = parts[0]
+	secretKey = parts[1]
+	return
+}
diff --git a/providers/hunyuan/chat.go b/providers/hunyuan/chat.go
new file mode 100644
index 00000000..d5be9f06
--- /dev/null
+++ b/providers/hunyuan/chat.go
@@ -0,0 +1,181 @@
+package hunyuan
+
+import (
+	"encoding/json"
+	"net/http"
+	"one-api/common"
+	"one-api/common/requester"
+	"one-api/types"
+	"strings"
+)
+
+type hunyuanStreamHandler struct {
+	Usage   *types.Usage
+	Request *types.ChatCompletionRequest
+}
+
+func (p *HunyuanProvider) CreateChatCompletion(request *types.ChatCompletionRequest) (*types.ChatCompletionResponse, *types.OpenAIErrorWithStatusCode) {
+	req, errWithCode := p.getChatRequest(request)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+	defer req.Body.Close()
+
+	hunyuanChatResponse := &ChatCompletionsResponse{}
+	// Send the request
+	_, errWithCode = p.Requester.SendRequest(req, hunyuanChatResponse, false)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+
+	return p.convertToChatOpenai(hunyuanChatResponse, request)
+}
+
+func (p *HunyuanProvider) CreateChatCompletionStream(request *types.ChatCompletionRequest) (requester.StreamReaderInterface[string], *types.OpenAIErrorWithStatusCode) {
+	req, errWithCode := p.getChatRequest(request)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+	defer req.Body.Close()
+
+	// Send the request
+	resp, errWithCode := p.Requester.SendRequestRaw(req)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+
+	chatHandler := &hunyuanStreamHandler{
+		Usage:   p.Usage,
+		Request: request,
+	}
+
+	return requester.RequestStream[string](p.Requester, resp, chatHandler.handlerStream)
+}
+
+func (p *HunyuanProvider) getChatRequest(request *types.ChatCompletionRequest) (*http.Request, *types.OpenAIErrorWithStatusCode) {
+	action, errWithCode := p.GetSupportedAPIUri(common.RelayModeChatCompletions)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+
+	hunyuanRequest := convertFromChatOpenai(request)
+	req, errWithCode := p.sign(hunyuanRequest, action, http.MethodPost)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+
+	return req, nil
+}
+
+func (p *HunyuanProvider) convertToChatOpenai(response *ChatCompletionsResponse, request *types.ChatCompletionRequest) (openaiResponse *types.ChatCompletionResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
+	openaiErr := errorHandle(&response.Response.HunyuanResponseError)
+	if openaiErr != nil {
+		errWithCode = &types.OpenAIErrorWithStatusCode{
+			OpenAIError: *openaiErr,
+			StatusCode:  http.StatusBadRequest,
+		}
+		return
+	}
+
+	txResponse := response.Response
+
+	openaiResponse = &types.ChatCompletionResponse{
+		Object:  "chat.completion",
+		Created: txResponse.Created,
+		Usage: &types.Usage{
+			PromptTokens:     txResponse.Usage.PromptTokens,
+			CompletionTokens: txResponse.Usage.CompletionTokens,
+			TotalTokens:      txResponse.Usage.TotalTokens,
+		},
+		Model: request.Model,
+	}
+
+	for _, choice := range txResponse.Choices {
+		openaiResponse.Choices = append(openaiResponse.Choices, types.ChatCompletionChoice{
+			Index:        0,
+			Message:      types.ChatCompletionMessage{Role: choice.Message.Role, Content: choice.Message.Content},
+			FinishReason: choice.FinishReason,
+		})
+
+	}
+
+	*p.Usage = *openaiResponse.Usage
+
+	return
+}
+
+func convertFromChatOpenai(request *types.ChatCompletionRequest) *ChatCompletionsRequest {
+	request.ClearEmptyMessages()
+
+	messages := make([]*Message, 0, len(request.Messages))
+	for _, message := range request.Messages {
+		messages = append(messages, &Message{
+			Content: message.StringContent(),
+			Role:    message.Role,
+		})
+	}
+
+	return &ChatCompletionsRequest{
+		Model:       request.Model,
+		Messages:    messages,
+		Stream:      request.Stream,
+		TopP:        &request.TopP,
+		Temperature: &request.Temperature,
+	}
+}
+
+// Convert the streamed response into OpenAI chat completion chunks
+func (h *hunyuanStreamHandler) handlerStream(rawLine *[]byte, dataChan chan string, errChan chan error) {
+	// Return immediately if rawLine does not start with "data:"
+	if !strings.HasPrefix(string(*rawLine), "data:") {
+		*rawLine = nil
+		return
+	}
+
+	// Strip the "data:" prefix
+	*rawLine = (*rawLine)[5:]
+
+	var hunyuanChatResponse ChatCompletionsResponseParams
+	err := json.Unmarshal(*rawLine, &hunyuanChatResponse)
+	if err != nil {
+		errChan <- common.ErrorToOpenAIError(err)
+		return
+	}
+
+	openaiErr := errorHandle(&hunyuanChatResponse.HunyuanResponseError)
+	if openaiErr != nil {
+		errChan <- openaiErr
+		return
+	}
+
+	h.convertToOpenaiStream(&hunyuanChatResponse, dataChan)
+
+}
+
+func (h *hunyuanStreamHandler) convertToOpenaiStream(hunyuanChatResponse *ChatCompletionsResponseParams, dataChan chan string) {
+	streamResponse := types.ChatCompletionStreamResponse{
+		Object:  "chat.completion.chunk",
+		Created: hunyuanChatResponse.Created,
+		Model:   h.Request.Model,
+	}
+
+	for _, choice := range hunyuanChatResponse.Choices {
+		streamResponse.Choices = append(streamResponse.Choices, types.ChatCompletionStreamChoice{
+			FinishReason: choice.FinishReason,
+			Delta: types.ChatCompletionStreamChoiceDelta{
+				Role:    choice.Delta.Role,
+				Content: choice.Delta.Content,
+			},
+			Index: 0,
+		})
+	}
+
+	responseBody, _ := json.Marshal(streamResponse)
+	dataChan <- string(responseBody)
+
+	*h.Usage = types.Usage{
+		PromptTokens:     hunyuanChatResponse.Usage.PromptTokens,
+		CompletionTokens: hunyuanChatResponse.Usage.CompletionTokens,
+		TotalTokens:      hunyuanChatResponse.Usage.TotalTokens,
+	}
+}
diff --git a/providers/hunyuan/sign.go b/providers/hunyuan/sign.go
new file mode 100644
index 00000000..ee1bb60c
--- /dev/null
+++ b/providers/hunyuan/sign.go
@@ -0,0 +1,101 @@
+package hunyuan
+
+import (
+	"crypto/hmac"
+	"crypto/sha256"
+	"encoding/hex"
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"one-api/common"
+	"one-api/types"
+	"strconv"
+	"strings"
+	"time"
+)
+
+func sha256hex(s string) string {
+	b := sha256.Sum256([]byte(s))
+	return hex.EncodeToString(b[:])
+}
+
+func hmacsha256(s, key string) string {
+	hashed := hmac.New(sha256.New, []byte(key))
+	hashed.Write([]byte(s))
+	return string(hashed.Sum(nil))
+}
+
+func (p *HunyuanProvider) sign(body any, action, method string) (*http.Request, *types.OpenAIErrorWithStatusCode) {
+	service := "hunyuan"
+	version := "2023-09-01"
+	region := ""
+	host := strings.Replace(p.GetBaseURL(), "https://", "", 1)
+	algorithm := "TC3-HMAC-SHA256"
+	var timestamp = time.Now().Unix()
+
+	secretId, secretKey, err := p.parseHunyuanConfig(p.Channel.Key)
+	if err != nil {
+		return nil, common.ErrorWrapper(err, "get_hunyuan_secret_failed", http.StatusInternalServerError)
+	}
+
+	// ************* Step 1: build the canonical request string *************
+	contentType := "application/json; charset=utf-8"
+	canonicalHeaders := fmt.Sprintf("content-type:%s\nhost:%s\nx-tc-action:%s\n",
+		contentType, host, strings.ToLower(action))
+	signedHeaders := "content-type;host;x-tc-action"
+	payloadJson, _ := json.Marshal(body)
+	payloadStr := string(payloadJson)
+
+	hashedRequestPayload := sha256hex(payloadStr)
+	canonicalRequest := fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s",
+		method,
+		"/",
+		"",
+		canonicalHeaders,
+		signedHeaders,
+		hashedRequestPayload)
+
+	// ************* Step 2: build the string to sign *************
+	date := time.Unix(timestamp, 0).UTC().Format("2006-01-02")
+	credentialScope := fmt.Sprintf("%s/%s/tc3_request", date, service)
+	hashedCanonicalRequest := sha256hex(canonicalRequest)
+	string2sign := fmt.Sprintf("%s\n%d\n%s\n%s",
+		algorithm,
+		timestamp,
+		credentialScope,
+		hashedCanonicalRequest)
+
+	// ************* Step 3: calculate the signature *************
+	secretDate := hmacsha256(date, "TC3"+secretKey)
+	secretService := hmacsha256(service, secretDate)
+	secretSigning := hmacsha256("tc3_request", secretService)
+	signature := hex.EncodeToString([]byte(hmacsha256(string2sign, secretSigning)))
+
+	// ************* Step 4: build the Authorization header *************
+	authorization := fmt.Sprintf("%s Credential=%s/%s, SignedHeaders=%s, Signature=%s",
+		algorithm,
+		secretId,
+		credentialScope,
+		signedHeaders,
+		signature)
+
+	// ************* Step 5: build and send the request *************
+	headers := map[string]string{
+		"Host":           host,
+		"X-TC-Action":    action,
+		"X-TC-Version":   version,
+		"X-TC-Timestamp": strconv.FormatInt(timestamp, 10),
+		"Content-Type":   contentType,
+		"Authorization":  authorization,
+	}
+	if region != "" {
+		headers["X-TC-Region"] = region
+	}
+
+	req, err := p.Requester.NewRequest(method, p.GetBaseURL(), p.Requester.WithBody(body), p.Requester.WithHeader(headers))
+	if err != nil {
+		return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
+	}
+
+	return req, nil
+}
diff --git a/providers/hunyuan/type.go b/providers/hunyuan/type.go
new file mode 100644
index 00000000..87fd43a6
--- /dev/null
+++ b/providers/hunyuan/type.go
@@ -0,0 +1,87 @@
+package hunyuan
+
+type HunyuanError struct {
+	Message string `json:"Message,omitempty" name:"Message"`
+	Code    string `json:"Code,omitempty" name:"Code"`
+}
+
+type ChatCompletionsRequest struct {
+	Model             string     `json:"Model,omitempty" name:"Model"`
+	Messages          []*Message `json:"Messages,omitempty" name:"Messages"`
+	Stream            bool       `json:"Stream,omitempty" name:"Stream"`
+	StreamModeration  *bool      `json:"StreamModeration,omitempty" name:"StreamModeration"`
+	TopP              *float64   `json:"TopP,omitempty" name:"TopP"`
+	Temperature       *float64   `json:"Temperature,omitempty" name:"Temperature"`
+	EnableEnhancement *bool      `json:"EnableEnhancement,omitempty" name:"EnableEnhancement"`
+}
+
+type Message struct {
+	Role    string `json:"Role,omitempty" name:"Role"`
+	Content string `json:"Content,omitempty" name:"Content"`
+}
+
+type ChatCompletionsResponse struct {
+	Response *ChatCompletionsResponseParams `json:"Response"`
+}
+
+type ChatCompletionsResponseParams struct {
+	// Unix timestamp, in seconds.
+	Created *int64 `json:"Created,omitempty" name:"Created"`
+
+	// Token usage statistics.
+	// Billing is based on the total number of tokens.
+	Usage *HunyuanUsage `json:"Usage,omitempty" name:"Usage"`
+
+	// Disclaimer.
+	Note *string `json:"Note,omitempty" name:"Note"`
+
+	// ID of this conversation turn.
+	Id *string `json:"Id,omitempty" name:"Id"`
+
+	// Reply content.
+	Choices []*Choice `json:"Choices,omitempty" name:"Choices"`
+
+	// Unique request ID generated by the server and returned for every request (if a request never reaches the server, no RequestId is issued). Provide this RequestId when reporting a problem. For this streaming API, the RequestId of a successful request is returned in the HTTP response header "X-TC-RequestId".
+	RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
+	HunyuanResponseError
+}
+
+type Choice struct {
+	// Finish flag, either stop or sensitive.
+	// stop means the output ended normally; sensitive only appears when stream moderation is enabled and means the content failed the safety review.
+	FinishReason *string `json:"FinishReason,omitempty" name:"FinishReason"`
+
+	// Incremental return value, used by streaming calls.
+	// Note: this field may be null, meaning no valid value could be obtained.
+	Delta *Delta `json:"Delta,omitempty" name:"Delta"`
+
+	// Full return value, used by non-streaming calls.
+	// Note: this field may be null, meaning no valid value could be obtained.
+	Message *Message `json:"Message,omitempty" name:"Message"`
+}
+
+type Delta struct {
+	// Role name.
+	Role string `json:"Role,omitempty" name:"Role"`
+
+	// Content details.
+	Content string `json:"Content,omitempty" name:"Content"`
+}
+
+type HunyuanUsage struct {
+	// Number of prompt (input) tokens.
+	PromptTokens int `json:"PromptTokens,omitempty" name:"PromptTokens"`
+
+	// Number of completion (output) tokens.
+	CompletionTokens int `json:"CompletionTokens,omitempty" name:"CompletionTokens"`
+
+	// Total number of tokens.
+	TotalTokens int `json:"TotalTokens,omitempty" name:"TotalTokens"`
+}
+
+type HunyuanResponseError struct {
+	// Error information.
+	// Returned when the service hits an error while processing a streaming response.
+	// Note: this field may be null, meaning no valid value could be obtained.
+	Error *HunyuanError `json:"Error,omitempty" name:"Error"`
+}
diff --git a/providers/providers.go b/providers/providers.go
index e9bce9bb..fdbe6ad8 100644
--- a/providers/providers.go
+++ b/providers/providers.go
@@ -17,6 +17,7 @@ import (
 	"one-api/providers/deepseek"
 	"one-api/providers/gemini"
 	"one-api/providers/groq"
+	"one-api/providers/hunyuan"
 	"one-api/providers/lingyi"
 	"one-api/providers/midjourney"
 	"one-api/providers/minimax"
@@ -68,6 +69,7 @@ func init() {
 	providerFactories[common.ChannelTypeOllama] = ollama.OllamaProviderFactory{}
 	providerFactories[common.ChannelTypeMoonshot] = moonshot.MoonshotProviderFactory{}
 	providerFactories[common.ChannelTypeLingyi] = lingyi.LingyiProviderFactory{}
+	providerFactories[common.ChannelTypeHunyuan] = hunyuan.HunyuanProviderFactory{}
 }
diff --git a/relay/util/type.go b/relay/util/type.go
index 0ab72196..6794ca16 100644
--- a/relay/util/type.go
+++ b/relay/util/type.go
@@ -28,6 +28,8 @@ func init() {
 		common.ChannelTypeCloudflareAI: "Cloudflare AI",
 		common.ChannelTypeCohere:       "Cohere",
 		common.ChannelTypeStabilityAI:  "Stability AI",
+		common.ChannelTypeCoze:         "Coze",
 		common.ChannelTypeOllama:       "Ollama",
+		common.ChannelTypeHunyuan:      "Hunyuan",
 	}
 }
diff --git a/web/src/constants/ChannelConstants.js b/web/src/constants/ChannelConstants.js
index 1998f6f3..104a042c 100644
--- a/web/src/constants/ChannelConstants.js
+++ b/web/src/constants/ChannelConstants.js
@@ -71,7 +71,7 @@ export const CHANNEL_OPTIONS = {
   },
   23: {
     key: 23,
-    text: '腾讯混元',
+    text: '腾讯混元(旧)',
     value: 23,
     color: 'default',
     url: 'https://cloud.tencent.com/product/hunyuan'
@@ -174,6 +174,13 @@ export const CHANNEL_OPTIONS = {
     color: 'orange',
     url: ''
   },
+  40: {
+    key: 40,
+    text: '腾讯混元',
+    value: 40,
+    color: 'default',
+    url: 'https://cloud.tencent.com/product/hunyuan'
+  },
   24: {
     key: 24,
     text: 'Azure Speech',
diff --git a/web/src/views/Channel/type/Config.js b/web/src/views/Channel/type/Config.js
index 5d6c8057..f58b0d44 100644
--- a/web/src/views/Channel/type/Config.js
+++ b/web/src/views/Channel/type/Config.js
@@ -356,6 +356,16 @@ const typeConfig = {
       base_url: '请输入你部署的Ollama地址,例如:http://127.0.0.1:11434,如果你使用了cloudflare Zero Trust,可以在下方插件填入授权信息',
       key: '请随意填写'
     }
-  }
+  },
+  40: {
+    input: {
+      models: ['hunyuan-lite', 'hunyuan-pro', 'hunyuan-standard-256K', 'hunyuan-standard'],
+      test_model: 'hunyuan-lite'
+    },
+    prompt: {
+      key: '按照如下格式输入:SecretId|SecretKey'
+    },
+    modelGroup: 'Hunyuan'
+  }
 };