feat: support zhipu new API (#44)

Buer 2024-01-19 22:14:30 +08:00 committed by GitHub
parent 0be687905f
commit b42d4d44aa
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 344 additions and 128 deletions

View File

@@ -78,6 +78,11 @@ func init() {
"chatglm_pro": {0.7143, ChannelTypeZhipu}, // ¥0.01 / 1k tokens
"chatglm_std": {0.3572, ChannelTypeZhipu}, // ¥0.005 / 1k tokens
"chatglm_lite": {0.1429, ChannelTypeZhipu}, // ¥0.002 / 1k tokens
"glm-3-turbo": {0.3572, ChannelTypeZhipu}, // ¥0.005 / 1k tokens
"glm-4": {0.7143, ChannelTypeZhipu}, // ¥0.01 / 1k tokens
"glm-4v": {0.7143, ChannelTypeZhipu}, // ¥0.01 / 1k tokens
"embedding-2": {0.0357, ChannelTypeZhipu}, // ¥0.0005 / 1k tokens
"cogview-3": {17.8571, ChannelTypeZhipu}, // ¥0.25 / 1张图片
"qwen-turbo": {0.5715, ChannelTypeAli}, // ¥0.008 / 1k tokens // https://help.aliyun.com/zh/dashscope/developer-reference/tongyi-thousand-questions-metering-and-billing
"qwen-plus": {1.4286, ChannelTypeAli}, // ¥0.02 / 1k tokens
"qwen-max": {1.4286, ChannelTypeAli}, // ¥0.02 / 1k tokens

View File

@@ -231,7 +231,8 @@ func calculateToken(model string, size string, n int, quality string) (int, error) {
}
}
} else {
return 0, errors.New("size not supported for this image model")
imageCostRatio = 1
// return 0, errors.New("size not supported for this image model")
}
return int(imageCostRatio*1000) * n, nil
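With this change, an unrecognized image size falls back to the base cost ratio of 1 instead of failing the request, and the quota is still int(imageCostRatio*1000) * n. A minimal sketch of the fallback path, using a hypothetical size-to-ratio table rather than the real calculateToken:

```go
package main

import "fmt"

// Hypothetical size-to-ratio table, for illustration only.
var imageSizeRatios = map[string]float64{
	"1024x1024": 1,
	"1792x1024": 2,
}

func imageQuota(size string, n int) int {
	imageCostRatio, ok := imageSizeRatios[size]
	if !ok {
		// Previously: return 0, errors.New("size not supported for this image model")
		imageCostRatio = 1
	}
	return int(imageCostRatio*1000) * n
}

func main() {
	fmt.Println(imageQuota("1024x1024", 2)) // 2000
	fmt.Println(imageQuota("512x512", 1))   // 1000 (fallback instead of an error)
}
```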

View File

@@ -41,7 +41,7 @@ func getConfig() base.ProviderConfig {
// Request error handling
func requestErrorHandle(resp *http.Response) *types.OpenAIError {
var aliError *AliError
aliError := &AliError{}
err := json.NewDecoder(resp.Body).Decode(aliError)
if err != nil {
return nil
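The same one-line fix is applied to every provider below: json.Decode into a nil typed pointer returns an InvalidUnmarshalError, so the upstream error body was silently dropped; allocating the struct first lets the decode succeed. A self-contained demonstration with a stand-in error struct (not the real AliError):

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// providerError is a stand-in for the provider error structs in this commit.
type providerError struct {
	Code    string `json:"code"`
	Message string `json:"message"`
}

func main() {
	body := `{"code":"InvalidParameter","message":"bad request"}`

	// Before: a nil typed pointer – Decode reports an InvalidUnmarshalError,
	// so the handler returned nil and the upstream error was lost.
	var nilPtr *providerError
	err := json.NewDecoder(strings.NewReader(body)).Decode(nilPtr)
	fmt.Println(err) // json: Unmarshal(nil *main.providerError)

	// After: allocate the struct first, and the decode succeeds.
	allocated := &providerError{}
	err = json.NewDecoder(strings.NewReader(body)).Decode(allocated)
	fmt.Println(err, allocated.Code, allocated.Message) // <nil> InvalidParameter bad request
}
```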

View File

@@ -44,7 +44,7 @@ func getConfig() base.ProviderConfig {
// Request error handling
func requestErrorHandle(resp *http.Response) *types.OpenAIError {
var baiduError *BaiduError
baiduError := &BaiduError{}
err := json.NewDecoder(resp.Body).Decode(baiduError)
if err != nil {
return nil

View File

@@ -35,7 +35,7 @@ func getConfig() base.ProviderConfig {
// Request error handling
func requestErrorHandle(resp *http.Response) *types.OpenAIError {
var claudeError *ClaudeResponseError
claudeError := &ClaudeResponseError{}
err := json.NewDecoder(resp.Body).Decode(claudeError)
if err != nil {
return nil

View File

@@ -37,7 +37,7 @@ func getConfig() base.ProviderConfig {
// Request error handling
func requestErrorHandle(resp *http.Response) *types.OpenAIError {
var geminiError *GeminiErrorResponse
geminiError := &GeminiErrorResponse{}
err := json.NewDecoder(resp.Body).Decode(geminiError)
if err != nil {
return nil

View File

@@ -62,7 +62,7 @@ func getOpenAIConfig(baseURL string) base.ProviderConfig {
// Request error handling
func RequestErrorHandle(resp *http.Response) *types.OpenAIError {
var errorResponse *types.OpenAIErrorResponse
errorResponse := &types.OpenAIErrorResponse{}
err := json.NewDecoder(resp.Body).Decode(errorResponse)
if err != nil {
return nil

View File

@@ -37,7 +37,7 @@ func getConfig() base.ProviderConfig {
// Request error handling
func requestErrorHandle(resp *http.Response) *types.OpenAIError {
var palmError *PaLMErrorResponse
palmError := &PaLMErrorResponse{}
err := json.NewDecoder(resp.Body).Decode(palmError)
if err != nil {
return nil

View File

@@ -43,7 +43,7 @@ func getConfig() base.ProviderConfig {
// Request error handling
func requestErrorHandle(resp *http.Response) *types.OpenAIError {
var tencentError *TencentResponseError
tencentError := &TencentResponseError{}
err := json.NewDecoder(resp.Body).Decode(tencentError)
if err != nil {
return nil

View File

@@ -38,29 +38,31 @@ type ZhipuProvider struct {
func getConfig() base.ProviderConfig {
return base.ProviderConfig{
BaseURL: "https://open.bigmodel.cn",
ChatCompletions: "/api/paas/v3/model-api",
BaseURL: "https://open.bigmodel.cn/api/paas/v4",
ChatCompletions: "/chat/completions",
Embeddings: "/embeddings",
ImagesGenerations: "/images/generations",
}
}
// Request error handling
func requestErrorHandle(resp *http.Response) *types.OpenAIError {
var zhipuError *ZhipuResponse
zhipuError := &ZhipuResponseError{}
err := json.NewDecoder(resp.Body).Decode(zhipuError)
if err != nil {
return nil
}
return errorHandle(zhipuError)
return errorHandle(&zhipuError.Error)
}
// Error handling
func errorHandle(zhipuError *ZhipuResponse) *types.OpenAIError {
if zhipuError.Success {
func errorHandle(zhipuError *ZhipuError) *types.OpenAIError {
if zhipuError.Message == "" {
return nil
}
return &types.OpenAIError{
Message: zhipuError.Msg,
Message: zhipuError.Message,
Type: "zhipu_error",
Code: zhipuError.Code,
}
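The new errorHandle checks for a non-empty Message in the v4 error object instead of the old Success flag. A minimal, self-contained sketch of decoding that envelope; the field layout mirrors the ZhipuResponseError and ZhipuError structs added later in this diff, and the payload itself is hypothetical:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type zhipuError struct {
	Code    string `json:"code"`
	Message string `json:"message"`
}

type zhipuResponseError struct {
	Error zhipuError `json:"error,omitempty"`
}

func main() {
	body := []byte(`{"error":{"code":"1002","message":"invalid api key"}}`) // hypothetical payload
	var resp zhipuResponseError
	if err := json.Unmarshal(body, &resp); err != nil {
		panic(err)
	}
	// Mirrors errorHandle: an empty Message means the response carried no error.
	if resp.Error.Message != "" {
		fmt.Printf("zhipu_error: code=%s message=%s\n", resp.Error.Code, resp.Error.Message)
	}
}
```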
@@ -79,7 +81,7 @@ func (p *ZhipuProvider) GetRequestHeaders() (headers map[string]string) {
func (p *ZhipuProvider) GetFullRequestURL(requestURL string, modelName string) string {
baseURL := strings.TrimSuffix(p.GetBaseURL(), "/")
return fmt.Sprintf("%s%s/%s", baseURL, requestURL, modelName)
return fmt.Sprintf("%s%s", baseURL, requestURL)
}
func (p *ZhipuProvider) getZhipuToken() string {
@@ -129,3 +131,24 @@ func (p *ZhipuProvider) getZhipuToken() string {
return tokenString
}
func convertRole(roleName string) string {
switch roleName {
case types.ChatMessageRoleFunction:
return types.ChatMessageRoleTool
case types.ChatMessageRoleTool, types.ChatMessageRoleSystem, types.ChatMessageRoleAssistant:
return roleName
default:
return types.ChatMessageRoleUser
}
}
func convertTopP(topP float64) float64 {
// Clamp topP to (0, 1): values of 0 or below become 0.1, values of 1 or above become 0.9
if topP <= 0 {
return 0.1
} else if topP >= 1 {
return 0.9
}
return topP
}
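Two behavioural notes from this file, shown as a runnable sketch: the v4 endpoint is now a fixed path (the model name moved into the JSON body), and convertTopP clamps top_p into the open interval (0, 1). The convertTopP copy below is standalone, for illustration only:

```go
package main

import "fmt"

// Standalone copy of convertTopP, for illustration.
func convertTopP(topP float64) float64 {
	if topP <= 0 {
		return 0.1
	} else if topP >= 1 {
		return 0.9
	}
	return topP
}

func main() {
	// The model name is no longer part of the URL; it travels in the JSON body.
	const baseURL = "https://open.bigmodel.cn/api/paas/v4"
	fmt.Println(baseURL + "/chat/completions") // https://open.bigmodel.cn/api/paas/v4/chat/completions

	fmt.Println(convertTopP(0))   // 0.1
	fmt.Println(convertTopP(1.5)) // 0.9
	fmt.Println(convertTopP(0.7)) // 0.7
}
```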

View File

@@ -2,7 +2,6 @@ package zhipu
import (
"encoding/json"
"fmt"
"net/http"
"one-api/common"
"one-api/common/requester"
@@ -62,19 +61,14 @@ func (p *ZhipuProvider) getChatRequest(request *types.ChatCompletionRequest) (*h
// Build the request URL
fullRequestURL := p.GetFullRequestURL(url, request.Model)
if fullRequestURL == "" {
return nil, common.ErrorWrapper(nil, "invalid_baidu_config", http.StatusInternalServerError)
return nil, common.ErrorWrapper(nil, "invalid_zhipu_config", http.StatusInternalServerError)
}
// Get the request headers
headers := p.GetRequestHeaders()
if request.Stream {
headers["Accept"] = "text/event-stream"
fullRequestURL += "/sse-invoke"
} else {
fullRequestURL += "/invoke"
}
zhipuRequest := convertFromChatOpenai(request)
// Create the request
req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(zhipuRequest), p.Requester.WithHeader(headers))
if err != nil {
@@ -85,7 +79,7 @@ func (p *ZhipuProvider) getChatRequest(request *types.ChatCompletionRequest) (*h
}
func (p *ZhipuProvider) convertToChatOpenai(response *ZhipuResponse, request *types.ChatCompletionRequest) (openaiResponse *types.ChatCompletionResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
error := errorHandle(response)
error := errorHandle(&response.Error)
if error != nil {
errWithCode = &types.OpenAIErrorWithStatusCode{
OpenAIError: *error,
@@ -95,114 +89,110 @@ func (p *ZhipuProvider) convertToChatOpenai(response *ZhipuResponse, request *ty
}
openaiResponse = &types.ChatCompletionResponse{
ID: response.Data.TaskId,
ID: response.ID,
Object: "chat.completion",
Created: common.GetTimestamp(),
Model: request.Model,
Choices: make([]types.ChatCompletionChoice, 0, len(response.Data.Choices)),
Usage: &response.Data.Usage,
}
for i, choice := range response.Data.Choices {
openaiChoice := types.ChatCompletionChoice{
Index: i,
Message: types.ChatCompletionMessage{
Role: choice.Role,
Content: strings.Trim(choice.Content, "\""),
},
FinishReason: "",
}
if i == len(response.Data.Choices)-1 {
openaiChoice.FinishReason = "stop"
}
openaiResponse.Choices = append(openaiResponse.Choices, openaiChoice)
Created: response.Created,
Model: response.Model,
Choices: response.Choices,
Usage: response.Usage,
}
*p.Usage = response.Data.Usage
*p.Usage = *response.Usage
return
}
func convertFromChatOpenai(request *types.ChatCompletionRequest) *ZhipuRequest {
messages := make([]ZhipuMessage, 0, len(request.Messages))
for _, message := range request.Messages {
if message.Role == "system" {
messages = append(messages, ZhipuMessage{
Role: "system",
Content: message.StringContent(),
for i := range request.Messages {
request.Messages[i].Role = convertRole(request.Messages[i].Role)
}
zhipuRequest := &ZhipuRequest{
Model: request.Model,
Messages: request.Messages,
Stream: request.Stream,
Temperature: request.Temperature,
TopP: convertTopP(request.TopP),
MaxTokens: request.MaxTokens,
Stop: request.Stop,
ToolChoice: request.ToolChoice,
}
if request.Functions != nil {
zhipuRequest.Tools = make([]ZhipuTool, 0, len(request.Functions))
for _, function := range request.Functions {
zhipuRequest.Tools = append(zhipuRequest.Tools, ZhipuTool{
Type: "function",
Function: *function,
})
messages = append(messages, ZhipuMessage{
Role: "user",
Content: "Okay",
})
} else {
messages = append(messages, ZhipuMessage{
Role: message.Role,
Content: message.StringContent(),
}
} else if request.Tools != nil {
zhipuRequest.Tools = make([]ZhipuTool, 0, len(request.Tools))
for _, tool := range request.Tools {
zhipuRequest.Tools = append(zhipuRequest.Tools, ZhipuTool{
Type: "function",
Function: tool.Function,
})
}
}
return &ZhipuRequest{
Prompt: messages,
Temperature: request.Temperature,
TopP: request.TopP,
Incremental: false,
}
return zhipuRequest
}
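convertFromChatOpenai now forwards OpenAI messages mostly as-is, only remapping roles and wrapping legacy Functions (or Tools) into the v4 tools array. A standalone sketch of the role mapping, where string literals stand in for the types.ChatMessageRole* constants (an assumption about their values):

```go
package main

import "fmt"

// Standalone copy of convertRole for illustration, with string literals
// standing in for the types.ChatMessageRole* constants.
func convertRole(roleName string) string {
	switch roleName {
	case "function":
		return "tool"
	case "tool", "system", "assistant":
		return roleName
	default:
		return "user"
	}
}

func main() {
	fmt.Println(convertRole("function")) // tool
	fmt.Println(convertRole("system"))   // system
	fmt.Println(convertRole("weird"))    // user (unrecognized roles fall back to user)
}
```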
// Convert to the OpenAI chat streaming format
func (h *zhipuStreamHandler) handlerStream(rawLine *[]byte, isFinished *bool, response *[]types.ChatCompletionStreamResponse) error {
// If rawLine does not have a "data:" or "meta:" prefix, return immediately
if !strings.HasPrefix(string(*rawLine), "data:") && !strings.HasPrefix(string(*rawLine), "meta:") {
if !strings.HasPrefix(string(*rawLine), "data: ") {
*rawLine = nil
return nil
}
if strings.HasPrefix(string(*rawLine), "meta:") {
*rawLine = (*rawLine)[5:]
var zhipuStreamMetaResponse ZhipuStreamMetaResponse
err := json.Unmarshal(*rawLine, &zhipuStreamMetaResponse)
if err != nil {
return common.ErrorToOpenAIError(err)
}
*rawLine = (*rawLine)[6:]
if strings.HasPrefix(string(*rawLine), "[DONE]") {
*isFinished = true
return h.handlerMeta(&zhipuStreamMetaResponse, response)
return nil
}
*rawLine = (*rawLine)[5:]
return h.convertToOpenaiStream(string(*rawLine), response)
zhipuResponse := &ZhipuStreamResponse{}
err := json.Unmarshal(*rawLine, zhipuResponse)
if err != nil {
return common.ErrorToOpenAIError(err)
}
error := errorHandle(&zhipuResponse.Error)
if error != nil {
return error
}
return h.convertToOpenaiStream(zhipuResponse, response)
}
func (h *zhipuStreamHandler) convertToOpenaiStream(content string, response *[]types.ChatCompletionStreamResponse) error {
var choice types.ChatCompletionStreamChoice
choice.Delta.Content = content
func (h *zhipuStreamHandler) convertToOpenaiStream(zhipuResponse *ZhipuStreamResponse, response *[]types.ChatCompletionStreamResponse) error {
streamResponse := types.ChatCompletionStreamResponse{
ID: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
ID: zhipuResponse.ID,
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Created: zhipuResponse.Created,
Model: h.Request.Model,
Choices: []types.ChatCompletionStreamChoice{choice},
}
*response = append(*response, streamResponse)
return nil
}
func (h *zhipuStreamHandler) handlerMeta(zhipuResponse *ZhipuStreamMetaResponse, response *[]types.ChatCompletionStreamResponse) error {
var choice types.ChatCompletionStreamChoice
choice.Delta.Content = ""
choice.FinishReason = types.FinishReasonStop
streamResponse := types.ChatCompletionStreamResponse{
ID: zhipuResponse.RequestId,
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Model: h.Request.Model,
Choices: []types.ChatCompletionStreamChoice{choice},
}
*response = append(*response, streamResponse)
*h.Usage = zhipuResponse.Usage
choice := zhipuResponse.Choices[0]
if choice.Delta.ToolCalls != nil {
choices := choice.ConvertOpenaiStream()
for _, choice := range choices {
chatCompletionCopy := streamResponse
chatCompletionCopy.Choices = []types.ChatCompletionStreamChoice{choice}
*response = append(*response, chatCompletionCopy)
}
} else {
streamResponse.Choices = []types.ChatCompletionStreamChoice{choice}
*response = append(*response, streamResponse)
}
if zhipuResponse.Usage != nil {
*h.Usage = *zhipuResponse.Usage
}
return nil
}
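The stream handler now follows the standard OpenAI-style SSE framing: skip anything that is not a data: line, stop on [DONE], and otherwise unmarshal the chunk. A simplified, self-contained sketch of that flow; the chunk struct is a stand-in, not the real ZhipuStreamResponse:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// streamChunk is a simplified stand-in for ZhipuStreamResponse,
// just enough to show the parsing flow.
type streamChunk struct {
	ID      string `json:"id"`
	Created int64  `json:"created"`
}

// handleLine mirrors the flow of handlerStream: ignore non-data lines,
// stop on [DONE], otherwise decode the JSON chunk.
func handleLine(rawLine string) (finished bool, chunk *streamChunk, err error) {
	if !strings.HasPrefix(rawLine, "data: ") {
		return false, nil, nil
	}
	payload := rawLine[6:]
	if strings.HasPrefix(payload, "[DONE]") {
		return true, nil, nil
	}
	chunk = &streamChunk{}
	err = json.Unmarshal([]byte(payload), chunk)
	return false, chunk, err
}

func main() {
	lines := []string{
		`: ping`, // ignored
		`data: {"id":"chatcmpl-123","created":1705671270}`,
		`data: [DONE]`,
	}
	for _, l := range lines {
		done, chunk, err := handleLine(l)
		fmt.Println(done, chunk, err)
	}
}
```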

View File

@@ -0,0 +1,69 @@
package zhipu
import (
"net/http"
"one-api/common"
"one-api/types"
)
func (p *ZhipuProvider) CreateEmbeddings(request *types.EmbeddingRequest) (*types.EmbeddingResponse, *types.OpenAIErrorWithStatusCode) {
url, errWithCode := p.GetSupportedAPIUri(common.RelayModeEmbeddings)
if errWithCode != nil {
return nil, errWithCode
}
// Build the request URL
fullRequestURL := p.GetFullRequestURL(url, request.Model)
if fullRequestURL == "" {
return nil, common.ErrorWrapper(nil, "invalid_zhipu_config", http.StatusInternalServerError)
}
// Get the request headers
headers := p.GetRequestHeaders()
zhipuRequest := convertFromEmbeddingOpenai(request)
// Create the request
req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(zhipuRequest), p.Requester.WithHeader(headers))
if err != nil {
return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
}
defer req.Body.Close()
zhipuResponse := &ZhipuEmbeddingResponse{}
// Send the request
_, errWithCode = p.Requester.SendRequest(req, zhipuResponse, false)
if errWithCode != nil {
return nil, errWithCode
}
return p.convertToEmbeddingOpenai(zhipuResponse, request)
}
func convertFromEmbeddingOpenai(request *types.EmbeddingRequest) *ZhipuEmbeddingRequest {
return &ZhipuEmbeddingRequest{
Model: request.Model,
Input: request.ParseInputString(),
}
}
func (p *ZhipuProvider) convertToEmbeddingOpenai(response *ZhipuEmbeddingResponse, request *types.EmbeddingRequest) (openaiResponse *types.EmbeddingResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
error := errorHandle(&response.Error)
if error != nil {
errWithCode = &types.OpenAIErrorWithStatusCode{
OpenAIError: *error,
StatusCode: http.StatusBadRequest,
}
return
}
openAIEmbeddingResponse := &types.EmbeddingResponse{
Object: "list",
Data: response.Data,
Model: request.Model,
Usage: response.Usage,
}
*p.Usage = *response.Usage
return openAIEmbeddingResponse, nil
}
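For reference, the resulting v4 embeddings request body is plain JSON with a single input string, since ParseInputString reduces an OpenAI batch input to its first element. A sketch with a stand-in type; the real ZhipuEmbeddingRequest is defined in this commit's type file:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// embeddingRequest is a stand-in for ZhipuEmbeddingRequest.
type embeddingRequest struct {
	Model string `json:"model"`
	Input string `json:"input"`
}

func main() {
	// An OpenAI batch input like ["hello", "world"] becomes just "hello".
	req := embeddingRequest{Model: "embedding-2", Input: "hello"}
	body, _ := json.Marshal(req)
	fmt.Println(string(body)) // {"model":"embedding-2","input":"hello"}
}
```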

View File

@@ -0,0 +1,68 @@
package zhipu
import (
"net/http"
"one-api/common"
"one-api/types"
"time"
)
func (p *ZhipuProvider) CreateImageGenerations(request *types.ImageRequest) (*types.ImageResponse, *types.OpenAIErrorWithStatusCode) {
url, errWithCode := p.GetSupportedAPIUri(common.RelayModeImagesGenerations)
if errWithCode != nil {
return nil, errWithCode
}
// Build the request URL
fullRequestURL := p.GetFullRequestURL(url, request.Model)
if fullRequestURL == "" {
return nil, common.ErrorWrapper(nil, "invalid_zhipu_config", http.StatusInternalServerError)
}
// Get the request headers
headers := p.GetRequestHeaders()
zhipuRequest := convertFromImageOpenai(request)
// Create the request
req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(zhipuRequest), p.Requester.WithHeader(headers))
if err != nil {
return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
}
defer req.Body.Close()
zhipuResponse := &ZhipuImageGenerationResponse{}
// Send the request
_, errWithCode = p.Requester.SendRequest(req, zhipuResponse, false)
if errWithCode != nil {
return nil, errWithCode
}
return p.convertToImageOpenai(zhipuResponse, request)
}
func (p *ZhipuProvider) convertToImageOpenai(response *ZhipuImageGenerationResponse, request *types.ImageRequest) (openaiResponse *types.ImageResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
error := errorHandle(&response.Error)
if error != nil {
errWithCode = &types.OpenAIErrorWithStatusCode{
OpenAIError: *error,
StatusCode: http.StatusBadRequest,
}
return
}
openaiResponse = &types.ImageResponse{
Created: time.Now().Unix(),
Data: response.Data,
}
p.Usage.PromptTokens = 1000
return
}
func convertFromImageOpenai(request *types.ImageRequest) *ZhipuImageGenerationRequest {
return &ZhipuImageGenerationRequest{
Model: request.Model,
Prompt: request.Prompt,
}
}
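Billing note: the handler records a flat 1000 prompt tokens per image, which together with cogview-3's 17.8571 ratio matches the ¥0.25-per-image price listed earlier, assuming one-api's base of ¥0.014 per 1k tokens. A quick arithmetic check:

```go
package main

import "fmt"

func main() {
	// cogview-3 is billed as a flat 1000 prompt tokens (p.Usage.PromptTokens = 1000).
	// With a model ratio of 17.8571 and a base of ¥0.014 per 1k tokens,
	// that comes out to roughly ¥0.25 per generated image.
	const ratio = 17.8571
	const promptTokens = 1000.0
	costCNY := ratio * 0.014 * promptTokens / 1000
	fmt.Printf("≈ ¥%.4f per image\n", costCNY) // ≈ ¥0.2500 per image
}
```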

View File

@@ -5,41 +5,83 @@ import (
"time"
)
type ZhipuMessage struct {
Role string `json:"role"`
Content string `json:"content"`
type ZhipuWebSearch struct {
Enable bool `json:"enable"`
SearchQuery string `json:"search_query,omitempty"`
}
type ZhipuTool struct {
Type string `json:"type"`
Function types.ChatCompletionFunction `json:"function"`
WebSearch string `json:"web_search,omitempty"`
}
type ZhipuRequest struct {
Prompt []ZhipuMessage `json:"prompt"`
Temperature float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
RequestId string `json:"request_id,omitempty"`
Incremental bool `json:"incremental,omitempty"`
Model string `json:"model"`
Messages []types.ChatCompletionMessage `json:"messages"`
Stream bool `json:"stream,omitempty"`
Temperature float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
Stop []string `json:"stop,omitempty"`
Tools []ZhipuTool `json:"tools,omitempty"`
ToolChoice any `json:"tool_choice,omitempty"`
}
type ZhipuResponseData struct {
TaskId string `json:"task_id"`
RequestId string `json:"request_id"`
TaskStatus string `json:"task_status"`
Choices []ZhipuMessage `json:"choices"`
types.Usage `json:"usage"`
}
// type ZhipuMessage struct {
// Role string `json:"role"`
// Content string `json:"content"`
// ToolCalls []*types.ChatCompletionToolCalls `json:"tool_calls,omitempty"`
// ToolCallId string `json:"tool_call_id,omitempty"`
// }
type ZhipuResponse struct {
Code int `json:"code"`
Msg string `json:"msg"`
Success bool `json:"success"`
Data ZhipuResponseData `json:"data"`
Model string `json:"model,omitempty"`
ID string `json:"id"`
Created int64 `json:"created"`
Model string `json:"model"`
Choices []types.ChatCompletionChoice `json:"choices"`
Usage *types.Usage `json:"usage,omitempty"`
ZhipuResponseError
}
type ZhipuStreamMetaResponse struct {
RequestId string `json:"request_id"`
TaskId string `json:"task_id"`
TaskStatus string `json:"task_status"`
types.Usage `json:"usage"`
Model string `json:"model,omitempty"`
type ZhipuStreamResponse struct {
ID string `json:"id"`
Created int64 `json:"created"`
Choices []types.ChatCompletionStreamChoice `json:"choices"`
Usage *types.Usage `json:"usage,omitempty"`
ZhipuResponseError
}
type ZhipuResponseError struct {
Error ZhipuError `json:"error,omitempty"`
}
type ZhipuError struct {
Code string `json:"code"`
Message string `json:"message"`
}
type ZhipuEmbeddingRequest struct {
Model string `json:"model"`
Input string `json:"input"`
}
type ZhipuEmbeddingResponse struct {
Model string `json:"model"`
Data []types.Embedding `json:"data"`
Object string `json:"object"`
Usage *types.Usage `json:"usage"`
ZhipuResponseError
}
type ZhipuImageGenerationRequest struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
}
type ZhipuImageGenerationResponse struct {
Model string `json:"model"`
Data []types.ImageResponseDataInner `json:"data,omitempty"`
ZhipuResponseError
}
type zhipuTokenData struct {
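A design note on these types: every response struct (chat, stream, embeddings, images) embeds ZhipuResponseError, so errorHandle can inspect response.Error uniformly. A simplified sketch of how that embedding behaves under JSON unmarshaling, with stand-in types and a hypothetical error payload:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// respError stands in for ZhipuResponseError; embedding it promotes the Error field.
type respError struct {
	Error struct {
		Code    string `json:"code"`
		Message string `json:"message"`
	} `json:"error,omitempty"`
}

// chatResponse stands in for ZhipuResponse.
type chatResponse struct {
	ID    string `json:"id"`
	Model string `json:"model"`
	respError
}

func main() {
	ok := []byte(`{"id":"chatcmpl-1","model":"glm-4"}`)
	bad := []byte(`{"error":{"code":"1002","message":"invalid key"}}`) // hypothetical error payload

	var a, b chatResponse
	_ = json.Unmarshal(ok, &a)
	_ = json.Unmarshal(bad, &b)
	fmt.Println(a.Error.Message == "") // true: no error present
	fmt.Println(b.Error.Message)       // invalid key
}
```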

View File

@@ -38,3 +38,21 @@ func (r EmbeddingRequest) ParseInput() []string {
}
return input
}
func (r EmbeddingRequest) ParseInputString() string {
if r.Input == nil {
return ""
}
var input string
switch r.Input.(type) {
case string:
input = r.Input.(string)
case []any:
// Take the first element
if len(r.Input.([]any)) > 0 {
input = r.Input.([]any)[0].(string)
}
}
return input
}
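A standalone sketch of how ParseInputString behaves for the three input shapes it handles (nil, string, and []any); the sketch adds an ok-check on the type assertion for safety, which the original omits:

```go
package main

import "fmt"

// parseInputString mirrors EmbeddingRequest.ParseInputString for an any-typed input.
func parseInputString(input any) string {
	if input == nil {
		return ""
	}
	switch v := input.(type) {
	case string:
		return v
	case []any:
		// Take the first element, if it is a string.
		if len(v) > 0 {
			if s, ok := v[0].(string); ok {
				return s
			}
		}
	}
	return ""
}

func main() {
	fmt.Println(parseInputString("hello"))                  // hello
	fmt.Println(parseInputString([]any{"first", "second"})) // first
	fmt.Println(parseInputString(nil))                      // (empty string)
}
```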