✨ feat: support zhipu new API (#44)

parent 0be687905f
commit b42d4d44aa
@@ -78,6 +78,11 @@ func init() {
 	"chatglm_pro":  {0.7143, ChannelTypeZhipu},  // ¥0.01 / 1k tokens
 	"chatglm_std":  {0.3572, ChannelTypeZhipu},  // ¥0.005 / 1k tokens
 	"chatglm_lite": {0.1429, ChannelTypeZhipu},  // ¥0.002 / 1k tokens
+	"glm-3-turbo":  {0.3572, ChannelTypeZhipu},  // ¥0.005 / 1k tokens
+	"glm-4":        {0.7143, ChannelTypeZhipu},  // ¥0.01 / 1k tokens
+	"glm-4v":       {0.7143, ChannelTypeZhipu},  // ¥0.01 / 1k tokens
+	"embedding-2":  {0.0357, ChannelTypeZhipu},  // ¥0.0005 / 1k tokens
+	"cogview-3":    {17.8571, ChannelTypeZhipu}, // ¥0.25 / 1 image
 	"qwen-turbo":   {0.5715, ChannelTypeAli},    // ¥0.008 / 1k tokens // https://help.aliyun.com/zh/dashscope/developer-reference/tongyi-thousand-questions-metering-and-billing
 	"qwen-plus":    {1.4286, ChannelTypeAli},    // ¥0.02 / 1k tokens
 	"qwen-max":     {1.4286, ChannelTypeAli},    // ¥0.02 / 1k tokens
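
For context, one-api treats a model ratio of 1 as roughly $0.002 per 1K tokens, so the new Zhipu entries reproduce the prices in the comments at an assumed exchange rate of about ¥7 per USD; cogview-3 is effectively a per-image price, since the new image endpoint later in this commit reports a flat 1000-token usage per request. A quick illustrative sanity check (the exchange rate and the ratio convention are assumptions, not part of this commit):

package main

import "fmt"

// Illustrative sanity check only: ratio 1 ≈ $0.002 per 1K tokens (one-api's
// usual convention) and an assumed exchange rate of about ¥7 per USD.
func main() {
	ratios := map[string]float64{
		"glm-3-turbo": 0.3572,
		"glm-4":       0.7143,
		"glm-4v":      0.7143,
		"embedding-2": 0.0357,
		"cogview-3":   17.8571, // billed as 1000 tokens per generated image
	}
	for model, ratio := range ratios {
		fmt.Printf("%-12s ≈ ¥%.4f / 1k tokens\n", model, ratio*0.002*7)
	}
}
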
@@ -231,7 +231,8 @@ func calculateToken(model string, size string, n int, quality string) (int, erro
 			}
 		}
 	} else {
-		return 0, errors.New("size not supported for this image model")
+		imageCostRatio = 1
+		// return 0, errors.New("size not supported for this image model")
 	}

 	return int(imageCostRatio*1000) * n, nil
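
The practical effect of this hunk: an image size missing from the size table no longer aborts the request; the cost ratio falls back to 1, so the image is billed as int(1*1000)*n = 1000 tokens apiece. A minimal sketch of the new fallback, where sizeRatios and imageQuota are hypothetical stand-ins for calculateToken's internals:

package main

import "fmt"

// Sketch of the new fallback: an unknown size now bills at ratio 1
// (1000 tokens per image) instead of returning an error.
func imageQuota(size string, n int) int {
	sizeRatios := map[string]float64{"1024x1024": 1, "1024x1792": 2} // stand-in table
	ratio, ok := sizeRatios[size]
	if !ok {
		ratio = 1
	}
	return int(ratio*1000) * n
}

func main() {
	fmt.Println(imageQuota("512x512", 2)) // 2000 — no longer an error
}
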
@@ -41,7 +41,7 @@ func getConfig() base.ProviderConfig {

 // request error handling
 func requestErrorHandle(resp *http.Response) *types.OpenAIError {
-	var aliError *AliError
+	aliError := &AliError{}
 	err := json.NewDecoder(resp.Body).Decode(aliError)
 	if err != nil {
 		return nil
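
The same one-line change repeats for every provider below (Baidu, Claude, Gemini, OpenAI, PaLM, Tencent, Zhipu). Decoding into a declared-but-nil pointer makes encoding/json return an InvalidUnmarshalError, so the upstream error body was never parsed and requestErrorHandle always returned nil; allocating the struct first lets the decode succeed. A standalone illustration, with ErrorBody standing in for types like AliError:

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// ErrorBody is a hypothetical stand-in for provider error types such as AliError.
type ErrorBody struct {
	Code    string `json:"code"`
	Message string `json:"message"`
}

func main() {
	body := `{"code":"400","message":"bad request"}`

	var nilPtr *ErrorBody // the old pattern: declared but nil
	fmt.Println(json.NewDecoder(strings.NewReader(body)).Decode(nilPtr))
	// json: Unmarshal(nil *main.ErrorBody)

	allocated := &ErrorBody{} // the new pattern: allocated before decoding
	fmt.Println(json.NewDecoder(strings.NewReader(body)).Decode(allocated), allocated.Message)
	// <nil> bad request
}
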
@@ -44,7 +44,7 @@ func getConfig() base.ProviderConfig {

 // request error handling
 func requestErrorHandle(resp *http.Response) *types.OpenAIError {
-	var baiduError *BaiduError
+	baiduError := &BaiduError{}
 	err := json.NewDecoder(resp.Body).Decode(baiduError)
 	if err != nil {
 		return nil
@@ -35,7 +35,7 @@ func getConfig() base.ProviderConfig {

 // request error handling
 func requestErrorHandle(resp *http.Response) *types.OpenAIError {
-	var claudeError *ClaudeResponseError
+	claudeError := &ClaudeResponseError{}
 	err := json.NewDecoder(resp.Body).Decode(claudeError)
 	if err != nil {
 		return nil
@@ -37,7 +37,7 @@ func getConfig() base.ProviderConfig {

 // request error handling
 func requestErrorHandle(resp *http.Response) *types.OpenAIError {
-	var geminiError *GeminiErrorResponse
+	geminiError := &GeminiErrorResponse{}
 	err := json.NewDecoder(resp.Body).Decode(geminiError)
 	if err != nil {
 		return nil
@@ -62,7 +62,7 @@ func getOpenAIConfig(baseURL string) base.ProviderConfig {

 // request error handling
 func RequestErrorHandle(resp *http.Response) *types.OpenAIError {
-	var errorResponse *types.OpenAIErrorResponse
+	errorResponse := &types.OpenAIErrorResponse{}
 	err := json.NewDecoder(resp.Body).Decode(errorResponse)
 	if err != nil {
 		return nil
@@ -37,7 +37,7 @@ func getConfig() base.ProviderConfig {

 // request error handling
 func requestErrorHandle(resp *http.Response) *types.OpenAIError {
-	var palmError *PaLMErrorResponse
+	palmError := &PaLMErrorResponse{}
 	err := json.NewDecoder(resp.Body).Decode(palmError)
 	if err != nil {
 		return nil
@@ -43,7 +43,7 @@ func getConfig() base.ProviderConfig {

 // request error handling
 func requestErrorHandle(resp *http.Response) *types.OpenAIError {
-	var tencentError *TencentResponseError
+	tencentError := &TencentResponseError{}
 	err := json.NewDecoder(resp.Body).Decode(tencentError)
 	if err != nil {
 		return nil
@@ -38,29 +38,31 @@ type ZhipuProvider struct {

 func getConfig() base.ProviderConfig {
 	return base.ProviderConfig{
-		BaseURL:         "https://open.bigmodel.cn",
-		ChatCompletions: "/api/paas/v3/model-api",
+		BaseURL:           "https://open.bigmodel.cn/api/paas/v4",
+		ChatCompletions:   "/chat/completions",
+		Embeddings:        "/embeddings",
+		ImagesGenerations: "/images/generations",
 	}
 }

 // request error handling
 func requestErrorHandle(resp *http.Response) *types.OpenAIError {
-	var zhipuError *ZhipuResponse
+	zhipuError := &ZhipuResponseError{}
 	err := json.NewDecoder(resp.Body).Decode(zhipuError)
 	if err != nil {
 		return nil
 	}

-	return errorHandle(zhipuError)
+	return errorHandle(&zhipuError.Error)
 }

 // error handling
-func errorHandle(zhipuError *ZhipuResponse) *types.OpenAIError {
-	if zhipuError.Success {
+func errorHandle(zhipuError *ZhipuError) *types.OpenAIError {
+	if zhipuError.Message == "" {
 		return nil
 	}
 	return &types.OpenAIError{
-		Message: zhipuError.Msg,
+		Message: zhipuError.Message,
 		Type:    "zhipu_error",
 		Code:    zhipuError.Code,
 	}
@@ -79,7 +81,7 @@ func (p *ZhipuProvider) GetRequestHeaders() (headers map[string]string) {
 func (p *ZhipuProvider) GetFullRequestURL(requestURL string, modelName string) string {
 	baseURL := strings.TrimSuffix(p.GetBaseURL(), "/")

-	return fmt.Sprintf("%s%s/%s", baseURL, requestURL, modelName)
+	return fmt.Sprintf("%s%s", baseURL, requestURL)
 }

 func (p *ZhipuProvider) getZhipuToken() string {
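
With the base URL moved to the v4 path and the model name dropped from GetFullRequestURL, request URLs are now plain OpenAI-style paths. A small sketch of the resulting URLs, mirroring the config and URL helper above:

package main

import (
	"fmt"
	"strings"
)

// fullRequestURL mirrors the new GetFullRequestURL: base + path, no model suffix.
func fullRequestURL(baseURL, requestURL string) string {
	return fmt.Sprintf("%s%s", strings.TrimSuffix(baseURL, "/"), requestURL)
}

func main() {
	base := "https://open.bigmodel.cn/api/paas/v4"
	fmt.Println(fullRequestURL(base, "/chat/completions"))   // .../api/paas/v4/chat/completions
	fmt.Println(fullRequestURL(base, "/embeddings"))         // .../api/paas/v4/embeddings
	fmt.Println(fullRequestURL(base, "/images/generations")) // .../api/paas/v4/images/generations
}
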
@@ -129,3 +131,24 @@ func (p *ZhipuProvider) getZhipuToken() string {

 	return tokenString
 }
+
+func convertRole(roleName string) string {
+	switch roleName {
+	case types.ChatMessageRoleFunction:
+		return types.ChatMessageRoleTool
+	case types.ChatMessageRoleTool, types.ChatMessageRoleSystem, types.ChatMessageRoleAssistant:
+		return roleName
+	default:
+		return types.ChatMessageRoleUser
+	}
+}
+
+func convertTopP(topP float64) float64 {
+	// keep topP inside (0, 1): values <= 0 become 0.1, values >= 1 become 0.9
+	if topP <= 0 {
+		return 0.1
+	} else if topP >= 1 {
+		return 0.9
+	}
+	return topP
+}
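
The two helpers added here normalize OpenAI inputs for the v4 API: convertRole maps the legacy function role to tool, passes tool/system/assistant through, and falls back to user for anything else, while convertTopP keeps top_p strictly inside (0, 1). A self-contained mirror of the clamp with sample values (convertTopPSketch is a copy for illustration, not the committed function):

package main

import "fmt"

// convertTopPSketch mirrors the committed convertTopP helper:
// values <= 0 become 0.1, values >= 1 become 0.9, anything else passes through.
func convertTopPSketch(topP float64) float64 {
	if topP <= 0 {
		return 0.1
	} else if topP >= 1 {
		return 0.9
	}
	return topP
}

func main() {
	for _, v := range []float64{0, 0.7, 1, 1.5} {
		fmt.Println(v, "->", convertTopPSketch(v)) // 0->0.1, 0.7->0.7, 1->0.9, 1.5->0.9
	}
}
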
@@ -2,7 +2,6 @@ package zhipu

 import (
 	"encoding/json"
-	"fmt"
 	"net/http"
 	"one-api/common"
 	"one-api/common/requester"
@@ -62,19 +61,14 @@ func (p *ZhipuProvider) getChatRequest(request *types.ChatCompletionRequest) (*h
 	// get the request URL
 	fullRequestURL := p.GetFullRequestURL(url, request.Model)
 	if fullRequestURL == "" {
-		return nil, common.ErrorWrapper(nil, "invalid_baidu_config", http.StatusInternalServerError)
+		return nil, common.ErrorWrapper(nil, "invalid_zhipu_config", http.StatusInternalServerError)
 	}

 	// get the request headers
 	headers := p.GetRequestHeaders()
-	if request.Stream {
-		headers["Accept"] = "text/event-stream"
-		fullRequestURL += "/sse-invoke"
-	} else {
-		fullRequestURL += "/invoke"
-	}

 	zhipuRequest := convertFromChatOpenai(request)

 	// create the request
 	req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(zhipuRequest), p.Requester.WithHeader(headers))
 	if err != nil {
@@ -85,7 +79,7 @@ func (p *ZhipuProvider) getChatRequest(request *types.ChatCompletionRequest) (*h
 }

 func (p *ZhipuProvider) convertToChatOpenai(response *ZhipuResponse, request *types.ChatCompletionRequest) (openaiResponse *types.ChatCompletionResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
-	error := errorHandle(response)
+	error := errorHandle(&response.Error)
 	if error != nil {
 		errWithCode = &types.OpenAIErrorWithStatusCode{
 			OpenAIError: *error,
@@ -95,114 +89,110 @@ func (p *ZhipuProvider) convertToChatOpenai(response *ZhipuResponse, request *ty
 	}

 	openaiResponse = &types.ChatCompletionResponse{
-		ID:      response.Data.TaskId,
+		ID:      response.ID,
 		Object:  "chat.completion",
-		Created: common.GetTimestamp(),
-		Model:   request.Model,
-		Choices: make([]types.ChatCompletionChoice, 0, len(response.Data.Choices)),
-		Usage:   &response.Data.Usage,
-	}
-	for i, choice := range response.Data.Choices {
-		openaiChoice := types.ChatCompletionChoice{
-			Index: i,
-			Message: types.ChatCompletionMessage{
-				Role:    choice.Role,
-				Content: strings.Trim(choice.Content, "\""),
-			},
-			FinishReason: "",
-		}
-		if i == len(response.Data.Choices)-1 {
-			openaiChoice.FinishReason = "stop"
-		}
-		openaiResponse.Choices = append(openaiResponse.Choices, openaiChoice)
+		Created: response.Created,
+		Model:   response.Model,
+		Choices: response.Choices,
+		Usage:   response.Usage,
 	}

-	*p.Usage = response.Data.Usage
+	*p.Usage = *response.Usage

 	return
 }

 func convertFromChatOpenai(request *types.ChatCompletionRequest) *ZhipuRequest {
-	messages := make([]ZhipuMessage, 0, len(request.Messages))
-	for _, message := range request.Messages {
-		if message.Role == "system" {
-			messages = append(messages, ZhipuMessage{
-				Role:    "system",
-				Content: message.StringContent(),
-			})
-			messages = append(messages, ZhipuMessage{
-				Role:    "user",
-				Content: "Okay",
-			})
-		} else {
-			messages = append(messages, ZhipuMessage{
-				Role:    message.Role,
-				Content: message.StringContent(),
-			})
-		}
-	}
-	return &ZhipuRequest{
-		Prompt:      messages,
-		Temperature: request.Temperature,
-		TopP:        request.TopP,
-		Incremental: false,
+	for i := range request.Messages {
+		request.Messages[i].Role = convertRole(request.Messages[i].Role)
 	}
+
+	zhipuRequest := &ZhipuRequest{
+		Model:       request.Model,
+		Messages:    request.Messages,
+		Stream:      request.Stream,
+		Temperature: request.Temperature,
+		TopP:        convertTopP(request.TopP),
+		MaxTokens:   request.MaxTokens,
+		Stop:        request.Stop,
+		ToolChoice:  request.ToolChoice,
+	}
+
+	if request.Functions != nil {
+		zhipuRequest.Tools = make([]ZhipuTool, 0, len(request.Functions))
+		for _, function := range request.Functions {
+			zhipuRequest.Tools = append(zhipuRequest.Tools, ZhipuTool{
+				Type:     "function",
+				Function: *function,
+			})
+		}
+	} else if request.Tools != nil {
+		zhipuRequest.Tools = make([]ZhipuTool, 0, len(request.Tools))
+		for _, tool := range request.Tools {
+			zhipuRequest.Tools = append(zhipuRequest.Tools, ZhipuTool{
+				Type:     "function",
+				Function: tool.Function,
+			})
+		}
+	}
+
+	return zhipuRequest
 }

 // convert to the OpenAI chat streaming format
 func (h *zhipuStreamHandler) handlerStream(rawLine *[]byte, isFinished *bool, response *[]types.ChatCompletionStreamResponse) error {
 	// return immediately unless rawLine starts with data: or meta:
-	if !strings.HasPrefix(string(*rawLine), "data:") && !strings.HasPrefix(string(*rawLine), "meta:") {
+	if !strings.HasPrefix(string(*rawLine), "data: ") {
 		*rawLine = nil
 		return nil
 	}

-	if strings.HasPrefix(string(*rawLine), "meta:") {
-		*rawLine = (*rawLine)[5:]
-		var zhipuStreamMetaResponse ZhipuStreamMetaResponse
-		err := json.Unmarshal(*rawLine, &zhipuStreamMetaResponse)
-		if err != nil {
-			return common.ErrorToOpenAIError(err)
-		}
-		*isFinished = true
-		return h.handlerMeta(&zhipuStreamMetaResponse, response)
-	}
-
-	*rawLine = (*rawLine)[5:]
-	return h.convertToOpenaiStream(string(*rawLine), response)
+	*rawLine = (*rawLine)[6:]
+
+	if strings.HasPrefix(string(*rawLine), "[DONE]") {
+		*isFinished = true
+		return nil
+	}
+
+	zhipuResponse := &ZhipuStreamResponse{}
+	err := json.Unmarshal(*rawLine, zhipuResponse)
+	if err != nil {
+		return common.ErrorToOpenAIError(err)
+	}
+
+	error := errorHandle(&zhipuResponse.Error)
+	if error != nil {
+		return error
+	}
+
+	return h.convertToOpenaiStream(zhipuResponse, response)
 }

-func (h *zhipuStreamHandler) convertToOpenaiStream(content string, response *[]types.ChatCompletionStreamResponse) error {
-	var choice types.ChatCompletionStreamChoice
-	choice.Delta.Content = content
+func (h *zhipuStreamHandler) convertToOpenaiStream(zhipuResponse *ZhipuStreamResponse, response *[]types.ChatCompletionStreamResponse) error {
 	streamResponse := types.ChatCompletionStreamResponse{
-		ID:      fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
+		ID:      zhipuResponse.ID,
 		Object:  "chat.completion.chunk",
-		Created: common.GetTimestamp(),
+		Created: zhipuResponse.Created,
 		Model:   h.Request.Model,
-		Choices: []types.ChatCompletionStreamChoice{choice},
 	}

-	*response = append(*response, streamResponse)
-
-	return nil
-}
-
-func (h *zhipuStreamHandler) handlerMeta(zhipuResponse *ZhipuStreamMetaResponse, response *[]types.ChatCompletionStreamResponse) error {
-	var choice types.ChatCompletionStreamChoice
-	choice.Delta.Content = ""
-	choice.FinishReason = types.FinishReasonStop
-	streamResponse := types.ChatCompletionStreamResponse{
-		ID:      zhipuResponse.RequestId,
-		Object:  "chat.completion.chunk",
-		Created: common.GetTimestamp(),
-		Model:   h.Request.Model,
-		Choices: []types.ChatCompletionStreamChoice{choice},
-	}
-
-	*response = append(*response, streamResponse)
-
-	*h.Usage = zhipuResponse.Usage
+	choice := zhipuResponse.Choices[0]
+
+	if choice.Delta.ToolCalls != nil {
+		choices := choice.ConvertOpenaiStream()
+		for _, choice := range choices {
+			chatCompletionCopy := streamResponse
+			chatCompletionCopy.Choices = []types.ChatCompletionStreamChoice{choice}
+			*response = append(*response, chatCompletionCopy)
+		}
+	} else {
+		streamResponse.Choices = []types.ChatCompletionStreamChoice{choice}
+		*response = append(*response, streamResponse)
+	}
+
+	if zhipuResponse.Usage != nil {
+		*h.Usage = *zhipuResponse.Usage
+	}

 	return nil
 }
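
The stream handler now speaks the v4 SSE dialect: only lines starting with "data: " are processed, the payload after the prefix is either a JSON chunk or the terminator [DONE], and tool-call deltas are fanned out into separate chunks. A rough sketch of the framing logic only (the payload shown is illustrative, not a captured response):

package main

import (
	"fmt"
	"strings"
)

// Sketch of the new framing in handlerStream: strip the "data: " prefix,
// stop on "[DONE]", otherwise decode the JSON chunk.
func main() {
	lines := []string{
		`data: {"id":"chunk-1","choices":[{"delta":{"content":"你好"}}]}`, // illustrative payload
		`: keep-alive`, // ignored, no "data: " prefix
		`data: [DONE]`,
	}
	for _, raw := range lines {
		if !strings.HasPrefix(raw, "data: ") {
			continue
		}
		payload := raw[6:]
		if strings.HasPrefix(payload, "[DONE]") {
			fmt.Println("stream finished")
			break
		}
		fmt.Println("decode chunk:", payload)
	}
}
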
providers/zhipu/embeddings.go (new file, 69 lines)
@@ -0,0 +1,69 @@
+package zhipu
+
+import (
+	"net/http"
+	"one-api/common"
+	"one-api/types"
+)
+
+func (p *ZhipuProvider) CreateEmbeddings(request *types.EmbeddingRequest) (*types.EmbeddingResponse, *types.OpenAIErrorWithStatusCode) {
+	url, errWithCode := p.GetSupportedAPIUri(common.RelayModeEmbeddings)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+	// get the request URL
+	fullRequestURL := p.GetFullRequestURL(url, request.Model)
+	if fullRequestURL == "" {
+		return nil, common.ErrorWrapper(nil, "invalid_zhipu_config", http.StatusInternalServerError)
+	}
+
+	// get the request headers
+	headers := p.GetRequestHeaders()
+
+	aliRequest := convertFromEmbeddingOpenai(request)
+	// create the request
+	req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(aliRequest), p.Requester.WithHeader(headers))
+	if err != nil {
+		return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
+	}
+	defer req.Body.Close()
+
+	zhipuResponse := &ZhipuEmbeddingResponse{}
+
+	// send the request
+	_, errWithCode = p.Requester.SendRequest(req, zhipuResponse, false)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+
+	return p.convertToEmbeddingOpenai(zhipuResponse, request)
+}
+
+func convertFromEmbeddingOpenai(request *types.EmbeddingRequest) *ZhipuEmbeddingRequest {
+	return &ZhipuEmbeddingRequest{
+		Model: request.Model,
+		Input: request.ParseInputString(),
+	}
+}
+
+func (p *ZhipuProvider) convertToEmbeddingOpenai(response *ZhipuEmbeddingResponse, request *types.EmbeddingRequest) (openaiResponse *types.EmbeddingResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
+	error := errorHandle(&response.Error)
+	if error != nil {
+		errWithCode = &types.OpenAIErrorWithStatusCode{
+			OpenAIError: *error,
+			StatusCode:  http.StatusBadRequest,
+		}
+		return
+	}
+
+	openAIEmbeddingResponse := &types.EmbeddingResponse{
+		Object: "list",
+		Data:   response.Data,
+		Model:  request.Model,
+		Usage:  response.Usage,
+	}
+
+	*p.Usage = *response.Usage
+
+	return openAIEmbeddingResponse, nil
+}

providers/zhipu/image_generations.go (new file, 68 lines)
@@ -0,0 +1,68 @@
+package zhipu
+
+import (
+	"net/http"
+	"one-api/common"
+	"one-api/types"
+	"time"
+)
+
+func (p *ZhipuProvider) CreateImageGenerations(request *types.ImageRequest) (*types.ImageResponse, *types.OpenAIErrorWithStatusCode) {
+	url, errWithCode := p.GetSupportedAPIUri(common.RelayModeImagesGenerations)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+	// get the request URL
+	fullRequestURL := p.GetFullRequestURL(url, request.Model)
+	if fullRequestURL == "" {
+		return nil, common.ErrorWrapper(nil, "invalid_zhipu_config", http.StatusInternalServerError)
+	}
+
+	// get the request headers
+	headers := p.GetRequestHeaders()
+
+	zhipuRequest := convertFromIamgeOpenai(request)
+	// create the request
+	req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(zhipuRequest), p.Requester.WithHeader(headers))
+	if err != nil {
+		return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
+	}
+	defer req.Body.Close()
+
+	zhipuResponse := &ZhipuImageGenerationResponse{}
+
+	// send the request
+	_, errWithCode = p.Requester.SendRequest(req, zhipuResponse, false)
+	if errWithCode != nil {
+		return nil, errWithCode
+	}
+
+	return p.convertToImageOpenai(zhipuResponse, request)
+}
+
+func (p *ZhipuProvider) convertToImageOpenai(response *ZhipuImageGenerationResponse, request *types.ImageRequest) (openaiResponse *types.ImageResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
+	error := errorHandle(&response.Error)
+	if error != nil {
+		errWithCode = &types.OpenAIErrorWithStatusCode{
+			OpenAIError: *error,
+			StatusCode:  http.StatusBadRequest,
+		}
+		return
+	}
+
+	openaiResponse = &types.ImageResponse{
+		Created: time.Now().Unix(),
+		Data:    response.Data,
+	}
+
+	p.Usage.PromptTokens = 1000
+
+	return
+}
+
+func convertFromIamgeOpenai(request *types.ImageRequest) *ZhipuImageGenerationRequest {
+	return &ZhipuImageGenerationRequest{
+		Model:  request.Model,
+		Prompt: request.Prompt,
+	}
+}
@@ -5,41 +5,83 @@ import (
 	"time"
 )

-type ZhipuMessage struct {
-	Role    string `json:"role"`
-	Content string `json:"content"`
+type ZhipuWebSearch struct {
+	Enable      bool   `json:"enable"`
+	SearchQuery string `json:"search_query,omitempty"`
 }

+type ZhipuTool struct {
+	Type      string                       `json:"type"`
+	Function  types.ChatCompletionFunction `json:"function"`
+	WebSearch string                       `json:"web_search,omitempty"`
+}
+
 type ZhipuRequest struct {
-	Prompt      []ZhipuMessage `json:"prompt"`
-	Temperature float64        `json:"temperature,omitempty"`
-	TopP        float64        `json:"top_p,omitempty"`
-	RequestId   string         `json:"request_id,omitempty"`
-	Incremental bool           `json:"incremental,omitempty"`
+	Model       string                        `json:"model"`
+	Messages    []types.ChatCompletionMessage `json:"messages"`
+	Stream      bool                          `json:"stream,omitempty"`
+	Temperature float64                       `json:"temperature,omitempty"`
+	TopP        float64                       `json:"top_p,omitempty"`
+	MaxTokens   int                           `json:"max_tokens,omitempty"`
+	Stop        []string                      `json:"stop,omitempty"`
+	Tools       []ZhipuTool                   `json:"tools,omitempty"`
+	ToolChoice  any                           `json:"tool_choice,omitempty"`
 }

-type ZhipuResponseData struct {
-	TaskId      string         `json:"task_id"`
-	RequestId   string         `json:"request_id"`
-	TaskStatus  string         `json:"task_status"`
-	Choices     []ZhipuMessage `json:"choices"`
-	types.Usage `json:"usage"`
-}
+// type ZhipuMessage struct {
+// 	Role       string                           `json:"role"`
+// 	Content    string                           `json:"content"`
+// 	ToolCalls  []*types.ChatCompletionToolCalls `json:"tool_calls,omitempty"`
+// 	ToolCallId string                           `json:"tool_call_id,omitempty"`
+// }

 type ZhipuResponse struct {
-	Code    int               `json:"code"`
-	Msg     string            `json:"msg"`
-	Success bool              `json:"success"`
-	Data    ZhipuResponseData `json:"data"`
-	Model   string            `json:"model,omitempty"`
+	ID      string                       `json:"id"`
+	Created int64                        `json:"created"`
+	Model   string                       `json:"model"`
+	Choices []types.ChatCompletionChoice `json:"choices"`
+	Usage   *types.Usage                 `json:"usage,omitempty"`
+	ZhipuResponseError
 }

-type ZhipuStreamMetaResponse struct {
-	RequestId   string `json:"request_id"`
-	TaskId      string `json:"task_id"`
-	TaskStatus  string `json:"task_status"`
-	types.Usage `json:"usage"`
-	Model       string `json:"model,omitempty"`
+type ZhipuStreamResponse struct {
+	ID      string                             `json:"id"`
+	Created int64                              `json:"created"`
+	Choices []types.ChatCompletionStreamChoice `json:"choices"`
+	Usage   *types.Usage                       `json:"usage,omitempty"`
+	ZhipuResponseError
+}
+
+type ZhipuResponseError struct {
+	Error ZhipuError `json:"error,omitempty"`
+}
+
+type ZhipuError struct {
+	Code    string `json:"code"`
+	Message string `json:"message"`
+}
+
+type ZhipuEmbeddingRequest struct {
+	Model string `json:"model"`
+	Input string `json:"input"`
+}
+
+type ZhipuEmbeddingResponse struct {
+	Model  string            `json:"model"`
+	Data   []types.Embedding `json:"data"`
+	Object string            `json:"object"`
+	Usage  *types.Usage      `json:"usage"`
+	ZhipuResponseError
+}
+
+type ZhipuImageGenerationRequest struct {
+	Model  string `json:"model"`
+	Prompt string `json:"prompt"`
+}
+
+type ZhipuImageGenerationResponse struct {
+	Model string                         `json:"model"`
+	Data  []types.ImageResponseDataInner `json:"data,omitempty"`
+	ZhipuResponseError
 }

 type zhipuTokenData struct {
@@ -38,3 +38,21 @@ func (r EmbeddingRequest) ParseInput() []string {
 	}
 	return input
 }
+
+func (r EmbeddingRequest) ParseInputString() string {
+	if r.Input == nil {
+		return ""
+	}
+
+	var input string
+	switch r.Input.(type) {
+	case string:
+		input = r.Input.(string)
+	case []any:
+		// take the first element
+		if len(r.Input.([]any)) > 0 {
+			input = r.Input.([]any)[0].(string)
+		}
+	}
+	return input
+}
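
ParseInputString flattens the OpenAI embeddings input field into the single string the Zhipu embedding request expects: a plain string passes through, while an array contributes only its first element, so multi-input batches are effectively truncated. A standalone mirror of the switch (parseInputString is a copy for illustration, not the committed method):

package main

import "fmt"

// parseInputString mirrors the new EmbeddingRequest.ParseInputString helper:
// strings pass through, arrays contribute only their first element.
func parseInputString(input any) string {
	if input == nil {
		return ""
	}
	switch v := input.(type) {
	case string:
		return v
	case []any:
		if len(v) > 0 {
			if s, ok := v[0].(string); ok {
				return s
			}
		}
	}
	return ""
}

func main() {
	fmt.Println(parseInputString("hello"))                  // hello
	fmt.Println(parseInputString([]any{"first", "second"})) // first
}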