feat: MiniMax support (#50)

This commit is contained in:
Buer 2024-01-22 12:25:55 +08:00 committed by GitHub
parent 2cc120f35b
commit 705804e6dd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 601 additions and 2 deletions

View File

@ -190,6 +190,7 @@ const (
ChannelTypeAzureSpeech = 24
ChannelTypeGemini = 25
ChannelTypeBaichuan = 26
ChannelTypeMiniMax = 27
)
var ChannelBaseURLs = []string{
@ -220,6 +221,7 @@ var ChannelBaseURLs = []string{
"", //24
"", //25
"https://api.baichuan-ai.com", //26
"https://api.minimax.chat/v1", //27
}
const (

View File

@ -99,6 +99,10 @@ func init() {
"Baichuan2-Turbo-192k": {1.143, ChannelTypeBaichuan}, // ¥0.016 / 1k tokens
"Baichuan2-53B": {1.4286, ChannelTypeBaichuan}, // ¥0.02 / 1k tokens
"Baichuan-Text-Embedding": {0.0357, ChannelTypeBaichuan}, // ¥0.0005 / 1k tokens
"abab5.5s-chat": {0.3572, ChannelTypeMiniMax}, // ¥0.005 / 1k tokens
"abab5.5-chat": {1.0714, ChannelTypeMiniMax}, // ¥0.015 / 1k tokens
"abab6-chat": {14.2857, ChannelTypeMiniMax}, // ¥0.2 / 1k tokens
"embo-01": {0.0357, ChannelTypeMiniMax}, // ¥0.0005 / 1k tokens
}
ModelRatio = make(map[string]float64)

View File

@ -10,6 +10,7 @@ import (
"one-api/common"
"one-api/types"
"strconv"
"strings"
"github.com/gin-gonic/gin"
)
@ -122,7 +123,7 @@ func (r *HTTPRequester) SendRequestRaw(req *http.Request) (*http.Response, *type
// 获取流式响应
func RequestStream[T streamable](requester *HTTPRequester, resp *http.Response, handlerPrefix HandlerPrefix[T]) (*streamReader[T], *types.OpenAIErrorWithStatusCode) {
// 如果返回的头是json格式 说明有错误
if resp.Header.Get("Content-Type") == "application/json" {
if strings.Contains(resp.Header.Get("Content-Type"), "application/json") {
return nil, HandleErrorResp(resp, requester.ErrorHandler)
}

View File

@ -9,7 +9,7 @@ import (
type Channel struct {
Id int `json:"id"`
Type int `json:"type" gorm:"default:0"`
Key string `json:"key" gorm:"not null;index"`
Key string `json:"key" gorm:"type:varchar(767);not null;index"`
Status int `json:"status" gorm:"default:1"`
Name string `json:"name" gorm:"index"`
Weight *uint `json:"weight" gorm:"default:0"`

115
providers/minimax/base.go Normal file
View File

@ -0,0 +1,115 @@
package minimax
import (
"encoding/json"
"fmt"
"net/http"
"one-api/common/requester"
"one-api/model"
"one-api/providers/base"
"one-api/types"
"strings"
)
// MiniMaxProviderFactory creates MiniMax providers.
type MiniMaxProviderFactory struct{}

// Create builds a MiniMax provider bound to the given channel, wiring in the
// MiniMax endpoint configuration and the MiniMax-specific error handler.
func (f MiniMaxProviderFactory) Create(channel *model.Channel) base.ProviderInterface {
	provider := &MiniMaxProvider{}
	provider.Config = getConfig()
	provider.Channel = channel
	provider.Requester = requester.NewHTTPRequester(*channel.Proxy, requestErrorHandle)
	return provider
}
// MiniMaxProvider implements the provider interface for the MiniMax API,
// reusing the shared BaseProvider plumbing (config, channel, requester).
type MiniMaxProvider struct {
base.BaseProvider
}
// getConfig returns the static MiniMax endpoint configuration.
func getConfig() base.ProviderConfig {
	var cfg base.ProviderConfig
	cfg.BaseURL = "https://api.minimax.chat/v1"
	cfg.ChatCompletions = "/text/chatcompletion_pro"
	cfg.Embeddings = "/embeddings"
	return cfg
}
// requestErrorHandle decodes a MiniMax error payload from an HTTP response
// body. A nil return means no structured error could be parsed, letting the
// generic error handling take over.
func requestErrorHandle(resp *http.Response) *types.OpenAIError {
	var payload MiniMaxBaseResp
	if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
		return nil
	}
	return errorHandle(&payload.BaseResp)
}
// errorHandle maps a MiniMax base_resp into an OpenAI-style error.
// A status code of zero indicates success and is reported as nil.
func errorHandle(baseResp *BaseResp) *types.OpenAIError {
	if baseResp.StatusCode == 0 {
		return nil
	}
	openaiErr := types.OpenAIError{
		Message: baseResp.StatusMsg,
		Type:    "minimax_error",
		Code:    baseResp.StatusCode,
	}
	return &openaiErr
}
// GetFullRequestURL joins the base URL, the API path and the GroupId query
// parameter. The channel key must be of the form "APISecret|groupID"; an
// empty string is returned when it does not have exactly two parts.
func (p *MiniMaxProvider) GetFullRequestURL(requestURL string, modelName string) string {
	parts := strings.Split(p.Channel.Key, "|")
	if len(parts) != 2 {
		return ""
	}
	trimmedBase := strings.TrimSuffix(p.GetBaseURL(), "/")
	return trimmedBase + requestURL + "?GroupId=" + parts[1]
}
// GetRequestHeaders builds the common headers plus the MiniMax bearer token.
// Only the part of the channel key before the first '|' is the API secret.
func (p *MiniMaxProvider) GetRequestHeaders() (headers map[string]string) {
	headers = map[string]string{}
	p.CommonRequestHeaders(headers)
	secret, _, _ := strings.Cut(p.Channel.Key, "|")
	headers["Authorization"] = "Bearer " + secret
	return headers
}
// defaultBot returns the bot setting used when the request carries no system
// message, since MiniMax requires at least one bot_setting entry.
func defaultBot() MiniMaxBotSetting {
	var setting MiniMaxBotSetting
	setting.BotName = types.ChatMessageRoleAssistant
	setting.Content = "You are a helpful assistant. You can help me by answering my questions. You can also ask me questions."
	return setting
}
// defaultReplyConstraints tells MiniMax which participant should produce the
// reply (the assistant bot).
func defaultReplyConstraints() ReplyConstraints {
	var constraints ReplyConstraints
	constraints.SenderType = "BOT"
	constraints.SenderName = types.ChatMessageRoleAssistant
	return constraints
}
// convertRole maps an OpenAI role to the MiniMax (sender_type, sender_name)
// pair. Unknown roles fall back to a regular user message.
func convertRole(roleName string) (string, string) {
	if roleName == types.ChatMessageRoleTool || roleName == types.ChatMessageRoleFunction {
		return "FUNCTION", types.ChatMessageRoleAssistant
	}
	if roleName == types.ChatMessageRoleSystem || roleName == types.ChatMessageRoleAssistant {
		return "BOT", types.ChatMessageRoleAssistant
	}
	return "USER", types.ChatMessageRoleUser
}
// convertFinishReason maps MiniMax finish reasons onto OpenAI ones; values
// without a known mapping are passed through unchanged.
func convertFinishReason(finishReason string) string {
	if finishReason == "max_output" {
		return types.FinishReasonLength
	}
	return finishReason
}

288
providers/minimax/chat.go Normal file
View File

@ -0,0 +1,288 @@
package minimax
import (
"encoding/json"
"net/http"
"one-api/common"
"one-api/common/requester"
"one-api/types"
"strings"
)
// minimaxStreamHandler accumulates state while translating a MiniMax SSE
// stream into OpenAI-style chat completion chunks.
type minimaxStreamHandler struct {
Usage *types.Usage // shared usage accumulator owned by the provider
Request *types.ChatCompletionRequest // original OpenAI-style request (model name, functions/tools)
LastContent string // NOTE(review): never read or written in this file — appears unused; confirm before removing
}
// CreateChatCompletion performs a blocking chat completion request against
// MiniMax and converts the reply into OpenAI format.
func (p *MiniMaxProvider) CreateChatCompletion(request *types.ChatCompletionRequest) (*types.ChatCompletionResponse, *types.OpenAIErrorWithStatusCode) {
	req, errWithCode := p.getChatRequest(request)
	if errWithCode != nil {
		return nil, errWithCode
	}
	defer req.Body.Close()

	miniResponse := &MiniMaxChatResponse{}
	// Send the request and decode the JSON body into miniResponse.
	if _, errWithCode = p.Requester.SendRequest(req, miniResponse, false); errWithCode != nil {
		return nil, errWithCode
	}
	return p.convertToChatOpenai(miniResponse, request)
}
// CreateChatCompletionStream starts a streaming chat completion and returns a
// reader that yields OpenAI-style chunks via minimaxStreamHandler.
func (p *MiniMaxProvider) CreateChatCompletionStream(request *types.ChatCompletionRequest) (requester.StreamReaderInterface[types.ChatCompletionStreamResponse], *types.OpenAIErrorWithStatusCode) {
	req, errWithCode := p.getChatRequest(request)
	if errWithCode != nil {
		return nil, errWithCode
	}
	defer req.Body.Close()

	// Send the request without decoding so the body can be streamed.
	resp, errWithCode := p.Requester.SendRequestRaw(req)
	if errWithCode != nil {
		return nil, errWithCode
	}

	handler := minimaxStreamHandler{
		Usage:   p.Usage,
		Request: request,
	}
	return requester.RequestStream[types.ChatCompletionStreamResponse](p.Requester, resp, handler.handlerStream)
}
// getChatRequest builds the signed HTTP request for a chat completion call:
// resolves the API path, expands the GroupId from the channel key, attaches
// auth headers and serializes the converted MiniMax payload.
func (p *MiniMaxProvider) getChatRequest(request *types.ChatCompletionRequest) (*http.Request, *types.OpenAIErrorWithStatusCode) {
	url, errWithCode := p.GetSupportedAPIUri(common.RelayModeChatCompletions)
	if errWithCode != nil {
		return nil, errWithCode
	}

	// An empty URL means the channel key was not in "APISecret|groupID" form.
	fullRequestURL := p.GetFullRequestURL(url, request.Model)
	if fullRequestURL == "" {
		return nil, common.ErrorWrapper(nil, "invalid_minimax_config", http.StatusInternalServerError)
	}

	headers := p.GetRequestHeaders()
	// Renamed from "zhipuRequest" — a copy-paste leftover from the zhipu
	// provider; this is the MiniMax request payload.
	minimaxRequest := convertFromChatOpenai(request)

	req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(minimaxRequest), p.Requester.WithHeader(headers))
	if err != nil {
		return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
	}
	return req, nil
}
// convertToChatOpenai converts a non-streaming MiniMax response into the
// OpenAI chat completion format and folds token usage into the provider state.
func (p *MiniMaxProvider) convertToChatOpenai(response *MiniMaxChatResponse, request *types.ChatCompletionRequest) (openaiResponse *types.ChatCompletionResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
	// A non-zero base_resp status code is an API-level failure.
	// (Renamed from "error", which shadowed the predeclared identifier.)
	if openaiErr := errorHandle(&response.MiniMaxBaseResp.BaseResp); openaiErr != nil {
		errWithCode = &types.OpenAIErrorWithStatusCode{
			OpenAIError: *openaiErr,
			StatusCode:  http.StatusBadRequest,
		}
		return
	}

	openaiResponse = &types.ChatCompletionResponse{
		ID:      response.ID,
		Object:  "chat.completion",
		Created: response.Created,
		Model:   response.Model,
		Choices: make([]types.ChatCompletionChoice, 0, len(response.Choices)),
	}

	for _, choice := range response.Choices {
		// Guard: the original indexed Messages[0] unconditionally, which
		// panics on a malformed choice with no messages — skip those.
		if len(choice.Messages) == 0 {
			continue
		}
		openaiChoice := types.ChatCompletionChoice{
			FinishReason: convertFinishReason(choice.FinishReason),
		}
		if choice.Messages[0].FunctionCall != nil {
			if request.Functions != nil {
				// Caller used the legacy functions API.
				openaiChoice.Message.FunctionCall = choice.Messages[0].FunctionCall
			} else {
				// Caller used tools; wrap the call accordingly.
				openaiChoice.Message.ToolCalls = append(openaiChoice.Message.ToolCalls, &types.ChatCompletionToolCalls{
					Type:     types.ChatMessageRoleFunction,
					Function: choice.Messages[0].FunctionCall,
				})
			}
		} else {
			openaiChoice.Message.Role = choice.Messages[0].SenderName
			openaiChoice.Message.Content = choice.Messages[0].Text
		}
		openaiResponse.Choices = append(openaiResponse.Choices, openaiChoice)
	}

	// MiniMax only reports total_tokens: clamp the previously counted prompt
	// tokens and derive completion tokens from the difference. Guard against
	// a nil Usage pointer (it is omitempty in the response type).
	if response.Usage != nil {
		if response.Usage.TotalTokens < p.Usage.PromptTokens {
			p.Usage.PromptTokens = response.Usage.TotalTokens
		}
		p.Usage.TotalTokens = response.Usage.TotalTokens
		p.Usage.CompletionTokens = response.Usage.TotalTokens - p.Usage.PromptTokens
	}
	openaiResponse.Usage = p.Usage

	return
}
// convertFromChatOpenai translates an OpenAI chat request into the MiniMax
// chatcompletion_pro payload. System messages become bot_setting entries;
// every other message is mapped through convertRole.
func convertFromChatOpenai(request *types.ChatCompletionRequest) *MiniMaxChatRequest {
	var botSettings []MiniMaxBotSetting
	var messages []MiniMaxChatMessage // renamed from "messges" (typo)

	for _, message := range request.Messages {
		if message.Role == types.ChatMessageRoleSystem {
			// System prompts are carried in bot_setting, not in messages.
			botSettings = append(botSettings, MiniMaxBotSetting{
				BotName: types.ChatMessageRoleAssistant,
				Content: message.StringContent(),
			})
			continue
		}
		miniMessage := MiniMaxChatMessage{
			Text: message.StringContent(),
		}
		// A function-result message must be preceded by a message carrying a
		// function_call; synthesize a user message when there is none.
		if message.Role == types.ChatMessageRoleFunction {
			if len(messages) == 0 {
				messages = append(messages, MiniMaxChatMessage{
					SenderType: "USER",
					SenderName: types.ChatMessageRoleUser,
				})
			}
			// NOTE(review): "funciton"/"arguments" are hard-coded placeholders
			// (and "funciton" is misspelled); the real call name/arguments are
			// not propagated from the conversation — confirm intent upstream.
			messages[len(messages)-1].FunctionCall = &types.ChatCompletionToolCallsFunction{
				Name:      "funciton",
				Arguments: "arguments",
			}
		}
		miniMessage.SenderType, miniMessage.SenderName = convertRole(message.Role)
		messages = append(messages, miniMessage)
	}

	// MiniMax requires at least one bot setting.
	if len(botSettings) == 0 {
		botSettings = append(botSettings, defaultBot())
	}

	miniRequest := &MiniMaxChatRequest{
		Model:            request.Model,
		Messages:         messages,
		Stream:           request.Stream,
		Temperature:      request.Temperature,
		TopP:             request.TopP,
		TokensToGenerate: request.MaxTokens,
		BotSetting:       botSettings,
		ReplyConstraints: defaultReplyConstraints(),
	}

	// Prefer the legacy functions field; otherwise flatten tools.
	if request.Functions != nil {
		miniRequest.Functions = request.Functions
	} else if request.Tools != nil {
		miniRequest.Functions = make([]*types.ChatCompletionFunction, 0, len(request.Tools))
		for _, tool := range request.Tools {
			// Shadow the loop variable: before Go 1.22, &tool.Function on the
			// shared loop variable made every appended pointer alias the last
			// tool's Function.
			tool := tool
			miniRequest.Functions = append(miniRequest.Functions, &tool.Function)
		}
	}
	return miniRequest
}
// handlerStream parses one SSE line from MiniMax into OpenAI stream chunks.
// Lines without a "data: " prefix are skipped. (The original comment also
// mentioned a "meta:" prefix, but only "data: " was ever handled.)
func (h *minimaxStreamHandler) handlerStream(rawLine *[]byte, isFinished *bool, response *[]types.ChatCompletionStreamResponse) error {
	if !strings.HasPrefix(string(*rawLine), "data: ") {
		*rawLine = nil
		return nil
	}
	// Strip the "data: " prefix (6 bytes) before decoding.
	*rawLine = (*rawLine)[6:]

	miniResponse := &MiniMaxChatResponse{}
	if err := json.Unmarshal(*rawLine, miniResponse); err != nil {
		return common.ErrorToOpenAIError(err)
	}

	if openaiErr := errorHandle(&miniResponse.BaseResp); openaiErr != nil {
		return openaiErr
	}

	// Guard: the original indexed Choices[0].Messages[0] unconditionally,
	// panicking on a chunk without choices/messages. Record usage if present
	// and skip such chunks instead.
	if len(miniResponse.Choices) == 0 || len(miniResponse.Choices[0].Messages) == 0 {
		if miniResponse.Usage != nil {
			h.handleUsage(miniResponse)
		}
		*rawLine = nil
		return nil
	}

	// Intermediate function-call fragments (no finish reason yet) are not
	// forwarded; only the final function-call chunk is converted.
	choice := miniResponse.Choices[0]
	if choice.Messages[0].FunctionCall != nil && choice.FinishReason == "" {
		*rawLine = nil
		return nil
	}

	return h.convertToOpenaiStream(miniResponse, response)
}
// convertToOpenaiStream translates one parsed MiniMax chunk into one or more
// OpenAI stream chunks appended to response. Callers must ensure Choices and
// Choices[0].Messages are non-empty (handlerStream indexes them first).
func (h *minimaxStreamHandler) convertToOpenaiStream(miniResponse *MiniMaxChatResponse, response *[]types.ChatCompletionStreamResponse) error {
streamResponse := types.ChatCompletionStreamResponse{
ID: miniResponse.RequestID,
Object: "chat.completion.chunk",
Created: miniResponse.Created,
Model: h.Request.Model,
}
miniChoice := miniResponse.Choices[0]
openaiChoice := types.ChatCompletionStreamChoice{}
// Final text chunk: carries only the finish reason, and switches the chunk
// ID from the per-request RequestID to the definitive response ID.
if miniChoice.Messages[0].FunctionCall == nil && miniChoice.FinishReason != "" {
streamResponse.ID = miniResponse.ID
openaiChoice.FinishReason = convertFinishReason(miniChoice.FinishReason)
h.appendResponse(&streamResponse, &openaiChoice, response)
return nil
}
openaiChoice.Delta = types.ChatCompletionStreamChoiceDelta{
Role: miniChoice.Messages[0].SenderName,
}
if miniChoice.Messages[0].FunctionCall != nil {
// A function call may expand into several OpenAI chunks; each append
// gets its own value copy of the base response so later mutation of
// Choices does not alias across chunks.
h.handleFunctionCall(&miniChoice, &openaiChoice)
convertChoices := openaiChoice.ConvertOpenaiStream()
for _, convertChoice := range convertChoices {
chatCompletionCopy := streamResponse
h.appendResponse(&chatCompletionCopy, &convertChoice, response)
}
} else {
openaiChoice.Delta.Content = miniChoice.Messages[0].Text
h.appendResponse(&streamResponse, &openaiChoice, response)
}
// Usage is only present on the final chunk of the stream.
if miniResponse.Usage != nil {
h.handleUsage(miniResponse)
}
return nil
}
// handleFunctionCall copies the MiniMax function call into the stream delta:
// the legacy function_call field when the caller used functions, tool calls
// otherwise.
func (h *minimaxStreamHandler) handleFunctionCall(choice *Choice, openaiChoice *types.ChatCompletionStreamChoice) {
	call := choice.Messages[0].FunctionCall
	if h.Request.Functions != nil {
		openaiChoice.Delta.FunctionCall = call
		return
	}
	toolCall := &types.ChatCompletionToolCalls{
		Type:     types.ChatMessageRoleFunction,
		Function: call,
	}
	openaiChoice.Delta.ToolCalls = append(openaiChoice.Delta.ToolCalls, toolCall)
}
// appendResponse attaches the single choice to streamResponse and appends a
// value copy of it to the output slice.
func (h *minimaxStreamHandler) appendResponse(streamResponse *types.ChatCompletionStreamResponse, openaiChoice *types.ChatCompletionStreamChoice, response *[]types.ChatCompletionStreamResponse) {
	choices := []types.ChatCompletionStreamChoice{*openaiChoice}
	streamResponse.Choices = choices
	*response = append(*response, *streamResponse)
}
// handleUsage folds the chunk's total_tokens into the shared usage counters.
// MiniMax reports only totals, so completion tokens are derived by subtracting
// the (possibly clamped) prompt tokens.
func (h *minimaxStreamHandler) handleUsage(miniResponse *MiniMaxChatResponse) {
	total := miniResponse.Usage.TotalTokens
	if total < h.Usage.PromptTokens {
		h.Usage.PromptTokens = total
	}
	h.Usage.TotalTokens = total
	h.Usage.CompletionTokens = total - h.Usage.PromptTokens
}

View File

@ -0,0 +1,93 @@
package minimax
import (
"net/http"
"one-api/common"
"one-api/types"
)
// CreateEmbeddings performs an embeddings request against MiniMax and
// converts the result to OpenAI format.
func (p *MiniMaxProvider) CreateEmbeddings(request *types.EmbeddingRequest) (*types.EmbeddingResponse, *types.OpenAIErrorWithStatusCode) {
	url, errWithCode := p.GetSupportedAPIUri(common.RelayModeEmbeddings)
	if errWithCode != nil {
		return nil, errWithCode
	}

	// An empty URL means the channel key was not in "APISecret|groupID" form.
	fullRequestURL := p.GetFullRequestURL(url, request.Model)
	if fullRequestURL == "" {
		return nil, common.ErrorWrapper(nil, "invalid_minimax_config", http.StatusInternalServerError)
	}

	headers := p.GetRequestHeaders()
	payload := convertFromEmbeddingOpenai(request)

	req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(payload), p.Requester.WithHeader(headers))
	if err != nil {
		return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
	}
	defer req.Body.Close()

	minimaxResponse := &MiniMaxEmbeddingResponse{}
	if _, errWithCode = p.Requester.SendRequest(req, minimaxResponse, false); errWithCode != nil {
		return nil, errWithCode
	}
	return p.convertToEmbeddingOpenai(minimaxResponse, request)
}
// convertFromEmbeddingOpenai maps the OpenAI embedding request onto MiniMax's
// payload. Input may be a single string or a list; non-string list items are
// skipped. Type "db" matches the original behavior (document embeddings).
func convertFromEmbeddingOpenai(request *types.EmbeddingRequest) *MiniMaxEmbeddingRequest {
	minimaxRequest := &MiniMaxEmbeddingRequest{
		Model: request.Model,
		Type:  "db",
	}
	switch input := request.Input.(type) {
	case string:
		minimaxRequest.Texts = []string{input}
	case []string:
		// Generalization: the original handled only string and []any, so a
		// concrete []string silently produced an empty texts payload.
		minimaxRequest.Texts = append(minimaxRequest.Texts, input...)
	case []any:
		for _, item := range input {
			if text, ok := item.(string); ok {
				minimaxRequest.Texts = append(minimaxRequest.Texts, text)
			}
		}
	}
	return minimaxRequest
}
// convertToEmbeddingOpenai converts the MiniMax embedding response to OpenAI
// format and folds token usage into the provider state.
func (p *MiniMaxProvider) convertToEmbeddingOpenai(response *MiniMaxEmbeddingResponse, request *types.EmbeddingRequest) (openaiResponse *types.EmbeddingResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
	// Renamed from "error", which shadowed the predeclared identifier.
	if openaiErr := errorHandle(&response.BaseResp); openaiErr != nil {
		errWithCode = &types.OpenAIErrorWithStatusCode{
			OpenAIError: *openaiErr,
			StatusCode:  http.StatusBadRequest,
		}
		return
	}

	openaiResponse = &types.EmbeddingResponse{
		Object: "list",
		Model:  request.Model,
	}
	for _, vector := range response.Vectors {
		openaiResponse.Data = append(openaiResponse.Data, types.Embedding{
			Object:    "embedding",
			Embedding: vector,
		})
	}

	// MiniMax reports only total tokens: clamp the previously counted prompt
	// tokens and derive completion tokens from the difference.
	if response.TotalTokens < p.Usage.PromptTokens {
		p.Usage.PromptTokens = response.TotalTokens
	}
	p.Usage.TotalTokens = response.TotalTokens
	p.Usage.CompletionTokens = response.TotalTokens - p.Usage.PromptTokens
	openaiResponse.Usage = p.Usage

	return
}

79
providers/minimax/type.go Normal file
View File

@ -0,0 +1,79 @@
package minimax
import "one-api/types"
// MiniMaxChatRequest is the payload for /text/chatcompletion_pro.
type MiniMaxChatRequest struct {
Model string `json:"model"`
Stream bool `json:"stream,omitempty"`
TokensToGenerate int `json:"tokens_to_generate,omitempty"` // mapped from OpenAI max_tokens
Temperature float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
Messages []MiniMaxChatMessage `json:"messages"`
BotSetting []MiniMaxBotSetting `json:"bot_setting,omitempty"` // built from system messages (or defaultBot)
ReplyConstraints ReplyConstraints `json:"reply_constraints,omitempty"`
Functions []*types.ChatCompletionFunction `json:"functions,omitempty"` // legacy functions, or flattened tools
}
// MiniMaxChatMessage is one conversation turn; sender_type/sender_name come
// from convertRole, and function_call is set on FUNCTION turns.
type MiniMaxChatMessage struct {
SenderType string `json:"sender_type"`
SenderName string `json:"sender_name"`
Text string `json:"text"`
FunctionCall *types.ChatCompletionToolCallsFunction `json:"function_call,omitempty"`
}
// MiniMaxBotSetting carries a system prompt for a named bot.
type MiniMaxBotSetting struct {
BotName string `json:"bot_name"`
Content string `json:"content"`
}
// ReplyConstraints tells MiniMax which participant should produce the reply.
type ReplyConstraints struct {
SenderType string `json:"sender_type"`
SenderName string `json:"sender_name"`
}
// MiniMaxChatResponse is the (streaming or blocking) chatcompletion_pro
// response; base_resp carries the API-level status via MiniMaxBaseResp.
type MiniMaxChatResponse struct {
Created int64 `json:"created"`
Model string `json:"model"`
Reply string `json:"reply"`
InputSensitive bool `json:"input_sensitive,omitempty"`
InputSensitiveType int64 `json:"input_sensitive_type,omitempty"`
OutputSensitive bool `json:"output_sensitive"`
OutputSensitiveType int64 `json:"output_sensitive_type,omitempty"`
Choices []Choice `json:"choices"`
Usage *Usage `json:"usage,omitempty"` // nil on intermediate stream chunks
ID string `json:"id,omitempty"` // definitive id, present on the final chunk
RequestID string `json:"request_id"`
FunctionCall *types.ChatCompletionToolCallsFunction `json:"function_call,omitempty"`
MiniMaxBaseResp
}
// Choice is one candidate reply, itself a list of messages.
type Choice struct {
Messages []MiniMaxChatMessage `json:"messages"`
Index int `json:"index"`
FinishReason string `json:"finish_reason"`
}
// Usage reports token consumption; MiniMax only exposes the total.
type Usage struct {
TotalTokens int `json:"total_tokens"`
}
// MiniMaxBaseResp embeds the API status common to all responses.
type MiniMaxBaseResp struct {
BaseResp BaseResp `json:"base_resp"`
}
// BaseResp is the MiniMax status envelope; StatusCode 0 means success.
type BaseResp struct {
StatusCode int64 `json:"status_code"`
StatusMsg string `json:"status_msg"`
}
// MiniMaxEmbeddingRequest is the payload for /embeddings.
type MiniMaxEmbeddingRequest struct {
Model string `json:"model"`
Texts []string `json:"texts"`
Type string `json:"type"` // "db" is used by convertFromEmbeddingOpenai
}
// MiniMaxEmbeddingResponse carries one vector per input text plus the total
// token count and the common base_resp status.
type MiniMaxEmbeddingResponse struct {
Vectors []any `json:"vectors"`
TotalTokens int `json:"total_tokens"`
MiniMaxBaseResp
}

View File

@ -16,6 +16,7 @@ import (
"one-api/providers/claude"
"one-api/providers/closeai"
"one-api/providers/gemini"
"one-api/providers/minimax"
"one-api/providers/openai"
"one-api/providers/openaisb"
"one-api/providers/palm"
@ -54,6 +55,7 @@ func init() {
providerFactories[common.ChannelTypeAzureSpeech] = azurespeech.AzureSpeechProviderFactory{}
providerFactories[common.ChannelTypeGemini] = gemini.GeminiProviderFactory{}
providerFactories[common.ChannelTypeBaichuan] = baichuan.BaichuanProviderFactory{}
providerFactories[common.ChannelTypeMiniMax] = minimax.MiniMaxProviderFactory{}
}

View File

@ -71,6 +71,12 @@ export const CHANNEL_OPTIONS = {
value: 26,
color: 'orange'
},
27: {
key: 27,
text: 'MiniMax',
value: 27,
color: 'orange'
},
24: {
key: 24,
text: 'Azure Speech',

View File

@ -166,6 +166,15 @@ const typeConfig = {
prompt: {
test_model: ''
}
},
27: {
input: {
models: ['abab5.5-chat', 'abab5.5s-chat', 'abab6-chat', 'embo-01'],
test_model: 'abab5.5-chat'
},
prompt: {
key: '按照如下格式输入APISecret|groupID'
}
}
};