From 1c7c2d40bbb7c86525f94d9cb3421e1bdd192ffe Mon Sep 17 00:00:00 2001
From: Martial BE
Date: Wed, 29 Nov 2023 16:54:37 +0800
Subject: [PATCH] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20refactor:=20=E9=87=8D?=
 =?UTF-8?q?=E6=9E=84moderation=E6=8E=A5=E5=8F=A3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .gitignore                     |  3 +-
 common/constants.go            | 13 +++++++
 controller/relay-text.go       | 69 ++++++++++++++++++++++++++++++++--
 controller/relay.go            | 30 ++++-----------
 providers/base/common.go       | 22 +++++++++++
 providers/base/interface.go    |  7 ++++
 providers/openai/base.go       |  1 +
 providers/openai/moderation.go | 49 ++++++++++++++++++++++++
 providers/openai/type.go       |  5 +++
 types/moderation.go            | 12 ++++++
 10 files changed, 183 insertions(+), 28 deletions(-)
 create mode 100644 providers/openai/moderation.go
 create mode 100644 types/moderation.go

diff --git a/.gitignore b/.gitignore
index 60abb13e..4eaf9868 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,4 +6,5 @@ upload
 build
 *.db-journal
 logs
-data
\ No newline at end of file
+data
+tmp/
\ No newline at end of file
diff --git a/common/constants.go b/common/constants.go
index c7d3f222..36e88769 100644
--- a/common/constants.go
+++ b/common/constants.go
@@ -214,3 +214,16 @@ var ChannelBaseURLs = []string{
 	"https://fastgpt.run/api/openapi",   // 22
 	"https://hunyuan.cloud.tencent.com", //23
 }
+
+const (
+	RelayModeUnknown = iota
+	RelayModeChatCompletions
+	RelayModeCompletions
+	RelayModeEmbeddings
+	RelayModeModerations
+	RelayModeImagesGenerations
+	RelayModeEdits
+	RelayModeAudioSpeech
+	RelayModeAudioTranscription
+	RelayModeAudioTranslation
+)
diff --git a/controller/relay-text.go b/controller/relay-text.go
index 528b0690..1f13a907 100644
--- a/controller/relay-text.go
+++ b/controller/relay-text.go
@@ -24,7 +24,11 @@ func relayTextHelper(c *gin.Context, relayMode int) *types.OpenAIErrorWithStatus
 	// 获取 Provider
 	provider := providers.GetProvider(channelType, c)
 	if provider == nil {
-		return types.ErrorWrapper(errors.New("channel not implemented"), "channel_not_implemented", http.StatusNotImplemented)
+		return types.ErrorWrapper(errors.New("channel not found"), "channel_not_found", http.StatusNotImplemented)
+	}
+
+	if !provider.SupportAPI(relayMode) {
+		return types.ErrorWrapper(errors.New("channel does not support this API"), "channel_not_support_api", http.StatusNotImplemented)
 	}
 
 	modelMap, err := parseModelMapping(c.GetString("model_mapping"))
@@ -45,12 +49,14 @@ func relayTextHelper(c *gin.Context, relayMode int) *types.OpenAIErrorWithStatus
 	var openAIErrorWithStatusCode *types.OpenAIErrorWithStatusCode
 
 	switch relayMode {
-	case RelayModeChatCompletions:
+	case common.RelayModeChatCompletions:
 		usage, openAIErrorWithStatusCode = handleChatCompletions(c, provider, modelMap, quotaInfo, group)
-	case RelayModeCompletions:
+	case common.RelayModeCompletions:
 		usage, openAIErrorWithStatusCode = handleCompletions(c, provider, modelMap, quotaInfo, group)
-	case RelayModeEmbeddings:
+	case common.RelayModeEmbeddings:
 		usage, openAIErrorWithStatusCode = handleEmbeddings(c, provider, modelMap, quotaInfo, group)
+	case common.RelayModeModerations:
+		usage, openAIErrorWithStatusCode = handleModerations(c, provider, modelMap, quotaInfo, group)
 	default:
 		return types.ErrorWrapper(errors.New("invalid relay mode"), "invalid_relay_mode", http.StatusBadRequest)
 	}
@@ -84,14 +90,21 @@ func relayTextHelper(c *gin.Context, relayMode int) *types.OpenAIErrorWithStatus
 func handleChatCompletions(c *gin.Context, provider providers_base.ProviderInterface, modelMap map[string]string, quotaInfo *QuotaInfo, group string) (*types.Usage, *types.OpenAIErrorWithStatusCode) {
 	var chatRequest types.ChatCompletionRequest
 	isModelMapped := false
+
 	chatProvider, ok := provider.(providers_base.ChatInterface)
 	if !ok {
 		return nil, types.ErrorWrapper(errors.New("channel not implemented"), "channel_not_implemented", http.StatusNotImplemented)
 	}
+
 	err := common.UnmarshalBodyReusable(c, &chatRequest)
 	if err != nil {
 		return nil, types.ErrorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
 	}
+
+	if chatRequest.Messages == nil || len(chatRequest.Messages) == 0 {
+		return nil, types.ErrorWrapper(errors.New("field messages is required"), "required_field_missing", http.StatusBadRequest)
+	}
+
 	if modelMap != nil && modelMap[chatRequest.Model] != "" {
 		chatRequest.Model = modelMap[chatRequest.Model]
 		isModelMapped = true
@@ -114,10 +127,16 @@ func handleCompletions(c *gin.Context, provider providers_base.ProviderInterface
 	if !ok {
 		return nil, types.ErrorWrapper(errors.New("channel not implemented"), "channel_not_implemented", http.StatusNotImplemented)
 	}
+
 	err := common.UnmarshalBodyReusable(c, &completionRequest)
 	if err != nil {
 		return nil, types.ErrorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
 	}
+
+	if completionRequest.Prompt == "" {
+		return nil, types.ErrorWrapper(errors.New("field prompt is required"), "required_field_missing", http.StatusBadRequest)
+	}
+
 	if modelMap != nil && modelMap[completionRequest.Model] != "" {
 		completionRequest.Model = modelMap[completionRequest.Model]
 		isModelMapped = true
@@ -140,10 +159,16 @@ func handleEmbeddings(c *gin.Context, provider providers_base.ProviderInterface,
 	if !ok {
 		return nil, types.ErrorWrapper(errors.New("channel not implemented"), "channel_not_implemented", http.StatusNotImplemented)
 	}
+
 	err := common.UnmarshalBodyReusable(c, &embeddingsRequest)
 	if err != nil {
 		return nil, types.ErrorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
 	}
+
+	if embeddingsRequest.Input == "" {
+		return nil, types.ErrorWrapper(errors.New("field input is required"), "required_field_missing", http.StatusBadRequest)
+	}
+
 	if modelMap != nil && modelMap[embeddingsRequest.Model] != "" {
 		embeddingsRequest.Model = modelMap[embeddingsRequest.Model]
 		isModelMapped = true
@@ -158,3 +183,39 @@ func handleEmbeddings(c *gin.Context, provider providers_base.ProviderInterface,
 	}
 	return embeddingsProvider.EmbeddingsAction(&embeddingsRequest, isModelMapped, promptTokens)
 }
+
+func handleModerations(c *gin.Context, provider providers_base.ProviderInterface, modelMap map[string]string, quotaInfo *QuotaInfo, group string) (*types.Usage, *types.OpenAIErrorWithStatusCode) {
+	var moderationRequest types.ModerationRequest
+	isModelMapped := false
+	moderationProvider, ok := provider.(providers_base.ModerationInterface)
+	if !ok {
+		return nil, types.ErrorWrapper(errors.New("channel not implemented"), "channel_not_implemented", http.StatusNotImplemented)
+	}
+
+	err := common.UnmarshalBodyReusable(c, &moderationRequest)
+	if err != nil {
+		return nil, types.ErrorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
+	}
+
+	if moderationRequest.Input == "" {
+		return nil, types.ErrorWrapper(errors.New("field input is required"), "required_field_missing", http.StatusBadRequest)
+	}
+
+	if moderationRequest.Model == "" {
+		moderationRequest.Model = "text-moderation-latest"
+	}
+
+	if modelMap != nil && modelMap[moderationRequest.Model] != "" {
+		moderationRequest.Model = modelMap[moderationRequest.Model]
+		isModelMapped = true
+	}
+	promptTokens := common.CountTokenInput(moderationRequest.Input, moderationRequest.Model)
+
+	quotaInfo.modelName = moderationRequest.Model
+	quotaInfo.initQuotaInfo(group)
+	quota_err := quotaInfo.preQuotaConsumption()
+	if quota_err != nil {
+		return nil, quota_err
+	}
+	return moderationProvider.ModerationAction(&moderationRequest, isModelMapped, promptTokens)
+}
diff --git a/controller/relay.go b/controller/relay.go
index dfa185e8..c577b848 100644
--- a/controller/relay.go
+++ b/controller/relay.go
@@ -56,19 +56,6 @@ func (m Message) StringContent() string {
 	return ""
 }
 
-const (
-	RelayModeUnknown = iota
-	RelayModeChatCompletions
-	RelayModeCompletions
-	RelayModeEmbeddings
-	RelayModeModerations
-	RelayModeImagesGenerations
-	RelayModeEdits
-	RelayModeAudioSpeech
-	RelayModeAudioTranscription
-	RelayModeAudioTranslation
-)
-
 // https://platform.openai.com/docs/api-reference/chat
 
 type ResponseFormat struct {
@@ -237,21 +224,18 @@ type CompletionsStreamResponse struct {
 
 func Relay(c *gin.Context) {
 	var err *types.OpenAIErrorWithStatusCode
-	relayMode := RelayModeUnknown
+	relayMode := common.RelayModeUnknown
 	if strings.HasPrefix(c.Request.URL.Path, "/v1/chat/completions") {
-		// err = relayChatHelper(c)
-		relayMode = RelayModeChatCompletions
+		relayMode = common.RelayModeChatCompletions
 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/completions") {
-		// err = relayCompletionHelper(c)
-		relayMode = RelayModeCompletions
+		relayMode = common.RelayModeCompletions
 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/embeddings") {
-		// err = relayEmbeddingsHelper(c)
-		relayMode = RelayModeEmbeddings
+		relayMode = common.RelayModeEmbeddings
 	} else if strings.HasSuffix(c.Request.URL.Path, "embeddings") {
-		relayMode = RelayModeEmbeddings
+		relayMode = common.RelayModeEmbeddings
+	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") {
+		relayMode = common.RelayModeModerations
 	}
-	// } else if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") {
-	// 	relayMode = RelayModeModerations
 	// } else if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") {
 	// 	relayMode = RelayModeImagesGenerations
 	// } else if strings.HasPrefix(c.Request.URL.Path, "/v1/edits") {
diff --git a/providers/base/common.go b/providers/base/common.go
index c02a2f38..17a27b29 100644
--- a/providers/base/common.go
+++ b/providers/base/common.go
@@ -21,6 +21,7 @@ type BaseProvider struct {
 	ChatCompletions      string
 	Embeddings           string
 	AudioSpeech          string
+	Moderation           string
 	AudioTranscriptions  string
 	AudioTranslations    string
 	Proxy                string
@@ -125,3 +126,24 @@ func (p *BaseProvider) HandleErrorResp(resp *http.Response) (openAIErrorWithStat
 	}
 	return
 }
+
+func (p *BaseProvider) SupportAPI(relayMode int) bool {
+	switch relayMode {
+	case common.RelayModeChatCompletions:
+		return p.ChatCompletions != ""
+	case common.RelayModeCompletions:
+		return p.Completions != ""
+	case common.RelayModeEmbeddings:
+		return p.Embeddings != ""
+	case common.RelayModeAudioSpeech:
+		return p.AudioSpeech != ""
+	case common.RelayModeAudioTranscription:
+		return p.AudioTranscriptions != ""
+	case common.RelayModeAudioTranslation:
+		return p.AudioTranslations != ""
+	case common.RelayModeModerations:
+		return p.Moderation != ""
+	default:
+		return false
+	}
+}
diff --git a/providers/base/interface.go b/providers/base/interface.go
index f0f8e4a0..90fac4d6 100644
--- a/providers/base/interface.go
+++ b/providers/base/interface.go
@@ -11,6 +11,7 @@ type ProviderInterface interface {
 	GetBaseURL() string
 	GetFullRequestURL(requestURL string, modelName string) string
 	GetRequestHeaders() (headers map[string]string)
+	SupportAPI(relayMode int) bool
 }
 
 // 完成接口
@@ -31,6 +32,12 @@ type EmbeddingsInterface interface {
 	EmbeddingsAction(request *types.EmbeddingRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode)
 }
 
+// 审查接口
+type ModerationInterface interface {
+	ProviderInterface
+	ModerationAction(request *types.ModerationRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode)
+}
+
 // 余额接口
 type BalanceInterface interface {
 	BalanceAction(channel *model.Channel) (float64, error)
diff --git a/providers/openai/base.go b/providers/openai/base.go
index bd4e9cd0..a2c6a008 100644
--- a/providers/openai/base.go
+++ b/providers/openai/base.go
@@ -34,6 +34,7 @@ func CreateOpenAIProvider(c *gin.Context, baseURL string) *OpenAIProvider {
 			Completions:          "/v1/completions",
 			ChatCompletions:      "/v1/chat/completions",
 			Embeddings:           "/v1/embeddings",
+			Moderation:           "/v1/moderations",
 			AudioSpeech:          "/v1/audio/speech",
 			AudioTranscriptions:  "/v1/audio/transcriptions",
 			AudioTranslations:    "/v1/audio/translations",
diff --git a/providers/openai/moderation.go b/providers/openai/moderation.go
new file mode 100644
index 00000000..67df21c1
--- /dev/null
+++ b/providers/openai/moderation.go
@@ -0,0 +1,49 @@
+package openai
+
+import (
+	"net/http"
+	"one-api/common"
+	"one-api/types"
+)
+
+func (c *OpenAIProviderModerationResponse) responseHandler(resp *http.Response) (errWithCode *types.OpenAIErrorWithStatusCode) {
+	if c.Error.Type != "" {
+		errWithCode = &types.OpenAIErrorWithStatusCode{
+			OpenAIError: c.Error,
+			StatusCode:  resp.StatusCode,
+		}
+		return
+	}
+	return nil
+}
+
+func (p *OpenAIProvider) ModerationAction(request *types.ModerationRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
+
+	requestBody, err := p.getRequestBody(&request, isModelMapped)
+	if err != nil {
+		return nil, types.ErrorWrapper(err, "json_marshal_failed", http.StatusInternalServerError)
+	}
+
+	fullRequestURL := p.GetFullRequestURL(p.Moderation, request.Model)
+	headers := p.GetRequestHeaders()
+
+	client := common.NewClient()
+	req, err := client.NewRequest(p.Context.Request.Method, fullRequestURL, common.WithBody(requestBody), common.WithHeader(headers))
+	if err != nil {
+		return nil, types.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
+	}
+
+	openAIProviderModerationResponse := &OpenAIProviderModerationResponse{}
+	errWithCode = p.sendRequest(req, openAIProviderModerationResponse)
+	if errWithCode != nil {
+		return
+	}
+
+	usage = &types.Usage{
+		PromptTokens:     promptTokens,
+		CompletionTokens: 0,
+		TotalTokens:      promptTokens,
+	}
+
+	return
+}
diff --git a/providers/openai/type.go b/providers/openai/type.go
index f8fee787..544e3e5f 100644
--- a/providers/openai/type.go
+++ b/providers/openai/type.go
@@ -21,3 +21,8 @@ type OpenAIProviderEmbeddingsResponse struct {
 	types.EmbeddingResponse
 	types.OpenAIErrorResponse
 }
+
+type OpenAIProviderModerationResponse struct {
+	types.ModerationResponse
+	types.OpenAIErrorResponse
+}
diff --git a/types/moderation.go b/types/moderation.go
new file mode 100644
index 00000000..ebf264f9
--- /dev/null
+++ b/types/moderation.go
@@ -0,0 +1,12 @@
+package types
+
+type ModerationRequest struct {
+	Input string `json:"input,omitempty"`
+	Model string `json:"model,omitempty"`
+}
+
+type ModerationResponse struct {
+	ID      string `json:"id"`
+	Model   string `json:"model"`
+	Results any    `json:"results"`
+}