ai-gateway/providers/openai/moderation.go

package openai

import (
	"net/http"

	"one-api/common"
	"one-api/types"
)
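
// ResponseHandler checks the parsed moderation response for an upstream API
// error; if one is present it is wrapped together with the HTTP status code.
// On success both return values are nil.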
func (c *OpenAIProviderModerationResponse) ResponseHandler(resp *http.Response) (OpenAIResponse any, errWithCode *types.OpenAIErrorWithStatusCode) {
	if c.Error.Type != "" {
		errWithCode = &types.OpenAIErrorWithStatusCode{
			OpenAIError: c.Error,
			StatusCode:  resp.StatusCode,
		}
		return
	}

	return nil, nil
}
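
// ModerationAction builds the moderation request body, sends it to the
// provider's moderation endpoint, and returns prompt-token usage on success.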
func (p *OpenAIProvider) ModerationAction(request *types.ModerationRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
	requestBody, err := p.GetRequestBody(&request, isModelMapped)
	if err != nil {
		return nil, common.ErrorWrapper(err, "json_marshal_failed", http.StatusInternalServerError)
	}

	fullRequestURL := p.GetFullRequestURL(p.Moderation, request.Model)
	headers := p.GetRequestHeaders()

	client := common.NewClient()
	req, err := client.NewRequest(p.Context.Request.Method, fullRequestURL, common.WithBody(requestBody), common.WithHeader(headers))
	if err != nil {
		return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
	}

	openAIProviderModerationResponse := &OpenAIProviderModerationResponse{}
	errWithCode = p.SendRequest(req, openAIProviderModerationResponse, true)
	if errWithCode != nil {
		return
	}
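
	// Moderation requests only consume prompt tokens; there is no completion
	// output, so the total equals the prompt token count.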
	usage = &types.Usage{
		PromptTokens:     promptTokens,
		CompletionTokens: 0,
		TotalTokens:      promptTokens,
	}

	return
}