Merge remote-tracking branch 'songquanpeng/main'

# Conflicts:
#	controller/relay-text.go
quzard 2023-06-25 14:12:30 +08:00
commit 1693435c3c
6 changed files with 43 additions and 13 deletions

View File

@@ -31,7 +31,7 @@ var ModelRatio = map[string]float64{
 	"curie": 10,
 	"babbage": 10,
 	"ada": 10,
-	"text-embedding-ada-002": 0.2,
+	"text-embedding-ada-002": 0.05,
 	"text-search-ada-doc-001": 10,
 	"text-moderation-stable": 0.1,
 	"text-moderation-latest": 0.1,

View File

@@ -224,6 +224,24 @@ func init() {
 			Root:       "text-moderation-stable",
 			Parent:     nil,
 		},
+		{
+			Id:         "text-davinci-edit-001",
+			Object:     "model",
+			Created:    1677649963,
+			OwnedBy:    "openai",
+			Permission: permission,
+			Root:       "text-davinci-edit-001",
+			Parent:     nil,
+		},
+		{
+			Id:         "code-davinci-edit-001",
+			Object:     "model",
+			Created:    1677649963,
+			OwnedBy:    "openai",
+			Permission: permission,
+			Root:       "code-davinci-edit-001",
+			Parent:     nil,
+		},
 	}
 	openAIModelsMap = make(map[string]OpenAIModels)
 	for _, model := range openAIModels {
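The two added entries register the edit models alongside the existing ones, so the loop above indexes them into openAIModelsMap like any other model. A minimal sketch of that registration pattern, using a trimmed-down illustrative struct rather than the repo's OpenAIModels type:

package main

import "fmt"

type model struct {
	Id      string
	Object  string
	Created int64
	OwnedBy string
	Root    string
}

func main() {
	models := []model{
		{Id: "text-davinci-edit-001", Object: "model", Created: 1677649963, OwnedBy: "openai", Root: "text-davinci-edit-001"},
		{Id: "code-davinci-edit-001", Object: "model", Created: 1677649963, OwnedBy: "openai", Root: "code-davinci-edit-001"},
	}
	byId := make(map[string]model)
	for _, m := range models {
		byId[m.Id] = m
	}
	// Once indexed, lookups (for example to answer a model-list or
	// model-detail query) succeed for the new entries.
	_, ok := byId["code-davinci-edit-001"]
	fmt.Println(ok) // true
}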

View File

@@ -28,7 +28,7 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			return errorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
 		}
 	}
-	if relayMode == RelayModeModeration && textRequest.Model == "" {
+	if relayMode == RelayModeModerations && textRequest.Model == "" {
 		textRequest.Model = "text-moderation-latest"
 	}
 	// request validation
@@ -38,16 +38,20 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 	switch relayMode {
 	case RelayModeCompletions:
 		if textRequest.Prompt == "" {
-			return errorWrapper(errors.New("prompt is required"), "required_field_missing", http.StatusBadRequest)
+			return errorWrapper(errors.New("field prompt is required"), "required_field_missing", http.StatusBadRequest)
 		}
 	case RelayModeChatCompletions:
-		if len(textRequest.Messages) == 0 {
-			return errorWrapper(errors.New("messages is required"), "required_field_missing", http.StatusBadRequest)
+		if textRequest.Messages == nil || len(textRequest.Messages) == 0 {
+			return errorWrapper(errors.New("field messages is required"), "required_field_missing", http.StatusBadRequest)
 		}
 	case RelayModeEmbeddings:
-	case RelayModeModeration:
+	case RelayModeModerations:
 		if textRequest.Input == "" {
-			return errorWrapper(errors.New("input is required"), "required_field_missing", http.StatusBadRequest)
+			return errorWrapper(errors.New("field input is required"), "required_field_missing", http.StatusBadRequest)
+		}
+	case RelayModeEdits:
+		if textRequest.Instruction == "" {
+			return errorWrapper(errors.New("field instruction is required"), "required_field_missing", http.StatusBadRequest)
 		}
 	}
 	baseURL := common.ChannelBaseURLs[channelType]
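The new RelayModeEdits branch rejects an edits request whose instruction field is empty. A minimal sketch of the payload such a request carries and of the check itself, using a trimmed-down illustrative struct rather than the full GeneralOpenAIRequest (the sample body is only an example):

package main

import (
	"encoding/json"
	"fmt"
)

type editRequest struct {
	Model       string `json:"model"`
	Input       any    `json:"input"`
	Instruction string `json:"instruction"`
}

func main() {
	body := []byte(`{"model":"text-davinci-edit-001","input":"What day of the wek is it?","instruction":"Fix the spelling mistakes"}`)
	var req editRequest
	if err := json.Unmarshal(body, &req); err != nil {
		panic(err)
	}
	// Mirrors the new validation: an empty instruction would be rejected
	// as required_field_missing.
	fmt.Println(req.Instruction != "") // true
}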
@@ -85,7 +89,7 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 		promptTokens = countTokenMessages(textRequest.Messages, textRequest.Model)
 	case RelayModeCompletions:
 		promptTokens = countTokenInput(textRequest.Prompt, textRequest.Model)
-	case RelayModeModeration:
+	case RelayModeModerations:
 		promptTokens = countTokenInput(textRequest.Input, textRequest.Model)
 	}
 	preConsumedTokens := common.PreConsumedQuota
@@ -145,7 +149,10 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 	defer func() {
 		if consumeQuota {
 			quota := 0
-			completionRatio := 1.333333 // default for gpt-3
+			completionRatio := 1.0
+			if strings.HasPrefix(textRequest.Model, "gpt-3.5") {
+				completionRatio = 1.333333
+			}
 			if strings.HasPrefix(textRequest.Model, "gpt-4") {
 				completionRatio = 2
 			}
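With this change the default completion ratio drops to 1.0, only models prefixed gpt-3.5 keep the 1.333333 weighting, and gpt-4 stays at 2. A minimal sketch of how such a ratio typically weights completion tokens against prompt tokens when quota is charged; the formula is an assumption for illustration, not the exact expression used later in this function:

package main

import "fmt"

// weightedTokens charges completion tokens at completionRatio times the
// prompt-token rate (illustrative helper, not from the repo).
func weightedTokens(promptTokens, completionTokens int, completionRatio float64) int {
	return promptTokens + int(float64(completionTokens)*completionRatio)
}

func main() {
	fmt.Println(weightedTokens(1000, 1000, 1.0))      // 2000, the new default
	fmt.Println(weightedTokens(1000, 1000, 1.333333)) // 2333, gpt-3.5 prefix
	fmt.Println(weightedTokens(1000, 1000, 2))        // 3000, gpt-4 prefix
}

The 1.333333 figure presumably reflects gpt-3.5-turbo output tokens costing $0.002 per 1K against $0.0015 for input, which would explain why only that prefix keeps the old value.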

View File

@@ -19,8 +19,9 @@ const (
 	RelayModeChatCompletions
 	RelayModeCompletions
 	RelayModeEmbeddings
-	RelayModeModeration
+	RelayModeModerations
 	RelayModeImagesGenerations
+	RelayModeEdits
 )
 
 // https://platform.openai.com/docs/api-reference/chat
@@ -35,6 +36,7 @@ type GeneralOpenAIRequest struct {
 	TopP        float64 `json:"top_p"`
 	N           int     `json:"n"`
 	Input       any     `json:"input"`
+	Instruction string  `json:"instruction"`
 }
 
 type ChatRequest struct {
@@ -100,9 +102,11 @@ func Relay(c *gin.Context) {
 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/embeddings") {
 		relayMode = RelayModeEmbeddings
 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") {
-		relayMode = RelayModeModeration
+		relayMode = RelayModeModerations
 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") {
 		relayMode = RelayModeImagesGenerations
+	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/edits") {
+		relayMode = RelayModeEdits
 	}
 	var err *OpenAIErrorWithStatusCode
 	switch relayMode {

View File

@@ -19,7 +19,7 @@ func SetRelayRouter(router *gin.Engine) {
 	{
 		relayV1Router.POST("/completions", controller.Relay)
 		relayV1Router.POST("/chat/completions", controller.Relay)
-		relayV1Router.POST("/edits", controller.RelayNotImplemented)
+		relayV1Router.POST("/edits", controller.Relay)
 		relayV1Router.POST("/images/generations", controller.RelayNotImplemented)
 		relayV1Router.POST("/images/edits", controller.RelayNotImplemented)
 		relayV1Router.POST("/images/variations", controller.RelayNotImplemented)

View File

@@ -2,6 +2,7 @@ import React, { useEffect, useState } from 'react';
 import { Button, Form, Header, Segment } from 'semantic-ui-react';
 import { useParams } from 'react-router-dom';
 import { API, showError, showSuccess } from '../../helpers';
+import { renderQuota, renderQuotaWithPrompt } from '../../helpers/render';
 
 const EditUser = () => {
   const params = useParams();
@@ -134,7 +135,7 @@ const EditUser = () => {
           </Form.Field>
           <Form.Field>
             <Form.Input
-              label='剩余额度'
+              label={`剩余额度${renderQuotaWithPrompt(quota)}`}
               name='quota'
               placeholder={'请输入新的剩余额度'}
               onChange={handleInputChange}