feat: add Proxy channel type and relay mode (#1678)
Add the Proxy channel type and relay mode to support proxying requests to custom upstream services.
parent 296ab013b8
commit c936198ac8
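How the new relay path works end to end: a request to /v1/oneapi/proxy/:channelid/*target is authenticated by TokenAuth (which also pins the channel via the channelid path parameter), dispatched to RelayProxyHelper, rewritten by the proxy adaptor to the channel's BaseURL plus the *target path with the channel's key substituted into the Authorization header, and the upstream status, headers, and body are copied back to the client. A minimal client-side sketch of calling it; the host, token, channel id, and upstream path below are placeholders, not values from this commit:

    package main

    import (
        "fmt"
        "io"
        "net/http"
    )

    func main() {
        // Placeholder deployment: one-api at localhost:3000, Proxy channel id 7
        // whose BaseURL points at an internal service.
        req, err := http.NewRequest(http.MethodGet, "http://localhost:3000/v1/oneapi/proxy/7/api/ping", nil)
        if err != nil {
            panic(err)
        }
        // A one-api token; the relay replaces it with the channel's APIKey
        // before forwarding to the upstream.
        req.Header.Set("Authorization", "Bearer sk-oneapi-placeholder-token")

        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()

        // The upstream's status, headers, and body are relayed back unchanged.
        body, _ := io.ReadAll(resp.Body)
        fmt.Println(resp.Status, string(body))
    }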
@@ -34,6 +34,8 @@ func relayHelper(c *gin.Context, relayMode int) *model.ErrorWithStatusCode {
 		fallthrough
 	case relaymode.AudioTranscription:
 		err = controller.RelayAudioHelper(c, relayMode)
+	case relaymode.Proxy:
+		err = controller.RelayProxyHelper(c, relayMode)
 	default:
 		err = controller.RelayTextHelper(c)
 	}
@@ -85,12 +87,15 @@ func Relay(c *gin.Context) {
 		channelId := c.GetInt(ctxkey.ChannelId)
 		lastFailedChannelId = channelId
 		channelName := c.GetString(ctxkey.ChannelName)
+		// BUG: bizErr is in race condition
 		go processChannelRelayError(ctx, userId, channelId, channelName, bizErr)
 	}
 	if bizErr != nil {
 		if bizErr.StatusCode == http.StatusTooManyRequests {
 			bizErr.Error.Message = "当前分组上游负载已饱和,请稍后再试"
 		}
+
+		// BUG: bizErr is in race condition
 		bizErr.Error.Message = helper.MessageWithRequestId(bizErr.Error.Message, requestId)
 		c.JSON(bizErr.StatusCode, gin.H{
 			"error": bizErr.Error,
@@ -140,6 +140,12 @@ func TokenAuth() func(c *gin.Context) {
 				return
 			}
 		}
+
+		// set channel id for proxy relay
+		if channelId := c.Param("channelid"); channelId != "" {
+			c.Set(ctxkey.SpecificChannelId, channelId)
+		}
+
 		c.Next()
 	}
 }
@@ -15,6 +15,7 @@ import (
 	"github.com/songquanpeng/one-api/relay/adaptor/ollama"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 	"github.com/songquanpeng/one-api/relay/adaptor/palm"
+	"github.com/songquanpeng/one-api/relay/adaptor/proxy"
 	"github.com/songquanpeng/one-api/relay/adaptor/tencent"
 	"github.com/songquanpeng/one-api/relay/adaptor/vertexai"
 	"github.com/songquanpeng/one-api/relay/adaptor/xunfei"
@@ -58,6 +59,8 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
 		return &deepl.Adaptor{}
 	case apitype.VertexAI:
 		return &vertexai.Adaptor{}
+	case apitype.Proxy:
+		return &proxy.Adaptor{}
 	}
 	return nil
 }
relay/adaptor/proxy/adaptor.go (new file, 89 lines)
@@ -0,0 +1,89 @@
+package proxy
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+	"strings"
+
+	"github.com/gin-gonic/gin"
+	"github.com/pkg/errors"
+	"github.com/songquanpeng/one-api/relay/adaptor"
+	channelhelper "github.com/songquanpeng/one-api/relay/adaptor"
+	"github.com/songquanpeng/one-api/relay/meta"
+	"github.com/songquanpeng/one-api/relay/model"
+	relaymodel "github.com/songquanpeng/one-api/relay/model"
+)
+
+var _ adaptor.Adaptor = new(Adaptor)
+
+const channelName = "proxy"
+
+type Adaptor struct{}
+
+func (a *Adaptor) Init(meta *meta.Meta) {
+}
+
+func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
+	return nil, errors.New("notimplement")
+}
+
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
+	for k, v := range resp.Header {
+		for _, vv := range v {
+			c.Writer.Header().Set(k, vv)
+		}
+	}
+
+	c.Writer.WriteHeader(resp.StatusCode)
+	if _, gerr := io.Copy(c.Writer, resp.Body); gerr != nil {
+		return nil, &relaymodel.ErrorWithStatusCode{
+			StatusCode: http.StatusInternalServerError,
+			Error: relaymodel.Error{
+				Message: gerr.Error(),
+			},
+		}
+	}
+
+	return nil, nil
+}
+
+func (a *Adaptor) GetModelList() (models []string) {
+	return nil
+}
+
+func (a *Adaptor) GetChannelName() string {
+	return channelName
+}
+
+// GetRequestURL remove static prefix, and return the real request url to the upstream service
+func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
+	prefix := fmt.Sprintf("/v1/oneapi/proxy/%d", meta.ChannelId)
+	return meta.BaseURL + strings.TrimPrefix(meta.RequestURLPath, prefix), nil
+
+}
+
+func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
+	for k, v := range c.Request.Header {
+		req.Header.Set(k, v[0])
+	}
+
+	// remove unnecessary headers
+	req.Header.Del("Host")
+	req.Header.Del("Content-Length")
+	req.Header.Del("Accept-Encoding")
+	req.Header.Del("Connection")
+
+	// set authorization header
+	req.Header.Set("Authorization", meta.APIKey)
+
+	return nil
+}
+
+func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
+	return nil, errors.Errorf("not implement")
+}
+
+func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
+	return channelhelper.DoRequestHelper(a, c, meta, requestBody)
+}
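To make the URL rewrite above concrete: GetRequestURL drops the /v1/oneapi/proxy/<channelId> routing prefix and prepends the channel's BaseURL. A minimal sketch of that mapping, using made-up values (channel id 7, BaseURL https://internal.example.com); only the prefix format and the TrimPrefix call are taken from the adaptor:

    package main

    import (
        "fmt"
        "strings"
    )

    func main() {
        // Hypothetical values for illustration only.
        channelId := 7
        baseURL := "https://internal.example.com"       // the Proxy channel's BaseURL
        requestURLPath := "/v1/oneapi/proxy/7/api/ping" // path as received by one-api

        // Same rewrite GetRequestURL performs: strip the routing prefix, keep the target path.
        prefix := fmt.Sprintf("/v1/oneapi/proxy/%d", channelId)
        upstreamURL := baseURL + strings.TrimPrefix(requestURLPath, prefix)

        fmt.Println(upstreamURL) // https://internal.example.com/api/ping
    }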
@@ -18,6 +18,7 @@ const (
 	Cloudflare
 	DeepL
 	VertexAI
+	Proxy
 
 	Dummy // this one is only for count, do not add any channel after this
 )
@@ -44,5 +44,6 @@ const (
 	Doubao
 	Novita
 	VertextAI
+	Proxy
 	Dummy
 )
@@ -37,6 +37,8 @@ func ToAPIType(channelType int) int {
 		apiType = apitype.DeepL
 	case VertextAI:
 		apiType = apitype.VertexAI
+	case Proxy:
+		apiType = apitype.Proxy
 	}
 
 	return apiType
@@ -44,6 +44,7 @@ var ChannelBaseURLs = []string{
 	"https://ark.cn-beijing.volces.com", // 40
 	"https://api.novita.ai/v3/openai",   // 41
 	"",                                  // 42
+	"",                                  // 43
 }
 
 func init() {
relay/controller/proxy.go (new file, 41 lines)
@@ -0,0 +1,41 @@
+// Package controller is a package for handling the relay controller
+package controller
+
+import (
+	"fmt"
+	"net/http"
+
+	"github.com/gin-gonic/gin"
+	"github.com/songquanpeng/one-api/common/logger"
+	"github.com/songquanpeng/one-api/relay"
+	"github.com/songquanpeng/one-api/relay/adaptor/openai"
+	"github.com/songquanpeng/one-api/relay/meta"
+	relaymodel "github.com/songquanpeng/one-api/relay/model"
+)
+
+// RelayProxyHelper is a helper function to proxy the request to the upstream service
+func RelayProxyHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode {
+	ctx := c.Request.Context()
+	meta := meta.GetByContext(c)
+
+	adaptor := relay.GetAdaptor(meta.APIType)
+	if adaptor == nil {
+		return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest)
+	}
+	adaptor.Init(meta)
+
+	resp, err := adaptor.DoRequest(c, meta, c.Request.Body)
+	if err != nil {
+		logger.Errorf(ctx, "DoRequest failed: %s", err.Error())
+		return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
+	}
+
+	// do response
+	_, respErr := adaptor.DoResponse(c, resp, meta)
+	if respErr != nil {
+		logger.Errorf(ctx, "respErr is not nil: %+v", respErr)
+		return respErr
+	}
+
+	return nil
+}
@@ -18,6 +18,7 @@ type Meta struct {
 	UserId       int
 	Group        string
 	ModelMapping map[string]string
+	// BaseURL is the proxy url set in the channel config
 	BaseURL string
 	APIKey  string
 	APIType int
@@ -11,4 +11,6 @@ const (
 	AudioSpeech
 	AudioTranscription
 	AudioTranslation
+	// Proxy is a special relay mode for proxying requests to custom upstream
+	Proxy
 )
@@ -24,6 +24,8 @@ func GetByPath(path string) int {
 		relayMode = AudioTranscription
 	} else if strings.HasPrefix(path, "/v1/audio/translations") {
 		relayMode = AudioTranslation
+	} else if strings.HasPrefix(path, "/v1/oneapi/proxy") {
+		relayMode = Proxy
 	}
 	return relayMode
 }
@@ -19,6 +19,7 @@ func SetRelayRouter(router *gin.Engine) {
 	relayV1Router := router.Group("/v1")
 	relayV1Router.Use(middleware.RelayPanicRecover(), middleware.TokenAuth(), middleware.Distribute())
 	{
+		relayV1Router.Any("/oneapi/proxy/:channelid/*target", controller.Relay)
 		relayV1Router.POST("/completions", controller.Relay)
 		relayV1Router.POST("/chat/completions", controller.Relay)
 		relayV1Router.POST("/edits", controller.Relay)
@@ -1,10 +1,13 @@
 export const CHANNEL_OPTIONS = [
   { key: 1, text: 'OpenAI', value: 1, color: 'green' },
   { key: 14, text: 'Anthropic Claude', value: 14, color: 'black' },
+  { key: 33, text: 'AWS', value: 33, color: 'black' },
   { key: 3, text: 'Azure OpenAI', value: 3, color: 'olive' },
   { key: 11, text: 'Google PaLM2', value: 11, color: 'orange' },
   { key: 24, text: 'Google Gemini', value: 24, color: 'orange' },
   { key: 28, text: 'Mistral AI', value: 28, color: 'orange' },
+  { key: 41, text: 'Novita', value: 41, color: 'purple' },
+  { key: 40, text: '字节跳动豆包', value: 40, color: 'blue' },
   { key: 15, text: '百度文心千帆', value: 15, color: 'blue' },
   { key: 17, text: '阿里通义千问', value: 17, color: 'orange' },
   { key: 18, text: '讯飞星火认知', value: 18, color: 'blue' },
@@ -17,6 +20,15 @@ export const CHANNEL_OPTIONS = [
   { key: 29, text: 'Groq', value: 29, color: 'orange' },
   { key: 30, text: 'Ollama', value: 30, color: 'black' },
   { key: 31, text: '零一万物', value: 31, color: 'green' },
+  { key: 32, text: '阶跃星辰', value: 32, color: 'blue' },
+  { key: 34, text: 'Coze', value: 34, color: 'blue' },
+  { key: 35, text: 'Cohere', value: 35, color: 'blue' },
+  { key: 36, text: 'DeepSeek', value: 36, color: 'black' },
+  { key: 37, text: 'Cloudflare', value: 37, color: 'orange' },
+  { key: 38, text: 'DeepL', value: 38, color: 'black' },
+  { key: 39, text: 'together.ai', value: 39, color: 'blue' },
+  { key: 42, text: 'VertexAI', value: 42, color: 'blue' },
+  { key: 43, text: 'Proxy', value: 43, color: 'blue' },
   { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
   { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' },
   { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' },
|
@ -167,6 +167,12 @@ export const CHANNEL_OPTIONS = {
|
|||||||
value: 42,
|
value: 42,
|
||||||
color: 'primary'
|
color: 'primary'
|
||||||
},
|
},
|
||||||
|
43: {
|
||||||
|
key: 43,
|
||||||
|
text: 'Proxy',
|
||||||
|
value: 43,
|
||||||
|
color: 'primary'
|
||||||
|
},
|
||||||
41: {
|
41: {
|
||||||
key: 41,
|
key: 41,
|
||||||
text: 'Novita',
|
text: 'Novita',
|
||||||
|
@@ -1,44 +1,45 @@
 export const CHANNEL_OPTIONS = [
-  {key: 1, text: 'OpenAI', value: 1, color: 'green'},
-  {key: 14, text: 'Anthropic Claude', value: 14, color: 'black'},
-  {key: 33, text: 'AWS', value: 33, color: 'black'},
-  {key: 3, text: 'Azure OpenAI', value: 3, color: 'olive'},
-  {key: 11, text: 'Google PaLM2', value: 11, color: 'orange'},
-  {key: 24, text: 'Google Gemini', value: 24, color: 'orange'},
-  {key: 28, text: 'Mistral AI', value: 28, color: 'orange'},
-  {key: 41, text: 'Novita', value: 41, color: 'purple'},
-  {key: 40, text: '字节跳动豆包', value: 40, color: 'blue'},
-  {key: 15, text: '百度文心千帆', value: 15, color: 'blue'},
-  {key: 17, text: '阿里通义千问', value: 17, color: 'orange'},
-  {key: 18, text: '讯飞星火认知', value: 18, color: 'blue'},
-  {key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet'},
-  {key: 19, text: '360 智脑', value: 19, color: 'blue'},
-  {key: 25, text: 'Moonshot AI', value: 25, color: 'black'},
-  {key: 23, text: '腾讯混元', value: 23, color: 'teal'},
-  {key: 26, text: '百川大模型', value: 26, color: 'orange'},
-  {key: 27, text: 'MiniMax', value: 27, color: 'red'},
-  {key: 29, text: 'Groq', value: 29, color: 'orange'},
-  {key: 30, text: 'Ollama', value: 30, color: 'black'},
-  {key: 31, text: '零一万物', value: 31, color: 'green'},
-  {key: 32, text: '阶跃星辰', value: 32, color: 'blue'},
-  {key: 34, text: 'Coze', value: 34, color: 'blue'},
-  {key: 35, text: 'Cohere', value: 35, color: 'blue'},
-  {key: 36, text: 'DeepSeek', value: 36, color: 'black'},
-  {key: 37, text: 'Cloudflare', value: 37, color: 'orange'},
-  {key: 38, text: 'DeepL', value: 38, color: 'black'},
-  {key: 39, text: 'together.ai', value: 39, color: 'blue'},
-  {key: 42, text: 'VertexAI', value: 42, color: 'blue'},
-  {key: 8, text: '自定义渠道', value: 8, color: 'pink'},
-  {key: 22, text: '知识库:FastGPT', value: 22, color: 'blue'},
-  {key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple'},
-  {key: 20, text: '代理:OpenRouter', value: 20, color: 'black'},
-  {key: 2, text: '代理:API2D', value: 2, color: 'blue'},
-  {key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown'},
-  {key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple'},
-  {key: 10, text: '代理:AI Proxy', value: 10, color: 'purple'},
-  {key: 4, text: '代理:CloseAI', value: 4, color: 'teal'},
-  {key: 6, text: '代理:OpenAI Max', value: 6, color: 'violet'},
-  {key: 9, text: '代理:AI.LS', value: 9, color: 'yellow'},
-  {key: 12, text: '代理:API2GPT', value: 12, color: 'blue'},
-  {key: 13, text: '代理:AIGC2D', value: 13, color: 'purple'}
+  { key: 1, text: 'OpenAI', value: 1, color: 'green' },
+  { key: 14, text: 'Anthropic Claude', value: 14, color: 'black' },
+  { key: 33, text: 'AWS', value: 33, color: 'black' },
+  { key: 3, text: 'Azure OpenAI', value: 3, color: 'olive' },
+  { key: 11, text: 'Google PaLM2', value: 11, color: 'orange' },
+  { key: 24, text: 'Google Gemini', value: 24, color: 'orange' },
+  { key: 28, text: 'Mistral AI', value: 28, color: 'orange' },
+  { key: 41, text: 'Novita', value: 41, color: 'purple' },
+  { key: 40, text: '字节跳动豆包', value: 40, color: 'blue' },
+  { key: 15, text: '百度文心千帆', value: 15, color: 'blue' },
+  { key: 17, text: '阿里通义千问', value: 17, color: 'orange' },
+  { key: 18, text: '讯飞星火认知', value: 18, color: 'blue' },
+  { key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet' },
+  { key: 19, text: '360 智脑', value: 19, color: 'blue' },
+  { key: 25, text: 'Moonshot AI', value: 25, color: 'black' },
+  { key: 23, text: '腾讯混元', value: 23, color: 'teal' },
+  { key: 26, text: '百川大模型', value: 26, color: 'orange' },
+  { key: 27, text: 'MiniMax', value: 27, color: 'red' },
+  { key: 29, text: 'Groq', value: 29, color: 'orange' },
+  { key: 30, text: 'Ollama', value: 30, color: 'black' },
+  { key: 31, text: '零一万物', value: 31, color: 'green' },
+  { key: 32, text: '阶跃星辰', value: 32, color: 'blue' },
+  { key: 34, text: 'Coze', value: 34, color: 'blue' },
+  { key: 35, text: 'Cohere', value: 35, color: 'blue' },
+  { key: 36, text: 'DeepSeek', value: 36, color: 'black' },
+  { key: 37, text: 'Cloudflare', value: 37, color: 'orange' },
+  { key: 38, text: 'DeepL', value: 38, color: 'black' },
+  { key: 39, text: 'together.ai', value: 39, color: 'blue' },
+  { key: 42, text: 'VertexAI', value: 42, color: 'blue' },
+  { key: 43, text: 'Proxy', value: 43, color: 'blue' },
+  { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
+  { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' },
+  { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' },
+  { key: 20, text: '代理:OpenRouter', value: 20, color: 'black' },
+  { key: 2, text: '代理:API2D', value: 2, color: 'blue' },
+  { key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown' },
+  { key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple' },
+  { key: 10, text: '代理:AI Proxy', value: 10, color: 'purple' },
+  { key: 4, text: '代理:CloseAI', value: 4, color: 'teal' },
+  { key: 6, text: '代理:OpenAI Max', value: 6, color: 'violet' },
+  { key: 9, text: '代理:AI.LS', value: 9, color: 'yellow' },
+  { key: 12, text: '代理:API2GPT', value: 12, color: 'blue' },
+  { key: 13, text: '代理:AIGC2D', value: 13, color: 'purple' }
 ];
@@ -170,7 +170,7 @@ const EditChannel = () => {
       showInfo('请填写渠道名称和渠道密钥!');
       return;
     }
-    if (inputs.models.length === 0) {
+    if (inputs.type !== 43 && inputs.models.length === 0) {
       showInfo('请至少选择一个模型!');
       return;
     }
@@ -370,6 +370,8 @@ const EditChannel = () => {
             </Message>
           )
         }
+        {
+          inputs.type !== 43 && (
         <Form.Field>
           <Form.Dropdown
             label='模型'
@@ -389,6 +391,10 @@ const EditChannel = () => {
             options={modelOptions}
           />
         </Form.Field>
+          )
+        }
+        {
+          inputs.type !== 43 && (
         <div style={{ lineHeight: '40px', marginBottom: '12px' }}>
           <Button type={'button'} onClick={() => {
             handleInputChange(null, { name: 'models', value: basicModels });
@@ -416,6 +422,10 @@ const EditChannel = () => {
           }}
           />
         </div>
+          )
+        }
+        {
+          inputs.type !== 43 && (
        <Form.Field>
          <Form.TextArea
            label='模型重定向'
@@ -427,6 +437,8 @@ const EditChannel = () => {
            autoComplete='new-password'
          />
        </Form.Field>
+          )
+        }
        {
          inputs.type === 33 && (
            <Form.Field>