Merge branch 'main' into patch/images-edits
This commit is contained in:
commit
1264ddcef5
@ -84,6 +84,9 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用
|
|||||||
+ [x] [阶跃星辰](https://platform.stepfun.com/)
|
+ [x] [阶跃星辰](https://platform.stepfun.com/)
|
||||||
+ [x] [Coze](https://www.coze.com/)
|
+ [x] [Coze](https://www.coze.com/)
|
||||||
+ [x] [Cohere](https://cohere.com/)
|
+ [x] [Cohere](https://cohere.com/)
|
||||||
|
+ [x] [DeepSeek](https://www.deepseek.com/)
|
||||||
|
+ [x] [Cloudflare Workers AI](https://developers.cloudflare.com/workers-ai/)
|
||||||
|
+ [x] [DeepL](https://www.deepl.com/)
|
||||||
2. 支持配置镜像以及众多[第三方代理服务](https://iamazing.cn/page/openai-api-third-party-services)。
|
2. 支持配置镜像以及众多[第三方代理服务](https://iamazing.cn/page/openai-api-third-party-services)。
|
||||||
3. 支持通过**负载均衡**的方式访问多个渠道。
|
3. 支持通过**负载均衡**的方式访问多个渠道。
|
||||||
4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。
|
4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。
|
||||||
|
@ -1,13 +0,0 @@
|
|||||||
package ctxkey
|
|
||||||
|
|
||||||
const (
|
|
||||||
ConfigPrefix = "cfg_"
|
|
||||||
|
|
||||||
ConfigAPIVersion = ConfigPrefix + "api_version"
|
|
||||||
ConfigLibraryID = ConfigPrefix + "library_id"
|
|
||||||
ConfigPlugin = ConfigPrefix + "plugin"
|
|
||||||
ConfigSK = ConfigPrefix + "sk"
|
|
||||||
ConfigAK = ConfigPrefix + "ak"
|
|
||||||
ConfigRegion = ConfigPrefix + "region"
|
|
||||||
ConfigUserID = ConfigPrefix + "user_id"
|
|
||||||
)
|
|
@ -1,6 +1,7 @@
|
|||||||
package ctxkey
|
package ctxkey
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
Config = "config"
|
||||||
Id = "id"
|
Id = "id"
|
||||||
RequestId = "X-Oneapi-Request-Id"
|
RequestId = "X-Oneapi-Request-Id"
|
||||||
Username = "username"
|
Username = "username"
|
||||||
|
@ -2,6 +2,7 @@ package helper
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common/random"
|
"github.com/songquanpeng/one-api/common/random"
|
||||||
"html/template"
|
"html/template"
|
||||||
"log"
|
"log"
|
||||||
@ -105,6 +106,11 @@ func GenRequestID() string {
|
|||||||
return GetTimeString() + random.GetRandomNumberString(8)
|
return GetTimeString() + random.GetRandomNumberString(8)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func GetResponseID(c *gin.Context) string {
|
||||||
|
logID := c.GetString(RequestIdKey)
|
||||||
|
return fmt.Sprintf("chatcmpl-%s", logID)
|
||||||
|
}
|
||||||
|
|
||||||
func Max(a int, b int) int {
|
func Max(a int, b int) int {
|
||||||
if a >= b {
|
if a >= b {
|
||||||
return a
|
return a
|
||||||
|
5
common/helper/key.go
Normal file
5
common/helper/key.go
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
package helper
|
||||||
|
|
||||||
|
const (
|
||||||
|
RequestIdKey = "X-Oneapi-Request-Id"
|
||||||
|
)
|
@ -88,7 +88,7 @@ func logHelper(ctx context.Context, level string, msg string) {
|
|||||||
if level == loggerINFO {
|
if level == loggerINFO {
|
||||||
writer = gin.DefaultWriter
|
writer = gin.DefaultWriter
|
||||||
}
|
}
|
||||||
id := ctx.Value(ctxkey.RequestId)
|
id := ctx.Value(helper.RequestIdKey)
|
||||||
if id == nil {
|
if id == nil {
|
||||||
id = helper.GenRequestID()
|
id = helper.GenRequestID()
|
||||||
}
|
}
|
||||||
|
@ -5,6 +5,15 @@ import (
|
|||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"net/url"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/songquanpeng/one-api/common/config"
|
"github.com/songquanpeng/one-api/common/config"
|
||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
"github.com/songquanpeng/one-api/common/ctxkey"
|
||||||
"github.com/songquanpeng/one-api/common/logger"
|
"github.com/songquanpeng/one-api/common/logger"
|
||||||
@ -18,14 +27,6 @@ import (
|
|||||||
"github.com/songquanpeng/one-api/relay/meta"
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
relaymodel "github.com/songquanpeng/one-api/relay/model"
|
relaymodel "github.com/songquanpeng/one-api/relay/model"
|
||||||
"github.com/songquanpeng/one-api/relay/relaymode"
|
"github.com/songquanpeng/one-api/relay/relaymode"
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"net/http/httptest"
|
|
||||||
"net/url"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
)
|
)
|
||||||
@ -57,6 +58,8 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
|
|||||||
c.Request.Header.Set("Content-Type", "application/json")
|
c.Request.Header.Set("Content-Type", "application/json")
|
||||||
c.Set(ctxkey.Channel, channel.Type)
|
c.Set(ctxkey.Channel, channel.Type)
|
||||||
c.Set(ctxkey.BaseURL, channel.GetBaseURL())
|
c.Set(ctxkey.BaseURL, channel.GetBaseURL())
|
||||||
|
cfg, _ := channel.LoadConfig()
|
||||||
|
c.Set(ctxkey.Config, cfg)
|
||||||
middleware.SetupContextForSelectedChannel(c, channel, "")
|
middleware.SetupContextForSelectedChannel(c, channel, "")
|
||||||
meta := meta.GetByContext(c)
|
meta := meta.GetByContext(c)
|
||||||
apiType := channeltype.ToAPIType(channel.Type)
|
apiType := channeltype.ToAPIType(channel.Type)
|
||||||
@ -67,6 +70,7 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
|
|||||||
adaptor.Init(meta)
|
adaptor.Init(meta)
|
||||||
var modelName string
|
var modelName string
|
||||||
modelList := adaptor.GetModelList()
|
modelList := adaptor.GetModelList()
|
||||||
|
modelMap := channel.GetModelMapping()
|
||||||
if len(modelList) != 0 {
|
if len(modelList) != 0 {
|
||||||
modelName = modelList[0]
|
modelName = modelList[0]
|
||||||
}
|
}
|
||||||
@ -75,6 +79,9 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
|
|||||||
if len(modelNames) > 0 {
|
if len(modelNames) > 0 {
|
||||||
modelName = modelNames[0]
|
modelName = modelNames[0]
|
||||||
}
|
}
|
||||||
|
if modelMap != nil && modelMap[modelName] != "" {
|
||||||
|
modelName = modelMap[modelName]
|
||||||
|
}
|
||||||
}
|
}
|
||||||
request := buildTestRequest()
|
request := buildTestRequest()
|
||||||
request.Model = modelName
|
request.Model = modelName
|
||||||
|
@ -66,21 +66,29 @@ func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, mode
|
|||||||
c.Set(ctxkey.OriginalModel, modelName) // for retry
|
c.Set(ctxkey.OriginalModel, modelName) // for retry
|
||||||
c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
|
c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
|
||||||
c.Set(ctxkey.BaseURL, channel.GetBaseURL())
|
c.Set(ctxkey.BaseURL, channel.GetBaseURL())
|
||||||
|
cfg, _ := channel.LoadConfig()
|
||||||
// this is for backward compatibility
|
// this is for backward compatibility
|
||||||
switch channel.Type {
|
switch channel.Type {
|
||||||
case channeltype.Azure:
|
case channeltype.Azure:
|
||||||
c.Set(ctxkey.ConfigAPIVersion, channel.Other)
|
if cfg.APIVersion == "" {
|
||||||
|
cfg.APIVersion = channel.Other
|
||||||
|
}
|
||||||
case channeltype.Xunfei:
|
case channeltype.Xunfei:
|
||||||
c.Set(ctxkey.ConfigAPIVersion, channel.Other)
|
if cfg.APIVersion == "" {
|
||||||
|
cfg.APIVersion = channel.Other
|
||||||
|
}
|
||||||
case channeltype.Gemini:
|
case channeltype.Gemini:
|
||||||
c.Set(ctxkey.ConfigAPIVersion, channel.Other)
|
if cfg.APIVersion == "" {
|
||||||
|
cfg.APIVersion = channel.Other
|
||||||
|
}
|
||||||
case channeltype.AIProxyLibrary:
|
case channeltype.AIProxyLibrary:
|
||||||
c.Set(ctxkey.ConfigLibraryID, channel.Other)
|
if cfg.LibraryID == "" {
|
||||||
|
cfg.LibraryID = channel.Other
|
||||||
|
}
|
||||||
case channeltype.Ali:
|
case channeltype.Ali:
|
||||||
c.Set(ctxkey.ConfigPlugin, channel.Other)
|
if cfg.Plugin == "" {
|
||||||
}
|
cfg.Plugin = channel.Other
|
||||||
cfg, _ := channel.LoadConfig()
|
|
||||||
for k, v := range cfg {
|
|
||||||
c.Set(ctxkey.ConfigPrefix+k, v)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
c.Set(ctxkey.Config, cfg)
|
||||||
|
}
|
||||||
|
@ -4,14 +4,14 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
"github.com/songquanpeng/one-api/common/helper"
|
||||||
)
|
)
|
||||||
|
|
||||||
func SetUpLogger(server *gin.Engine) {
|
func SetUpLogger(server *gin.Engine) {
|
||||||
server.Use(gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string {
|
server.Use(gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string {
|
||||||
var requestID string
|
var requestID string
|
||||||
if param.Keys != nil {
|
if param.Keys != nil {
|
||||||
requestID = param.Keys[ctxkey.RequestId].(string)
|
requestID = param.Keys[helper.RequestIdKey].(string)
|
||||||
}
|
}
|
||||||
return fmt.Sprintf("[GIN] %s | %s | %3d | %13v | %15s | %7s %s\n",
|
return fmt.Sprintf("[GIN] %s | %s | %3d | %13v | %15s | %7s %s\n",
|
||||||
param.TimeStamp.Format("2006/01/02 - 15:04:05"),
|
param.TimeStamp.Format("2006/01/02 - 15:04:05"),
|
||||||
|
@ -11,10 +11,10 @@ import (
|
|||||||
func RequestId() func(c *gin.Context) {
|
func RequestId() func(c *gin.Context) {
|
||||||
return func(c *gin.Context) {
|
return func(c *gin.Context) {
|
||||||
id := helper.GenRequestID()
|
id := helper.GenRequestID()
|
||||||
c.Set(ctxkey.RequestId, id)
|
c.Set(helper.RequestIdKey, id)
|
||||||
ctx := context.WithValue(c.Request.Context(), ctxkey.RequestId, id)
|
ctx := context.WithValue(c.Request.Context(), helper.RequestIdKey, id)
|
||||||
c.Request = c.Request.WithContext(ctx)
|
c.Request = c.Request.WithContext(ctx)
|
||||||
c.Header(ctxkey.RequestId, id)
|
c.Header(helper.RequestIdKey, id)
|
||||||
c.Next()
|
c.Next()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -14,7 +14,7 @@ import (
|
|||||||
func abortWithMessage(c *gin.Context, statusCode int, message string) {
|
func abortWithMessage(c *gin.Context, statusCode int, message string) {
|
||||||
c.JSON(statusCode, gin.H{
|
c.JSON(statusCode, gin.H{
|
||||||
"error": gin.H{
|
"error": gin.H{
|
||||||
"message": helper.MessageWithRequestId(message, c.GetString(ctxkey.RequestId)),
|
"message": helper.MessageWithRequestId(message, c.GetString(helper.RequestIdKey)),
|
||||||
"type": "one_api_error",
|
"type": "one_api_error",
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
@ -38,6 +38,16 @@ type Channel struct {
|
|||||||
Config string `json:"config"`
|
Config string `json:"config"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type ChannelConfig struct {
|
||||||
|
Region string `json:"region,omitempty"`
|
||||||
|
SK string `json:"sk,omitempty"`
|
||||||
|
AK string `json:"ak,omitempty"`
|
||||||
|
UserID string `json:"user_id,omitempty"`
|
||||||
|
APIVersion string `json:"api_version,omitempty"`
|
||||||
|
LibraryID string `json:"library_id,omitempty"`
|
||||||
|
Plugin string `json:"plugin,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
func GetAllChannels(startIdx int, num int, scope string) ([]*Channel, error) {
|
func GetAllChannels(startIdx int, num int, scope string) ([]*Channel, error) {
|
||||||
var channels []*Channel
|
var channels []*Channel
|
||||||
var err error
|
var err error
|
||||||
@ -161,14 +171,14 @@ func (channel *Channel) Delete() error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (channel *Channel) LoadConfig() (map[string]string, error) {
|
func (channel *Channel) LoadConfig() (ChannelConfig, error) {
|
||||||
|
var cfg ChannelConfig
|
||||||
if channel.Config == "" {
|
if channel.Config == "" {
|
||||||
return nil, nil
|
return cfg, nil
|
||||||
}
|
}
|
||||||
cfg := make(map[string]string)
|
|
||||||
err := json.Unmarshal([]byte(channel.Config), &cfg)
|
err := json.Unmarshal([]byte(channel.Config), &cfg)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return cfg, err
|
||||||
}
|
}
|
||||||
return cfg, nil
|
return cfg, nil
|
||||||
}
|
}
|
||||||
|
@ -7,8 +7,10 @@ import (
|
|||||||
"github.com/songquanpeng/one-api/relay/adaptor/anthropic"
|
"github.com/songquanpeng/one-api/relay/adaptor/anthropic"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/aws"
|
"github.com/songquanpeng/one-api/relay/adaptor/aws"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/baidu"
|
"github.com/songquanpeng/one-api/relay/adaptor/baidu"
|
||||||
|
"github.com/songquanpeng/one-api/relay/adaptor/cloudflare"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/cohere"
|
"github.com/songquanpeng/one-api/relay/adaptor/cohere"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/coze"
|
"github.com/songquanpeng/one-api/relay/adaptor/coze"
|
||||||
|
"github.com/songquanpeng/one-api/relay/adaptor/deepl"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/gemini"
|
"github.com/songquanpeng/one-api/relay/adaptor/gemini"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/ollama"
|
"github.com/songquanpeng/one-api/relay/adaptor/ollama"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
||||||
@ -49,6 +51,10 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
|
|||||||
return &coze.Adaptor{}
|
return &coze.Adaptor{}
|
||||||
case apitype.Cohere:
|
case apitype.Cohere:
|
||||||
return &cohere.Adaptor{}
|
return &cohere.Adaptor{}
|
||||||
|
case apitype.Cloudflare:
|
||||||
|
return &cloudflare.Adaptor{}
|
||||||
|
case apitype.DeepL:
|
||||||
|
return &deepl.Adaptor{}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -4,7 +4,6 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor"
|
"github.com/songquanpeng/one-api/relay/adaptor"
|
||||||
"github.com/songquanpeng/one-api/relay/meta"
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
"github.com/songquanpeng/one-api/relay/model"
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
@ -13,10 +12,11 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Adaptor struct {
|
type Adaptor struct {
|
||||||
|
meta *meta.Meta
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) Init(meta *meta.Meta) {
|
func (a *Adaptor) Init(meta *meta.Meta) {
|
||||||
|
a.meta = meta
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
||||||
@ -34,7 +34,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
|
|||||||
return nil, errors.New("request is nil")
|
return nil, errors.New("request is nil")
|
||||||
}
|
}
|
||||||
aiProxyLibraryRequest := ConvertRequest(*request)
|
aiProxyLibraryRequest := ConvertRequest(*request)
|
||||||
aiProxyLibraryRequest.LibraryId = c.GetString(ctxkey.ConfigLibraryID)
|
aiProxyLibraryRequest.LibraryId = a.meta.Config.LibraryID
|
||||||
return aiProxyLibraryRequest, nil
|
return aiProxyLibraryRequest, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -4,7 +4,6 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor"
|
"github.com/songquanpeng/one-api/relay/adaptor"
|
||||||
"github.com/songquanpeng/one-api/relay/meta"
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
"github.com/songquanpeng/one-api/relay/model"
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
@ -16,10 +15,11 @@ import (
|
|||||||
// https://help.aliyun.com/zh/dashscope/developer-reference/api-details
|
// https://help.aliyun.com/zh/dashscope/developer-reference/api-details
|
||||||
|
|
||||||
type Adaptor struct {
|
type Adaptor struct {
|
||||||
|
meta *meta.Meta
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) Init(meta *meta.Meta) {
|
func (a *Adaptor) Init(meta *meta.Meta) {
|
||||||
|
a.meta = meta
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
||||||
@ -47,8 +47,8 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me
|
|||||||
if meta.Mode == relaymode.ImagesGenerations {
|
if meta.Mode == relaymode.ImagesGenerations {
|
||||||
req.Header.Set("X-DashScope-Async", "enable")
|
req.Header.Set("X-DashScope-Async", "enable")
|
||||||
}
|
}
|
||||||
if c.GetString(ctxkey.ConfigPlugin) != "" {
|
if a.meta.Config.Plugin != "" {
|
||||||
req.Header.Set("X-DashScope-Plugin", c.GetString(ctxkey.ConfigPlugin))
|
req.Header.Set("X-DashScope-Plugin", a.meta.Config.Plugin)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -4,6 +4,10 @@ import (
|
|||||||
"bufio"
|
"bufio"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common"
|
"github.com/songquanpeng/one-api/common"
|
||||||
"github.com/songquanpeng/one-api/common/helper"
|
"github.com/songquanpeng/one-api/common/helper"
|
||||||
@ -11,9 +15,6 @@ import (
|
|||||||
"github.com/songquanpeng/one-api/common/logger"
|
"github.com/songquanpeng/one-api/common/logger"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
||||||
"github.com/songquanpeng/one-api/relay/model"
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"strings"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func stopReasonClaude2OpenAI(reason *string) string {
|
func stopReasonClaude2OpenAI(reason *string) string {
|
||||||
@ -192,7 +193,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
|
|||||||
select {
|
select {
|
||||||
case data := <-dataChan:
|
case data := <-dataChan:
|
||||||
// some implementations may add \r at the end of data
|
// some implementations may add \r at the end of data
|
||||||
data = strings.TrimSuffix(data, "\r")
|
data = strings.TrimSpace(data)
|
||||||
var claudeResponse StreamResponse
|
var claudeResponse StreamResponse
|
||||||
err := json.Unmarshal([]byte(data), &claudeResponse)
|
err := json.Unmarshal([]byte(data), &claudeResponse)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -1,6 +1,9 @@
|
|||||||
package aws
|
package aws
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"github.com/aws/aws-sdk-go-v2/aws"
|
||||||
|
"github.com/aws/aws-sdk-go-v2/credentials"
|
||||||
|
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
|
||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
"github.com/songquanpeng/one-api/common/ctxkey"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
@ -16,10 +19,16 @@ import (
|
|||||||
var _ adaptor.Adaptor = new(Adaptor)
|
var _ adaptor.Adaptor = new(Adaptor)
|
||||||
|
|
||||||
type Adaptor struct {
|
type Adaptor struct {
|
||||||
|
meta *meta.Meta
|
||||||
|
awsClient *bedrockruntime.Client
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) Init(meta *meta.Meta) {
|
func (a *Adaptor) Init(meta *meta.Meta) {
|
||||||
|
a.meta = meta
|
||||||
|
a.awsClient = bedrockruntime.New(bedrockruntime.Options{
|
||||||
|
Region: meta.Config.Region,
|
||||||
|
Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(meta.Config.AK, meta.Config.SK, "")),
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
||||||
@ -54,9 +63,9 @@ func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Read
|
|||||||
|
|
||||||
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
|
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
|
||||||
if meta.IsStream {
|
if meta.IsStream {
|
||||||
err, usage = StreamHandler(c, resp)
|
err, usage = StreamHandler(c, a.awsClient)
|
||||||
} else {
|
} else {
|
||||||
err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
|
err, usage = Handler(c, a.awsClient, meta.ActualModelName)
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@ -65,7 +74,6 @@ func (a *Adaptor) GetModelList() (models []string) {
|
|||||||
for n := range awsModelIDMap {
|
for n := range awsModelIDMap {
|
||||||
models = append(models, n)
|
models = append(models, n)
|
||||||
}
|
}
|
||||||
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -10,7 +10,6 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/aws/aws-sdk-go-v2/aws"
|
"github.com/aws/aws-sdk-go-v2/aws"
|
||||||
"github.com/aws/aws-sdk-go-v2/credentials"
|
|
||||||
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
|
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
|
||||||
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types"
|
"github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types"
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
@ -23,18 +22,6 @@ import (
|
|||||||
relaymodel "github.com/songquanpeng/one-api/relay/model"
|
relaymodel "github.com/songquanpeng/one-api/relay/model"
|
||||||
)
|
)
|
||||||
|
|
||||||
func newAwsClient(c *gin.Context) (*bedrockruntime.Client, error) {
|
|
||||||
ak := c.GetString(ctxkey.ConfigAK)
|
|
||||||
sk := c.GetString(ctxkey.ConfigSK)
|
|
||||||
region := c.GetString(ctxkey.ConfigRegion)
|
|
||||||
client := bedrockruntime.New(bedrockruntime.Options{
|
|
||||||
Region: region,
|
|
||||||
Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(ak, sk, "")),
|
|
||||||
})
|
|
||||||
|
|
||||||
return client, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func wrapErr(err error) *relaymodel.ErrorWithStatusCode {
|
func wrapErr(err error) *relaymodel.ErrorWithStatusCode {
|
||||||
return &relaymodel.ErrorWithStatusCode{
|
return &relaymodel.ErrorWithStatusCode{
|
||||||
StatusCode: http.StatusInternalServerError,
|
StatusCode: http.StatusInternalServerError,
|
||||||
@ -62,12 +49,7 @@ func awsModelID(requestModel string) (string, error) {
|
|||||||
return "", errors.Errorf("model %s not found", requestModel)
|
return "", errors.Errorf("model %s not found", requestModel)
|
||||||
}
|
}
|
||||||
|
|
||||||
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
|
func Handler(c *gin.Context, awsCli *bedrockruntime.Client, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
|
||||||
awsCli, err := newAwsClient(c)
|
|
||||||
if err != nil {
|
|
||||||
return wrapErr(errors.Wrap(err, "newAwsClient")), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
|
awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return wrapErr(errors.Wrap(err, "awsModelID")), nil
|
return wrapErr(errors.Wrap(err, "awsModelID")), nil
|
||||||
@ -120,13 +102,8 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
|
|||||||
return nil, &usage
|
return nil, &usage
|
||||||
}
|
}
|
||||||
|
|
||||||
func StreamHandler(c *gin.Context, resp *http.Response) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
|
func StreamHandler(c *gin.Context, awsCli *bedrockruntime.Client) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
|
||||||
createdTime := helper.GetTimestamp()
|
createdTime := helper.GetTimestamp()
|
||||||
awsCli, err := newAwsClient(c)
|
|
||||||
if err != nil {
|
|
||||||
return wrapErr(errors.Wrap(err, "newAwsClient")), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
|
awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return wrapErr(errors.Wrap(err, "awsModelID")), nil
|
return wrapErr(errors.Wrap(err, "awsModelID")), nil
|
||||||
|
@ -1,15 +0,0 @@
|
|||||||
package azure
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/gin-gonic/gin"
|
|
||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
|
||||||
)
|
|
||||||
|
|
||||||
func GetAPIVersion(c *gin.Context) string {
|
|
||||||
query := c.Request.URL.Query()
|
|
||||||
apiVersion := query.Get("api-version")
|
|
||||||
if apiVersion == "" {
|
|
||||||
apiVersion = c.GetString(ctxkey.ConfigAPIVersion)
|
|
||||||
}
|
|
||||||
return apiVersion
|
|
||||||
}
|
|
66
relay/adaptor/cloudflare/adaptor.go
Normal file
66
relay/adaptor/cloudflare/adaptor.go
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
package cloudflare
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/songquanpeng/one-api/relay/adaptor"
|
||||||
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Adaptor struct {
|
||||||
|
meta *meta.Meta
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConvertImageRequest implements adaptor.Adaptor.
|
||||||
|
func (*Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
|
||||||
|
return nil, errors.New("not implemented")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConvertImageRequest implements adaptor.Adaptor.
|
||||||
|
|
||||||
|
func (a *Adaptor) Init(meta *meta.Meta) {
|
||||||
|
a.meta = meta
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
||||||
|
return fmt.Sprintf("%s/client/v4/accounts/%s/ai/run/%s", meta.BaseURL, meta.Config.UserID, meta.ActualModelName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
|
||||||
|
adaptor.SetupCommonRequestHeader(c, req, meta)
|
||||||
|
req.Header.Set("Authorization", "Bearer "+meta.APIKey)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
|
||||||
|
if request == nil {
|
||||||
|
return nil, errors.New("request is nil")
|
||||||
|
}
|
||||||
|
return ConvertRequest(*request), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
|
||||||
|
return adaptor.DoRequestHelper(a, c, meta, requestBody)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
|
||||||
|
if meta.IsStream {
|
||||||
|
err, usage = StreamHandler(c, resp, meta.PromptTokens, meta.ActualModelName)
|
||||||
|
} else {
|
||||||
|
err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) GetModelList() []string {
|
||||||
|
return ModelList
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) GetChannelName() string {
|
||||||
|
return "cloudflare"
|
||||||
|
}
|
36
relay/adaptor/cloudflare/constant.go
Normal file
36
relay/adaptor/cloudflare/constant.go
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
package cloudflare
|
||||||
|
|
||||||
|
var ModelList = []string{
|
||||||
|
"@cf/meta/llama-2-7b-chat-fp16",
|
||||||
|
"@cf/meta/llama-2-7b-chat-int8",
|
||||||
|
"@cf/mistral/mistral-7b-instruct-v0.1",
|
||||||
|
"@hf/thebloke/deepseek-coder-6.7b-base-awq",
|
||||||
|
"@hf/thebloke/deepseek-coder-6.7b-instruct-awq",
|
||||||
|
"@cf/deepseek-ai/deepseek-math-7b-base",
|
||||||
|
"@cf/deepseek-ai/deepseek-math-7b-instruct",
|
||||||
|
"@cf/thebloke/discolm-german-7b-v1-awq",
|
||||||
|
"@cf/tiiuae/falcon-7b-instruct",
|
||||||
|
"@cf/google/gemma-2b-it-lora",
|
||||||
|
"@hf/google/gemma-7b-it",
|
||||||
|
"@cf/google/gemma-7b-it-lora",
|
||||||
|
"@hf/nousresearch/hermes-2-pro-mistral-7b",
|
||||||
|
"@hf/thebloke/llama-2-13b-chat-awq",
|
||||||
|
"@cf/meta-llama/llama-2-7b-chat-hf-lora",
|
||||||
|
"@cf/meta/llama-3-8b-instruct",
|
||||||
|
"@hf/thebloke/llamaguard-7b-awq",
|
||||||
|
"@hf/thebloke/mistral-7b-instruct-v0.1-awq",
|
||||||
|
"@hf/mistralai/mistral-7b-instruct-v0.2",
|
||||||
|
"@cf/mistral/mistral-7b-instruct-v0.2-lora",
|
||||||
|
"@hf/thebloke/neural-chat-7b-v3-1-awq",
|
||||||
|
"@cf/openchat/openchat-3.5-0106",
|
||||||
|
"@hf/thebloke/openhermes-2.5-mistral-7b-awq",
|
||||||
|
"@cf/microsoft/phi-2",
|
||||||
|
"@cf/qwen/qwen1.5-0.5b-chat",
|
||||||
|
"@cf/qwen/qwen1.5-1.8b-chat",
|
||||||
|
"@cf/qwen/qwen1.5-14b-chat-awq",
|
||||||
|
"@cf/qwen/qwen1.5-7b-chat-awq",
|
||||||
|
"@cf/defog/sqlcoder-7b-2",
|
||||||
|
"@hf/nexusflow/starling-lm-7b-beta",
|
||||||
|
"@cf/tinyllama/tinyllama-1.1b-chat-v1.0",
|
||||||
|
"@hf/thebloke/zephyr-7b-beta-awq",
|
||||||
|
}
|
152
relay/adaptor/cloudflare/main.go
Normal file
152
relay/adaptor/cloudflare/main.go
Normal file
@ -0,0 +1,152 @@
|
|||||||
|
package cloudflare
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/songquanpeng/one-api/common"
|
||||||
|
"github.com/songquanpeng/one-api/common/helper"
|
||||||
|
"github.com/songquanpeng/one-api/common/logger"
|
||||||
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
||||||
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
|
)
|
||||||
|
|
||||||
|
func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
|
||||||
|
lastMessage := textRequest.Messages[len(textRequest.Messages)-1]
|
||||||
|
return &Request{
|
||||||
|
MaxTokens: textRequest.MaxTokens,
|
||||||
|
Prompt: lastMessage.StringContent(),
|
||||||
|
Stream: textRequest.Stream,
|
||||||
|
Temperature: textRequest.Temperature,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func ResponseCloudflare2OpenAI(cloudflareResponse *Response) *openai.TextResponse {
|
||||||
|
choice := openai.TextResponseChoice{
|
||||||
|
Index: 0,
|
||||||
|
Message: model.Message{
|
||||||
|
Role: "assistant",
|
||||||
|
Content: cloudflareResponse.Result.Response,
|
||||||
|
},
|
||||||
|
FinishReason: "stop",
|
||||||
|
}
|
||||||
|
fullTextResponse := openai.TextResponse{
|
||||||
|
Object: "chat.completion",
|
||||||
|
Created: helper.GetTimestamp(),
|
||||||
|
Choices: []openai.TextResponseChoice{choice},
|
||||||
|
}
|
||||||
|
return &fullTextResponse
|
||||||
|
}
|
||||||
|
|
||||||
|
func StreamResponseCloudflare2OpenAI(cloudflareResponse *StreamResponse) *openai.ChatCompletionsStreamResponse {
|
||||||
|
var choice openai.ChatCompletionsStreamResponseChoice
|
||||||
|
choice.Delta.Content = cloudflareResponse.Response
|
||||||
|
choice.Delta.Role = "assistant"
|
||||||
|
openaiResponse := openai.ChatCompletionsStreamResponse{
|
||||||
|
Object: "chat.completion.chunk",
|
||||||
|
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
|
||||||
|
Created: helper.GetTimestamp(),
|
||||||
|
}
|
||||||
|
return &openaiResponse
|
||||||
|
}
|
||||||
|
|
||||||
|
// StreamHandler relays a streaming Cloudflare Workers AI response to the
// client as OpenAI-style SSE events and accumulates the generated text so
// completion-token usage can be estimated afterwards.
//
// The error return is always nil here: malformed chunks are logged and
// skipped rather than aborting the stream.
func StreamHandler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
	scanner := bufio.NewScanner(resp.Body)
	// Split on single '\n' so each SSE line is delivered as one token.
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := bytes.IndexByte(data, '\n'); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			// Flush a trailing line that has no newline terminator.
			return len(data), data, nil
		}
		return 0, nil, nil
	})

	dataChan := make(chan string)
	stopChan := make(chan bool)
	// Producer: strip the "data: " SSE prefix from each line and hand the
	// JSON payload to the rendering loop below.
	//
	// NOTE(review): dataChan is unbuffered; if the client disconnects and
	// c.Stream stops consuming, this goroutine can block on send forever —
	// worth confirming gin drains or a ctx-aware send is needed.
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < len("data: ") {
				// Too short to carry a payload (blank keep-alive lines).
				continue
			}
			data = strings.TrimPrefix(data, "data: ")
			dataChan <- data
		}
		stopChan <- true
	}()
	common.SetEventStreamHeaders(c)
	id := helper.GetResponseID(c)
	responseModel := c.GetString("original_model")
	var responseText string
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			// some implementations may add \r at the end of data
			data = strings.TrimSuffix(data, "\r")
			var cloudflareResponse StreamResponse
			err := json.Unmarshal([]byte(data), &cloudflareResponse)
			if err != nil {
				logger.SysError("error unmarshalling stream response: " + err.Error())
				return true // skip malformed chunk, keep streaming
			}
			response := StreamResponseCloudflare2OpenAI(&cloudflareResponse)
			if response == nil {
				return true
			}
			// Accumulate the plain text for usage accounting after the stream.
			responseText += cloudflareResponse.Response
			response.Id = id
			response.Model = responseModel
			jsonStr, err := json.Marshal(response)
			if err != nil {
				logger.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
			return true
		case <-stopChan:
			// Producer finished; emit the OpenAI stream terminator.
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	_ = resp.Body.Close()
	usage := openai.ResponseText2Usage(responseText, responseModel, promptTokens)
	return nil, usage
}
|
||||||
|
|
||||||
|
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
|
||||||
|
responseBody, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
|
||||||
|
}
|
||||||
|
err = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
|
||||||
|
}
|
||||||
|
var cloudflareResponse Response
|
||||||
|
err = json.Unmarshal(responseBody, &cloudflareResponse)
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
|
||||||
|
}
|
||||||
|
fullTextResponse := ResponseCloudflare2OpenAI(&cloudflareResponse)
|
||||||
|
fullTextResponse.Model = modelName
|
||||||
|
usage := openai.ResponseText2Usage(cloudflareResponse.Result.Response, modelName, promptTokens)
|
||||||
|
fullTextResponse.Usage = *usage
|
||||||
|
fullTextResponse.Id = helper.GetResponseID(c)
|
||||||
|
jsonResponse, err := json.Marshal(fullTextResponse)
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
|
||||||
|
}
|
||||||
|
c.Writer.Header().Set("Content-Type", "application/json")
|
||||||
|
c.Writer.WriteHeader(resp.StatusCode)
|
||||||
|
_, err = c.Writer.Write(jsonResponse)
|
||||||
|
return nil, usage
|
||||||
|
}
|
25
relay/adaptor/cloudflare/model.go
Normal file
25
relay/adaptor/cloudflare/model.go
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
package cloudflare

// Request is the payload sent to the Cloudflare Workers AI text-generation
// endpoint. Only Prompt/MaxTokens/Stream/Temperature are populated by this
// adaptor (see ConvertRequest).
type Request struct {
	Lora        string  `json:"lora,omitempty"` // presumably a fine-tune (LoRA) adapter name — TODO confirm against CF docs
	MaxTokens   int     `json:"max_tokens,omitempty"`
	Prompt      string  `json:"prompt,omitempty"`
	Raw         bool    `json:"raw,omitempty"`
	Stream      bool    `json:"stream,omitempty"`
	Temperature float64 `json:"temperature,omitempty"`
}

// Result holds the generated text of a non-streaming reply.
type Result struct {
	Response string `json:"response"`
}

// Response is the top-level envelope of a non-streaming Workers AI reply.
type Response struct {
	Result   Result   `json:"result"`
	Success  bool     `json:"success"`
	Errors   []string `json:"errors"`
	Messages []string `json:"messages"`
}

// StreamResponse is one SSE chunk of a streaming reply.
type StreamResponse struct {
	Response string `json:"response"`
}
|
@ -5,3 +5,10 @@ var ModelList = []string{
|
|||||||
"command-light", "command-light-nightly",
|
"command-light", "command-light-nightly",
|
||||||
"command-r", "command-r-plus",
|
"command-r", "command-r-plus",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
num := len(ModelList)
|
||||||
|
for i := 0; i < num; i++ {
|
||||||
|
ModelList = append(ModelList, ModelList[i]+"-internet")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -17,6 +17,10 @@ import (
|
|||||||
"github.com/songquanpeng/one-api/relay/model"
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
WebSearchConnector = Connector{ID: "web-search"}
|
||||||
|
)
|
||||||
|
|
||||||
func stopReasonCohere2OpenAI(reason *string) string {
|
func stopReasonCohere2OpenAI(reason *string) string {
|
||||||
if reason == nil {
|
if reason == nil {
|
||||||
return ""
|
return ""
|
||||||
@ -45,6 +49,10 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
|
|||||||
if cohereRequest.Model == "" {
|
if cohereRequest.Model == "" {
|
||||||
cohereRequest.Model = "command-r"
|
cohereRequest.Model = "command-r"
|
||||||
}
|
}
|
||||||
|
if strings.HasSuffix(cohereRequest.Model, "-internet") {
|
||||||
|
cohereRequest.Model = strings.TrimSuffix(cohereRequest.Model, "-internet")
|
||||||
|
cohereRequest.Connectors = append(cohereRequest.Connectors, WebSearchConnector)
|
||||||
|
}
|
||||||
for _, message := range textRequest.Messages {
|
for _, message := range textRequest.Messages {
|
||||||
if message.Role == "user" {
|
if message.Role == "user" {
|
||||||
cohereRequest.Message = message.Content.(string)
|
cohereRequest.Message = message.Content.(string)
|
||||||
|
@ -4,7 +4,6 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor"
|
"github.com/songquanpeng/one-api/relay/adaptor"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
||||||
"github.com/songquanpeng/one-api/relay/meta"
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
@ -14,10 +13,11 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Adaptor struct {
|
type Adaptor struct {
|
||||||
|
meta *meta.Meta
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) Init(meta *meta.Meta) {
|
func (a *Adaptor) Init(meta *meta.Meta) {
|
||||||
|
a.meta = meta
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
||||||
@ -34,7 +34,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
|
|||||||
if request == nil {
|
if request == nil {
|
||||||
return nil, errors.New("request is nil")
|
return nil, errors.New("request is nil")
|
||||||
}
|
}
|
||||||
request.User = c.GetString(ctxkey.ConfigUserID)
|
request.User = a.meta.Config.UserID
|
||||||
return ConvertRequest(*request), nil
|
return ConvertRequest(*request), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
73
relay/adaptor/deepl/adaptor.go
Normal file
73
relay/adaptor/deepl/adaptor.go
Normal file
@ -0,0 +1,73 @@
|
|||||||
|
package deepl
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/songquanpeng/one-api/relay/adaptor"
|
||||||
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Adaptor struct {
|
||||||
|
meta *meta.Meta
|
||||||
|
promptText string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) Init(meta *meta.Meta) {
|
||||||
|
a.meta = meta
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
||||||
|
return fmt.Sprintf("%s/v2/translate", meta.BaseURL), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
|
||||||
|
adaptor.SetupCommonRequestHeader(c, req, meta)
|
||||||
|
req.Header.Set("Authorization", "DeepL-Auth-Key "+meta.APIKey)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
|
||||||
|
if request == nil {
|
||||||
|
return nil, errors.New("request is nil")
|
||||||
|
}
|
||||||
|
convertedRequest, text := ConvertRequest(*request)
|
||||||
|
a.promptText = text
|
||||||
|
return convertedRequest, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
|
||||||
|
if request == nil {
|
||||||
|
return nil, errors.New("request is nil")
|
||||||
|
}
|
||||||
|
return request, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
|
||||||
|
return adaptor.DoRequestHelper(a, c, meta, requestBody)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
|
||||||
|
if meta.IsStream {
|
||||||
|
err = StreamHandler(c, resp, meta.ActualModelName)
|
||||||
|
} else {
|
||||||
|
err = Handler(c, resp, meta.ActualModelName)
|
||||||
|
}
|
||||||
|
promptTokens := len(a.promptText)
|
||||||
|
usage = &model.Usage{
|
||||||
|
PromptTokens: promptTokens,
|
||||||
|
TotalTokens: promptTokens,
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) GetModelList() []string {
|
||||||
|
return ModelList
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Adaptor) GetChannelName() string {
|
||||||
|
return "deepl"
|
||||||
|
}
|
9
relay/adaptor/deepl/constants.go
Normal file
9
relay/adaptor/deepl/constants.go
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
package deepl

// https://developers.deepl.com/docs/api-reference/glossaries

// ModelList enumerates the pseudo-model names exposed by this adaptor; the
// suffix after "deepl-" selects the translation target language (see
// parseLangFromModelName).
var ModelList = []string{
	"deepl-zh",
	"deepl-en",
	"deepl-ja",
}
|
11
relay/adaptor/deepl/helper.go
Normal file
11
relay/adaptor/deepl/helper.go
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
package deepl
|
||||||
|
|
||||||
|
import "strings"
|
||||||
|
|
||||||
|
// parseLangFromModelName extracts the DeepL target_lang code from a model
// name of the form "deepl-<lang>" (e.g. "deepl-zh" -> "ZH"); a bare name
// with no suffix defaults to "ZH".
//
// The result is upper-cased so both branches return the canonical form that
// DeepL documents — the original returned "ZH" on the default path but the
// raw lowercase suffix otherwise.
func parseLangFromModelName(modelName string) string {
	parts := strings.SplitN(modelName, "-", 3)
	if len(parts) == 1 {
		return "ZH"
	}
	return strings.ToUpper(parts[1])
}
|
137
relay/adaptor/deepl/main.go
Normal file
137
relay/adaptor/deepl/main.go
Normal file
@ -0,0 +1,137 @@
|
|||||||
|
package deepl
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/songquanpeng/one-api/common"
|
||||||
|
"github.com/songquanpeng/one-api/common/helper"
|
||||||
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
||||||
|
"github.com/songquanpeng/one-api/relay/constant"
|
||||||
|
"github.com/songquanpeng/one-api/relay/constant/finishreason"
|
||||||
|
"github.com/songquanpeng/one-api/relay/constant/role"
|
||||||
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// https://developers.deepl.com/docs/getting-started/your-first-api-request
|
||||||
|
|
||||||
|
func ConvertRequest(textRequest model.GeneralOpenAIRequest) (*Request, string) {
|
||||||
|
var text string
|
||||||
|
if len(textRequest.Messages) != 0 {
|
||||||
|
text = textRequest.Messages[len(textRequest.Messages)-1].StringContent()
|
||||||
|
}
|
||||||
|
deeplRequest := Request{
|
||||||
|
TargetLang: parseLangFromModelName(textRequest.Model),
|
||||||
|
Text: []string{text},
|
||||||
|
}
|
||||||
|
return &deeplRequest, text
|
||||||
|
}
|
||||||
|
|
||||||
|
func StreamResponseDeepL2OpenAI(deeplResponse *Response) *openai.ChatCompletionsStreamResponse {
|
||||||
|
var choice openai.ChatCompletionsStreamResponseChoice
|
||||||
|
if len(deeplResponse.Translations) != 0 {
|
||||||
|
choice.Delta.Content = deeplResponse.Translations[0].Text
|
||||||
|
}
|
||||||
|
choice.Delta.Role = role.Assistant
|
||||||
|
choice.FinishReason = &constant.StopFinishReason
|
||||||
|
openaiResponse := openai.ChatCompletionsStreamResponse{
|
||||||
|
Object: constant.StreamObject,
|
||||||
|
Created: helper.GetTimestamp(),
|
||||||
|
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
|
||||||
|
}
|
||||||
|
return &openaiResponse
|
||||||
|
}
|
||||||
|
|
||||||
|
func ResponseDeepL2OpenAI(deeplResponse *Response) *openai.TextResponse {
|
||||||
|
var responseText string
|
||||||
|
if len(deeplResponse.Translations) != 0 {
|
||||||
|
responseText = deeplResponse.Translations[0].Text
|
||||||
|
}
|
||||||
|
choice := openai.TextResponseChoice{
|
||||||
|
Index: 0,
|
||||||
|
Message: model.Message{
|
||||||
|
Role: role.Assistant,
|
||||||
|
Content: responseText,
|
||||||
|
Name: nil,
|
||||||
|
},
|
||||||
|
FinishReason: finishreason.Stop,
|
||||||
|
}
|
||||||
|
fullTextResponse := openai.TextResponse{
|
||||||
|
Object: constant.NonStreamObject,
|
||||||
|
Created: helper.GetTimestamp(),
|
||||||
|
Choices: []openai.TextResponseChoice{choice},
|
||||||
|
}
|
||||||
|
return &fullTextResponse
|
||||||
|
}
|
||||||
|
|
||||||
|
func StreamHandler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode {
|
||||||
|
responseBody, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
err = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
var deeplResponse Response
|
||||||
|
err = json.Unmarshal(responseBody, &deeplResponse)
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
fullTextResponse := StreamResponseDeepL2OpenAI(&deeplResponse)
|
||||||
|
fullTextResponse.Model = modelName
|
||||||
|
fullTextResponse.Id = helper.GetResponseID(c)
|
||||||
|
jsonData, err := json.Marshal(fullTextResponse)
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
common.SetEventStreamHeaders(c)
|
||||||
|
c.Stream(func(w io.Writer) bool {
|
||||||
|
if jsonData != nil {
|
||||||
|
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonData)})
|
||||||
|
jsonData = nil
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
|
||||||
|
return false
|
||||||
|
})
|
||||||
|
_ = resp.Body.Close()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func Handler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode {
|
||||||
|
responseBody, err := io.ReadAll(resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
err = resp.Body.Close()
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
var deeplResponse Response
|
||||||
|
err = json.Unmarshal(responseBody, &deeplResponse)
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
if deeplResponse.Message != "" {
|
||||||
|
return &model.ErrorWithStatusCode{
|
||||||
|
Error: model.Error{
|
||||||
|
Message: deeplResponse.Message,
|
||||||
|
Code: "deepl_error",
|
||||||
|
},
|
||||||
|
StatusCode: resp.StatusCode,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fullTextResponse := ResponseDeepL2OpenAI(&deeplResponse)
|
||||||
|
fullTextResponse.Model = modelName
|
||||||
|
fullTextResponse.Id = helper.GetResponseID(c)
|
||||||
|
jsonResponse, err := json.Marshal(fullTextResponse)
|
||||||
|
if err != nil {
|
||||||
|
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
c.Writer.Header().Set("Content-Type", "application/json")
|
||||||
|
c.Writer.WriteHeader(resp.StatusCode)
|
||||||
|
_, err = c.Writer.Write(jsonResponse)
|
||||||
|
return nil
|
||||||
|
}
|
16
relay/adaptor/deepl/model.go
Normal file
16
relay/adaptor/deepl/model.go
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
package deepl

// Request is the payload for DeepL's /v2/translate endpoint.
type Request struct {
	Text       []string `json:"text"`        // source strings to translate (this adaptor sends exactly one)
	TargetLang string   `json:"target_lang"` // target language code derived from the model name
}

// Translation is a single translated segment returned by DeepL.
type Translation struct {
	DetectedSourceLanguage string `json:"detected_source_language,omitempty"`
	Text                   string `json:"text,omitempty"`
}

// Response is the top-level DeepL reply; Message is non-empty when DeepL
// reports an error (see Handler).
type Response struct {
	Translations []Translation `json:"translations,omitempty"`
	Message      string        `json:"message,omitempty"`
}
|
6
relay/adaptor/deepseek/constants.go
Normal file
6
relay/adaptor/deepseek/constants.go
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
package deepseek

// ModelList is the set of DeepSeek models this adaptor exposes. DeepSeek is
// served through the OpenAI-compatible channel path, so no request/response
// conversion code lives in this package.
var ModelList = []string{
	"deepseek-chat",
	"deepseek-coder",
}
|
@ -3,6 +3,9 @@ package gemini
|
|||||||
import (
|
import (
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common/config"
|
"github.com/songquanpeng/one-api/common/config"
|
||||||
"github.com/songquanpeng/one-api/common/helper"
|
"github.com/songquanpeng/one-api/common/helper"
|
||||||
@ -10,8 +13,6 @@ import (
|
|||||||
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
||||||
"github.com/songquanpeng/one-api/relay/meta"
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
"github.com/songquanpeng/one-api/relay/model"
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type Adaptor struct {
|
type Adaptor struct {
|
||||||
@ -22,10 +23,10 @@ func (a *Adaptor) Init(meta *meta.Meta) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
||||||
version := helper.AssignOrDefault(meta.APIVersion, config.GeminiVersion)
|
version := helper.AssignOrDefault(meta.Config.APIVersion, config.GeminiVersion)
|
||||||
action := "generateContent"
|
action := "generateContent"
|
||||||
if meta.IsStream {
|
if meta.IsStream {
|
||||||
action = "streamGenerateContent"
|
action = "streamGenerateContent?alt=sse"
|
||||||
}
|
}
|
||||||
return fmt.Sprintf("%s/%s/models/%s:%s", meta.BaseURL, version, meta.ActualModelName, action), nil
|
return fmt.Sprintf("%s/%s/models/%s:%s", meta.BaseURL, version, meta.ActualModelName, action), nil
|
||||||
}
|
}
|
||||||
|
@ -232,8 +232,6 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatC
|
|||||||
|
|
||||||
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
|
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
|
||||||
responseText := ""
|
responseText := ""
|
||||||
dataChan := make(chan string)
|
|
||||||
stopChan := make(chan bool)
|
|
||||||
scanner := bufio.NewScanner(resp.Body)
|
scanner := bufio.NewScanner(resp.Body)
|
||||||
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
|
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
|
||||||
if atEOF && len(data) == 0 {
|
if atEOF && len(data) == 0 {
|
||||||
@ -247,14 +245,16 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
|
|||||||
}
|
}
|
||||||
return 0, nil, nil
|
return 0, nil, nil
|
||||||
})
|
})
|
||||||
|
dataChan := make(chan string)
|
||||||
|
stopChan := make(chan bool)
|
||||||
go func() {
|
go func() {
|
||||||
for scanner.Scan() {
|
for scanner.Scan() {
|
||||||
data := scanner.Text()
|
data := scanner.Text()
|
||||||
data = strings.TrimSpace(data)
|
data = strings.TrimSpace(data)
|
||||||
if !strings.HasPrefix(data, "\"text\": \"") {
|
if !strings.HasPrefix(data, "data: ") {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
data = strings.TrimPrefix(data, "\"text\": \"")
|
data = strings.TrimPrefix(data, "data: ")
|
||||||
data = strings.TrimSuffix(data, "\"")
|
data = strings.TrimSuffix(data, "\"")
|
||||||
dataChan <- data
|
dataChan <- data
|
||||||
}
|
}
|
||||||
@ -264,23 +264,17 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
|
|||||||
c.Stream(func(w io.Writer) bool {
|
c.Stream(func(w io.Writer) bool {
|
||||||
select {
|
select {
|
||||||
case data := <-dataChan:
|
case data := <-dataChan:
|
||||||
// this is used to prevent annoying \ related format bug
|
var geminiResponse ChatResponse
|
||||||
data = fmt.Sprintf("{\"content\": \"%s\"}", data)
|
err := json.Unmarshal([]byte(data), &geminiResponse)
|
||||||
type dummyStruct struct {
|
if err != nil {
|
||||||
Content string `json:"content"`
|
logger.SysError("error unmarshalling stream response: " + err.Error())
|
||||||
|
return true
|
||||||
}
|
}
|
||||||
var dummy dummyStruct
|
response := streamResponseGeminiChat2OpenAI(&geminiResponse)
|
||||||
err := json.Unmarshal([]byte(data), &dummy)
|
if response == nil {
|
||||||
responseText += dummy.Content
|
return true
|
||||||
var choice openai.ChatCompletionsStreamResponseChoice
|
|
||||||
choice.Delta.Content = dummy.Content
|
|
||||||
response := openai.ChatCompletionsStreamResponse{
|
|
||||||
Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
|
|
||||||
Object: "chat.completion.chunk",
|
|
||||||
Created: helper.GetTimestamp(),
|
|
||||||
Model: "gemini-pro",
|
|
||||||
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
|
|
||||||
}
|
}
|
||||||
|
responseText += response.Choices[0].Delta.StringContent()
|
||||||
jsonResponse, err := json.Marshal(response)
|
jsonResponse, err := json.Marshal(response)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.SysError("error marshalling stream response: " + err.Error())
|
logger.SysError("error marshalling stream response: " + err.Error())
|
||||||
|
@ -1,5 +1,11 @@
|
|||||||
package ollama
|
package ollama
|
||||||
|
|
||||||
var ModelList = []string{
|
var ModelList = []string{
|
||||||
|
"codellama:7b-instruct",
|
||||||
|
"llama2:7b",
|
||||||
|
"llama2:latest",
|
||||||
|
"llama3:latest",
|
||||||
|
"phi3:latest",
|
||||||
"qwen:0.5b-chat",
|
"qwen:0.5b-chat",
|
||||||
|
"qwen:7b",
|
||||||
}
|
}
|
||||||
|
@ -13,6 +13,7 @@ import (
|
|||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common"
|
"github.com/songquanpeng/one-api/common"
|
||||||
|
"github.com/songquanpeng/one-api/common/image"
|
||||||
"github.com/songquanpeng/one-api/common/logger"
|
"github.com/songquanpeng/one-api/common/logger"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
||||||
"github.com/songquanpeng/one-api/relay/constant"
|
"github.com/songquanpeng/one-api/relay/constant"
|
||||||
@ -32,9 +33,22 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
|
|||||||
Stream: request.Stream,
|
Stream: request.Stream,
|
||||||
}
|
}
|
||||||
for _, message := range request.Messages {
|
for _, message := range request.Messages {
|
||||||
|
openaiContent := message.ParseContent()
|
||||||
|
var imageUrls []string
|
||||||
|
var contentText string
|
||||||
|
for _, part := range openaiContent {
|
||||||
|
switch part.Type {
|
||||||
|
case model.ContentTypeText:
|
||||||
|
contentText = part.Text
|
||||||
|
case model.ContentTypeImageURL:
|
||||||
|
_, data, _ := image.GetImageFromUrl(part.ImageURL.Url)
|
||||||
|
imageUrls = append(imageUrls, data)
|
||||||
|
}
|
||||||
|
}
|
||||||
ollamaRequest.Messages = append(ollamaRequest.Messages, Message{
|
ollamaRequest.Messages = append(ollamaRequest.Messages, Message{
|
||||||
Role: message.Role,
|
Role: message.Role,
|
||||||
Content: message.StringContent(),
|
Content: contentText,
|
||||||
|
Images: imageUrls,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
return &ollamaRequest
|
return &ollamaRequest
|
||||||
|
@ -29,13 +29,13 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
|||||||
if meta.Mode == relaymode.ImagesGenerations {
|
if meta.Mode == relaymode.ImagesGenerations {
|
||||||
// https://learn.microsoft.com/en-us/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line&pivots=rest-api
|
// https://learn.microsoft.com/en-us/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line&pivots=rest-api
|
||||||
// https://{resource_name}.openai.azure.com/openai/deployments/dall-e-3/images/generations?api-version=2024-03-01-preview
|
// https://{resource_name}.openai.azure.com/openai/deployments/dall-e-3/images/generations?api-version=2024-03-01-preview
|
||||||
fullRequestURL := fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.APIVersion)
|
fullRequestURL := fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.Config.APIVersion)
|
||||||
return fullRequestURL, nil
|
return fullRequestURL, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
|
// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
|
||||||
requestURL := strings.Split(meta.RequestURLPath, "?")[0]
|
requestURL := strings.Split(meta.RequestURLPath, "?")[0]
|
||||||
requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.APIVersion)
|
requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.Config.APIVersion)
|
||||||
task := strings.TrimPrefix(requestURL, "/v1/")
|
task := strings.TrimPrefix(requestURL, "/v1/")
|
||||||
model_ := meta.ActualModelName
|
model_ := meta.ActualModelName
|
||||||
model_ = strings.Replace(model_, ".", "", -1)
|
model_ = strings.Replace(model_, ".", "", -1)
|
||||||
|
@ -3,6 +3,7 @@ package openai
|
|||||||
import (
|
import (
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/ai360"
|
"github.com/songquanpeng/one-api/relay/adaptor/ai360"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/baichuan"
|
"github.com/songquanpeng/one-api/relay/adaptor/baichuan"
|
||||||
|
"github.com/songquanpeng/one-api/relay/adaptor/deepseek"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/groq"
|
"github.com/songquanpeng/one-api/relay/adaptor/groq"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/lingyiwanwu"
|
"github.com/songquanpeng/one-api/relay/adaptor/lingyiwanwu"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/minimax"
|
"github.com/songquanpeng/one-api/relay/adaptor/minimax"
|
||||||
@ -22,6 +23,7 @@ var CompatibleChannels = []int{
|
|||||||
channeltype.Groq,
|
channeltype.Groq,
|
||||||
channeltype.LingYiWanWu,
|
channeltype.LingYiWanWu,
|
||||||
channeltype.StepFun,
|
channeltype.StepFun,
|
||||||
|
channeltype.DeepSeek,
|
||||||
}
|
}
|
||||||
|
|
||||||
func GetCompatibleChannelMeta(channelType int) (string, []string) {
|
func GetCompatibleChannelMeta(channelType int) (string, []string) {
|
||||||
@ -44,6 +46,8 @@ func GetCompatibleChannelMeta(channelType int) (string, []string) {
|
|||||||
return "lingyiwanwu", lingyiwanwu.ModelList
|
return "lingyiwanwu", lingyiwanwu.ModelList
|
||||||
case channeltype.StepFun:
|
case channeltype.StepFun:
|
||||||
return "stepfun", stepfun.ModelList
|
return "stepfun", stepfun.ModelList
|
||||||
|
case channeltype.DeepSeek:
|
||||||
|
return "deepseek", deepseek.ModelList
|
||||||
default:
|
default:
|
||||||
return "openai", ModelList
|
return "openai", ModelList
|
||||||
}
|
}
|
||||||
|
@ -134,7 +134,7 @@ type ChatCompletionsStreamResponse struct {
|
|||||||
Created int64 `json:"created"`
|
Created int64 `json:"created"`
|
||||||
Model string `json:"model"`
|
Model string `json:"model"`
|
||||||
Choices []ChatCompletionsStreamResponseChoice `json:"choices"`
|
Choices []ChatCompletionsStreamResponseChoice `json:"choices"`
|
||||||
Usage *model.Usage `json:"usage"`
|
Usage *model.Usage `json:"usage,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type CompletionsStreamResponse struct {
|
type CompletionsStreamResponse struct {
|
||||||
|
@ -206,3 +206,7 @@ func CountTokenText(text string, model string) int {
|
|||||||
tokenEncoder := getTokenEncoder(model)
|
tokenEncoder := getTokenEncoder(model)
|
||||||
return getTokenNum(tokenEncoder, text)
|
return getTokenNum(tokenEncoder, text)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func CountToken(text string) int {
|
||||||
|
return CountTokenInput(text, "gpt-3.5-turbo")
|
||||||
|
}
|
||||||
|
@ -14,10 +14,11 @@ import (
|
|||||||
|
|
||||||
type Adaptor struct {
|
type Adaptor struct {
|
||||||
request *model.GeneralOpenAIRequest
|
request *model.GeneralOpenAIRequest
|
||||||
|
meta *meta.Meta
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) Init(meta *meta.Meta) {
|
func (a *Adaptor) Init(meta *meta.Meta) {
|
||||||
|
a.meta = meta
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
||||||
@ -26,6 +27,14 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
|
|||||||
|
|
||||||
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
|
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
|
||||||
adaptor.SetupCommonRequestHeader(c, req, meta)
|
adaptor.SetupCommonRequestHeader(c, req, meta)
|
||||||
|
version := parseAPIVersionByModelName(meta.ActualModelName)
|
||||||
|
if version == "" {
|
||||||
|
version = a.meta.Config.APIVersion
|
||||||
|
}
|
||||||
|
if version == "" {
|
||||||
|
version = "v1.1"
|
||||||
|
}
|
||||||
|
a.meta.Config.APIVersion = version
|
||||||
// check DoResponse for auth part
|
// check DoResponse for auth part
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@ -61,9 +70,9 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
|
|||||||
return nil, openai.ErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest)
|
return nil, openai.ErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest)
|
||||||
}
|
}
|
||||||
if meta.IsStream {
|
if meta.IsStream {
|
||||||
err, usage = StreamHandler(c, *a.request, splits[0], splits[1], splits[2])
|
err, usage = StreamHandler(c, meta, *a.request, splits[0], splits[1], splits[2])
|
||||||
} else {
|
} else {
|
||||||
err, usage = Handler(c, *a.request, splits[0], splits[1], splits[2])
|
err, usage = Handler(c, meta, *a.request, splits[0], splits[1], splits[2])
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
@ -9,12 +9,12 @@ import (
|
|||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/gorilla/websocket"
|
"github.com/gorilla/websocket"
|
||||||
"github.com/songquanpeng/one-api/common"
|
"github.com/songquanpeng/one-api/common"
|
||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
|
||||||
"github.com/songquanpeng/one-api/common/helper"
|
"github.com/songquanpeng/one-api/common/helper"
|
||||||
"github.com/songquanpeng/one-api/common/logger"
|
"github.com/songquanpeng/one-api/common/logger"
|
||||||
"github.com/songquanpeng/one-api/common/random"
|
"github.com/songquanpeng/one-api/common/random"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
||||||
"github.com/songquanpeng/one-api/relay/constant"
|
"github.com/songquanpeng/one-api/relay/constant"
|
||||||
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
"github.com/songquanpeng/one-api/relay/model"
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
@ -149,8 +149,8 @@ func buildXunfeiAuthUrl(hostUrl string, apiKey, apiSecret string) string {
|
|||||||
return callUrl
|
return callUrl
|
||||||
}
|
}
|
||||||
|
|
||||||
func StreamHandler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
|
func StreamHandler(c *gin.Context, meta *meta.Meta, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
|
||||||
domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret, textRequest.Model)
|
domain, authUrl := getXunfeiAuthUrl(meta.Config.APIVersion, apiKey, apiSecret)
|
||||||
dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
|
dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
|
return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
|
||||||
@ -179,8 +179,8 @@ func StreamHandler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId
|
|||||||
return nil, &usage
|
return nil, &usage
|
||||||
}
|
}
|
||||||
|
|
||||||
func Handler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
|
func Handler(c *gin.Context, meta *meta.Meta, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
|
||||||
domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret, textRequest.Model)
|
domain, authUrl := getXunfeiAuthUrl(meta.Config.APIVersion, apiKey, apiSecret)
|
||||||
dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
|
dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
|
return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
|
||||||
@ -268,25 +268,12 @@ func xunfeiMakeRequest(textRequest model.GeneralOpenAIRequest, domain, authUrl,
|
|||||||
return dataChan, stopChan, nil
|
return dataChan, stopChan, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func getAPIVersion(c *gin.Context, modelName string) string {
|
func parseAPIVersionByModelName(modelName string) string {
|
||||||
query := c.Request.URL.Query()
|
|
||||||
apiVersion := query.Get("api-version")
|
|
||||||
if apiVersion != "" {
|
|
||||||
return apiVersion
|
|
||||||
}
|
|
||||||
parts := strings.Split(modelName, "-")
|
parts := strings.Split(modelName, "-")
|
||||||
if len(parts) == 2 {
|
if len(parts) == 2 {
|
||||||
apiVersion = parts[1]
|
return parts[1]
|
||||||
return apiVersion
|
|
||||||
|
|
||||||
}
|
}
|
||||||
apiVersion = c.GetString(ctxkey.ConfigAPIVersion)
|
return ""
|
||||||
if apiVersion != "" {
|
|
||||||
return apiVersion
|
|
||||||
}
|
|
||||||
apiVersion = "v1.1"
|
|
||||||
logger.SysLog("api_version not found, using default: " + apiVersion)
|
|
||||||
return apiVersion
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E
|
// https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E
|
||||||
@ -304,8 +291,7 @@ func apiVersion2domain(apiVersion string) string {
|
|||||||
return "general" + apiVersion
|
return "general" + apiVersion
|
||||||
}
|
}
|
||||||
|
|
||||||
func getXunfeiAuthUrl(c *gin.Context, apiKey string, apiSecret string, modelName string) (string, string) {
|
func getXunfeiAuthUrl(apiVersion string, apiKey string, apiSecret string) (string, string) {
|
||||||
apiVersion := getAPIVersion(c, modelName)
|
|
||||||
domain := apiVersion2domain(apiVersion)
|
domain := apiVersion2domain(apiVersion)
|
||||||
authUrl := buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
|
authUrl := buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
|
||||||
return domain, authUrl
|
return domain, authUrl
|
||||||
|
@ -62,8 +62,8 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
|
|||||||
}
|
}
|
||||||
switch relayMode {
|
switch relayMode {
|
||||||
case relaymode.Embeddings:
|
case relaymode.Embeddings:
|
||||||
baiduEmbeddingRequest := ConvertEmbeddingRequest(*request)
|
baiduEmbeddingRequest, err := ConvertEmbeddingRequest(*request)
|
||||||
return baiduEmbeddingRequest, nil
|
return baiduEmbeddingRequest, err
|
||||||
default:
|
default:
|
||||||
// TopP (0.0, 1.0)
|
// TopP (0.0, 1.0)
|
||||||
request.TopP = math.Min(0.99, request.TopP)
|
request.TopP = math.Min(0.99, request.TopP)
|
||||||
@ -129,11 +129,15 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
|
func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) (*EmbeddingRequest, error) {
|
||||||
return &EmbeddingRequest{
|
inputs := request.ParseInput()
|
||||||
Model: "embedding-2",
|
if len(inputs) != 1 {
|
||||||
Input: request.Input.(string),
|
return nil, errors.New("invalid input length, zhipu only support one input")
|
||||||
}
|
}
|
||||||
|
return &EmbeddingRequest{
|
||||||
|
Model: request.Model,
|
||||||
|
Input: inputs[0],
|
||||||
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a *Adaptor) GetModelList() []string {
|
func (a *Adaptor) GetModelList() []string {
|
||||||
|
@ -15,6 +15,8 @@ const (
|
|||||||
AwsClaude
|
AwsClaude
|
||||||
Coze
|
Coze
|
||||||
Cohere
|
Cohere
|
||||||
|
Cloudflare
|
||||||
|
DeepL
|
||||||
|
|
||||||
Dummy // this one is only for count, do not add any channel after this
|
Dummy // this one is only for count, do not add any channel after this
|
||||||
)
|
)
|
||||||
|
@ -170,6 +170,13 @@ var ModelRatio = map[string]float64{
|
|||||||
"command-light-nightly": 0.5,
|
"command-light-nightly": 0.5,
|
||||||
"command-r": 0.5 / 1000 * USD,
|
"command-r": 0.5 / 1000 * USD,
|
||||||
"command-r-plus ": 3.0 / 1000 * USD,
|
"command-r-plus ": 3.0 / 1000 * USD,
|
||||||
|
// https://platform.deepseek.com/api-docs/pricing/
|
||||||
|
"deepseek-chat": 1.0 / 1000 * RMB,
|
||||||
|
"deepseek-coder": 1.0 / 1000 * RMB,
|
||||||
|
// https://www.deepl.com/pro?cta=header-prices
|
||||||
|
"deepl-zh": 25.0 / 1000 * USD,
|
||||||
|
"deepl-en": 25.0 / 1000 * USD,
|
||||||
|
"deepl-ja": 25.0 / 1000 * USD,
|
||||||
}
|
}
|
||||||
|
|
||||||
var CompletionRatio = map[string]float64{}
|
var CompletionRatio = map[string]float64{}
|
||||||
@ -225,6 +232,9 @@ func GetModelRatio(name string) float64 {
|
|||||||
if strings.HasPrefix(name, "qwen-") && strings.HasSuffix(name, "-internet") {
|
if strings.HasPrefix(name, "qwen-") && strings.HasSuffix(name, "-internet") {
|
||||||
name = strings.TrimSuffix(name, "-internet")
|
name = strings.TrimSuffix(name, "-internet")
|
||||||
}
|
}
|
||||||
|
if strings.HasPrefix(name, "command-") && strings.HasSuffix(name, "-internet") {
|
||||||
|
name = strings.TrimSuffix(name, "-internet")
|
||||||
|
}
|
||||||
ratio, ok := ModelRatio[name]
|
ratio, ok := ModelRatio[name]
|
||||||
if !ok {
|
if !ok {
|
||||||
ratio, ok = DefaultModelRatio[name]
|
ratio, ok = DefaultModelRatio[name]
|
||||||
@ -285,6 +295,9 @@ func GetCompletionRatio(name string) float64 {
|
|||||||
if strings.HasPrefix(name, "gemini-") {
|
if strings.HasPrefix(name, "gemini-") {
|
||||||
return 3
|
return 3
|
||||||
}
|
}
|
||||||
|
if strings.HasPrefix(name, "deepseek-") {
|
||||||
|
return 2
|
||||||
|
}
|
||||||
switch name {
|
switch name {
|
||||||
case "llama2-70b-4096":
|
case "llama2-70b-4096":
|
||||||
return 0.8 / 0.64
|
return 0.8 / 0.64
|
||||||
|
@ -37,6 +37,9 @@ const (
|
|||||||
AwsClaude
|
AwsClaude
|
||||||
Coze
|
Coze
|
||||||
Cohere
|
Cohere
|
||||||
|
DeepSeek
|
||||||
|
Cloudflare
|
||||||
|
DeepL
|
||||||
|
|
||||||
Dummy
|
Dummy
|
||||||
)
|
)
|
||||||
|
@ -31,6 +31,10 @@ func ToAPIType(channelType int) int {
|
|||||||
apiType = apitype.Coze
|
apiType = apitype.Coze
|
||||||
case Cohere:
|
case Cohere:
|
||||||
apiType = apitype.Cohere
|
apiType = apitype.Cohere
|
||||||
|
case Cloudflare:
|
||||||
|
apiType = apitype.Cloudflare
|
||||||
|
case DeepL:
|
||||||
|
apiType = apitype.DeepL
|
||||||
}
|
}
|
||||||
|
|
||||||
return apiType
|
return apiType
|
||||||
|
@ -37,6 +37,9 @@ var ChannelBaseURLs = []string{
|
|||||||
"", // 33
|
"", // 33
|
||||||
"https://api.coze.com", // 34
|
"https://api.coze.com", // 34
|
||||||
"https://api.cohere.ai", // 35
|
"https://api.cohere.ai", // 35
|
||||||
|
"https://api.deepseek.com", // 36
|
||||||
|
"https://api.cloudflare.com", // 37
|
||||||
|
"https://api-free.deepl.com", // 38
|
||||||
}
|
}
|
||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
|
@ -1,3 +1,5 @@
|
|||||||
package constant
|
package constant
|
||||||
|
|
||||||
var StopFinishReason = "stop"
|
var StopFinishReason = "stop"
|
||||||
|
var StreamObject = "chat.completion.chunk"
|
||||||
|
var NonStreamObject = "chat.completion"
|
||||||
|
5
relay/constant/finishreason/define.go
Normal file
5
relay/constant/finishreason/define.go
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
package finishreason
|
||||||
|
|
||||||
|
const (
|
||||||
|
Stop = "stop"
|
||||||
|
)
|
5
relay/constant/role/define.go
Normal file
5
relay/constant/role/define.go
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
package role
|
||||||
|
|
||||||
|
const (
|
||||||
|
Assistant = "assistant"
|
||||||
|
)
|
@ -13,12 +13,12 @@ import (
|
|||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
"github.com/songquanpeng/one-api/common/ctxkey"
|
||||||
"github.com/songquanpeng/one-api/common/logger"
|
"github.com/songquanpeng/one-api/common/logger"
|
||||||
"github.com/songquanpeng/one-api/model"
|
"github.com/songquanpeng/one-api/model"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/azure"
|
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
||||||
"github.com/songquanpeng/one-api/relay/billing"
|
"github.com/songquanpeng/one-api/relay/billing"
|
||||||
billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
|
billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
|
||||||
"github.com/songquanpeng/one-api/relay/channeltype"
|
"github.com/songquanpeng/one-api/relay/channeltype"
|
||||||
"github.com/songquanpeng/one-api/relay/client"
|
"github.com/songquanpeng/one-api/relay/client"
|
||||||
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
relaymodel "github.com/songquanpeng/one-api/relay/model"
|
relaymodel "github.com/songquanpeng/one-api/relay/model"
|
||||||
"github.com/songquanpeng/one-api/relay/relaymode"
|
"github.com/songquanpeng/one-api/relay/relaymode"
|
||||||
"io"
|
"io"
|
||||||
@ -28,6 +28,7 @@ import (
|
|||||||
|
|
||||||
func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode {
|
func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode {
|
||||||
ctx := c.Request.Context()
|
ctx := c.Request.Context()
|
||||||
|
meta := meta.GetByContext(c)
|
||||||
audioModel := "whisper-1"
|
audioModel := "whisper-1"
|
||||||
|
|
||||||
tokenId := c.GetInt(ctxkey.TokenId)
|
tokenId := c.GetInt(ctxkey.TokenId)
|
||||||
@ -128,7 +129,7 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
|
|||||||
|
|
||||||
fullRequestURL := openai.GetFullRequestURL(baseURL, requestURL, channelType)
|
fullRequestURL := openai.GetFullRequestURL(baseURL, requestURL, channelType)
|
||||||
if channelType == channeltype.Azure {
|
if channelType == channeltype.Azure {
|
||||||
apiVersion := azure.GetAPIVersion(c)
|
apiVersion := meta.Config.APIVersion
|
||||||
if relayMode == relaymode.AudioTranscription {
|
if relayMode == relaymode.AudioTranscription {
|
||||||
// https://learn.microsoft.com/en-us/azure/ai-services/openai/whisper-quickstart?tabs=command-line#rest-api
|
// https://learn.microsoft.com/en-us/azure/ai-services/openai/whisper-quickstart?tabs=command-line#rest-api
|
||||||
fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/audio/transcriptions?api-version=%s", baseURL, audioModel, apiVersion)
|
fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/audio/transcriptions?api-version=%s", baseURL, audioModel, apiVersion)
|
||||||
|
@ -18,6 +18,7 @@ import (
|
|||||||
"github.com/songquanpeng/one-api/relay/relaymode"
|
"github.com/songquanpeng/one-api/relay/relaymode"
|
||||||
"math"
|
"math"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
func getAndValidateTextRequest(c *gin.Context, relayMode int) (*relaymodel.GeneralOpenAIRequest, error) {
|
func getAndValidateTextRequest(c *gin.Context, relayMode int) (*relaymodel.GeneralOpenAIRequest, error) {
|
||||||
@ -124,9 +125,9 @@ func getPromptTokens(textRequest *relaymodel.GeneralOpenAIRequest, relayMode int
|
|||||||
}
|
}
|
||||||
|
|
||||||
func getPreConsumedQuota(textRequest *relaymodel.GeneralOpenAIRequest, promptTokens int, ratio float64) int64 {
|
func getPreConsumedQuota(textRequest *relaymodel.GeneralOpenAIRequest, promptTokens int, ratio float64) int64 {
|
||||||
preConsumedTokens := config.PreConsumedQuota
|
preConsumedTokens := config.PreConsumedQuota + int64(promptTokens)
|
||||||
if textRequest.MaxTokens != 0 {
|
if textRequest.MaxTokens != 0 {
|
||||||
preConsumedTokens = int64(promptTokens) + int64(textRequest.MaxTokens)
|
preConsumedTokens += int64(textRequest.MaxTokens)
|
||||||
}
|
}
|
||||||
return int64(float64(preConsumedTokens) * ratio)
|
return int64(float64(preConsumedTokens) * ratio)
|
||||||
}
|
}
|
||||||
@ -204,3 +205,20 @@ func getMappedModelName(modelName string, mapping map[string]string) (string, bo
|
|||||||
}
|
}
|
||||||
return modelName, false
|
return modelName, false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func isErrorHappened(meta *meta.Meta, resp *http.Response) bool {
|
||||||
|
if resp == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if resp.StatusCode != http.StatusOK {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if meta.ChannelType == channeltype.DeepL {
|
||||||
|
// skip stream check for deepl
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
@ -73,6 +73,7 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
|
|||||||
if adaptor == nil {
|
if adaptor == nil {
|
||||||
return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest)
|
return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest)
|
||||||
}
|
}
|
||||||
|
adaptor.Init(meta)
|
||||||
|
|
||||||
switch meta.ChannelType {
|
switch meta.ChannelType {
|
||||||
case channeltype.Ali:
|
case channeltype.Ali:
|
||||||
|
@ -4,10 +4,6 @@ import (
|
|||||||
"bytes"
|
"bytes"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common/logger"
|
"github.com/songquanpeng/one-api/common/logger"
|
||||||
"github.com/songquanpeng/one-api/relay"
|
"github.com/songquanpeng/one-api/relay"
|
||||||
@ -18,6 +14,8 @@ import (
|
|||||||
"github.com/songquanpeng/one-api/relay/channeltype"
|
"github.com/songquanpeng/one-api/relay/channeltype"
|
||||||
"github.com/songquanpeng/one-api/relay/meta"
|
"github.com/songquanpeng/one-api/relay/meta"
|
||||||
"github.com/songquanpeng/one-api/relay/model"
|
"github.com/songquanpeng/one-api/relay/model"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
)
|
)
|
||||||
|
|
||||||
func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
|
func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
|
||||||
@ -53,6 +51,7 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
|
|||||||
if adaptor == nil {
|
if adaptor == nil {
|
||||||
return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest)
|
return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest)
|
||||||
}
|
}
|
||||||
|
adaptor.Init(meta)
|
||||||
|
|
||||||
// get request body
|
// get request body
|
||||||
var requestBody io.Reader
|
var requestBody io.Reader
|
||||||
@ -87,13 +86,10 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
|
|||||||
logger.Errorf(ctx, "DoRequest failed: %s", err.Error())
|
logger.Errorf(ctx, "DoRequest failed: %s", err.Error())
|
||||||
return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
|
return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
|
||||||
}
|
}
|
||||||
if resp != nil {
|
if isErrorHappened(meta, resp) {
|
||||||
errorHappened := (resp.StatusCode != http.StatusOK) || (meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json"))
|
|
||||||
if errorHappened {
|
|
||||||
billing.ReturnPreConsumedQuota(ctx, preConsumedQuota, meta.TokenId)
|
billing.ReturnPreConsumedQuota(ctx, preConsumedQuota, meta.TokenId)
|
||||||
return RelayErrorHandler(resp)
|
return RelayErrorHandler(resp)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
// do response
|
// do response
|
||||||
usage, respErr := adaptor.DoResponse(c, resp, meta)
|
usage, respErr := adaptor.DoResponse(c, resp, meta)
|
||||||
|
@ -3,7 +3,7 @@ package meta
|
|||||||
import (
|
import (
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
"github.com/songquanpeng/one-api/common/ctxkey"
|
"github.com/songquanpeng/one-api/common/ctxkey"
|
||||||
"github.com/songquanpeng/one-api/relay/adaptor/azure"
|
"github.com/songquanpeng/one-api/model"
|
||||||
"github.com/songquanpeng/one-api/relay/channeltype"
|
"github.com/songquanpeng/one-api/relay/channeltype"
|
||||||
"github.com/songquanpeng/one-api/relay/relaymode"
|
"github.com/songquanpeng/one-api/relay/relaymode"
|
||||||
"strings"
|
"strings"
|
||||||
@ -19,10 +19,9 @@ type Meta struct {
|
|||||||
Group string
|
Group string
|
||||||
ModelMapping map[string]string
|
ModelMapping map[string]string
|
||||||
BaseURL string
|
BaseURL string
|
||||||
APIVersion string
|
|
||||||
APIKey string
|
APIKey string
|
||||||
APIType int
|
APIType int
|
||||||
Config map[string]string
|
Config model.ChannelConfig
|
||||||
IsStream bool
|
IsStream bool
|
||||||
OriginModelName string
|
OriginModelName string
|
||||||
ActualModelName string
|
ActualModelName string
|
||||||
@ -40,14 +39,14 @@ func GetByContext(c *gin.Context) *Meta {
|
|||||||
UserId: c.GetInt(ctxkey.Id),
|
UserId: c.GetInt(ctxkey.Id),
|
||||||
Group: c.GetString(ctxkey.Group),
|
Group: c.GetString(ctxkey.Group),
|
||||||
ModelMapping: c.GetStringMapString(ctxkey.ModelMapping),
|
ModelMapping: c.GetStringMapString(ctxkey.ModelMapping),
|
||||||
|
OriginModelName: c.GetString(ctxkey.RequestModel),
|
||||||
BaseURL: c.GetString(ctxkey.BaseURL),
|
BaseURL: c.GetString(ctxkey.BaseURL),
|
||||||
APIVersion: c.GetString(ctxkey.ConfigAPIVersion),
|
|
||||||
APIKey: strings.TrimPrefix(c.Request.Header.Get("Authorization"), "Bearer "),
|
APIKey: strings.TrimPrefix(c.Request.Header.Get("Authorization"), "Bearer "),
|
||||||
Config: nil,
|
|
||||||
RequestURLPath: c.Request.URL.String(),
|
RequestURLPath: c.Request.URL.String(),
|
||||||
}
|
}
|
||||||
if meta.ChannelType == channeltype.Azure {
|
cfg, ok := c.Get(ctxkey.Config)
|
||||||
meta.APIVersion = azure.GetAPIVersion(c)
|
if ok {
|
||||||
|
meta.Config = cfg.(model.ChannelConfig)
|
||||||
}
|
}
|
||||||
if meta.BaseURL == "" {
|
if meta.BaseURL == "" {
|
||||||
meta.BaseURL = channeltype.ChannelBaseURLs[meta.ChannelType]
|
meta.BaseURL = channeltype.ChannelBaseURLs[meta.ChannelType]
|
||||||
|
@ -131,6 +131,18 @@ export const CHANNEL_OPTIONS = {
|
|||||||
value: 35,
|
value: 35,
|
||||||
color: 'primary'
|
color: 'primary'
|
||||||
},
|
},
|
||||||
|
36: {
|
||||||
|
key: 36,
|
||||||
|
text: 'DeepSeek',
|
||||||
|
value: 36,
|
||||||
|
color: 'primary'
|
||||||
|
},
|
||||||
|
38: {
|
||||||
|
key: 38,
|
||||||
|
text: 'DeepL',
|
||||||
|
value: 38,
|
||||||
|
color: 'primary'
|
||||||
|
},
|
||||||
8: {
|
8: {
|
||||||
key: 8,
|
key: 8,
|
||||||
text: '自定义渠道',
|
text: '自定义渠道',
|
||||||
|
@ -21,6 +21,9 @@ export const CHANNEL_OPTIONS = [
|
|||||||
{key: 32, text: '阶跃星辰', value: 32, color: 'blue'},
|
{key: 32, text: '阶跃星辰', value: 32, color: 'blue'},
|
||||||
{key: 34, text: 'Coze', value: 34, color: 'blue'},
|
{key: 34, text: 'Coze', value: 34, color: 'blue'},
|
||||||
{key: 35, text: 'Cohere', value: 35, color: 'blue'},
|
{key: 35, text: 'Cohere', value: 35, color: 'blue'},
|
||||||
|
{key: 36, text: 'DeepSeek', value: 36, color: 'black'},
|
||||||
|
{key: 37, text: 'Cloudflare', value: 37, color: 'orange'},
|
||||||
|
{key: 38, text: 'DeepL', value: 38, color: 'black'},
|
||||||
{key: 8, text: '自定义渠道', value: 8, color: 'pink'},
|
{key: 8, text: '自定义渠道', value: 8, color: 'pink'},
|
||||||
{key: 22, text: '知识库:FastGPT', value: 22, color: 'blue'},
|
{key: 22, text: '知识库:FastGPT', value: 22, color: 'blue'},
|
||||||
{key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple'},
|
{key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple'},
|
||||||
|
@ -488,6 +488,21 @@ const EditChannel = () => {
|
|||||||
/>
|
/>
|
||||||
</Form.Field>)
|
</Form.Field>)
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
inputs.type === 37 && (
|
||||||
|
<Form.Field>
|
||||||
|
<Form.Input
|
||||||
|
label='Account ID'
|
||||||
|
name='user_id'
|
||||||
|
required
|
||||||
|
placeholder={'请输入 Account ID,例如:d8d7c61dbc334c32d3ced580e4bf42b4'}
|
||||||
|
onChange={handleConfigChange}
|
||||||
|
value={config.user_id}
|
||||||
|
autoComplete=''
|
||||||
|
/>
|
||||||
|
</Form.Field>
|
||||||
|
)
|
||||||
|
}
|
||||||
{
|
{
|
||||||
inputs.type !== 33 && !isEdit && (
|
inputs.type !== 33 && !isEdit && (
|
||||||
<Form.Checkbox
|
<Form.Checkbox
|
||||||
|
Loading…
Reference in New Issue
Block a user