fix: update GetAdaptor function to use the actual model name

The GetAdaptor call in the vertexai Adaptor's DoResponse method now uses the actual (mapped) model name instead of the origin model name from the raw request. This ensures the correct inner adaptor is retrieved when processing the response.
Laisky.Cai 2024-07-15 06:28:03 +00:00
parent 1c44d7e1cd
commit d6536d2907
8 changed files with 36 additions and 37 deletions
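
For context, here is a minimal, self-contained sketch of the distinction the fix relies on: when a channel defines a model mapping, the name in the raw user request (OriginModelName) differs from the upstream name the channel actually serves (ActualModelName), and a lookup keyed by upstream names only resolves for the latter. The lookupAdaptor helper, the registry contents, and the model names below are illustrative assumptions, not code from this repository.

package main

import "fmt"

// Stand-in for the relevant fields of meta.Meta; the real struct has many more
// fields (see the relay meta diff below).
type Meta struct {
	ModelMapping    map[string]string // user-facing name -> upstream name
	OriginModelName string            // model name from the raw user request
	ActualModelName string            // model name after mapping
}

// lookupAdaptor is a hypothetical registry keyed by upstream model names only.
func lookupAdaptor(model string) (string, bool) {
	registry := map[string]string{
		"claude-3-5-sonnet@20240620": "vertexai-claude", // assumed upstream id
	}
	name, ok := registry[model]
	return name, ok
}

func main() {
	m := Meta{
		ModelMapping:    map[string]string{"claude-3-5-sonnet": "claude-3-5-sonnet@20240620"},
		OriginModelName: "claude-3-5-sonnet",
	}
	// ActualModelName is the mapped name when a mapping exists, else the origin name.
	m.ActualModelName = m.OriginModelName
	if mapped, ok := m.ModelMapping[m.OriginModelName]; ok {
		m.ActualModelName = mapped
	}

	if _, ok := lookupAdaptor(m.OriginModelName); !ok {
		fmt.Println("origin name misses the registry -> nil adaptor -> HTTP 500")
	}
	if name, ok := lookupAdaptor(m.ActualModelName); ok {
		fmt.Println("actual name resolves to:", name)
	}
}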

@@ -147,7 +147,6 @@ var InitialRootAccessToken = os.Getenv("INITIAL_ROOT_ACCESS_TOKEN")
 var GeminiVersion = env.String("GEMINI_VERSION", "v1")
 var OnlyOneLogFile = env.Bool("ONLY_ONE_LOG_FILE", false)
 var RelayProxy = env.String("RELAY_PROXY", "")

@@ -19,7 +19,7 @@ var _ adaptor.Adaptor = new(Adaptor)
 const channelName = "vertexai"
 
-type Adaptor struct {}
+type Adaptor struct{}
 
 func (a *Adaptor) Init(meta *meta.Meta) {
 }
@@ -38,7 +38,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 }
 
 func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
-	adaptor := GetAdaptor(meta.OriginModelName)
+	adaptor := GetAdaptor(meta.ActualModelName)
 	if adaptor == nil {
 		return nil, &relaymodel.ErrorWithStatusCode{
 			StatusCode: http.StatusInternalServerError,

@@ -30,13 +30,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 	req := Request{
 		AnthropicVersion: anthropicVersion,
 		// Model: claudeReq.Model,
 		Messages: claudeReq.Messages,
 		MaxTokens: claudeReq.MaxTokens,
 		Temperature: claudeReq.Temperature,
 		TopP: claudeReq.TopP,
 		TopK: claudeReq.TopK,
 		Stream: claudeReq.Stream,
 		Tools: claudeReq.Tools,
 	}
 
 	c.Set(ctxkey.RequestModel, request.Model)

@@ -4,16 +4,16 @@ import "github.com/songquanpeng/one-api/relay/adaptor/anthropic"
 type Request struct {
 	// AnthropicVersion must be "vertex-2023-10-16"
 	AnthropicVersion string `json:"anthropic_version"`
 	// Model string `json:"model"`
 	Messages []anthropic.Message `json:"messages"`
 	System string `json:"system,omitempty"`
 	MaxTokens int `json:"max_tokens,omitempty"`
 	StopSequences []string `json:"stop_sequences,omitempty"`
 	Stream bool `json:"stream,omitempty"`
 	Temperature float64 `json:"temperature,omitempty"`
 	TopP float64 `json:"top_p,omitempty"`
 	TopK int `json:"top_k,omitempty"`
 	Tools []anthropic.Tool `json:"tools,omitempty"`
 	ToolChoice any `json:"tool_choice,omitempty"`
 }

@@ -32,7 +32,6 @@ func init() {
 	}
 }
 
 type innerAIAdapter interface {
 	ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error)
 	DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode)
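
The innerAIAdapter interface above is the reason the lookup key matters: DoResponse delegates to a per-model inner adaptor, and that delegation only succeeds if GetAdaptor is queried with a name the registry was populated with. Below is a rough, hedged sketch of such a registry pattern; the adaptor types, model lists, map name, and GetAdaptor body are assumptions (the real interface takes gin/http/meta types rather than exposing Name).

package main

import "fmt"

// Simplified stand-in for innerAIAdapter; the real interface exposes
// ConvertRequest and DoResponse instead of Name.
type innerAIAdapter interface {
	Name() string
}

type claudeAdaptor struct{}

func (claudeAdaptor) Name() string { return "vertexai-claude" }

type geminiAdaptor struct{}

func (geminiAdaptor) Name() string { return "vertexai-gemini" }

// Assumed model lists, for illustration only.
var (
	claudeModels = []string{"claude-3-5-sonnet@20240620"}
	geminiModels = []string{"gemini-1.5-pro"}

	modelAdaptors = map[string]innerAIAdapter{}
)

func init() {
	for _, m := range claudeModels {
		modelAdaptors[m] = claudeAdaptor{}
	}
	for _, m := range geminiModels {
		modelAdaptors[m] = geminiAdaptor{}
	}
}

// GetAdaptor returns nil for unknown names, which is exactly the case the
// nil check in DoResponse turns into an HTTP 500.
func GetAdaptor(model string) innerAIAdapter {
	return modelAdaptors[model]
}

func main() {
	fmt.Println(GetAdaptor("gemini-1.5-pro").Name())  // registered upstream name: hit
	fmt.Println(GetAdaptor("my-mapped-alias") == nil) // unregistered user-facing alias: miss
}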

@@ -26,7 +26,6 @@ type ApplicationDefaultCredentials struct {
 	UniverseDomain string `json:"universe_domain"`
 }
 
 var Cache = cache.New(50*time.Minute, 55*time.Minute)
 
 const defaultScope = "https://www.googleapis.com/auth/cloud-platform"

@@ -43,7 +43,7 @@ var ChannelBaseURLs = []string{
 	"https://api.together.xyz", // 39
 	"https://ark.cn-beijing.volces.com", // 40
 	"https://api.novita.ai/v3/openai", // 41
 	"", // 42
 }
 
 func init() {

@@ -10,20 +10,22 @@ import (
 )
 
 type Meta struct {
 	Mode int
 	ChannelType int
 	ChannelId int
 	TokenId int
 	TokenName string
 	UserId int
 	Group string
 	ModelMapping map[string]string
 	BaseURL string
 	APIKey string
 	APIType int
 	Config model.ChannelConfig
 	IsStream bool
+	// OriginModelName is the model name from the raw user request
 	OriginModelName string
+	// ActualModelName is the model name after mapping
 	ActualModelName string
 	RequestURLPath string
 	PromptTokens int // only for DoResponse