fix: update GetAdaptor function to use the actual model name

The GetAdaptor call in the vertexai Adaptor's DoResponse method now uses the actual (mapped) model name instead of the origin model name from the raw request, ensuring that the correct sub-adaptor is retrieved for processing the response.
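As a rough illustration of the failure mode this avoids (a minimal, self-contained sketch; the alias, model names, and lookup table below are hypothetical, not the project's real data): when a channel's model mapping renames the user-facing model, a lookup keyed on the unmapped name finds no adaptor.

package main

import "fmt"

// subAdaptorFor stands in for vertexai.GetAdaptor's lookup: it resolves a
// model name to a backend family. All entries here are hypothetical examples.
var subAdaptorFor = map[string]string{
	"claude-3-sonnet": "claude",
	"gemini-1.5-pro":  "gemini",
}

func main() {
	// A channel-level ModelMapping can rename the user-facing model.
	modelMapping := map[string]string{"my-claude": "claude-3-sonnet"}

	originModelName := "my-claude"                   // model name from the raw user request
	actualModelName := modelMapping[originModelName] // model name after mapping

	fmt.Println(subAdaptorFor[originModelName]) // "" (no adaptor found, the request fails)
	fmt.Println(subAdaptorFor[actualModelName]) // "claude" (the correct adaptor is retrieved)
}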
Laisky.Cai 2024-07-15 06:28:03 +00:00
parent 1c44d7e1cd
commit d6536d2907
8 changed files with 36 additions and 37 deletions

View File

@@ -147,7 +147,6 @@ var InitialRootAccessToken = os.Getenv("INITIAL_ROOT_ACCESS_TOKEN")
var GeminiVersion = env.String("GEMINI_VERSION", "v1")
var OnlyOneLogFile = env.Bool("ONLY_ONE_LOG_FILE", false)
var RelayProxy = env.String("RELAY_PROXY", "")
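The settings above are plain environment variables with defaults. A minimal sketch of that pattern, assuming helpers shaped like env.String and env.Bool (the bodies below are illustrative, not the project's env package):

package main

import (
	"fmt"
	"os"
	"strconv"
)

// envString returns the variable's value, or the default when it is unset.
func envString(key, def string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return def
}

// envBool parses the variable as a boolean, falling back to the default.
func envBool(key string, def bool) bool {
	v := os.Getenv(key)
	if v == "" {
		return def
	}
	b, err := strconv.ParseBool(v)
	if err != nil {
		return def
	}
	return b
}

func main() {
	fmt.Println(envString("GEMINI_VERSION", "v1"))   // "v1" unless overridden
	fmt.Println(envBool("ONLY_ONE_LOG_FILE", false)) // false unless overridden
}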

View File

@@ -19,7 +19,7 @@ var _ adaptor.Adaptor = new(Adaptor)
const channelName = "vertexai"
-type Adaptor struct {}
+type Adaptor struct{}
func (a *Adaptor) Init(meta *meta.Meta) {
}
@@ -38,7 +38,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
-adaptor := GetAdaptor(meta.OriginModelName)
+adaptor := GetAdaptor(meta.ActualModelName)
if adaptor == nil {
return nil, &relaymodel.ErrorWithStatusCode{
StatusCode: http.StatusInternalServerError,

View File

@@ -30,13 +30,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
req := Request{
AnthropicVersion: anthropicVersion,
// Model: claudeReq.Model,
Messages: claudeReq.Messages,
MaxTokens: claudeReq.MaxTokens,
Temperature: claudeReq.Temperature,
TopP: claudeReq.TopP,
TopK: claudeReq.TopK,
Stream: claudeReq.Stream,
Tools: claudeReq.Tools,
}
c.Set(ctxkey.RequestModel, request.Model)

View File

@@ -4,16 +4,16 @@ import "github.com/songquanpeng/one-api/relay/adaptor/anthropic"
type Request struct {
// AnthropicVersion must be "vertex-2023-10-16"
AnthropicVersion string `json:"anthropic_version"`
// Model string `json:"model"`
Messages []anthropic.Message `json:"messages"`
System string `json:"system,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
StopSequences []string `json:"stop_sequences,omitempty"`
Stream bool `json:"stream,omitempty"`
Temperature float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
TopK int `json:"top_k,omitempty"`
Tools []anthropic.Tool `json:"tools,omitempty"`
ToolChoice any `json:"tool_choice,omitempty"`
}
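For context, when Claude is served through Vertex AI the model is selected by the endpoint URL rather than by a model field in the body, which is why the Model field above is commented out; the body instead pins the Anthropic API revision via anthropic_version. A minimal sketch of assembling such a payload, using simplified stand-in types rather than the project's anthropic package:

package main

import (
	"encoding/json"
	"fmt"
)

// message is a simplified stand-in for anthropic.Message.
type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// vertexClaudeRequest mirrors the shape of the Request struct above,
// reduced to a few fields for the sketch.
type vertexClaudeRequest struct {
	AnthropicVersion string    `json:"anthropic_version"`
	Messages         []message `json:"messages"`
	MaxTokens        int       `json:"max_tokens,omitempty"`
	Stream           bool      `json:"stream,omitempty"`
}

func main() {
	req := vertexClaudeRequest{
		AnthropicVersion: "vertex-2023-10-16", // required version marker on Vertex AI
		Messages:         []message{{Role: "user", Content: "Hello"}},
		MaxTokens:        256,
	}
	body, err := json.Marshal(req)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // note: no "model" key in the JSON body
}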

View File

@@ -32,7 +32,6 @@ func init() {
}
}
type innerAIAdapter interface {
ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error)
DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode)
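The innerAIAdapter interface lets each model family behind the vertexai channel supply its own request conversion and response handling, with GetAdaptor picking an implementation by model name. A minimal sketch of that register-then-dispatch shape, with reduced method signatures and a hypothetical prefix table so it stays self-contained:

package main

import (
	"fmt"
	"strings"
)

// innerAdapter is a reduced stand-in for innerAIAdapter; the real methods
// take gin and relay types, trimmed here to keep the sketch self-contained.
type innerAdapter interface {
	ConvertRequest(prompt string) (any, error)
	DoResponse(raw string) (string, error)
}

// claudeAdapter is a hypothetical implementation for one model family.
type claudeAdapter struct{}

func (claudeAdapter) ConvertRequest(prompt string) (any, error) {
	return map[string]string{"prompt": prompt}, nil
}

func (claudeAdapter) DoResponse(raw string) (string, error) {
	return strings.TrimSpace(raw), nil
}

// registry would be populated in init() in real code; the prefix is illustrative.
var registry = map[string]innerAdapter{"claude": claudeAdapter{}}

// getAdaptor mirrors the shape of GetAdaptor: nil means no backend handles the model.
func getAdaptor(model string) innerAdapter {
	for prefix, a := range registry {
		if strings.HasPrefix(model, prefix) {
			return a
		}
	}
	return nil // the caller turns this into an internal-server error
}

func main() {
	a := getAdaptor("claude-3-sonnet")
	if a == nil {
		fmt.Println("no adaptor for model")
		return
	}
	converted, _ := a.ConvertRequest("Hello")
	fmt.Println(converted)
}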

View File

@@ -26,7 +26,6 @@ type ApplicationDefaultCredentials struct {
UniverseDomain string `json:"universe_domain"`
}
var Cache = cache.New(50*time.Minute, 55*time.Minute)
const defaultScope = "https://www.googleapis.com/auth/cloud-platform"
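The Cache above uses an expiring in-memory cache (50-minute default TTL, 55-minute cleanup sweep), which fits reusing short-lived Google access tokens instead of minting one per request, since such tokens typically last about an hour. A minimal sketch of that caching pattern, assuming the patrickmn/go-cache package whose New(defaultExpiration, cleanupInterval) call matches the line above, and with a placeholder token fetcher rather than the project's real credential exchange:

package main

import (
	"fmt"
	"time"

	"github.com/patrickmn/go-cache"
)

// tokenCache keeps entries a bit shorter than a typical one-hour token
// lifetime, mirroring the 50/55-minute settings in the diff above.
var tokenCache = cache.New(50*time.Minute, 55*time.Minute)

// fetchAccessToken is a placeholder for the real credential exchange
// (e.g. signing a service-account JWT and calling Google's token endpoint).
func fetchAccessToken(projectID string) (string, error) {
	return "example-token-for-" + projectID, nil
}

// accessToken returns a cached token for the project, fetching one on a miss.
func accessToken(projectID string) (string, error) {
	if v, ok := tokenCache.Get(projectID); ok {
		return v.(string), nil
	}
	token, err := fetchAccessToken(projectID)
	if err != nil {
		return "", err
	}
	tokenCache.Set(projectID, token, cache.DefaultExpiration)
	return token, nil
}

func main() {
	token, _ := accessToken("my-project") // hypothetical project ID
	fmt.Println(token)
}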

View File

@@ -43,7 +43,7 @@ var ChannelBaseURLs = []string{
"https://api.together.xyz", // 39
"https://ark.cn-beijing.volces.com", // 40
"https://api.novita.ai/v3/openai", // 41
"", // 42
"", // 42
}
func init() {

View File

@@ -10,20 +10,22 @@ import (
)
type Meta struct {
Mode int
ChannelType int
ChannelId int
TokenId int
TokenName string
UserId int
Group string
ModelMapping map[string]string
BaseURL string
APIKey string
APIType int
Config model.ChannelConfig
IsStream bool
// OriginModelName is the model name from the raw user request
OriginModelName string
// ActualModelName is the model name after mapping
ActualModelName string
RequestURLPath string
PromptTokens int // only for DoResponse