From d027041f679a08cbc17adfe8f23576d9182d1a46 Mon Sep 17 00:00:00 2001
From: David Zhuang
Date: Sat, 16 Dec 2023 02:50:46 -0500
Subject: [PATCH] feat: Add tooling to Gemini; Add OpenAI-like system prompt to Gemini

---
 common/constants.go             |  1 +
 controller/relay-gemini-chat.go | 53 ++++++++++++++++++++++++++-------
 controller/relay-text.go        | 10 ++-----
 3 files changed, 46 insertions(+), 18 deletions(-)

diff --git a/common/constants.go b/common/constants.go
index ac8194de..45fc9535 100644
--- a/common/constants.go
+++ b/common/constants.go
@@ -215,4 +215,5 @@ var ChannelBaseURLs = []string{
 	"https://api.aiproxy.io",            // 21
 	"https://fastgpt.run/api/openapi",   // 22
 	"https://hunyuan.cloud.tencent.com", //23
+	"",                                  //24
 }
diff --git a/controller/relay-gemini-chat.go b/controller/relay-gemini-chat.go
index c1a9e63d..94f723ad 100644
--- a/controller/relay-gemini-chat.go
+++ b/controller/relay-gemini-chat.go
@@ -12,25 +12,31 @@ import (
 
 type GeminiChatRequest struct {
 	Contents         []GeminiChatContents       `json:"contents"`
-	SafetySettings   []GeminiChatSafetySettings `json:"safety_settings"`
-	GenerationConfig GeminiChatGenerationConfig `json:"generation_config"`
+	SafetySettings   []GeminiChatSafetySettings `json:"safety_settings,omitempty"`
+	GenerationConfig GeminiChatGenerationConfig `json:"generation_config,omitempty"`
+	Tools            []GeminiChatTools          `json:"tools,omitempty"`
 }
 type GeminiChatParts struct {
 	Text string `json:"text"`
 }
 type GeminiChatContents struct {
-	Role  string          `json:"role"`
-	Parts GeminiChatParts `json:"parts"`
+	Role  string            `json:"role"`
+	Parts []GeminiChatParts `json:"parts"`
 }
 type GeminiChatSafetySettings struct {
 	Category  string `json:"category"`
 	Threshold string `json:"threshold"`
 }
+type GeminiChatTools struct {
+	FunctionDeclarations any `json:"functionDeclarations,omitempty"`
+}
 type GeminiChatGenerationConfig struct {
-	Temperature     float64 `json:"temperature"`
-	TopP            float64 `json:"topP"`
-	TopK            int     `json:"topK"`
-	MaxOutputTokens int     `json:"maxOutputTokens"`
+	Temperature     float64  `json:"temperature,omitempty"`
+	TopP            float64  `json:"topP,omitempty"`
+	TopK            float64  `json:"topK,omitempty"`
+	MaxOutputTokens int      `json:"maxOutputTokens,omitempty"`
+	CandidateCount  int      `json:"candidateCount,omitempty"`
+	StopSequences   []string `json:"stopSequences,omitempty"`
 }
 
 // Setting safety to the lowest possible values since Gemini is already powerless enough
@@ -58,19 +64,44 @@ func requestOpenAI2GeminiChat(textRequest GeneralOpenAIRequest) *GeminiChatReque
 		GenerationConfig: GeminiChatGenerationConfig{
 			Temperature:     textRequest.Temperature,
 			TopP:            textRequest.TopP,
-			TopK:            textRequest.MaxTokens,
 			MaxOutputTokens: textRequest.MaxTokens,
 		},
+		Tools: []GeminiChatTools{
+			{
+				FunctionDeclarations: textRequest.Functions,
+			},
+		},
 	}
+	systemPrompt := ""
 	for _, message := range textRequest.Messages {
 		content := GeminiChatContents{
 			Role: message.Role,
-			Parts: GeminiChatParts{
-				Text: message.StringContent(),
+			Parts: []GeminiChatParts{
+				{
+					Text: message.StringContent(),
+				},
 			},
 		}
+		// there's no assistant role in gemini and API shall vomit if Role is not user or model
+		if content.Role == "assistant" {
+			content.Role = "model"
+		}
+		// Converting system prompt to prompt from user for the same reason
+		if content.Role == "system" {
+			systemPrompt = message.StringContent()
+			continue
+		}
+		if content.Role == "user" && systemPrompt != "" {
+			content.Parts = []GeminiChatParts{
+				{
+					Text: systemPrompt + "\n\nHuman: " + message.StringContent(),
+				},
+			}
+			systemPrompt = ""
+		}
 		geminiRequest.Contents = append(geminiRequest.Contents, content)
 	}
+
 	return &geminiRequest
 }
 
diff --git a/controller/relay-text.go b/controller/relay-text.go
index e14e18b8..662d05ec 100644
--- a/controller/relay-text.go
+++ b/controller/relay-text.go
@@ -119,6 +119,8 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 		apiType = APITypeAIProxyLibrary
 	case common.ChannelTypeTencent:
 		apiType = APITypeTencent
+	case common.ChannelTypeGeminiChat:
+		apiType = APITypeGeminiChat
 	}
 	baseURL := common.ChannelBaseURLs[channelType]
 	requestURL := c.Request.URL.String()
@@ -179,15 +181,9 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 		apiKey = strings.TrimPrefix(apiKey, "Bearer ")
 		fullRequestURL += "?key=" + apiKey
 	case APITypeGeminiChat:
-		requestURLSuffix := "/v1beta/models/gemini-pro:generateContent"
 		switch textRequest.Model {
 		case "gemini-pro":
-			requestURLSuffix = "/v1beta/models/gemini-pro:generateContent"
-		}
-		if baseURL != "" {
-			fullRequestURL = fmt.Sprintf("%s%s", baseURL, requestURLSuffix)
-		} else {
-			fullRequestURL = fmt.Sprintf("https://generativelanguage.googleapis.com%s", requestURLSuffix)
+			fullRequestURL = "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent"
 		}
 		apiKey := c.Request.Header.Get("Authorization")
 		apiKey = strings.TrimPrefix(apiKey, "Bearer ")
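
To see what the new conversion loop in requestOpenAI2GeminiChat produces without wiring up one-api itself, here is a small, self-contained sketch of the same mapping rules. The Message, GeminiPart, and GeminiContent types below are simplified stand-ins for the project's own structs (GeneralOpenAIRequest, GeminiChatParts, GeminiChatContents), so treat it as an illustration of the patched logic rather than the project's implementation.

// Standalone sketch of the role mapping added by this patch:
//   - "assistant" is rewritten to "model" (Gemini only accepts user/model roles)
//   - a "system" message is held back and prepended to the next user turn
//     as "<system>\n\nHuman: <user>"
package main

import (
	"encoding/json"
	"fmt"
)

type Message struct {
	Role    string
	Content string
}

type GeminiPart struct {
	Text string `json:"text"`
}

type GeminiContent struct {
	Role  string       `json:"role"`
	Parts []GeminiPart `json:"parts"`
}

func convert(messages []Message) []GeminiContent {
	var contents []GeminiContent
	systemPrompt := ""
	for _, m := range messages {
		role, text := m.Role, m.Content
		if role == "assistant" {
			role = "model"
		}
		if role == "system" {
			systemPrompt = text // folded into the next user message
			continue
		}
		if role == "user" && systemPrompt != "" {
			text = systemPrompt + "\n\nHuman: " + text
			systemPrompt = ""
		}
		contents = append(contents, GeminiContent{Role: role, Parts: []GeminiPart{{Text: text}}})
	}
	return contents
}

func main() {
	out := convert([]Message{
		{Role: "system", Content: "You are terse."},
		{Role: "user", Content: "Hi"},
		{Role: "assistant", Content: "Hello."},
	})
	b, _ := json.MarshalIndent(out, "", "  ")
	fmt.Println(string(b))
}

Running this prints two contents entries: a "user" turn whose text is "You are terse.\n\nHuman: Hi" followed by a "model" turn, which is the shape the patched code sends to Gemini; the OpenAI functions list travels separately through the new Tools field as functionDeclarations.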
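On the relay-text.go side, the patch stops deriving the Gemini endpoint from baseURL and requestURLSuffix and hard-codes the generativelanguage.googleapis.com URL for the gemini-pro model (the new empty //24 entry in ChannelBaseURLs matches this). The sketch below is illustrative rather than part of the patch; it assumes the trimmed Authorization value is appended as a ?key= query parameter further down, as the preceding PaLM branch in the hunk's leading context does.

// Illustrative only: the URL shape the patched APITypeGeminiChat branch ends up
// requesting, under the assumption described above.
package main

import "fmt"

func geminiURL(model, apiKey string) string {
	fullRequestURL := ""
	switch model {
	case "gemini-pro": // the only model routed by this patch
		fullRequestURL = "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent"
	}
	return fullRequestURL + "?key=" + apiKey
}

func main() {
	fmt.Println(geminiURL("gemini-pro", "YOUR_API_KEY"))
}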