fix: refactor Gemini adaptor to support streaming content generation (#1382)

This commit is contained in:
Wei Tingjiang 2024-04-27 15:39:59 +08:00 committed by GitHub
parent 007906216d
commit ef88497f25
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 17 additions and 22 deletions

View File

@@ -3,6 +3,9 @@ package gemini
import ( import (
"errors" "errors"
"fmt" "fmt"
"io"
"net/http"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common/config" "github.com/songquanpeng/one-api/common/config"
"github.com/songquanpeng/one-api/common/helper" "github.com/songquanpeng/one-api/common/helper"
@@ -10,8 +13,6 @@ import (
"github.com/songquanpeng/one-api/relay/adaptor/openai" "github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/meta" "github.com/songquanpeng/one-api/relay/meta"
"github.com/songquanpeng/one-api/relay/model" "github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
) )
type Adaptor struct { type Adaptor struct {
@@ -25,7 +26,7 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
version := helper.AssignOrDefault(meta.Config.APIVersion, config.GeminiVersion) version := helper.AssignOrDefault(meta.Config.APIVersion, config.GeminiVersion)
action := "generateContent" action := "generateContent"
if meta.IsStream { if meta.IsStream {
action = "streamGenerateContent" action = "streamGenerateContent?alt=sse"
} }
return fmt.Sprintf("%s/%s/models/%s:%s", meta.BaseURL, version, meta.ActualModelName, action), nil return fmt.Sprintf("%s/%s/models/%s:%s", meta.BaseURL, version, meta.ActualModelName, action), nil
} }

View File

@@ -232,8 +232,6 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatC
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) { func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
responseText := "" responseText := ""
dataChan := make(chan string)
stopChan := make(chan bool)
scanner := bufio.NewScanner(resp.Body) scanner := bufio.NewScanner(resp.Body)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 { if atEOF && len(data) == 0 {
@@ -247,14 +245,16 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
} }
return 0, nil, nil return 0, nil, nil
}) })
dataChan := make(chan string)
stopChan := make(chan bool)
go func() { go func() {
for scanner.Scan() { for scanner.Scan() {
data := scanner.Text() data := scanner.Text()
data = strings.TrimSpace(data) data = strings.TrimSpace(data)
if !strings.HasPrefix(data, "\"text\": \"") { if !strings.HasPrefix(data, "data: ") {
continue continue
} }
data = strings.TrimPrefix(data, "\"text\": \"") data = strings.TrimPrefix(data, "data: ")
data = strings.TrimSuffix(data, "\"") data = strings.TrimSuffix(data, "\"")
dataChan <- data dataChan <- data
} }
@@ -264,23 +264,17 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
c.Stream(func(w io.Writer) bool { c.Stream(func(w io.Writer) bool {
select { select {
case data := <-dataChan: case data := <-dataChan:
// this is used to prevent annoying \ related format bug var geminiResponse ChatResponse
data = fmt.Sprintf("{\"content\": \"%s\"}", data) err := json.Unmarshal([]byte(data), &geminiResponse)
type dummyStruct struct { if err != nil {
Content string `json:"content"` logger.SysError("error unmarshalling stream response: " + err.Error())
return true
} }
var dummy dummyStruct response := streamResponseGeminiChat2OpenAI(&geminiResponse)
err := json.Unmarshal([]byte(data), &dummy) if response == nil {
responseText += dummy.Content return true
var choice openai.ChatCompletionsStreamResponseChoice
choice.Delta.Content = dummy.Content
response := openai.ChatCompletionsStreamResponse{
Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
Object: "chat.completion.chunk",
Created: helper.GetTimestamp(),
Model: "gemini-pro",
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
} }
responseText += fmt.Sprintf("%v", response.Choices[0].Delta.Content)
jsonResponse, err := json.Marshal(response) jsonResponse, err := json.Marshal(response)
if err != nil { if err != nil {
logger.SysError("error marshalling stream response: " + err.Error()) logger.SysError("error marshalling stream response: " + err.Error())