feat: support for the ollama vision model

Tested with the `llava` model; tests pass.
nongqiqin 2024-04-26 10:15:40 +08:00
parent c317872097
commit 2b82cd25e4


@@ -13,6 +13,7 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common"
+	"github.com/songquanpeng/one-api/common/image"
 	"github.com/songquanpeng/one-api/common/logger"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 	"github.com/songquanpeng/one-api/relay/constant"
@@ -32,9 +33,24 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
 		Stream: request.Stream,
 	}
 	for _, message := range request.Messages {
+		openaiContent := message.ParseContent()
+		var imageUrls []string
+		var contentText string
+		for _, part := range openaiContent {
+			logger.SysLog(part.Type)
+			switch part.Type {
+			case model.ContentTypeText:
+				contentText = part.Text
+			case model.ContentTypeImageURL:
+				_, data, _ := image.GetImageFromUrl(part.ImageURL.Url)
+				imageUrls = append(imageUrls, data)
+			}
+		}
 		ollamaRequest.Messages = append(ollamaRequest.Messages, Message{
 			Role:    message.Role,
-			Content: message.StringContent(),
+			Content: contentText,
+			Images:  imageUrls,
 		})
 	}
 	return &ollamaRequest
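
For context on the target format: Ollama's /api/chat endpoint expects image data as base64-encoded strings in a per-message `images` array, which is why the converter fetches each `image_url` part and collects the encoded data alongside the text content. Below is a minimal standalone sketch (not part of this commit) of sending such a request directly to a local Ollama instance; the file name `cat.png` and the localhost endpoint are placeholder assumptions.

```go
package main

import (
	"bytes"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"net/http"
	"os"
)

// ollamaMessage mirrors the message shape accepted by Ollama's /api/chat:
// plain text in Content, base64-encoded image data in Images.
type ollamaMessage struct {
	Role    string   `json:"role"`
	Content string   `json:"content"`
	Images  []string `json:"images,omitempty"`
}

type ollamaChatRequest struct {
	Model    string          `json:"model"`
	Messages []ollamaMessage `json:"messages"`
	Stream   bool            `json:"stream"`
}

func main() {
	// Read a local image and base64-encode it, analogous to what
	// image.GetImageFromUrl does for remote image_url parts.
	raw, err := os.ReadFile("cat.png") // placeholder file
	if err != nil {
		panic(err)
	}
	encoded := base64.StdEncoding.EncodeToString(raw)

	req := ollamaChatRequest{
		Model: "llava",
		Messages: []ollamaMessage{{
			Role:    "user",
			Content: "What is in this picture?",
			Images:  []string{encoded},
		}},
		Stream: false,
	}

	body, _ := json.Marshal(req)
	resp, err := http.Post("http://localhost:11434/api/chat", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out map[string]any
	_ = json.NewDecoder(resp.Body).Decode(&out)
	fmt.Println(out["message"])
}
```

This is only an illustration of the message shape the converted request must match; the commit itself builds the same structure inside ConvertRequest using one-api's existing helpers.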