fix: fix gemini panic (close #833)

JustSong 2023-12-17 23:30:45 +08:00
parent 461f5dab56
commit 97030e27f8


@@ -114,7 +114,7 @@ func requestOpenAI2Gemini(textRequest GeneralOpenAIRequest) *GeminiChatRequest {
 				Role: "model",
 				Parts: []GeminiPart{
 					{
-						Text: "ok",
+						Text: "Okay",
 					},
 				},
 			})
@@ -130,6 +130,16 @@ type GeminiChatResponse struct {
 	PromptFeedback GeminiChatPromptFeedback `json:"promptFeedback"`
 }
 
+func (g *GeminiChatResponse) GetResponseText() string {
+	if g == nil {
+		return ""
+	}
+	if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 {
+		return g.Candidates[0].Content.Parts[0].Text
+	}
+	return ""
+}
+
 type GeminiChatCandidate struct {
 	Content       GeminiChatContent        `json:"content"`
 	FinishReason  string                   `json:"finishReason"`
@@ -158,10 +168,13 @@ func responseGeminiChat2OpenAI(response *GeminiChatResponse) *OpenAITextResponse
 			Index: i,
 			Message: Message{
 				Role:    "assistant",
-				Content: candidate.Content.Parts[0].Text,
+				Content: "",
 			},
 			FinishReason: stopFinishReason,
 		}
+		if len(candidate.Content.Parts) > 0 {
+			choice.Message.Content = candidate.Content.Parts[0].Text
+		}
 		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
 	}
 	return &fullTextResponse
@@ -169,9 +182,7 @@ func responseGeminiChat2OpenAI(response *GeminiChatResponse) *OpenAITextResponse
 
 func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) *ChatCompletionsStreamResponse {
 	var choice ChatCompletionsStreamResponseChoice
-	if len(geminiResponse.Candidates) > 0 && len(geminiResponse.Candidates[0].Content.Parts) > 0 {
-		choice.Delta.Content = geminiResponse.Candidates[0].Content.Parts[0].Text
-	}
+	choice.Delta.Content = geminiResponse.GetResponseText()
 	choice.FinishReason = &stopFinishReason
 	var response ChatCompletionsStreamResponse
 	response.Object = "chat.completion.chunk"
@@ -276,7 +287,7 @@ func geminiChatHandler(c *gin.Context, resp *http.Response, promptTokens int, mo
 		}, nil
 	}
 	fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse)
-	completionTokens := countTokenText(geminiResponse.Candidates[0].Content.Parts[0].Text, model)
+	completionTokens := countTokenText(geminiResponse.GetResponseText(), model)
 	usage := Usage{
 		PromptTokens:     promptTokens,
 		CompletionTokens: completionTokens,
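
The guards added above (nil receiver, empty Candidates, empty Parts) can be exercised with a small regression test. The sketch below is not part of the commit: the package name, file placement, and test name are assumptions (it assumes it sits next to the changed file in the same package), while the GeminiChatResponse, GeminiChatCandidate, GeminiChatContent, and GeminiPart types and the GetResponseText method are the ones shown in the diff.

package controller

import "testing"

// Regression sketch for the panic fixed here: GetResponseText must never
// index Candidates[0] or Parts[0] when they are missing.
func TestGeminiGetResponseText(t *testing.T) {
	// A nil receiver should not panic and should return an empty string.
	var nilResp *GeminiChatResponse
	if got := nilResp.GetResponseText(); got != "" {
		t.Errorf("nil response: got %q, want empty string", got)
	}

	// No candidates: the shape that previously caused the index-out-of-range panic.
	emptyResp := &GeminiChatResponse{}
	if got := emptyResp.GetResponseText(); got != "" {
		t.Errorf("empty candidates: got %q, want empty string", got)
	}

	// A candidate with no parts should also fall back to an empty string.
	noParts := &GeminiChatResponse{
		Candidates: []GeminiChatCandidate{{Content: GeminiChatContent{}}},
	}
	if got := noParts.GetResponseText(); got != "" {
		t.Errorf("no parts: got %q, want empty string", got)
	}

	// The normal case still returns the first part's text.
	withText := &GeminiChatResponse{
		Candidates: []GeminiChatCandidate{{
			Content: GeminiChatContent{Parts: []GeminiPart{{Text: "hello"}}},
		}},
	}
	if got := withText.GetResponseText(); got != "hello" {
		t.Errorf("normal case: got %q, want %q", got, "hello")
	}
}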