2024-01-14 11:21:03 +00:00
|
|
|
package anthropic
|
2023-07-22 08:18:03 +00:00
|
|
|
|
2023-07-22 09:12:13 +00:00
|
|
|
import (
|
2023-07-22 09:36:40 +00:00
|
|
|
"bufio"
|
|
|
|
"encoding/json"
|
2023-07-22 09:12:13 +00:00
|
|
|
"fmt"
|
2024-06-30 10:36:33 +00:00
|
|
|
"github.com/songquanpeng/one-api/common/render"
|
2024-04-27 07:58:07 +00:00
|
|
|
"io"
|
|
|
|
"net/http"
|
|
|
|
"strings"
|
|
|
|
|
2023-07-22 09:36:40 +00:00
|
|
|
"github.com/gin-gonic/gin"
|
2024-01-28 11:38:58 +00:00
|
|
|
"github.com/songquanpeng/one-api/common"
|
|
|
|
"github.com/songquanpeng/one-api/common/helper"
|
2024-03-08 17:12:47 +00:00
|
|
|
"github.com/songquanpeng/one-api/common/image"
|
2024-01-28 11:38:58 +00:00
|
|
|
"github.com/songquanpeng/one-api/common/logger"
|
2024-04-05 17:36:48 +00:00
|
|
|
"github.com/songquanpeng/one-api/relay/adaptor/openai"
|
2024-02-17 16:15:31 +00:00
|
|
|
"github.com/songquanpeng/one-api/relay/model"
|
2023-07-22 09:12:13 +00:00
|
|
|
)
|
|
|
|
|
2024-03-08 17:12:47 +00:00
|
|
|
// stopReasonClaude2OpenAI maps an Anthropic stop reason onto the
// equivalent OpenAI finish_reason string. A nil reason yields the empty
// string; unrecognized reasons are passed through unchanged.
func stopReasonClaude2OpenAI(reason *string) string {
	if reason == nil {
		return ""
	}
	switch *reason {
	case "end_turn", "stop_sequence":
		return "stop"
	case "max_tokens":
		return "length"
	}
	return *reason
}
|
2023-07-22 09:12:13 +00:00
|
|
|
|
2024-02-17 16:15:31 +00:00
|
|
|
func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
|
2024-01-14 11:21:03 +00:00
|
|
|
claudeRequest := Request{
|
2024-03-08 17:12:47 +00:00
|
|
|
Model: textRequest.Model,
|
|
|
|
MaxTokens: textRequest.MaxTokens,
|
|
|
|
Temperature: textRequest.Temperature,
|
|
|
|
TopP: textRequest.TopP,
|
2024-03-30 02:43:26 +00:00
|
|
|
TopK: textRequest.TopK,
|
2024-03-08 17:12:47 +00:00
|
|
|
Stream: textRequest.Stream,
|
|
|
|
}
|
|
|
|
if claudeRequest.MaxTokens == 0 {
|
|
|
|
claudeRequest.MaxTokens = 4096
|
|
|
|
}
|
|
|
|
// legacy model name mapping
|
|
|
|
if claudeRequest.Model == "claude-instant-1" {
|
|
|
|
claudeRequest.Model = "claude-instant-1.1"
|
|
|
|
} else if claudeRequest.Model == "claude-2" {
|
|
|
|
claudeRequest.Model = "claude-2.1"
|
|
|
|
}
|
2023-07-22 09:12:13 +00:00
|
|
|
for _, message := range textRequest.Messages {
|
2024-03-08 17:12:47 +00:00
|
|
|
if message.Role == "system" && claudeRequest.System == "" {
|
|
|
|
claudeRequest.System = message.StringContent()
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
claudeMessage := Message{
|
|
|
|
Role: message.Role,
|
|
|
|
}
|
|
|
|
var content Content
|
|
|
|
if message.IsStringContent() {
|
|
|
|
content.Type = "text"
|
|
|
|
content.Text = message.StringContent()
|
|
|
|
claudeMessage.Content = append(claudeMessage.Content, content)
|
|
|
|
claudeRequest.Messages = append(claudeRequest.Messages, claudeMessage)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
var contents []Content
|
|
|
|
openaiContent := message.ParseContent()
|
|
|
|
for _, part := range openaiContent {
|
|
|
|
var content Content
|
|
|
|
if part.Type == model.ContentTypeText {
|
|
|
|
content.Type = "text"
|
|
|
|
content.Text = part.Text
|
|
|
|
} else if part.Type == model.ContentTypeImageURL {
|
|
|
|
content.Type = "image"
|
|
|
|
content.Source = &ImageSource{
|
|
|
|
Type: "base64",
|
|
|
|
}
|
|
|
|
mimeType, data, _ := image.GetImageFromUrl(part.ImageURL.Url)
|
|
|
|
content.Source.MediaType = mimeType
|
|
|
|
content.Source.Data = data
|
2023-11-24 13:39:44 +00:00
|
|
|
}
|
2024-03-08 17:12:47 +00:00
|
|
|
contents = append(contents, content)
|
2023-07-22 09:12:13 +00:00
|
|
|
}
|
2024-03-08 17:12:47 +00:00
|
|
|
claudeMessage.Content = contents
|
|
|
|
claudeRequest.Messages = append(claudeRequest.Messages, claudeMessage)
|
2023-07-22 09:12:13 +00:00
|
|
|
}
|
|
|
|
return &claudeRequest
|
|
|
|
}
|
|
|
|
|
2024-03-08 17:12:47 +00:00
|
|
|
// https://docs.anthropic.com/claude/reference/messages-streaming
|
2024-04-19 16:40:47 +00:00
|
|
|
func StreamResponseClaude2OpenAI(claudeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
|
2024-03-08 17:12:47 +00:00
|
|
|
var response *Response
|
|
|
|
var responseText string
|
|
|
|
var stopReason string
|
|
|
|
switch claudeResponse.Type {
|
|
|
|
case "message_start":
|
|
|
|
return nil, claudeResponse.Message
|
|
|
|
case "content_block_start":
|
|
|
|
if claudeResponse.ContentBlock != nil {
|
|
|
|
responseText = claudeResponse.ContentBlock.Text
|
|
|
|
}
|
|
|
|
case "content_block_delta":
|
|
|
|
if claudeResponse.Delta != nil {
|
|
|
|
responseText = claudeResponse.Delta.Text
|
|
|
|
}
|
|
|
|
case "message_delta":
|
|
|
|
if claudeResponse.Usage != nil {
|
|
|
|
response = &Response{
|
|
|
|
Usage: *claudeResponse.Usage,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if claudeResponse.Delta != nil && claudeResponse.Delta.StopReason != nil {
|
|
|
|
stopReason = *claudeResponse.Delta.StopReason
|
|
|
|
}
|
|
|
|
}
|
2024-01-14 11:21:03 +00:00
|
|
|
var choice openai.ChatCompletionsStreamResponseChoice
|
2024-03-08 17:12:47 +00:00
|
|
|
choice.Delta.Content = responseText
|
|
|
|
choice.Delta.Role = "assistant"
|
|
|
|
finishReason := stopReasonClaude2OpenAI(&stopReason)
|
2023-08-12 03:04:53 +00:00
|
|
|
if finishReason != "null" {
|
|
|
|
choice.FinishReason = &finishReason
|
|
|
|
}
|
2024-03-08 17:12:47 +00:00
|
|
|
var openaiResponse openai.ChatCompletionsStreamResponse
|
|
|
|
openaiResponse.Object = "chat.completion.chunk"
|
|
|
|
openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
|
|
|
|
return &openaiResponse, response
|
2023-07-22 09:12:13 +00:00
|
|
|
}
|
|
|
|
|
2024-04-19 16:40:47 +00:00
|
|
|
func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
|
2024-03-08 17:12:47 +00:00
|
|
|
var responseText string
|
|
|
|
if len(claudeResponse.Content) > 0 {
|
|
|
|
responseText = claudeResponse.Content[0].Text
|
|
|
|
}
|
2024-01-14 11:21:03 +00:00
|
|
|
choice := openai.TextResponseChoice{
|
2023-07-22 09:12:13 +00:00
|
|
|
Index: 0,
|
2024-02-17 16:15:31 +00:00
|
|
|
Message: model.Message{
|
2023-07-22 09:12:13 +00:00
|
|
|
Role: "assistant",
|
2024-03-08 17:12:47 +00:00
|
|
|
Content: responseText,
|
2023-07-22 09:12:13 +00:00
|
|
|
Name: nil,
|
|
|
|
},
|
|
|
|
FinishReason: stopReasonClaude2OpenAI(claudeResponse.StopReason),
|
|
|
|
}
|
2024-01-14 11:21:03 +00:00
|
|
|
fullTextResponse := openai.TextResponse{
|
2024-03-08 17:12:47 +00:00
|
|
|
Id: fmt.Sprintf("chatcmpl-%s", claudeResponse.Id),
|
|
|
|
Model: claudeResponse.Model,
|
2023-07-22 09:12:13 +00:00
|
|
|
Object: "chat.completion",
|
2024-01-21 15:21:42 +00:00
|
|
|
Created: helper.GetTimestamp(),
|
2024-01-14 11:21:03 +00:00
|
|
|
Choices: []openai.TextResponseChoice{choice},
|
2023-07-22 09:12:13 +00:00
|
|
|
}
|
|
|
|
return &fullTextResponse
|
|
|
|
}
|
2023-07-22 09:36:40 +00:00
|
|
|
|
2024-03-08 17:12:47 +00:00
|
|
|
// StreamHandler relays an Anthropic SSE stream to the client as OpenAI-style
// chunks. It scans the upstream body line by line, converts each "data:"
// event via StreamResponseClaude2OpenAI, accumulates token usage from
// metadata events, and finishes with a done marker. Returns the accumulated
// usage, or a wrapped error if closing the upstream body fails.
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
	// One created timestamp shared by every chunk of this stream.
	createdTime := helper.GetTimestamp()
	scanner := bufio.NewScanner(resp.Body)
	// Custom split: tokens are newline-delimited lines; a trailing partial
	// line at EOF is emitted as a final token.
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		// Not enough data for a full line yet; request more input.
		return 0, nil, nil
	})

	common.SetEventStreamHeaders(c)

	var usage model.Usage
	var modelName string
	var id string

	for scanner.Scan() {
		data := scanner.Text()
		// Skip event-name lines, blanks, and anything not shaped "data:<payload>".
		if len(data) < 6 || !strings.HasPrefix(data, "data:") {
			continue
		}
		data = strings.TrimPrefix(data, "data:")
		data = strings.TrimSpace(data)

		var claudeResponse StreamResponse
		err := json.Unmarshal([]byte(data), &claudeResponse)
		if err != nil {
			// Malformed event: log and keep streaming rather than aborting.
			logger.SysError("error unmarshalling stream response: " + err.Error())
			continue
		}

		response, meta := StreamResponseClaude2OpenAI(&claudeResponse)
		if meta != nil {
			// Metadata event (message_start or message_delta with usage):
			// record usage/model/id and emit no chunk for it.
			// NOTE(review): this `continue` also drops the chunk carrying the
			// finish_reason from a message_delta that includes usage — confirm
			// whether that chunk should be forwarded instead.
			usage.PromptTokens += meta.Usage.InputTokens
			usage.CompletionTokens += meta.Usage.OutputTokens
			modelName = meta.Model
			id = fmt.Sprintf("chatcmpl-%s", meta.Id)
			continue
		}
		if response == nil {
			continue
		}

		// Stamp the chunk with the stream-wide id/model/created values.
		response.Id = id
		response.Model = modelName
		response.Created = createdTime
		err = render.ObjectData(c, response)
		if err != nil {
			// Client write failure: log and continue draining the upstream.
			logger.SysError(err.Error())
		}
	}

	if err := scanner.Err(); err != nil {
		logger.SysError("error reading stream: " + err.Error())
	}

	// Terminate the client stream with the [DONE] marker.
	render.Done(c)

	err := resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, &usage
}
|
|
|
|
|
2024-02-17 16:15:31 +00:00
|
|
|
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
|
2023-07-22 09:36:40 +00:00
|
|
|
responseBody, err := io.ReadAll(resp.Body)
|
|
|
|
if err != nil {
|
2024-01-14 11:21:03 +00:00
|
|
|
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
|
2023-07-22 09:36:40 +00:00
|
|
|
}
|
|
|
|
err = resp.Body.Close()
|
|
|
|
if err != nil {
|
2024-01-14 11:21:03 +00:00
|
|
|
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
|
2023-07-22 09:36:40 +00:00
|
|
|
}
|
2024-01-14 11:21:03 +00:00
|
|
|
var claudeResponse Response
|
2023-07-22 09:36:40 +00:00
|
|
|
err = json.Unmarshal(responseBody, &claudeResponse)
|
|
|
|
if err != nil {
|
2024-01-14 11:21:03 +00:00
|
|
|
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
|
2023-07-22 09:36:40 +00:00
|
|
|
}
|
|
|
|
if claudeResponse.Error.Type != "" {
|
2024-02-17 16:15:31 +00:00
|
|
|
return &model.ErrorWithStatusCode{
|
|
|
|
Error: model.Error{
|
2023-07-22 09:36:40 +00:00
|
|
|
Message: claudeResponse.Error.Message,
|
|
|
|
Type: claudeResponse.Error.Type,
|
|
|
|
Param: "",
|
|
|
|
Code: claudeResponse.Error.Type,
|
|
|
|
},
|
|
|
|
StatusCode: resp.StatusCode,
|
|
|
|
}, nil
|
|
|
|
}
|
2024-04-19 16:40:47 +00:00
|
|
|
fullTextResponse := ResponseClaude2OpenAI(&claudeResponse)
|
2024-02-17 16:15:31 +00:00
|
|
|
fullTextResponse.Model = modelName
|
|
|
|
usage := model.Usage{
|
2024-03-08 17:12:47 +00:00
|
|
|
PromptTokens: claudeResponse.Usage.InputTokens,
|
|
|
|
CompletionTokens: claudeResponse.Usage.OutputTokens,
|
|
|
|
TotalTokens: claudeResponse.Usage.InputTokens + claudeResponse.Usage.OutputTokens,
|
2023-07-22 09:36:40 +00:00
|
|
|
}
|
|
|
|
fullTextResponse.Usage = usage
|
|
|
|
jsonResponse, err := json.Marshal(fullTextResponse)
|
|
|
|
if err != nil {
|
2024-01-14 11:21:03 +00:00
|
|
|
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
|
2023-07-22 09:36:40 +00:00
|
|
|
}
|
|
|
|
c.Writer.Header().Set("Content-Type", "application/json")
|
|
|
|
c.Writer.WriteHeader(resp.StatusCode)
|
|
|
|
_, err = c.Writer.Write(jsonResponse)
|
|
|
|
return nil, &usage
|
|
|
|
}
|