ai-gateway/relay/channel/anthropic/main.go

197 lines
5.8 KiB
Go
Raw Normal View History

2024-01-14 11:21:03 +00:00
package anthropic
2023-07-22 08:18:03 +00:00
2023-07-22 09:12:13 +00:00
import (
2023-07-22 09:36:40 +00:00
"bufio"
"encoding/json"
2023-07-22 09:12:13 +00:00
"fmt"
2023-07-22 09:36:40 +00:00
"github.com/gin-gonic/gin"
"io"
"net/http"
2023-07-22 09:12:13 +00:00
"one-api/common"
2024-01-14 11:21:03 +00:00
"one-api/relay/channel/openai"
2023-07-22 09:12:13 +00:00
"strings"
)
2023-07-22 08:18:03 +00:00
// stopReasonClaude2OpenAI maps a Claude stop reason onto the matching
// OpenAI finish_reason value; unknown reasons pass through unchanged.
func stopReasonClaude2OpenAI(reason string) string {
	translations := map[string]string{
		"stop_sequence": "stop",
		"max_tokens":    "length",
	}
	if mapped, ok := translations[reason]; ok {
		return mapped
	}
	return reason
}
2023-07-22 09:12:13 +00:00
2024-01-14 11:21:03 +00:00
func ConvertRequest(textRequest openai.GeneralOpenAIRequest) *Request {
claudeRequest := Request{
2023-07-22 09:12:13 +00:00
Model: textRequest.Model,
Prompt: "",
MaxTokensToSample: textRequest.MaxTokens,
StopSequences: nil,
Temperature: textRequest.Temperature,
TopP: textRequest.TopP,
Stream: textRequest.Stream,
}
if claudeRequest.MaxTokensToSample == 0 {
claudeRequest.MaxTokensToSample = 1000000
}
prompt := ""
for _, message := range textRequest.Messages {
if message.Role == "user" {
prompt += fmt.Sprintf("\n\nHuman: %s", message.Content)
} else if message.Role == "assistant" {
prompt += fmt.Sprintf("\n\nAssistant: %s", message.Content)
} else if message.Role == "system" {
2023-11-24 13:39:44 +00:00
if prompt == "" {
prompt = message.StringContent()
}
2023-07-22 09:12:13 +00:00
}
}
prompt += "\n\nAssistant:"
2023-07-22 09:12:13 +00:00
claudeRequest.Prompt = prompt
return &claudeRequest
}
2024-01-14 11:21:03 +00:00
func streamResponseClaude2OpenAI(claudeResponse *Response) *openai.ChatCompletionsStreamResponse {
var choice openai.ChatCompletionsStreamResponseChoice
2023-07-22 09:12:13 +00:00
choice.Delta.Content = claudeResponse.Completion
finishReason := stopReasonClaude2OpenAI(claudeResponse.StopReason)
if finishReason != "null" {
choice.FinishReason = &finishReason
}
2024-01-14 11:21:03 +00:00
var response openai.ChatCompletionsStreamResponse
2023-07-22 09:12:13 +00:00
response.Object = "chat.completion.chunk"
response.Model = claudeResponse.Model
2024-01-14 11:21:03 +00:00
response.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
2023-07-22 09:12:13 +00:00
return &response
}
2024-01-14 11:21:03 +00:00
func responseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
choice := openai.TextResponseChoice{
2023-07-22 09:12:13 +00:00
Index: 0,
2024-01-14 11:21:03 +00:00
Message: openai.Message{
2023-07-22 09:12:13 +00:00
Role: "assistant",
Content: strings.TrimPrefix(claudeResponse.Completion, " "),
Name: nil,
},
FinishReason: stopReasonClaude2OpenAI(claudeResponse.StopReason),
}
2024-01-14 11:21:03 +00:00
fullTextResponse := openai.TextResponse{
2023-07-22 09:12:13 +00:00
Id: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
Object: "chat.completion",
Created: common.GetTimestamp(),
2024-01-14 11:21:03 +00:00
Choices: []openai.TextResponseChoice{choice},
2023-07-22 09:12:13 +00:00
}
return &fullTextResponse
}
2023-07-22 09:36:40 +00:00
2024-01-14 11:21:03 +00:00
// StreamHandler relays a Claude SSE stream to the client, translating each
// upstream "completion" event into an OpenAI chat.completion.chunk event.
// It returns the accumulated completion text so the caller can compute usage.
func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, string) {
	responseText := ""
	// One id/created pair is generated up front and reused on every chunk,
	// matching how the OpenAI API streams a single completion.
	responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
	createdTime := common.GetTimestamp()
	scanner := bufio.NewScanner(resp.Body)
	// Custom split: one token per SSE event, delimited by a blank line
	// (\r\n\r\n). At EOF any trailing partial data is emitted as-is.
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\r\n\r\n"); i >= 0 {
			return i + 4, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	// Reader goroutine: forward only "completion" events, stripping the SSE
	// framing so only the JSON payload crosses the channel. Other event
	// types (e.g. ping) are dropped.
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if !strings.HasPrefix(data, "event: completion") {
				continue
			}
			data = strings.TrimPrefix(data, "event: completion\r\ndata: ")
			dataChan <- data
		}
		stopChan <- true
	}()
	common.SetEventStreamHeaders(c)
	// Writer loop: runs on the gin side; returning false ends the stream.
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			// some implementations may add \r at the end of data
			data = strings.TrimSuffix(data, "\r")
			var claudeResponse Response
			err := json.Unmarshal([]byte(data), &claudeResponse)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
				return true // skip the malformed chunk but keep streaming
			}
			responseText += claudeResponse.Completion
			response := streamResponseClaude2OpenAI(&claudeResponse)
			response.Id = responseId
			response.Created = createdTime
			jsonStr, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
			return true
		case <-stopChan:
			// Upstream EOF: emit the OpenAI stream terminator and stop.
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
	}
	return nil, responseText
}
2024-01-14 11:21:03 +00:00
func Handler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*openai.ErrorWithStatusCode, *openai.Usage) {
2023-07-22 09:36:40 +00:00
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
2024-01-14 11:21:03 +00:00
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
2023-07-22 09:36:40 +00:00
}
err = resp.Body.Close()
if err != nil {
2024-01-14 11:21:03 +00:00
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
2023-07-22 09:36:40 +00:00
}
2024-01-14 11:21:03 +00:00
var claudeResponse Response
2023-07-22 09:36:40 +00:00
err = json.Unmarshal(responseBody, &claudeResponse)
if err != nil {
2024-01-14 11:21:03 +00:00
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
2023-07-22 09:36:40 +00:00
}
if claudeResponse.Error.Type != "" {
2024-01-14 11:21:03 +00:00
return &openai.ErrorWithStatusCode{
Error: openai.Error{
2023-07-22 09:36:40 +00:00
Message: claudeResponse.Error.Message,
Type: claudeResponse.Error.Type,
Param: "",
Code: claudeResponse.Error.Type,
},
StatusCode: resp.StatusCode,
}, nil
}
fullTextResponse := responseClaude2OpenAI(&claudeResponse)
fullTextResponse.Model = model
2024-01-14 11:21:03 +00:00
completionTokens := openai.CountTokenText(claudeResponse.Completion, model)
usage := openai.Usage{
2023-07-22 09:36:40 +00:00
PromptTokens: promptTokens,
CompletionTokens: completionTokens,
TotalTokens: promptTokens + completionTokens,
}
fullTextResponse.Usage = usage
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
2024-01-14 11:21:03 +00:00
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
2023-07-22 09:36:40 +00:00
}
c.Writer.Header().Set("Content-Type", "application/json")
c.Writer.WriteHeader(resp.StatusCode)
_, err = c.Writer.Write(jsonResponse)
return nil, &usage
}