diff --git a/README.md b/README.md index 8f92bb50..af9f44f8 100644 --- a/README.md +++ b/README.md @@ -60,6 +60,7 @@ _✨ All in one 的 OpenAI 接口,整合各种 API 访问方式,开箱即用 ## 功能 1. 支持多种 API 访问渠道: + [x] OpenAI 官方通道(支持配置镜像) + + [x] [Anthropic Claude 系列模型](https://anthropic.com) + [x] **Azure OpenAI API** + [x] [API Distribute](https://api.gptjk.top/register?aff=QGxj) + [x] [OpenAI-SB](https://openai-sb.com) diff --git a/common/constants.go b/common/constants.go index 4402b9d9..50c0bd2f 100644 --- a/common/constants.go +++ b/common/constants.go @@ -151,6 +151,7 @@ const ( ChannelTypePaLM = 11 ChannelTypeAPI2GPT = 12 ChannelTypeAIGC2D = 13 + ChannelTypeAnthropic = 14 ) var ChannelBaseURLs = []string{ @@ -168,4 +169,5 @@ var ChannelBaseURLs = []string{ "", // 11 "https://api.api2gpt.com", // 12 "https://api.aigc2d.com", // 13 + "https://api.anthropic.com", // 14 } diff --git a/common/model-ratio.go b/common/model-ratio.go index d01c3ffb..0ba4c397 100644 --- a/common/model-ratio.go +++ b/common/model-ratio.go @@ -36,6 +36,8 @@ var ModelRatio = map[string]float64{ "text-moderation-stable": 0.1, "text-moderation-latest": 0.1, "dall-e": 8, + "claude-instant-1": 0.75, + "claude-2": 30, } func ModelRatio2JSONString() string { diff --git a/controller/model.go b/controller/model.go index 5d7becb7..cc77f621 100644 --- a/controller/model.go +++ b/controller/model.go @@ -270,6 +270,24 @@ func init() { Root: "ChatGLM2", Parent: nil, }, + { + Id: "claude-instant-1", + Object: "model", + Created: 1677649963, + OwnedBy: "anthropic", + Permission: permission, + Root: "claude-instant-1", + Parent: nil, + }, + { + Id: "claude-2", + Object: "model", + Created: 1677649963, + OwnedBy: "anthropic", + Permission: permission, + Root: "claude-2", + Parent: nil, + }, } openAIModelsMap = make(map[string]OpenAIModels) for _, model := range openAIModels { diff --git a/controller/relay-claude.go b/controller/relay-claude.go new file mode 100644 index 00000000..2b4f0c87 --- /dev/null +++ 
b/controller/relay-claude.go @@ -0,0 +1,40 @@ +package controller + +type ClaudeMetadata struct { + UserId string `json:"user_id"` +} + +type ClaudeRequest struct { + Model string `json:"model"` + Prompt string `json:"prompt"` + MaxTokensToSample int `json:"max_tokens_to_sample"` + StopSequences []string `json:"stop_sequences,omitempty"` + Temperature float64 `json:"temperature,omitempty"` + TopP float64 `json:"top_p,omitempty"` + TopK int `json:"top_k,omitempty"` + //ClaudeMetadata `json:"metadata,omitempty"` + Stream bool `json:"stream,omitempty"` +} + +type ClaudeError struct { + Type string `json:"type"` + Message string `json:"message"` +} + +type ClaudeResponse struct { + Completion string `json:"completion"` + StopReason string `json:"stop_reason"` + Model string `json:"model"` + Error ClaudeError `json:"error"` +} + +func stopReasonClaude2OpenAI(reason string) string { + switch reason { + case "stop_sequence": + return "stop" + case "max_tokens": + return "length" + default: + return reason + } +} diff --git a/controller/relay-text.go b/controller/relay-text.go index e9461edd..18f50966 100644 --- a/controller/relay-text.go +++ b/controller/relay-text.go @@ -15,6 +15,12 @@ import ( "github.com/gin-gonic/gin" ) +const ( + APITypeOpenAI = iota + APITypeClaude + APITypePaLM +) + func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { channelType := c.GetInt("channel") tokenId := c.GetInt("token_id") @@ -71,33 +77,42 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { isModelMapped = true } } + apiType := APITypeOpenAI + if strings.HasPrefix(textRequest.Model, "claude") { + apiType = APITypeClaude + } baseURL := common.ChannelBaseURLs[channelType] requestURL := c.Request.URL.String() if c.GetString("base_url") != "" { baseURL = c.GetString("base_url") } fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL) - if channelType == common.ChannelTypeAzure { - // 
https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api - query := c.Request.URL.Query() - apiVersion := query.Get("api-version") - if apiVersion == "" { - apiVersion = c.GetString("api_version") + switch apiType { + case APITypeOpenAI: + if channelType == common.ChannelTypeAzure { + // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api + query := c.Request.URL.Query() + apiVersion := query.Get("api-version") + if apiVersion == "" { + apiVersion = c.GetString("api_version") + } + requestURL := strings.Split(requestURL, "?")[0] + requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, apiVersion) + baseURL = c.GetString("base_url") + task := strings.TrimPrefix(requestURL, "/v1/") + model_ := textRequest.Model + model_ = strings.Replace(model_, ".", "", -1) + // https://github.com/songquanpeng/one-api/issues/67 + model_ = strings.TrimSuffix(model_, "-0301") + model_ = strings.TrimSuffix(model_, "-0314") + model_ = strings.TrimSuffix(model_, "-0613") + fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/%s", baseURL, model_, task) + } + case APITypeClaude: + fullRequestURL = "https://api.anthropic.com/v1/complete" + if baseURL != "" { + fullRequestURL = fmt.Sprintf("%s/v1/complete", baseURL) } - requestURL := strings.Split(requestURL, "?")[0] - requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, apiVersion) - baseURL = c.GetString("base_url") - task := strings.TrimPrefix(requestURL, "/v1/") - model_ := textRequest.Model - model_ = strings.Replace(model_, ".", "", -1) - // https://github.com/songquanpeng/one-api/issues/67 - model_ = strings.TrimSuffix(model_, "-0301") - model_ = strings.TrimSuffix(model_, "-0314") - model_ = strings.TrimSuffix(model_, "-0613") - fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/%s", baseURL, model_, task) - } else if channelType == common.ChannelTypePaLM { - err := 
relayPaLM(textRequest, c) - return err } var promptTokens int var completionTokens int @@ -142,16 +157,58 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { } else { requestBody = c.Request.Body } + switch apiType { + case APITypeClaude: + claudeRequest := ClaudeRequest{ + Model: textRequest.Model, + Prompt: "", + MaxTokensToSample: textRequest.MaxTokens, + StopSequences: nil, + Temperature: textRequest.Temperature, + TopP: textRequest.TopP, + Stream: textRequest.Stream, + } + if claudeRequest.MaxTokensToSample == 0 { + claudeRequest.MaxTokensToSample = 1000000 + } + prompt := "" + for _, message := range textRequest.Messages { + if message.Role == "user" { + prompt += fmt.Sprintf("\n\nHuman: %s", message.Content) + } else if message.Role == "assistant" { + prompt += fmt.Sprintf("\n\nAssistant: %s", message.Content) + } else { + // ignore other roles + } + } + prompt += "\n\nAssistant:" + claudeRequest.Prompt = prompt + jsonStr, err := json.Marshal(claudeRequest) + if err != nil { + return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError) + } + requestBody = bytes.NewBuffer(jsonStr) + } req, err := http.NewRequest(c.Request.Method, fullRequestURL, requestBody) if err != nil { return errorWrapper(err, "new_request_failed", http.StatusInternalServerError) } - if channelType == common.ChannelTypeAzure { - key := c.Request.Header.Get("Authorization") - key = strings.TrimPrefix(key, "Bearer ") - req.Header.Set("api-key", key) - } else { - req.Header.Set("Authorization", c.Request.Header.Get("Authorization")) + apiKey := c.Request.Header.Get("Authorization") + apiKey = strings.TrimPrefix(apiKey, "Bearer ") + switch apiType { + case APITypeOpenAI: + if channelType == common.ChannelTypeAzure { + req.Header.Set("api-key", apiKey) + } else { + req.Header.Set("Authorization", c.Request.Header.Get("Authorization")) + } + case APITypeClaude: + req.Header.Set("x-api-key", apiKey) + anthropicVersion := 
c.Request.Header.Get("anthropic-version") + if anthropicVersion == "" { + anthropicVersion = "2023-06-01" + } + req.Header.Set("anthropic-version", anthropicVersion) } req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type")) req.Header.Set("Accept", c.Request.Header.Get("Accept")) @@ -219,87 +276,198 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { } } }() - - if isStream { - scanner := bufio.NewScanner(resp.Body) - scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { - if atEOF && len(data) == 0 { - return 0, nil, nil - } - - if i := strings.Index(string(data), "\n"); i >= 0 { - return i + 1, data[0:i], nil - } - - if atEOF { - return len(data), data, nil - } - - return 0, nil, nil - }) - dataChan := make(chan string) - stopChan := make(chan bool) - go func() { - for scanner.Scan() { - data := scanner.Text() - if len(data) < 6 { // ignore blank line or wrong format - continue + switch apiType { + case APITypeOpenAI: + if isStream { + scanner := bufio.NewScanner(resp.Body) + scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil } - dataChan <- data - data = data[6:] - if !strings.HasPrefix(data, "[DONE]") { - switch relayMode { - case RelayModeChatCompletions: - var streamResponse ChatCompletionsStreamResponse - err = json.Unmarshal([]byte(data), &streamResponse) - if err != nil { - common.SysError("error unmarshalling stream response: " + err.Error()) - return - } - for _, choice := range streamResponse.Choices { - streamResponseText += choice.Delta.Content - } - case RelayModeCompletions: - var streamResponse CompletionsStreamResponse - err = json.Unmarshal([]byte(data), &streamResponse) - if err != nil { - common.SysError("error unmarshalling stream response: " + err.Error()) - return - } - for _, choice := range streamResponse.Choices { - streamResponseText += choice.Text + if i := 
strings.Index(string(data), "\n"); i >= 0 { + return i + 1, data[0:i], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + }) + dataChan := make(chan string) + stopChan := make(chan bool) + go func() { + for scanner.Scan() { + data := scanner.Text() + if len(data) < 6 { // ignore blank line or wrong format + continue + } + dataChan <- data + data = data[6:] + if !strings.HasPrefix(data, "[DONE]") { + switch relayMode { + case RelayModeChatCompletions: + var streamResponse ChatCompletionsStreamResponse + err = json.Unmarshal([]byte(data), &streamResponse) + if err != nil { + common.SysError("error unmarshalling stream response: " + err.Error()) + return + } + for _, choice := range streamResponse.Choices { + streamResponseText += choice.Delta.Content + } + case RelayModeCompletions: + var streamResponse CompletionsStreamResponse + err = json.Unmarshal([]byte(data), &streamResponse) + if err != nil { + common.SysError("error unmarshalling stream response: " + err.Error()) + return + } + for _, choice := range streamResponse.Choices { + streamResponseText += choice.Text + } } } } - } - stopChan <- true - }() - c.Writer.Header().Set("Content-Type", "text/event-stream") - c.Writer.Header().Set("Cache-Control", "no-cache") - c.Writer.Header().Set("Connection", "keep-alive") - c.Writer.Header().Set("Transfer-Encoding", "chunked") - c.Writer.Header().Set("X-Accel-Buffering", "no") - c.Stream(func(w io.Writer) bool { - select { - case data := <-dataChan: - if strings.HasPrefix(data, "data: [DONE]") { - data = data[:12] + stopChan <- true + }() + c.Writer.Header().Set("Content-Type", "text/event-stream") + c.Writer.Header().Set("Cache-Control", "no-cache") + c.Writer.Header().Set("Connection", "keep-alive") + c.Writer.Header().Set("Transfer-Encoding", "chunked") + c.Writer.Header().Set("X-Accel-Buffering", "no") + c.Stream(func(w io.Writer) bool { + select { + case data := <-dataChan: + if strings.HasPrefix(data, "data: [DONE]") { + data = data[:12] 
+ } + // some implementations may add \r at the end of data + data = strings.TrimSuffix(data, "\r") + c.Render(-1, common.CustomEvent{Data: data}) + return true + case <-stopChan: + return false } - // some implementations may add \r at the end of data - data = strings.TrimSuffix(data, "\r") - c.Render(-1, common.CustomEvent{Data: data}) - return true - case <-stopChan: - return false + }) + err = resp.Body.Close() + if err != nil { + return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) } - }) - err = resp.Body.Close() - if err != nil { - return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) + return nil + } else { + if consumeQuota { + responseBody, err := io.ReadAll(resp.Body) + if err != nil { + return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError) + } + err = resp.Body.Close() + if err != nil { + return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) + } + err = json.Unmarshal(responseBody, &textResponse) + if err != nil { + return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError) + } + if textResponse.Error.Type != "" { + return &OpenAIErrorWithStatusCode{ + OpenAIError: textResponse.Error, + StatusCode: resp.StatusCode, + } + } + // Reset response body + resp.Body = io.NopCloser(bytes.NewBuffer(responseBody)) + } + // We shouldn't set the header before we parse the response body, because the parse part may fail. + // And then we will have to send an error response, but in this case, the header has already been set. + // So the client will be confused by the response. + // For example, Postman will report error, and we cannot check the response at all. 
+ for k, v := range resp.Header { + c.Writer.Header().Set(k, v[0]) + } + c.Writer.WriteHeader(resp.StatusCode) + _, err = io.Copy(c.Writer, resp.Body) + if err != nil { + return errorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError) + } + err = resp.Body.Close() + if err != nil { + return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) + } + return nil } - return nil - } else { - if consumeQuota { + case APITypeClaude: + if isStream { + responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID()) + createdTime := common.GetTimestamp() + scanner := bufio.NewScanner(resp.Body) + scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil + } + if i := strings.Index(string(data), "\r\n\r\n"); i >= 0 { + return i + 4, data[0:i], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + }) + dataChan := make(chan string) + stopChan := make(chan bool) + go func() { + for scanner.Scan() { + data := scanner.Text() + if !strings.HasPrefix(data, "event: completion") { + continue + } + data = strings.TrimPrefix(data, "event: completion\r\ndata: ") + dataChan <- data + } + stopChan <- true + }() + c.Writer.Header().Set("Content-Type", "text/event-stream") + c.Writer.Header().Set("Cache-Control", "no-cache") + c.Writer.Header().Set("Connection", "keep-alive") + c.Writer.Header().Set("Transfer-Encoding", "chunked") + c.Writer.Header().Set("X-Accel-Buffering", "no") + c.Stream(func(w io.Writer) bool { + select { + case data := <-dataChan: + // some implementations may add \r at the end of data + data = strings.TrimSuffix(data, "\r") + var claudeResponse ClaudeResponse + err = json.Unmarshal([]byte(data), &claudeResponse) + if err != nil { + common.SysError("error unmarshalling stream response: " + err.Error()) + return true + } + streamResponseText += claudeResponse.Completion + var choice ChatCompletionsStreamResponseChoice + 
choice.Delta.Content = claudeResponse.Completion + choice.FinishReason = stopReasonClaude2OpenAI(claudeResponse.StopReason) + var response ChatCompletionsStreamResponse + response.Id = responseId + response.Created = createdTime + response.Object = "chat.completion.chunk" + response.Model = textRequest.Model + response.Choices = []ChatCompletionsStreamResponseChoice{choice} + jsonStr, err := json.Marshal(response) + if err != nil { + common.SysError("error marshalling stream response: " + err.Error()) + return true + } + c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)}) + return true + case <-stopChan: + c.Render(-1, common.CustomEvent{Data: "data: [DONE]"}) + return false + } + }) + err = resp.Body.Close() + if err != nil { + return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) + } + return nil + } else { responseBody, err := io.ReadAll(resp.Body) if err != nil { return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError) @@ -308,35 +476,54 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { if err != nil { return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) } - err = json.Unmarshal(responseBody, &textResponse) + var claudeResponse ClaudeResponse + err = json.Unmarshal(responseBody, &claudeResponse) if err != nil { return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError) } - if textResponse.Error.Type != "" { + if claudeResponse.Error.Type != "" { return &OpenAIErrorWithStatusCode{ - OpenAIError: textResponse.Error, - StatusCode: resp.StatusCode, + OpenAIError: OpenAIError{ + Message: claudeResponse.Error.Message, + Type: claudeResponse.Error.Type, + Param: "", + Code: claudeResponse.Error.Type, + }, + StatusCode: resp.StatusCode, } } - // Reset response body - resp.Body = io.NopCloser(bytes.NewBuffer(responseBody)) + choice := OpenAITextResponseChoice{ + Index: 0, + Message: Message{ + 
Role: "assistant", + Content: strings.TrimPrefix(claudeResponse.Completion, " "), + Name: nil, + }, + FinishReason: stopReasonClaude2OpenAI(claudeResponse.StopReason), + } + completionTokens := countTokenText(claudeResponse.Completion, textRequest.Model) + fullTextResponse := OpenAITextResponse{ + Id: fmt.Sprintf("chatcmpl-%s", common.GetUUID()), + Object: "chat.completion", + Created: common.GetTimestamp(), + Choices: []OpenAITextResponseChoice{choice}, + Usage: Usage{ + PromptTokens: promptTokens, + CompletionTokens: completionTokens, + TotalTokens: promptTokens + completionTokens, + }, + } + textResponse.Usage = fullTextResponse.Usage + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError) + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, err = c.Writer.Write(jsonResponse) + return nil + } - // We shouldn't set the header before we parse the response body, because the parse part may fail. - // And then we will have to send an error response, but in this case, the header has already been set. - // So the client will be confused by the response. - // For example, Postman will report error, and we cannot check the response at all. 
- for k, v := range resp.Header { - c.Writer.Header().Set(k, v[0]) - } - c.Writer.WriteHeader(resp.StatusCode) - _, err = io.Copy(c.Writer, resp.Body) - if err != nil { - return errorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError) - } - err = resp.Body.Close() - if err != nil { - return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) - } - return nil + default: + return errorWrapper(errors.New("unknown api type"), "unknown_api_type", http.StatusInternalServerError) } } diff --git a/controller/relay.go b/controller/relay.go index 45054945..88ae8acc 100644 --- a/controller/relay.go +++ b/controller/relay.go @@ -85,6 +85,20 @@ type TextResponse struct { Error OpenAIError `json:"error"` } +type OpenAITextResponseChoice struct { + Index int `json:"index"` + Message `json:"message"` + FinishReason string `json:"finish_reason"` +} + +type OpenAITextResponse struct { + Id string `json:"id"` + Object string `json:"object"` + Created int64 `json:"created"` + Choices []OpenAITextResponseChoice `json:"choices"` + Usage `json:"usage"` +} + type ImageResponse struct { Created int `json:"created"` Data []struct { @@ -92,13 +106,19 @@ type ImageResponse struct { } } +type ChatCompletionsStreamResponseChoice struct { + Delta struct { + Content string `json:"content"` + } `json:"delta"` + FinishReason string `json:"finish_reason,omitempty"` +} + type ChatCompletionsStreamResponse struct { - Choices []struct { - Delta struct { - Content string `json:"content"` - } `json:"delta"` - FinishReason string `json:"finish_reason"` - } `json:"choices"` + Id string `json:"id"` + Object string `json:"object"` + Created int64 `json:"created"` + Model string `json:"model"` + Choices []ChatCompletionsStreamResponseChoice `json:"choices"` } type CompletionsStreamResponse struct { diff --git a/web/src/constants/channel.constants.js b/web/src/constants/channel.constants.js index 7d732223..db9f5e1e 100644 --- 
a/web/src/constants/channel.constants.js +++ b/web/src/constants/channel.constants.js @@ -1,5 +1,6 @@ export const CHANNEL_OPTIONS = [ { key: 1, text: 'OpenAI', value: 1, color: 'green' }, + { key: 14, text: 'Anthropic', value: 14, color: 'black' }, { key: 8, text: '自定义', value: 8, color: 'pink' }, { key: 3, text: 'Azure', value: 3, color: 'olive' }, { key: 2, text: 'API2D', value: 2, color: 'blue' },