feat: support coze now
parent d14e4aa01b
commit 8de489cf06
@@ -82,6 +82,7 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用
 + [x] [Ollama](https://github.com/ollama/ollama)
 + [x] [零一万物](https://platform.lingyiwanwu.com/)
 + [x] [阶跃星辰](https://platform.stepfun.com/)
++ [x] [Coze](https://www.coze.com/)
 2. 支持配置镜像以及众多[第三方代理服务](https://iamazing.cn/page/openai-api-third-party-services)。
 3. 支持通过**负载均衡**的方式访问多个渠道。
 4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。
@@ -9,4 +9,5 @@ const (
     KeySK = KeyPrefix + "sk"
     KeyAK = KeyPrefix + "ak"
     KeyRegion = KeyPrefix + "region"
+    KeyUserID = KeyPrefix + "user_id"
 )
@@ -64,8 +64,12 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
         return fmt.Errorf("invalid api type: %d, adaptor is nil", apiType), nil
     }
     adaptor.Init(meta)
-    modelName := adaptor.GetModelList()[0]
-    if !strings.Contains(channel.Models, modelName) {
+    var modelName string
+    modelList := adaptor.GetModelList()
+    if len(modelList) != 0 {
+        modelName = modelList[0]
+    }
+    if modelName == "" || !strings.Contains(channel.Models, modelName) {
         modelNames := strings.Split(channel.Models, ",")
         if len(modelNames) > 0 {
             modelName = modelNames[0]
@@ -82,6 +86,7 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
     if err != nil {
         return err, nil
     }
+    logger.SysLog(string(jsonData))
     requestBody := bytes.NewBuffer(jsonData)
     c.Request.Body = io.NopCloser(requestBody)
     resp, err := adaptor.DoRequest(c, meta, requestBody)
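Note on the first hunk above: the Coze adaptor added in this commit ships with an empty model list (var ModelList = []string{}), so indexing adaptor.GetModelList()[0] directly would panic for such channels. The new code guards that and falls back to the channel's own configured models. A minimal sketch of the same selection logic in isolation (pickTestModel is an illustrative helper, not part of the commit; the real logic lives inline in testChannel):

package main

import (
    "fmt"
    "strings"
)

// pickTestModel mirrors the fallback above: prefer the adaptor's first model,
// otherwise fall back to the first model configured on the channel.
func pickTestModel(adaptorModels []string, channelModels string) string {
    var modelName string
    if len(adaptorModels) != 0 {
        modelName = adaptorModels[0]
    }
    if modelName == "" || !strings.Contains(channelModels, modelName) {
        if names := strings.Split(channelModels, ","); len(names) > 0 {
            modelName = names[0]
        }
    }
    return modelName
}

func main() {
    fmt.Println(pickTestModel([]string{}, "my-coze-bot-id"))       // my-coze-bot-id
    fmt.Println(pickTestModel([]string{"gpt-3.5-turbo"}, "gpt-4")) // gpt-4
}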
@@ -7,6 +7,7 @@ import (
     "github.com/songquanpeng/one-api/relay/adaptor/anthropic"
     "github.com/songquanpeng/one-api/relay/adaptor/aws"
     "github.com/songquanpeng/one-api/relay/adaptor/baidu"
+    "github.com/songquanpeng/one-api/relay/adaptor/coze"
     "github.com/songquanpeng/one-api/relay/adaptor/gemini"
     "github.com/songquanpeng/one-api/relay/adaptor/ollama"
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
@@ -43,6 +44,8 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
         return &zhipu.Adaptor{}
     case apitype.Ollama:
         return &ollama.Adaptor{}
+    case apitype.Coze:
+        return &coze.Adaptor{}
     }
     return nil
 }
relay/adaptor/coze/adaptor.go (new file, 75 lines)
@@ -0,0 +1,75 @@
package coze

import (
    "errors"
    "fmt"
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common/config"
    "github.com/songquanpeng/one-api/relay/adaptor"
    "github.com/songquanpeng/one-api/relay/adaptor/openai"
    "github.com/songquanpeng/one-api/relay/meta"
    "github.com/songquanpeng/one-api/relay/model"
    "io"
    "net/http"
)

type Adaptor struct {
}

func (a *Adaptor) Init(meta *meta.Meta) {
}

func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
    return fmt.Sprintf("%s/open_api/v2/chat", meta.BaseURL), nil
}

func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
    adaptor.SetupCommonRequestHeader(c, req, meta)
    req.Header.Set("Authorization", "Bearer "+meta.APIKey)
    return nil
}

func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
    if request == nil {
        return nil, errors.New("request is nil")
    }
    request.User = c.GetString(config.KeyUserID)
    return ConvertRequest(*request), nil
}

func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
    if request == nil {
        return nil, errors.New("request is nil")
    }
    return request, nil
}

func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
    return adaptor.DoRequestHelper(a, c, meta, requestBody)
}

func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
    var responseText *string
    if meta.IsStream {
        err, responseText = StreamHandler(c, resp)
    } else {
        err, responseText = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
    }
    if responseText != nil {
        usage = openai.ResponseText2Usage(*responseText, meta.ActualModelName, meta.PromptTokens)
    } else {
        usage = &model.Usage{}
    }
    usage.PromptTokens = meta.PromptTokens
    usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
    return
}

func (a *Adaptor) GetModelList() []string {
    return ModelList
}

func (a *Adaptor) GetChannelName() string {
    return "coze"
}
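For orientation: the adaptor targets Coze's v2 chat endpoint and authenticates with the channel key as a Bearer token (see SetupRequestHeader above). A minimal sketch of the URL construction, not part of the commit, assuming the default base URL registered for the channel later in this diff:

package main

import (
    "fmt"

    "github.com/songquanpeng/one-api/relay/adaptor/coze"
    "github.com/songquanpeng/one-api/relay/meta"
)

func main() {
    a := &coze.Adaptor{}
    // BaseURL comes from the channel config; "https://api.coze.com" is the
    // default added to ChannelBaseURLs in this commit.
    m := &meta.Meta{BaseURL: "https://api.coze.com"}
    u, _ := a.GetRequestURL(m)
    fmt.Println(u) // https://api.coze.com/open_api/v2/chat
    // SetupRequestHeader then adds: Authorization: Bearer <channel API key>
}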
relay/adaptor/coze/constant/contenttype/define.go (new file, 5 lines)
@@ -0,0 +1,5 @@
package contenttype

const (
    Text = "text"
)
relay/adaptor/coze/constant/event/define.go (new file, 7 lines)
@@ -0,0 +1,7 @@
package event

const (
    Message = "message"
    Done    = "done"
    Error   = "error"
)
relay/adaptor/coze/constant/messagetype/define.go (new file, 6 lines)
@@ -0,0 +1,6 @@
package messagetype

const (
    Answer   = "answer"
    FollowUp = "follow_up"
)
relay/adaptor/coze/constants.go (new file, 3 lines)
@@ -0,0 +1,3 @@
package coze

var ModelList = []string{}
relay/adaptor/coze/helper.go (new file, 10 lines)
@@ -0,0 +1,10 @@
package coze

import "github.com/songquanpeng/one-api/relay/adaptor/coze/constant/event"

func event2StopReason(e *string) string {
    if e == nil || *e == event.Message {
        return ""
    }
    return "stop"
}
relay/adaptor/coze/main.go (new file, 218 lines)
@@ -0,0 +1,218 @@
package coze

import (
    "bufio"
    "encoding/json"
    "fmt"
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common"
    "github.com/songquanpeng/one-api/common/conv"
    "github.com/songquanpeng/one-api/common/helper"
    "github.com/songquanpeng/one-api/common/logger"
    "github.com/songquanpeng/one-api/relay/adaptor/coze/constant/messagetype"
    "github.com/songquanpeng/one-api/relay/adaptor/openai"
    "github.com/songquanpeng/one-api/relay/model"
    "io"
    "net/http"
    "strings"
)

// https://www.coze.com/open

func stopReasonCoze2OpenAI(reason *string) string {
    if reason == nil {
        return ""
    }
    switch *reason {
    case "end_turn":
        return "stop"
    case "stop_sequence":
        return "stop"
    case "max_tokens":
        return "length"
    default:
        return *reason
    }
}

func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
    cozeRequest := Request{
        Stream: textRequest.Stream,
        User:   textRequest.User,
        BotId:  textRequest.Model,
    }
    if cozeRequest.User == "" {
        cozeRequest.User = "One API User"
    }
    for i, message := range textRequest.Messages {
        if i == len(textRequest.Messages)-1 {
            cozeRequest.Query = message.StringContent()
            continue
        }
        cozeMessage := Message{
            Role:    message.Role,
            Content: message.StringContent(),
        }
        cozeRequest.ChatHistory = append(cozeRequest.ChatHistory, cozeMessage)
    }
    return &cozeRequest
}

func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
    var response *Response
    var stopReason string
    var choice openai.ChatCompletionsStreamResponseChoice

    if cozeResponse.Message != nil {
        if cozeResponse.Message.Type != messagetype.Answer {
            return nil, nil
        }
        choice.Delta.Content = cozeResponse.Message.Content
    }
    choice.Delta.Role = "assistant"
    finishReason := stopReasonCoze2OpenAI(&stopReason)
    if finishReason != "null" {
        choice.FinishReason = &finishReason
    }
    var openaiResponse openai.ChatCompletionsStreamResponse
    openaiResponse.Object = "chat.completion.chunk"
    openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
    openaiResponse.Id = cozeResponse.ConversationId
    return &openaiResponse, response
}

func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
    var responseText string
    for _, message := range cozeResponse.Messages {
        if message.Type == messagetype.Answer {
            responseText = message.Content
            break
        }
    }
    choice := openai.TextResponseChoice{
        Index: 0,
        Message: model.Message{
            Role:    "assistant",
            Content: responseText,
            Name:    nil,
        },
        FinishReason: "stop",
    }
    fullTextResponse := openai.TextResponse{
        Id:      fmt.Sprintf("chatcmpl-%s", cozeResponse.ConversationId),
        Model:   "coze-bot",
        Object:  "chat.completion",
        Created: helper.GetTimestamp(),
        Choices: []openai.TextResponseChoice{choice},
    }
    return &fullTextResponse
}

func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
    var responseText string
    createdTime := helper.GetTimestamp()
    scanner := bufio.NewScanner(resp.Body)
    scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
        if atEOF && len(data) == 0 {
            return 0, nil, nil
        }
        if i := strings.Index(string(data), "\n"); i >= 0 {
            return i + 1, data[0:i], nil
        }
        if atEOF {
            return len(data), data, nil
        }
        return 0, nil, nil
    })
    dataChan := make(chan string)
    stopChan := make(chan bool)
    go func() {
        for scanner.Scan() {
            data := scanner.Text()
            if len(data) < 5 {
                continue
            }
            if !strings.HasPrefix(data, "data:") {
                continue
            }
            data = strings.TrimPrefix(data, "data:")
            dataChan <- data
        }
        stopChan <- true
    }()
    common.SetEventStreamHeaders(c)
    var modelName string
    c.Stream(func(w io.Writer) bool {
        select {
        case data := <-dataChan:
            // some implementations may add \r at the end of data
            data = strings.TrimSuffix(data, "\r")
            var cozeResponse StreamResponse
            err := json.Unmarshal([]byte(data), &cozeResponse)
            if err != nil {
                logger.SysError("error unmarshalling stream response: " + err.Error())
                return true
            }
            response, _ := StreamResponseCoze2OpenAI(&cozeResponse)
            if response == nil {
                return true
            }
            for _, choice := range response.Choices {
                responseText += conv.AsString(choice.Delta.Content)
            }
            response.Model = modelName
            response.Created = createdTime
            jsonStr, err := json.Marshal(response)
            if err != nil {
                logger.SysError("error marshalling stream response: " + err.Error())
                return true
            }
            c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
            return true
        case <-stopChan:
            c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
            return false
        }
    })
    _ = resp.Body.Close()
    return nil, &responseText
}

func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
    responseBody, err := io.ReadAll(resp.Body)
    if err != nil {
        return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
    }
    err = resp.Body.Close()
    if err != nil {
        return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
    }
    var cozeResponse Response
    err = json.Unmarshal(responseBody, &cozeResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
    }
    if cozeResponse.Code != 0 {
        return &model.ErrorWithStatusCode{
            Error: model.Error{
                Message: cozeResponse.Msg,
                Code:    cozeResponse.Code,
            },
            StatusCode: resp.StatusCode,
        }, nil
    }
    fullTextResponse := ResponseCoze2OpenAI(&cozeResponse)
    fullTextResponse.Model = modelName
    jsonResponse, err := json.Marshal(fullTextResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
    }
    c.Writer.Header().Set("Content-Type", "application/json")
    c.Writer.WriteHeader(resp.StatusCode)
    _, err = c.Writer.Write(jsonResponse)
    var responseText string
    if len(fullTextResponse.Choices) > 0 {
        responseText = fullTextResponse.Choices[0].Message.StringContent()
    }
    return nil, &responseText
}
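For reference, a minimal sketch of what StreamHandler does with a single event line, assuming the Coze stream delivers "data:"-prefixed JSON objects with the fields declared in StreamResponse below; the sample payload is illustrative, not captured from the API, and this snippet is not part of the commit:

package main

import (
    "encoding/json"
    "fmt"
    "strings"

    "github.com/songquanpeng/one-api/relay/adaptor/coze"
)

func main() {
    // Illustrative event line; field names follow the StreamResponse struct in model.go.
    line := `data:{"event":"message","message":{"role":"assistant","type":"answer","content":"Hi","content_type":"text"},"conversation_id":"123"}`
    payload := strings.TrimPrefix(line, "data:")

    var sr coze.StreamResponse
    if err := json.Unmarshal([]byte(payload), &sr); err != nil {
        panic(err)
    }
    // Only "answer" messages produce a chunk; other message types are skipped.
    chunk, _ := coze.StreamResponseCoze2OpenAI(&sr)
    out, _ := json.Marshal(chunk)
    fmt.Println(string(out)) // an OpenAI-style chat.completion.chunk carrying "Hi"
}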
relay/adaptor/coze/model.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package coze

type Message struct {
    Role        string `json:"role"`
    Type        string `json:"type"`
    Content     string `json:"content"`
    ContentType string `json:"content_type"`
}

type ErrorInformation struct {
    Code int    `json:"code"`
    Msg  string `json:"msg"`
}

type Request struct {
    ConversationId string    `json:"conversation_id,omitempty"`
    BotId          string    `json:"bot_id"`
    User           string    `json:"user"`
    Query          string    `json:"query"`
    ChatHistory    []Message `json:"chat_history,omitempty"`
    Stream         bool      `json:"stream"`
}

type Response struct {
    ConversationId string    `json:"conversation_id,omitempty"`
    Messages       []Message `json:"messages,omitempty"`
    Code           int       `json:"code,omitempty"`
    Msg            string    `json:"msg,omitempty"`
}

type StreamResponse struct {
    Event            string            `json:"event,omitempty"`
    Message          *Message          `json:"message,omitempty"`
    IsFinish         bool              `json:"is_finish,omitempty"`
    Index            int               `json:"index,omitempty"`
    ConversationId   string            `json:"conversation_id,omitempty"`
    ErrorInformation *ErrorInformation `json:"error_information,omitempty"`
}
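Putting the request side together: a minimal sketch, not part of the commit, of the wire format ConvertRequest produces for these structs (the bot id and message contents are placeholders):

package main

import (
    "encoding/json"
    "fmt"

    "github.com/songquanpeng/one-api/relay/adaptor/coze"
    "github.com/songquanpeng/one-api/relay/model"
)

func main() {
    // The requested "model" is sent to Coze as the bot_id; all but the last
    // message go into chat_history, and the last one becomes the query.
    req := model.GeneralOpenAIRequest{
        Model:  "7372625223245643776", // placeholder bot id
        Stream: true,
        Messages: []model.Message{
            {Role: "user", Content: "Hello"},
            {Role: "assistant", Content: "Hi, how can I help?"},
            {Role: "user", Content: "Tell me a joke"},
        },
    }
    body, _ := json.MarshalIndent(coze.ConvertRequest(req), "", "  ")
    fmt.Println(string(body))
    // {
    //   "bot_id": "7372625223245643776",
    //   "user": "One API User",
    //   "query": "Tell me a joke",
    //   "chat_history": [...],
    //   "stream": true
    // }
}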
@@ -13,6 +13,7 @@ const (
     Gemini
     Ollama
     AwsClaude
+    Coze

     Dummy // this one is only for count, do not add any channel after this
 )
@@ -35,6 +35,7 @@ const (
     LingYiWanWu
     StepFun
     AwsClaude
+    Coze

     Dummy
 )
@@ -27,6 +27,8 @@ func ToAPIType(channelType int) int {
         apiType = apitype.Ollama
     case AwsClaude:
         apiType = apitype.AwsClaude
+    case Coze:
+        apiType = apitype.Coze
     }

     return apiType
@@ -35,6 +35,7 @@ var ChannelBaseURLs = []string{
     "https://api.lingyiwanwu.com", // 31
     "https://api.stepfun.com", // 32
     "", // 33
+    "https://api.coze.com", // 34
 }

 func init() {
@@ -19,6 +19,7 @@ export const CHANNEL_OPTIONS = [
     { key: 30, text: 'Ollama', value: 30, color: 'black' },
     { key: 31, text: '零一万物', value: 31, color: 'green' },
     { key: 32, text: '阶跃星辰', value: 32, color: 'blue' },
+    { key: 34, text: 'Coze', value: 34, color: 'blue' },
     { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
     { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' },
     { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' },