feat: Support Cloudflare AI

Martial BE 2024-04-16 18:08:56 +08:00
parent 5606a104f6
commit 344555418e
No known key found for this signature in database
GPG Key ID: D06C32DF0EDB9084
14 changed files with 606 additions and 50 deletions

View File

@ -172,6 +172,7 @@ const (
ChannelTypeBedrock = 32
ChannelTypeLingyi = 33
ChannelTypeMidjourney = 34
ChannelTypeCloudflareAI = 35
)
var ChannelBaseURLs = []string{
@ -210,6 +211,7 @@ var ChannelBaseURLs = []string{
"", //32
"https://api.lingyiwanwu.com", //33
"", //34
"", //35
}
const (

View File

@ -104,6 +104,10 @@ func InitDB() (err error) {
return nil
}
common.SysLog("database migration started")
// err = MigrateDB(DB)
// if err != nil {
// return err
// }
err = db.AutoMigrate(&Channel{})
if err != nil {
return err
@ -157,6 +161,24 @@ func InitDB() (err error) {
return err
}
// func MigrateDB(db *gorm.DB) error {
// if DB.Migrator().HasConstraint(&Price{}, "model") {
// fmt.Println("----Price model has constraint----")
// // If "model" is the primary key, drop the primary key constraint
// err := db.Migrator().DropConstraint(&Price{}, "model")
// if err != nil {
// return err
// }
// // Alter the column length
// err = db.Migrator().AlterColumn(&Price{}, "model")
// if err != nil {
// return err
// }
// }
// return nil
// }
func CloseDB() error {
sqlDB, err := DB.DB()
if err != nil {

View File

@ -15,7 +15,7 @@ const (
)
type Price struct {
Model string `json:"model" gorm:"type:varchar(30);primaryKey" binding:"required"`
Model string `json:"model" gorm:"type:varchar(100)" binding:"required"`
Type string `json:"type" gorm:"default:'tokens'" binding:"required"`
ChannelType int `json:"channel_type" gorm:"default:0" binding:"gte=0"`
Input float64 `json:"input" gorm:"default:0" binding:"gte=0"`
@ -98,11 +98,7 @@ func DeleteAllPrices(tx *gorm.DB) error {
}
func (price *Price) Delete() error {
err := DB.Delete(price).Error
if err != nil {
return err
}
return err
return DB.Where("model = ?", price.Model).Delete(&Price{}).Error
}
type ModelType struct {
@ -289,6 +285,16 @@ func GetDefaultPrice() []*Price {
"yi-34b-chat-200k": {[]float64{0.8571, 0.8571}, common.ChannelTypeLingyi},
// 6 元 / 1M tokens 0.006 / 1k tokens
"yi-vl-plus": {[]float64{0.4286, 0.4286}, common.ChannelTypeLingyi},
"@cf/stabilityai/stable-diffusion-xl-base-1.0": {[]float64{0, 0}, common.ChannelTypeCloudflareAI},
"@cf/lykon/dreamshaper-8-lcm": {[]float64{0, 0}, common.ChannelTypeCloudflareAI},
"@cf/bytedance/stable-diffusion-xl-lightning": {[]float64{0, 0}, common.ChannelTypeCloudflareAI},
"@cf/qwen/qwen1.5-7b-chat-awq": {[]float64{0, 0}, common.ChannelTypeCloudflareAI},
"@cf/qwen/qwen1.5-14b-chat-awq": {[]float64{0, 0}, common.ChannelTypeCloudflareAI},
"@hf/thebloke/deepseek-coder-6.7b-base-awq": {[]float64{0, 0}, common.ChannelTypeCloudflareAI},
"@hf/google/gemma-7b-it": {[]float64{0, 0}, common.ChannelTypeCloudflareAI},
"@hf/thebloke/llama-2-13b-chat-awq": {[]float64{0, 0}, common.ChannelTypeCloudflareAI},
"@cf/openai/whisper": {[]float64{0, 0}, common.ChannelTypeCloudflareAI},
}
var prices []*Price

View File

@ -0,0 +1,86 @@
package cloudflareAI
import (
"encoding/json"
"fmt"
"net/http"
"one-api/common/requester"
"one-api/model"
"one-api/providers/base"
"one-api/types"
"strings"
)
type CloudflareAIProviderFactory struct{}
// Create a CloudflareAIProvider
func (f CloudflareAIProviderFactory) Create(channel *model.Channel) base.ProviderInterface {
cf := &CloudflareAIProvider{
BaseProvider: base.BaseProvider{
Config: getConfig(),
Channel: channel,
Requester: requester.NewHTTPRequester(*channel.Proxy, requestErrorHandle),
},
}
tokens := strings.Split(channel.Key, "|")
if len(tokens) == 2 {
cf.AccountID = tokens[0]
cf.CFToken = tokens[1]
}
return cf
}
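// CloudflareAIProvider embeds the shared BaseProvider and keeps the Cloudflare account ID and API token parsed from the channel key.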
type CloudflareAIProvider struct {
base.BaseProvider
AccountID string
CFToken string
}
func getConfig() base.ProviderConfig {
return base.ProviderConfig{
BaseURL: "https://api.cloudflare.com/client/v4/accounts/%s/ai/run/%s",
ImagesGenerations: "true",
ChatCompletions: "true",
AudioTranscriptions: "true",
}
}
// Request error handling
func requestErrorHandle(resp *http.Response) *types.OpenAIError {
CloudflareAIError := &CloudflareAIError{}
err := json.NewDecoder(resp.Body).Decode(CloudflareAIError)
if err != nil {
return nil
}
return errorHandle(CloudflareAIError)
}
// Error handling
func errorHandle(CloudflareAIError *CloudflareAIError) *types.OpenAIError {
if CloudflareAIError.Success || len(CloudflareAIError.Error) == 0 {
return nil
}
return &types.OpenAIError{
Message: CloudflareAIError.Error[0].Message,
Type: "CloudflareAI error",
Code: CloudflareAIError.Error[0].Code,
}
}
// Get the request headers
func (p *CloudflareAIProvider) GetRequestHeaders() (headers map[string]string) {
headers = make(map[string]string)
p.CommonRequestHeaders(headers)
headers["Authorization"] = fmt.Sprintf("Bearer %s", p.CFToken)
return headers
}
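// GetFullRequestURL fills the account ID and model name into the base URL template, e.g. https://api.cloudflare.com/client/v4/accounts/<ACCOUNT_ID>/ai/run/@cf/openai/whisper (placeholder account ID).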
func (p *CloudflareAIProvider) GetFullRequestURL(modelName string) string {
baseURL := strings.TrimSuffix(p.GetBaseURL(), "/")
return fmt.Sprintf(baseURL, p.AccountID, modelName)
}
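A minimal standalone sketch of how the channel key and model name resolve to the Cloudflare endpoint and Authorization header, mirroring Create and GetFullRequestURL above; the account ID, token, and model values are hypothetical.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Channel key in ACCOUNT_ID|API_TOKEN form (hypothetical values).
	key := "0123456789abcdef|cf_example_token"
	parts := strings.Split(key, "|")
	accountID, token := parts[0], parts[1]
	model := "@cf/qwen/qwen1.5-7b-chat-awq"
	// Fill the account ID and model into the base URL template from getConfig.
	url := fmt.Sprintf("https://api.cloudflare.com/client/v4/accounts/%s/ai/run/%s", accountID, model)
	fmt.Println("POST", url)
	fmt.Println("Authorization: Bearer " + token)
}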

View File

@ -0,0 +1,184 @@
package cloudflareAI
import (
"encoding/json"
"fmt"
"io"
"net/http"
"one-api/common"
"one-api/common/requester"
"one-api/types"
"strings"
)
type CloudflareAIStreamHandler struct {
Usage *types.Usage
Request *types.ChatCompletionRequest
}
func (p *CloudflareAIProvider) CreateChatCompletion(request *types.ChatCompletionRequest) (*types.ChatCompletionResponse, *types.OpenAIErrorWithStatusCode) {
req, errWithCode := p.getChatRequest(request)
if errWithCode != nil {
return nil, errWithCode
}
defer req.Body.Close()
chatResponse := &ChatResponse{}
// Send the request
_, errWithCode = p.Requester.SendRequest(req, chatResponse, false)
if errWithCode != nil {
return nil, errWithCode
}
return p.convertToChatOpenai(chatResponse, request)
}
func (p *CloudflareAIProvider) CreateChatCompletionStream(request *types.ChatCompletionRequest) (requester.StreamReaderInterface[string], *types.OpenAIErrorWithStatusCode) {
req, errWithCode := p.getChatRequest(request)
if errWithCode != nil {
return nil, errWithCode
}
defer req.Body.Close()
// Send the request
resp, errWithCode := p.Requester.SendRequestRaw(req)
if errWithCode != nil {
return nil, errWithCode
}
chatHandler := &CloudflareAIStreamHandler{
Usage: p.Usage,
Request: request,
}
return requester.RequestStream[string](p.Requester, resp, chatHandler.handlerStream)
}
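// getChatRequest builds the HTTP request shared by the blocking and streaming chat paths: it resolves the per-model URL, converts the OpenAI request body, and attaches the Cloudflare headers.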
func (p *CloudflareAIProvider) getChatRequest(request *types.ChatCompletionRequest) (*http.Request, *types.OpenAIErrorWithStatusCode) {
// Get the request URL
fullRequestURL := p.GetFullRequestURL(request.Model)
if fullRequestURL == "" {
return nil, common.ErrorWrapper(nil, "invalid_cloudflare_ai_config", http.StatusInternalServerError)
}
// Get the request headers
headers := p.GetRequestHeaders()
chatRequest := p.convertFromChatOpenai(request)
// Create the request
req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(chatRequest), p.Requester.WithHeader(headers))
if err != nil {
return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
}
return req, nil
}
func (p *CloudflareAIProvider) convertToChatOpenai(response *ChatResponse, request *types.ChatCompletionRequest) (openaiResponse *types.ChatCompletionResponse, errWithCode *types.OpenAIErrorWithStatusCode) {
err := errorHandle(&response.CloudflareAIError)
if err != nil {
errWithCode = &types.OpenAIErrorWithStatusCode{
OpenAIError: *err,
StatusCode: http.StatusBadRequest,
}
return
}
openaiResponse = &types.ChatCompletionResponse{
ID: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
Object: "chat.completion",
Created: common.GetTimestamp(),
Model: request.Model,
Choices: []types.ChatCompletionChoice{{
Index: 0,
Message: types.ChatCompletionMessage{
Role: types.ChatMessageRoleAssistant,
Content: response.Result.Response,
},
FinishReason: types.FinishReasonStop,
}},
}
completionTokens := common.CountTokenText(response.Result.Response, request.Model)
p.Usage.CompletionTokens = completionTokens
p.Usage.TotalTokens = p.Usage.PromptTokens + completionTokens
openaiResponse.Usage = p.Usage
return
}
func (p *CloudflareAIProvider) convertFromChatOpenai(request *types.ChatCompletionRequest) *ChatRequest {
chatRequest := &ChatRequest{
Stream: request.Stream,
MaxTokens: request.MaxTokens,
Messages: make([]Message, 0, len(request.Messages)),
}
for _, message := range request.Messages {
chatRequest.Messages = append(chatRequest.Messages, Message{
Role: message.Role,
Content: message.StringContent(),
})
}
return chatRequest
}
// Convert Cloudflare stream chunks into the OpenAI chat streaming response format
func (h *CloudflareAIStreamHandler) handlerStream(rawLine *[]byte, dataChan chan string, errChan chan error) {
// If rawLine does not start with "data: ", skip it
if !strings.HasPrefix(string(*rawLine), "data: ") {
*rawLine = nil
return
}
*rawLine = (*rawLine)[6:]
if strings.HasPrefix(string(*rawLine), "[DONE]") {
h.convertToOpenaiStream(nil, dataChan, true)
errChan <- io.EOF
*rawLine = requester.StreamClosed
return
}
chatResponse := &ChatResult{}
err := json.Unmarshal(*rawLine, chatResponse)
if err != nil {
errChan <- common.ErrorToOpenAIError(err)
return
}
h.convertToOpenaiStream(chatResponse, dataChan, false)
}
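// convertToOpenaiStream wraps a Cloudflare chunk (or the final stop signal) in an OpenAI-style chat.completion.chunk, updates the usage counters, and pushes the JSON onto dataChan.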
func (h *CloudflareAIStreamHandler) convertToOpenaiStream(chatResponse *ChatResult, dataChan chan string, isStop bool) {
streamResponse := types.ChatCompletionStreamResponse{
ID: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Model: h.Request.Model,
}
choice := types.ChatCompletionStreamChoice{
Index: 0,
Delta: types.ChatCompletionStreamChoiceDelta{
Role: types.ChatMessageRoleAssistant,
Content: "",
},
}
if isStop {
choice.FinishReason = types.FinishReasonStop
} else {
choice.Delta.Content = chatResponse.Response
h.Usage.CompletionTokens += common.CountTokenText(chatResponse.Response, h.Request.Model)
h.Usage.TotalTokens = h.Usage.PromptTokens + h.Usage.CompletionTokens
}
streamResponse.Choices = []types.ChatCompletionStreamChoice{choice}
responseBody, _ := json.Marshal(streamResponse)
dataChan <- string(responseBody)
}

View File

@ -0,0 +1,62 @@
package cloudflareAI
import (
"encoding/base64"
"io"
"net/http"
"one-api/common"
"one-api/types"
"time"
)
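// CreateImageGenerations sends the prompt to the Cloudflare image model and converts the returned PNG bytes into an OpenAI-style base64 image response.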
func (p *CloudflareAIProvider) CreateImageGenerations(request *types.ImageRequest) (*types.ImageResponse, *types.OpenAIErrorWithStatusCode) {
// Get the request URL
fullRequestURL := p.GetFullRequestURL(request.Model)
if fullRequestURL == "" {
return nil, common.ErrorWrapper(nil, "invalid_cloudflare_ai_config", http.StatusInternalServerError)
}
// Get the request headers
headers := p.GetRequestHeaders()
cfRequest := convertFromImageOpenai(request)
// Create the request
req, err := p.Requester.NewRequest(http.MethodPost, fullRequestURL, p.Requester.WithBody(cfRequest), p.Requester.WithHeader(headers))
if err != nil {
return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
}
defer req.Body.Close()
resp, errWithCode := p.Requester.SendRequestRaw(req)
if errWithCode != nil {
return nil, errWithCode
}
defer resp.Body.Close()
if resp.Header.Get("Content-Type") != "image/png" {
return nil, common.StringErrorWrapper("invalid_image_response", "invalid_image_response", http.StatusInternalServerError)
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, common.ErrorWrapper(err, "read_response_failed", http.StatusInternalServerError)
}
base64Image := base64.StdEncoding.EncodeToString(body)
openaiResponse := &types.ImageResponse{
Created: time.Now().Unix(),
Data: []types.ImageResponseDataInner{{B64JSON: base64Image}},
}
p.Usage.PromptTokens = 1000
return openaiResponse, nil
}
func convertFromImageOpenai(request *types.ImageRequest) *ImageRequest {
return &ImageRequest{
Prompt: request.Prompt,
}
}

View File

@ -0,0 +1,94 @@
package cloudflareAI
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"one-api/common"
"one-api/common/requester"
"one-api/types"
)
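// CreateTranscriptions uploads the audio file as multipart form data to the Cloudflare model and wraps the transcription result in an OpenAI-compatible response.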
func (p *CloudflareAIProvider) CreateTranscriptions(request *types.AudioRequest) (*types.AudioResponseWrapper, *types.OpenAIErrorWithStatusCode) {
req, errWithCode := p.getRequestAudioBody(request.Model, request)
if errWithCode != nil {
return nil, errWithCode
}
defer req.Body.Close()
var resp *http.Response
var err error
audioResponse := &AudioResponse{}
resp, errWithCode = p.Requester.SendRequest(req, audioResponse, false)
if errWithCode != nil {
return nil, errWithCode
}
errWithOP := errorHandle(&audioResponse.CloudflareAIError)
if errWithOP != nil {
errWithCode = &types.OpenAIErrorWithStatusCode{
OpenAIError: *errWithOP,
StatusCode: http.StatusBadRequest,
}
return nil, errWithCode
}
chatResult := audioResponse.Result
audioResponseWrapper := &types.AudioResponseWrapper{}
audioResponseWrapper.Headers = map[string]string{
"Content-Type": resp.Header.Get("Content-Type"),
}
audioResponseWrapper.Body, err = json.Marshal(&chatResult)
if err != nil {
return nil, common.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
}
completionTokens := common.CountTokenText(chatResult.Text, request.Model)
p.Usage.CompletionTokens = completionTokens
p.Usage.TotalTokens = p.Usage.PromptTokens + p.Usage.CompletionTokens
return audioResponseWrapper, nil
}
func (p *CloudflareAIProvider) getRequestAudioBody(ModelName string, request *types.AudioRequest) (*http.Request, *types.OpenAIErrorWithStatusCode) {
// Get the request URL
fullRequestURL := p.GetFullRequestURL(ModelName)
// Get the request headers
headers := p.GetRequestHeaders()
// Create the request
var req *http.Request
var err error
var formBody bytes.Buffer
builder := p.Requester.CreateFormBuilder(&formBody)
if err := audioMultipartForm(request, builder); err != nil {
return nil, common.ErrorWrapper(err, "create_form_builder_failed", http.StatusInternalServerError)
}
req, err = p.Requester.NewRequest(
http.MethodPost,
fullRequestURL,
p.Requester.WithBody(&formBody),
p.Requester.WithHeader(headers),
p.Requester.WithContentType(builder.FormDataContentType()))
if err != nil {
return nil, common.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
}
req.ContentLength = int64(formBody.Len())
return req, nil
}
func audioMultipartForm(request *types.AudioRequest, b requester.FormBuilder) error {
err := b.CreateFormFile("file", request.File)
if err != nil {
return fmt.Errorf("creating form file: %w", err)
}
return b.Close()
}

View File

@ -0,0 +1,60 @@
package cloudflareAI
import "one-api/types"
type CloudflareAIError struct {
Error []struct {
Code int `json:"code"`
Message string `json:"message"`
} `json:"errors,omitempty"`
Success bool `json:"success"`
}
type ImageRequest struct {
Prompt string `json:"prompt"`
Image interface{} `json:"image,omitempty"` // can be a string or an ImageObject
Mask interface{} `json:"mask,omitempty"` // can be a string or a MaskObject
NumSteps int `json:"num_steps,omitempty"`
Strength float64 `json:"strength,omitempty"`
Guidance float64 `json:"guidance,omitempty"`
}
type ImageObject struct {
Image []float64 `json:"image"`
}
type MaskObject struct {
Mask []float64 `json:"mask"`
}
type ChatRequest struct {
Messages []Message `json:"messages"`
Stream bool `json:"stream,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
}
type Message struct {
Role string `json:"role"`
Content string `json:"content"`
}
type ChatResponse struct {
Result ChatResult `json:"result,omitempty"`
CloudflareAIError
}
type ChatResult struct {
Response string `json:"response"`
}
type AudioResponse struct {
Result AudioResult `json:"result,omitempty"`
CloudflareAIError
}
type AudioResult struct {
Text string `json:"text,omitempty"`
WordCount int `json:"word_count,omitempty"`
Words []types.AudioWordsList `json:"words,omitempty"`
Vtt string `json:"vtt,omitempty"`
}
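// Illustrative payload shapes these structs decode, based on the field tags above (values are hypothetical, not taken from the Cloudflare docs):
// success: {"result":{"response":"Hello!"},"success":true}
// failure: {"errors":[{"code":1234,"message":"example error"}],"success":false}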

View File

@ -11,6 +11,7 @@ import (
"one-api/providers/base"
"one-api/providers/bedrock"
"one-api/providers/claude"
"one-api/providers/cloudflareAI"
"one-api/providers/deepseek"
"one-api/providers/gemini"
"one-api/providers/groq"
@ -54,6 +55,7 @@ func init() {
providerFactories[common.ChannelTypeGroq] = groq.GroqProviderFactory{}
providerFactories[common.ChannelTypeBedrock] = bedrock.BedrockProviderFactory{}
providerFactories[common.ChannelTypeMidjourney] = midjourney.MidjourneyProviderFactory{}
providerFactories[common.ChannelTypeCloudflareAI] = cloudflareAI.CloudflareAIProviderFactory{}
}

View File

@ -7,23 +7,24 @@ var ModelOwnedBy map[int]string
func init() {
ModelOwnedBy = map[int]string{
common.ChannelTypeOpenAI: "OpenAI",
common.ChannelTypeAnthropic: "Anthropic",
common.ChannelTypeBaidu: "Baidu",
common.ChannelTypePaLM: "Google PaLM",
common.ChannelTypeGemini: "Google Gemini",
common.ChannelTypeZhipu: "Zhipu",
common.ChannelTypeAli: "Ali",
common.ChannelTypeXunfei: "Xunfei",
common.ChannelType360: "360",
common.ChannelTypeTencent: "Tencent",
common.ChannelTypeBaichuan: "Baichuan",
common.ChannelTypeMiniMax: "MiniMax",
common.ChannelTypeDeepseek: "Deepseek",
common.ChannelTypeMoonshot: "Moonshot",
common.ChannelTypeMistral: "Mistral",
common.ChannelTypeGroq: "Groq",
common.ChannelTypeLingyi: "Lingyiwanwu",
common.ChannelTypeMidjourney: "Midjourney",
common.ChannelTypeOpenAI: "OpenAI",
common.ChannelTypeAnthropic: "Anthropic",
common.ChannelTypeBaidu: "Baidu",
common.ChannelTypePaLM: "Google PaLM",
common.ChannelTypeGemini: "Google Gemini",
common.ChannelTypeZhipu: "Zhipu",
common.ChannelTypeAli: "Ali",
common.ChannelTypeXunfei: "Xunfei",
common.ChannelType360: "360",
common.ChannelTypeTencent: "Tencent",
common.ChannelTypeBaichuan: "Baichuan",
common.ChannelTypeMiniMax: "MiniMax",
common.ChannelTypeDeepseek: "Deepseek",
common.ChannelTypeMoonshot: "Moonshot",
common.ChannelTypeMistral: "Mistral",
common.ChannelTypeGroq: "Groq",
common.ChannelTypeLingyi: "Lingyiwanwu",
common.ChannelTypeMidjourney: "Midjourney",
common.ChannelTypeCloudflareAI: "Cloudflare AI",
}
}

View File

@ -20,11 +20,18 @@ type AudioRequest struct {
}
type AudioResponse struct {
Task string `json:"task,omitempty"`
Language string `json:"language,omitempty"`
Duration float64 `json:"duration,omitempty"`
Segments any `json:"segments,omitempty"`
Text string `json:"text"`
Task string `json:"task,omitempty"`
Language string `json:"language,omitempty"`
Duration float64 `json:"duration,omitempty"`
Segments any `json:"segments,omitempty"`
Text string `json:"text"`
Words []AudioWordsList `json:"words,omitempty"`
}
type AudioWordsList struct {
Word string `json:"word"`
Start float64 `json:"start"`
End float64 `json:"end"`
}
type AudioResponseWrapper struct {

View File

@ -139,6 +139,13 @@ export const CHANNEL_OPTIONS = {
color: 'orange',
url: ''
},
35: {
key: 35,
text: 'Cloudflare AI',
value: 35,
color: 'orange',
url: ''
},
24: {
key: 24,
text: 'Azure Speech',

View File

@ -351,27 +351,29 @@ const EditModal = ({ open, channelId, onCancel, onOk, groupOptions }) => {
)}
</FormControl>
<FormControl fullWidth error={Boolean(touched.base_url && errors.base_url)} sx={{ ...theme.typography.otherInput }}>
<InputLabel htmlFor="channel-base_url-label">{inputLabel.base_url}</InputLabel>
<OutlinedInput
id="channel-base_url-label"
label={inputLabel.base_url}
type="text"
value={values.base_url}
name="base_url"
onBlur={handleBlur}
onChange={handleChange}
inputProps={{}}
aria-describedby="helper-text-channel-base_url-label"
/>
{touched.base_url && errors.base_url ? (
<FormHelperText error id="helper-tex-channel-base_url-label">
{errors.base_url}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-base_url-label"> {inputPrompt.base_url} </FormHelperText>
)}
</FormControl>
{inputPrompt.base_url && (
<FormControl fullWidth error={Boolean(touched.base_url && errors.base_url)} sx={{ ...theme.typography.otherInput }}>
<InputLabel htmlFor="channel-base_url-label">{inputLabel.base_url}</InputLabel>
<OutlinedInput
id="channel-base_url-label"
label={inputLabel.base_url}
type="text"
value={values.base_url}
name="base_url"
onBlur={handleBlur}
onChange={handleChange}
inputProps={{}}
aria-describedby="helper-text-channel-base_url-label"
/>
{touched.base_url && errors.base_url ? (
<FormHelperText error id="helper-tex-channel-base_url-label">
{errors.base_url}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-base_url-label"> {inputPrompt.base_url} </FormHelperText>
)}
</FormControl>
)}
{inputPrompt.other && (
<FormControl fullWidth error={Boolean(touched.other && errors.other)} sx={{ ...theme.typography.otherInput }}>

View File

@ -262,6 +262,27 @@ const typeConfig = {
model_mapping: ''
},
modelGroup: 'Midjourney'
},
35: {
input: {
models: [
'@cf/stabilityai/stable-diffusion-xl-base-1.0',
'@cf/lykon/dreamshaper-8-lcm',
'@cf/bytedance/stable-diffusion-xl-lightning',
'@cf/qwen/qwen1.5-7b-chat-awq',
'@cf/qwen/qwen1.5-14b-chat-awq',
'@hf/google/gemma-7b-it',
'@hf/thebloke/deepseek-coder-6.7b-base-awq',
'@hf/thebloke/llama-2-13b-chat-awq',
'@cf/openai/whisper'
],
test_model: '@hf/google/gemma-7b-it'
},
prompt: {
key: 'Enter the key in the following format: CLOUDFLARE_ACCOUNT_ID|CLOUDFLARE_API_TOKEN',
base_url: ''
},
modelGroup: 'Cloudflare AI'
}
};