Merge branch 'songquanpeng:main' into origin-main

Authored by 取梦为饮 on 2024-06-04 15:53:19 +08:00, committed by GitHub
commit 168a5f1f31
32 changed files with 710 additions and 519 deletions


@ -384,14 +384,17 @@ graph LR
+ `TIKTOKEN_CACHE_DIR`: by default the program downloads the token encodings for some common models (e.g. `gpt-3.5-turbo`) over the network at startup; on unstable networks or offline machines this can break startup. Point this variable at a directory to cache the data, which can also be copied to an offline environment.
+ `DATA_GYM_CACHE_DIR`: currently behaves the same as `TIKTOKEN_CACHE_DIR`, but with lower priority.
17. `RELAY_TIMEOUT`: relay timeout in seconds; no timeout is set by default.
18. `SQLITE_BUSY_TIMEOUT`: SQLite lock wait timeout in milliseconds, default `3000`.
19. `GEMINI_SAFETY_SETTING`: Gemini safety setting, default `BLOCK_NONE`.
20. `GEMINI_VERSION`: the Gemini API version used by One API, default `v1`.
21. `THEME`: the system theme, default `default`; see [here](./web/README.md) for the available values.
22. `ENABLE_METRIC`: whether to disable channels based on request success rate; off by default, allowed values are `true` and `false`.
23. `METRIC_QUEUE_SIZE`: size of the request success rate statistics queue, default `10`.
24. `METRIC_SUCCESS_RATE_THRESHOLD`: request success rate threshold, default `0.8`.
25. `INITIAL_ROOT_TOKEN`: if set, a root user token with this value is created automatically on the system's first startup.
18. `RELAY_PROXY`: if set, this proxy is used for API requests.
19. `USER_CONTENT_REQUEST_TIMEOUT`: timeout in seconds for downloading user-uploaded content.
20. `USER_CONTENT_REQUEST_PROXY`: if set, this proxy is used to fetch user-uploaded content, such as images.
21. `SQLITE_BUSY_TIMEOUT`: SQLite lock wait timeout in milliseconds, default `3000`.
22. `GEMINI_SAFETY_SETTING`: Gemini safety setting, default `BLOCK_NONE`.
23. `GEMINI_VERSION`: the Gemini API version used by One API, default `v1`.
24. `THEME`: the system theme, default `default`; see [here](./web/README.md) for the available values.
25. `ENABLE_METRIC`: whether to disable channels based on request success rate; off by default, allowed values are `true` and `false`.
26. `METRIC_QUEUE_SIZE`: size of the request success rate statistics queue, default `10`.
27. `METRIC_SUCCESS_RATE_THRESHOLD`: request success rate threshold, default `0.8`.
28. `INITIAL_ROOT_TOKEN`: if set, a root user token with this value is created automatically on the system's first startup.
### Command Line Arguments
1. `--port <port_number>`: the port the server listens on, default `3000`.

common/client/init.go (new file, 60 lines)

@ -0,0 +1,60 @@
package client
import (
"fmt"
"github.com/songquanpeng/one-api/common/config"
"github.com/songquanpeng/one-api/common/logger"
"net/http"
"net/url"
"time"
)
var HTTPClient *http.Client
var ImpatientHTTPClient *http.Client
var UserContentRequestHTTPClient *http.Client
func Init() {
if config.UserContentRequestProxy != "" {
logger.SysLog(fmt.Sprintf("using %s as proxy to fetch user content", config.UserContentRequestProxy))
proxyURL, err := url.Parse(config.UserContentRequestProxy)
if err != nil {
logger.FatalLog(fmt.Sprintf("USER_CONTENT_REQUEST_PROXY set but invalid: %s", config.UserContentRequestProxy))
}
transport := &http.Transport{
Proxy: http.ProxyURL(proxyURL),
}
UserContentRequestHTTPClient = &http.Client{
Transport: transport,
Timeout: time.Second * time.Duration(config.UserContentRequestTimeout),
}
} else {
UserContentRequestHTTPClient = &http.Client{}
}
var transport http.RoundTripper
if config.RelayProxy != "" {
logger.SysLog(fmt.Sprintf("using %s as api relay proxy", config.RelayProxy))
proxyURL, err := url.Parse(config.RelayProxy)
if err != nil {
logger.FatalLog(fmt.Sprintf("USER_CONTENT_REQUEST_PROXY set but invalid: %s", config.UserContentRequestProxy))
}
transport = &http.Transport{
Proxy: http.ProxyURL(proxyURL),
}
}
if config.RelayTimeout == 0 {
HTTPClient = &http.Client{
Transport: transport,
}
} else {
HTTPClient = &http.Client{
Timeout: time.Duration(config.RelayTimeout) * time.Second,
Transport: transport,
}
}
ImpatientHTTPClient = &http.Client{
Timeout: 5 * time.Second,
Transport: transport,
}
}
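
The new `client` package replaces the old `relay/client` one (deleted later in this diff) and centralizes HTTP client construction. A hypothetical usage sketch, not part of this commit, showing how a caller would fetch user content once `client.Init()` has run; the URL is a placeholder:

```go
// Hypothetical usage sketch (not part of this commit): picking the right client
// after client.Init() has run. The URL below is a placeholder.
package main

import (
	"fmt"
	"io"

	"github.com/songquanpeng/one-api/common/client"
)

func fetchUserImage(url string) ([]byte, error) {
	// Goes through USER_CONTENT_REQUEST_PROXY and honors USER_CONTENT_REQUEST_TIMEOUT.
	resp, err := client.UserContentRequestHTTPClient.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	return io.ReadAll(resp.Body)
}

func main() {
	client.Init() // main.go below calls this once during startup
	data, err := fetchUserImage("https://example.com/cat.png")
	if err != nil {
		fmt.Println("fetch failed:", err)
		return
	}
	fmt.Println("fetched", len(data), "bytes")
}
```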


@ -144,3 +144,7 @@ var MetricFailChanSize = env.Int("METRIC_FAIL_CHAN_SIZE", 128)
var InitialRootToken = os.Getenv("INITIAL_ROOT_TOKEN")
var GeminiVersion = env.String("GEMINI_VERSION", "v1")
var RelayProxy = env.String("RELAY_PROXY", "")
var UserContentRequestProxy = env.String("USER_CONTENT_REQUEST_PROXY", "")
var UserContentRequestTimeout = env.Int("USER_CONTENT_REQUEST_TIMEOUT", 30)
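
The new settings are read through the repo's `common/env` helpers, whose implementation is not shown in this diff. A minimal sketch of what such helpers typically look like, purely as an assumption about their shape:

```go
// Assumed shape of the env helpers used above; the real common/env package
// may differ in naming and error handling.
package env

import (
	"os"
	"strconv"
)

// String returns the environment variable's value, or defaultValue if unset.
func String(name string, defaultValue string) string {
	if v, ok := os.LookupEnv(name); ok {
		return v
	}
	return defaultValue
}

// Int parses the environment variable as an integer, falling back to defaultValue.
func Int(name string, defaultValue int) int {
	if v, ok := os.LookupEnv(name); ok {
		if i, err := strconv.Atoi(v); err == nil {
			return i
		}
	}
	return defaultValue
}
```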


@ -3,6 +3,7 @@ package image
import (
"bytes"
"encoding/base64"
"github.com/songquanpeng/one-api/common/client"
"image"
_ "image/gif"
_ "image/jpeg"
@ -19,7 +20,7 @@ import (
var dataURLPattern = regexp.MustCompile(`data:image/([^;]+);base64,(.*)`)
func IsImageUrl(url string) (bool, error) {
resp, err := http.Head(url)
resp, err := client.UserContentRequestHTTPClient.Head(url)
if err != nil {
return false, err
}
@ -34,7 +35,7 @@ func GetImageSizeFromUrl(url string) (width int, height int, err error) {
if !isImage {
return
}
resp, err := http.Get(url)
resp, err := client.UserContentRequestHTTPClient.Get(url)
if err != nil {
return
}


@ -4,12 +4,12 @@ import (
"encoding/json"
"errors"
"fmt"
"github.com/songquanpeng/one-api/common/client"
"github.com/songquanpeng/one-api/common/config"
"github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/model"
"github.com/songquanpeng/one-api/monitor"
"github.com/songquanpeng/one-api/relay/channeltype"
"github.com/songquanpeng/one-api/relay/client"
"io"
"net/http"
"strconv"


@ -4,6 +4,9 @@ import (
"bytes"
"context"
"fmt"
"io"
"net/http"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/config"
@ -16,8 +19,6 @@ import (
"github.com/songquanpeng/one-api/relay/controller"
"github.com/songquanpeng/one-api/relay/model"
"github.com/songquanpeng/one-api/relay/relaymode"
"io"
"net/http"
)
// https://platform.openai.com/docs/api-reference/chat
@ -47,6 +48,7 @@ func Relay(c *gin.Context) {
logger.Debugf(ctx, "request body: %s", string(requestBody))
}
channelId := c.GetInt(ctxkey.ChannelId)
userId := c.GetInt("id")
bizErr := relayHelper(c, relayMode)
if bizErr == nil {
monitor.Emit(channelId, true)
@ -56,7 +58,7 @@ func Relay(c *gin.Context) {
channelName := c.GetString(ctxkey.ChannelName)
group := c.GetString(ctxkey.Group)
originalModel := c.GetString(ctxkey.OriginalModel)
go processChannelRelayError(ctx, channelId, channelName, bizErr)
go processChannelRelayError(ctx, userId, channelId, channelName, bizErr)
requestId := c.GetString(helper.RequestIdKey)
retryTimes := config.RetryTimes
if !shouldRetry(c, bizErr.StatusCode) {
@ -83,7 +85,7 @@ func Relay(c *gin.Context) {
channelId := c.GetInt(ctxkey.ChannelId)
lastFailedChannelId = channelId
channelName := c.GetString(ctxkey.ChannelName)
go processChannelRelayError(ctx, channelId, channelName, bizErr)
go processChannelRelayError(ctx, userId, channelId, channelName, bizErr)
}
if bizErr != nil {
if bizErr.StatusCode == http.StatusTooManyRequests {
@ -115,8 +117,8 @@ func shouldRetry(c *gin.Context, statusCode int) bool {
return true
}
func processChannelRelayError(ctx context.Context, channelId int, channelName string, err *model.ErrorWithStatusCode) {
logger.Errorf(ctx, "relay error (channel #%d): %s", channelId, err.Message)
func processChannelRelayError(ctx context.Context, userId int, channelId int, channelName string, err *model.ErrorWithStatusCode) {
logger.Errorf(ctx, "relay error (channel id %d, user id: %d): %s", channelId, userId, err.Message)
// https://platform.openai.com/docs/guides/error-codes/api-errors
if monitor.ShouldDisableChannel(&err.Error, err.StatusCode) {
monitor.DisableChannel(channelId, channelName, err.Message)


@ -6,6 +6,8 @@ import (
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/config"
"github.com/songquanpeng/one-api/common/ctxkey"
"github.com/songquanpeng/one-api/common/helper"
"github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/common/random"
"github.com/songquanpeng/one-api/model"
"net/http"
@ -109,6 +111,7 @@ func Logout(c *gin.Context) {
}
func Register(c *gin.Context) {
ctx := c.Request.Context()
if !config.RegisterEnabled {
c.JSON(http.StatusOK, gin.H{
"message": "管理员关闭了新用户注册",
@ -173,6 +176,28 @@ func Register(c *gin.Context) {
})
return
}
go func() {
err := user.ValidateAndFill()
if err != nil {
logger.Errorf(ctx, "user.ValidateAndFill failed: %w", err)
return
}
cleanToken := model.Token{
UserId: user.Id,
Name: "default",
Key: random.GenerateKey(),
CreatedTime: helper.GetTimestamp(),
AccessedTime: helper.GetTimestamp(),
ExpiredTime: -1,
RemainQuota: -1,
UnlimitedQuota: true,
}
err = cleanToken.Insert()
if err != nil {
logger.Errorf(ctx, "cleanToken.Insert failed: %w", err)
return
}
}()
c.JSON(http.StatusOK, gin.H{
"success": true,
"message": "",


@ -7,6 +7,7 @@ import (
"github.com/gin-contrib/sessions/cookie"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/client"
"github.com/songquanpeng/one-api/common/config"
"github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/controller"
@ -94,6 +95,7 @@ func main() {
logger.SysLog("metric enabled, will disable channel if too much request failed")
}
openai.InitTokenEncoders()
client.Init()
// Initialize HTTP server
server := gin.New()


@ -7,9 +7,9 @@ import (
"fmt"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/client"
"github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/client"
"github.com/songquanpeng/one-api/relay/constant"
"github.com/songquanpeng/one-api/relay/model"
"io"


@ -4,7 +4,7 @@ import (
"errors"
"fmt"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/relay/client"
"github.com/songquanpeng/one-api/common/client"
"github.com/songquanpeng/one-api/relay/meta"
"io"
"net/http"


@ -13,6 +13,7 @@ import (
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/meta"
"github.com/songquanpeng/one-api/relay/model"
"github.com/songquanpeng/one-api/relay/relaymode"
)
type Adaptor struct {
@ -24,7 +25,14 @@ func (a *Adaptor) Init(meta *meta.Meta) {
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
version := helper.AssignOrDefault(meta.Config.APIVersion, config.GeminiVersion)
action := "generateContent"
action := ""
switch meta.Mode {
case relaymode.Embeddings:
action = "batchEmbedContents"
default:
action = "generateContent"
}
if meta.IsStream {
action = "streamGenerateContent?alt=sse"
}
@ -41,7 +49,14 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
if request == nil {
return nil, errors.New("request is nil")
}
return ConvertRequest(*request), nil
switch relayMode {
case relaymode.Embeddings:
geminiEmbeddingRequest := ConvertEmbeddingRequest(*request)
return geminiEmbeddingRequest, nil
default:
geminiRequest := ConvertRequest(*request)
return geminiRequest, nil
}
}
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
@ -61,7 +76,12 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
err, responseText = StreamHandler(c, resp)
usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
} else {
err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
switch meta.Mode {
case relaymode.Embeddings:
err, usage = EmbeddingHandler(c, resp)
default:
err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
}
}
return
}


@ -4,5 +4,5 @@ package gemini
var ModelList = []string{
"gemini-pro", "gemini-1.0-pro-001", "gemini-1.5-pro",
"gemini-pro-vision", "gemini-1.0-pro-vision-001",
"gemini-pro-vision", "gemini-1.0-pro-vision-001", "embedding-001", "text-embedding-004",
}
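
With `embedding-001` and `text-embedding-004` added to the model list, a Gemini channel can now serve OpenAI-style embedding requests. A client-side example against a local One API instance (default port `3000` per the README above); the token is a placeholder:

```go
// Example request to One API's OpenAI-compatible embeddings endpoint using one
// of the Gemini embedding models added above. Base URL and token are placeholders.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	payload, _ := json.Marshal(map[string]any{
		"model": "text-embedding-004",
		"input": []string{"hello world"},
	})
	req, _ := http.NewRequest(http.MethodPost, "http://localhost:3000/v1/embeddings", bytes.NewReader(payload))
	req.Header.Set("Authorization", "Bearer sk-xxx")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```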


@ -134,6 +134,29 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
return &geminiRequest
}
func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *BatchEmbeddingRequest {
inputs := request.ParseInput()
requests := make([]EmbeddingRequest, len(inputs))
model := fmt.Sprintf("models/%s", request.Model)
for i, input := range inputs {
requests[i] = EmbeddingRequest{
Model: model,
Content: ChatContent{
Parts: []Part{
{
Text: input,
},
},
},
}
}
return &BatchEmbeddingRequest{
Requests: requests,
}
}
type ChatResponse struct {
Candidates []ChatCandidate `json:"candidates"`
PromptFeedback ChatPromptFeedback `json:"promptFeedback"`
@ -230,6 +253,23 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatC
return &response
}
func embeddingResponseGemini2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse {
openAIEmbeddingResponse := openai.EmbeddingResponse{
Object: "list",
Data: make([]openai.EmbeddingResponseItem, 0, len(response.Embeddings)),
Model: "gemini-embedding",
Usage: model.Usage{TotalTokens: 0},
}
for _, item := range response.Embeddings {
openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{
Object: `embedding`,
Index: 0,
Embedding: item.Values,
})
}
return &openAIEmbeddingResponse
}
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
responseText := ""
scanner := bufio.NewScanner(resp.Body)
@ -337,3 +377,39 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
_, err = c.Writer.Write(jsonResponse)
return nil, &usage
}
func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
var geminiEmbeddingResponse EmbeddingResponse
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
err = json.Unmarshal(responseBody, &geminiEmbeddingResponse)
if err != nil {
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
if geminiEmbeddingResponse.Error != nil {
return &model.ErrorWithStatusCode{
Error: model.Error{
Message: geminiEmbeddingResponse.Error.Message,
Type: "gemini_error",
Param: "",
Code: geminiEmbeddingResponse.Error.Code,
},
StatusCode: resp.StatusCode,
}, nil
}
fullTextResponse := embeddingResponseGemini2OpenAI(&geminiEmbeddingResponse)
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
}
c.Writer.Header().Set("Content-Type", "application/json")
c.Writer.WriteHeader(resp.StatusCode)
_, err = c.Writer.Write(jsonResponse)
return nil, &fullTextResponse.Usage
}
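
For illustration, a standalone sketch of the payload shape `ConvertEmbeddingRequest` builds for a two-input OpenAI-style request, using trimmed copies of the `gemini` package types shown below; the real structs carry more fields:

```go
// Trimmed mirror of the gemini package types, marshaled to show the
// batchEmbedContents payload that ConvertEmbeddingRequest produces.
package main

import (
	"encoding/json"
	"fmt"
)

type Part struct {
	Text string `json:"text,omitempty"`
}

type ChatContent struct {
	Parts []Part `json:"parts"`
}

type EmbeddingRequest struct {
	Model   string      `json:"model"`
	Content ChatContent `json:"content"`
}

type BatchEmbeddingRequest struct {
	Requests []EmbeddingRequest `json:"requests"`
}

func main() {
	req := BatchEmbeddingRequest{
		Requests: []EmbeddingRequest{
			{Model: "models/text-embedding-004", Content: ChatContent{Parts: []Part{{Text: "hello"}}}},
			{Model: "models/text-embedding-004", Content: ChatContent{Parts: []Part{{Text: "world"}}}},
		},
	}
	out, _ := json.MarshalIndent(req, "", "  ")
	// GetRequestURL above routes this to .../models/<model>:batchEmbedContents.
	fmt.Println(string(out))
}
```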


@ -7,6 +7,33 @@ type ChatRequest struct {
Tools []ChatTools `json:"tools,omitempty"`
}
type EmbeddingRequest struct {
Model string `json:"model"`
Content ChatContent `json:"content"`
TaskType string `json:"taskType,omitempty"`
Title string `json:"title,omitempty"`
OutputDimensionality int `json:"outputDimensionality,omitempty"`
}
type BatchEmbeddingRequest struct {
Requests []EmbeddingRequest `json:"requests"`
}
type EmbeddingData struct {
Values []float64 `json:"values"`
}
type EmbeddingResponse struct {
Embeddings []EmbeddingData `json:"embeddings"`
Error *Error `json:"error,omitempty"`
}
type Error struct {
Code int `json:"code,omitempty"`
Message string `json:"message,omitempty"`
Status string `json:"status,omitempty"`
}
type InlineData struct {
MimeType string `json:"mimeType"`
Data string `json:"data"`


@ -24,6 +24,10 @@ func InitTokenEncoders() {
logger.FatalLog(fmt.Sprintf("failed to get gpt-3.5-turbo token encoder: %s", err.Error()))
}
defaultTokenEncoder = gpt35TokenEncoder
gpt4oTokenEncoder, err := tiktoken.EncodingForModel("gpt-4o")
if err != nil {
logger.FatalLog(fmt.Sprintf("failed to get gpt-4o token encoder: %s", err.Error()))
}
gpt4TokenEncoder, err := tiktoken.EncodingForModel("gpt-4")
if err != nil {
logger.FatalLog(fmt.Sprintf("failed to get gpt-4 token encoder: %s", err.Error()))
@ -31,6 +35,8 @@ func InitTokenEncoders() {
for model := range billingratio.ModelRatio {
if strings.HasPrefix(model, "gpt-3.5") {
tokenEncoderMap[model] = gpt35TokenEncoder
} else if strings.HasPrefix(model, "gpt-4o") {
tokenEncoderMap[model] = gpt4oTokenEncoder
} else if strings.HasPrefix(model, "gpt-4") {
tokenEncoderMap[model] = gpt4TokenEncoder
} else {
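
A hypothetical standalone sketch of what the new branch enables: counting tokens with the `gpt-4o` encoder through a tiktoken call of the same shape as `InitTokenEncoders` uses. The module path is an assumption about the project's dependency and may differ:

```go
// Counts tokens with the gpt-4o encoder. The import path is assumed; One API's
// own code looks encoders up via tokenEncoderMap instead of calling this directly.
package main

import (
	"fmt"

	"github.com/pkoukk/tiktoken-go"
)

func main() {
	enc, err := tiktoken.EncodingForModel("gpt-4o")
	if err != nil {
		panic(err)
	}
	tokens := enc.Encode("Hello, One API!", nil, nil)
	fmt.Printf("gpt-4o token count: %d\n", len(tokens))
}
```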


@ -49,3 +49,8 @@ var ImagePromptLengthLimitations = map[string]int{
"wanx-v1": 4000,
"cogview-3": 833,
}
var ImageOriginModelName = map[string]string{
"ali-stable-diffusion-xl": "stable-diffusion-xl",
"ali-stable-diffusion-v1.5": "stable-diffusion-v1.5",
}


@ -1,24 +0,0 @@
package client
import (
"github.com/songquanpeng/one-api/common/config"
"net/http"
"time"
)
var HTTPClient *http.Client
var ImpatientHTTPClient *http.Client
func init() {
if config.RelayTimeout == 0 {
HTTPClient = &http.Client{}
} else {
HTTPClient = &http.Client{
Timeout: time.Duration(config.RelayTimeout) * time.Second,
}
}
ImpatientHTTPClient = &http.Client{
Timeout: 5 * time.Second,
}
}


@ -9,6 +9,7 @@ import (
"fmt"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/client"
"github.com/songquanpeng/one-api/common/config"
"github.com/songquanpeng/one-api/common/ctxkey"
"github.com/songquanpeng/one-api/common/logger"
@ -17,7 +18,6 @@ import (
"github.com/songquanpeng/one-api/relay/billing"
billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
"github.com/songquanpeng/one-api/relay/channeltype"
"github.com/songquanpeng/one-api/relay/client"
"github.com/songquanpeng/one-api/relay/meta"
relaymodel "github.com/songquanpeng/one-api/relay/model"
"github.com/songquanpeng/one-api/relay/relaymode"


@ -55,6 +55,11 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
return openai.ErrorWrapper(err, "get_image_cost_ratio_failed", http.StatusInternalServerError)
}
imageModel := imageRequest.Model
// Convert the original image model
imageRequest.Model, _ = getMappedModelName(imageRequest.Model, billingratio.ImageOriginModelName)
c.Set("response_format", imageRequest.ResponseFormat)
var requestBody io.Reader
if isModelMapped || meta.ChannelType == channeltype.Azure { // make Azure channel request body
jsonStr, err := json.Marshal(imageRequest)
@ -89,7 +94,7 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
requestBody = bytes.NewBuffer(jsonStr)
}
modelRatio := billingratio.GetModelRatio(imageRequest.Model)
modelRatio := billingratio.GetModelRatio(imageModel)
groupRatio := billingratio.GetGroupRatio(meta.Group)
ratio := modelRatio * groupRatio
userQuota, err := model.CacheGetUserQuota(ctx, meta.UserId)
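
The hunk above fixes billing when the image model name gets converted: the ratio lookup now uses the name captured before the conversion, while the converted name goes into the outgoing request. `getMappedModelName` is an existing helper not shown in this diff; a hedged sketch of an equivalent lookup:

```go
// Hedged sketch only: getMappedModelName is an existing One API helper not
// shown in this diff. The lookup below illustrates the same idea using the
// ImageOriginModelName table added above.
package main

import "fmt"

var imageOriginModelName = map[string]string{
	"ali-stable-diffusion-xl":   "stable-diffusion-xl",
	"ali-stable-diffusion-v1.5": "stable-diffusion-v1.5",
}

// mapModelName returns the converted name and whether a mapping applied.
func mapModelName(name string, mapping map[string]string) (string, bool) {
	if mapped, ok := mapping[name]; ok {
		return mapped, true
	}
	return name, false
}

func main() {
	requested := "ali-stable-diffusion-xl"
	converted, _ := mapModelName(requested, imageOriginModelName)
	// Billing keeps the pre-conversion name; the converted name is what the
	// relay forwards, mirroring the imageModel / imageRequest.Model split above.
	fmt.Printf("bill as %q, relay as %q\n", requested, converted)
}
```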


@ -26,7 +26,7 @@
"notistack": "^3.0.1",
"prop-types": "^15.8.1",
"react": "^18.2.0",
"react-apexcharts": "^1.4.0",
"react-apexcharts": "1.4.0",
"react-device-detect": "^2.2.2",
"react-dom": "^18.2.0",
"react-perfect-scrollbar": "^1.5.8",


@ -11,12 +11,18 @@ export const CHANNEL_OPTIONS = {
value: 14,
color: 'primary'
},
// 33: {
// key: 33,
// text: 'AWS Claude',
// value: 33,
// color: 'primary'
// },
33: {
key: 33,
text: 'AWS Claude',
value: 33,
color: 'primary'
},
37: {
key: 37,
text: 'Cloudflare',
value: 37,
color: 'success'
},
3: {
key: 3,
text: 'Azure OpenAI',
@ -119,12 +125,12 @@ export const CHANNEL_OPTIONS = {
value: 32,
color: 'primary'
},
// 34: {
// key: 34,
// text: 'Coze',
// value: 34,
// color: 'primary'
// },
34: {
key: 34,
text: 'Coze',
value: 34,
color: 'primary'
},
35: {
key: 35,
text: 'Cohere',


@ -1,24 +1,56 @@
import { closeSnackbar } from 'notistack';
import { IconX } from '@tabler/icons-react';
import { IconButton } from '@mui/material';
const action = (snackbarId) => (
<>
<IconButton
onClick={() => {
closeSnackbar(snackbarId);
}}
>
<IconX stroke={1.5} size="1.25rem" />
</IconButton>
</>
);
export const snackbarConstants = {
Common: {
ERROR: {
variant: 'error',
autoHideDuration: 5000
autoHideDuration: 5000,
preventDuplicate: true,
action
},
WARNING: {
variant: 'warning',
autoHideDuration: 10000
autoHideDuration: 10000,
preventDuplicate: true,
action
},
SUCCESS: {
variant: 'success',
autoHideDuration: 1500
autoHideDuration: 1500,
preventDuplicate: true,
action
},
INFO: {
variant: 'info',
autoHideDuration: 3000
autoHideDuration: 3000,
preventDuplicate: true,
action
},
NOTICE: {
variant: 'info',
autoHideDuration: 7000
autoHideDuration: 20000,
preventDuplicate: true,
action
},
COPY: {
variant: 'copy',
persist: true,
preventDuplicate: true,
allowDownload: true,
action
}
},
Mobile: {


@ -193,3 +193,40 @@ export function removeTrailingSlash(url) {
return url;
}
}
let channelModels = undefined;
export async function loadChannelModels() {
const res = await API.get('/api/models');
const { success, data } = res.data;
if (!success) {
return;
}
channelModels = data;
localStorage.setItem('channel_models', JSON.stringify(data));
}
export function getChannelModels(type) {
if (channelModels !== undefined && type in channelModels) {
return channelModels[type];
}
let models = localStorage.getItem('channel_models');
if (!models) {
return [];
}
channelModels = JSON.parse(models);
if (type in channelModels) {
return channelModels[type];
}
return [];
}
export function copy(text, name = '') {
try {
navigator.clipboard.writeText(text);
} catch (error) {
text = `复制${name}失败,请手动复制:<br /><br />${text}`;
enqueueSnackbar(<SnackbarHTMLContent htmlContent={text} />, getSnackbarOptions('COPY'));
return;
}
showSuccess(`复制${name}成功!`);
}


@ -1,22 +1,22 @@
import { useState, useEffect } from "react";
import { useSearchParams } from "react-router-dom";
import { useState, useEffect } from 'react';
import { useSearchParams } from 'react-router-dom';
// material-ui
import { Button, Stack, Typography, Alert } from "@mui/material";
import { Button, Stack, Typography, Alert } from '@mui/material';
// assets
import { showError, showInfo } from "utils/common";
import { API } from "utils/api";
import { showError, copy } from 'utils/common';
import { API } from 'utils/api';
// ===========================|| FIREBASE - REGISTER ||=========================== //
const ResetPasswordForm = () => {
const [searchParams] = useSearchParams();
const [inputs, setInputs] = useState({
email: "",
token: "",
email: '',
token: ''
});
const [newPassword, setNewPassword] = useState("");
const [newPassword, setNewPassword] = useState('');
const submit = async () => {
const res = await API.post(`/api/user/reset`, inputs);
@ -24,31 +24,25 @@ const ResetPasswordForm = () => {
if (success) {
let password = res.data.data;
setNewPassword(password);
navigator.clipboard.writeText(password);
showInfo(`新密码已复制到剪贴板:${password}`);
copy(password, '新密码');
} else {
showError(message);
}
};
useEffect(() => {
let email = searchParams.get("email");
let token = searchParams.get("token");
let email = searchParams.get('email');
let token = searchParams.get('token');
setInputs({
token,
email,
email
});
}, []);
return (
<Stack
spacing={3}
padding={"24px"}
justifyContent={"center"}
alignItems={"center"}
>
<Stack spacing={3} padding={'24px'} justifyContent={'center'} alignItems={'center'}>
{!inputs.email || !inputs.token ? (
<Typography variant="h3" sx={{ textDecoration: "none" }}>
<Typography variant="h3" sx={{ textDecoration: 'none' }}>
无效的链接
</Typography>
) : newPassword ? (
@ -57,14 +51,7 @@ const ResetPasswordForm = () => {
请登录后及时修改密码
</Alert>
) : (
<Button
fullWidth
onClick={submit}
size="large"
type="submit"
variant="contained"
color="primary"
>
<Button fullWidth onClick={submit} size="large" type="submit" variant="contained" color="primary">
点击重置密码
</Button>
)}


@ -1,9 +1,9 @@
import PropTypes from "prop-types";
import { useState, useEffect } from "react";
import { CHANNEL_OPTIONS } from "constants/ChannelConstants";
import { useTheme } from "@mui/material/styles";
import { API } from "utils/api";
import { showError, showSuccess } from "utils/common";
import PropTypes from 'prop-types';
import { useState, useEffect } from 'react';
import { CHANNEL_OPTIONS } from 'constants/ChannelConstants';
import { useTheme } from '@mui/material/styles';
import { API } from 'utils/api';
import { showError, showSuccess, getChannelModels } from 'utils/common';
import {
Dialog,
DialogTitle,
@ -22,15 +22,15 @@ import {
Autocomplete,
FormHelperText,
Switch,
Checkbox,
} from "@mui/material";
Checkbox
} from '@mui/material';
import { Formik } from "formik";
import * as Yup from "yup";
import { defaultConfig, typeConfig } from "../type/Config"; //typeConfig
import { createFilterOptions } from "@mui/material/Autocomplete";
import CheckBoxOutlineBlankIcon from "@mui/icons-material/CheckBoxOutlineBlank";
import CheckBoxIcon from "@mui/icons-material/CheckBox";
import { Formik } from 'formik';
import * as Yup from 'yup';
import { defaultConfig, typeConfig } from '../type/Config'; //typeConfig
import { createFilterOptions } from '@mui/material/Autocomplete';
import CheckBoxOutlineBlankIcon from '@mui/icons-material/CheckBoxOutlineBlank';
import CheckBoxIcon from '@mui/icons-material/CheckBox';
const icon = <CheckBoxOutlineBlankIcon fontSize="small" />;
const checkedIcon = <CheckBoxIcon fontSize="small" />;
@ -38,38 +38,34 @@ const checkedIcon = <CheckBoxIcon fontSize="small" />;
const filter = createFilterOptions();
const validationSchema = Yup.object().shape({
is_edit: Yup.boolean(),
name: Yup.string().required("名称 不能为空"),
type: Yup.number().required("渠道 不能为空"),
key: Yup.string().when("is_edit", {
is: false,
then: Yup.string().required("密钥 不能为空"),
name: Yup.string().required('名称 不能为空'),
type: Yup.number().required('渠道 不能为空'),
key: Yup.string().when(['is_edit', 'type'], {
is: (is_edit, type) => !is_edit && type !== 33,
then: Yup.string().required('密钥 不能为空')
}),
other: Yup.string(),
models: Yup.array().min(1, "模型 不能为空"),
groups: Yup.array().min(1, "用户组 不能为空"),
base_url: Yup.string().when("type", {
models: Yup.array().min(1, '模型 不能为空'),
groups: Yup.array().min(1, '用户组 不能为空'),
base_url: Yup.string().when('type', {
is: (value) => [3, 8].includes(value),
then: Yup.string().required("渠道API地址 不能为空"), // base_url 是必需的
otherwise: Yup.string(), // 在其他情况下base_url 可以是任意字符串
then: Yup.string().required('渠道API地址 不能为空'), // base_url 是必需的
otherwise: Yup.string() // 在其他情况下base_url 可以是任意字符串
}),
model_mapping: Yup.string().test(
"is-json",
"必须是有效的JSON字符串",
function (value) {
try {
if (value === "" || value === null || value === undefined) {
return true;
}
const parsedValue = JSON.parse(value);
if (typeof parsedValue === "object") {
return true;
}
} catch (e) {
return false;
model_mapping: Yup.string().test('is-json', '必须是有效的JSON字符串', function (value) {
try {
if (value === '' || value === null || value === undefined) {
return true;
}
const parsedValue = JSON.parse(value);
if (typeof parsedValue === 'object') {
return true;
}
} catch (e) {
return false;
}
),
return false;
})
});
const EditModal = ({ open, channelId, onCancel, onOk }) => {
@ -81,12 +77,13 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
const [groupOptions, setGroupOptions] = useState([]);
const [modelOptions, setModelOptions] = useState([]);
const [batchAdd, setBatchAdd] = useState(false);
const [basicModels, setBasicModels] = useState([]);
const initChannel = (typeValue) => {
if (typeConfig[typeValue]?.inputLabel) {
setInputLabel({
...defaultConfig.inputLabel,
...typeConfig[typeValue].inputLabel,
...typeConfig[typeValue].inputLabel
});
} else {
setInputLabel(defaultConfig.inputLabel);
@ -95,7 +92,7 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
if (typeConfig[typeValue]?.prompt) {
setInputPrompt({
...defaultConfig.prompt,
...typeConfig[typeValue].prompt,
...typeConfig[typeValue].prompt
});
} else {
setInputPrompt(defaultConfig.prompt);
@ -104,40 +101,14 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
return typeConfig[typeValue]?.input;
};
const handleTypeChange = (setFieldValue, typeValue, values) => {
const newInput = initChannel(typeValue);
if (newInput) {
Object.keys(newInput).forEach((key) => {
if (
(!Array.isArray(values[key]) &&
values[key] !== null &&
values[key] !== undefined &&
values[key] !== "") ||
(Array.isArray(values[key]) && values[key].length > 0)
) {
return;
}
if (key === "models") {
setFieldValue(key, initialModel(newInput[key]));
return;
}
setFieldValue(key, newInput[key]);
});
initChannel(typeValue);
let localModels = getChannelModels(typeValue);
setBasicModels(localModels);
if (localModels.length > 0 && Array.isArray(values['models']) && values['models'].length == 0) {
setFieldValue('models', initialModel(localModels));
}
};
const basicModels = (channelType) => {
let modelGroup =
typeConfig[channelType]?.modelGroup || defaultConfig.modelGroup;
// 循环 modelOptions找到 modelGroup 对应的模型
let modelList = [];
modelOptions.forEach((model) => {
if (model.group === modelGroup) {
modelList.push(model);
}
});
return modelList;
setFieldValue('config', {});
};
const fetchGroups = async () => {
@ -155,7 +126,7 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
const { data } = res.data;
data.forEach((item) => {
if (!item.owned_by) {
item.owned_by = "未知";
item.owned_by = '未知';
}
});
// 先对data排序
@ -171,7 +142,7 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
data.map((model) => {
return {
id: model.id,
group: model.owned_by,
group: model.owned_by
};
})
);
@ -182,33 +153,41 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
const submit = async (values, { setErrors, setStatus, setSubmitting }) => {
setSubmitting(true);
if (values.base_url && values.base_url.endsWith("/")) {
if (values.base_url && values.base_url.endsWith('/')) {
values.base_url = values.base_url.slice(0, values.base_url.length - 1);
}
if (values.type === 3 && values.other === "") {
values.other = "2023-09-01-preview";
if (values.type === 3 && values.other === '') {
values.other = '2023-09-01-preview';
}
if (values.type === 18 && values.other === "") {
values.other = "v2.1";
if (values.type === 18 && values.other === '') {
values.other = 'v2.1';
}
if (values.key === '') {
if (values.config.ak !== '' && values.config.sk !== '' && values.config.region !== '') {
values.key = `${values.config.ak}|${values.config.sk}|${values.config.region}`;
}
}
let res;
const modelsStr = values.models.map((model) => model.id).join(",");
values.group = values.groups.join(",");
const modelsStr = values.models.map((model) => model.id).join(',');
const configStr = JSON.stringify(values.config);
values.group = values.groups.join(',');
if (channelId) {
res = await API.put(`/api/channel/`, {
...values,
id: parseInt(channelId),
models: modelsStr,
config: configStr
});
} else {
res = await API.post(`/api/channel/`, { ...values, models: modelsStr });
res = await API.post(`/api/channel/`, { ...values, models: modelsStr, config: configStr });
}
const { success, message } = res.data;
if (success) {
if (channelId) {
showSuccess("渠道更新成功!");
showSuccess('渠道更新成功!');
} else {
showSuccess("渠道创建成功!");
showSuccess('渠道创建成功!');
}
setSubmitting(false);
setStatus({ success: true });
@ -226,15 +205,15 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
}
// 如果 channelModel 是一个字符串
if (typeof channelModel === "string") {
channelModel = channelModel.split(",");
if (typeof channelModel === 'string') {
channelModel = channelModel.split(',');
}
let modelList = channelModel.map((model) => {
const modelOption = modelOptions.find((option) => option.id === model);
if (modelOption) {
return modelOption;
}
return { id: model, group: "自定义:点击或回车输入" };
return { id: model, group: '自定义:点击或回车输入' };
});
return modelList;
}
@ -243,24 +222,24 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
let res = await API.get(`/api/channel/${channelId}`);
const { success, message, data } = res.data;
if (success) {
if (data.models === "") {
if (data.models === '') {
data.models = [];
} else {
data.models = initialModel(data.models);
}
if (data.group === "") {
if (data.group === '') {
data.groups = [];
} else {
data.groups = data.group.split(",");
data.groups = data.group.split(',');
}
if (data.model_mapping !== "") {
data.model_mapping = JSON.stringify(
JSON.parse(data.model_mapping),
null,
2
);
if (data.model_mapping !== '') {
data.model_mapping = JSON.stringify(JSON.parse(data.model_mapping), null, 2);
}
data.base_url = data.base_url ?? "";
if (data.config !== '') {
data.config = JSON.parse(data.config);
}
data.base_url = data.base_url ?? '';
data.is_edit = true;
initChannel(data.type);
setInitialInput(data);
@ -286,45 +265,25 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
}, [channelId]);
return (
<Dialog open={open} onClose={onCancel} fullWidth maxWidth={"md"}>
<Dialog open={open} onClose={onCancel} fullWidth maxWidth={'md'}>
<DialogTitle
sx={{
margin: "0px",
margin: '0px',
fontWeight: 700,
lineHeight: "1.55556",
padding: "24px",
fontSize: "1.125rem",
lineHeight: '1.55556',
padding: '24px',
fontSize: '1.125rem'
}}
>
{channelId ? "编辑渠道" : "新建渠道"}
{channelId ? '编辑渠道' : '新建渠道'}
</DialogTitle>
<Divider />
<DialogContent>
<Formik
initialValues={initialInput}
enableReinitialize
validationSchema={validationSchema}
onSubmit={submit}
>
{({
errors,
handleBlur,
handleChange,
handleSubmit,
isSubmitting,
touched,
values,
setFieldValue,
}) => (
<Formik initialValues={initialInput} enableReinitialize validationSchema={validationSchema} onSubmit={submit}>
{({ errors, handleBlur, handleChange, handleSubmit, isSubmitting, touched, values, setFieldValue }) => (
<form noValidate onSubmit={handleSubmit}>
<FormControl
fullWidth
error={Boolean(touched.type && errors.type)}
sx={{ ...theme.typography.otherInput }}
>
<InputLabel htmlFor="channel-type-label">
{inputLabel.type}
</InputLabel>
<FormControl fullWidth error={Boolean(touched.type && errors.type)} sx={{ ...theme.typography.otherInput }}>
<InputLabel htmlFor="channel-type-label">{inputLabel.type}</InputLabel>
<Select
id="channel-type-label"
label={inputLabel.type}
@ -338,9 +297,9 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
MenuProps={{
PaperProps: {
style: {
maxHeight: 200,
},
},
maxHeight: 200
}
}
}}
>
{Object.values(CHANNEL_OPTIONS)
@ -360,21 +319,12 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
{errors.type}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-type-label">
{" "}
{inputPrompt.type}{" "}
</FormHelperText>
<FormHelperText id="helper-tex-channel-type-label"> {inputPrompt.type} </FormHelperText>
)}
</FormControl>
<FormControl
fullWidth
error={Boolean(touched.name && errors.name)}
sx={{ ...theme.typography.otherInput }}
>
<InputLabel htmlFor="channel-name-label">
{inputLabel.name}
</InputLabel>
<FormControl fullWidth error={Boolean(touched.name && errors.name)} sx={{ ...theme.typography.otherInput }}>
<InputLabel htmlFor="channel-name-label">{inputLabel.name}</InputLabel>
<OutlinedInput
id="channel-name-label"
label={inputLabel.name}
@ -383,7 +333,7 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
name="name"
onBlur={handleBlur}
onChange={handleChange}
inputProps={{ autoComplete: "name" }}
inputProps={{ autoComplete: 'name' }}
aria-describedby="helper-text-channel-name-label"
/>
{touched.name && errors.name ? (
@ -391,21 +341,12 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
{errors.name}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-name-label">
{" "}
{inputPrompt.name}{" "}
</FormHelperText>
<FormHelperText id="helper-tex-channel-name-label"> {inputPrompt.name} </FormHelperText>
)}
</FormControl>
<FormControl
fullWidth
error={Boolean(touched.base_url && errors.base_url)}
sx={{ ...theme.typography.otherInput }}
>
<InputLabel htmlFor="channel-base_url-label">
{inputLabel.base_url}
</InputLabel>
<FormControl fullWidth error={Boolean(touched.base_url && errors.base_url)} sx={{ ...theme.typography.otherInput }}>
<InputLabel htmlFor="channel-base_url-label">{inputLabel.base_url}</InputLabel>
<OutlinedInput
id="channel-base_url-label"
label={inputLabel.base_url}
@ -422,22 +363,13 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
{errors.base_url}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-base_url-label">
{" "}
{inputPrompt.base_url}{" "}
</FormHelperText>
<FormHelperText id="helper-tex-channel-base_url-label"> {inputPrompt.base_url} </FormHelperText>
)}
</FormControl>
{inputPrompt.other && (
<FormControl
fullWidth
error={Boolean(touched.other && errors.other)}
sx={{ ...theme.typography.otherInput }}
>
<InputLabel htmlFor="channel-other-label">
{inputLabel.other}
</InputLabel>
<FormControl fullWidth error={Boolean(touched.other && errors.other)} sx={{ ...theme.typography.otherInput }}>
<InputLabel htmlFor="channel-other-label">{inputLabel.other}</InputLabel>
<OutlinedInput
id="channel-other-label"
label={inputLabel.other}
@ -454,10 +386,7 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
{errors.other}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-other-label">
{" "}
{inputPrompt.other}{" "}
</FormHelperText>
<FormHelperText id="helper-tex-channel-other-label"> {inputPrompt.other} </FormHelperText>
)}
</FormControl>
)}
@ -471,22 +400,15 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
onChange={(e, value) => {
const event = {
target: {
name: "groups",
value: value,
},
name: 'groups',
value: value
}
};
handleChange(event);
}}
onBlur={handleBlur}
filterSelectedOptions
renderInput={(params) => (
<TextField
{...params}
name="groups"
error={Boolean(errors.groups)}
label={inputLabel.groups}
/>
)}
renderInput={(params) => <TextField {...params} name="groups" error={Boolean(errors.groups)} label={inputLabel.groups} />}
aria-describedby="helper-text-channel-groups-label"
/>
{errors.groups ? (
@ -494,10 +416,7 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
{errors.groups}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-groups-label">
{" "}
{inputPrompt.groups}{" "}
</FormHelperText>
<FormHelperText id="helper-tex-channel-groups-label"> {inputPrompt.groups} </FormHelperText>
)}
</FormControl>
@ -511,30 +430,19 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
onChange={(e, value) => {
const event = {
target: {
name: "models",
value: value.map((item) =>
typeof item === "string"
? { id: item, group: "自定义:点击或回车输入" }
: item
),
},
name: 'models',
value: value.map((item) => (typeof item === 'string' ? { id: item, group: '自定义:点击或回车输入' } : item))
}
};
handleChange(event);
}}
onBlur={handleBlur}
// filterSelectedOptions
disableCloseOnSelect
renderInput={(params) => (
<TextField
{...params}
name="models"
error={Boolean(errors.models)}
label={inputLabel.models}
/>
)}
renderInput={(params) => <TextField {...params} name="models" error={Boolean(errors.models)} label={inputLabel.models} />}
groupBy={(option) => option.group}
getOptionLabel={(option) => {
if (typeof option === "string") {
if (typeof option === 'string') {
return option;
}
if (option.inputValue) {
@ -545,25 +453,18 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
filterOptions={(options, params) => {
const filtered = filter(options, params);
const { inputValue } = params;
const isExisting = options.some(
(option) => inputValue === option.id
);
if (inputValue !== "" && !isExisting) {
const isExisting = options.some((option) => inputValue === option.id);
if (inputValue !== '' && !isExisting) {
filtered.push({
id: inputValue,
group: "自定义:点击或回车输入",
group: '自定义:点击或回车输入'
});
}
return filtered;
}}
renderOption={(props, option, { selected }) => (
<li {...props}>
<Checkbox
icon={icon}
checkedIcon={checkedIcon}
style={{ marginRight: 8 }}
checked={selected}
/>
<Checkbox icon={icon} checkedIcon={checkedIcon} style={{ marginRight: 8 }} checked={selected} />
{option.id}
</li>
)}
@ -573,103 +474,104 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
{errors.models}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-models-label">
{" "}
{inputPrompt.models}{" "}
</FormHelperText>
<FormHelperText id="helper-tex-channel-models-label"> {inputPrompt.models} </FormHelperText>
)}
</FormControl>
<Container
sx={{
textAlign: "right",
textAlign: 'right'
}}
>
<ButtonGroup
variant="outlined"
aria-label="small outlined primary button group"
>
<ButtonGroup variant="outlined" aria-label="small outlined primary button group">
<Button
onClick={() => {
setFieldValue("models", basicModels(values.type));
setFieldValue('models', initialModel(basicModels));
}}
>
填入渠道支持模型
填入相关模型
</Button>
<Button
onClick={() => {
setFieldValue("models", modelOptions);
setFieldValue('models', modelOptions);
}}
>
填入所有模型
</Button>
</ButtonGroup>
</Container>
<FormControl
fullWidth
error={Boolean(touched.key && errors.key)}
sx={{ ...theme.typography.otherInput }}
>
{!batchAdd ? (
<>
<InputLabel htmlFor="channel-key-label">
{inputLabel.key}
</InputLabel>
<OutlinedInput
id="channel-key-label"
label={inputLabel.key}
type="text"
value={values.key}
name="key"
onBlur={handleBlur}
onChange={handleChange}
inputProps={{}}
aria-describedby="helper-text-channel-key-label"
/>
</>
) : (
<TextField
multiline
id="channel-key-label"
label={inputLabel.key}
value={values.key}
name="key"
onBlur={handleBlur}
onChange={handleChange}
aria-describedby="helper-text-channel-key-label"
minRows={5}
placeholder={inputPrompt.key + ",一行一个密钥"}
/>
)}
{inputLabel.key && (
<>
<FormControl fullWidth error={Boolean(touched.key && errors.key)} sx={{ ...theme.typography.otherInput }}>
{!batchAdd ? (
<>
<InputLabel htmlFor="channel-key-label">{inputLabel.key}</InputLabel>
<OutlinedInput
id="channel-key-label"
label={inputLabel.key}
type="text"
value={values.key}
name="key"
onBlur={handleBlur}
onChange={handleChange}
inputProps={{}}
aria-describedby="helper-text-channel-key-label"
/>
</>
) : (
<TextField
multiline
id="channel-key-label"
label={inputLabel.key}
value={values.key}
name="key"
onBlur={handleBlur}
onChange={handleChange}
aria-describedby="helper-text-channel-key-label"
minRows={5}
placeholder={inputPrompt.key + ',一行一个密钥'}
/>
)}
{touched.key && errors.key ? (
<FormHelperText error id="helper-tex-channel-key-label">
{errors.key}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-key-label">
{" "}
{inputPrompt.key}{" "}
</FormHelperText>
)}
</FormControl>
{channelId === 0 && (
<Container
sx={{
textAlign: "right",
}}
>
<Switch
checked={batchAdd}
onChange={(e) => setBatchAdd(e.target.checked)}
/>
批量添加
</Container>
{touched.key && errors.key ? (
<FormHelperText error id="helper-tex-channel-key-label">
{errors.key}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-key-label"> {inputPrompt.key} </FormHelperText>
)}
</FormControl>
{channelId === 0 && (
<Container
sx={{
textAlign: 'right'
}}
>
<Switch checked={batchAdd} onChange={(e) => setBatchAdd(e.target.checked)} />
批量添加
</Container>
)}
</>
)}
<FormControl
fullWidth
error={Boolean(touched.model_mapping && errors.model_mapping)}
sx={{ ...theme.typography.otherInput }}
>
{inputLabel.config &&
Object.keys(inputLabel.config).map((configName) => {
return (
<FormControl key={'config.' + configName} fullWidth sx={{ ...theme.typography.otherInput }}>
<TextField
multiline
key={'config.' + configName}
name={'config.' + configName}
value={values.config?.[configName] || ''}
label={configName}
placeholder={inputPrompt.config[configName]}
onChange={handleChange}
/>
<FormHelperText id={`helper-tex-config.${configName}-label`}> {inputPrompt.config[configName]} </FormHelperText>
</FormControl>
);
})}
<FormControl fullWidth error={Boolean(touched.model_mapping && errors.model_mapping)} sx={{ ...theme.typography.otherInput }}>
{/* <InputLabel htmlFor="channel-model_mapping-label">{inputLabel.model_mapping}</InputLabel> */}
<TextField
multiline
@ -684,28 +586,16 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
placeholder={inputPrompt.model_mapping}
/>
{touched.model_mapping && errors.model_mapping ? (
<FormHelperText
error
id="helper-tex-channel-model_mapping-label"
>
<FormHelperText error id="helper-tex-channel-model_mapping-label">
{errors.model_mapping}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-model_mapping-label">
{" "}
{inputPrompt.model_mapping}{" "}
</FormHelperText>
<FormHelperText id="helper-tex-channel-model_mapping-label"> {inputPrompt.model_mapping} </FormHelperText>
)}
</FormControl>
<DialogActions>
<Button onClick={onCancel}>取消</Button>
<Button
disableElevation
disabled={isSubmitting}
type="submit"
variant="contained"
color="primary"
>
<Button disableElevation disabled={isSubmitting} type="submit" variant="contained" color="primary">
提交
</Button>
</DialogActions>
@ -723,5 +613,5 @@ EditModal.propTypes = {
open: PropTypes.bool,
channelId: PropTypes.number,
onCancel: PropTypes.func,
onOk: PropTypes.func,
onOk: PropTypes.func
};


@ -1,20 +1,20 @@
import PropTypes from "prop-types";
import { Tooltip, Stack, Container } from "@mui/material";
import Label from "ui-component/Label";
import { styled } from "@mui/material/styles";
import { showSuccess } from "utils/common";
import PropTypes from 'prop-types';
import { Tooltip, Stack, Container } from '@mui/material';
import Label from 'ui-component/Label';
import { styled } from '@mui/material/styles';
import { showSuccess, copy } from 'utils/common';
const TooltipContainer = styled(Container)({
maxHeight: "250px",
overflow: "auto",
"&::-webkit-scrollbar": {
width: "0px", // Set the width to 0 to hide the scrollbar
},
maxHeight: '250px',
overflow: 'auto',
'&::-webkit-scrollbar': {
width: '0px' // Set the width to 0 to hide the scrollbar
}
});
const NameLabel = ({ name, models }) => {
let modelMap = [];
modelMap = models.split(",");
modelMap = models.split(',');
modelMap.sort();
return (
@ -28,8 +28,7 @@ const NameLabel = ({ name, models }) => {
variant="ghost"
key={index}
onClick={() => {
navigator.clipboard.writeText(item);
showSuccess("复制模型名称成功!");
copy(item, '模型名称');
}}
>
{item}
@ -48,7 +47,7 @@ const NameLabel = ({ name, models }) => {
NameLabel.propTypes = {
name: PropTypes.string,
models: PropTypes.string,
models: PropTypes.string
};
export default NameLabel;


@ -1,5 +1,5 @@
import { useState, useEffect } from 'react';
import { showError, showSuccess, showInfo } from 'utils/common';
import { showError, showSuccess, showInfo, loadChannelModels } from 'utils/common';
import { useTheme } from '@mui/material/styles';
import Table from '@mui/material/Table';
@ -8,7 +8,6 @@ import TableContainer from '@mui/material/TableContainer';
import PerfectScrollbar from 'react-perfect-scrollbar';
import TablePagination from '@mui/material/TablePagination';
import LinearProgress from '@mui/material/LinearProgress';
import Alert from '@mui/material/Alert';
import ButtonGroup from '@mui/material/ButtonGroup';
import Toolbar from '@mui/material/Toolbar';
import useMediaQuery from '@mui/material/useMediaQuery';
@ -189,6 +188,7 @@ export default function ChannelPage() {
.catch((reason) => {
showError(reason);
});
loadChannelModels().then();
}, []);
return (
@ -200,7 +200,7 @@ export default function ChannelPage() {
</Button>
</Stack>
<Card>
<Box component="form" onSubmit={searchChannels} noValidate sx={{marginTop: 2}}>
<Box component="form" onSubmit={searchChannels} noValidate sx={{ marginTop: 2 }}>
<TableToolBar filterName={searchKeyword} handleFilterName={handleSearchKeyword} placeholder={'搜索渠道的 ID名称和密钥 ...'} />
</Box>
<Toolbar
@ -214,7 +214,7 @@ export default function ChannelPage() {
>
<Container>
{matchUpMd ? (
<ButtonGroup variant="outlined" aria-label="outlined small primary button group" sx={{marginBottom: 2}}>
<ButtonGroup variant="outlined" aria-label="outlined small primary button group" sx={{ marginBottom: 2 }}>
<Button onClick={handleRefresh} startIcon={<IconRefresh width={'18px'} />}>
刷新
</Button>


@ -1,177 +1,209 @@
const defaultConfig = {
input: {
name: "",
name: '',
type: 1,
key: "",
base_url: "",
other: "",
model_mapping: "",
key: '',
base_url: '',
other: '',
model_mapping: '',
models: [],
groups: ["default"],
groups: ['default'],
config: {}
},
inputLabel: {
name: "渠道名称",
type: "渠道类型",
base_url: "渠道API地址",
key: "密钥",
other: "其他参数",
models: "模型",
model_mapping: "模型映射关系",
groups: "用户组",
name: '渠道名称',
type: '渠道类型',
base_url: '渠道API地址',
key: '密钥',
other: '其他参数',
models: '模型',
model_mapping: '模型映射关系',
groups: '用户组',
config: null
},
prompt: {
type: "请选择渠道类型",
name: "请为渠道命名",
base_url: "可空请输入中转API地址例如通过cloudflare中转",
key: "请输入渠道对应的鉴权密钥",
other: "",
models: "请选择该渠道所支持的模型",
type: '请选择渠道类型',
name: '请为渠道命名',
base_url: '可空请输入中转API地址例如通过cloudflare中转',
key: '请输入渠道对应的鉴权密钥',
other: '',
models: '请选择该渠道所支持的模型',
model_mapping:
'请输入要修改的模型映射关系格式为api请求模型ID:实际转发给渠道的模型ID使用JSON数组表示例如{"gpt-3.5": "gpt-35"}',
groups: "请选择该渠道所支持的用户组",
groups: '请选择该渠道所支持的用户组',
config: null
},
modelGroup: "openai",
modelGroup: 'openai'
};
const typeConfig = {
3: {
inputLabel: {
base_url: "AZURE_OPENAI_ENDPOINT",
other: "默认 API 版本",
base_url: 'AZURE_OPENAI_ENDPOINT',
other: '默认 API 版本'
},
prompt: {
base_url: "请填写AZURE_OPENAI_ENDPOINT",
other: "请输入默认API版本例如2024-03-01-preview",
},
base_url: '请填写AZURE_OPENAI_ENDPOINT',
other: '请输入默认API版本例如2024-03-01-preview'
}
},
11: {
input: {
models: ["PaLM-2"],
models: ['PaLM-2']
},
modelGroup: "google palm",
modelGroup: 'google palm'
},
14: {
input: {
models: ["claude-instant-1", "claude-2", "claude-2.0", "claude-2.1"],
models: ['claude-instant-1', 'claude-2', 'claude-2.0', 'claude-2.1']
},
modelGroup: "anthropic",
modelGroup: 'anthropic'
},
15: {
input: {
models: ["ERNIE-Bot", "ERNIE-Bot-turbo", "ERNIE-Bot-4", "Embedding-V1"],
models: ['ERNIE-Bot', 'ERNIE-Bot-turbo', 'ERNIE-Bot-4', 'Embedding-V1']
},
prompt: {
key: "按照如下格式输入APIKey|SecretKey",
key: '按照如下格式输入APIKey|SecretKey'
},
modelGroup: "baidu",
modelGroup: 'baidu'
},
16: {
input: {
models: ["glm-4", "glm-4v", "glm-3-turbo", "chatglm_turbo", "chatglm_pro", "chatglm_std", "chatglm_lite"],
models: ['glm-4', 'glm-4v', 'glm-3-turbo', 'chatglm_turbo', 'chatglm_pro', 'chatglm_std', 'chatglm_lite']
},
modelGroup: "zhipu",
modelGroup: 'zhipu'
},
17: {
inputLabel: {
other: "插件参数",
other: '插件参数'
},
input: {
models: [
"qwen-turbo",
"qwen-plus",
"qwen-max",
"qwen-max-longcontext",
"text-embedding-v1",
],
models: ['qwen-turbo', 'qwen-plus', 'qwen-max', 'qwen-max-longcontext', 'text-embedding-v1']
},
prompt: {
other: "请输入插件参数,即 X-DashScope-Plugin 请求头的取值",
other: '请输入插件参数,即 X-DashScope-Plugin 请求头的取值'
},
modelGroup: "ali",
modelGroup: 'ali'
},
18: {
inputLabel: {
other: "版本号",
other: '版本号'
},
input: {
models: [
"SparkDesk",
'SparkDesk-v1.1',
'SparkDesk-v2.1',
'SparkDesk-v3.1',
'SparkDesk-v3.5'
],
models: ['SparkDesk', 'SparkDesk-v1.1', 'SparkDesk-v2.1', 'SparkDesk-v3.1', 'SparkDesk-v3.5']
},
prompt: {
key: "按照如下格式输入APPID|APISecret|APIKey",
other: "请输入版本号例如v3.1",
key: '按照如下格式输入APPID|APISecret|APIKey',
other: '请输入版本号例如v3.1'
},
modelGroup: "xunfei",
modelGroup: 'xunfei'
},
19: {
input: {
models: [
"360GPT_S2_V9",
"embedding-bert-512-v1",
"embedding_s1_v1",
"semantic_similarity_s1_v1",
],
models: ['360GPT_S2_V9', 'embedding-bert-512-v1', 'embedding_s1_v1', 'semantic_similarity_s1_v1']
},
modelGroup: "360",
modelGroup: '360'
},
22: {
prompt: {
key: "按照如下格式输入APIKey-AppId例如fastgpt-0sp2gtvfdgyi4k30jwlgwf1i-64f335d84283f05518e9e041",
},
key: '按照如下格式输入APIKey-AppId例如fastgpt-0sp2gtvfdgyi4k30jwlgwf1i-64f335d84283f05518e9e041'
}
},
23: {
input: {
models: ["hunyuan"],
models: ['hunyuan']
},
prompt: {
key: "按照如下格式输入AppId|SecretId|SecretKey",
key: '按照如下格式输入AppId|SecretId|SecretKey'
},
modelGroup: "tencent",
modelGroup: 'tencent'
},
24: {
inputLabel: {
other: "版本号",
other: '版本号'
},
input: {
models: ["gemini-pro"],
models: ['gemini-pro']
},
prompt: {
other: "请输入版本号例如v1",
other: '请输入版本号例如v1'
},
modelGroup: "google gemini",
modelGroup: 'google gemini'
},
25: {
input: {
models: ['moonshot-v1-8k', 'moonshot-v1-32k', 'moonshot-v1-128k'],
models: ['moonshot-v1-8k', 'moonshot-v1-32k', 'moonshot-v1-128k']
},
modelGroup: "moonshot",
modelGroup: 'moonshot'
},
26: {
input: {
models: ['Baichuan2-Turbo', 'Baichuan2-Turbo-192k', 'Baichuan-Text-Embedding'],
models: ['Baichuan2-Turbo', 'Baichuan2-Turbo-192k', 'Baichuan-Text-Embedding']
},
modelGroup: "baichuan",
modelGroup: 'baichuan'
},
27: {
input: {
models: ['abab5.5s-chat', 'abab5.5-chat', 'abab6-chat'],
models: ['abab5.5s-chat', 'abab5.5-chat', 'abab6-chat']
},
modelGroup: "minimax",
modelGroup: 'minimax'
},
29: {
modelGroup: "groq",
modelGroup: 'groq'
},
30: {
modelGroup: "ollama",
modelGroup: 'ollama'
},
31: {
modelGroup: "lingyiwanwu",
modelGroup: 'lingyiwanwu'
},
33: {
inputLabel: {
key: '',
config: {
region: 'Region',
ak: 'Access Key',
sk: 'Secret Key'
}
},
prompt: {
key: '',
config: {
region: 'regione.g. us-west-2',
ak: 'AWS IAM Access Key',
sk: 'AWS IAM Secret Key'
}
},
modelGroup: 'anthropic'
},
37: {
inputLabel: {
config: {
user_id: 'Account ID'
}
},
prompt: {
config: {
user_id: '请输入 Account ID例如d8d7c61dbc334c32d3ced580e4bf42b4'
}
},
modelGroup: 'Cloudflare'
},
34: {
inputLabel: {
config: {
user_id: 'User ID'
}
},
prompt: {
models: '对于 Coze 而言,模型名称即 Bot ID你可以添加一个前缀 `bot-`,例如:`bot-123456`',
config: {
user_id: '生成该密钥的用户 ID'
}
},
modelGroup: 'Coze'
}
};
export { defaultConfig, typeConfig };


@ -21,7 +21,7 @@ import { IconBrandWechat, IconBrandGithub, IconMail } from '@tabler/icons-react'
import Label from 'ui-component/Label';
import { API } from 'utils/api';
import { showError, showSuccess } from 'utils/common';
import { onGitHubOAuthClicked, onLarkOAuthClicked } from 'utils/common';
import { onGitHubOAuthClicked, onLarkOAuthClicked, copy } from 'utils/common';
import * as Yup from 'yup';
import WechatModal from 'views/Authentication/AuthForms/WechatModal';
import { useSelector } from 'react-redux';
@ -90,8 +90,7 @@ export default function Profile() {
const { success, message, data } = res.data;
if (success) {
setInputs((inputs) => ({ ...inputs, access_token: data }));
navigator.clipboard.writeText(data);
showSuccess(`令牌已重置并已复制到剪贴板`);
copy(data, '访问令牌');
} else {
showError(message);
}


@ -18,7 +18,7 @@ import {
import Label from 'ui-component/Label';
import TableSwitch from 'ui-component/Switch';
import { timestamp2string, renderQuota, showSuccess } from 'utils/common';
import { timestamp2string, renderQuota, copy } from 'utils/common';
import { IconDotsVertical, IconEdit, IconTrash } from '@tabler/icons-react';
@ -83,8 +83,7 @@ export default function RedemptionTableRow({ item, manageRedemption, handleOpenM
variant="contained"
color="primary"
onClick={() => {
navigator.clipboard.writeText(item.key);
showSuccess('已复制到剪贴板!');
copy(item.key, '兑换码');
}}
>
复制


@ -20,7 +20,7 @@ import {
} from '@mui/material';
import TableSwitch from 'ui-component/Switch';
import { renderQuota, showSuccess, timestamp2string } from 'utils/common';
import { renderQuota, timestamp2string, copy } from 'utils/common';
import { IconDotsVertical, IconEdit, IconTrash, IconCaretDownFilled } from '@tabler/icons-react';
@ -141,8 +141,7 @@ export default function TokensTableRow({ item, manageToken, handleOpenModal, set
if (type === 'link') {
window.open(text);
} else {
navigator.clipboard.writeText(text);
showSuccess('已复制到剪贴板!');
copy(text);
}
handleCloseMenu();
};
@ -192,7 +191,7 @@ export default function TokensTableRow({ item, manageToken, handleOpenModal, set
id={`switch-${item.id}`}
checked={statusSwitch === 1}
onChange={handleStatus}
// disabled={statusSwitch !== 1 && statusSwitch !== 2}
// disabled={statusSwitch !== 1 && statusSwitch !== 2}
/>
</Tooltip>
</TableCell>
@ -211,8 +210,7 @@ export default function TokensTableRow({ item, manageToken, handleOpenModal, set
<Button
color="primary"
onClick={() => {
navigator.clipboard.writeText(`sk-${item.key}`);
showSuccess('已复制到剪贴板!');
copy(`sk-${item.key}`);
}}
>
复制
@ -222,7 +220,9 @@ export default function TokensTableRow({ item, manageToken, handleOpenModal, set
</Button>
</ButtonGroup>
<ButtonGroup size="small" aria-label="split button">
<Button color="primary" onClick={(e) => handleCopy(COPY_OPTIONS[0], 'link')}>聊天</Button>
<Button color="primary" onClick={(e) => handleCopy(COPY_OPTIONS[0], 'link')}>
聊天
</Button>
<Button size="small" onClick={(e) => handleOpenMenu(e, 'link')}>
<IconCaretDownFilled size={'16px'} />
</Button>


@ -4,7 +4,7 @@ import SubCard from 'ui-component/cards/SubCard';
import inviteImage from 'assets/images/invite/cwok_casual_19.webp';
import { useState } from 'react';
import { API } from 'utils/api';
import { showError, showSuccess } from 'utils/common';
import { showError, copy } from 'utils/common';
const InviteCard = () => {
const theme = useTheme();
@ -12,8 +12,7 @@ const InviteCard = () => {
const handleInviteUrl = async () => {
if (inviteUl) {
navigator.clipboard.writeText(inviteUl);
showSuccess(`邀请链接已复制到剪切板`);
copy(inviteUl, '邀请链接');
return;
}
const res = await API.get('/api/user/aff');
@ -21,8 +20,7 @@ const InviteCard = () => {
if (success) {
let link = `${window.location.origin}/register?aff=${data}`;
setInviteUrl(link);
navigator.clipboard.writeText(link);
showSuccess(`邀请链接已复制到剪切板`);
copy(link, '邀请链接');
} else {
showError(message);
}