fix: merge latest change from remote
Commit 256d290507 (parent 8f0799d909)

README.md (13 changed lines)
@@ -87,16 +87,19 @@ _✨ An all-in-one OpenAI interface that integrates various API access methods, ready to use out of the box
 12. Supports displaying quota in US dollars.
 13. Supports publishing announcements, setting a top-up link, and setting the initial quota for new users.
 14. Supports model mapping to redirect a user's requested model.
-15. Supports rich **customization** options:
+15. Supports automatic retry on failure.
+16. Supports the image generation API.
+17. Supports rich **customization** options:
     1. Supports customizing the system name, logo, and footer.
     2. Supports customizing the home page and the about page, either with HTML & Markdown or by embedding a standalone web page via an iframe.
-16. Supports accessing the management API with a system access token.
+18. Supports accessing the management API with a system access token.
-17. Supports Cloudflare Turnstile user verification.
+19. Supports Cloudflare Turnstile user verification.
-18. Supports user management and **multiple login/registration methods**:
+20. Supports user management and **multiple login/registration methods**:
     + Email login/registration and password reset via email.
     + [GitHub OAuth](https://github.com/settings/applications/new).
     + WeChat Official Account authorization (requires an additional [WeChat Server](https://github.com/songquanpeng/wechat-server) deployment).
-19. As other large models open up their APIs, they will be supported as soon as possible and wrapped in the same API access format.
+21. Supports [ChatGLM](https://github.com/THUDM/ChatGLM2-6B).
+22. As other large models open up their APIs, they will be supported as soon as possible and wrapped in the same API access format.
 
 ## Deployment
 
 ### Deploying with Docker
@@ -72,6 +72,7 @@ var AutomaticDisableChannelEnabled = false
 var QuotaRemindThreshold = 1000
 var PreConsumedQuota = 500
 var ApproximateTokenEnabled = false
+var RetryTimes = 0
 
 var RootUserEmail = ""
 
@@ -252,6 +252,24 @@ func init() {
             Root:       "code-davinci-edit-001",
             Parent:     nil,
         },
+        {
+            Id:         "ChatGLM",
+            Object:     "model",
+            Created:    1677649963,
+            OwnedBy:    "thudm",
+            Permission: permission,
+            Root:       "ChatGLM",
+            Parent:     nil,
+        },
+        {
+            Id:         "ChatGLM2",
+            Object:     "model",
+            Created:    1677649963,
+            OwnedBy:    "thudm",
+            Permission: permission,
+            Root:       "ChatGLM2",
+            Parent:     nil,
+        },
     }
     openAIModelsMap = make(map[string]OpenAIModels)
     for _, model := range openAIModels {
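The two new entries above are what make `ChatGLM` and `ChatGLM2` appear in the `GET /v1/models` listing. As a reference only, here is a minimal sketch of the JSON shape a client would see for one such entry; `modelEntry` below is a trimmed-down stand-in for the project's `OpenAIModels` struct (the permission list is omitted), not the actual type:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// modelEntry is an illustrative stand-in for the project's OpenAIModels type,
// reduced to the fields shown in the hunk above.
type modelEntry struct {
	Id      string `json:"id"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
	OwnedBy string `json:"owned_by"`
	Root    string `json:"root"`
}

func main() {
	entry := modelEntry{
		Id:      "ChatGLM2",
		Object:  "model",
		Created: 1677649963,
		OwnedBy: "thudm",
		Root:    "ChatGLM2",
	}
	out, _ := json.MarshalIndent(entry, "", "  ")
	fmt.Println(string(out)) // prints the entry as indented JSON
}
```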
@@ -22,26 +22,26 @@ func relayImageHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode
     consumeQuota := c.GetBool("consume_quota")
     group := c.GetString("group")
 
-    var textRequest GeneralOpenAIRequest
+    var imageRequest ImageRequest
     if consumeQuota {
-        err := common.UnmarshalBodyReusable(c, &textRequest)
+        err := common.UnmarshalBodyReusable(c, &imageRequest)
         if err != nil {
             return errorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
         }
     }
 
     // Prompt validation
-    if textRequest.Prompt == "" {
+    if imageRequest.Prompt == "" {
         return errorWrapper(errors.New("prompt is required"), "required_field_missing", http.StatusBadRequest)
     }
 
     // Not "256x256", "512x512", or "1024x1024"
-    if textRequest.Size != "" && textRequest.Size != "256x256" && textRequest.Size != "512x512" && textRequest.Size != "1024x1024" {
+    if imageRequest.Size != "" && imageRequest.Size != "256x256" && imageRequest.Size != "512x512" && imageRequest.Size != "1024x1024" {
         return errorWrapper(errors.New("size must be one of 256x256, 512x512, or 1024x1024"), "invalid_field_value", http.StatusBadRequest)
     }
 
-    // N should between 1 to 10
+    // N should be between 1 and 10
     if imageRequest.N != 0 && (imageRequest.N < 1 || imageRequest.N > 10) {
         return errorWrapper(errors.New("n must be between 1 and 10"), "invalid_field_value", http.StatusBadRequest)
     }
 
@@ -71,7 +71,7 @@ func relayImageHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode
 
     var requestBody io.Reader
     if isModelMapped {
-        jsonStr, err := json.Marshal(textRequest)
+        jsonStr, err := json.Marshal(imageRequest)
         if err != nil {
             return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
         }
@@ -87,14 +87,14 @@ func relayImageHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode
 
     sizeRatio := 1.0
     // Size
-    if textRequest.Size == "256x256" {
+    if imageRequest.Size == "256x256" {
         sizeRatio = 1
-    } else if textRequest.Size == "512x512" {
+    } else if imageRequest.Size == "512x512" {
         sizeRatio = 1.125
-    } else if textRequest.Size == "1024x1024" {
+    } else if imageRequest.Size == "1024x1024" {
         sizeRatio = 1.25
     }
-    quota := int(ratio * sizeRatio * 1000)
+    quota := int(ratio*sizeRatio*1000) * imageRequest.N
 
     if consumeQuota && userQuota-quota < 0 {
         return errorWrapper(err, "insufficient_user_quota", http.StatusForbidden)
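The change above makes the image quota scale with the number of requested images instead of charging a single flat amount. A minimal standalone sketch of the formula from this hunk; the function name `imageQuota` and the sample ratio are illustrative, not the project's code:

```go
package main

import "fmt"

// imageQuota mirrors the calculation in the hunk above: a per-image cost of
// ratio * sizeRatio * 1000, multiplied by n, the number of images requested.
func imageQuota(ratio float64, size string, n int) int {
	sizeRatio := 1.0
	switch size {
	case "256x256":
		sizeRatio = 1
	case "512x512":
		sizeRatio = 1.125
	case "1024x1024":
		sizeRatio = 1.25
	}
	return int(ratio*sizeRatio*1000) * n
}

func main() {
	// e.g. ratio 8 and four 1024x1024 images: 8 * 1.25 * 1000 * 4 = 40000
	fmt.Println(imageQuota(8, "1024x1024", 4))
}
```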
@@ -33,6 +33,9 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
     if relayMode == RelayModeModerations && textRequest.Model == "" {
         textRequest.Model = "text-moderation-latest"
     }
+    if relayMode == RelayModeEmbeddings && textRequest.Model == "" {
+        textRequest.Model = c.Param("model")
+    }
     // request validation
     if textRequest.Model == "" {
         return errorWrapper(errors.New("model is required"), "required_field_missing", http.StatusBadRequest)
@@ -478,7 +481,8 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
             if strings.HasPrefix(data, "data: [DONE]") {
                 data = data[:12]
             }
-            log.Print(data)
+            // some implementations may add \r at the end of data
+            data = strings.TrimSuffix(data, "\r")
             c.Render(-1, common.CustomEvent{Data: data})
             return true
         case <-stopChan:
@@ -4,6 +4,7 @@ import (
     "fmt"
     "net/http"
     "one-api/common"
+    "strconv"
     "strings"
 
     "github.com/gin-gonic/gin"
@@ -59,6 +60,12 @@ type TextRequest struct {
     //Stream bool `json:"stream"`
 }
 
+type ImageRequest struct {
+    Prompt string `json:"prompt"`
+    N      int    `json:"n"`
+    Size   string `json:"size"`
+}
+
 type Usage struct {
     PromptTokens     int `json:"prompt_tokens"`
     CompletionTokens int `json:"completion_tokens"`
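The new `ImageRequest` type only carries the three fields the image endpoint validates. As a small sketch of how a typical image-generation body maps onto it, with the struct redeclared locally so the example is self-contained (the request values are made up for illustration):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local copy of the struct added in this hunk, with the same JSON tags.
type ImageRequest struct {
	Prompt string `json:"prompt"`
	N      int    `json:"n"`
	Size   string `json:"size"`
}

func main() {
	body := []byte(`{"model": "dall-e", "prompt": "a red fox", "n": 2, "size": "512x512"}`)
	var req ImageRequest
	if err := json.Unmarshal(body, &req); err != nil {
		panic(err)
	}
	// Fields not declared on the struct, such as "model", are ignored by encoding/json.
	fmt.Printf("%+v\n", req) // {Prompt:a red fox N:2 Size:512x512}
}
```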
@@ -139,6 +146,8 @@ func Relay(c *gin.Context) {
         relayMode = RelayModeCompletions
     } else if strings.HasPrefix(c.Request.URL.Path, "/v1/embeddings") {
         relayMode = RelayModeEmbeddings
+    } else if strings.HasSuffix(c.Request.URL.Path, "embeddings") {
+        relayMode = RelayModeEmbeddings
     } else if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") {
         relayMode = RelayModeModerations
     } else if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") {
@@ -154,16 +163,25 @@ func Relay(c *gin.Context) {
         err = relayTextHelper(c, relayMode)
     }
     if err != nil {
+        retryTimesStr := c.Query("retry")
+        retryTimes, _ := strconv.Atoi(retryTimesStr)
+        if retryTimesStr == "" {
+            retryTimes = common.RetryTimes
+        }
+        if retryTimes > 0 {
+            c.Redirect(http.StatusTemporaryRedirect, fmt.Sprintf("%s?retry=%d", c.Request.URL.Path, retryTimes-1))
+        } else {
             if err.StatusCode == http.StatusTooManyRequests {
                 err.OpenAIError.Message = "当前分组负载已饱和,请稍后再试,或升级账户以提升服务质量。"
             }
             c.JSON(err.StatusCode, gin.H{
                 "error": err.OpenAIError,
             })
+        }
         channelId := c.GetInt("channel_id")
         common.SysError(fmt.Sprintf("relay error (channel #%d): %s", channelId, err.Message))
         // https://platform.openai.com/docs/guides/error-codes/api-errors
-        if common.AutomaticDisableChannelEnabled && (err.Type == "insufficient_quota" || err.Code == "invalid_api_key") {
+        if common.AutomaticDisableChannelEnabled && (err.Type == "insufficient_quota" || err.Code == "invalid_api_key" || err.Code == "account_deactivated") {
             channelId := c.GetInt("channel_id")
             channelName := c.GetString("channel_name")
             disableChannel(channelId, channelName, err.Message)
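The retry added above works by redirecting a failed request back to the same path with a `retry` query parameter that counts down; when it reaches zero the error is finally returned to the client. A minimal sketch of just that counting logic in isolation; the helper name `nextRetry` and the default of 3 retries are illustrative assumptions, not the project's code:

```go
package main

import (
	"fmt"
	"strconv"
)

// nextRetry mimics the counting scheme in the hunk above: an absent "retry"
// query value falls back to the configured default, and a positive value
// means "redirect to the same path with retry-1".
func nextRetry(retryQuery string, defaultRetries int) (redirect bool, remaining int) {
	retries, _ := strconv.Atoi(retryQuery)
	if retryQuery == "" {
		retries = defaultRetries
	}
	if retries > 0 {
		return true, retries - 1
	}
	return false, 0
}

func main() {
	// First failure: no query value yet, so the assumed default of 3 applies.
	fmt.Println(nextRetry("", 3))  // true 2  -> redirect to ...?retry=2
	fmt.Println(nextRetry("2", 3)) // true 1  -> redirect to ...?retry=1
	fmt.Println(nextRetry("0", 3)) // false 0 -> give up and return the error
}
```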
@@ -74,6 +74,11 @@ func Distribute() func(c *gin.Context) {
                 modelRequest.Model = "text-moderation-stable"
             }
         }
+        if strings.HasSuffix(c.Request.URL.Path, "embeddings") {
+            if modelRequest.Model == "" {
+                modelRequest.Model = c.Param("model")
+            }
+        }
         if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") {
             if modelRequest.Model == "" {
                 modelRequest.Model = "dall-e"
@@ -71,6 +71,7 @@ func InitOptionMap() {
     common.OptionMap["TopUpLink"] = common.TopUpLink
     common.OptionMap["ChatLink"] = common.ChatLink
     common.OptionMap["QuotaPerUnit"] = strconv.FormatFloat(common.QuotaPerUnit, 'f', -1, 64)
+    common.OptionMap["RetryTimes"] = strconv.Itoa(common.RetryTimes)
     common.OptionMapRWMutex.Unlock()
     loadOptionsFromDatabase()
 }
@@ -205,6 +206,8 @@ func updateOptionMap(key string, value string) (err error) {
         common.QuotaRemindThreshold, _ = strconv.Atoi(value)
     case "PreConsumedQuota":
         common.PreConsumedQuota, _ = strconv.Atoi(value)
+    case "RetryTimes":
+        common.RetryTimes, _ = strconv.Atoi(value)
     case "ModelRatio":
         err = common.UpdateModelRatioByJSONString(value)
     case "GroupRatio":
@@ -25,6 +25,7 @@ func SetRelayRouter(router *gin.Engine) {
         relayV1Router.POST("/images/edits", controller.RelayNotImplemented)
         relayV1Router.POST("/images/variations", controller.RelayNotImplemented)
         relayV1Router.POST("/embeddings", controller.Relay)
+        relayV1Router.POST("/engines/:model/embeddings", controller.Relay)
         relayV1Router.POST("/audio/transcriptions", controller.RelayNotImplemented)
         relayV1Router.POST("/audio/translations", controller.RelayNotImplemented)
         relayV1Router.GET("/files", controller.RelayNotImplemented)
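Together with the `c.Param("model")` fallbacks above, this new route lets clients that use the older engines-style path (`/v1/engines/{model}/embeddings`) omit the model from the request body; it is taken from the path instead. A hedged client-side sketch, where the base URL, token, and model name are placeholders for a real deployment:

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Placeholders: adjust the base URL, token, and model name for your deployment.
	url := "http://localhost:3000/v1/engines/text-embedding-ada-002/embeddings"
	body := bytes.NewBufferString(`{"input": "hello world"}`) // no "model" field needed
	req, err := http.NewRequest(http.MethodPost, url, body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer sk-xxxx")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```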
@@ -20,6 +20,7 @@ const OperationSetting = () => {
     DisplayInCurrencyEnabled: '',
     DisplayTokenStatEnabled: '',
     ApproximateTokenEnabled: '',
+    RetryTimes: 0,
   });
   const [originInputs, setOriginInputs] = useState({});
   let [loading, setLoading] = useState(false);
@@ -122,6 +123,9 @@ const OperationSetting = () => {
       if (originInputs['QuotaPerUnit'] !== inputs.QuotaPerUnit) {
         await updateOption('QuotaPerUnit', inputs.QuotaPerUnit);
       }
+      if (originInputs['RetryTimes'] !== inputs.RetryTimes) {
+        await updateOption('RetryTimes', inputs.RetryTimes);
+      }
       break;
     }
   };
@@ -133,7 +137,7 @@ const OperationSetting = () => {
         <Header as='h3'>
           通用设置
         </Header>
-        <Form.Group widths={3}>
+        <Form.Group widths={4}>
           <Form.Input
             label='充值链接'
             name='TopUpLink'
@@ -162,6 +166,17 @@ const OperationSetting = () => {
             step='0.01'
             placeholder='一单位货币能兑换的额度'
           />
+          <Form.Input
+            label='失败重试次数'
+            name='RetryTimes'
+            type={'number'}
+            step='1'
+            min='0'
+            onChange={handleInputChange}
+            autoComplete='new-password'
+            value={inputs.RetryTimes}
+            placeholder='失败重试次数'
+          />
         </Form.Group>
         <Form.Group inline>
           <Form.Checkbox
@@ -1,5 +1,5 @@
 import React, { useEffect, useState } from 'react';
-import { Button, Form, Header, Message, Segment } from 'semantic-ui-react';
+import { Button, Form, Header, Input, Message, Segment } from 'semantic-ui-react';
 import { useParams } from 'react-router-dom';
 import { API, showError, showInfo, showSuccess, verifyJSON } from '../../helpers';
 import { CHANNEL_OPTIONS } from '../../constants';
@@ -32,6 +32,7 @@ const EditChannel = () => {
   const [groupOptions, setGroupOptions] = useState([]);
   const [basicModels, setBasicModels] = useState([]);
   const [fullModels, setFullModels] = useState([]);
+  const [customModel, setCustomModel] = useState('');
   const handleInputChange = (e, { name, value }) => {
     console.log(name, value)
     setInputs((inputs) => ({ ...inputs, [name]: value }));
@@ -45,6 +46,19 @@ const EditChannel = () => {
       data.models = [];
     } else {
       data.models = data.models.split(',');
+      setTimeout(() => {
+        let localModelOptions = [...modelOptions];
+        data.models.forEach((model) => {
+          if (!localModelOptions.find((option) => option.key === model)) {
+            localModelOptions.push({
+              key: model,
+              text: model,
+              value: model
+            });
+          }
+        });
+        setModelOptions(localModelOptions);
+      }, 1000);
     }
     if (data.group === '') {
       data.groups = [];
|
|||||||
<Button type={'button'} onClick={() => {
|
<Button type={'button'} onClick={() => {
|
||||||
handleInputChange(null, { name: 'models', value: [] });
|
handleInputChange(null, { name: 'models', value: [] });
|
||||||
}}>清除所有模型</Button>
|
}}>清除所有模型</Button>
|
||||||
|
<Input
|
||||||
|
action={
|
||||||
|
<Button type={'button'} onClick={()=>{
|
||||||
|
let localModels = [...inputs.models];
|
||||||
|
localModels.push(customModel);
|
||||||
|
let localModelOptions = [...modelOptions];
|
||||||
|
localModelOptions.push({
|
||||||
|
key: customModel,
|
||||||
|
text: customModel,
|
||||||
|
value: customModel,
|
||||||
|
});
|
||||||
|
setModelOptions(localModelOptions);
|
||||||
|
handleInputChange(null, { name: 'models', value: localModels });
|
||||||
|
}}>填入</Button>
|
||||||
|
}
|
||||||
|
placeholder='输入自定义模型名称'
|
||||||
|
value={customModel}
|
||||||
|
onChange={(e, { value }) => {
|
||||||
|
setCustomModel(value);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
<Form.Field>
|
<Form.Field>
|
||||||
<Form.Checkbox
|
<Form.Checkbox
|
||||||
@@ -321,7 +356,7 @@ const EditChannel = () => {
             />
           )
         }
-        <Button positive onClick={submit}>提交</Button>
+        <Button type={isEdit ? "button" : "submit"} positive onClick={submit}>提交</Button>
       </Form>
     </Segment>
   </>