chore: update impl
parent 67020a3c08
commit ad9fcd3006
@@ -87,8 +87,10 @@ var ModelRatio = map[string]float64{
 	"chatglm_pro":          0.7143, // ¥0.01 / 1k tokens
 	"chatglm_std":          0.3572, // ¥0.005 / 1k tokens
 	"chatglm_lite":         0.1429, // ¥0.002 / 1k tokens
-	"qwen-turbo":           0.8572, // ¥0.012 / 1k tokens
-	"qwen-plus":            10,     // ¥0.14 / 1k tokens
+	"qwen-turbo":           0.5715, // ¥0.008 / 1k tokens // https://help.aliyun.com/zh/dashscope/developer-reference/tongyi-thousand-questions-metering-and-billing
+	"qwen-plus":            1.4286, // ¥0.02 / 1k tokens
+	"qwen-max":             1.4286, // ¥0.02 / 1k tokens
+	"qwen-max-longcontext": 1.4286, // ¥0.02 / 1k tokens
 	"text-embedding-v1":    0.05,   // ¥0.0007 / 1k tokens
 	"SparkDesk":            1.2858, // ¥0.018 / 1k tokens
 	"360GPT_S2_V9":         0.8572, // ¥0.012 / 1k tokens
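Note on the numbers: the ratios above appear to follow the project's usual convention that a ratio of 1.0 corresponds to $0.002 per 1K tokens, i.e. roughly ¥0.014 per 1K tokens at ¥7 per USD. That base rate is an assumption (it is not stated in this diff); under it, a quick sanity check of the new Qwen entries:

package main

import "fmt"

func main() {
	// Assumption: ratio 1.0 == $0.002 / 1K tokens ≈ ¥0.014 / 1K tokens (¥7 per USD).
	const cnyPerRatioUnit = 0.014
	prices := map[string]float64{ // ¥ per 1K tokens, taken from the comments above
		"qwen-turbo":           0.008,
		"qwen-plus":            0.02,
		"qwen-max":             0.02,
		"qwen-max-longcontext": 0.02,
	}
	for name, cny := range prices {
		fmt.Printf("%-22s %.4f\n", name, cny/cnyPerRatioUnit)
	}
	// Prints ≈ 0.5714 for qwen-turbo and ≈ 1.4286 for the others,
	// matching the map values above up to rounding.
}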
@@ -477,6 +477,24 @@ func init() {
 		Root:       "qwen-plus",
 		Parent:     nil,
 	},
+	{
+		Id:         "qwen-max",
+		Object:     "model",
+		Created:    1677649963,
+		OwnedBy:    "ali",
+		Permission: permission,
+		Root:       "qwen-max",
+		Parent:     nil,
+	},
+	{
+		Id:         "qwen-max-longcontext",
+		Object:     "model",
+		Created:    1677649963,
+		OwnedBy:    "ali",
+		Permission: permission,
+		Root:       "qwen-max-longcontext",
+		Parent:     nil,
+	},
 	{
 		Id:         "text-embedding-v1",
 		Object:     "model",
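The two new entries register qwen-max and qwen-max-longcontext in the model listing so clients can select them. A rough usage sketch, assuming the gateway exposes the standard OpenAI-compatible /v1/chat/completions route (the base URL and token below are placeholders, not values from this commit):

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	body := []byte(`{"model": "qwen-max", "messages": [{"role": "user", "content": "Hello"}]}`)
	req, err := http.NewRequest("POST", "http://localhost:3000/v1/chat/completions", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer sk-your-token") // placeholder token
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}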
@@ -83,77 +83,16 @@ type AliChatResponse struct {
 
 func requestOpenAI2Ali(request GeneralOpenAIRequest) *AliChatRequest {
 	messages := make([]AliMessage, 0, len(request.Messages))
-	if len(messages) == 1 {
-		messages = append(messages, AliMessage{
-			Content: request.Messages[0].StringContent(),
-			Role:    "user",
-		})
-	} else {
-		// 1. The system message must come first
-		// 2. user and assistant messages must appear alternately, in pairs
-		// 3. Adjacent messages with the same role are merged
-		lastRole := ""
-		systemMessage := AliMessage{
-			Content: "",
-			Role:    "system",
-		}
-		for i := 0; i < len(request.Messages); i++ {
-			message := request.Messages[i]
-			content := message.StringContent()
-			if content == "" || len(content) <= 0 {
-				continue
-			}
-			if strings.ToLower(message.Role) == "system" {
-				systemMessage.Content += "\n" + content
-				lastRole = "system"
-			} else if strings.ToLower(message.Role) == "user" {
-				if lastRole == "user" {
-					messages[len(messages)-1].Content += "\n" + content
-				} else {
-					messages = append(messages, AliMessage{
-						Content: content,
-						Role:    "user",
-					})
-				}
-				lastRole = "user"
-			} else {
-				if lastRole == "assistant" {
-					messages[len(messages)-1].Content += "\n" + content
-				} else {
-					messages = append(messages, AliMessage{
-						Content: content,
-						Role:    "assistant",
-					})
-				}
-				lastRole = "assistant"
-			}
-		}
-		// The user has to ask first
-		if messages[0].Role != "user" {
-			messages = append([]AliMessage{
-				{
-					Content: "?",
-					Role:    "user",
-				},
-			}, messages...)
-		}
-		// Prepend the system message to the head
-		if len(systemMessage.Content) > 0 {
-			messages = append([]AliMessage{systemMessage}, messages...)
-		}
-		// Finally, if the conversation does not end with a user question, append a placeholder one
-		if messages[len(messages)-1].Role != "user" {
-			messages = append(messages, AliMessage{
-				Content: "?",
-				Role:    "user",
-			})
-		}
-	}
+	for i := 0; i < len(request.Messages); i++ {
+		message := request.Messages[i]
+		messages = append(messages, AliMessage{
+			Content: message.StringContent(),
+			Role:    strings.ToLower(message.Role),
+		})
+	}
 
 	return &AliChatRequest{
 		Model: request.Model,
 		Input: AliInput{
 			//Prompt: prompt,
 			Messages: messages,
 		},
 		//Parameters: AliParameters{ // ChatGPT's parameters are not compatible with Ali's
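The removed branch hand-enforced the older DashScope conversation constraints: system message first, strictly alternating user/assistant turns, adjacent same-role messages merged, and placeholder "?" turns inserted where needed. The replacement simply forwards every OpenAI message with its role lower-cased. A minimal, self-contained sketch of that pass-through behavior (the types below are simplified stand-ins for the repo's Message and AliMessage, not the real definitions):

package main

import (
	"fmt"
	"strings"
)

// Simplified stand-ins for the repo's message types, for illustration only.
type aliMessage struct {
	Content string
	Role    string
}

type openAIMessage struct {
	Role    string
	Content string
}

// Mirrors the new requestOpenAI2Ali loop: no merging, no reordering,
// every incoming message is forwarded with a lower-cased role.
func toAliMessages(in []openAIMessage) []aliMessage {
	out := make([]aliMessage, 0, len(in))
	for _, m := range in {
		out = append(out, aliMessage{Content: m.Content, Role: strings.ToLower(m.Role)})
	}
	return out
}

func main() {
	fmt.Println(toAliMessages([]openAIMessage{
		{Role: "System", Content: "You are a helpful assistant."},
		{Role: "user", Content: "Hi"},
		{Role: "assistant", Content: "Hello!"},
		{Role: "user", Content: "What can you do?"},
	}))
}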
@@ -69,7 +69,7 @@ const EditChannel = () => {
         localModels = ['ERNIE-Bot', 'ERNIE-Bot-turbo', 'ERNIE-Bot-4', 'Embedding-V1'];
         break;
       case 17:
-        localModels = ['qwen-turbo', 'qwen-plus', 'text-embedding-v1'];
+        localModels = ['qwen-turbo', 'qwen-plus', 'qwen-max', 'qwen-max-longcontext', 'text-embedding-v1'];
         break;
       case 16:
         localModels = ['chatglm_turbo', 'chatglm_pro', 'chatglm_std', 'chatglm_lite'];