Merge 8726729ade into fdd7bf41c0
commit 8ac0599955
@@ -5,15 +5,15 @@ COPY ./VERSION .
 COPY ./web .

 WORKDIR /web/default
-RUN npm install
+RUN npm config set registry https://mirrors.huaweicloud.com/repository/npm/ && npm install
 RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat VERSION) npm run build

 WORKDIR /web/berry
-RUN npm install
+RUN npm config set registry https://mirrors.huaweicloud.com/repository/npm/ && npm install
 RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat VERSION) npm run build

 WORKDIR /web/air
-RUN npm install
+RUN npm config set registry https://mirrors.huaweicloud.com/repository/npm/ && npm install
 RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat VERSION) npm run build

 FROM golang:alpine AS builder2
@@ -149,7 +149,24 @@ func responseAli2OpenAI(response *ChatResponse) *openai.TextResponse {
 	return &fullTextResponse
 }

-func streamResponseAli2OpenAI(aliResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
+func streamResponseAli2OpenAI(aliResponse *ChatResponse) interface{} {
+	if aliResponse.Code != "" {
+		var choice openai.ChatCompletionsStreamResponseChoice
+		choice.Index = 0
+		choice.Delta = model.Message{
+			Role:    "assistant",
+			Content: "",
+		}
+		response := openai.ChatCompletionsErrorStreamResponse{
+			Id:        aliResponse.RequestId,
+			Object:    "chat.completion.chunk",
+			Created:   helper.GetTimestamp(),
+			Model:     "qwen",
+			ErrorCode: aliResponse.Code,
+			Choices:   []openai.ChatCompletionsStreamResponseChoice{choice},
+		}
+		return &response
+	}
 	if len(aliResponse.Output.Choices) == 0 {
 		return nil
 	}
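Note (not part of the diff): with the return type widened to interface{}, the value produced by streamResponseAli2OpenAI can now be a regular *openai.ChatCompletionsStreamResponse, the new *openai.ChatCompletionsErrorStreamResponse, or nil when the upstream event carries no choices. The sketch below shows one way a caller could branch on that value; it assumes it sits in the same package as the code above, reusing only the openai, render, and logger identifiers that appear in this diff, and writeStreamEvent is a hypothetical helper name, not something added by this commit.

// Sketch only; assumes the same package and imports as the diff above.
// writeStreamEvent is a hypothetical helper, not part of this commit.
func writeStreamEvent(c *gin.Context, aliResponse *ChatResponse) (done bool) {
	switch v := streamResponseAli2OpenAI(aliResponse).(type) {
	case *openai.ChatCompletionsErrorStreamResponse:
		// Upstream reported an error code: emit the error chunk and end the stream.
		if err := render.ObjectData(c, v); err != nil {
			logger.SysError(err.Error())
		}
		render.Done(c)
		return true
	case *openai.ChatCompletionsStreamResponse:
		// Normal chunk: forward it unchanged.
		if err := render.ObjectData(c, v); err != nil {
			logger.SysError(err.Error())
		}
		return false
	default:
		// nil (no choices in the upstream payload): skip this event.
		return false
	}
}

The StreamHandler hunk below does effectively the same thing inline, checking aliResponse.Code before deciding whether to keep streaming.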
@@ -201,6 +218,19 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
 			logger.SysError("error unmarshalling stream response: " + err.Error())
 			continue
 		}
+
+		// Check for known error codes and handle accordingly
+		if aliResponse.Code != "" {
+			response := streamResponseAli2OpenAI(&aliResponse)
+
+			err = render.ObjectData(c, response)
+			if err != nil {
+				logger.SysError(err.Error())
+			}
+			render.Done(c)
+			return nil, nil
+		}
+
 		if aliResponse.Usage.OutputTokens != 0 {
 			usage.PromptTokens = aliResponse.Usage.InputTokens
 			usage.CompletionTokens = aliResponse.Usage.OutputTokens
@@ -245,6 +275,8 @@ func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
 	if err != nil {
 		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
 	}
+
+	// Check for known error codes and handle accordingly
 	if aliResponse.Code != "" {
 		return &model.ErrorWithStatusCode{
 			Error: model.Error{
@@ -256,6 +288,7 @@ func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
 			StatusCode: resp.StatusCode,
 		}, nil
 	}

 	fullTextResponse := responseAli2OpenAI(&aliResponse)
+	fullTextResponse.Model = "qwen"
 	jsonResponse, err := json.Marshal(fullTextResponse)
@@ -97,6 +97,16 @@ type TextResponse struct {
 	model.Usage `json:"usage"`
 }

+type ErrorTextResponse struct {
+	Id        string               `json:"id"`
+	Model     string               `json:"model,omitempty"`
+	Object    string               `json:"object"`
+	ErrorCode string               `json:"error_code"`
+	Created   int64                `json:"created"`
+	Choices   []TextResponseChoice `json:"choices"`
+	model.Usage `json:"usage"`
+}
+
 type EmbeddingResponseItem struct {
 	Object string `json:"object"`
 	Index  int    `json:"index"`
@@ -137,6 +147,16 @@ type ChatCompletionsStreamResponse struct {
 	Usage *model.Usage `json:"usage,omitempty"`
 }

+type ChatCompletionsErrorStreamResponse struct {
+	Id        string                                `json:"id"`
+	Object    string                                `json:"object"`
+	Created   int64                                 `json:"created"`
+	ErrorCode string                                `json:"error_code"`
+	Model     string                                `json:"model"`
+	Choices   []ChatCompletionsStreamResponseChoice `json:"choices"`
+	Usage     *model.Usage                          `json:"usage,omitempty"`
+}
+
 type CompletionsStreamResponse struct {
 	Choices []struct {
 		Text string `json:"text"`
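Note (not part of the diff): ChatCompletionsErrorStreamResponse mirrors the regular stream chunk but carries an extra error_code field. The self-contained sketch below marshals a local, simplified copy of the struct (field names and JSON tags taken from the hunk above; the choice/delta types are stand-ins, the optional usage field is omitted, and all values are illustrative) just to show the JSON shape such an error chunk would have on the wire.

// Illustrative only: local stand-ins for the repo's types, used to show the
// serialized form of the new error chunk. Values below are made up.
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

type streamChoice struct {
	Index int     `json:"index"`
	Delta message `json:"delta"`
}

// Field names and json tags copied from the ChatCompletionsErrorStreamResponse hunk.
type chatCompletionsErrorStreamResponse struct {
	Id        string         `json:"id"`
	Object    string         `json:"object"`
	Created   int64          `json:"created"`
	ErrorCode string         `json:"error_code"`
	Model     string         `json:"model"`
	Choices   []streamChoice `json:"choices"`
}

func main() {
	chunk := chatCompletionsErrorStreamResponse{
		Id:        "example-request-id",
		Object:    "chat.completion.chunk",
		Created:   time.Now().Unix(),
		ErrorCode: "InvalidParameter", // illustrative upstream error code
		Model:     "qwen",
		Choices:   []streamChoice{{Index: 0, Delta: message{Role: "assistant", Content: ""}}},
	}
	b, _ := json.Marshal(chunk)
	fmt.Println(string(b))
}

Running it prints a single JSON object whose error_code field is what distinguishes it from a normal stream chunk.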