refactor: abusing goroutines and channel (#1561)

* refactor: abusing goroutines

* fix: trim data prefix

* refactor: move functions to render package

* refactor: add back trim & flush

---------

Co-authored-by: JustSong <quanpengsong@gmail.com>
zijiren 2024-06-30 18:36:33 +08:00 committed by GitHub
parent ae1cd29f94
commit b21b3b5b46
14 changed files with 614 additions and 728 deletions
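
Most adaptors below change in the same way: the old handlers spawned a goroutine that pushed scanned SSE lines into a dataChan/stopChan pair and drained them inside c.Stream, while the new handlers range over the scanner directly and write through the new common/render helpers. A minimal before/after sketch of that shape (schematic only; process stands in for the adaptor-specific parsing and is not a real function in this repo):

package example

import (
	"bufio"
	"io"

	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/common/render"
)

// streamBefore sketches the pattern this commit removes: a per-request
// goroutine feeding channels that are drained by c.Stream.
func streamBefore(c *gin.Context, scanner *bufio.Scanner, process func(string)) {
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			dataChan <- scanner.Text()
		}
		stopChan <- true
	}()
	common.SetEventStreamHeaders(c)
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			process(data)
			return true
		case <-stopChan:
			return false
		}
	})
}

// streamAfter sketches the replacement: consume the scanner in place and
// flush each event through the render helpers.
func streamAfter(c *gin.Context, scanner *bufio.Scanner, process func(string)) {
	common.SetEventStreamHeaders(c)
	for scanner.Scan() {
		process(scanner.Text())
	}
	if err := scanner.Err(); err != nil {
		logger.SysError("error reading stream: " + err.Error())
	}
	render.Done(c)
}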

common/render/render.go (new file)

@@ -0,0 +1,29 @@
package render

import (
	"encoding/json"
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"strings"
)

func StringData(c *gin.Context, str string) {
	str = strings.TrimPrefix(str, "data: ")
	str = strings.TrimSuffix(str, "\r")
	c.Render(-1, common.CustomEvent{Data: "data: " + str})
	c.Writer.Flush()
}

func ObjectData(c *gin.Context, object interface{}) error {
	jsonData, err := json.Marshal(object)
	if err != nil {
		return fmt.Errorf("error marshalling object: %w", err)
	}
	StringData(c, string(jsonData))
	return nil
}

func Done(c *gin.Context) {
	StringData(c, "[DONE]")
}
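
A hedged usage sketch of these helpers from a stream handler's point of view (the handler name and event loop are simplified placeholders, not the real adaptor code): StringData trims a leading "data: " and trailing "\r", re-adds the "data: " prefix and flushes; ObjectData marshals a value to JSON first; Done emits the final "data: [DONE]" event.

package example

import (
	"bufio"
	"net/http"
	"strings"

	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/common/render"
)

// relaySSE is a placeholder handler that re-emits an upstream SSE body to the
// client event by event, using the render helpers instead of goroutines and channels.
func relaySSE(c *gin.Context, resp *http.Response) {
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(bufio.ScanLines)

	common.SetEventStreamHeaders(c)

	for scanner.Scan() {
		line := scanner.Text()
		if !strings.HasPrefix(line, "data:") {
			continue // skip blank lines and other SSE fields
		}
		// StringData strips the "data: " prefix itself, so raw upstream lines
		// can be passed straight through.
		render.StringData(c, line)
	}
	if err := scanner.Err(); err != nil {
		logger.SysError("error reading stream: " + err.Error())
	}
	render.Done(c)
	_ = resp.Body.Close()
}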

@@ -4,6 +4,12 @@ import (
	"bufio"
	"encoding/json"
	"fmt"
+	"github.com/songquanpeng/one-api/common/render"
+	"io"
+	"net/http"
+	"strconv"
+	"strings"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/helper"
@@ -12,10 +18,6 @@ import (
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/model"
-	"io"
-	"net/http"
-	"strconv"
-	"strings"
)

// https://docs.aiproxy.io/dev/library#使用已经定制好的知识库进行对话问答

@@ -89,6 +91,7 @@ func streamResponseAIProxyLibrary2OpenAI(response *LibraryStreamResponse) *opena
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
	var usage model.Usage
+	var documents []LibraryDocument
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
@@ -102,60 +105,48 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
		}
		return 0, nil, nil
	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
+	common.SetEventStreamHeaders(c)
-	go func() {
	for scanner.Scan() {
		data := scanner.Text()
-		if len(data) < 5 { // ignore blank line or wrong format
-			continue
-		}
-		if data[:5] != "data:" {
+		if len(data) < 5 || data[:5] != "data:" {
			continue
		}
		data = data[5:]
-		dataChan <- data
-	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	var documents []LibraryDocument
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
		var AIProxyLibraryResponse LibraryStreamResponse
		err := json.Unmarshal([]byte(data), &AIProxyLibraryResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
		if len(AIProxyLibraryResponse.Documents) != 0 {
			documents = AIProxyLibraryResponse.Documents
		}
		response := streamResponseAIProxyLibrary2OpenAI(&AIProxyLibraryResponse)
-		jsonResponse, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-		return true
-	case <-stopChan:
+	}
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
	response := documentsAIProxyLibrary(documents)
-	jsonResponse, err := json.Marshal(response)
+	err := render.ObjectData(c, response)
	if err != nil {
-		logger.SysError("error marshalling stream response: " + err.Error())
-		return true
+		logger.SysError(err.Error())
	}
-	c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-	c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-	return false
-	}
-	})
-	err := resp.Body.Close()
+	render.Done(c)
+	err = resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, &usage
}

@@ -3,15 +3,17 @@ package ali
import (
	"bufio"
	"encoding/json"
+	"github.com/songquanpeng/one-api/common/render"
+	"io"
+	"net/http"
+	"strings"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/helper"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/model"
-	"io"
-	"net/http"
-	"strings"
)

// https://help.aliyun.com/document_detail/613695.html?spm=a2c4g.2399480.0.0.1adb778fAdzP9w#341800c0f8w0r

@@ -181,32 +183,21 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
		}
		return 0, nil, nil
	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
+	common.SetEventStreamHeaders(c)
-	go func() {
	for scanner.Scan() {
		data := scanner.Text()
-		if len(data) < 5 { // ignore blank line or wrong format
-			continue
-		}
-		if data[:5] != "data:" {
+		if len(data) < 5 || data[:5] != "data:" {
			continue
		}
		data = data[5:]
-		dataChan <- data
-	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	//lastResponseText := ""
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
		var aliResponse ChatResponse
		err := json.Unmarshal([]byte(data), &aliResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
		if aliResponse.Usage.OutputTokens != 0 {
			usage.PromptTokens = aliResponse.Usage.InputTokens
@@ -215,22 +206,20 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
		}
		response := streamResponseAli2OpenAI(&aliResponse)
		if response == nil {
-			return true
+			continue
		}
-		//response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
-		//lastResponseText = aliResponse.Output.Text
-		jsonResponse, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-		return true
-	case <-stopChan:
-		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-		return false
	}
-	})
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+	render.Done(c)
	err := resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil

@@ -4,6 +4,7 @@ import (
	"bufio"
	"encoding/json"
	"fmt"
+	"github.com/songquanpeng/one-api/common/render"
	"io"
	"net/http"
	"strings"
@@ -169,64 +170,59 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
		}
		return 0, nil, nil
	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
-	go func() {
-		for scanner.Scan() {
-			data := scanner.Text()
-			if len(data) < 6 {
-				continue
-			}
-			if !strings.HasPrefix(data, "data:") {
-				continue
-			}
-			data = strings.TrimPrefix(data, "data:")
-			dataChan <- data
-		}
-		stopChan <- true
-	}()
	common.SetEventStreamHeaders(c)
	var usage model.Usage
	var modelName string
	var id string
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
-			// some implementations may add \r at the end of data
+	for scanner.Scan() {
+		data := scanner.Text()
+		if len(data) < 6 || !strings.HasPrefix(data, "data:") {
+			continue
+		}
+		data = strings.TrimPrefix(data, "data:")
		data = strings.TrimSpace(data)
		var claudeResponse StreamResponse
		err := json.Unmarshal([]byte(data), &claudeResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
		response, meta := StreamResponseClaude2OpenAI(&claudeResponse)
		if meta != nil {
			usage.PromptTokens += meta.Usage.InputTokens
			usage.CompletionTokens += meta.Usage.OutputTokens
			modelName = meta.Model
			id = fmt.Sprintf("chatcmpl-%s", meta.Id)
-			return true
+			continue
		}
		if response == nil {
-			return true
+			continue
		}
		response.Id = id
		response.Model = modelName
		response.Created = createdTime
-		jsonStr, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
-		return true
-	case <-stopChan:
-		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-		return false
	}
-	})
-	_ = resp.Body.Close()
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+	render.Done(c)
+	err := resp.Body.Close()
+	if err != nil {
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
	return nil, &usage
}

@@ -5,6 +5,13 @@ import (
	"encoding/json"
	"errors"
	"fmt"
+	"github.com/songquanpeng/one-api/common/render"
+	"io"
+	"net/http"
+	"strings"
+	"sync"
+	"time"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/client"
@@ -12,11 +19,6 @@ import (
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/model"
-	"io"
-	"net/http"
-	"strings"
-	"sync"
-	"time"
)

// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/flfmc9do2

@@ -137,40 +139,22 @@ func embeddingResponseBaidu2OpenAI(response *EmbeddingResponse) *openai.Embeddin
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
	var usage model.Usage
	scanner := bufio.NewScanner(resp.Body)
-	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-		if atEOF && len(data) == 0 {
-			return 0, nil, nil
-		}
-		if i := strings.Index(string(data), "\n"); i >= 0 {
-			return i + 1, data[0:i], nil
-		}
-		if atEOF {
-			return len(data), data, nil
-		}
-		return 0, nil, nil
-	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
-	go func() {
+	scanner.Split(bufio.ScanLines)
+	common.SetEventStreamHeaders(c)
	for scanner.Scan() {
		data := scanner.Text()
-		if len(data) < 6 { // ignore blank line or wrong format
+		if len(data) < 6 {
			continue
		}
		data = data[6:]
-		dataChan <- data
-	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
		var baiduResponse ChatStreamResponse
		err := json.Unmarshal([]byte(data), &baiduResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
		if baiduResponse.Usage.TotalTokens != 0 {
			usage.TotalTokens = baiduResponse.Usage.TotalTokens
@@ -178,18 +162,18 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
			usage.CompletionTokens = baiduResponse.Usage.TotalTokens - baiduResponse.Usage.PromptTokens
		}
		response := streamResponseBaidu2OpenAI(&baiduResponse)
-		jsonResponse, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-		return true
-	case <-stopChan:
-		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-		return false
	}
-	})
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+	render.Done(c)
	err := resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil

@@ -2,8 +2,8 @@ package cloudflare
import (
	"bufio"
-	"bytes"
	"encoding/json"
+	"github.com/songquanpeng/one-api/common/render"
	"io"
	"net/http"
	"strings"
@@ -31,7 +31,6 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
	}
}
-
func ResponseCloudflare2OpenAI(cloudflareResponse *Response) *openai.TextResponse {
	choice := openai.TextResponseChoice{
		Index: 0,
@@ -63,67 +62,54 @@ func StreamResponseCloudflare2OpenAI(cloudflareResponse *StreamResponse) *openai
func StreamHandler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
	scanner := bufio.NewScanner(resp.Body)
-	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-		if atEOF && len(data) == 0 {
-			return 0, nil, nil
-		}
-		if i := bytes.IndexByte(data, '\n'); i >= 0 {
-			return i + 1, data[0:i], nil
-		}
-		if atEOF {
-			return len(data), data, nil
-		}
-		return 0, nil, nil
-	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
-	go func() {
+	scanner.Split(bufio.ScanLines)
+	common.SetEventStreamHeaders(c)
+	id := helper.GetResponseID(c)
+	responseModel := c.GetString("original_model")
+	var responseText string
	for scanner.Scan() {
		data := scanner.Text()
		if len(data) < len("data: ") {
			continue
		}
		data = strings.TrimPrefix(data, "data: ")
-		dataChan <- data
-	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	id := helper.GetResponseID(c)
-	responseModel := c.GetString("original_model")
-	var responseText string
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
-			// some implementations may add \r at the end of data
		data = strings.TrimSuffix(data, "\r")
		var cloudflareResponse StreamResponse
		err := json.Unmarshal([]byte(data), &cloudflareResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
		response := StreamResponseCloudflare2OpenAI(&cloudflareResponse)
		if response == nil {
-			return true
+			continue
		}
		responseText += cloudflareResponse.Response
		response.Id = id
		response.Model = responseModel
-		jsonStr, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
-		return true
-	case <-stopChan:
-		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-		return false
	}
-	})
-	_ = resp.Body.Close()
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
	}
+	render.Done(c)
+	err := resp.Body.Close()
+	if err != nil {
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
	usage := openai.ResponseText2Usage(responseText, responseModel, promptTokens)
	return nil, usage
}

@@ -2,9 +2,9 @@ package cohere
import (
	"bufio"
-	"bytes"
	"encoding/json"
	"fmt"
+	"github.com/songquanpeng/one-api/common/render"
	"io"
	"net/http"
	"strings"
@@ -134,66 +134,53 @@ func ResponseCohere2OpenAI(cohereResponse *Response) *openai.TextResponse {
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
	createdTime := helper.GetTimestamp()
	scanner := bufio.NewScanner(resp.Body)
-	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-		if atEOF && len(data) == 0 {
-			return 0, nil, nil
-		}
-		if i := bytes.IndexByte(data, '\n'); i >= 0 {
-			return i + 1, data[0:i], nil
-		}
-		if atEOF {
-			return len(data), data, nil
-		}
-		return 0, nil, nil
-	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
-	go func() {
-		for scanner.Scan() {
-			data := scanner.Text()
-			dataChan <- data
-		}
-		stopChan <- true
-	}()
+	scanner.Split(bufio.ScanLines)
	common.SetEventStreamHeaders(c)
	var usage model.Usage
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
-			// some implementations may add \r at the end of data
+	for scanner.Scan() {
+		data := scanner.Text()
		data = strings.TrimSuffix(data, "\r")
		var cohereResponse StreamResponse
		err := json.Unmarshal([]byte(data), &cohereResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
		response, meta := StreamResponseCohere2OpenAI(&cohereResponse)
		if meta != nil {
			usage.PromptTokens += meta.Meta.Tokens.InputTokens
			usage.CompletionTokens += meta.Meta.Tokens.OutputTokens
-			return true
+			continue
		}
		if response == nil {
-			return true
+			continue
		}
		response.Id = fmt.Sprintf("chatcmpl-%d", createdTime)
		response.Model = c.GetString("original_model")
		response.Created = createdTime
-		jsonStr, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
-		return true
-	case <-stopChan:
-		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-		return false
	}
-	})
-	_ = resp.Body.Close()
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+	render.Done(c)
+	err := resp.Body.Close()
+	if err != nil {
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
	return nil, &usage
}

@@ -4,6 +4,11 @@ import (
	"bufio"
	"encoding/json"
	"fmt"
+	"github.com/songquanpeng/one-api/common/render"
+	"io"
+	"net/http"
+	"strings"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/conv"
@@ -12,9 +17,6 @@ import (
	"github.com/songquanpeng/one-api/relay/adaptor/coze/constant/messagetype"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/model"
-	"io"
-	"net/http"
-	"strings"
)

// https://www.coze.com/open

@@ -109,69 +111,54 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
	var responseText string
	createdTime := helper.GetTimestamp()
	scanner := bufio.NewScanner(resp.Body)
-	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-		if atEOF && len(data) == 0 {
-			return 0, nil, nil
-		}
-		if i := strings.Index(string(data), "\n"); i >= 0 {
-			return i + 1, data[0:i], nil
-		}
-		if atEOF {
-			return len(data), data, nil
-		}
-		return 0, nil, nil
-	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
-	go func() {
+	scanner.Split(bufio.ScanLines)
+	common.SetEventStreamHeaders(c)
+	var modelName string
	for scanner.Scan() {
		data := scanner.Text()
-		if len(data) < 5 {
-			continue
-		}
-		if !strings.HasPrefix(data, "data:") {
+		if len(data) < 5 || !strings.HasPrefix(data, "data:") {
			continue
		}
		data = strings.TrimPrefix(data, "data:")
-		dataChan <- data
-	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	var modelName string
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
-			// some implementations may add \r at the end of data
		data = strings.TrimSuffix(data, "\r")
		var cozeResponse StreamResponse
		err := json.Unmarshal([]byte(data), &cozeResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
		response, _ := StreamResponseCoze2OpenAI(&cozeResponse)
		if response == nil {
-			return true
+			continue
		}
		for _, choice := range response.Choices {
			responseText += conv.AsString(choice.Delta.Content)
		}
		response.Model = modelName
		response.Created = createdTime
-		jsonStr, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
-		return true
-	case <-stopChan:
-		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-		return false
	}
-	})
-	_ = resp.Body.Close()
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+	render.Done(c)
+	err := resp.Body.Close()
+	if err != nil {
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
	return nil, &responseText
}

@@ -4,6 +4,7 @@ import (
	"bufio"
	"encoding/json"
	"fmt"
+	"github.com/songquanpeng/one-api/common/render"
	"io"
	"net/http"
	"strings"
@@ -275,21 +276,10 @@ func embeddingResponseGemini2OpenAI(response *EmbeddingResponse) *openai.Embeddi
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
	responseText := ""
	scanner := bufio.NewScanner(resp.Body)
-	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-		if atEOF && len(data) == 0 {
-			return 0, nil, nil
-		}
-		if i := strings.Index(string(data), "\n"); i >= 0 {
-			return i + 1, data[0:i], nil
-		}
-		if atEOF {
-			return len(data), data, nil
-		}
-		return 0, nil, nil
-	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
-	go func() {
+	scanner.Split(bufio.ScanLines)
+	common.SetEventStreamHeaders(c)
	for scanner.Scan() {
		data := scanner.Text()
		data = strings.TrimSpace(data)
@@ -298,41 +288,38 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
		}
		data = strings.TrimPrefix(data, "data: ")
		data = strings.TrimSuffix(data, "\"")
-		dataChan <- data
-	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
		var geminiResponse ChatResponse
		err := json.Unmarshal([]byte(data), &geminiResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
		response := streamResponseGeminiChat2OpenAI(&geminiResponse)
		if response == nil {
-			return true
+			continue
		}
		responseText += response.Choices[0].Delta.StringContent()
-		jsonResponse, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-		return true
-	case <-stopChan:
-		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-		return false
	}
-	})
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+	render.Done(c)
	err := resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
	}
	return nil, responseText
}

@@ -5,12 +5,14 @@ import (
	"context"
	"encoding/json"
	"fmt"
-	"github.com/songquanpeng/one-api/common/helper"
-	"github.com/songquanpeng/one-api/common/random"
+	"github.com/songquanpeng/one-api/common/render"
	"io"
	"net/http"
	"strings"
+	"github.com/songquanpeng/one-api/common/helper"
+	"github.com/songquanpeng/one-api/common/random"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/image"
@@ -105,54 +107,51 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "}\n"); i >= 0 {
-			return i + 2, data[0:i], nil
+			return i + 2, data[0 : i+1], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
+	common.SetEventStreamHeaders(c)
-	go func() {
	for scanner.Scan() {
		data := strings.TrimPrefix(scanner.Text(), "}")
-		dataChan <- data + "}"
+		data = data + "}"
-	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
		var ollamaResponse ChatResponse
		err := json.Unmarshal([]byte(data), &ollamaResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
		if ollamaResponse.EvalCount != 0 {
			usage.PromptTokens = ollamaResponse.PromptEvalCount
			usage.CompletionTokens = ollamaResponse.EvalCount
			usage.TotalTokens = ollamaResponse.PromptEvalCount + ollamaResponse.EvalCount
		}
		response := streamResponseOllama2OpenAI(&ollamaResponse)
-		jsonResponse, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-		return true
-	case <-stopChan:
-		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-		return false
	}
-	})
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
	}
+	render.Done(c)
	err := resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, &usage
}

@@ -4,6 +4,7 @@ import (
	"bufio"
	"bytes"
	"encoding/json"
+	"github.com/songquanpeng/one-api/common/render"
	"io"
	"net/http"
	"strings"
@@ -25,22 +26,11 @@ const (
func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) {
	responseText := ""
	scanner := bufio.NewScanner(resp.Body)
-	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-		if atEOF && len(data) == 0 {
-			return 0, nil, nil
-		}
-		if i := strings.Index(string(data), "\n"); i >= 0 {
-			return i + 1, data[0:i], nil
-		}
-		if atEOF {
-			return len(data), data, nil
-		}
-		return 0, nil, nil
-	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
+	scanner.Split(bufio.ScanLines)
	var usage *model.Usage
-	go func() {
+	common.SetEventStreamHeaders(c)
	for scanner.Scan() {
		data := scanner.Text()
		if len(data) < dataPrefixLength { // ignore blank line or wrong format
@@ -50,7 +40,7 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
			continue
		}
		if strings.HasPrefix(data[dataPrefixLength:], done) {
-			dataChan <- data
+			render.StringData(c, data)
			continue
		}
		switch relayMode {
@@ -59,14 +49,14 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
			err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
			if err != nil {
				logger.SysError("error unmarshalling stream response: " + err.Error())
-				dataChan <- data // if error happened, pass the data to client
+				render.StringData(c, data) // if error happened, pass the data to client
				continue // just ignore the error
			}
			if len(streamResponse.Choices) == 0 {
				// but for empty choice, we should not pass it to client, this is for azure
				continue // just ignore empty choice
			}
-			dataChan <- data
+			render.StringData(c, data)
			for _, choice := range streamResponse.Choices {
				responseText += conv.AsString(choice.Delta.Content)
			}
@@ -74,7 +64,7 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
				usage = streamResponse.Usage
			}
		case relaymode.Completions:
-			dataChan <- data
+			render.StringData(c, data)
			var streamResponse CompletionsStreamResponse
			err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
			if err != nil {
@@ -86,27 +76,18 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
			}
		}
	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
-			if strings.HasPrefix(data, "data: [DONE]") {
-				data = data[:12]
-			}
-			// some implementations may add \r at the end of data
-			data = strings.TrimSuffix(data, "\r")
-			c.Render(-1, common.CustomEvent{Data: data})
-			return true
-		case <-stopChan:
-			return false
-		}
-	})
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+	render.Done(c)
	err := resp.Body.Close()
	if err != nil {
		return ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), "", nil
	}
	return nil, responseText, usage
}

@@ -3,6 +3,10 @@ package palm
import (
	"encoding/json"
	"fmt"
+	"github.com/songquanpeng/one-api/common/render"
+	"io"
+	"net/http"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/helper"
@@ -11,8 +15,6 @@ import (
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/model"
-	"io"
-	"net/http"
)

// https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage#request-body

@@ -77,58 +79,51 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
	responseText := ""
	responseId := fmt.Sprintf("chatcmpl-%s", random.GetUUID())
	createdTime := helper.GetTimestamp()
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
+	common.SetEventStreamHeaders(c)
-	go func() {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		logger.SysError("error reading stream response: " + err.Error())
-		stopChan <- true
-		return
+		err := resp.Body.Close()
+		if err != nil {
+			return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
+		}
+		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), ""
	}
	err = resp.Body.Close()
	if err != nil {
-		logger.SysError("error closing stream response: " + err.Error())
-		stopChan <- true
-		return
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
	}
	var palmResponse ChatResponse
	err = json.Unmarshal(responseBody, &palmResponse)
	if err != nil {
		logger.SysError("error unmarshalling stream response: " + err.Error())
-		stopChan <- true
-		return
+		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), ""
	}
	fullTextResponse := streamResponsePaLM2OpenAI(&palmResponse)
	fullTextResponse.Id = responseId
	fullTextResponse.Created = createdTime
	if len(palmResponse.Candidates) > 0 {
		responseText = palmResponse.Candidates[0].Content
	}
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		logger.SysError("error marshalling stream response: " + err.Error())
-		stopChan <- true
-		return
+		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), ""
	}
-	dataChan <- string(jsonResponse)
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
-			c.Render(-1, common.CustomEvent{Data: "data: " + data})
-			return true
-		case <-stopChan:
-			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-			return false
-		}
-	})
-	err := resp.Body.Close()
+	err = render.ObjectData(c, string(jsonResponse))
	if err != nil {
-		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
+		logger.SysError(err.Error())
	}
+	render.Done(c)
	return nil, responseText
}

@@ -8,6 +8,13 @@ import (
	"encoding/json"
	"errors"
	"fmt"
+	"github.com/songquanpeng/one-api/common/render"
+	"io"
+	"net/http"
+	"strconv"
+	"strings"
+	"time"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/conv"
@@ -17,11 +24,6 @@ import (
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/model"
-	"io"
-	"net/http"
-	"strconv"
-	"strings"
-	"time"
)

func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {

@@ -87,64 +89,46 @@ func streamResponseTencent2OpenAI(TencentResponse *ChatResponse) *openai.ChatCom
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
	var responseText string
	scanner := bufio.NewScanner(resp.Body)
-	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
-		if atEOF && len(data) == 0 {
-			return 0, nil, nil
-		}
-		if i := strings.Index(string(data), "\n"); i >= 0 {
-			return i + 1, data[0:i], nil
-		}
-		if atEOF {
-			return len(data), data, nil
-		}
-		return 0, nil, nil
-	})
-	dataChan := make(chan string)
-	stopChan := make(chan bool)
-	go func() {
+	scanner.Split(bufio.ScanLines)
+	common.SetEventStreamHeaders(c)
	for scanner.Scan() {
		data := scanner.Text()
-		if len(data) < 5 { // ignore blank line or wrong format
+		if len(data) < 5 || !strings.HasPrefix(data, "data:") {
			continue
		}
-		if data[:5] != "data:" {
-			continue
-		}
-		data = data[5:]
-		dataChan <- data
-	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
-			var TencentResponse ChatResponse
-			err := json.Unmarshal([]byte(data), &TencentResponse)
+		data = strings.TrimPrefix(data, "data:")
+		var tencentResponse ChatResponse
+		err := json.Unmarshal([]byte(data), &tencentResponse)
		if err != nil {
			logger.SysError("error unmarshalling stream response: " + err.Error())
-			return true
+			continue
		}
-		response := streamResponseTencent2OpenAI(&TencentResponse)
+		response := streamResponseTencent2OpenAI(&tencentResponse)
		if len(response.Choices) != 0 {
			responseText += conv.AsString(response.Choices[0].Delta.Content)
		}
-		jsonResponse, err := json.Marshal(response)
+		err = render.ObjectData(c, response)
		if err != nil {
-			logger.SysError("error marshalling stream response: " + err.Error())
-			return true
+			logger.SysError(err.Error())
		}
-		c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-		return true
-	case <-stopChan:
-		c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-		return false
	}
-	})
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+	render.Done(c)
	err := resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
	}
	return nil, responseText
}

@@ -3,6 +3,13 @@ package zhipu
import (
	"bufio"
	"encoding/json"
+	"github.com/songquanpeng/one-api/common/render"
+	"io"
+	"net/http"
+	"strings"
+	"sync"
+	"time"
	"github.com/gin-gonic/gin"
	"github.com/golang-jwt/jwt"
	"github.com/songquanpeng/one-api/common"
@@ -11,11 +18,6 @@ import (
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/model"
-	"io"
-	"net/http"
-	"strings"
-	"sync"
-	"time"
)

// https://open.bigmodel.cn/doc/api#chatglm_std

@@ -155,10 +157,9 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
		}
		return 0, nil, nil
	})
-	dataChan := make(chan string)
-	metaChan := make(chan string)
-	stopChan := make(chan bool)
-	go func() {
+	common.SetEventStreamHeaders(c)
	for scanner.Scan() {
		data := scanner.Text()
		lines := strings.Split(data, "\n")
@@ -166,55 +167,45 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
			if len(line) < 5 {
				continue
			}
-			if line[:5] == "data:" {
-				dataChan <- line[5:]
+			if strings.HasPrefix(line, "data:") {
+				dataSegment := line[5:]
				if i != len(lines)-1 {
-					dataChan <- "\n"
+					dataSegment += "\n"
				}
-			} else if line[:5] == "meta:" {
-				metaChan <- line[5:]
-			}
-		}
-	}
-	stopChan <- true
-	}()
-	common.SetEventStreamHeaders(c)
-	c.Stream(func(w io.Writer) bool {
-		select {
-		case data := <-dataChan:
-			response := streamResponseZhipu2OpenAI(data)
-			jsonResponse, err := json.Marshal(response)
+				response := streamResponseZhipu2OpenAI(dataSegment)
+				err := render.ObjectData(c, response)
				if err != nil {
					logger.SysError("error marshalling stream response: " + err.Error())
-					return true
				}
-			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-			return true
-		case data := <-metaChan:
+			} else if strings.HasPrefix(line, "meta:") {
+				metaSegment := line[5:]
				var zhipuResponse StreamMetaResponse
-				err := json.Unmarshal([]byte(data), &zhipuResponse)
+				err := json.Unmarshal([]byte(metaSegment), &zhipuResponse)
				if err != nil {
					logger.SysError("error unmarshalling stream response: " + err.Error())
-					return true
+					continue
				}
				response, zhipuUsage := streamMetaResponseZhipu2OpenAI(&zhipuResponse)
-				jsonResponse, err := json.Marshal(response)
+				err = render.ObjectData(c, response)
				if err != nil {
					logger.SysError("error marshalling stream response: " + err.Error())
-					return true
				}
				usage = zhipuUsage
-				c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
-				return true
-		case <-stopChan:
-			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
-			return false
			}
-	})
+		}
+	}
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+	render.Done(c)
	err := resp.Body.Close()
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, usage
}