feat: add Proxy channel type and relay mode (#1678)

Add the Proxy channel type and relay mode to support proxying requests to custom upstream services.
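For orientation, a minimal client-side sketch of how the new relay mode is meant to be used. The host, channel id (42), path, and token below are placeholders, not values from this commit: requests sent to /v1/oneapi/proxy/{channelId}/... are forwarded to the channel's configured upstream with the prefix stripped (see GetRequestURL below).

package main

import (
    "fmt"
    "io"
    "net/http"
    "strings"
)

func main() {
    // Placeholder host, channel id, path, and token; adjust to your deployment.
    req, err := http.NewRequest(http.MethodPost,
        "http://localhost:3000/v1/oneapi/proxy/42/api/chat",
        strings.NewReader(`{"model":"example"}`))
    if err != nil {
        panic(err)
    }
    req.Header.Set("Authorization", "Bearer sk-xxxx") // one-api token (placeholder)
    req.Header.Set("Content-Type", "application/json")

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    body, _ := io.ReadAll(resp.Body)
    fmt.Println(resp.StatusCode, string(body))
}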
Laisky.Cai
2024-07-22 22:51:19 +08:00
committed by GitHub
parent 296ab013b8
commit c936198ac8
17 changed files with 292 additions and 106 deletions

View File

@@ -15,6 +15,7 @@ import (
"github.com/songquanpeng/one-api/relay/adaptor/ollama"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/adaptor/palm"
"github.com/songquanpeng/one-api/relay/adaptor/proxy"
"github.com/songquanpeng/one-api/relay/adaptor/tencent"
"github.com/songquanpeng/one-api/relay/adaptor/vertexai"
"github.com/songquanpeng/one-api/relay/adaptor/xunfei"
@@ -58,6 +59,8 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
return &deepl.Adaptor{}
case apitype.VertexAI:
return &vertexai.Adaptor{}
case apitype.Proxy:
return &proxy.Adaptor{}
}
return nil
}

View File

@@ -0,0 +1,89 @@
package proxy

import (
    "fmt"
    "io"
    "net/http"
    "strings"

    "github.com/gin-gonic/gin"
    "github.com/pkg/errors"
    "github.com/songquanpeng/one-api/relay/adaptor"
    channelhelper "github.com/songquanpeng/one-api/relay/adaptor"
    "github.com/songquanpeng/one-api/relay/meta"
    "github.com/songquanpeng/one-api/relay/model"
    relaymodel "github.com/songquanpeng/one-api/relay/model"
)

var _ adaptor.Adaptor = new(Adaptor)

const channelName = "proxy"

type Adaptor struct{}

func (a *Adaptor) Init(meta *meta.Meta) {
}

func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
    return nil, errors.New("not implemented")
}

func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
    // copy every upstream header value; Add keeps multi-valued headers (e.g. Set-Cookie) intact
    for k, v := range resp.Header {
        for _, vv := range v {
            c.Writer.Header().Add(k, vv)
        }
    }
    c.Writer.WriteHeader(resp.StatusCode)

    if _, gerr := io.Copy(c.Writer, resp.Body); gerr != nil {
        return nil, &relaymodel.ErrorWithStatusCode{
            StatusCode: http.StatusInternalServerError,
            Error: relaymodel.Error{
                Message: gerr.Error(),
            },
        }
    }

    return nil, nil
}

func (a *Adaptor) GetModelList() (models []string) {
    return nil
}

func (a *Adaptor) GetChannelName() string {
    return channelName
}

// GetRequestURL strips the static /v1/oneapi/proxy/{channelId} prefix and returns
// the real request URL for the upstream service.
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
    prefix := fmt.Sprintf("/v1/oneapi/proxy/%d", meta.ChannelId)
    return meta.BaseURL + strings.TrimPrefix(meta.RequestURLPath, prefix), nil
}

func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
    // forward the incoming request headers (only the first value of each is copied)
    for k, v := range c.Request.Header {
        req.Header.Set(k, v[0])
    }

    // remove unnecessary headers
    req.Header.Del("Host")
    req.Header.Del("Content-Length")
    req.Header.Del("Accept-Encoding")
    req.Header.Del("Connection")

    // set authorization header
    req.Header.Set("Authorization", meta.APIKey)

    return nil
}

func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
    return nil, errors.Errorf("not implemented")
}

func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
    return channelhelper.DoRequestHelper(a, c, meta, requestBody)
}
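
To make the URL rewrite in GetRequestURL concrete, a small standalone sketch (base URL, channel id, and request path are placeholders): the static /v1/oneapi/proxy/{channelId} prefix is trimmed and the remainder is appended to the channel's base URL.

package main

import (
    "fmt"
    "strings"
)

func main() {
    // Mirrors the prefix-stripping in GetRequestURL above; values are placeholders.
    baseURL := "https://upstream.example.com"
    channelId := 42
    requestPath := "/v1/oneapi/proxy/42/api/chat"

    prefix := fmt.Sprintf("/v1/oneapi/proxy/%d", channelId)
    target := baseURL + strings.TrimPrefix(requestPath, prefix)

    fmt.Println(target) // https://upstream.example.com/api/chat
}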

View File

@@ -18,6 +18,7 @@ const (
Cloudflare
DeepL
VertexAI
Proxy
Dummy // this one is only for count, do not add any channel after this
)

View File

@@ -44,5 +44,6 @@ const (
Doubao
Novita
VertextAI
Proxy
Dummy
)

View File

@@ -37,6 +37,8 @@ func ToAPIType(channelType int) int {
apiType = apitype.DeepL
case VertextAI:
apiType = apitype.VertexAI
case Proxy:
apiType = apitype.Proxy
}
return apiType

View File

@@ -44,6 +44,7 @@ var ChannelBaseURLs = []string{
"https://ark.cn-beijing.volces.com", // 40
"https://api.novita.ai/v3/openai", // 41
"", // 42
"", // 43
}
func init() {

relay/controller/proxy.go (new file, 41 lines)
View File

@@ -0,0 +1,41 @@
// Package controller is a package for handling the relay controller
package controller

import (
    "fmt"
    "net/http"

    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common/logger"
    "github.com/songquanpeng/one-api/relay"
    "github.com/songquanpeng/one-api/relay/adaptor/openai"
    "github.com/songquanpeng/one-api/relay/meta"
    relaymodel "github.com/songquanpeng/one-api/relay/model"
)

// RelayProxyHelper proxies the incoming request to the upstream service configured on the channel.
func RelayProxyHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode {
    ctx := c.Request.Context()
    meta := meta.GetByContext(c)

    adaptor := relay.GetAdaptor(meta.APIType)
    if adaptor == nil {
        return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest)
    }
    adaptor.Init(meta)

    resp, err := adaptor.DoRequest(c, meta, c.Request.Body)
    if err != nil {
        logger.Errorf(ctx, "DoRequest failed: %s", err.Error())
        return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
    }

    // relay the upstream response back to the client
    _, respErr := adaptor.DoResponse(c, resp, meta)
    if respErr != nil {
        logger.Errorf(ctx, "DoResponse failed: %+v", respErr)
        return respErr
    }

    return nil
}
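
For context, a hedged sketch of how a router could dispatch proxy-mode requests to RelayProxyHelper. The handler name, package name, and the relaymode import path are assumptions for illustration, not code from this commit; the actual route registration lives in other files of the changeset.

package router // hypothetical package, shown only to illustrate the call site

import (
    "net/http"

    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/relay/controller"
    "github.com/songquanpeng/one-api/relay/relaymode"
)

// RelayProxy is a hypothetical gin handler that forwards proxy-mode requests.
func RelayProxy(c *gin.Context) {
    relayMode := relaymode.GetByPath(c.Request.URL.Path)
    if relayMode != relaymode.Proxy {
        c.JSON(http.StatusBadRequest, gin.H{"error": "not a proxy request"})
        return
    }
    if bizErr := controller.RelayProxyHelper(c, relayMode); bizErr != nil {
        c.JSON(bizErr.StatusCode, gin.H{"error": bizErr.Error})
        return
    }
}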

View File

@@ -18,11 +18,12 @@ type Meta struct {
UserId int
Group string
ModelMapping map[string]string
// BaseURL is the proxy url set in the channel config
BaseURL string
APIKey string
APIType int
Config model.ChannelConfig
IsStream bool
// OriginModelName is the model name from the raw user request
OriginModelName string
// ActualModelName is the model name after mapping

View File

@@ -11,4 +11,6 @@ const (
AudioSpeech
AudioTranscription
AudioTranslation
// Proxy is a special relay mode for proxying requests to a custom upstream
Proxy
)

View File

@@ -24,6 +24,8 @@ func GetByPath(path string) int {
relayMode = AudioTranscription
} else if strings.HasPrefix(path, "/v1/audio/translations") {
relayMode = AudioTranslation
} else if strings.HasPrefix(path, "/v1/oneapi/proxy") {
relayMode = Proxy
}
return relayMode
}
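
A quick illustration of the new path mapping added here, assuming the relaymode import path; the second call shows a non-proxy path for contrast.

package main

import (
    "fmt"

    "github.com/songquanpeng/one-api/relay/relaymode"
)

func main() {
    // Paths under /v1/oneapi/proxy now resolve to the Proxy relay mode.
    fmt.Println(relaymode.GetByPath("/v1/oneapi/proxy/42/api/chat") == relaymode.Proxy) // true
    fmt.Println(relaymode.GetByPath("/v1/chat/completions") == relaymode.Proxy)         // false
}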