Files
new-api/middleware/distributor.go

319 lines
12 KiB
Go
Raw Normal View History

2023-04-23 18:24:11 +08:00
package middleware
import (
2024-07-17 17:01:25 +08:00
"errors"
2023-04-23 18:24:11 +08:00
"fmt"
"net/http"
"one-api/common"
"one-api/constant"
"one-api/dto"
2023-04-23 18:24:11 +08:00
"one-api/model"
relayconstant "one-api/relay/constant"
"one-api/service"
2025-08-08 11:59:04 +08:00
"one-api/setting"
"one-api/setting/ratio_setting"
"one-api/types"
2023-04-23 18:24:11 +08:00
"strconv"
"strings"
"time"
"github.com/gin-gonic/gin"
2023-04-23 18:24:11 +08:00
)
// ModelRequest is the minimal request-body shape used to extract the target
// model (and, for playground requests, an optional group override) from an
// incoming relay request.
type ModelRequest struct {
	Model string `json:"model"`
	Group string `json:"group,omitempty"`
}
2023-04-23 18:24:11 +08:00
// Distribute is a gin middleware that resolves which upstream channel serves
// the incoming request. If the token pins a specific channel id, that channel
// is validated and used; otherwise a channel is selected at random from the
// user's group for the requested model. The selected channel's attributes are
// stored on the context for downstream relay handlers.
func Distribute() func(c *gin.Context) {
	return func(c *gin.Context) {
		var channel *model.Channel
		channelId, ok := common.GetContextKey(c, constant.ContextKeyTokenSpecificChannelId)
		modelRequest, shouldSelectChannel, err := getModelRequest(c)
		if err != nil {
			abortWithOpenAiMessage(c, http.StatusBadRequest, "Invalid request, "+err.Error())
			return
		}
		if ok {
			// The token is pinned to a specific channel: validate and use it.
			id, err := strconv.Atoi(channelId.(string))
			if err != nil {
				abortWithOpenAiMessage(c, http.StatusBadRequest, "无效的渠道 Id")
				return
			}
			channel, err = model.GetChannelById(id, true)
			if err != nil {
				abortWithOpenAiMessage(c, http.StatusBadRequest, "无效的渠道 Id")
				return
			}
			if channel.Status != common.ChannelStatusEnabled {
				abortWithOpenAiMessage(c, http.StatusForbidden, "该渠道已被禁用")
				return
			}
		} else {
			// Select a channel for the user.
			// First enforce the token's model allow-list, if enabled.
			modelLimitEnable := common.GetContextKeyBool(c, constant.ContextKeyTokenModelLimitEnabled)
			if modelLimitEnable {
				s, ok := common.GetContextKey(c, constant.ContextKeyTokenModelLimit)
				if !ok {
					// token model limit is empty, all models are not allowed
					abortWithOpenAiMessage(c, http.StatusForbidden, "该令牌无权访问任何模型")
					return
				}
				var tokenModelLimit map[string]bool
				tokenModelLimit, ok = s.(map[string]bool)
				if !ok {
					// Unexpected type in context: fall back to an empty map,
					// which denies every model below.
					tokenModelLimit = map[string]bool{}
				}
				matchName := ratio_setting.FormatMatchingModelName(modelRequest.Model) // match gpts & thinking-*
				if _, ok := tokenModelLimit[matchName]; !ok {
					abortWithOpenAiMessage(c, http.StatusForbidden, "该令牌无权访问模型 "+modelRequest.Model)
					return
				}
			}
			if shouldSelectChannel {
				if modelRequest.Model == "" {
					abortWithOpenAiMessage(c, http.StatusBadRequest, "未指定模型名称,模型名称不能为空")
					return
				}
				var selectGroup string
				userGroup := common.GetContextKeyString(c, constant.ContextKeyUsingGroup)
				// Playground requests (/pg/chat/completions) may override the
				// group, but only to a group the user is allowed to use.
				if strings.HasPrefix(c.Request.URL.Path, "/pg/chat/completions") {
					playgroundRequest := &dto.PlayGroundRequest{}
					err = common.UnmarshalBodyReusable(c, playgroundRequest)
					if err != nil {
						abortWithOpenAiMessage(c, http.StatusBadRequest, "无效的请求, "+err.Error())
						return
					}
					if playgroundRequest.Group != "" {
						if !setting.GroupInUserUsableGroups(playgroundRequest.Group) && playgroundRequest.Group != userGroup {
							abortWithOpenAiMessage(c, http.StatusForbidden, "无权访问该分组")
							return
						}
						userGroup = playgroundRequest.Group
					}
				}
				channel, selectGroup, err = model.CacheGetRandomSatisfiedChannel(c, userGroup, modelRequest.Model, 0)
				if err != nil {
					showGroup := userGroup
					if userGroup == "auto" {
						// Show which concrete group "auto" resolved to.
						showGroup = fmt.Sprintf("auto(%s)", selectGroup)
					}
					message := fmt.Sprintf("获取分组 %s 下模型 %s 的可用渠道失败数据库一致性已被破坏distributor: %s", showGroup, modelRequest.Model, err.Error())
					// 如果错误,但是渠道不为空,说明是数据库一致性问题
					//if channel != nil {
					//	common.SysError(fmt.Sprintf("渠道不存在:%d", channel.Id))
					//	message = "数据库一致性已被破坏,请联系管理员"
					//}
					abortWithOpenAiMessage(c, http.StatusServiceUnavailable, message)
					return
				}
				if channel == nil {
					abortWithOpenAiMessage(c, http.StatusServiceUnavailable, fmt.Sprintf("分组 %s 下模型 %s 无可用渠道distributor", userGroup, modelRequest.Model))
					return
				}
			}
		}
		common.SetContextKey(c, constant.ContextKeyRequestStartTime, time.Now())
		// NOTE(review): the *types.NewAPIError returned here (nil channel, or
		// no enabled key available) is ignored, so such requests proceed with
		// an incomplete context — confirm whether this should abort instead.
		SetupContextForSelectedChannel(c, channel, modelRequest.Model)
		c.Next()
	}
}
2024-04-04 16:35:44 +08:00
2024-04-07 22:08:11 +08:00
func getModelRequest(c *gin.Context) (*ModelRequest, bool, error) {
var modelRequest ModelRequest
shouldSelectChannel := true
var err error
if strings.Contains(c.Request.URL.Path, "/mj/") {
relayMode := relayconstant.Path2RelayModeMidjourney(c.Request.URL.Path)
if relayMode == relayconstant.RelayModeMidjourneyTaskFetch ||
relayMode == relayconstant.RelayModeMidjourneyTaskFetchByCondition ||
relayMode == relayconstant.RelayModeMidjourneyNotify ||
relayMode == relayconstant.RelayModeMidjourneyTaskImageSeed {
shouldSelectChannel = false
} else {
midjourneyRequest := dto.MidjourneyRequest{}
err = common.UnmarshalBodyReusable(c, &midjourneyRequest)
if err != nil {
return nil, false, err
}
midjourneyModel, mjErr, success := service.GetMjRequestModel(relayMode, &midjourneyRequest)
if mjErr != nil {
return nil, false, fmt.Errorf(mjErr.Description)
}
if midjourneyModel == "" {
if !success {
return nil, false, fmt.Errorf("无效的请求, 无法解析模型")
} else {
// task fetch, task fetch by condition, notify
shouldSelectChannel = false
}
}
modelRequest.Model = midjourneyModel
}
c.Set("relay_mode", relayMode)
} else if strings.Contains(c.Request.URL.Path, "/suno/") {
relayMode := relayconstant.Path2RelaySuno(c.Request.Method, c.Request.URL.Path)
if relayMode == relayconstant.RelayModeSunoFetch ||
relayMode == relayconstant.RelayModeSunoFetchByID {
shouldSelectChannel = false
} else {
modelName := service.CoverTaskActionToModelName(constant.TaskPlatformSuno, c.Param("action"))
modelRequest.Model = modelName
}
c.Set("platform", string(constant.TaskPlatformSuno))
c.Set("relay_mode", relayMode)
2025-06-08 21:40:57 +08:00
} else if strings.Contains(c.Request.URL.Path, "/v1/video/generations") {
2025-06-20 15:50:00 +08:00
err = common.UnmarshalBodyReusable(c, &modelRequest)
2025-07-22 17:36:38 +08:00
relayMode := relayconstant.RelayModeUnknown
if c.Request.Method == http.MethodPost {
relayMode = relayconstant.RelayModeVideoSubmit
} else if c.Request.Method == http.MethodGet {
relayMode = relayconstant.RelayModeVideoFetchByID
shouldSelectChannel = false
2025-06-08 21:40:57 +08:00
}
c.Set("relay_mode", relayMode)
} else if strings.HasPrefix(c.Request.URL.Path, "/v1beta/models/") || strings.HasPrefix(c.Request.URL.Path, "/v1/models/") {
2025-05-26 13:34:41 +08:00
// Gemini API 路径处理: /v1beta/models/gemini-2.0-flash:generateContent
relayMode := relayconstant.RelayModeGemini
modelName := extractModelNameFromGeminiPath(c.Request.URL.Path)
if modelName != "" {
modelRequest.Model = modelName
}
c.Set("relay_mode", relayMode)
} else if !strings.HasPrefix(c.Request.URL.Path, "/v1/audio/transcriptions") && !strings.HasPrefix(c.Request.URL.Path, "/v1/images/edits") {
2024-04-07 22:08:11 +08:00
err = common.UnmarshalBodyReusable(c, &modelRequest)
}
if err != nil {
2024-07-17 17:01:25 +08:00
return nil, false, errors.New("无效的请求, " + err.Error())
2024-04-07 22:08:11 +08:00
}
if strings.HasPrefix(c.Request.URL.Path, "/v1/realtime") {
//wss://api.openai.com/v1/realtime?model=gpt-4o-realtime-preview-2024-10-01
modelRequest.Model = c.Query("model")
}
2024-04-07 22:08:11 +08:00
if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") {
if modelRequest.Model == "" {
modelRequest.Model = "text-moderation-stable"
}
}
if strings.HasSuffix(c.Request.URL.Path, "embeddings") {
if modelRequest.Model == "" {
modelRequest.Model = c.Param("model")
}
}
if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") {
2024-07-16 22:07:10 +08:00
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, "dall-e")
} else if strings.HasPrefix(c.Request.URL.Path, "/v1/images/edits") {
modelRequest.Model = common.GetStringIfEmpty(c.PostForm("model"), "gpt-image-1")
2024-04-07 22:08:11 +08:00
}
if strings.HasPrefix(c.Request.URL.Path, "/v1/audio") {
2024-07-16 22:07:10 +08:00
relayMode := relayconstant.RelayModeAudioSpeech
if strings.HasPrefix(c.Request.URL.Path, "/v1/audio/speech") {
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, "tts-1")
} else if strings.HasPrefix(c.Request.URL.Path, "/v1/audio/translations") {
2024-07-16 23:24:47 +08:00
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, c.PostForm("model"))
2024-07-16 22:07:10 +08:00
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, "whisper-1")
relayMode = relayconstant.RelayModeAudioTranslation
} else if strings.HasPrefix(c.Request.URL.Path, "/v1/audio/transcriptions") {
2024-07-16 23:24:47 +08:00
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, c.PostForm("model"))
2024-07-16 22:07:10 +08:00
modelRequest.Model = common.GetStringIfEmpty(modelRequest.Model, "whisper-1")
relayMode = relayconstant.RelayModeAudioTranscription
2024-04-07 22:08:11 +08:00
}
2024-07-16 22:07:10 +08:00
c.Set("relay_mode", relayMode)
2024-04-07 22:08:11 +08:00
}
if strings.HasPrefix(c.Request.URL.Path, "/pg/chat/completions") {
// playground chat completions
err = common.UnmarshalBodyReusable(c, &modelRequest)
if err != nil {
return nil, false, errors.New("无效的请求, " + err.Error())
}
common.SetContextKey(c, constant.ContextKeyTokenGroup, modelRequest.Group)
}
2024-04-07 22:08:11 +08:00
return &modelRequest, shouldSelectChannel, nil
}
func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, modelName string) *types.NewAPIError {
2024-04-08 13:48:36 +08:00
c.Set("original_model", modelName) // for retry
if channel == nil {
2025-07-30 22:35:31 +08:00
return types.NewError(errors.New("channel is nil"), types.ErrorCodeGetChannelFailed, types.ErrOptionWithSkipRetry())
2024-04-08 13:48:36 +08:00
}
common.SetContextKey(c, constant.ContextKeyChannelId, channel.Id)
common.SetContextKey(c, constant.ContextKeyChannelName, channel.Name)
common.SetContextKey(c, constant.ContextKeyChannelType, channel.Type)
common.SetContextKey(c, constant.ContextKeyChannelCreateTime, channel.CreatedTime)
common.SetContextKey(c, constant.ContextKeyChannelSetting, channel.GetSetting())
common.SetContextKey(c, constant.ContextKeyChannelParamOverride, channel.GetParamOverride())
if nil != channel.OpenAIOrganization && *channel.OpenAIOrganization != "" {
common.SetContextKey(c, constant.ContextKeyChannelOrganization, *channel.OpenAIOrganization)
}
common.SetContextKey(c, constant.ContextKeyChannelAutoBan, channel.GetAutoBan())
common.SetContextKey(c, constant.ContextKeyChannelModelMapping, channel.GetModelMapping())
common.SetContextKey(c, constant.ContextKeyChannelStatusCodeMapping, channel.GetStatusCodeMapping())
key, index, newAPIError := channel.GetNextEnabledKey()
if newAPIError != nil {
return newAPIError
}
if channel.ChannelInfo.IsMultiKey {
common.SetContextKey(c, constant.ContextKeyChannelIsMultiKey, true)
common.SetContextKey(c, constant.ContextKeyChannelMultiKeyIndex, index)
} else {
// 必须设置为 false否则在重试到单个 key 的时候会导致日志显示错误
common.SetContextKey(c, constant.ContextKeyChannelIsMultiKey, false)
}
// c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", key))
common.SetContextKey(c, constant.ContextKeyChannelKey, key)
common.SetContextKey(c, constant.ContextKeyChannelBaseUrl, channel.GetBaseURL())
2024-04-04 16:35:44 +08:00
// TODO: api_version统一
switch channel.Type {
case constant.ChannelTypeAzure:
2024-04-04 16:35:44 +08:00
c.Set("api_version", channel.Other)
case constant.ChannelTypeVertexAi:
2024-08-27 20:19:51 +08:00
c.Set("region", channel.Other)
case constant.ChannelTypeXunfei:
2024-04-04 16:35:44 +08:00
c.Set("api_version", channel.Other)
case constant.ChannelTypeGemini:
2024-04-04 16:35:44 +08:00
c.Set("api_version", channel.Other)
case constant.ChannelTypeAli:
2024-04-04 16:35:44 +08:00
c.Set("plugin", channel.Other)
case constant.ChannelCloudflare:
2024-07-13 19:55:22 +08:00
c.Set("api_version", channel.Other)
case constant.ChannelTypeMokaAI:
2025-01-22 04:21:08 +08:00
c.Set("api_version", channel.Other)
case constant.ChannelTypeCoze:
2025-05-13 22:23:38 +08:00
c.Set("bot_id", channel.Other)
2024-04-04 16:35:44 +08:00
}
return nil
2024-04-04 16:35:44 +08:00
}
2025-05-26 13:34:41 +08:00
// extractModelNameFromGeminiPath extracts the model name from a Gemini API
// URL path.
// Input format: /v1beta/models/gemini-2.0-flash:generateContent
// Output: gemini-2.0-flash
func extractModelNameFromGeminiPath(path string) string {
	const marker = "/models/"
	idx := strings.Index(path, marker)
	if idx == -1 {
		// No "/models/" segment present.
		return ""
	}
	rest := path[idx+len(marker):]
	// The model name is everything before the first ":"; when there is no
	// colon, strings.Cut returns the whole remainder unchanged.
	name, _, _ := strings.Cut(rest, ":")
	return name
}