2023-04-22 20:39:27 +08:00
|
|
|
|
package model
|
|
|
|
|
|
|
|
|
|
|
|
import (
|
2025-06-16 02:30:46 +08:00
|
|
|
|
"database/sql/driver"
|
2024-06-15 01:06:23 +08:00
|
|
|
|
"encoding/json"
|
2025-07-10 17:49:53 +08:00
|
|
|
|
"errors"
|
2025-07-12 14:20:59 +08:00
|
|
|
|
"fmt"
|
2025-07-06 12:37:56 +08:00
|
|
|
|
"math/rand"
|
2023-04-23 18:24:11 +08:00
|
|
|
|
"one-api/common"
|
2025-07-06 12:37:56 +08:00
|
|
|
|
"one-api/constant"
|
2025-07-07 14:26:37 +08:00
|
|
|
|
"one-api/dto"
|
2025-07-10 17:49:53 +08:00
|
|
|
|
"one-api/types"
|
2024-07-08 17:06:29 +08:00
|
|
|
|
"strings"
|
2024-12-17 12:11:24 +08:00
|
|
|
|
"sync"
|
2024-12-01 09:24:43 +08:00
|
|
|
|
|
2025-08-08 18:36:09 +08:00
|
|
|
|
"github.com/samber/lo"
|
2024-12-01 09:24:43 +08:00
|
|
|
|
"gorm.io/gorm"
|
2023-04-22 20:39:27 +08:00
|
|
|
|
)
|
|
|
|
|
|
|
2023-04-22 21:41:16 +08:00
|
|
|
|
// Channel is the GORM model for one upstream provider channel: credentials,
// routing metadata (models/groups/priority/weight), health bookkeeping
// (status, test/response times, balance) and multi-key state.
type Channel struct {
	Id int `json:"id"`
	Type int `json:"type" gorm:"default:0"` // provider type; indexes into constant.ChannelBaseURLs (see GetBaseURL)
	Key string `json:"key" gorm:"not null"` // credential(s): single key, newline-separated list, or JSON array (see GetKeys)
	OpenAIOrganization *string `json:"openai_organization"`
	TestModel *string `json:"test_model"`
	Status int `json:"status" gorm:"default:1"`
	Name string `json:"name" gorm:"index"`
	Weight *uint `json:"weight" gorm:"default:0"`
	CreatedTime int64 `json:"created_time" gorm:"bigint"`
	TestTime int64 `json:"test_time" gorm:"bigint"`
	ResponseTime int `json:"response_time"` // in milliseconds
	BaseURL *string `json:"base_url" gorm:"column:base_url;default:''"`
	Other string `json:"other"`
	Balance float64 `json:"balance"` // in USD
	BalanceUpdatedTime int64 `json:"balance_updated_time" gorm:"bigint"`
	Models string `json:"models"` // comma-separated model names (see GetModels)
	Group string `json:"group" gorm:"type:varchar(64);default:'default'"` // comma-separated group names (see GetGroups)
	UsedQuota int64 `json:"used_quota" gorm:"bigint;default:0"`
	ModelMapping *string `json:"model_mapping" gorm:"type:text"`
	//MaxInputTokens *int `json:"max_input_tokens" gorm:"default:0"`
	StatusCodeMapping *string `json:"status_code_mapping" gorm:"type:varchar(1024);default:''"`
	Priority *int64 `json:"priority" gorm:"bigint;default:0"`
	AutoBan *int `json:"auto_ban" gorm:"default:1"` // 1 enables auto-ban (see GetAutoBan)
	OtherInfo string `json:"other_info"` // JSON object; carries keys like status_reason/status_time (see Get/SetOtherInfo)
	OtherSettings string `json:"settings" gorm:"column:settings"` // other settings
	Tag *string `json:"tag" gorm:"index"`
	Setting *string `json:"setting" gorm:"type:text"` // extra channel settings
	ParamOverride *string `json:"param_override" gorm:"type:text"`
	HeaderOverride *string `json:"header_override" gorm:"type:text"`
	// add after v0.8.5
	ChannelInfo ChannelInfo `json:"channel_info" gorm:"type:json"`

	// cache info
	Keys []string `json:"-" gorm:"-"` // pre-split key list; not persisted, preferred by GetKeys when non-empty
}
|
|
|
|
|
|
|
2025-06-16 02:30:46 +08:00
|
|
|
|
// ChannelInfo holds multi-key bookkeeping for a channel; it is stored as a
// JSON column on Channel (see Value/Scan below).
type ChannelInfo struct {
	IsMultiKey bool `json:"is_multi_key"` // whether multi-key mode is enabled
	MultiKeySize int `json:"multi_key_size"` // number of keys in multi-key mode
	MultiKeyStatusList map[int]int `json:"multi_key_status_list"` // key status list, key index -> status
	MultiKeyDisabledReason map[int]string `json:"multi_key_disabled_reason,omitempty"` // key disable reason list, key index -> reason
	MultiKeyDisabledTime map[int]int64 `json:"multi_key_disabled_time,omitempty"` // key disable time list, key index -> time
	MultiKeyPollingIndex int `json:"multi_key_polling_index"` // next key index to try in polling mode
	MultiKeyMode constant.MultiKeyMode `json:"multi_key_mode"`
}
|
|
|
|
|
|
|
|
|
|
|
|
// Value implements driver.Valuer interface
|
2025-07-07 01:31:41 +08:00
|
|
|
|
func (c ChannelInfo) Value() (driver.Value, error) {
|
2025-07-10 15:02:40 +08:00
|
|
|
|
return common.Marshal(&c)
|
2025-06-16 02:30:46 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Scan implements sql.Scanner interface
|
|
|
|
|
|
func (c *ChannelInfo) Scan(value interface{}) error {
|
|
|
|
|
|
bytesValue, _ := value.([]byte)
|
2025-07-10 15:02:40 +08:00
|
|
|
|
return common.Unmarshal(bytesValue, c)
|
2025-07-06 12:37:56 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
2025-08-04 16:52:31 +08:00
|
|
|
|
func (channel *Channel) GetKeys() []string {
|
2025-07-06 12:37:56 +08:00
|
|
|
|
if channel.Key == "" {
|
|
|
|
|
|
return []string{}
|
|
|
|
|
|
}
|
2025-08-02 13:16:30 +08:00
|
|
|
|
if len(channel.Keys) > 0 {
|
|
|
|
|
|
return channel.Keys
|
|
|
|
|
|
}
|
2025-07-15 12:02:04 +08:00
|
|
|
|
trimmed := strings.TrimSpace(channel.Key)
|
|
|
|
|
|
// If the key starts with '[', try to parse it as a JSON array (e.g., for Vertex AI scenarios)
|
|
|
|
|
|
if strings.HasPrefix(trimmed, "[") {
|
|
|
|
|
|
var arr []json.RawMessage
|
2025-07-30 23:26:09 +08:00
|
|
|
|
if err := common.Unmarshal([]byte(trimmed), &arr); err == nil {
|
2025-07-15 12:02:04 +08:00
|
|
|
|
res := make([]string, len(arr))
|
|
|
|
|
|
for i, v := range arr {
|
|
|
|
|
|
res[i] = string(v)
|
|
|
|
|
|
}
|
|
|
|
|
|
return res
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
// Otherwise, fall back to splitting by newline
|
2025-07-06 12:37:56 +08:00
|
|
|
|
keys := strings.Split(strings.Trim(channel.Key, "\n"), "\n")
|
|
|
|
|
|
return keys
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-07-12 15:14:55 +08:00
|
|
|
|
// GetNextEnabledKey picks the key to use for the next request and returns
// (key, key index, error). For single-key channels it returns Key as-is.
// For multi-key channels it selects among keys whose status is enabled,
// honouring the configured MultiKeyMode (random or polling). Polling mode
// serializes on a per-channel mutex and persists the advanced index when
// the memory cache is disabled.
func (channel *Channel) GetNextEnabledKey() (string, int, *types.NewAPIError) {
	// If not in multi-key mode, return the original key string directly.
	if !channel.ChannelInfo.IsMultiKey {
		return channel.Key, 0, nil
	}

	// Obtain all keys (split by \n)
	keys := channel.GetKeys()
	if len(keys) == 0 {
		// No keys available, return error, should disable the channel
		return "", 0, types.NewError(errors.New("no keys available"), types.ErrorCodeChannelNoAvailableKey)
	}

	statusList := channel.ChannelInfo.MultiKeyStatusList
	// helper to get key status, default to enabled when missing
	getStatus := func(idx int) int {
		if statusList == nil {
			return common.ChannelStatusEnabled
		}
		if status, ok := statusList[idx]; ok {
			return status
		}
		return common.ChannelStatusEnabled
	}

	// Collect indexes of enabled keys
	enabledIdx := make([]int, 0, len(keys))
	for i := range keys {
		if getStatus(i) == common.ChannelStatusEnabled {
			enabledIdx = append(enabledIdx, i)
		}
	}
	// If no specific status list or none enabled, fall back to first key
	if len(enabledIdx) == 0 {
		return keys[0], 0, nil
	}

	switch channel.ChannelInfo.MultiKeyMode {
	case constant.MultiKeyModeRandom:
		// Randomly pick one enabled key
		selectedIdx := enabledIdx[rand.Intn(len(enabledIdx))]
		return keys[selectedIdx], selectedIdx, nil
	case constant.MultiKeyModePolling:
		// Use channel-specific lock to ensure thread-safe polling
		lock := GetChannelPollingLock(channel.Id)
		lock.Lock()
		defer lock.Unlock()

		// Read the authoritative polling index from the cache layer.
		// NOTE(review): the start index is read from channelInfo (cache copy)
		// but the advanced index below is written to channel.ChannelInfo —
		// presumably the cache shares/propagates this struct; confirm.
		channelInfo, err := CacheGetChannelInfo(channel.Id)
		if err != nil {
			return "", 0, types.NewError(err, types.ErrorCodeGetChannelFailed, types.ErrOptionWithSkipRetry())
		}
		//println("before polling index:", channel.ChannelInfo.MultiKeyPollingIndex)
		// Persist the advanced index after selection; only the DB path runs
		// when the in-memory cache is disabled.
		defer func() {
			if common.DebugEnabled {
				println(fmt.Sprintf("channel %d polling index: %d", channel.Id, channel.ChannelInfo.MultiKeyPollingIndex))
			}
			if !common.MemoryCacheEnabled {
				_ = channel.SaveChannelInfo()
			} else {
				// CacheUpdateChannel(channel)
			}
		}()
		// Start from the saved polling index and look for the next enabled key
		start := channelInfo.MultiKeyPollingIndex
		if start < 0 || start >= len(keys) {
			start = 0
		}
		for i := 0; i < len(keys); i++ {
			idx := (start + i) % len(keys)
			if getStatus(idx) == common.ChannelStatusEnabled {
				// update polling index for next call (point to the next position)
				channel.ChannelInfo.MultiKeyPollingIndex = (idx + 1) % len(keys)
				return keys[idx], idx, nil
			}
		}
		// Fallback – should not happen, but return first enabled key
		return keys[enabledIdx[0]], enabledIdx[0], nil
	default:
		// Unknown mode, default to first enabled key (or original key string)
		return keys[enabledIdx[0]], enabledIdx[0], nil
	}
}
|
|
|
|
|
|
|
2025-07-12 14:20:59 +08:00
|
|
|
|
func (channel *Channel) SaveChannelInfo() error {
|
|
|
|
|
|
return DB.Model(channel).Update("channel_info", channel.ChannelInfo).Error
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-07-08 17:06:29 +08:00
|
|
|
|
func (channel *Channel) GetModels() []string {
|
|
|
|
|
|
if channel.Models == "" {
|
|
|
|
|
|
return []string{}
|
|
|
|
|
|
}
|
|
|
|
|
|
return strings.Split(strings.Trim(channel.Models, ","), ",")
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-05-23 01:26:52 +08:00
|
|
|
|
func (channel *Channel) GetGroups() []string {
|
|
|
|
|
|
if channel.Group == "" {
|
|
|
|
|
|
return []string{}
|
|
|
|
|
|
}
|
|
|
|
|
|
groups := strings.Split(strings.Trim(channel.Group, ","), ",")
|
|
|
|
|
|
for i, group := range groups {
|
|
|
|
|
|
groups[i] = strings.TrimSpace(group)
|
|
|
|
|
|
}
|
|
|
|
|
|
return groups
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-06-15 01:06:23 +08:00
|
|
|
|
func (channel *Channel) GetOtherInfo() map[string]interface{} {
|
2024-06-16 13:38:36 +08:00
|
|
|
|
otherInfo := make(map[string]interface{})
|
2024-06-15 01:06:23 +08:00
|
|
|
|
if channel.OtherInfo != "" {
|
2025-07-30 23:26:09 +08:00
|
|
|
|
err := common.Unmarshal([]byte(channel.OtherInfo), &otherInfo)
|
2024-06-15 01:06:23 +08:00
|
|
|
|
if err != nil {
|
2025-08-15 14:15:03 +08:00
|
|
|
|
common.SysLog(fmt.Sprintf("failed to unmarshal other info: channel_id=%d, tag=%s, name=%s, error=%v", channel.Id, channel.GetTag(), channel.Name, err))
|
2024-06-15 01:06:23 +08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
return otherInfo
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (channel *Channel) SetOtherInfo(otherInfo map[string]interface{}) {
|
|
|
|
|
|
otherInfoBytes, err := json.Marshal(otherInfo)
|
|
|
|
|
|
if err != nil {
|
2025-08-15 14:15:03 +08:00
|
|
|
|
common.SysLog(fmt.Sprintf("failed to marshal other info: channel_id=%d, tag=%s, name=%s, error=%v", channel.Id, channel.GetTag(), channel.Name, err))
|
2024-06-15 01:06:23 +08:00
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
channel.OtherInfo = string(otherInfoBytes)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-11-19 01:13:18 +08:00
|
|
|
|
func (channel *Channel) GetTag() string {
|
|
|
|
|
|
if channel.Tag == nil {
|
|
|
|
|
|
return ""
|
|
|
|
|
|
}
|
|
|
|
|
|
return *channel.Tag
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// SetTag assigns the channel tag, storing a pointer to the given value.
func (channel *Channel) SetTag(tag string) {
	channel.Tag = &tag
}
|
|
|
|
|
|
|
2024-08-03 17:32:28 +08:00
|
|
|
|
func (channel *Channel) GetAutoBan() bool {
|
|
|
|
|
|
if channel.AutoBan == nil {
|
|
|
|
|
|
return false
|
|
|
|
|
|
}
|
|
|
|
|
|
return *channel.AutoBan == 1
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-06-15 01:06:23 +08:00
|
|
|
|
// Save writes the full channel record back to the database.
func (channel *Channel) Save() error {
	return DB.Save(channel).Error
}
|
|
|
|
|
|
|
2023-12-05 18:15:40 +08:00
|
|
|
|
func GetAllChannels(startIdx int, num int, selectAll bool, idSort bool) ([]*Channel, error) {
|
2023-04-22 22:02:59 +08:00
|
|
|
|
var channels []*Channel
|
2023-04-22 20:39:27 +08:00
|
|
|
|
var err error
|
2023-12-05 18:15:40 +08:00
|
|
|
|
order := "priority desc"
|
|
|
|
|
|
if idSort {
|
|
|
|
|
|
order = "id desc"
|
|
|
|
|
|
}
|
2023-05-15 12:36:55 +08:00
|
|
|
|
if selectAll {
|
2023-12-05 18:15:40 +08:00
|
|
|
|
err = DB.Order(order).Find(&channels).Error
|
2023-05-15 12:36:55 +08:00
|
|
|
|
} else {
|
2023-12-05 18:15:40 +08:00
|
|
|
|
err = DB.Order(order).Limit(num).Offset(startIdx).Omit("key").Find(&channels).Error
|
2023-05-15 12:36:55 +08:00
|
|
|
|
}
|
2023-04-22 22:02:59 +08:00
|
|
|
|
return channels, err
|
2023-04-22 20:39:27 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
2024-12-09 20:38:03 +08:00
|
|
|
|
func GetChannelsByTag(tag string, idSort bool) ([]*Channel, error) {
|
2024-11-19 01:13:18 +08:00
|
|
|
|
var channels []*Channel
|
2024-12-09 20:38:03 +08:00
|
|
|
|
order := "priority desc"
|
|
|
|
|
|
if idSort {
|
|
|
|
|
|
order = "id desc"
|
|
|
|
|
|
}
|
|
|
|
|
|
err := DB.Where("tag = ?", tag).Order(order).Find(&channels).Error
|
2024-11-19 01:13:18 +08:00
|
|
|
|
return channels, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-11-19 01:39:27 +08:00
|
|
|
|
func SearchChannels(keyword string, group string, model string, idSort bool) ([]*Channel, error) {
|
2024-03-01 21:57:52 +08:00
|
|
|
|
var channels []*Channel
|
|
|
|
|
|
modelsCol := "`models`"
|
|
|
|
|
|
|
|
|
|
|
|
// 如果是 PostgreSQL,使用双引号
|
2023-10-22 18:38:29 +08:00
|
|
|
|
if common.UsingPostgreSQL {
|
2025-01-02 00:14:16 +08:00
|
|
|
|
modelsCol = `"models"`
|
2023-10-22 18:38:29 +08:00
|
|
|
|
}
|
2024-03-01 21:57:52 +08:00
|
|
|
|
|
2025-04-28 11:38:53 +08:00
|
|
|
|
baseURLCol := "`base_url`"
|
|
|
|
|
|
// 如果是 PostgreSQL,使用双引号
|
|
|
|
|
|
if common.UsingPostgreSQL {
|
|
|
|
|
|
baseURLCol = `"base_url"`
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-11-19 01:39:27 +08:00
|
|
|
|
order := "priority desc"
|
|
|
|
|
|
if idSort {
|
|
|
|
|
|
order = "id desc"
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-03-01 21:57:52 +08:00
|
|
|
|
// 构造基础查询
|
2025-06-14 18:15:45 +08:00
|
|
|
|
baseQuery := DB.Model(&Channel{}).Omit("key")
|
2024-03-01 21:57:52 +08:00
|
|
|
|
|
|
|
|
|
|
// 构造WHERE子句
|
|
|
|
|
|
var whereClause string
|
|
|
|
|
|
var args []interface{}
|
2024-08-05 22:35:16 +08:00
|
|
|
|
if group != "" && group != "null" {
|
2024-08-08 20:45:41 +08:00
|
|
|
|
var groupCondition string
|
2024-08-09 11:38:53 +08:00
|
|
|
|
if common.UsingMySQL {
|
2025-06-14 17:51:05 +08:00
|
|
|
|
groupCondition = `CONCAT(',', ` + commonGroupCol + `, ',') LIKE ?`
|
2024-08-09 11:38:53 +08:00
|
|
|
|
} else {
|
|
|
|
|
|
// sqlite, PostgreSQL
|
2025-06-14 17:51:05 +08:00
|
|
|
|
groupCondition = `(',' || ` + commonGroupCol + ` || ',') LIKE ?`
|
2024-08-08 20:45:41 +08:00
|
|
|
|
}
|
2025-06-14 17:51:05 +08:00
|
|
|
|
whereClause = "(id = ? OR name LIKE ? OR " + commonKeyCol + " = ? OR " + baseURLCol + " LIKE ?) AND " + modelsCol + ` LIKE ? AND ` + groupCondition
|
2025-04-28 11:38:53 +08:00
|
|
|
|
args = append(args, common.String2Int(keyword), "%"+keyword+"%", keyword, "%"+keyword+"%", "%"+model+"%", "%,"+group+",%")
|
2023-12-05 18:15:40 +08:00
|
|
|
|
} else {
|
2025-06-14 17:51:05 +08:00
|
|
|
|
whereClause = "(id = ? OR name LIKE ? OR " + commonKeyCol + " = ? OR " + baseURLCol + " LIKE ?) AND " + modelsCol + " LIKE ?"
|
2025-04-28 11:38:53 +08:00
|
|
|
|
args = append(args, common.String2Int(keyword), "%"+keyword+"%", keyword, "%"+keyword+"%", "%"+model+"%")
|
2023-12-05 18:15:40 +08:00
|
|
|
|
}
|
2024-03-01 21:57:52 +08:00
|
|
|
|
|
|
|
|
|
|
// 执行查询
|
2024-11-19 01:39:27 +08:00
|
|
|
|
err := baseQuery.Where(whereClause, args...).Order(order).Find(&channels).Error
|
2024-03-01 21:57:52 +08:00
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
return channels, nil
|
2023-04-22 20:39:27 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-23 18:24:11 +08:00
|
|
|
|
func GetChannelById(id int, selectAll bool) (*Channel, error) {
|
2025-07-11 21:12:17 +08:00
|
|
|
|
channel := &Channel{Id: id}
|
2023-04-22 22:02:59 +08:00
|
|
|
|
var err error = nil
|
2023-04-23 18:24:11 +08:00
|
|
|
|
if selectAll {
|
2025-07-11 21:12:17 +08:00
|
|
|
|
err = DB.First(channel, "id = ?", id).Error
|
2023-04-23 18:24:11 +08:00
|
|
|
|
} else {
|
2025-07-11 21:12:17 +08:00
|
|
|
|
err = DB.Omit("key").First(channel, "id = ?", id).Error
|
|
|
|
|
|
}
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
if channel == nil {
|
|
|
|
|
|
return nil, errors.New("channel not found")
|
2023-04-23 18:24:11 +08:00
|
|
|
|
}
|
2025-07-11 21:12:17 +08:00
|
|
|
|
return channel, nil
|
2023-04-22 22:02:59 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
2023-05-13 17:08:13 +08:00
|
|
|
|
// BatchInsertChannels inserts the given channels and their abilities inside
// a single transaction, creating rows in chunks of 50. On any error (or a
// panic mid-transaction) the whole transaction is rolled back.
func BatchInsertChannels(channels []Channel) error {
	if len(channels) == 0 {
		return nil
	}
	tx := DB.Begin()
	if tx.Error != nil {
		return tx.Error
	}
	// Roll back if anything panics before Commit, so the transaction is not leaked.
	defer func() {
		if r := recover(); r != nil {
			tx.Rollback()
		}
	}()

	for _, chunk := range lo.Chunk(channels, 50) {
		if err := tx.Create(&chunk).Error; err != nil {
			tx.Rollback()
			return err
		}
		// Abilities are derived rows; create them within the same transaction.
		for _, channel_ := range chunk {
			if err := channel_.AddAbilities(tx); err != nil {
				tx.Rollback()
				return err
			}
		}
	}
	return tx.Commit().Error
}
|
|
|
|
|
|
|
2023-12-14 16:35:03 +08:00
|
|
|
|
func BatchDeleteChannels(ids []int) error {
|
2025-08-08 18:36:09 +08:00
|
|
|
|
if len(ids) == 0 {
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
// 使用事务 分批删除channel表和abilities表
|
2023-12-14 16:35:03 +08:00
|
|
|
|
tx := DB.Begin()
|
2025-08-08 18:36:09 +08:00
|
|
|
|
if tx.Error != nil {
|
|
|
|
|
|
return tx.Error
|
2023-12-14 16:35:03 +08:00
|
|
|
|
}
|
2025-08-08 18:36:09 +08:00
|
|
|
|
for _, chunk := range lo.Chunk(ids, 200) {
|
|
|
|
|
|
if err := tx.Where("id in (?)", chunk).Delete(&Channel{}).Error; err != nil {
|
|
|
|
|
|
tx.Rollback()
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
if err := tx.Where("channel_id in (?)", chunk).Delete(&Ability{}).Error; err != nil {
|
|
|
|
|
|
tx.Rollback()
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
2023-12-14 16:35:03 +08:00
|
|
|
|
}
|
2025-08-08 18:36:09 +08:00
|
|
|
|
return tx.Commit().Error
|
2023-12-14 16:35:03 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
2023-09-18 21:43:45 +08:00
|
|
|
|
func (channel *Channel) GetPriority() int64 {
|
2023-09-18 22:07:17 +08:00
|
|
|
|
if channel.Priority == nil {
|
2023-09-18 21:43:45 +08:00
|
|
|
|
return 0
|
|
|
|
|
|
}
|
|
|
|
|
|
return *channel.Priority
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-12-27 19:00:47 +08:00
|
|
|
|
func (channel *Channel) GetWeight() int {
|
|
|
|
|
|
if channel.Weight == nil {
|
|
|
|
|
|
return 0
|
|
|
|
|
|
}
|
|
|
|
|
|
return int(*channel.Weight)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-09-18 22:07:17 +08:00
|
|
|
|
func (channel *Channel) GetBaseURL() string {
|
|
|
|
|
|
if channel.BaseURL == nil {
|
|
|
|
|
|
return ""
|
|
|
|
|
|
}
|
2025-08-14 22:15:18 +08:00
|
|
|
|
url := *channel.BaseURL
|
|
|
|
|
|
if url == "" {
|
|
|
|
|
|
url = constant.ChannelBaseURLs[channel.Type]
|
|
|
|
|
|
}
|
|
|
|
|
|
return url
|
2023-09-18 22:07:17 +08:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func (channel *Channel) GetModelMapping() string {
|
|
|
|
|
|
if channel.ModelMapping == nil {
|
|
|
|
|
|
return ""
|
|
|
|
|
|
}
|
|
|
|
|
|
return *channel.ModelMapping
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-04-20 21:05:23 +08:00
|
|
|
|
func (channel *Channel) GetStatusCodeMapping() string {
|
|
|
|
|
|
if channel.StatusCodeMapping == nil {
|
|
|
|
|
|
return ""
|
|
|
|
|
|
}
|
|
|
|
|
|
return *channel.StatusCodeMapping
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-22 22:02:59 +08:00
|
|
|
|
func (channel *Channel) Insert() error {
|
2023-04-22 20:39:27 +08:00
|
|
|
|
var err error
|
2023-04-22 22:02:59 +08:00
|
|
|
|
err = DB.Create(channel).Error
|
2023-06-07 23:26:00 +08:00
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
2025-08-08 18:36:09 +08:00
|
|
|
|
err = channel.AddAbilities(nil)
|
2023-04-22 20:39:27 +08:00
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-22 22:02:59 +08:00
|
|
|
|
// Update persists the channel's non-zero fields, re-reads the stored row
// back into the receiver, and refreshes its ability rows. For multi-key
// channels it first recomputes MultiKeySize from the (possibly edited) key
// list and drops per-key status entries whose index is now out of range.
func (channel *Channel) Update() error {
	// If this is a multi-key channel, recalculate MultiKeySize based on the current key list to avoid inconsistency after editing keys
	if channel.ChannelInfo.IsMultiKey {
		var keyStr string
		if channel.Key != "" {
			keyStr = channel.Key
		} else {
			// If key is not provided, read the existing key from the database
			if existing, err := GetChannelById(channel.Id, true); err == nil {
				keyStr = existing.Key
			}
		}
		// Parse the key list (supports newline separation or JSON array)
		keys := []string{}
		if keyStr != "" {
			trimmed := strings.TrimSpace(keyStr)
			if strings.HasPrefix(trimmed, "[") {
				var arr []json.RawMessage
				if err := common.Unmarshal([]byte(trimmed), &arr); err == nil {
					keys = make([]string, len(arr))
					for i, v := range arr {
						keys[i] = string(v)
					}
				}
			}
			if len(keys) == 0 { // fallback to newline split
				keys = strings.Split(strings.Trim(keyStr, "\n"), "\n")
			}
		}
		channel.ChannelInfo.MultiKeySize = len(keys)
		// Clean up status data that exceeds the new key count to prevent index out of range
		if channel.ChannelInfo.MultiKeyStatusList != nil {
			for idx := range channel.ChannelInfo.MultiKeyStatusList {
				if idx >= channel.ChannelInfo.MultiKeySize {
					delete(channel.ChannelInfo.MultiKeyStatusList, idx)
				}
			}
		}
	}
	var err error
	// Updates with a struct only writes non-zero fields.
	err = DB.Model(channel).Updates(channel).Error
	if err != nil {
		return err
	}
	// Re-read the stored row so the receiver reflects DB defaults/triggers.
	DB.Model(channel).First(channel, "id = ?", channel.Id)
	err = channel.UpdateAbilities(nil)
	return err
}
|
|
|
|
|
|
|
2023-05-15 11:35:38 +08:00
|
|
|
|
func (channel *Channel) UpdateResponseTime(responseTime int64) {
|
|
|
|
|
|
err := DB.Model(channel).Select("response_time", "test_time").Updates(Channel{
|
|
|
|
|
|
TestTime: common.GetTimestamp(),
|
|
|
|
|
|
ResponseTime: int(responseTime),
|
|
|
|
|
|
}).Error
|
|
|
|
|
|
if err != nil {
|
2025-08-15 14:15:03 +08:00
|
|
|
|
common.SysLog(fmt.Sprintf("failed to update response time: channel_id=%d, error=%v", channel.Id, err))
|
2023-05-15 11:35:38 +08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-05-21 16:09:54 +08:00
|
|
|
|
func (channel *Channel) UpdateBalance(balance float64) {
|
|
|
|
|
|
err := DB.Model(channel).Select("balance_updated_time", "balance").Updates(Channel{
|
|
|
|
|
|
BalanceUpdatedTime: common.GetTimestamp(),
|
|
|
|
|
|
Balance: balance,
|
|
|
|
|
|
}).Error
|
|
|
|
|
|
if err != nil {
|
2025-08-15 14:15:03 +08:00
|
|
|
|
common.SysLog(fmt.Sprintf("failed to update balance: channel_id=%d, error=%v", channel.Id, err))
|
2023-05-21 16:09:54 +08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2023-04-22 22:02:59 +08:00
|
|
|
|
func (channel *Channel) Delete() error {
|
|
|
|
|
|
var err error
|
|
|
|
|
|
err = DB.Delete(channel).Error
|
2023-06-07 23:26:00 +08:00
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
err = channel.DeleteAbilities()
|
2023-04-22 22:02:59 +08:00
|
|
|
|
return err
|
2023-04-22 20:39:27 +08:00
|
|
|
|
}
|
2023-05-15 17:34:09 +08:00
|
|
|
|
|
2024-12-17 12:11:24 +08:00
|
|
|
|
// channelStatusLock serializes cache-aware status updates in
// UpdateChannelStatus when the in-memory cache is enabled.
var channelStatusLock sync.Mutex

// channelPollingLocks stores locks for each channel.id to ensure thread-safe polling
var channelPollingLocks sync.Map
|
|
|
|
|
|
|
2025-08-04 20:44:19 +08:00
|
|
|
|
// GetChannelPollingLock returns or creates a mutex for the given channel ID
|
|
|
|
|
|
func GetChannelPollingLock(channelId int) *sync.Mutex {
|
2025-07-12 11:17:08 +08:00
|
|
|
|
if lock, exists := channelPollingLocks.Load(channelId); exists {
|
|
|
|
|
|
return lock.(*sync.Mutex)
|
|
|
|
|
|
}
|
|
|
|
|
|
// Create new lock for this channel
|
|
|
|
|
|
newLock := &sync.Mutex{}
|
|
|
|
|
|
actual, _ := channelPollingLocks.LoadOrStore(channelId, newLock)
|
|
|
|
|
|
return actual.(*sync.Mutex)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// CleanupChannelPollingLocks removes locks for channels that no longer exist
|
|
|
|
|
|
// This is optional and can be called periodically to prevent memory leaks
|
|
|
|
|
|
func CleanupChannelPollingLocks() {
|
|
|
|
|
|
var activeChannelIds []int
|
|
|
|
|
|
DB.Model(&Channel{}).Pluck("id", &activeChannelIds)
|
|
|
|
|
|
|
|
|
|
|
|
activeChannelSet := make(map[int]bool)
|
|
|
|
|
|
for _, id := range activeChannelIds {
|
|
|
|
|
|
activeChannelSet[id] = true
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
channelPollingLocks.Range(func(key, value interface{}) bool {
|
|
|
|
|
|
channelId := key.(int)
|
|
|
|
|
|
if !activeChannelSet[channelId] {
|
|
|
|
|
|
channelPollingLocks.Delete(channelId)
|
|
|
|
|
|
}
|
|
|
|
|
|
return true
|
|
|
|
|
|
})
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-08-04 16:52:31 +08:00
|
|
|
|
// handlerMultiKeyUpdate applies a status change to the specific key that was
// in use on a multi-key channel. Enabling a key removes its status entry;
// disabling records status, reason and timestamp for that key index. Once
// every key has a (non-enabled) status entry the whole channel is
// auto-disabled and the reason stored in OtherInfo.
func handlerMultiKeyUpdate(channel *Channel, usingKey string, status int, reason string) {
	keys := channel.GetKeys()
	if len(keys) == 0 {
		// No per-key bookkeeping possible; apply the status to the channel itself.
		channel.Status = status
	} else {
		// NOTE(review): if usingKey is not found in keys, keyIndex stays 0 and
		// the update is applied to the first key — confirm this fallback is intended.
		var keyIndex int
		for i, key := range keys {
			if key == usingKey {
				keyIndex = i
				break
			}
		}
		if channel.ChannelInfo.MultiKeyStatusList == nil {
			channel.ChannelInfo.MultiKeyStatusList = make(map[int]int)
		}
		if status == common.ChannelStatusEnabled {
			// Enabled keys carry no entry; absence means enabled.
			delete(channel.ChannelInfo.MultiKeyStatusList, keyIndex)
		} else {
			channel.ChannelInfo.MultiKeyStatusList[keyIndex] = status
			if channel.ChannelInfo.MultiKeyDisabledReason == nil {
				channel.ChannelInfo.MultiKeyDisabledReason = make(map[int]string)
			}
			if channel.ChannelInfo.MultiKeyDisabledTime == nil {
				channel.ChannelInfo.MultiKeyDisabledTime = make(map[int]int64)
			}
			channel.ChannelInfo.MultiKeyDisabledReason[keyIndex] = reason
			channel.ChannelInfo.MultiKeyDisabledTime[keyIndex] = common.GetTimestamp()
		}
		// All keys have status entries -> no enabled key remains; disable the channel.
		if len(channel.ChannelInfo.MultiKeyStatusList) >= channel.ChannelInfo.MultiKeySize {
			channel.Status = common.ChannelStatusAutoDisabled
			info := channel.GetOtherInfo()
			info["status_reason"] = "All keys are disabled"
			info["status_time"] = common.GetTimestamp()
			channel.SetOtherInfo(info)
		}
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
// UpdateChannelStatus applies a status change (with reason) to a channel,
// keeping the in-memory cache (when enabled) and the database in sync.
// For multi-key channels the change targets the key that was in use via
// handlerMultiKeyUpdate. Returns true when the database row was changed.
// Ability rows are refreshed via the deferred hook when the effective
// channel status changed.
func UpdateChannelStatus(channelId int, usingKey string, status int, reason string) bool {
	if common.MemoryCacheEnabled {
		channelStatusLock.Lock()
		defer channelStatusLock.Unlock()

		channelCache, _ := CacheGetChannel(channelId)
		if channelCache == nil {
			return false
		}
		if channelCache.ChannelInfo.IsMultiKey {
			// Multi-key mode: update the per-key state held in the cache.
			handlerMultiKeyUpdate(channelCache, usingKey, status, reason)
			//CacheUpdateChannel(channelCache)
			//return true
		} else {
			// Cached channel exists and already has the target status: nothing to do.
			if channelCache.Status == status {
				return false
			}
			CacheUpdateChannelStatus(channelId, status)
		}
	}

	// Run the ability refresh after the DB write below, and only when the
	// effective channel status actually changed.
	shouldUpdateAbilities := false
	defer func() {
		if shouldUpdateAbilities {
			err := UpdateAbilityStatus(channelId, status == common.ChannelStatusEnabled)
			if err != nil {
				common.SysLog(fmt.Sprintf("failed to update ability status: channel_id=%d, error=%v", channelId, err))
			}
		}
	}()
	channel, err := GetChannelById(channelId, true)
	if err != nil {
		return false
	} else {
		if channel.Status == status {
			return false
		}

		if channel.ChannelInfo.IsMultiKey {
			beforeStatus := channel.Status
			handlerMultiKeyUpdate(channel, usingKey, status, reason)
			// Only a channel-level status flip (e.g. all keys disabled) affects abilities.
			if beforeStatus != channel.Status {
				shouldUpdateAbilities = true
			}
		} else {
			info := channel.GetOtherInfo()
			info["status_reason"] = reason
			info["status_time"] = common.GetTimestamp()
			channel.SetOtherInfo(info)
			channel.Status = status
			shouldUpdateAbilities = true
		}
		err = channel.Save()
		if err != nil {
			common.SysLog(fmt.Sprintf("failed to update channel status: channel_id=%d, status=%d, error=%v", channel.Id, status, err))
			return false
		}
	}
	return true
}
|
2023-06-16 16:02:00 +08:00
|
|
|
|
|
2024-11-19 01:13:18 +08:00
|
|
|
|
func EnableChannelByTag(tag string) error {
|
|
|
|
|
|
err := DB.Model(&Channel{}).Where("tag = ?", tag).Update("status", common.ChannelStatusEnabled).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
err = UpdateAbilityStatusByTag(tag, true)
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func DisableChannelByTag(tag string) error {
|
|
|
|
|
|
err := DB.Model(&Channel{}).Where("tag = ?", tag).Update("status", common.ChannelStatusManuallyDisabled).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
err = UpdateAbilityStatusByTag(tag, false)
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-11-29 23:58:31 +08:00
|
|
|
|
// EditChannelByTag applies a partial update to every channel carrying the
// given tag. Only non-nil arguments are applied (and for the string fields,
// only when also non-empty). Changing the model list or the group forces a
// full rebuild of the ability rows; otherwise the existing ability rows are
// patched in place via UpdateAbilityByTag.
func EditChannelByTag(tag string, newTag *string, modelMapping *string, models *string, group *string, priority *int64, weight *uint) error {
	updateData := Channel{}
	// Abilities only need to be recreated when models or group change.
	shouldReCreateAbilities := false
	updatedTag := tag
	// If newTag is non-nil and differs from tag, update the tag.
	if newTag != nil && *newTag != tag {
		updateData.Tag = newTag
		updatedTag = *newTag
	}
	if modelMapping != nil && *modelMapping != "" {
		updateData.ModelMapping = modelMapping
	}
	if models != nil && *models != "" {
		shouldReCreateAbilities = true
		updateData.Models = *models
	}
	if group != nil && *group != "" {
		shouldReCreateAbilities = true
		updateData.Group = *group
	}
	if priority != nil {
		updateData.Priority = priority
	}
	if weight != nil {
		updateData.Weight = weight
	}

	err := DB.Model(&Channel{}).Where("tag = ?", tag).Updates(updateData).Error
	if err != nil {
		return err
	}
	if shouldReCreateAbilities {
		// Reload the channels under the (possibly renamed) tag and rebuild
		// their ability rows. Individual failures are logged but do not
		// abort the overall edit.
		channels, err := GetChannelsByTag(updatedTag, false)
		if err == nil {
			for _, channel := range channels {
				err = channel.UpdateAbilities(nil)
				if err != nil {
					common.SysLog(fmt.Sprintf("failed to update abilities: channel_id=%d, tag=%s, error=%v", channel.Id, channel.GetTag(), err))
				}
			}
		}
	} else {
		// No model/group change: patch tag/priority/weight on the existing
		// ability rows instead of rebuilding them.
		err := UpdateAbilityByTag(tag, newTag, priority, weight)
		if err != nil {
			return err
		}
	}
	return nil
}
|
|
|
|
|
|
|
2023-06-16 16:02:00 +08:00
|
|
|
|
func UpdateChannelUsedQuota(id int, quota int) {
|
2023-09-03 14:58:20 +08:00
|
|
|
|
if common.BatchUpdateEnabled {
|
|
|
|
|
|
addNewRecord(BatchUpdateTypeChannelUsedQuota, id, quota)
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
updateChannelUsedQuota(id, quota)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func updateChannelUsedQuota(id int, quota int) {
|
2023-08-13 00:51:48 +08:00
|
|
|
|
err := DB.Model(&Channel{}).Where("id = ?", id).Update("used_quota", gorm.Expr("used_quota + ?", quota)).Error
|
2023-06-16 16:02:00 +08:00
|
|
|
|
if err != nil {
|
2025-08-15 14:15:03 +08:00
|
|
|
|
common.SysLog(fmt.Sprintf("failed to update channel used quota: channel_id=%d, delta_quota=%d, error=%v", id, quota, err))
|
2023-06-16 16:02:00 +08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
2023-10-02 13:06:27 +08:00
|
|
|
|
|
|
|
|
|
|
func DeleteChannelByStatus(status int64) (int64, error) {
|
|
|
|
|
|
result := DB.Where("status = ?", status).Delete(&Channel{})
|
|
|
|
|
|
return result.RowsAffected, result.Error
|
|
|
|
|
|
}
|
2023-10-14 17:25:48 +08:00
|
|
|
|
|
|
|
|
|
|
func DeleteDisabledChannel() (int64, error) {
|
|
|
|
|
|
result := DB.Where("status = ? or status = ?", common.ChannelStatusAutoDisabled, common.ChannelStatusManuallyDisabled).Delete(&Channel{})
|
|
|
|
|
|
return result.RowsAffected, result.Error
|
|
|
|
|
|
}
|
2024-12-01 09:24:43 +08:00
|
|
|
|
|
|
|
|
|
|
func GetPaginatedTags(offset int, limit int) ([]*string, error) {
|
|
|
|
|
|
var tags []*string
|
|
|
|
|
|
err := DB.Model(&Channel{}).Select("DISTINCT tag").Where("tag != ''").Offset(offset).Limit(limit).Find(&tags).Error
|
|
|
|
|
|
return tags, err
|
|
|
|
|
|
}
|
2024-12-06 22:03:50 +08:00
|
|
|
|
|
|
|
|
|
|
// SearchTags returns the distinct, non-empty tags of channels matching the
// search: keyword is matched against id (numeric), name, key and base_url;
// model is matched against the models column; group, when set, must appear
// as a comma-delimited member of the group column. idSort switches the
// inner ordering from priority to id.
func SearchTags(keyword string, group string, model string, idSort bool) ([]*string, error) {
	var tags []*string
	modelsCol := "`models`"

	// PostgreSQL quotes identifiers with double quotes instead of backticks.
	if common.UsingPostgreSQL {
		modelsCol = `"models"`
	}

	baseURLCol := "`base_url`"
	// PostgreSQL quotes identifiers with double quotes instead of backticks.
	if common.UsingPostgreSQL {
		baseURLCol = `"base_url"`
	}

	order := "priority desc"
	if idSort {
		order = "id desc"
	}

	// Build the base query; never expose the key column.
	baseQuery := DB.Model(&Channel{}).Omit("key")

	// Build the WHERE clause.
	var whereClause string
	var args []interface{}
	if group != "" && group != "null" {
		var groupCondition string
		if common.UsingMySQL {
			// Wrap the group column in commas so a LIKE '%,g,%' match only
			// hits whole comma-separated entries, not substrings.
			groupCondition = `CONCAT(',', ` + commonGroupCol + `, ',') LIKE ?`
		} else {
			// sqlite, PostgreSQL: use the || concatenation operator.
			groupCondition = `(',' || ` + commonGroupCol + ` || ',') LIKE ?`
		}
		whereClause = "(id = ? OR name LIKE ? OR " + commonKeyCol + " = ? OR " + baseURLCol + " LIKE ?) AND " + modelsCol + ` LIKE ? AND ` + groupCondition
		args = append(args, common.String2Int(keyword), "%"+keyword+"%", keyword, "%"+keyword+"%", "%"+model+"%", "%,"+group+",%")
	} else {
		whereClause = "(id = ? OR name LIKE ? OR " + commonKeyCol + " = ? OR " + baseURLCol + " LIKE ?) AND " + modelsCol + " LIKE ?"
		args = append(args, common.String2Int(keyword), "%"+keyword+"%", keyword, "%"+keyword+"%", "%"+model+"%")
	}

	// Inner query selects matching tags in order; the outer query
	// de-duplicates them.
	subQuery := baseQuery.Where(whereClause, args...).
		Select("tag").
		Where("tag != ''").
		Order(order)

	err := DB.Table("(?) as sub", subQuery).
		Select("DISTINCT tag").
		Find(&tags).Error

	if err != nil {
		return nil, err
	}

	return tags, nil
}
|
2024-12-15 15:52:41 +08:00
|
|
|
|
|
2025-07-07 14:26:37 +08:00
|
|
|
|
func (channel *Channel) ValidateSettings() error {
|
|
|
|
|
|
channelParams := &dto.ChannelSettings{}
|
|
|
|
|
|
if channel.Setting != nil && *channel.Setting != "" {
|
2025-07-30 23:26:09 +08:00
|
|
|
|
err := common.Unmarshal([]byte(*channel.Setting), channelParams)
|
2025-07-07 14:26:37 +08:00
|
|
|
|
if err != nil {
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// GetSetting decodes the channel's Setting JSON into dto.ChannelSettings.
// If decoding fails, the stored setting is cleared and the channel saved so
// the broken JSON does not fail again on every call; the zero-value
// settings struct is returned in that case.
func (channel *Channel) GetSetting() dto.ChannelSettings {
	setting := dto.ChannelSettings{}
	if channel.Setting != nil && *channel.Setting != "" {
		err := common.Unmarshal([]byte(*channel.Setting), &setting)
		if err != nil {
			common.SysLog(fmt.Sprintf("failed to unmarshal setting: channel_id=%d, error=%v", channel.Id, err))
			channel.Setting = nil // clear the setting to avoid repeated failures
			_ = channel.Save()    // best-effort persist of the cleanup
		}
	}
	return setting
}
|
|
|
|
|
|
|
2025-07-07 14:26:37 +08:00
|
|
|
|
func (channel *Channel) SetSetting(setting dto.ChannelSettings) {
|
2025-07-30 23:26:09 +08:00
|
|
|
|
settingBytes, err := common.Marshal(setting)
|
2024-12-15 15:52:41 +08:00
|
|
|
|
if err != nil {
|
2025-08-15 14:15:03 +08:00
|
|
|
|
common.SysLog(fmt.Sprintf("failed to marshal setting: channel_id=%d, error=%v", channel.Id, err))
|
2024-12-15 15:52:41 +08:00
|
|
|
|
return
|
|
|
|
|
|
}
|
2025-03-09 23:42:48 +08:00
|
|
|
|
channel.Setting = common.GetPointer[string](string(settingBytes))
|
2024-12-15 15:52:41 +08:00
|
|
|
|
}
|
2024-12-25 14:19:00 +08:00
|
|
|
|
|
2025-08-10 20:21:30 +08:00
|
|
|
|
// GetOtherSettings decodes the channel's OtherSettings JSON into
// dto.ChannelOtherSettings. If decoding fails, the stored value is reset to
// "{}" and the channel saved so the broken JSON does not fail again on
// every call; the zero-value struct is returned in that case.
func (channel *Channel) GetOtherSettings() dto.ChannelOtherSettings {
	setting := dto.ChannelOtherSettings{}
	if channel.OtherSettings != "" {
		err := common.UnmarshalJsonStr(channel.OtherSettings, &setting)
		if err != nil {
			common.SysLog(fmt.Sprintf("failed to unmarshal setting: channel_id=%d, error=%v", channel.Id, err))
			channel.OtherSettings = "{}" // reset to empty JSON to avoid repeated failures
			_ = channel.Save()           // best-effort persist of the cleanup
		}
	}
	return setting
}
|
|
|
|
|
|
|
|
|
|
|
|
func (channel *Channel) SetOtherSettings(setting dto.ChannelOtherSettings) {
|
|
|
|
|
|
settingBytes, err := common.Marshal(setting)
|
|
|
|
|
|
if err != nil {
|
2025-08-15 14:15:03 +08:00
|
|
|
|
common.SysLog(fmt.Sprintf("failed to marshal setting: channel_id=%d, error=%v", channel.Id, err))
|
2025-08-10 20:21:30 +08:00
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
channel.OtherSettings = string(settingBytes)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-03-29 14:39:39 +08:00
|
|
|
|
func (channel *Channel) GetParamOverride() map[string]interface{} {
|
|
|
|
|
|
paramOverride := make(map[string]interface{})
|
|
|
|
|
|
if channel.ParamOverride != nil && *channel.ParamOverride != "" {
|
2025-07-30 23:26:09 +08:00
|
|
|
|
err := common.Unmarshal([]byte(*channel.ParamOverride), ¶mOverride)
|
2025-03-29 14:39:39 +08:00
|
|
|
|
if err != nil {
|
2025-08-15 14:15:03 +08:00
|
|
|
|
common.SysLog(fmt.Sprintf("failed to unmarshal param override: channel_id=%d, error=%v", channel.Id, err))
|
2025-03-29 14:39:39 +08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
return paramOverride
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-08-24 01:02:23 +08:00
|
|
|
|
func (channel *Channel) GetHeaderOverride() map[string]interface{} {
|
|
|
|
|
|
headerOverride := make(map[string]interface{})
|
|
|
|
|
|
if channel.HeaderOverride != nil && *channel.HeaderOverride != "" {
|
|
|
|
|
|
err := common.Unmarshal([]byte(*channel.HeaderOverride), &headerOverride)
|
|
|
|
|
|
if err != nil {
|
2025-08-24 01:32:19 +08:00
|
|
|
|
common.SysLog(fmt.Sprintf("failed to unmarshal header override: channel_id=%d, error=%v", channel.Id, err))
|
2025-08-24 01:02:23 +08:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
return headerOverride
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-12-25 14:19:00 +08:00
|
|
|
|
func GetChannelsByIds(ids []int) ([]*Channel, error) {
|
|
|
|
|
|
var channels []*Channel
|
|
|
|
|
|
err := DB.Where("id in (?)", ids).Find(&channels).Error
|
|
|
|
|
|
return channels, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func BatchSetChannelTag(ids []int, tag *string) error {
|
|
|
|
|
|
// 开启事务
|
|
|
|
|
|
tx := DB.Begin()
|
|
|
|
|
|
if tx.Error != nil {
|
|
|
|
|
|
return tx.Error
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// 更新标签
|
|
|
|
|
|
err := tx.Model(&Channel{}).Where("id in (?)", ids).Update("tag", tag).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
tx.Rollback()
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// update ability status
|
|
|
|
|
|
channels, err := GetChannelsByIds(ids)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
tx.Rollback()
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
for _, channel := range channels {
|
|
|
|
|
|
err = channel.UpdateAbilities(tx)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
tx.Rollback()
|
|
|
|
|
|
return err
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// 提交事务
|
|
|
|
|
|
return tx.Commit().Error
|
|
|
|
|
|
}
|
🚀 feat(pagination): unify backend-driven pagination & improve channel tag aggregation
SUMMARY
• Migrated Token, Task, Midjourney, Channel, Redemption tables to true server-side pagination.
• Added total / page / page_size metadata in API responses; switched all affected React tables to consume new structure.
• Implemented counting helpers:
– model/token.go CountUserTokens
– model/task.go TaskCountAllTasks / TaskCountAllUserTask
– model/midjourney.go CountAllTasks / CountAllUserTask
– model/channel.go CountAllChannels / CountAllTags
• Refactored controllers (token, task, midjourney, channel) for 1-based paging & aggregated returns.
• Redesigned `ChannelsTable.js`:
– `loadChannels`, `syncPageData`, `enrichChannels` for tag-mode grouping without recursion.
– Fixed runtime white-screen (maximum call-stack) by removing child duplication.
– Pagination, search, tag-mode, idSort all hot-reload correctly.
• Removed unused `log` import in controller/midjourney.go.
BREAKING CHANGES
Front-end consumers must now expect data.items / total / page / page_size from list endpoints (`/api/channel`, `/api/task`, `/api/mj`, `/api/token`, etc.).
2025-06-12 17:25:25 +08:00
|
|
|
|
|
|
|
|
|
|
// CountAllChannels returns total channels in DB
|
|
|
|
|
|
func CountAllChannels() (int64, error) {
|
|
|
|
|
|
var total int64
|
|
|
|
|
|
err := DB.Model(&Channel{}).Count(&total).Error
|
|
|
|
|
|
return total, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// CountAllTags returns number of non-empty distinct tags
|
|
|
|
|
|
func CountAllTags() (int64, error) {
|
|
|
|
|
|
var total int64
|
|
|
|
|
|
err := DB.Model(&Channel{}).Where("tag is not null AND tag != ''").Distinct("tag").Count(&total).Error
|
|
|
|
|
|
return total, err
|
|
|
|
|
|
}
|
2025-06-18 02:33:18 +08:00
|
|
|
|
|
|
|
|
|
|
// Get channels of specified type with pagination
|
|
|
|
|
|
func GetChannelsByType(startIdx int, num int, idSort bool, channelType int) ([]*Channel, error) {
|
|
|
|
|
|
var channels []*Channel
|
|
|
|
|
|
order := "priority desc"
|
|
|
|
|
|
if idSort {
|
|
|
|
|
|
order = "id desc"
|
|
|
|
|
|
}
|
|
|
|
|
|
err := DB.Where("type = ?", channelType).Order(order).Limit(num).Offset(startIdx).Omit("key").Find(&channels).Error
|
|
|
|
|
|
return channels, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Count channels of specific type
|
|
|
|
|
|
func CountChannelsByType(channelType int) (int64, error) {
|
|
|
|
|
|
var count int64
|
|
|
|
|
|
err := DB.Model(&Channel{}).Where("type = ?", channelType).Count(&count).Error
|
|
|
|
|
|
return count, err
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Return map[type]count for all channels
|
|
|
|
|
|
func CountChannelsGroupByType() (map[int64]int64, error) {
|
|
|
|
|
|
type result struct {
|
|
|
|
|
|
Type int64 `gorm:"column:type"`
|
|
|
|
|
|
Count int64 `gorm:"column:count"`
|
|
|
|
|
|
}
|
|
|
|
|
|
var results []result
|
|
|
|
|
|
err := DB.Model(&Channel{}).Select("type, count(*) as count").Group("type").Find(&results).Error
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return nil, err
|
|
|
|
|
|
}
|
|
|
|
|
|
counts := make(map[int64]int64)
|
|
|
|
|
|
for _, r := range results {
|
|
|
|
|
|
counts[r.Type] = r.Count
|
|
|
|
|
|
}
|
|
|
|
|
|
return counts, nil
|
|
|
|
|
|
}
|