feat(api): refactor cache key handling for server and user lists
parent 82fd674ae4
commit 6dc1bee14c
@@ -12,9 +12,6 @@ const SiteConfigKey = "system:site_config"

 // SubscribeConfigKey Subscribe Config Key
 const SubscribeConfigKey = "system:subscribe_config"

 // ApplicationKey Application Key
 const ApplicationKey = "system:application"

 // RegisterConfigKey Register Config Key
 const RegisterConfigKey = "system:register_config"
@@ -51,26 +48,12 @@ const AuthCodeCacheKey = "auth:verify:email"

 // AuthCodeTelephoneCacheKey Register Code Cache Key
 const AuthCodeTelephoneCacheKey = "auth:verify:telephone"

-// ServerUserListCacheKey Server User List Cache Key
-const ServerUserListCacheKey = "server:user_list:id:"
-
-// ServerConfigCacheKey Server Config Cache Key
-const ServerConfigCacheKey = "server:config:id:"
-
-// CommonStat Cache Key
+// CommonStatCacheKey CommonStat Cache Key
 const CommonStatCacheKey = "common:stat"

 // ServerStatusCacheKey Server Status Cache Key
 const ServerStatusCacheKey = "server:status:id:"

 // ServerCountCacheKey Server Count Cache Key
 const ServerCountCacheKey = "server:count"

 // UserBindTelegramCacheKey User Bind Telegram Cache Key
 const UserBindTelegramCacheKey = "user:bind:telegram:code:"

 const CacheSmsCount = "cache:sms:count"

 // SendIntervalKeyPrefix Auth Code Send Interval Key Prefix
 const SendIntervalKeyPrefix = "send:interval:"
@@ -7,7 +7,6 @@ import (
 	"github.com/gin-gonic/gin"
+	"github.com/perfect-panel/server/internal/model/node"

-	"github.com/perfect-panel/server/internal/config"
 	"github.com/perfect-panel/server/internal/svc"
 	"github.com/perfect-panel/server/internal/types"
 	"github.com/perfect-panel/server/pkg/logger"
@@ -31,7 +30,7 @@ func NewGetServerConfigLogic(ctx *gin.Context, svcCtx *svc.ServiceContext) *GetServerConfigLogic {
 }

 func (l *GetServerConfigLogic) GetServerConfig(req *types.GetServerConfigRequest) (resp *types.GetServerConfigResponse, err error) {
-	cacheKey := fmt.Sprintf("%s%d", config.ServerConfigCacheKey, req.ServerId)
+	cacheKey := fmt.Sprintf("%s%d:%s", node.ServerConfigCacheKey, req.ServerId, req.Protocol)
 	cache, err := l.svcCtx.Redis.Get(l.ctx, cacheKey).Result()
 	if err == nil {
 		if cache != "" {
@@ -42,7 +41,7 @@ func (l *GetServerConfigLogic) GetServerConfig(req *types.GetServerConfigRequest) (resp *types.GetServerConfigResponse, err error) {
 				return nil, xerr.StatusNotModified
 			}
 			l.ctx.Header("ETag", etag)
-			resp := &types.GetServerConfigResponse{}
+			resp = &types.GetServerConfigResponse{}
 			err = json.Unmarshal([]byte(cache), resp)
 			if err != nil {
 				l.Errorw("[ServerConfigCacheKey] json unmarshal error", logger.Field("error", err.Error()))
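For readers tracing the new key layout: GetServerConfig now appends the protocol to the server ID. A minimal sketch of the resulting key shape (the constant mirrors node.ServerConfigCacheKey below; the wrapper function is illustrative only):

```go
package main

import "fmt"

// Mirrors the node.ServerConfigCacheKey constant introduced in this commit.
const ServerConfigCacheKey = "server:config:"

// serverConfigKey reproduces the key format used by GetServerConfig:
// "server:config:<serverId>:<protocol>".
func serverConfigKey(serverID int64, protocol string) string {
	return fmt.Sprintf("%s%d:%s", ServerConfigCacheKey, serverID, protocol)
}

func main() {
	fmt.Println(serverConfigKey(42, "vless")) // server:config:42:vless
}
```

Because the protocol is now part of the key, a single server owns several config entries, which is why invalidation later in this commit switches to a SCAN over the `server:config:<id>` prefix.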
@@ -8,7 +8,6 @@ import (
 	"github.com/gin-gonic/gin"
+	"github.com/perfect-panel/server/internal/model/node"

-	"github.com/perfect-panel/server/internal/config"
 	"github.com/perfect-panel/server/internal/svc"
 	"github.com/perfect-panel/server/internal/types"
 	"github.com/perfect-panel/server/pkg/logger"
@@ -33,7 +32,7 @@ func NewGetServerUserListLogic(ctx *gin.Context, svcCtx *svc.ServiceContext) *GetServerUserListLogic {
 }

 func (l *GetServerUserListLogic) GetServerUserList(req *types.GetServerUserListRequest) (resp *types.GetServerUserListResponse, err error) {
-	cacheKey := fmt.Sprintf("%s%d", config.ServerUserListCacheKey, req.ServerId)
+	cacheKey := fmt.Sprintf("%s%d", node.ServerUserListCacheKey, req.ServerId)
 	cache, err := l.svcCtx.Redis.Get(l.ctx, cacheKey).Result()
 	if cache != "" {
 		etag := tool.GenerateETag([]byte(cache))
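Both handlers short-circuit on a matching ETag before touching the cached body. The handlers themselves return an xerr.StatusNotModified sentinel; the standalone sketch below inlines the 304 instead, and the hash inside generateETag is an assumption, since tool.GenerateETag's implementation is not part of this diff:

```go
package main

import (
	"crypto/sha1"
	"encoding/hex"
	"net/http"

	"github.com/gin-gonic/gin"
)

// generateETag stands in for tool.GenerateETag; a content hash is assumed.
func generateETag(body []byte) string {
	sum := sha1.Sum(body)
	return `"` + hex.EncodeToString(sum[:]) + `"`
}

// serveCached compares the client's If-None-Match header against the cached
// body's ETag and answers 304 on a match, 200 with the body otherwise.
func serveCached(c *gin.Context, cached []byte) {
	etag := generateETag(cached)
	if c.GetHeader("If-None-Match") == etag {
		c.Status(http.StatusNotModified)
		return
	}
	c.Header("ETag", etag)
	c.Data(http.StatusOK, "application/json", cached)
}
```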
@@ -15,16 +15,17 @@ type customServerLogicModel interface {

 const (
 	// ServerUserListCacheKey Server User List Cache Key
-	ServerUserListCacheKey = "server:user_list:id:"
+	ServerUserListCacheKey = "server:user:"

 	// ServerConfigCacheKey Server Config Cache Key
-	ServerConfigCacheKey = "server:config:id:"
+	ServerConfigCacheKey = "server:config:"
 )

 // FilterParams Filter Server Params
 type FilterParams struct {
 	Page   int
 	Size   int
+	Ids    []int64 // Server IDs
 	Search string
 }
@@ -53,7 +54,9 @@ func (m *customServerModel) FilterServerList(ctx context.Context, params *FilterParams) (int64, []*Server, error) {
 		s := "%" + params.Search + "%"
 		query = query.Where("`name` LIKE ? OR `address` LIKE ?", s, s)
 	}
+	if len(params.Ids) > 0 {
+		query = query.Where("id IN ?", params.Ids)
+	}
 	err := query.Count(&total).Limit(params.Size).Offset((params.Page - 1) * params.Size).Find(&servers).Error
 	return total, servers, err
 }
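The new Ids filter slots into the usual GORM build-then-paginate chain. A self-contained version of the same pattern, with the model trimmed to the fields the filter touches (an assumption; the real Server model is larger):

```go
package model

import "gorm.io/gorm"

// Server is trimmed to the columns the filter references.
type Server struct {
	Id      int64
	Name    string
	Address string
}

// filterServers mirrors FilterServerList: optional LIKE search, optional
// ID allow-list, then Count before Limit/Offset pagination.
func filterServers(db *gorm.DB, search string, ids []int64, page, size int) (int64, []*Server, error) {
	var total int64
	var servers []*Server
	query := db.Model(&Server{})
	if search != "" {
		s := "%" + search + "%"
		query = query.Where("`name` LIKE ? OR `address` LIKE ?", s, s)
	}
	if len(ids) > 0 {
		query = query.Where("id IN ?", ids)
	}
	err := query.Count(&total).Limit(size).Offset((page - 1) * size).Find(&servers).Error
	return total, servers, err
}
```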
@@ -101,7 +104,49 @@ func (m *customServerModel) ClearNodeCache(ctx context.Context, params *FilterNodeParams) error {
 	}
 	var cacheKeys []string
 	for _, node := range nodes {
-		cacheKeys = append(cacheKeys, fmt.Sprintf("%s%d", ServerUserListCacheKey, node.ServerId), fmt.Sprintf("%s%d", ServerConfigCacheKey, node.ServerId))
+		cacheKeys = append(cacheKeys, fmt.Sprintf("%s%d", ServerUserListCacheKey, node.ServerId))
+		if node.Protocol != "" {
+			var cursor uint64
+			for {
+				keys, newCursor, err := m.Cache.Scan(ctx, cursor, fmt.Sprintf("%s%d*", ServerConfigCacheKey, node.ServerId), 100).Result()
+				if err != nil {
+					return err
+				}
+				if len(keys) > 0 {
+					cacheKeys = append(cacheKeys, keys...)
+				}
+				cursor = newCursor
+				if cursor == 0 {
+					break
+				}
+			}
+		}
 	}

 	if len(cacheKeys) > 0 {
+		cacheKeys = tool.RemoveDuplicateElements(cacheKeys...)
 		return m.Cache.Del(ctx, cacheKeys...).Err()
 	}
 	return nil
 }

+// ClearServerCache Clear Server Cache
+func (m *customServerModel) ClearServerCache(ctx context.Context, serverId int64) error {
+	var cacheKeys []string
+	cacheKeys = append(cacheKeys, fmt.Sprintf("%s%d", ServerUserListCacheKey, serverId))
+	var cursor uint64
+	for {
+		keys, newCursor, err := m.Cache.Scan(ctx, cursor, fmt.Sprintf("%s%d*", ServerConfigCacheKey, serverId), 100).Result()
+		if err != nil {
+			return err
+		}
+		if len(keys) > 0 {
+			cacheKeys = append(cacheKeys, keys...)
+		}
+		cursor = newCursor
+		if cursor == 0 {
+			break
+		}
+	}
+
+	if len(cacheKeys) > 0 {
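The cursor-based SCAN loop appears twice above. Factored out, the pattern looks like this (the helper name and the batch size of 100 are assumptions, not the repo's API; SCAN is used instead of KEYS so Redis is never blocked):

```go
package model

import (
	"context"

	"github.com/redis/go-redis/v9"
)

// deleteByPrefix scans for keys matching prefix* in batches and deletes them.
func deleteByPrefix(ctx context.Context, rdb *redis.Client, prefix string) error {
	var cursor uint64
	for {
		keys, next, err := rdb.Scan(ctx, cursor, prefix+"*", 100).Result()
		if err != nil {
			return err
		}
		if len(keys) > 0 {
			if err := rdb.Del(ctx, keys...).Err(); err != nil {
				return err
			}
		}
		cursor = next
		if cursor == 0 { // a zero cursor means the iteration is complete
			return nil
		}
	}
}
```

Note that SCAN may return the same key more than once over a full iteration, which is why ClearNodeCache runs tool.RemoveDuplicateElements over the collected keys before the final DEL.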
@@ -5,8 +5,6 @@ import (
 	"errors"
 	"fmt"

-	"github.com/perfect-panel/server/internal/config"
-
 	"github.com/perfect-panel/server/pkg/cache"
 	"github.com/redis/go-redis/v9"
 	"gorm.io/gorm"
@@ -62,20 +60,23 @@ func (m *defaultServerModel) batchGetCacheKeys(Servers ...*Server) []string {
 	return keys
 }

 func (m *defaultServerModel) getCacheKeys(data *Server) []string {
 	if data == nil {
 		return []string{}
 	}
 	detailsKey := fmt.Sprintf("%s%v", CacheServerDetailPrefix, data.Id)
 	ServerIdKey := fmt.Sprintf("%s%v", cacheServerIdPrefix, data.Id)
-	configIdKey := fmt.Sprintf("%s%v", config.ServerConfigCacheKey, data.Id)
-	userIDKey := fmt.Sprintf("%s%d", config.ServerUserListCacheKey, data.Id)
+	//configIdKey := fmt.Sprintf("%s%v", config.ServerConfigCacheKey, data.Id)
+	//userIDKey := fmt.Sprintf("%s%d", config.ServerUserListCacheKey, data.Id)
+
+	// query protocols to get config keys
 	cacheKeys := []string{
 		ServerIdKey,
 		detailsKey,
-		configIdKey,
-		userIDKey,
+		//configIdKey,
+		//userIDKey,
 	}
 	return cacheKeys
 }
@@ -5,7 +5,6 @@ import (
 	"fmt"
 	"strings"

-	"github.com/perfect-panel/server/internal/config"
 	"gorm.io/gorm"
 )
@@ -45,10 +44,10 @@ var (

 // ClearCache Clear Cache
 func (m *customServerModel) ClearCache(ctx context.Context, id int64) error {
 	serverIdKey := fmt.Sprintf("%s%v", cacheServerIdPrefix, id)
-	configKey := fmt.Sprintf("%s%d", config.ServerConfigCacheKey, id)
-	userListKey := fmt.Sprintf("%s%v", config.ServerUserListCacheKey, id)
+	//configKey := fmt.Sprintf("%s%d", config.ServerConfigCacheKey, id)
+	//userListKey := fmt.Sprintf("%s%v", config.ServerUserListCacheKey, id)

-	return m.DelCacheCtx(ctx, serverIdKey, configKey, userListKey)
+	return m.DelCacheCtx(ctx, serverIdKey)
 }

 // QueryServerCountByServerGroups Query Server Count By Server Groups
@@ -3,8 +3,11 @@ package subscription

 import (
 	"context"
 	"encoding/json"
+	"strings"
 	"time"

+	"github.com/perfect-panel/server/internal/model/node"
+	"github.com/perfect-panel/server/pkg/tool"
 	queue "github.com/perfect-panel/server/queue/types"

 	"github.com/perfect-panel/server/pkg/logger"
@@ -60,7 +63,7 @@ func (l *CheckSubscriptionLogic) ProcessTask(ctx context.Context, _ *asynq.Task) error {
 				return err
 			}
 		}

+		l.clearServerCache(ctx, list...)
 		logger.Infow("[Check Subscription Traffic] Update subscribe status", logger.Field("user_ids", ids), logger.Field("count", int64(len(ids))))

 	} else {
@@ -102,6 +105,8 @@ func (l *CheckSubscriptionLogic) ProcessTask(ctx context.Context, _ *asynq.Task) error {
 			logger.Errorw("[Check Subscription Traffic] Clear subscribe cache failed", logger.Field("error", err.Error()))
 			return err
 		}
+		l.clearServerCache(ctx, list...)
+
 		logger.Info("[Check Subscription Expire] Update subscribe status", logger.Field("user_ids", ids), logger.Field("count", int64(len(ids))))
 	} else {
 		logger.Info("[Check Subscription Expire] No subscribe need to update")
@@ -192,3 +197,41 @@ func (l *CheckSubscriptionLogic) sendTrafficNotify(ctx context.Context, subs []int64) error {
 	}
 	return nil
 }
+
+func (l *CheckSubscriptionLogic) clearServerCache(ctx context.Context, userSubs ...*user.Subscribe) {
+	subs := make(map[int64]bool)
+	for _, sub := range userSubs {
+		if _, ok := subs[sub.SubscribeId]; !ok {
+			subs[sub.SubscribeId] = true
+		}
+	}
+
+	for sub := range subs {
+		info, err := l.svc.SubscribeModel.FindOne(ctx, sub)
+		if err != nil {
+			logger.Errorw("[CheckSubscription] FindOne subscribe failed", logger.Field("error", err.Error()), logger.Field("subscribe_id", sub))
+			continue
+		}
+		if info != nil && info.Id == sub {
+			var nodes []int64
+			if info.Nodes != "" {
+				nodes = tool.StringToInt64Slice(info.Nodes)
+			}
+			var tag []string
+			if info.NodeTags != "" {
+				tag = strings.Split(info.NodeTags, ",")
+			}
+
+			err = l.svc.NodeModel.ClearNodeCache(ctx, &node.FilterNodeParams{
+				Page:     1,
+				Size:     1000,
+				Tag:      tag,
+				ServerId: nodes,
+			})
+			if err != nil {
+				logger.Errorw("[CheckSubscription] ClearNodeCache failed", logger.Field("error", err.Error()), logger.Field("subscribe_id", sub))
+				continue
+			}
+		}
+	}
+}
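clearServerCache collapses the per-user rows to distinct subscribe IDs with a map used as a set. The same idea in isolation (the function name is illustrative):

```go
package subscription

// uniqueIDs returns the distinct values of ids, preserving first-seen order.
func uniqueIDs(ids []int64) []int64 {
	seen := make(map[int64]bool) // must be initialized; writes to a nil map panic
	out := make([]int64, 0, len(ids))
	for _, id := range ids {
		if !seen[id] {
			seen[id] = true
			out = append(out, id)
		}
	}
	return out
}
```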
@@ -9,10 +9,12 @@ import (
 	"time"

 	"github.com/perfect-panel/server/internal/model/log"
+	"github.com/perfect-panel/server/internal/model/node"
+	"github.com/perfect-panel/server/internal/model/subscribe"
 	"github.com/perfect-panel/server/internal/model/user"
 	"github.com/perfect-panel/server/internal/svc"
 	"github.com/perfect-panel/server/pkg/logger"
 	"github.com/perfect-panel/server/pkg/tool"
 	"github.com/perfect-panel/server/queue/types"

 	"github.com/hibiken/asynq"
@@ -584,6 +586,8 @@ func (l *ResetTrafficLogic) isRetryableError(err error) bool {

 // clearCache clears the reset traffic cache
 func (l *ResetTrafficLogic) clearCache(ctx context.Context, list []*user.Subscribe) {
 	if len(list) != 0 {
+		subs := make(map[int64]bool)
+
 		for _, sub := range list {
 			if sub.SubscribeId > 0 {
 				err := l.svc.UserModel.ClearSubscribeCache(ctx, sub)
@@ -592,10 +596,42 @@ func (l *ResetTrafficLogic) clearCache(ctx context.Context, list []*user.Subscribe) {
 						logger.Field("subscribeId", sub.SubscribeId),
 						logger.Field("error", err.Error()))
 				}
+				if _, ok := subs[sub.SubscribeId]; !ok {
+					subs[sub.SubscribeId] = true
+				}
 			}
 			// Insert traffic reset log
 			l.insertLog(ctx, sub.Id, sub.UserId)
 		}

+		for sub := range subs {
+			info, err := l.svc.SubscribeModel.FindOne(ctx, sub)
+			if err != nil {
+				logger.Errorw("[ResetTraffic] FindOne subscribe failed", logger.Field("error", err.Error()), logger.Field("subscribe_id", sub))
+				continue
+			}
+			if info != nil && info.Id == sub {
+				var nodes []int64
+				if info.Nodes != "" {
+					nodes = tool.StringToInt64Slice(info.Nodes)
+				}
+				var tag []string
+				if info.NodeTags != "" {
+					tag = strings.Split(info.NodeTags, ",")
+				}
+
+				err = l.svc.NodeModel.ClearNodeCache(ctx, &node.FilterNodeParams{
+					Page:     1,
+					Size:     1000,
+					Tag:      tag,
+					ServerId: nodes,
+				})
+				if err != nil {
+					logger.Errorw("[ResetTraffic] ClearNodeCache failed", logger.Field("error", err.Error()), logger.Field("subscribe_id", sub))
+					continue
+				}
+			}
+		}
 	}
 }
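tool.StringToInt64Slice is referenced but not shown in this diff; a plausible stand-in for readers following the node-ID parsing (its behavior here, "1,2,3" -> [1 2 3], is an assumption, not the repo's code):

```go
package tool

import (
	"strconv"
	"strings"
)

// StringToInt64Slice parses a comma-separated list of integers, skipping
// any element that fails to parse. (Assumed behavior.)
func StringToInt64Slice(s string) []int64 {
	var out []int64
	for _, part := range strings.Split(s, ",") {
		if n, err := strconv.ParseInt(strings.TrimSpace(part), 10, 64); err == nil {
			out = append(out, n)
		}
	}
	return out
}
```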