diff --git a/.gitignore b/.gitignore
index 71a555b..6438892 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,6 @@
 .vscode/
 .idea/
+tmp/
 *.code-workspace
diff --git a/frontend/js/pages/logs/index.js b/frontend/js/pages/logs/index.js
index e9c99ba..ea0c10f 100644
--- a/frontend/js/pages/logs/index.js
+++ b/frontend/js/pages/logs/index.js
@@ -3,11 +3,17 @@
 import { apiFetchJson } from '../../services/api.js';
 import LogList from './logList.js';

+// [Final] A shared data store used to cache Groups and Keys
+const dataStore = {
+    groups: new Map(),
+    keys: new Map(),
+};
+
 class LogsPage {
     constructor() {
         this.state = {
             logs: [],
-            pagination: { page: 1, pages: 1, total: 0, page_size: 20 }, // includes page_size
+            pagination: { page: 1, pages: 1, total: 0, page_size: 20 },
             isLoading: true,
             filters: { page: 1, page_size: 20 }
         };
@@ -19,21 +25,30 @@ class LogsPage {
         this.initialized = !!this.elements.tableBody;
         if (this.initialized) {
-            this.logList = new LogList(this.elements.tableBody);
+            this.logList = new LogList(this.elements.tableBody, dataStore);
         }
     }

     async init() {
-        if (!this.initialized) {
-            console.error("LogsPage: Could not initialize. Essential container element 'logs-table-body' is missing.");
-            return;
-        }
+        if (!this.initialized) return;
         this.initEventListeners();
+        // Page initialization: load the groups first, then the logs
+        await this.loadGroupsOnce();
         await this.loadAndRenderLogs();
     }

-    initEventListeners() {
-        // Pagination and filter event listeners will be added in a follow-up task
+    initEventListeners() { /* pagination and filter event listeners */ }
+
+    async loadGroupsOnce() {
+        if (dataStore.groups.size > 0) return; // avoid reloading
+        try {
+            const { success, data } = await apiFetchJson("/admin/keygroups");
+            if (success && Array.isArray(data)) {
+                data.forEach(group => dataStore.groups.set(group.id, group));
+            }
+        } catch (error) {
+            console.error("Failed to load key groups:", error);
+        }
     }

     async loadAndRenderLogs() {
@@ -41,36 +56,45 @@ class LogsPage {
         this.logList.renderLoading();
         try {
-            const url = `/admin/logs?page=${this.state.filters.page}&page_size=${this.state.filters.page_size}`;
-            const responseData = await apiFetchJson(url);
+            const query = new URLSearchParams(this.state.filters);
+            const { success, data } = await apiFetchJson(`/admin/logs?${query.toString()}`);

-            if (responseData && responseData.success && Array.isArray(responseData.data)) {
-                this.state.logs = responseData.data;
+            if (success && typeof data === 'object') {
+                const { items, total, page, page_size } = data;
+                this.state.logs = items;
+                this.state.pagination = { page, page_size, total, pages: Math.ceil(total / page_size) };

-                // [Assumption] The current response carries no pagination info, so simulate it from the request and the returned data
-                // TODO: replace this simulated data once the backend API returns a pagination object
-                this.state.pagination = {
-                    page: this.state.filters.page,
-                    page_size: this.state.filters.page_size,
-                    total: responseData.data.length, // an inaccurate temporary value
-                    pages: Math.ceil(responseData.data.length / this.state.filters.page_size) // likewise inaccurate
-                };
+                // [Core] Before rendering, batch-load the not-yet-cached key info needed by this page of logs
+                await this.enrichLogsWithKeyNames(items);

-                // [Change] Pass the pagination state to the render method
+                // Call render; at this point dataStore already holds all required data
                 this.logList.render(this.state.logs, this.state.pagination);
             } else {
-                console.error("API response for logs is incorrect:", responseData);
                 this.logList.render([], this.state.pagination);
             }
-        } catch (error)
-        {
+        } catch (error) {
             console.error("Failed to load logs:", error);
             this.logList.render([], this.state.pagination);
         } finally {
             this.state.isLoading = false;
         }
     }
+
+    async enrichLogsWithKeyNames(logs) {
+        const missingKeyIds = [...new Set(
+            logs.filter(log => log.KeyID && !dataStore.keys.has(log.KeyID)).map(log => log.KeyID)
+        )];
+        if (missingKeyIds.length === 0) return;
+        try {
+            const idsQuery = missingKeyIds.join(',');
+            const { success, data } = await apiFetchJson(`/admin/apikeys?ids=${idsQuery}`);
+            if (success && Array.isArray(data)) {
+                data.forEach(key => dataStore.keys.set(key.ID, key));
+            }
+        } catch (error) {
+            console.error(`Failed to fetch key details:`, error);
+        }
+    }
 }

 export default function() {
diff --git a/frontend/js/pages/logs/logList.js b/frontend/js/pages/logs/logList.js
index b0747b7..79655a7 100644
--- a/frontend/js/pages/logs/logList.js
+++ b/frontend/js/pages/logs/logList.js
@@ -1,5 +1,6 @@
 // Filename: frontend/js/pages/logs/logList.js
-// --- [Extended] Static map of error codes to styles (taken from the official Gemini docs) ---
+import { escapeHTML } from '../../utils/utils.js';
+
 const STATIC_ERROR_MAP = {
     'API_KEY_INVALID': { type: '密钥无效', style: 'red' },
     'INVALID_ARGUMENT': { type: '参数无效', style: 'red' },
@@ -23,10 +24,8 @@ const STATUS_CODE_MAP = {
     500: { type: '内部服务错误', style: 'yellow' },
     503: { type: '服务不可用', style: 'yellow' }
 };
-// --- [New] Special-case rules (checked first, highest priority) ---
 const SPECIAL_CASE_MAP = [
     { code: 400, keyword: 'api key not found', type: '无效密钥', style: 'red' },
-    // The model-configuration-error rule implemented earlier could also be moved here to keep things consistent
     { code: 404, keyword: 'call listmodels', type: '模型配置错误', style: 'orange' }
 ];

@@ -41,16 +40,13 @@ const styleToClass = (style) => {
     }
 };

-// [Fix] Renamed the regular expression so its meaning is clear
+
 const errorCodeRegex = /(\d+)$/;

-// [Fix] Removed the MODEL_STYLE_MAP declaration because it is not used in _formatModelName
-// It can be re-added later if needed
-// const MODEL_STYLE_MAP = { ... };
-
 class LogList {
-    constructor(container) {
+    constructor(container, dataStore) {
         this.container = container;
+        this.dataStore = dataStore;
         if (!this.container) console.error("LogList: container element (tbody) not found.");
     }
@@ -125,15 +121,21 @@ class LogList {
     }

     _formatModelName(modelName) {
-        // [Fix] Removed the dependency on MODEL_STYLE_MAP and simplified to a single style
-        // This avoids potential errors from MODEL_STYLE_MAP being undefined
-        const styleClass = ''; // style logic can be added back here if needed
+        const styleClass = '';
        return `
${modelName}
`;
    }

    createLogRowHtml(log, index) {
-        const groupName = log.GroupDisplayName || (log.GroupID ? `Group #${log.GroupID}` : 'N/A');
-        const apiKeyName = log.APIKeyName || (log.KeyID ? `Key #${log.KeyID}` : 'N/A');
+        const group = this.dataStore.groups.get(log.GroupID);
+        const groupName = group ? group.display_name : (log.GroupID ? `Group #${log.GroupID}` : 'N/A');
+        const key = this.dataStore.keys.get(log.KeyID);
+        let apiKeyDisplay;
+        if (key && key.APIKey && key.APIKey.length >= 8) {
+            const masked = `${key.APIKey.substring(0, 4)}......${key.APIKey.substring(key.APIKey.length - 4)}`;
+            apiKeyDisplay = escapeHTML(masked);
+        } else {
+            apiKeyDisplay = log.KeyID ? `Key #${log.KeyID}` : 'N/A';
+        }
        const errorInfo = this._interpretError(log);
        const modelNameFormatted = this._formatModelName(log.ModelName);
        const errorMessageAttr = log.ErrorMessage ? `data-error-message="${escape(log.ErrorMessage)}"` : '';
@@ -142,7 +144,7 @@ class LogList {
            ${index}
-            ${apiKeyName}
+            ${apiKeyDisplay}
            ${groupName}
            ${errorInfo.type}
            ${errorInfo.statusCodeHtml}
@@ -158,5 +160,4 @@ class LogList {
     }
 }

-// [Core fix] Removed all redundant code at the end of the file, keeping only the default export
 export default LogList;
diff --git a/internal/handlers/apikey_handler.go b/internal/handlers/apikey_handler.go
index ccb454a..878243c 100644
--- a/internal/handlers/apikey_handler.go
+++ b/internal/handlers/apikey_handler.go
@@ -7,7 +7,9 @@ import (
 	"gemini-balancer/internal/response"
 	"gemini-balancer/internal/service"
 	"gemini-balancer/internal/task"
+	"net/http"
 	"strconv"
+	"strings"

 	"github.com/gin-gonic/gin"
 	"gorm.io/gorm"
@@ -160,6 +162,29 @@ func (h *APIKeyHandler) ListAPIKeys(c *gin.Context) {
 		response.Error(c, errors.NewAPIError(errors.ErrBadRequest, err.Error()))
 		return
 	}
+	if params.IDs != "" {
+		idStrs := strings.Split(params.IDs, ",")
+		ids := make([]uint, 0, len(idStrs))
+		for _, s := range idStrs {
+			id, err := strconv.ParseUint(s, 10, 64)
+			if err == nil {
+				ids = append(ids, uint(id))
+			}
+		}
+		if len(ids) > 0 {
+			keys, err := h.apiKeyService.GetKeysByIds(ids)
+			if err != nil {
+				response.Error(c, &errors.APIError{
+					HTTPStatus: http.StatusInternalServerError,
+					Code:       "DATA_FETCH_ERROR",
+					Message:    err.Error(),
+				})
+				return
+			}
+			response.Success(c, keys)
+			return
+		}
+	}
 	if params.Page <= 0 {
 		params.Page = 1
 	}
diff --git a/internal/handlers/log_handler.go b/internal/handlers/log_handler.go
index c080837..8e1edeb 100644
--- a/internal/handlers/log_handler.go
+++ b/internal/handlers/log_handler.go
@@ -3,14 +3,13 @@ package handlers

 import (
 	"gemini-balancer/internal/errors"
-	"gemini-balancer/internal/models"
 	"gemini-balancer/internal/response"
 	"gemini-balancer/internal/service"
+	"strconv"

 	"github.com/gin-gonic/gin"
 )

-// LogHandler handles log-related HTTP requests
 type LogHandler struct {
 	logService *service.LogService
 }
@@ -20,14 +19,22 @@ func NewLogHandler(logService *service.LogService) *LogHandler {
 }

 func (h *LogHandler) GetLogs(c *gin.Context) {
-	// Pass the Gin context straight to the service layer and let it parse the query parameters itself
-	logs, err := h.logService.GetLogs(c)
+	// Call the new service function, which returns the log list plus the total count
+	logs, total, err := h.logService.GetLogs(c)
 	if err != nil {
 		response.Error(c, errors.ErrDatabase)
 		return
 	}
-	if logs == nil {
-		logs = []models.RequestLog{}
-	}
-	response.Success(c, logs)
+
+	// Parse the pagination params for the response body
+	page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
+	pageSize, _ := strconv.Atoi(c.DefaultQuery("page_size", "20"))
+
+	// Use the standard paginated response structure
+	response.Success(c, gin.H{
+		"items":     logs,
+		"total":     total,
+		"page":      page,
+		"page_size": pageSize,
+	})
 }
diff --git a/internal/handlers/proxy_handler.go b/internal/handlers/proxy_handler.go
index df162c6..0c4a2fc 100644
--- a/internal/handlers/proxy_handler.go
+++ b/internal/handlers/proxy_handler.go
@@ -136,34 +136,48 @@ func (h *ProxyHandler) serveTransparentProxy(c *gin.Context, requestBody []byte,
 	var finalPromptTokens, finalCompletionTokens int
 	var actualRetries int = 0
 	defer func() {
+		// If not a single attempt succeeded (e.g. acquiring resources failed on the very first try), skip logging
 		if lastUsedResources == nil {
+			h.logger.WithField("id", correlationID).Warn("No resources were used, skipping final log event.")
 			return
 		}
 		finalEvent := h.createLogEvent(c, startTime, correlationID, modelName, lastUsedResources, models.LogTypeFinal, isPreciseRouting)
-		finalEvent.LatencyMs = int(time.Since(startTime).Milliseconds())
-		finalEvent.IsSuccess = isSuccess
-		finalEvent.Retries = actualRetries
+
+		finalEvent.RequestLog.LatencyMs = int(time.Since(startTime).Milliseconds())
+		finalEvent.RequestLog.IsSuccess = isSuccess
+		finalEvent.RequestLog.Retries = actualRetries
 		if isSuccess {
-			finalEvent.PromptTokens = finalPromptTokens
-			finalEvent.CompletionTokens = finalCompletionTokens
+			finalEvent.RequestLog.PromptTokens = finalPromptTokens
+			finalEvent.RequestLog.CompletionTokens = finalCompletionTokens
 		}
+
+		// Even on success, make sure the final status code is recorded if the recorder exists
 		if finalRecorder != nil {
-			finalEvent.StatusCode = finalRecorder.Code
+			finalEvent.RequestLog.StatusCode = finalRecorder.Code
 		}
 		if !isSuccess {
+			// Copy the finalProxyErr information into the RequestLog
 			if finalProxyErr != nil {
-				finalEvent.Error = finalProxyErr
-				finalEvent.ErrorCode = finalProxyErr.Code
-				finalEvent.ErrorMessage = finalProxyErr.Message
+				finalEvent.Error = finalProxyErr // the Error field is for event transport only and is not serialized to the database
+				finalEvent.RequestLog.ErrorCode = finalProxyErr.Code
+				finalEvent.RequestLog.ErrorMessage = finalProxyErr.Message
 			} else if finalRecorder != nil {
-				apiErr := errors.NewAPIErrorWithUpstream(finalRecorder.Code, "PROXY_ERROR", "Request failed after all retries.")
+				// Fallback: finalProxyErr is nil but the recorder exists and reports a failure
+				apiErr := errors.NewAPIErrorWithUpstream(finalRecorder.Code, fmt.Sprintf("UPSTREAM_%d", finalRecorder.Code), "Request failed after all retries.")
 				finalEvent.Error = apiErr
-				finalEvent.ErrorCode = apiErr.Code
-				finalEvent.ErrorMessage = apiErr.Message
+				finalEvent.RequestLog.ErrorCode = apiErr.Code
+				finalEvent.RequestLog.ErrorMessage = apiErr.Message
 			}
 		}
-		eventData, _ := json.Marshal(finalEvent)
-		_ = h.store.Publish(models.TopicRequestFinished, eventData)
+		// Publish the complete event
+		eventData, err := json.Marshal(finalEvent)
+		if err != nil {
+			h.logger.WithField("id", correlationID).WithError(err).Error("Failed to marshal final log event.")
+			return
+		}
+		if err := h.store.Publish(models.TopicRequestFinished, eventData); err != nil {
+			h.logger.WithField("id", correlationID).WithError(err).Error("Failed to publish final log event.")
+		}
 	}()
 	var maxRetries int
 	if isPreciseRouting {
@@ -417,18 +431,24 @@ func (h *ProxyHandler) createLogEvent(c *gin.Context, startTime time.Time, corrI
 	}
 	if authTokenValue, exists := c.Get("authToken"); exists {
 		if authToken, ok := authTokenValue.(*models.AuthToken); ok {
-			event.AuthTokenID = &authToken.ID
+			event.RequestLog.AuthTokenID = &authToken.ID
 		}
 	}
 	if res != nil {
-		event.KeyID = res.APIKey.ID
-		event.GroupID = res.KeyGroup.ID
+		// [Core fix] Populate the embedded RequestLog struct
+		if res.APIKey != nil {
+			event.RequestLog.KeyID = &res.APIKey.ID
+		}
+		if res.KeyGroup != nil {
+			event.RequestLog.GroupID = &res.KeyGroup.ID
+		}
 		if res.UpstreamEndpoint != nil {
-			event.UpstreamID = &res.UpstreamEndpoint.ID
+			event.RequestLog.UpstreamID = &res.UpstreamEndpoint.ID
+			// UpstreamURL is an event-transport field, not a database column, so assigning it here is correct
 			event.UpstreamURL = &res.UpstreamEndpoint.URL
 		}
 		if res.ProxyConfig != nil {
-			event.ProxyID = &res.ProxyConfig.ID
+			event.RequestLog.ProxyID = &res.ProxyConfig.ID
 		}
 	}
 	return event
diff --git a/internal/models/dto.go b/internal/models/dto.go
index f4c7abe..464ee47 100644
--- a/internal/models/dto.go
+++ b/internal/models/dto.go
@@ -57,6 +57,7 @@ type APIKeyQueryParams struct {
 	PageSize int    `form:"limit"`
 	Status   string `form:"status"`
 	Keyword  string `form:"keyword"`
+	IDs      string `form:"ids"`
 }

 // APIKeyDetails is a DTO that combines APIKey info with its contextual status from the mapping.
diff --git a/internal/models/events.go b/internal/models/events.go
index a7be6cf..7c616fb 100644
--- a/internal/models/events.go
+++ b/internal/models/events.go
@@ -17,15 +17,10 @@ const (
 type RequestFinishedEvent struct {
 	RequestLog
-	KeyID            uint
-	GroupID          uint
-	IsSuccess        bool
-	StatusCode       int
-	Error            *errors.APIError
-	CorrelationID    string  `json:"correlation_id,omitempty"`
-	UpstreamID       *uint   `json:"upstream_id"`
-	UpstreamURL      *string `json:"upstream_url,omitempty"`
-	IsPreciseRouting bool    `json:"is_precise_routing"`
+	Error            *errors.APIError `json:"error,omitempty"` // the Error struct is not persisted to the database; it exists only for event transport
+	CorrelationID    string           `json:"correlation_id,omitempty"`
+	UpstreamURL      *string          `json:"upstream_url,omitempty"`
+	IsPreciseRouting bool             `json:"is_precise_routing"`
 }

 type KeyStatusChangedEvent struct {
diff --git a/internal/models/models.go b/internal/models/models.go
index e0ed08f..9545e5f 100644
--- a/internal/models/models.go
+++ b/internal/models/models.go
@@ -14,6 +14,7 @@ type MasterAPIKeyStatus string
 type PollingStrategy string
 type FileProcessingState string
 type LogType string
+type ProtocolType string

 const (
 	// --- Operational statuses (used in the mapping table) ---
@@ -35,8 +36,12 @@ const (
 	FileActive FileProcessingState = "ACTIVE"
 	FileFailed FileProcessingState = "FAILED"

-	LogTypeFinal LogType = "FINAL" // Represents the final outcome of a request, including all retries.
-	LogTypeRetry LogType = "RETRY" // Represents a single, failed attempt that triggered a retry.
+	LogTypeFinal      LogType = "FINAL" // Represents the final outcome of a request, including all retries.
+	LogTypeRetry      LogType = "RETRY" // Represents a single, failed attempt that triggered a retry.
+	LogTypeValidation LogType = "VALIDATION"
+
+	ProtocolOpenAI ProtocolType = "openai"
+	ProtocolGemini ProtocolType = "gemini"
 )

 // ========= Core database models =========
diff --git a/internal/repository/key_selector.go b/internal/repository/key_selector.go
index 0ab9b0b..1e40c4a 100644
--- a/internal/repository/key_selector.go
+++ b/internal/repository/key_selector.go
@@ -11,6 +11,7 @@ import (
 	"io"
 	"sort"
 	"strconv"
+	"strings"
 	"time"

 	"gorm.io/gorm"
@@ -81,13 +82,20 @@ func (r *gormKeyRepository) SelectOneActiveKey(group *models.KeyGroup) (*models.
// SelectOneActiveKeyFromBasePool is the brand-new selector designed for the smart aggregation mode.
 func (r *gormKeyRepository) SelectOneActiveKeyFromBasePool(pool *BasePool) (*models.APIKey, *models.KeyGroup, error) {
+	protocol := "default"
+	if pool.Protocol != "" {
+		protocol = string(pool.Protocol)
+	}
 	// Generate a unique pool ID so that the polling state of different request combinations stays isolated
-	poolID := generatePoolID(pool.CandidateGroups)
-	log := r.logger.WithField("pool_id", poolID)
+	poolID := generatePoolID(pool.CandidateGroups, protocol)
+	log := r.logger.WithField("pool_id", poolID).WithField("protocol", protocol)

 	if err := r.ensureBasePoolCacheExists(pool, poolID); err != nil {
 		log.WithError(err).Error("Failed to ensure BasePool cache exists.")
-		return nil, nil, err
+		if errors.Is(err, gorm.ErrRecordNotFound) {
+			return nil, nil, err
+		}
+		return nil, nil, fmt.Errorf("unexpected error while ensuring base pool cache: %w", err)
 	}

 	var keyIDStr string
@@ -145,25 +153,40 @@ func (r *gormKeyRepository) SelectOneActiveKeyFromBasePool(pool *BasePool) (*mod

 // ensureBasePoolCacheExists creates the Redis structures for a BasePool on demand
 func (r *gormKeyRepository) ensureBasePoolCacheExists(pool *BasePool, poolID string) error {
-	// Use the LIST key as the existence marker
 	listKey := fmt.Sprintf(BasePoolSequential, poolID)
 	exists, err := r.store.Exists(listKey)
 	if err != nil {
+		r.logger.WithError(err).Errorf("Failed to check existence of basepool key: %s", listKey)
 		return err
 	}
 	if exists {
 		val, err := r.store.LIndex(listKey, 0)
-		if err == nil && val == EmptyPoolPlaceholder {
+		if err != nil {
+			return err
+		}
+		if val == EmptyPoolPlaceholder {
 			return gorm.ErrRecordNotFound
 		}
 		return nil
 	}
-
+	lockKey := fmt.Sprintf("lock:basepool:%s", poolID)
+	acquired, err := r.store.SetNX(lockKey, []byte("1"), 10*time.Second)
+	if err != nil {
+		r.logger.WithError(err).Errorf("Failed to acquire distributed lock for basepool build: %s", lockKey)
+		return err
+	}
+	if !acquired {
+		time.Sleep(100 * time.Millisecond)
+		return r.ensureBasePoolCacheExists(pool, poolID)
+	}
+	defer r.store.Del(lockKey)
+	if exists, _ := r.store.Exists(listKey); exists {
+		return nil
+	}
 	r.logger.Infof("BasePool cache for pool_id '%s' not found. Building now...", poolID)
-
 	var allActiveKeyIDs []string
 	lruMembers := make(map[string]float64)
+
 	for _, group := range pool.CandidateGroups {
 		activeKeySetKey := fmt.Sprintf(KeyGroup, group.ID)
 		groupKeyIDs, err := r.store.SMembers(activeKeySetKey)
@@ -171,17 +194,21 @@ func (r *gormKeyRepository) ensureBasePoolCacheExists(pool *BasePool, poolID str
 			r.logger.WithError(err).Warnf("Failed to get active keys for group %d during BasePool build", group.ID)
 			continue
 		}
-		allActiveKeyIDs = append(allActiveKeyIDs, groupKeyIDs...)
-
 		for _, keyIDStr := range groupKeyIDs {
 			keyID, _ := strconv.ParseUint(keyIDStr, 10, 64)
+
 			_, mapping, err := r.getKeyDetailsFromCache(uint(keyID), group.ID)
-			if err == nil && mapping != nil {
-				var score float64
-				if mapping.LastUsedAt != nil {
-					score = float64(mapping.LastUsedAt.UnixMilli())
+			if err != nil {
+				if errors.Is(err, store.ErrNotFound) || strings.Contains(err.Error(), "failed to get") {
+					r.logger.WithError(err).Warnf("Cache inconsistency detected for KeyID %s in GroupID %d. Skipping.", keyIDStr, group.ID)
+					continue
+				} else {
+					return err
 				}
-				lruMembers[keyIDStr] = score
+			}
+			allActiveKeyIDs = append(allActiveKeyIDs, keyIDStr)
+			if mapping != nil && mapping.LastUsedAt != nil {
+				lruMembers[keyIDStr] = float64(mapping.LastUsedAt.UnixMilli())
 			}
 		}
 	}
@@ -194,23 +221,16 @@ func (r *gormKeyRepository) ensureBasePoolCacheExists(pool *BasePool, poolID str
 		}
 		return gorm.ErrRecordNotFound
 	}
-	// Use a pipeline to populate all of the polling structures
 	pipe := r.store.Pipeline()
-	// 1. Sequential
 	pipe.LPush(fmt.Sprintf(BasePoolSequential, poolID), toInterfaceSlice(allActiveKeyIDs)...)
-	// 2. Random
 	pipe.SAdd(fmt.Sprintf(BasePoolRandomMain, poolID), toInterfaceSlice(allActiveKeyIDs)...)
-
-	// Set a reasonable TTL, e.g. five minutes, to avoid orphaned data
 	pipe.Expire(fmt.Sprintf(BasePoolSequential, poolID), CacheTTL)
 	pipe.Expire(fmt.Sprintf(BasePoolRandomMain, poolID), CacheTTL)
 	pipe.Expire(fmt.Sprintf(BasePoolRandomCooldown, poolID), CacheTTL)
 	pipe.Expire(fmt.Sprintf(BasePoolLRU, poolID), CacheTTL)
-
 	if err := pipe.Exec(); err != nil {
 		return err
 	}
-
 	if len(lruMembers) > 0 {
 		r.store.ZAdd(fmt.Sprintf(BasePoolLRU, poolID), lruMembers)
 	}
@@ -226,7 +246,7 @@ func (r *gormKeyRepository) updateKeyUsageTimestampForPool(poolID string, keyID
 }

 // generatePoolID derives a stable, unique string ID from the list of candidate group IDs
-func generatePoolID(groups []*models.KeyGroup) string {
+func generatePoolID(groups []*models.KeyGroup, protocol string) string {
 	ids := make([]int, len(groups))
 	for i, g := range groups {
 		ids[i] = int(g.ID)
@@ -234,7 +254,7 @@
 	sort.Ints(ids)

 	h := sha1.New()
-	io.WriteString(h, fmt.Sprintf("%v", ids))
+	io.WriteString(h, fmt.Sprintf("protocol:%s;groups:%v", protocol, ids))
 	return fmt.Sprintf("%x", h.Sum(nil))
 }
diff --git a/internal/repository/repository.go b/internal/repository/repository.go
index 61adde3..c62daf1 100644
--- a/internal/repository/repository.go
+++ b/internal/repository/repository.go
@@ -17,6 +17,7 @@ import (
 type BasePool struct {
 	CandidateGroups []*models.KeyGroup
 	PollingStrategy models.PollingStrategy
+	Protocol        models.ProtocolType
 }

 type KeyRepository interface {
diff --git a/internal/service/analytics_service.go b/internal/service/analytics_service.go
index d31ec9c..a3375c9 100644
--- a/internal/service/analytics_service.go
+++ b/internal/service/analytics_service.go
@@ -84,26 +84,24 @@ func (s *AnalyticsService) eventListener() {
 }

 func (s *AnalyticsService) handleAnalyticsEvent(event *models.RequestFinishedEvent) {
-	if event.GroupID == 0 {
+	if event.RequestLog.GroupID == nil {
 		return
 	}
 	key := fmt.Sprintf("analytics:hourly:%s", time.Now().UTC().Format("2006-01-02T15"))
-	fieldPrefix := fmt.Sprintf("%d:%s", event.GroupID, event.ModelName)
-
+	fieldPrefix := fmt.Sprintf("%d:%s", *event.RequestLog.GroupID, event.RequestLog.ModelName)
 	pipe := s.store.Pipeline()
 	pipe.HIncrBy(key, fieldPrefix+":requests", 1)
-	if event.IsSuccess {
+	if event.RequestLog.IsSuccess {
 		pipe.HIncrBy(key, fieldPrefix+":success", 1)
 	}
-	if event.PromptTokens > 0 {
-		pipe.HIncrBy(key, fieldPrefix+":prompt", int64(event.PromptTokens))
+	if event.RequestLog.PromptTokens > 0 {
+		pipe.HIncrBy(key, fieldPrefix+":prompt", int64(event.RequestLog.PromptTokens))
 	}
-	if event.CompletionTokens > 0 {
-		pipe.HIncrBy(key, fieldPrefix+":completion", int64(event.CompletionTokens))
+	if event.RequestLog.CompletionTokens > 0 {
+		pipe.HIncrBy(key, fieldPrefix+":completion", int64(event.RequestLog.CompletionTokens))
 	}
-
 	if err := pipe.Exec(); err != nil {
-		s.logger.Warnf("[%s] Failed to record analytics event to store for group %d: %v",
event.CorrelationID, event.GroupID, err) + s.logger.Warnf("[%s] Failed to record analytics event to store for group %d: %v", event.CorrelationID, *event.RequestLog.GroupID, err) } } diff --git a/internal/service/apikey_service.go b/internal/service/apikey_service.go index 0108c99..63a6385 100644 --- a/internal/service/apikey_service.go +++ b/internal/service/apikey_service.go @@ -174,49 +174,40 @@ func (s *APIKeyService) Stop() { } func (s *APIKeyService) handleKeyUsageEvent(event *models.RequestFinishedEvent) { - if event.KeyID == 0 || event.GroupID == 0 { + if event.RequestLog.KeyID == nil || event.RequestLog.GroupID == nil { return } - // Handle success case: key recovery and timestamp update. - if event.IsSuccess { - mapping, err := s.keyRepo.GetMapping(event.GroupID, event.KeyID) + if event.RequestLog.IsSuccess { + mapping, err := s.keyRepo.GetMapping(*event.RequestLog.GroupID, *event.RequestLog.KeyID) if err != nil { - // Log if mapping is not found, but don't proceed. - s.logger.Warnf("[%s] Could not find mapping for G:%d K:%d on successful request: %v", event.CorrelationID, event.GroupID, event.KeyID, err) + s.logger.Warnf("[%s] Could not find mapping for G:%d K:%d on successful request: %v", event.CorrelationID, *event.RequestLog.GroupID, *event.RequestLog.KeyID, err) return } - - needsUpdate := false + statusChanged := false oldStatus := mapping.Status - - // If status was not active, it's a recovery. if mapping.Status != models.StatusActive { mapping.Status = models.StatusActive mapping.ConsecutiveErrorCount = 0 mapping.LastError = "" - needsUpdate = true + statusChanged = true } - // Always update LastUsedAt timestamp. + now := time.Now() mapping.LastUsedAt = &now - needsUpdate = true - - if needsUpdate { - if err := s.keyRepo.UpdateMapping(mapping); err != nil { - s.logger.Errorf("[%s] Failed to update mapping for G:%d K:%d after successful request: %v", event.CorrelationID, event.GroupID, event.KeyID, err) - } else if oldStatus != models.StatusActive { - // Only publish event if status actually changed. - go s.publishStatusChangeEvent(event.GroupID, event.KeyID, oldStatus, models.StatusActive, "key_recovered_after_use") - } + if err := s.keyRepo.UpdateMapping(mapping); err != nil { + s.logger.Errorf("[%s] Failed to update mapping for G:%d K:%d after successful request: %v", event.CorrelationID, *event.RequestLog.GroupID, *event.RequestLog.KeyID, err) + return + } + if statusChanged { + go s.publishStatusChangeEvent(*event.RequestLog.GroupID, *event.RequestLog.KeyID, oldStatus, models.StatusActive, "key_recovered_after_use") } return } - // Handle failure case: delegate to the centralized judgment function. 
 	if event.Error != nil {
 		s.judgeKeyErrors(
 			event.CorrelationID,
-			event.GroupID,
-			event.KeyID,
+			*event.RequestLog.GroupID,
+			*event.RequestLog.KeyID,
 			event.Error,
 			event.IsPreciseRouting,
 		)
@@ -354,6 +345,10 @@ func (s *APIKeyService) ListAPIKeys(params *models.APIKeyQueryParams) (*Paginate
 	}, nil
 }

+func (s *APIKeyService) GetKeysByIds(ids []uint) ([]models.APIKey, error) {
+	return s.keyRepo.GetKeysByIDs(ids)
+}
+
 func (s *APIKeyService) UpdateAPIKey(key *models.APIKey) error {
 	go func() {
 		var oldKey models.APIKey
diff --git a/internal/service/db_log_writer_service.go b/internal/service/db_log_writer_service.go
index 518b7c1..892c4ee 100644
--- a/internal/service/db_log_writer_service.go
+++ b/internal/service/db_log_writer_service.go
@@ -1,4 +1,4 @@
-// Filename: internal/service/db_log_writer_service.go (new file)
+// Filename: internal/service/db_log_writer_service.go

 package service
diff --git a/internal/service/key_validation_service.go b/internal/service/key_validation_service.go
index 3e348a7..36901dc 100644
--- a/internal/service/key_validation_service.go
+++ b/internal/service/key_validation_service.go
@@ -164,12 +164,15 @@ func (s *KeyValidationService) runTestKeysTask(taskID string, resourceID string,
 	var currentResult models.KeyTestResult

 	event := models.RequestFinishedEvent{
-		GroupID: groupID,
-		KeyID:   apiKeyModel.ID,
+		RequestLog: models.RequestLog{
+			// GroupID and KeyID are pointers on the RequestLog model, so take their addresses
+			GroupID: &groupID,
+			KeyID:   &apiKeyModel.ID,
+		},
 	}
 	if validationErr == nil {
 		currentResult = models.KeyTestResult{Key: apiKeyModel.APIKey, Status: "valid", Message: "Validation successful."}
-		event.IsSuccess = true
+		event.RequestLog.IsSuccess = true
 	} else {
 		var apiErr *CustomErrors.APIError
 		if CustomErrors.As(validationErr, &apiErr) {
@@ -179,7 +182,7 @@ func (s *KeyValidationService) runTestKeysTask(taskID string, resourceID string,
 			currentResult = models.KeyTestResult{Key: apiKeyModel.APIKey, Status: "error", Message: "Validation check failed: " + validationErr.Error()}
 			event.Error = &CustomErrors.APIError{Message: validationErr.Error()}
 		}
-		event.IsSuccess = false
+		event.RequestLog.IsSuccess = false
 	}
 	eventData, _ := json.Marshal(event)
 	if err := s.store.Publish(models.TopicRequestFinished, eventData); err != nil {
diff --git a/internal/service/log_service.go b/internal/service/log_service.go
index b90d129..38cf66d 100644
--- a/internal/service/log_service.go
+++ b/internal/service/log_service.go
@@ -1,3 +1,4 @@
+// Filename: internal/service/log_service.go
 package service

 import (
@@ -16,28 +17,35 @@ func NewLogService(db *gorm.DB) *LogService {
 	return &LogService{db: db}
 }

-// Record writes a single log entry to the database (TODO: keep the simple implementation for now; refactor to async later)
 func (s *LogService) Record(log *models.RequestLog) error {
 	return s.db.Create(log).Error
 }

-func (s *LogService) GetLogs(c *gin.Context) ([]models.RequestLog, error) {
+func (s *LogService) GetLogs(c *gin.Context) ([]models.RequestLog, int64, error) {
 	var logs []models.RequestLog
+	var total int64

-	query := s.db.Model(&models.RequestLog{}).Scopes(s.filtersScope(c)).Order("request_time desc")
+	query := s.db.Model(&models.RequestLog{}).Scopes(s.filtersScope(c))

-	// Simple pagination (TODO: can be made more sophisticated later)
+	// Count the total first
+	if err := query.Count(&total).Error; err != nil {
+		return nil, 0, err
+	}
+	if total == 0 {
+		return []models.RequestLog{}, 0, nil
+	}
+
+	// Then run the paginated query
 	page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
 	pageSize, _ := strconv.Atoi(c.DefaultQuery("page_size", "20"))
 	offset := (page - 1) * pageSize
-	// Run the query
-	err :=
query.Limit(pageSize).Offset(offset).Find(&logs).Error + err := query.Order("request_time desc").Limit(pageSize).Offset(offset).Find(&logs).Error if err != nil { - return nil, err + return nil, 0, err } - return logs, nil + return logs, total, nil } func (s *LogService) filtersScope(c *gin.Context) func(db *gorm.DB) *gorm.DB { @@ -60,6 +68,11 @@ func (s *LogService) filtersScope(c *gin.Context) func(db *gorm.DB) *gorm.DB { db = db.Where("key_id = ?", keyID) } } + if groupIDStr := c.Query("group_id"); groupIDStr != "" { + if groupID, err := strconv.ParseUint(groupIDStr, 10, 64); err == nil { + db = db.Where("group_id = ?", groupID) + } + } return db } } diff --git a/tmp/main b/tmp/main deleted file mode 100755 index af7eb10..0000000 Binary files a/tmp/main and /dev/null differ diff --git a/web/static/css/output.css b/web/static/css/output.css index 4f07fd0..6f87df9 100644 --- a/web/static/css/output.css +++ b/web/static/css/output.css @@ -332,9 +332,6 @@ .pointer-events-none { pointer-events: none; } - .collapse { - visibility: collapse; - } .invisible { visibility: hidden; } @@ -490,9 +487,6 @@ .m-0 { margin: calc(var(--spacing) * 0); } - .m-7 { - margin: calc(var(--spacing) * 7); - } .mx-1 { margin-inline: calc(var(--spacing) * 1); } @@ -505,9 +499,6 @@ .my-1\.5 { margin-block: calc(var(--spacing) * 1.5); } - .mt-0 { - margin-top: calc(var(--spacing) * 0); - } .mt-0\.5 { margin-top: calc(var(--spacing) * 0.5); } @@ -604,9 +595,6 @@ .hidden { display: none; } - .inline { - display: inline; - } .inline-block { display: inline-block; } @@ -629,9 +617,6 @@ width: calc(var(--spacing) * 6); height: calc(var(--spacing) * 6); } - .h-0 { - height: calc(var(--spacing) * 0); - } .h-0\.5 { height: calc(var(--spacing) * 0.5); } @@ -713,9 +698,6 @@ .w-0 { width: calc(var(--spacing) * 0); } - .w-1 { - width: calc(var(--spacing) * 1); - } .w-1\/4 { width: calc(1/4 * 100%); } @@ -821,9 +803,6 @@ .flex-1 { flex: 1; } - .flex-shrink { - flex-shrink: 1; - } .shrink-0 { flex-shrink: 0; } @@ -836,9 +815,6 @@ .caption-bottom { caption-side: bottom; } - .border-collapse { - border-collapse: collapse; - } .origin-center { transform-origin: center; } @@ -865,10 +841,6 @@ --tw-translate-x: 100%; translate: var(--tw-translate-x) var(--tw-translate-y); } - .-translate-y-1 { - --tw-translate-y: calc(var(--spacing) * -1); - translate: var(--tw-translate-x) var(--tw-translate-y); - } .-translate-y-1\/2 { --tw-translate-y: calc(calc(1/2 * 100%) * -1); translate: var(--tw-translate-x) var(--tw-translate-y); @@ -1025,9 +997,6 @@ margin-block-end: calc(calc(var(--spacing) * 8) * calc(1 - var(--tw-space-y-reverse))); } } - .gap-x-1 { - column-gap: calc(var(--spacing) * 1); - } .gap-x-1\.5 { column-gap: calc(var(--spacing) * 1.5); } @@ -1173,9 +1142,6 @@ --tw-border-style: none; border-style: none; } - .border-black { - border-color: var(--color-black); - } .border-black\/10 { border-color: color-mix(in srgb, #000 10%, transparent); @supports (color: color-mix(in lab, red, red)) { @@ -1203,9 +1169,6 @@ .border-green-200 { border-color: var(--color-green-200); } - .border-primary { - border-color: var(--color-primary); - } .border-primary\/20 { border-color: var(--color-primary); @supports (color: color-mix(in lab, red, red)) { @@ -1242,9 +1205,6 @@ .border-zinc-300 { border-color: var(--color-zinc-300); } - .border-zinc-700 { - border-color: var(--color-zinc-700); - } .border-zinc-700\/50 { border-color: color-mix(in srgb, oklch(37% 0.013 285.805) 50%, transparent); @supports (color: color-mix(in lab, red, red)) { @@ -1257,9 
+1217,6 @@ .border-b-border { border-bottom-color: var(--color-border); } - .border-b-zinc-200 { - border-bottom-color: var(--color-zinc-200); - } .bg-accent { background-color: var(--color-accent); } @@ -1320,9 +1277,6 @@ .bg-gray-500 { background-color: var(--color-gray-500); } - .bg-gray-950 { - background-color: var(--color-gray-950); - } .bg-gray-950\/5 { background-color: color-mix(in srgb, oklch(13% 0.028 261.692) 5%, transparent); @supports (color: color-mix(in lab, red, red)) { @@ -1413,15 +1367,6 @@ .bg-purple-100 { background-color: var(--color-purple-100); } - .bg-purple-500 { - background-color: var(--color-purple-500); - } - .bg-purple-500\/10 { - background-color: color-mix(in srgb, oklch(62.7% 0.265 303.9) 10%, transparent); - @supports (color: color-mix(in lab, red, red)) { - background-color: color-mix(in oklab, var(--color-purple-500) 10%, transparent); - } - } .bg-red-50 { background-color: var(--color-red-50); } @@ -1543,10 +1488,6 @@ --tw-gradient-position: to right in oklab; background-image: linear-gradient(var(--tw-gradient-stops)); } - .from-blue-500 { - --tw-gradient-from: var(--color-blue-500); - --tw-gradient-stops: var(--tw-gradient-via-stops, var(--tw-gradient-position), var(--tw-gradient-from) var(--tw-gradient-from-position), var(--tw-gradient-to) var(--tw-gradient-to-position)); - } .from-blue-500\/30 { --tw-gradient-from: color-mix(in srgb, oklch(62.3% 0.214 259.815) 30%, transparent); @supports (color: color-mix(in lab, red, red)) { @@ -1617,9 +1558,6 @@ .px-8 { padding-inline: calc(var(--spacing) * 8); } - .py-0 { - padding-block: calc(var(--spacing) * 0); - } .py-0\.5 { padding-block: calc(var(--spacing) * 0.5); } @@ -1668,9 +1606,6 @@ .pr-20 { padding-right: calc(var(--spacing) * 20); } - .pb-1 { - padding-bottom: calc(var(--spacing) * 1); - } .pb-1\.5 { padding-bottom: calc(var(--spacing) * 1.5); } @@ -1710,9 +1645,6 @@ .align-middle { vertical-align: middle; } - .font-\[\'Pixelify_Sans\'\] { - font-family: 'Pixelify Sans'; - } .font-mono { font-family: var(--font-mono); } @@ -1858,9 +1790,6 @@ .text-green-800 { color: var(--color-green-800); } - .text-indigo-500 { - color: var(--color-indigo-500); - } .text-indigo-800 { color: var(--color-indigo-800); } @@ -1885,9 +1814,6 @@ .text-primary-foreground { color: var(--color-primary-foreground); } - .text-purple-600 { - color: var(--color-purple-600); - } .text-purple-800 { color: var(--color-purple-800); } @@ -1924,9 +1850,6 @@ .text-yellow-600 { color: var(--color-yellow-600); } - .text-yellow-700 { - color: var(--color-yellow-700); - } .text-zinc-100 { color: var(--color-zinc-100); } @@ -1957,9 +1880,6 @@ .italic { font-style: italic; } - .underline { - text-decoration-line: underline; - } .opacity-0 { opacity: 0%; } @@ -2017,10 +1937,6 @@ --tw-inset-shadow: inset 0 2px 4px var(--tw-inset-shadow-color, oklab(from rgb(0 0 0 / 0.05) l a b / 25%)); box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); } - .inset-shadow-sm { - --tw-inset-shadow: inset 0 2px 4px var(--tw-inset-shadow-color, rgb(0 0 0 / 0.05)); - box-shadow: var(--tw-inset-shadow), var(--tw-inset-ring-shadow), var(--tw-ring-offset-shadow), var(--tw-ring-shadow), var(--tw-shadow); - } .ring-black { --tw-ring-color: var(--color-black); } @@ -2042,10 +1958,6 @@ --tw-ring-color: color-mix(in oklab, var(--color-black) 15%, transparent); } } - .outline { - outline-style: var(--tw-outline-style); - outline-width: 1px; - } .blur { --tw-blur: blur(8px); filter: 
var(--tw-blur,) var(--tw-brightness,) var(--tw-contrast,) var(--tw-grayscale,) var(--tw-hue-rotate,) var(--tw-invert,) var(--tw-saturate,) var(--tw-sepia,) var(--tw-drop-shadow,); @@ -2144,9 +2056,6 @@ -webkit-user-select: none; user-select: none; } - .\[rows\:\%v\] { - rows: %v; - } .group-hover\:opacity-100 { &:is(:where(.group):hover *) { @media (hover: hover) { @@ -2366,16 +2275,6 @@ } } } - .hover\:bg-zinc-100\/50 { - &:hover { - @media (hover: hover) { - background-color: color-mix(in srgb, oklch(96.7% 0.001 286.375) 50%, transparent); - @supports (color: color-mix(in lab, red, red)) { - background-color: color-mix(in oklab, var(--color-zinc-100) 50%, transparent); - } - } - } - } .hover\:bg-zinc-200 { &:hover { @media (hover: hover) { @@ -2777,16 +2676,6 @@ border-color: var(--color-zinc-800); } } - .dark\:border-b-zinc-700 { - &:where(.dark, .dark *) { - border-bottom-color: var(--color-zinc-700); - } - } - .dark\:border-b-zinc-800 { - &:where(.dark, .dark *) { - border-bottom-color: var(--color-zinc-800); - } - } .dark\:bg-blue-900 { &:where(.dark, .dark *) { background-color: var(--color-blue-900); @@ -3038,11 +2927,6 @@ color: var(--color-white); } } - .dark\:text-yellow-400 { - &:where(.dark, .dark *) { - color: var(--color-yellow-400); - } - } .dark\:text-zinc-100 { &:where(.dark, .dark *) { color: var(--color-zinc-100); @@ -3147,18 +3031,6 @@ } } } - .dark\:hover\:bg-zinc-800\/50 { - &:where(.dark, .dark *) { - &:hover { - @media (hover: hover) { - background-color: color-mix(in srgb, oklch(27.4% 0.006 286.033) 50%, transparent); - @supports (color: color-mix(in lab, red, red)) { - background-color: color-mix(in oklab, var(--color-zinc-800) 50%, transparent); - } - } - } - } - } .dark\:hover\:text-blue-300 { &:where(.dark, .dark *) { &:hover { @@ -5160,11 +5032,6 @@ inherits: false; initial-value: 0 0 #0000; } -@property --tw-outline-style { - syntax: "*"; - inherits: false; - initial-value: solid; -} @property --tw-blur { syntax: "*"; inherits: false; @@ -5277,6 +5144,11 @@ inherits: false; initial-value: 1; } +@property --tw-outline-style { + syntax: "*"; + inherits: false; + initial-value: solid; +} @keyframes spin { to { transform: rotate(360deg); @@ -5334,7 +5206,6 @@ --tw-ring-offset-width: 0px; --tw-ring-offset-color: #fff; --tw-ring-offset-shadow: 0 0 #0000; - --tw-outline-style: solid; --tw-blur: initial; --tw-brightness: initial; --tw-contrast: initial; @@ -5362,6 +5233,7 @@ --tw-scale-x: 1; --tw-scale-y: 1; --tw-scale-z: 1; + --tw-outline-style: solid; } } } diff --git a/web/static/js/chunk-A4OOMLXK.js b/web/static/js/chunk-A4OOMLXK.js new file mode 100644 index 0000000..772f2a2 --- /dev/null +++ b/web/static/js/chunk-A4OOMLXK.js @@ -0,0 +1,52 @@ +// frontend/js/utils/utils.js +function debounce(func, wait) { + let timeout; + const debounced = function(...args) { + const context = this; + const later = () => { + clearTimeout(timeout); + func.apply(context, args); + }; + clearTimeout(timeout); + timeout = setTimeout(later, wait); + }; + debounced.cancel = () => { + clearTimeout(timeout); + }; + return debounced; +} +function isValidApiKeyFormat(key) { + const patterns = [ + // Google Gemini API Key: AIzaSy + 33 characters (alphanumeric, _, -) + /^AIzaSy[\w-]{33}$/, + // OpenAI API Key (新格式): sk- + 48 alphanumeric characters + /^sk-[\w]{48}$/, + // Google AI Studio Key: gsk_ + alphanumeric & hyphens + /^gsk_[\w-]{40,}$/, + // Anthropic API Key (示例): sk-ant-api03- + long string + /^sk-ant-api\d{2}-[\w-]{80,}$/, + // Fallback for other potential "sk-" keys with a 
reasonable length + /^sk-[\w-]{20,}$/ + ]; + return patterns.some((pattern) => pattern.test(key)); +} +function escapeHTML(str) { + if (typeof str !== "string") { + return str; + } + return str.replace(/[&<>"']/g, function(match) { + return { + "&": "&", + "<": "<", + ">": ">", + '"': """, + "'": "'" + }[match]; + }); +} + +export { + debounce, + isValidApiKeyFormat, + escapeHTML +}; diff --git a/web/static/js/keys-A2UAJYOX.js b/web/static/js/keys-4GCIJ7HW.js similarity index 99% rename from web/static/js/keys-A2UAJYOX.js rename to web/static/js/keys-4GCIJ7HW.js index e7e3c19..f0a3f9a 100644 --- a/web/static/js/keys-A2UAJYOX.js +++ b/web/static/js/keys-4GCIJ7HW.js @@ -4,6 +4,11 @@ import { taskCenterManager, toastManager } from "./chunk-EZAP7GR4.js"; +import { + debounce, + escapeHTML, + isValidApiKeyFormat +} from "./chunk-A4OOMLXK.js"; import { apiFetch, apiFetchJson @@ -670,53 +675,6 @@ var ApiKeyManager = class { }; var apiKeyManager = new ApiKeyManager(); -// frontend/js/utils/utils.js -function debounce(func, wait) { - let timeout; - const debounced = function(...args) { - const context = this; - const later = () => { - clearTimeout(timeout); - func.apply(context, args); - }; - clearTimeout(timeout); - timeout = setTimeout(later, wait); - }; - debounced.cancel = () => { - clearTimeout(timeout); - }; - return debounced; -} -function isValidApiKeyFormat(key) { - const patterns = [ - // Google Gemini API Key: AIzaSy + 33 characters (alphanumeric, _, -) - /^AIzaSy[\w-]{33}$/, - // OpenAI API Key (新格式): sk- + 48 alphanumeric characters - /^sk-[\w]{48}$/, - // Google AI Studio Key: gsk_ + alphanumeric & hyphens - /^gsk_[\w-]{40,}$/, - // Anthropic API Key (示例): sk-ant-api03- + long string - /^sk-ant-api\d{2}-[\w-]{80,}$/, - // Fallback for other potential "sk-" keys with a reasonable length - /^sk-[\w-]{20,}$/ - ]; - return patterns.some((pattern) => pattern.test(key)); -} -function escapeHTML(str) { - if (typeof str !== "string") { - return str; - } - return str.replace(/[&<>"']/g, function(match) { - return { - "&": "&", - "<": "<", - ">": ">", - '"': """, - "'": "'" - }[match]; - }); -} - // frontend/js/pages/keys/addApiModal.js var AddApiModal = class { constructor({ onImportSuccess }) { diff --git a/web/static/js/logs-4C4JG7BT.js b/web/static/js/logs-AG4TD2DO.js similarity index 78% rename from web/static/js/logs-4C4JG7BT.js rename to web/static/js/logs-AG4TD2DO.js index 78e58a3..7ec2c23 100644 --- a/web/static/js/logs-4C4JG7BT.js +++ b/web/static/js/logs-AG4TD2DO.js @@ -1,3 +1,6 @@ +import { + escapeHTML +} from "./chunk-A4OOMLXK.js"; import { apiFetchJson } from "./chunk-PLQL6WIO.js"; @@ -27,7 +30,6 @@ var STATUS_CODE_MAP = { }; var SPECIAL_CASE_MAP = [ { code: 400, keyword: "api key not found", type: "\u65E0\u6548\u5BC6\u94A5", style: "red" }, - // 之前实现的模型配置错误规则也可以移到这里,更加规范 { code: 404, keyword: "call listmodels", type: "\u6A21\u578B\u914D\u7F6E\u9519\u8BEF", style: "orange" } ]; var styleToClass = (style) => { @@ -46,8 +48,9 @@ var styleToClass = (style) => { }; var errorCodeRegex = /(\d+)$/; var LogList = class { - constructor(container) { + constructor(container, dataStore2) { this.container = container; + this.dataStore = dataStore2; if (!this.container) console.error("LogList: container element (tbody) not found."); } renderLoading() { @@ -115,8 +118,16 @@ var LogList = class { return `
${modelName}
`; } createLogRowHtml(log, index) { - const groupName = log.GroupDisplayName || (log.GroupID ? `Group #${log.GroupID}` : "N/A"); - const apiKeyName = log.APIKeyName || (log.KeyID ? `Key #${log.KeyID}` : "N/A"); + const group = this.dataStore.groups.get(log.GroupID); + const groupName = group ? group.display_name : log.GroupID ? `Group #${log.GroupID}` : "N/A"; + const key = this.dataStore.keys.get(log.KeyID); + let apiKeyDisplay; + if (key && key.APIKey && key.APIKey.length >= 8) { + const masked = `${key.APIKey.substring(0, 4)}......${key.APIKey.substring(key.APIKey.length - 4)}`; + apiKeyDisplay = escapeHTML(masked); + } else { + apiKeyDisplay = log.KeyID ? `Key #${log.KeyID}` : "N/A"; + } const errorInfo = this._interpretError(log); const modelNameFormatted = this._formatModelName(log.ModelName); const errorMessageAttr = log.ErrorMessage ? `data-error-message="${escape(log.ErrorMessage)}"` : ""; @@ -125,7 +136,7 @@ var LogList = class { ${index} - ${apiKeyName} + ${apiKeyDisplay} ${groupName} ${errorInfo.type} ${errorInfo.statusCodeHtml} @@ -143,12 +154,15 @@ var LogList = class { var logList_default = LogList; // frontend/js/pages/logs/index.js +var dataStore = { + groups: /* @__PURE__ */ new Map(), + keys: /* @__PURE__ */ new Map() +}; var LogsPage = class { constructor() { this.state = { logs: [], pagination: { page: 1, pages: 1, total: 0, page_size: 20 }, - // 包含 page_size isLoading: true, filters: { page: 1, page_size: 20 } }; @@ -157,38 +171,41 @@ var LogsPage = class { }; this.initialized = !!this.elements.tableBody; if (this.initialized) { - this.logList = new logList_default(this.elements.tableBody); + this.logList = new logList_default(this.elements.tableBody, dataStore); } } async init() { - if (!this.initialized) { - console.error("LogsPage: Could not initialize. 
Essential container element 'logs-table-body' is missing."); - return; - } + if (!this.initialized) return; this.initEventListeners(); + await this.loadGroupsOnce(); await this.loadAndRenderLogs(); } initEventListeners() { } + async loadGroupsOnce() { + if (dataStore.groups.size > 0) return; + try { + const { success, data } = await apiFetchJson("/admin/keygroups"); + if (success && Array.isArray(data)) { + data.forEach((group) => dataStore.groups.set(group.id, group)); + } + } catch (error) { + console.error("Failed to load key groups:", error); + } + } async loadAndRenderLogs() { this.state.isLoading = true; this.logList.renderLoading(); try { - const url = `/admin/logs?page=${this.state.filters.page}&page_size=${this.state.filters.page_size}`; - const responseData = await apiFetchJson(url); - if (responseData && responseData.success && Array.isArray(responseData.data)) { - this.state.logs = responseData.data; - this.state.pagination = { - page: this.state.filters.page, - page_size: this.state.filters.page_size, - total: responseData.data.length, - // 这是一个不准确的临时值 - pages: Math.ceil(responseData.data.length / this.state.filters.page_size) - // 同样不准确 - }; + const query = new URLSearchParams(this.state.filters); + const { success, data } = await apiFetchJson(`/admin/logs?${query.toString()}`); + if (success && typeof data === "object") { + const { items, total, page, page_size } = data; + this.state.logs = items; + this.state.pagination = { page, page_size, total, pages: Math.ceil(total / page_size) }; + await this.enrichLogsWithKeyNames(items); this.logList.render(this.state.logs, this.state.pagination); } else { - console.error("API response for logs is incorrect:", responseData); this.logList.render([], this.state.pagination); } } catch (error) { @@ -198,6 +215,21 @@ var LogsPage = class { this.state.isLoading = false; } } + async enrichLogsWithKeyNames(logs) { + const missingKeyIds = [...new Set( + logs.filter((log) => log.KeyID && !dataStore.keys.has(log.KeyID)).map((log) => log.KeyID) + )]; + if (missingKeyIds.length === 0) return; + try { + const idsQuery = missingKeyIds.join(","); + const { success, data } = await apiFetchJson(`/admin/apikeys?ids=${idsQuery}`); + if (success && Array.isArray(data)) { + data.forEach((key) => dataStore.keys.set(key.ID, key)); + } + } catch (error) { + console.error(`Failed to fetch key details:`, error); + } + } }; function logs_default() { const page = new LogsPage(); diff --git a/web/static/js/main.js b/web/static/js/main.js index 1849eff..d010638 100644 --- a/web/static/js/main.js +++ b/web/static/js/main.js @@ -180,8 +180,8 @@ var pageModules = { // 键 'dashboard' 对应一个函数,该函数调用 import() 返回一个 Promise // esbuild 看到这个 import() 语法,就会自动将 dashboard.js 及其依赖打包成一个独立的 chunk 文件 "dashboard": () => import("./dashboard-CJJWKYPR.js"), - "keys": () => import("./keys-A2UAJYOX.js"), - "logs": () => import("./logs-4C4JG7BT.js") + "keys": () => import("./keys-4GCIJ7HW.js"), + "logs": () => import("./logs-AG4TD2DO.js") // 'settings': () => import('./pages/settings.js'), // 未来启用 settings 页面 // 未来新增的页面,只需在这里添加一行映射,esbuild会自动处理 };