refactor(stats): remove daily stats aggregation

Remove the DailyStatsJob, DailyStat model, and associated database
migrations. This eliminates the pre-aggregation layer and updates the
dashboard handler to remove dependencies on the daily_stats table.
This commit is contained in:
zenfun
2026-01-02 23:08:50 +08:00
parent 08a8a1e42f
commit 481f616704
4 changed files with 3 additions and 353 deletions

View File

@@ -1,7 +1,6 @@
package api
import (
	"encoding/json"
	"math"
	"net/http"
	"sort"
	"time"
@@ -403,23 +402,6 @@ func previousPeriodWindow(period string) (start, end time.Time) {
}
}
// aggregateFromDailyStats returns the summed statistics from the daily_stats
// table over the inclusive [startDate, endDate] date range. Missing rows
// contribute zero via COALESCE, so an empty range yields zeroed stats.
func (h *DashboardHandler) aggregateFromDailyStats(startDate, endDate string) (aggregatedStats, error) {
	// Column sums for the aggregate query; COALESCE guards against a
	// NULL result when no rows match the date window.
	const sumColumns = `
		COALESCE(SUM(requests), 0) as requests,
		COALESCE(SUM(success), 0) as success,
		COALESCE(SUM(failed), 0) as failed,
		COALESCE(SUM(tokens_in), 0) as tokens_in,
		COALESCE(SUM(tokens_out), 0) as tokens_out,
		COALESCE(SUM(latency_sum_ms), 0) as latency_sum_ms
	`

	var out aggregatedStats
	query := h.db.Model(&model.DailyStat{}).
		Select(sumColumns).
		Where("date >= ? AND date <= ?", startDate, endDate)
	if err := query.Scan(&out).Error; err != nil {
		return out, err
	}
	return out, nil
}
// aggregateFromLogRecords queries log_records directly for the given time range
func (h *DashboardHandler) aggregateFromLogRecords(start, end time.Time) (aggregatedStats, error) {
var stats struct {
@@ -452,48 +434,3 @@ func (h *DashboardHandler) aggregateFromLogRecords(start, end time.Time) (aggreg
LatencySumMs: stats.LatencySumMs,
}, err
}
// getTopModelsFromDailyStats aggregates the per-day top-model JSON snapshots
// stored in daily_stats into a single ranked list for the inclusive
// [startDate, endDate] date range.
//
// Rows whose TopModels JSON fails to parse are skipped (best-effort
// aggregation). The result is sorted by request count, descending, and
// truncated to at most 10 entries. Tie order among equal request counts is
// unspecified (it never was deterministic: the source map iterates randomly).
func (h *DashboardHandler) getTopModelsFromDailyStats(startDate, endDate string) ([]TopModelStat, error) {
	var dailyStats []model.DailyStat
	if err := h.db.Where("date >= ? AND date <= ?", startDate, endDate).Find(&dailyStats).Error; err != nil {
		return nil, err
	}

	// Merge the per-day snapshots, keyed by model name.
	modelMap := make(map[string]TopModelStat)
	for _, ds := range dailyStats {
		var topModels []model.TopModelStat
		if err := json.Unmarshal([]byte(ds.TopModels), &topModels); err != nil {
			// Malformed snapshot: skip this day rather than failing the query.
			continue
		}
		for _, tm := range topModels {
			agg := modelMap[tm.Model]
			agg.Model = tm.Model
			agg.Requests += tm.Requests
			agg.Tokens += tm.Tokens
			modelMap[tm.Model] = agg
		}
	}

	// Rank by request count, descending. sort.Slice replaces the previous
	// hand-rolled O(n²) exchange sort (which was also mislabeled "bubble sort").
	result := make([]TopModelStat, 0, len(modelMap))
	for _, tm := range modelMap {
		result = append(result, tm)
	}
	sort.Slice(result, func(i, j int) bool {
		return result[i].Requests > result[j].Requests
	})

	// Keep only the top 10 models.
	if len(result) > 10 {
		result = result[:10]
	}
	return result, nil
}