Mirror of https://github.com/EZ-Api/ez-api.git, synced 2026-01-13 17:47:51 +00:00
feat(api): add alert system with CRUD endpoints and statistics
Introduce a comprehensive alert management system for monitoring system events and notifications. Changes include:
- Add Alert model with type, severity, status, and metadata fields
- Implement AlertHandler with full CRUD operations (create, list, get, acknowledge, resolve, dismiss)
- Add alert statistics endpoint for counts by status and severity
- Register Alert model in database auto-migration
- Add minute-level aggregation to log stats (limited to 6-hour range)
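The commit message only names the Alert model's fields and lifecycle actions; the diff shown below covers just the log-stats change. As a rough illustration, a minimal GORM-style sketch of what such a model could look like. Field names, types, and tags here are assumptions drawn from the message, not code from the actual commit.

package models

import "time"

// Alert is a hypothetical sketch of the model described in the commit
// message; the real implementation may differ.
type Alert struct {
	ID        uint      `gorm:"primaryKey" json:"id"`
	Type      string    `json:"type"`     // event category, e.g. "system"
	Severity  string    `json:"severity"` // e.g. "info", "warning", "critical"
	Status    string    `json:"status"`   // e.g. "active", "acknowledged", "resolved", "dismissed"
	Metadata  string    `json:"metadata"` // free-form payload
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}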
@@ -235,6 +235,8 @@ type GroupedStatsItem struct {
     Month string `json:"month,omitempty"`
     // For group_by=hour
     Hour string `json:"hour,omitempty"`
+    // For group_by=minute
+    Minute string `json:"minute,omitempty"`
 
     Count int64 `json:"count"`
     TokensIn int64 `json:"tokens_in"`
@@ -348,24 +350,29 @@ func (h *Handler) deleteLogsBefore(cutoff time.Time, keyID uint, modelName strin
 
 // LogStats godoc
 // @Summary Log stats (admin)
-// @Description Aggregate log stats with basic filtering. Use group_by param for grouped statistics (model/day/month/hour). Without group_by returns LogStatsResponse; with group_by returns GroupedStatsResponse.
+// @Description Aggregate log stats with basic filtering. Use group_by param for grouped statistics (model/day/month/hour/minute). Without group_by returns LogStatsResponse; with group_by returns GroupedStatsResponse. Note: minute-level aggregation is limited to 6-hour time ranges.
 // @Tags admin
 // @Produce json
 // @Security AdminAuth
 // @Param since query int false "unix seconds"
 // @Param until query int false "unix seconds"
-// @Param group_by query string false "group by dimension: model, day, month, hour. Returns GroupedStatsResponse when specified." Enums(model, day, month, hour)
+// @Param group_by query string false "group by dimension: model, day, month, hour, minute. Returns GroupedStatsResponse when specified." Enums(model, day, month, hour, minute)
 // @Success 200 {object} LogStatsResponse "Default aggregated stats (when group_by is not specified)"
 // @Success 200 {object} GroupedStatsResponse "Grouped stats (when group_by is specified)"
 // @Failure 400 {object} gin.H
 // @Failure 500 {object} gin.H
 // @Router /admin/logs/stats [get]
 func (h *Handler) LogStats(c *gin.Context) {
     q := h.logBaseQuery()
 
+    var sinceTime, untilTime *time.Time
     if t, ok := parseUnixSeconds(c.Query("since")); ok {
         q = q.Where("created_at >= ?", t)
+        sinceTime = &t
     }
     if t, ok := parseUnixSeconds(c.Query("until")); ok {
         q = q.Where("created_at <= ?", t)
+        untilTime = &t
     }
 
     groupBy := strings.TrimSpace(c.Query("group_by"))
@@ -382,6 +389,9 @@ func (h *Handler) LogStats(c *gin.Context) {
     case "hour":
         h.logStatsByHour(c, q)
         return
+    case "minute":
+        h.logStatsByMinute(c, q, sinceTime, untilTime)
+        return
     }
 
     // Default: aggregated stats (backward compatible)
@@ -544,6 +554,61 @@ func (h *Handler) logStatsByHour(c *gin.Context, q *gorm.DB) {
     c.JSON(http.StatusOK, GroupedStatsResponse{Items: items})
 }
 
+// maxMinuteRangeDuration is the maximum time range allowed for minute-level aggregation (6 hours)
+const maxMinuteRangeDuration = 6 * time.Hour
+
+// logStatsByMinute handles group_by=minute with time range validation
+func (h *Handler) logStatsByMinute(c *gin.Context, q *gorm.DB, sinceTime, untilTime *time.Time) {
+    // Validate time range - minute-level aggregation requires since/until and max 6 hours
+    if sinceTime == nil || untilTime == nil {
+        c.JSON(http.StatusBadRequest, gin.H{"error": "minute-level aggregation requires both 'since' and 'until' parameters"})
+        return
+    }
+
+    duration := untilTime.Sub(*sinceTime)
+    if duration > maxMinuteRangeDuration {
+        c.JSON(http.StatusBadRequest, gin.H{
+            "error":        "time range too large for minute-level aggregation",
+            "max_hours":    6,
+            "actual_hours": duration.Hours(),
+        })
+        return
+    }
+
+    if duration < 0 {
+        c.JSON(http.StatusBadRequest, gin.H{"error": "'until' must be after 'since'"})
+        return
+    }
+
+    type minuteStats struct {
+        Minute       string
+        Cnt          int64
+        TokensIn     int64
+        TokensOut    int64
+        AvgLatencyMs float64
+    }
+    var rows []minuteStats
+    // PostgreSQL DATE_TRUNC for minute-level aggregation
+    if err := q.Select(`DATE_TRUNC('minute', created_at) as minute, COUNT(*) as cnt, COALESCE(SUM(tokens_in),0) as tokens_in, COALESCE(SUM(tokens_out),0) as tokens_out, COALESCE(AVG(latency_ms),0) as avg_latency_ms`).
+        Group("minute").
+        Order("minute ASC").
+        Scan(&rows).Error; err != nil {
+        c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to aggregate by minute", "details": err.Error()})
+        return
+    }
+    items := make([]GroupedStatsItem, 0, len(rows))
+    for _, r := range rows {
+        items = append(items, GroupedStatsItem{
+            Minute:       r.Minute,
+            Count:        r.Cnt,
+            TokensIn:     r.TokensIn,
+            TokensOut:    r.TokensOut,
+            AvgLatencyMs: r.AvgLatencyMs,
+        })
+    }
+    c.JSON(http.StatusOK, GroupedStatsResponse{Items: items})
+}
+
 // ListSelfLogs godoc
 // @Summary List logs (master)
 // @Description List request logs for the authenticated master
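For reference, a minimal client-side sketch of how the new minute-level grouping could be queried. The route, query parameters, 6-hour cap, and per-item fields come from the diff above; the base URL, the Authorization header, and the JSON tags on the response wrapper and the latency field are assumptions.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// Hypothetical client-side mirror of GroupedStatsResponse; the wrapper's
// "items" key and the "avg_latency_ms" tag are assumed, not confirmed by the diff.
type groupedStatsItem struct {
	Minute       string  `json:"minute,omitempty"`
	Count        int64   `json:"count"`
	TokensIn     int64   `json:"tokens_in"`
	TokensOut    int64   `json:"tokens_out"`
	AvgLatencyMs float64 `json:"avg_latency_ms"`
}

type groupedStatsResponse struct {
	Items []groupedStatsItem `json:"items"`
}

func main() {
	// group_by=minute requires both since and until, and until-since must be <= 6h.
	until := time.Now().Unix()
	since := time.Now().Add(-2 * time.Hour).Unix()

	// Placeholder host and admin credential; adjust to your deployment.
	url := fmt.Sprintf("http://localhost:8080/admin/logs/stats?group_by=minute&since=%d&until=%d", since, until)
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <admin-token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out groupedStatsResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	for _, item := range out.Items {
		fmt.Printf("%s  count=%d tokens_in=%d tokens_out=%d avg_latency_ms=%.1f\n",
			item.Minute, item.Count, item.TokensIn, item.TokensOut, item.AvgLatencyMs)
	}
}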