package controllers
|
|
|
|
import (
|
|
"errors"
|
|
"hr_receiver/config"
|
|
"hr_receiver/models"
|
|
"net/http"
|
|
"sort"
|
|
"strconv"
|
|
"strings"
|
|
"time"
|
|
|
|
"github.com/gin-gonic/gin"
|
|
"gorm.io/gorm"
|
|
)
|
|
|
|
// StatisticsController serves the AI-analysis statistics endpoints:
// record listing, record deletion, per-region aggregation, and the
// per-day timeline.
type StatisticsController struct {
	DB *gorm.DB // database handle; populated from config.DB by NewStatisticsController
}
|
|
|
|
func NewStatisticsController() *StatisticsController {
|
|
return &StatisticsController{DB: config.DB}
|
|
}
|
|
|
|
// --- Request parameters ---
|
|
|
|
// analysisRecordListParams binds the query string of the record-list
// endpoint. StartTime/EndTime bound the records' upload_time column
// (presumably Unix milliseconds — matches the time.UnixMilli conversions
// elsewhere in this file; confirm against the writers of upload_time).
type analysisRecordListParams struct {
	PageNum   int    `form:"pageNum,default=1"`   // 1-based page index; clamped to >= 1 by the handler
	PageSize  int    `form:"pageSize,default=10"` // page length; clamped to [1,100] by the handler
	RegionID  uint32 `form:"regionId"`            // 0 means "all regions"
	StartTime int64  `form:"startTime"`           // inclusive lower bound on upload_time; 0 disables the filter
	EndTime   int64  `form:"endTime"`             // inclusive upper bound on upload_time; 0 disables the filter
}
|
|
|
|
// --- Query endpoints ---
|
|
|
|
func (sc *StatisticsController) ListAIAnalysisRecords(c *gin.Context) {
|
|
var params analysisRecordListParams
|
|
if err := c.ShouldBindQuery(¶ms); err != nil {
|
|
writeError(c, http.StatusBadRequest, err.Error())
|
|
return
|
|
}
|
|
if params.PageNum < 1 {
|
|
params.PageNum = 1
|
|
}
|
|
if params.PageSize < 1 || params.PageSize > 100 {
|
|
params.PageSize = 10
|
|
}
|
|
offset := (params.PageNum - 1) * params.PageSize
|
|
|
|
query := sc.DB.Model(&models.AIAnalysisRecord{})
|
|
if params.RegionID > 0 {
|
|
query = query.Where("region_id = ?", params.RegionID)
|
|
}
|
|
if params.StartTime > 0 {
|
|
query = query.Where("upload_time >= ?", params.StartTime)
|
|
}
|
|
if params.EndTime > 0 {
|
|
query = query.Where("upload_time <= ?", params.EndTime)
|
|
}
|
|
|
|
var total int64
|
|
if err := query.Count(&total).Error; err != nil {
|
|
writeError(c, http.StatusInternalServerError, "failed to count records")
|
|
return
|
|
}
|
|
|
|
var records []models.AIAnalysisRecord
|
|
if err := query.Order("upload_time DESC").Offset(offset).Limit(params.PageSize).Find(&records).Error; err != nil {
|
|
writeError(c, http.StatusInternalServerError, "failed to query records")
|
|
return
|
|
}
|
|
|
|
writeSuccess(c, http.StatusOK, "query success", gin.H{
|
|
"list": records,
|
|
"pagination": gin.H{
|
|
"currentPage": params.PageNum,
|
|
"pageSize": params.PageSize,
|
|
"totalList": total,
|
|
"totalPage": int((total + int64(params.PageSize) - 1) / int64(params.PageSize)),
|
|
},
|
|
})
|
|
}
|
|
|
|
// --- Delete endpoint ---
|
|
|
|
func (sc *StatisticsController) DeleteAIAnalysisRecord(c *gin.Context) {
|
|
id := strings.TrimSpace(c.Param("id"))
|
|
if id == "" {
|
|
writeError(c, http.StatusBadRequest, "id is required")
|
|
return
|
|
}
|
|
|
|
var record models.AIAnalysisRecord
|
|
if err := sc.DB.First(&record, id).Error; err != nil {
|
|
if errors.Is(err, gorm.ErrRecordNotFound) {
|
|
writeError(c, http.StatusNotFound, "record not found")
|
|
return
|
|
}
|
|
writeError(c, http.StatusInternalServerError, "failed to query record")
|
|
return
|
|
}
|
|
|
|
if err := sc.DB.Delete(&record).Error; err != nil {
|
|
writeError(c, http.StatusInternalServerError, "failed to delete record")
|
|
return
|
|
}
|
|
|
|
writeSuccess(c, http.StatusOK, "delete success", nil)
|
|
}
|
|
|
|
// --- Statistics endpoints ---
|
|
|
|
// regionStatisticsItem is one entry of the per-region statistics response;
// the same shape is reused for the cross-region "overall" summary.
type regionStatisticsItem struct {
	RegionID              uint32           `json:"regionId"`         // 0 for records without a region and for the overall summary
	KindergartenName      string           `json:"kindergartenName"` // empty when no kindergarten matches the region
	Count                 int64            `json:"count"`            // number of analysis records
	TotalInputTokens      int64            `json:"totalInputTokens"`
	TotalOutputTokens     int64            `json:"totalOutputTokens"`
	TotalInputSizeBytes   int64            `json:"totalInputSizeBytes"`
	TotalOutputSizeBytes  int64            `json:"totalOutputSizeBytes"`
	TotalDurationMs       int64            `json:"totalDurationMs"`
	AvgDurationMs         float64          `json:"avgDurationMs"` // TotalDurationMs / Count; 0 when Count is 0
	TotalOriginalFileSize int64            `json:"totalOriginalFileSize"`
	TotalCompressedSize   int64            `json:"totalCompressedSize"`
	AnalysisTypeCounts    map[string]int64 `json:"analysisTypeCounts"` // analysis_type -> record count
	SourceTypeCounts      map[string]int64 `json:"sourceTypeCounts"`   // source_type -> record count
	FirstUsedAt           *time.Time       `json:"firstUsedAt"`        // earliest upload_time; nil when there are no records
	LastUsedAt            *time.Time       `json:"lastUsedAt"`         // latest upload_time; nil when there are no records
}
|
|
|
|
func (sc *StatisticsController) StatisticsByRegion(c *gin.Context) {
|
|
regionIDStr := c.Query("regionId")
|
|
startTimeStr := c.Query("startTime")
|
|
endTimeStr := c.Query("endTime")
|
|
|
|
query := sc.DB.Model(&models.AIAnalysisRecord{})
|
|
if regionIDStr != "" {
|
|
if regionID, err := strconv.ParseUint(regionIDStr, 10, 32); err == nil {
|
|
query = query.Where("region_id = ?", uint32(regionID))
|
|
}
|
|
}
|
|
if startTimeStr != "" {
|
|
if startTime, err := strconv.ParseInt(startTimeStr, 10, 64); err == nil {
|
|
query = query.Where("upload_time >= ?", startTime)
|
|
}
|
|
}
|
|
if endTimeStr != "" {
|
|
if endTime, err := strconv.ParseInt(endTimeStr, 10, 64); err == nil {
|
|
query = query.Where("upload_time <= ?", endTime)
|
|
}
|
|
}
|
|
|
|
type rawStats struct {
|
|
RegionID *uint32
|
|
Count int64
|
|
TotalInputTokens int64
|
|
TotalOutputTokens int64
|
|
TotalInputSizeBytes int64
|
|
TotalOutputSizeBytes int64
|
|
TotalDurationMs int64
|
|
TotalOriginalFileSize int64
|
|
TotalCompressedSize int64
|
|
FirstUsedAt *int64
|
|
LastUsedAt *int64
|
|
}
|
|
|
|
var rawResults []rawStats
|
|
err := query.Select(`
|
|
region_id,
|
|
COUNT(*) as count,
|
|
COALESCE(SUM(input_tokens), 0) as total_input_tokens,
|
|
COALESCE(SUM(output_tokens), 0) as total_output_tokens,
|
|
COALESCE(SUM(input_size_bytes), 0) as total_input_size_bytes,
|
|
COALESCE(SUM(output_size_bytes), 0) as total_output_size_bytes,
|
|
COALESCE(SUM(duration_ms), 0) as total_duration_ms,
|
|
COALESCE(SUM(original_file_size), 0) as total_original_file_size,
|
|
COALESCE(SUM(compressed_content_size), 0) as total_compressed_size,
|
|
MIN(upload_time) as first_used_at,
|
|
MAX(upload_time) as last_used_at
|
|
`).Group("region_id").Scan(&rawResults).Error
|
|
|
|
if err != nil {
|
|
writeError(c, http.StatusInternalServerError, "failed to query statistics")
|
|
return
|
|
}
|
|
|
|
type analysisTypeCount struct {
|
|
RegionID *uint32
|
|
AnalysisType string
|
|
Count int64
|
|
}
|
|
var analysisTypeResults []analysisTypeCount
|
|
if err := query.Select("region_id, analysis_type, COUNT(*) as count").Group("region_id, analysis_type").Scan(&analysisTypeResults).Error; err != nil {
|
|
writeError(c, http.StatusInternalServerError, "failed to query analysis type statistics")
|
|
return
|
|
}
|
|
|
|
type sourceTypeCount struct {
|
|
RegionID *uint32
|
|
SourceType string
|
|
Count int64
|
|
}
|
|
var sourceTypeResults []sourceTypeCount
|
|
if err := query.Select("region_id, source_type, COUNT(*) as count").Group("region_id, source_type").Scan(&sourceTypeResults).Error; err != nil {
|
|
writeError(c, http.StatusInternalServerError, "failed to query source type statistics")
|
|
return
|
|
}
|
|
|
|
analysisTypeMap := make(map[uint32]map[string]int64)
|
|
for _, r := range analysisTypeResults {
|
|
regionID := uint32(0)
|
|
if r.RegionID != nil {
|
|
regionID = *r.RegionID
|
|
}
|
|
if analysisTypeMap[regionID] == nil {
|
|
analysisTypeMap[regionID] = make(map[string]int64)
|
|
}
|
|
analysisTypeMap[regionID][r.AnalysisType] = r.Count
|
|
}
|
|
|
|
sourceTypeMap := make(map[uint32]map[string]int64)
|
|
for _, r := range sourceTypeResults {
|
|
regionID := uint32(0)
|
|
if r.RegionID != nil {
|
|
regionID = *r.RegionID
|
|
}
|
|
if sourceTypeMap[regionID] == nil {
|
|
sourceTypeMap[regionID] = make(map[string]int64)
|
|
}
|
|
sourceTypeMap[regionID][r.SourceType] = r.Count
|
|
}
|
|
|
|
// 收集所有 regionId 查询幼儿园名称
|
|
regionIDs := make([]uint32, 0, len(rawResults))
|
|
for _, r := range rawResults {
|
|
if r.RegionID != nil && *r.RegionID > 0 {
|
|
regionIDs = append(regionIDs, *r.RegionID)
|
|
}
|
|
}
|
|
kindergartenMap := make(map[uint32]string)
|
|
if len(regionIDs) > 0 {
|
|
var kindergartens []models.Kindergarten
|
|
if err := sc.DB.Where("region_id IN ?", regionIDs).Find(&kindergartens).Error; err == nil {
|
|
for _, k := range kindergartens {
|
|
kindergartenMap[k.RegionID] = k.Name
|
|
}
|
|
}
|
|
}
|
|
|
|
overall := regionStatisticsItem{
|
|
AnalysisTypeCounts: make(map[string]int64),
|
|
SourceTypeCounts: make(map[string]int64),
|
|
}
|
|
regions := make(map[string]regionStatisticsItem, len(rawResults))
|
|
|
|
for _, r := range rawResults {
|
|
regionID := uint32(0)
|
|
if r.RegionID != nil {
|
|
regionID = *r.RegionID
|
|
}
|
|
avgDuration := float64(0)
|
|
if r.Count > 0 {
|
|
avgDuration = float64(r.TotalDurationMs) / float64(r.Count)
|
|
}
|
|
kgName := ""
|
|
if regionID > 0 {
|
|
kgName = kindergartenMap[regionID]
|
|
}
|
|
|
|
var firstUsedAt, lastUsedAt *time.Time
|
|
if r.FirstUsedAt != nil {
|
|
t := time.UnixMilli(*r.FirstUsedAt)
|
|
firstUsedAt = &t
|
|
}
|
|
if r.LastUsedAt != nil {
|
|
t := time.UnixMilli(*r.LastUsedAt)
|
|
lastUsedAt = &t
|
|
}
|
|
|
|
item := regionStatisticsItem{
|
|
RegionID: regionID,
|
|
KindergartenName: kgName,
|
|
Count: r.Count,
|
|
TotalInputTokens: r.TotalInputTokens,
|
|
TotalOutputTokens: r.TotalOutputTokens,
|
|
TotalInputSizeBytes: r.TotalInputSizeBytes,
|
|
TotalOutputSizeBytes: r.TotalOutputSizeBytes,
|
|
TotalDurationMs: r.TotalDurationMs,
|
|
AvgDurationMs: avgDuration,
|
|
TotalOriginalFileSize: r.TotalOriginalFileSize,
|
|
TotalCompressedSize: r.TotalCompressedSize,
|
|
AnalysisTypeCounts: analysisTypeMap[regionID],
|
|
SourceTypeCounts: sourceTypeMap[regionID],
|
|
FirstUsedAt: firstUsedAt,
|
|
LastUsedAt: lastUsedAt,
|
|
}
|
|
|
|
regions[strconv.FormatUint(uint64(regionID), 10)] = item
|
|
|
|
overall.Count += r.Count
|
|
overall.TotalInputTokens += r.TotalInputTokens
|
|
overall.TotalOutputTokens += r.TotalOutputTokens
|
|
overall.TotalInputSizeBytes += r.TotalInputSizeBytes
|
|
overall.TotalOutputSizeBytes += r.TotalOutputSizeBytes
|
|
overall.TotalDurationMs += r.TotalDurationMs
|
|
overall.TotalOriginalFileSize += r.TotalOriginalFileSize
|
|
overall.TotalCompressedSize += r.TotalCompressedSize
|
|
|
|
if firstUsedAt != nil {
|
|
if overall.FirstUsedAt == nil || firstUsedAt.Before(*overall.FirstUsedAt) {
|
|
overall.FirstUsedAt = firstUsedAt
|
|
}
|
|
}
|
|
if lastUsedAt != nil {
|
|
if overall.LastUsedAt == nil || lastUsedAt.After(*overall.LastUsedAt) {
|
|
overall.LastUsedAt = lastUsedAt
|
|
}
|
|
}
|
|
}
|
|
|
|
for _, r := range analysisTypeResults {
|
|
overall.AnalysisTypeCounts[r.AnalysisType] += r.Count
|
|
}
|
|
for _, r := range sourceTypeResults {
|
|
overall.SourceTypeCounts[r.SourceType] += r.Count
|
|
}
|
|
|
|
if overall.Count > 0 {
|
|
overall.AvgDurationMs = float64(overall.TotalDurationMs) / float64(overall.Count)
|
|
}
|
|
|
|
writeSuccess(c, http.StatusOK, "query success", gin.H{
|
|
"overall": overall,
|
|
"regions": regions,
|
|
})
|
|
}
|
|
|
|
// TimelineStatistics returns per-day record counts and token totals, both
// overall and broken down by region. Optional query params: regionId,
// startTime, endTime (upload_time bounds, presumably Unix milliseconds —
// the SQL divides by 1000.0 before TO_TIMESTAMP); values that fail to
// parse are silently ignored.
//
// NOTE(review): DATE(TO_TIMESTAMP(...)) is PostgreSQL syntax — this
// handler assumes a PostgreSQL backend; confirm against config.DB.
func (sc *StatisticsController) TimelineStatistics(c *gin.Context) {
	regionIDStr := c.Query("regionId")
	startTimeStr := c.Query("startTime")
	endTimeStr := c.Query("endTime")

	// Apply optional filters; unparseable values are ignored, not rejected.
	query := sc.DB.Model(&models.AIAnalysisRecord{})
	if regionIDStr != "" {
		if regionID, err := strconv.ParseUint(regionIDStr, 10, 32); err == nil {
			query = query.Where("region_id = ?", uint32(regionID))
		}
	}
	if startTimeStr != "" {
		if startTime, err := strconv.ParseInt(startTimeStr, 10, 64); err == nil {
			query = query.Where("upload_time >= ?", startTime)
		}
	}
	if endTimeStr != "" {
		if endTime, err := strconv.ParseInt(endTimeStr, 10, 64); err == nil {
			query = query.Where("upload_time <= ?", endTime)
		}
	}

	// timelineItem is one day's aggregate in the response payload.
	type timelineItem struct {
		Date         string `json:"date"`
		Count        int64  `json:"count"`
		InputTokens  int64  `json:"inputTokens"`
		OutputTokens int64  `json:"outputTokens"`
	}

	// rawRegionTimeline is the scan target for the grouped SQL below.
	// RegionID is a pointer so a NULL region_id maps to nil (folded to 0 later).
	type rawRegionTimeline struct {
		RegionID     *uint32
		Date         string
		Count        int64
		InputTokens  int64
		OutputTokens int64
	}

	var rawResults []rawRegionTimeline
	// upload_time / 1000.0 converts milliseconds to seconds for
	// TO_TIMESTAMP; DATE() then truncates to a calendar day.
	err := query.Select(`
		region_id,
		DATE(TO_TIMESTAMP(upload_time / 1000.0)) as date,
		COUNT(*) as count,
		COALESCE(SUM(input_tokens), 0) as input_tokens,
		COALESCE(SUM(output_tokens), 0) as output_tokens
	`).Group("region_id, DATE(TO_TIMESTAMP(upload_time / 1000.0))").Order("region_id, date ASC").Scan(&rawResults).Error

	if err != nil {
		writeError(c, http.StatusInternalServerError, "failed to query timeline statistics")
		return
	}

	overallMap := make(map[string]*timelineItem)      // date -> cross-region totals
	regionItemsMap := make(map[string][]timelineItem) // region id (decimal string) -> per-day series
	regionIDs := make([]uint32, 0)
	regionIDSet := make(map[uint32]struct{})

	for _, r := range rawResults {
		// Fold this row into the overall per-day totals.
		if overallMap[r.Date] == nil {
			overallMap[r.Date] = &timelineItem{Date: r.Date}
		}
		overallMap[r.Date].Count += r.Count
		overallMap[r.Date].InputTokens += r.InputTokens
		overallMap[r.Date].OutputTokens += r.OutputTokens

		// NULL region_id is bucketed under key "0".
		regionID := uint32(0)
		if r.RegionID != nil {
			regionID = *r.RegionID
		}
		// NOTE(review): shadows the outer regionIDStr (the query param);
		// intentional-looking but easy to misread.
		regionIDStr := strconv.FormatUint(uint64(regionID), 10)
		regionItemsMap[regionIDStr] = append(regionItemsMap[regionIDStr], timelineItem{
			Date:         r.Date,
			Count:        r.Count,
			InputTokens:  r.InputTokens,
			OutputTokens: r.OutputTokens,
		})
		// Collect distinct non-zero region ids for the name lookup below.
		if _, ok := regionIDSet[regionID]; !ok && regionID > 0 {
			regionIDSet[regionID] = struct{}{}
			regionIDs = append(regionIDs, regionID)
		}
	}

	// Look up kindergarten names for the regions seen; a lookup failure is
	// tolerated and simply leaves the names empty.
	kindergartenMap := make(map[uint32]string)
	if len(regionIDs) > 0 {
		var kindergartens []models.Kindergarten
		if err := sc.DB.Where("region_id IN ?", regionIDs).Find(&kindergartens).Error; err == nil {
			for _, k := range kindergartens {
				kindergartenMap[k.RegionID] = k.Name
			}
		}
	}

	// regionTimeline pairs a region's display name with its per-day series.
	type regionTimeline struct {
		Name  string         `json:"name"`
		Items []timelineItem `json:"items"`
	}
	regionsMap := make(map[string]regionTimeline)
	for regionIDStr, items := range regionItemsMap {
		name := ""
		if regionID, err := strconv.ParseUint(regionIDStr, 10, 32); err == nil && regionID > 0 {
			name = kindergartenMap[uint32(regionID)]
		}
		regionsMap[regionIDStr] = regionTimeline{
			Name:  name,
			Items: items,
		}
	}

	// Flatten the overall map and sort ascending by date string (ISO-style
	// dates sort correctly lexicographically).
	var overall []timelineItem
	for _, item := range overallMap {
		overall = append(overall, *item)
	}
	sort.Slice(overall, func(i, j int) bool {
		return overall[i].Date < overall[j].Date
	})

	writeSuccess(c, http.StatusOK, "query success", gin.H{
		"overall": overall,
		"regions": regionsMap,
	})
}
|