763 lines
24 KiB
Go
763 lines
24 KiB
Go
package controllers
|
|
|
|
import (
|
|
"errors"
|
|
"hr_receiver/config"
|
|
"hr_receiver/models"
|
|
"net/http"
|
|
"sort"
|
|
"strconv"
|
|
"strings"
|
|
"time"
|
|
|
|
"github.com/gin-gonic/gin"
|
|
"gorm.io/gorm"
|
|
)
|
|
|
|
// StatisticsController serves the query, delete, and aggregation endpoints
// for AI analysis records and MQTT training-session records.
type StatisticsController struct {
	DB *gorm.DB // database handle used by every query in this controller
}
|
|
|
|
func NewStatisticsController() *StatisticsController {
|
|
return &StatisticsController{DB: config.DB}
|
|
}
|
|
|
|
// --- Request parameters ---
|
|
|
|
// analysisRecordListParams binds the query-string parameters accepted by
// ListAIAnalysisRecords. Zero values on the optional fields mean "no filter".
type analysisRecordListParams struct {
	PageNum   int    `form:"pageNum,default=1"`   // 1-based page index; values < 1 are reset to 1 by the handler
	PageSize  int    `form:"pageSize,default=10"` // page size; values outside [1,100] are reset to 10 by the handler
	RegionID  uint32 `form:"regionId"`            // optional region filter; 0 = all regions
	StartTime int64  `form:"startTime"`           // optional inclusive lower bound on upload_time (epoch milliseconds)
	EndTime   int64  `form:"endTime"`             // optional inclusive upper bound on upload_time (epoch milliseconds)
}
|
|
|
|
// --- Query endpoints ---
|
|
|
|
func (sc *StatisticsController) ListAIAnalysisRecords(c *gin.Context) {
|
|
var params analysisRecordListParams
|
|
if err := c.ShouldBindQuery(¶ms); err != nil {
|
|
writeError(c, http.StatusBadRequest, err.Error())
|
|
return
|
|
}
|
|
if params.PageNum < 1 {
|
|
params.PageNum = 1
|
|
}
|
|
if params.PageSize < 1 || params.PageSize > 100 {
|
|
params.PageSize = 10
|
|
}
|
|
offset := (params.PageNum - 1) * params.PageSize
|
|
|
|
query := sc.DB.Model(&models.AIAnalysisRecord{})
|
|
if params.RegionID > 0 {
|
|
query = query.Where("region_id = ?", params.RegionID)
|
|
}
|
|
if params.StartTime > 0 {
|
|
query = query.Where("upload_time >= ?", params.StartTime)
|
|
}
|
|
if params.EndTime > 0 {
|
|
query = query.Where("upload_time <= ?", params.EndTime)
|
|
}
|
|
|
|
var total int64
|
|
if err := query.Count(&total).Error; err != nil {
|
|
writeError(c, http.StatusInternalServerError, "failed to count records")
|
|
return
|
|
}
|
|
|
|
var records []models.AIAnalysisRecord
|
|
if err := query.Order("upload_time DESC").Offset(offset).Limit(params.PageSize).Find(&records).Error; err != nil {
|
|
writeError(c, http.StatusInternalServerError, "failed to query records")
|
|
return
|
|
}
|
|
|
|
writeSuccess(c, http.StatusOK, "query success", gin.H{
|
|
"list": records,
|
|
"pagination": gin.H{
|
|
"currentPage": params.PageNum,
|
|
"pageSize": params.PageSize,
|
|
"totalList": total,
|
|
"totalPage": int((total + int64(params.PageSize) - 1) / int64(params.PageSize)),
|
|
},
|
|
})
|
|
}
|
|
|
|
// --- Delete endpoint ---
|
|
|
|
func (sc *StatisticsController) DeleteAIAnalysisRecord(c *gin.Context) {
|
|
id := strings.TrimSpace(c.Param("id"))
|
|
if id == "" {
|
|
writeError(c, http.StatusBadRequest, "id is required")
|
|
return
|
|
}
|
|
|
|
var record models.AIAnalysisRecord
|
|
if err := sc.DB.First(&record, id).Error; err != nil {
|
|
if errors.Is(err, gorm.ErrRecordNotFound) {
|
|
writeError(c, http.StatusNotFound, "record not found")
|
|
return
|
|
}
|
|
writeError(c, http.StatusInternalServerError, "failed to query record")
|
|
return
|
|
}
|
|
|
|
if err := sc.DB.Delete(&record).Error; err != nil {
|
|
writeError(c, http.StatusInternalServerError, "failed to delete record")
|
|
return
|
|
}
|
|
|
|
writeSuccess(c, http.StatusOK, "delete success", nil)
|
|
}
|
|
|
|
// --- Statistics endpoints ---
|
|
|
|
// regionStatisticsItem is the per-region (and overall) aggregate returned by
// StatisticsByRegion. Total* fields are sums over the matched records, Avg*
// fields are per-record means, and FirstUsedAt/LastUsedAt are MIN/MAX of
// upload_time (epoch milliseconds converted via time.UnixMilli).
type regionStatisticsItem struct {
	RegionID              uint32           `json:"regionId"`         // 0 groups records whose region_id is NULL
	KindergartenName      string           `json:"kindergartenName"` // resolved from models.Kindergarten; "" when unknown
	Count                 int64            `json:"count"`
	TotalInputTokens      int64            `json:"totalInputTokens"`
	TotalOutputTokens     int64            `json:"totalOutputTokens"`
	TotalCacheHitTokens   int64            `json:"totalCacheHitTokens"`
	TotalCacheMissTokens  int64            `json:"totalCacheMissTokens"`
	TotalInputSizeBytes   int64            `json:"totalInputSizeBytes"`
	TotalOutputSizeBytes  int64            `json:"totalOutputSizeBytes"`
	TotalDurationMs       int64            `json:"totalDurationMs"`
	AvgDurationMs         float64          `json:"avgDurationMs"` // TotalDurationMs / Count (0 when Count == 0)
	AvgTotalCost          float64          `json:"avgTotalCost"`  // TotalCost / Count (0 when Count == 0)
	TotalOriginalFileSize int64            `json:"totalOriginalFileSize"`
	TotalCompressedSize   int64            `json:"totalCompressedSize"`
	TotalCost             float64          `json:"totalCost"`
	TotalInputCost        float64          `json:"totalInputCost"`  // derived in SQL from cost_json cacheHitCost + cacheMissCost
	TotalOutputCost       float64          `json:"totalOutputCost"` // derived in SQL from cost_json outputCost
	TotalCacheHitCost     float64          `json:"totalCacheHitCost"`
	TotalCacheMissCost    float64          `json:"totalCacheMissCost"`
	AnalysisTypeCounts    map[string]int64 `json:"analysisTypeCounts"` // record count per analysis_type
	SourceTypeCounts      map[string]int64 `json:"sourceTypeCounts"`   // record count per source_type
	FirstUsedAt           *time.Time       `json:"firstUsedAt"`        // nil when the group has no records
	LastUsedAt            *time.Time       `json:"lastUsedAt"`         // nil when the group has no records
}
|
|
|
|
// StatisticsByRegion aggregates AI analysis records per region and overall:
// record counts, token/byte/duration sums, costs (total plus a cost_json
// breakdown), per-analysis-type and per-source-type counts, and first/last
// usage timestamps. Optional query filters: regionId, and an inclusive
// [startTime, endTime] window on upload_time (epoch milliseconds).
// Unparseable filter values are silently ignored (filter simply not applied).
//
// Response: 200 with {overall, regions} where regions is keyed by the
// decimal region id ("0" for records with a NULL region_id); 500 on any
// database error.
func (sc *StatisticsController) StatisticsByRegion(c *gin.Context) {
	regionIDStr := c.Query("regionId")
	startTimeStr := c.Query("startTime")
	endTimeStr := c.Query("endTime")

	// Build the shared, filtered base query.
	query := sc.DB.Model(&models.AIAnalysisRecord{})
	if regionIDStr != "" {
		if regionID, err := strconv.ParseUint(regionIDStr, 10, 32); err == nil {
			query = query.Where("region_id = ?", uint32(regionID))
		}
	}
	if startTimeStr != "" {
		if startTime, err := strconv.ParseInt(startTimeStr, 10, 64); err == nil {
			query = query.Where("upload_time >= ?", startTime)
		}
	}
	if endTimeStr != "" {
		if endTime, err := strconv.ParseInt(endTimeStr, 10, 64); err == nil {
			query = query.Where("upload_time <= ?", endTime)
		}
	}

	// rawStats mirrors the aggregate SELECT below. RegionID is a pointer so
	// rows with a NULL region_id scan cleanly and are grouped under 0 later.
	type rawStats struct {
		RegionID              *uint32
		Count                 int64
		TotalInputTokens      int64
		TotalOutputTokens     int64
		TotalCacheHitTokens   int64
		TotalCacheMissTokens  int64
		TotalInputSizeBytes   int64
		TotalOutputSizeBytes  int64
		TotalDurationMs       int64
		TotalOriginalFileSize int64
		TotalCompressedSize   int64
		TotalCost             float64
		TotalInputCost        float64
		TotalOutputCost       float64
		TotalCacheHitCost     float64
		TotalCacheMissCost    float64
		FirstUsedAt           *int64
		LastUsedAt            *int64
	}

	var rawResults []rawStats
	// PostgreSQL-specific SQL (jsonb ->> operator, ::float8 casts).
	// NOTE(review): total_input_cost is computed as cacheHitCost +
	// cacheMissCost from cost_json — presumably input cost is split into
	// cache-hit/miss components upstream; confirm against the cost writer.
	// NOTE(review): the same `query` chain is reused by three more finisher
	// calls below without gorm.Session{}; this relies on GORM's clause-merge
	// behavior (later Select overrides, Where conditions are shared) —
	// confirm there is no clause pollution across the four queries.
	err := query.Select(`
		region_id,
		COUNT(*) as count,
		COALESCE(SUM(input_tokens), 0) as total_input_tokens,
		COALESCE(SUM(output_tokens), 0) as total_output_tokens,
		COALESCE(SUM(cache_hit_tokens), 0) as total_cache_hit_tokens,
		COALESCE(SUM(cache_miss_tokens), 0) as total_cache_miss_tokens,
		COALESCE(SUM(input_size_bytes), 0) as total_input_size_bytes,
		COALESCE(SUM(output_size_bytes), 0) as total_output_size_bytes,
		COALESCE(SUM(duration_ms), 0) as total_duration_ms,
		COALESCE(SUM(original_file_size), 0) as total_original_file_size,
		COALESCE(SUM(compressed_content_size), 0) as total_compressed_size,
		COALESCE(SUM(total_cost), 0) as total_cost,
		COALESCE(SUM((cost_json::jsonb->>'cacheHitCost')::float8 + (cost_json::jsonb->>'cacheMissCost')::float8), 0) as total_input_cost,
		COALESCE(SUM((cost_json::jsonb->>'outputCost')::float8), 0) as total_output_cost,
		COALESCE(SUM((cost_json::jsonb->>'cacheHitCost')::float8), 0) as total_cache_hit_cost,
		COALESCE(SUM((cost_json::jsonb->>'cacheMissCost')::float8), 0) as total_cache_miss_cost,
		MIN(upload_time) as first_used_at,
		MAX(upload_time) as last_used_at
	`).Group("region_id").Scan(&rawResults).Error

	if err != nil {
		writeError(c, http.StatusInternalServerError, "failed to query statistics")
		return
	}

	// Per-region breakdown of record counts by analysis_type.
	type analysisTypeCount struct {
		RegionID     *uint32
		AnalysisType string
		Count        int64
	}
	var analysisTypeResults []analysisTypeCount
	if err := query.Select("region_id, analysis_type, COUNT(*) as count").Group("region_id, analysis_type").Scan(&analysisTypeResults).Error; err != nil {
		writeError(c, http.StatusInternalServerError, "failed to query analysis type statistics")
		return
	}

	// Per-region breakdown of record counts by source_type.
	type sourceTypeCount struct {
		RegionID   *uint32
		SourceType string
		Count      int64
	}
	var sourceTypeResults []sourceTypeCount
	if err := query.Select("region_id, source_type, COUNT(*) as count").Group("region_id, source_type").Scan(&sourceTypeResults).Error; err != nil {
		writeError(c, http.StatusInternalServerError, "failed to query source type statistics")
		return
	}

	// Index analysis-type counts by region id (NULL region -> 0).
	analysisTypeMap := make(map[uint32]map[string]int64)
	for _, r := range analysisTypeResults {
		regionID := uint32(0)
		if r.RegionID != nil {
			regionID = *r.RegionID
		}
		if analysisTypeMap[regionID] == nil {
			analysisTypeMap[regionID] = make(map[string]int64)
		}
		analysisTypeMap[regionID][r.AnalysisType] = r.Count
	}

	// Index source-type counts by region id (NULL region -> 0).
	sourceTypeMap := make(map[uint32]map[string]int64)
	for _, r := range sourceTypeResults {
		regionID := uint32(0)
		if r.RegionID != nil {
			regionID = *r.RegionID
		}
		if sourceTypeMap[regionID] == nil {
			sourceTypeMap[regionID] = make(map[string]int64)
		}
		sourceTypeMap[regionID][r.SourceType] = r.Count
	}

	// Collect all region ids to look up kindergarten names.
	regionIDs := make([]uint32, 0, len(rawResults))
	for _, r := range rawResults {
		if r.RegionID != nil && *r.RegionID > 0 {
			regionIDs = append(regionIDs, *r.RegionID)
		}
	}
	kindergartenMap := make(map[uint32]string)
	if len(regionIDs) > 0 {
		var kindergartens []models.Kindergarten
		// Name lookup is best-effort: on error the names are simply left empty.
		if err := sc.DB.Where("region_id IN ?", regionIDs).Find(&kindergartens).Error; err == nil {
			for _, k := range kindergartens {
				kindergartenMap[k.RegionID] = k.Name
			}
		}
	}

	// Accumulate a grand total alongside the per-region items.
	overall := regionStatisticsItem{
		AnalysisTypeCounts: make(map[string]int64),
		SourceTypeCounts:   make(map[string]int64),
	}
	regions := make(map[string]regionStatisticsItem, len(rawResults))

	for _, r := range rawResults {
		regionID := uint32(0)
		if r.RegionID != nil {
			regionID = *r.RegionID
		}
		// Guard averages against empty groups.
		avgDuration := float64(0)
		if r.Count > 0 {
			avgDuration = float64(r.TotalDurationMs) / float64(r.Count)
		}
		avgTotalCost := float64(0)
		if r.Count > 0 {
			avgTotalCost = r.TotalCost / float64(r.Count)
		}
		kgName := ""
		if regionID > 0 {
			kgName = kindergartenMap[regionID]
		}

		// upload_time is stored as epoch milliseconds.
		var firstUsedAt, lastUsedAt *time.Time
		if r.FirstUsedAt != nil {
			t := time.UnixMilli(*r.FirstUsedAt)
			firstUsedAt = &t
		}
		if r.LastUsedAt != nil {
			t := time.UnixMilli(*r.LastUsedAt)
			lastUsedAt = &t
		}

		item := regionStatisticsItem{
			RegionID:              regionID,
			KindergartenName:      kgName,
			Count:                 r.Count,
			TotalInputTokens:      r.TotalInputTokens,
			TotalOutputTokens:     r.TotalOutputTokens,
			TotalCacheHitTokens:   r.TotalCacheHitTokens,
			TotalCacheMissTokens:  r.TotalCacheMissTokens,
			TotalInputSizeBytes:   r.TotalInputSizeBytes,
			TotalOutputSizeBytes:  r.TotalOutputSizeBytes,
			TotalDurationMs:       r.TotalDurationMs,
			AvgDurationMs:         avgDuration,
			AvgTotalCost:          avgTotalCost,
			TotalOriginalFileSize: r.TotalOriginalFileSize,
			TotalCompressedSize:   r.TotalCompressedSize,
			TotalCost:             r.TotalCost,
			TotalInputCost:        r.TotalInputCost,
			TotalOutputCost:       r.TotalOutputCost,
			TotalCacheHitCost:     r.TotalCacheHitCost,
			TotalCacheMissCost:    r.TotalCacheMissCost,
			AnalysisTypeCounts:    analysisTypeMap[regionID],
			SourceTypeCounts:      sourceTypeMap[regionID],
			FirstUsedAt:           firstUsedAt,
			LastUsedAt:            lastUsedAt,
		}

		regions[strconv.FormatUint(uint64(regionID), 10)] = item

		overall.Count += r.Count
		overall.TotalInputTokens += r.TotalInputTokens
		overall.TotalOutputTokens += r.TotalOutputTokens
		overall.TotalCacheHitTokens += r.TotalCacheHitTokens
		overall.TotalCacheMissTokens += r.TotalCacheMissTokens
		overall.TotalInputSizeBytes += r.TotalInputSizeBytes
		overall.TotalOutputSizeBytes += r.TotalOutputSizeBytes
		overall.TotalDurationMs += r.TotalDurationMs
		overall.TotalOriginalFileSize += r.TotalOriginalFileSize
		overall.TotalCompressedSize += r.TotalCompressedSize
		overall.TotalCost += r.TotalCost
		overall.TotalInputCost += r.TotalInputCost
		overall.TotalOutputCost += r.TotalOutputCost
		overall.TotalCacheHitCost += r.TotalCacheHitCost
		overall.TotalCacheMissCost += r.TotalCacheMissCost

		// Overall first/last usage spans all regions.
		if firstUsedAt != nil {
			if overall.FirstUsedAt == nil || firstUsedAt.Before(*overall.FirstUsedAt) {
				overall.FirstUsedAt = firstUsedAt
			}
		}
		if lastUsedAt != nil {
			if overall.LastUsedAt == nil || lastUsedAt.After(*overall.LastUsedAt) {
				overall.LastUsedAt = lastUsedAt
			}
		}
	}

	// Fold the per-region type counts into the overall totals.
	for _, r := range analysisTypeResults {
		overall.AnalysisTypeCounts[r.AnalysisType] += r.Count
	}
	for _, r := range sourceTypeResults {
		overall.SourceTypeCounts[r.SourceType] += r.Count
	}

	if overall.Count > 0 {
		overall.AvgDurationMs = float64(overall.TotalDurationMs) / float64(overall.Count)
		overall.AvgTotalCost = overall.TotalCost / float64(overall.Count)
	}

	writeSuccess(c, http.StatusOK, "query success", gin.H{
		"overall": overall,
		"regions": regions,
	})
}
|
|
|
|
// trainingSessionRegionStatisticsItem is the per-region (and overall)
// aggregate returned by TrainingSessionStatisticsByRegion. Counts are derived
// in SQL from started_at/ended_at being set; timestamps come from MIN/MAX of
// published_at (epoch milliseconds converted via time.UnixMilli).
type trainingSessionRegionStatisticsItem struct {
	RegionID         uint32           `json:"regionId"`         // 0 groups records whose region_id is NULL
	KindergartenName string           `json:"kindergartenName"` // resolved from models.Kindergarten; "" when unknown
	Count            int64            `json:"count"`            // all matched session records
	StartedCount     int64            `json:"startedCount"`     // records with started_at set
	EndedCount       int64            `json:"endedCount"`       // records with ended_at set
	CompletedCount   int64            `json:"completedCount"`   // records with both started_at and ended_at set
	InProgressCount  int64            `json:"inProgressCount"`  // records with started_at set but ended_at NULL
	TotalDurationMs  int64            `json:"totalDurationMs"`  // sum of ended_at - started_at over completed sessions
	AvgDurationMs    float64          `json:"avgDurationMs"`    // TotalDurationMs / CompletedCount (0 when none)
	EventTypeCounts  map[string]int64 `json:"eventTypeCounts"`  // record count per event_type
	AppNameCounts    map[string]int64 `json:"appNameCounts"`    // record count per app_name
	FlavorTypeCounts map[string]int64 `json:"flavorTypeCounts"` // record count per flavor_type
	FirstPublishedAt *time.Time       `json:"firstPublishedAt"` // nil when the group has no records
	LastPublishedAt  *time.Time       `json:"lastPublishedAt"`  // nil when the group has no records
}
|
|
|
|
// TrainingSessionStatisticsByRegion aggregates MQTT training-session records
// per region and overall: counts by lifecycle state (started/ended/completed/
// in-progress), completed-session durations, and breakdowns by event_type,
// app_name and flavor_type. Optional query filters: regionId, flavorType, and
// an inclusive [startTime, endTime] window on published_at (epoch millis).
// Unparseable regionId/time values are silently ignored.
//
// Response: 200 with {overall, regions} keyed by decimal region id ("0" for
// NULL region_id); 500 on any database error.
func (sc *StatisticsController) TrainingSessionStatisticsByRegion(c *gin.Context) {
	regionIDStr := c.Query("regionId")
	flavorType := strings.TrimSpace(c.Query("flavorType"))
	startTimeStr := c.Query("startTime")
	endTimeStr := c.Query("endTime")

	// Build the shared, filtered base query.
	query := sc.DB.Model(&models.MqttTrainingSessionRecord{})
	if regionIDStr != "" {
		if regionID, err := strconv.ParseUint(regionIDStr, 10, 32); err == nil {
			query = query.Where("region_id = ?", uint32(regionID))
		}
	}
	if flavorType != "" {
		query = query.Where("flavor_type = ?", flavorType)
	}
	if startTimeStr != "" {
		if startTime, err := strconv.ParseInt(startTimeStr, 10, 64); err == nil {
			query = query.Where("published_at >= ?", startTime)
		}
	}
	if endTimeStr != "" {
		if endTime, err := strconv.ParseInt(endTimeStr, 10, 64); err == nil {
			query = query.Where("published_at <= ?", endTime)
		}
	}

	// rawTrainingStats mirrors the aggregate SELECT below. RegionID is a
	// pointer so rows with a NULL region_id scan cleanly (grouped under 0).
	type rawTrainingStats struct {
		RegionID         *uint32
		Count            int64
		StartedCount     int64
		EndedCount       int64
		CompletedCount   int64
		InProgressCount  int64
		TotalDurationMs  int64
		FirstPublishedAt *int64
		LastPublishedAt  *int64
	}

	var rawResults []rawTrainingStats
	// Duration only counts completed sessions with ended_at >= started_at,
	// so clock-skewed (negative) spans contribute 0 rather than skewing sums.
	// NOTE(review): `query` is reused by three more finishers below without
	// gorm.Session{} — relies on GORM clause-merge behavior; confirm no
	// clause pollution across the four queries.
	err := query.Select(`
		region_id,
		COUNT(*) as count,
		COALESCE(SUM(CASE WHEN started_at IS NOT NULL THEN 1 ELSE 0 END), 0) as started_count,
		COALESCE(SUM(CASE WHEN ended_at IS NOT NULL THEN 1 ELSE 0 END), 0) as ended_count,
		COALESCE(SUM(CASE WHEN started_at IS NOT NULL AND ended_at IS NOT NULL THEN 1 ELSE 0 END), 0) as completed_count,
		COALESCE(SUM(CASE WHEN started_at IS NOT NULL AND ended_at IS NULL THEN 1 ELSE 0 END), 0) as in_progress_count,
		COALESCE(SUM(CASE WHEN started_at IS NOT NULL AND ended_at IS NOT NULL AND ended_at >= started_at THEN ended_at - started_at ELSE 0 END), 0) as total_duration_ms,
		MIN(published_at) as first_published_at,
		MAX(published_at) as last_published_at
	`).Group("region_id").Scan(&rawResults).Error

	if err != nil {
		writeError(c, http.StatusInternalServerError, "failed to query training session statistics")
		return
	}

	// Per-region breakdown of record counts by event_type.
	type trainingEventTypeCount struct {
		RegionID  *uint32
		EventType string
		Count     int64
	}
	var eventTypeResults []trainingEventTypeCount
	if err := query.Select("region_id, event_type, COUNT(*) as count").Group("region_id, event_type").Scan(&eventTypeResults).Error; err != nil {
		writeError(c, http.StatusInternalServerError, "failed to query training session event type statistics")
		return
	}

	// Per-region breakdown of record counts by app_name.
	type trainingAppNameCount struct {
		RegionID *uint32
		AppName  string
		Count    int64
	}
	var appNameResults []trainingAppNameCount
	if err := query.Select("region_id, app_name, COUNT(*) as count").Group("region_id, app_name").Scan(&appNameResults).Error; err != nil {
		writeError(c, http.StatusInternalServerError, "failed to query training session app name statistics")
		return
	}

	// Per-region breakdown of record counts by flavor_type.
	type trainingFlavorTypeCount struct {
		RegionID   *uint32
		FlavorType string
		Count      int64
	}
	var flavorTypeResults []trainingFlavorTypeCount
	if err := query.Select("region_id, flavor_type, COUNT(*) as count").Group("region_id, flavor_type").Scan(&flavorTypeResults).Error; err != nil {
		writeError(c, http.StatusInternalServerError, "failed to query training session flavor type statistics")
		return
	}

	// Index event-type counts by region id (NULL region -> 0).
	eventTypeMap := make(map[uint32]map[string]int64)
	for _, r := range eventTypeResults {
		regionID := uint32(0)
		if r.RegionID != nil {
			regionID = *r.RegionID
		}
		if eventTypeMap[regionID] == nil {
			eventTypeMap[regionID] = make(map[string]int64)
		}
		eventTypeMap[regionID][r.EventType] = r.Count
	}

	// Index app-name counts by region id (NULL region -> 0).
	appNameMap := make(map[uint32]map[string]int64)
	for _, r := range appNameResults {
		regionID := uint32(0)
		if r.RegionID != nil {
			regionID = *r.RegionID
		}
		if appNameMap[regionID] == nil {
			appNameMap[regionID] = make(map[string]int64)
		}
		appNameMap[regionID][r.AppName] = r.Count
	}

	// Index flavor-type counts by region id (NULL region -> 0).
	flavorTypeMap := make(map[uint32]map[string]int64)
	for _, r := range flavorTypeResults {
		regionID := uint32(0)
		if r.RegionID != nil {
			regionID = *r.RegionID
		}
		if flavorTypeMap[regionID] == nil {
			flavorTypeMap[regionID] = make(map[string]int64)
		}
		flavorTypeMap[regionID][r.FlavorType] = r.Count
	}

	// Collect all region ids to look up kindergarten names.
	regionIDs := make([]uint32, 0, len(rawResults))
	for _, r := range rawResults {
		if r.RegionID != nil && *r.RegionID > 0 {
			regionIDs = append(regionIDs, *r.RegionID)
		}
	}
	kindergartenMap := make(map[uint32]string)
	if len(regionIDs) > 0 {
		var kindergartens []models.Kindergarten
		// Name lookup is best-effort: on error the names are simply left empty.
		if err := sc.DB.Where("region_id IN ?", regionIDs).Find(&kindergartens).Error; err == nil {
			for _, k := range kindergartens {
				kindergartenMap[k.RegionID] = k.Name
			}
		}
	}

	// Accumulate a grand total alongside the per-region items.
	overall := trainingSessionRegionStatisticsItem{
		EventTypeCounts:  make(map[string]int64),
		AppNameCounts:    make(map[string]int64),
		FlavorTypeCounts: make(map[string]int64),
	}
	regions := make(map[string]trainingSessionRegionStatisticsItem, len(rawResults))

	for _, r := range rawResults {
		regionID := uint32(0)
		if r.RegionID != nil {
			regionID = *r.RegionID
		}
		// Average duration is per completed session, not per record.
		avgDuration := float64(0)
		if r.CompletedCount > 0 {
			avgDuration = float64(r.TotalDurationMs) / float64(r.CompletedCount)
		}
		kgName := ""
		if regionID > 0 {
			kgName = kindergartenMap[regionID]
		}

		// published_at is stored as epoch milliseconds.
		var firstPublishedAt, lastPublishedAt *time.Time
		if r.FirstPublishedAt != nil {
			t := time.UnixMilli(*r.FirstPublishedAt)
			firstPublishedAt = &t
		}
		if r.LastPublishedAt != nil {
			t := time.UnixMilli(*r.LastPublishedAt)
			lastPublishedAt = &t
		}

		item := trainingSessionRegionStatisticsItem{
			RegionID:         regionID,
			KindergartenName: kgName,
			Count:            r.Count,
			StartedCount:     r.StartedCount,
			EndedCount:       r.EndedCount,
			CompletedCount:   r.CompletedCount,
			InProgressCount:  r.InProgressCount,
			TotalDurationMs:  r.TotalDurationMs,
			AvgDurationMs:    avgDuration,
			EventTypeCounts:  eventTypeMap[regionID],
			AppNameCounts:    appNameMap[regionID],
			FlavorTypeCounts: flavorTypeMap[regionID],
			FirstPublishedAt: firstPublishedAt,
			LastPublishedAt:  lastPublishedAt,
		}

		regions[strconv.FormatUint(uint64(regionID), 10)] = item

		overall.Count += r.Count
		overall.StartedCount += r.StartedCount
		overall.EndedCount += r.EndedCount
		overall.CompletedCount += r.CompletedCount
		overall.InProgressCount += r.InProgressCount
		overall.TotalDurationMs += r.TotalDurationMs

		// Overall first/last published span covers all regions.
		if firstPublishedAt != nil {
			if overall.FirstPublishedAt == nil || firstPublishedAt.Before(*overall.FirstPublishedAt) {
				overall.FirstPublishedAt = firstPublishedAt
			}
		}
		if lastPublishedAt != nil {
			if overall.LastPublishedAt == nil || lastPublishedAt.After(*overall.LastPublishedAt) {
				overall.LastPublishedAt = lastPublishedAt
			}
		}
	}

	// Fold the per-region type counts into the overall totals.
	for _, r := range eventTypeResults {
		overall.EventTypeCounts[r.EventType] += r.Count
	}
	for _, r := range appNameResults {
		overall.AppNameCounts[r.AppName] += r.Count
	}
	for _, r := range flavorTypeResults {
		overall.FlavorTypeCounts[r.FlavorType] += r.Count
	}

	if overall.CompletedCount > 0 {
		overall.AvgDurationMs = float64(overall.TotalDurationMs) / float64(overall.CompletedCount)
	}

	writeSuccess(c, http.StatusOK, "query success", gin.H{
		"overall": overall,
		"regions": regions,
	})
}
|
|
|
|
// TimelineStatistics returns daily time series of AI analysis activity:
// per-day counts, token sums and total cost, both overall and per region.
// Days are derived in SQL from upload_time (epoch milliseconds) via
// DATE(TO_TIMESTAMP(...)), i.e. bucketed in the database server's time zone.
// Optional query filters: regionId, and an inclusive [startTime, endTime]
// window on upload_time. Unparseable filter values are silently ignored.
//
// Response: 200 with {overall: []timelineItem sorted by date, regions:
// {regionId: {name, items}}}; 500 on database error.
func (sc *StatisticsController) TimelineStatistics(c *gin.Context) {
	regionIDStr := c.Query("regionId")
	startTimeStr := c.Query("startTime")
	endTimeStr := c.Query("endTime")

	// Build the filtered base query.
	query := sc.DB.Model(&models.AIAnalysisRecord{})
	if regionIDStr != "" {
		if regionID, err := strconv.ParseUint(regionIDStr, 10, 32); err == nil {
			query = query.Where("region_id = ?", uint32(regionID))
		}
	}
	if startTimeStr != "" {
		if startTime, err := strconv.ParseInt(startTimeStr, 10, 64); err == nil {
			query = query.Where("upload_time >= ?", startTime)
		}
	}
	if endTimeStr != "" {
		if endTime, err := strconv.ParseInt(endTimeStr, 10, 64); err == nil {
			query = query.Where("upload_time <= ?", endTime)
		}
	}

	// timelineItem is one day's aggregate as returned to the client.
	type timelineItem struct {
		Date            string  `json:"date"`
		Count           int64   `json:"count"`
		InputTokens     int64   `json:"inputTokens"`
		OutputTokens    int64   `json:"outputTokens"`
		CacheHitTokens  int64   `json:"cacheHitTokens"`
		CacheMissTokens int64   `json:"cacheMissTokens"`
		TotalCost       float64 `json:"totalCost"`
	}

	// rawRegionTimeline mirrors the SELECT below: one row per (region, day).
	// RegionID is a pointer so NULL region_id rows scan cleanly (grouped as 0).
	type rawRegionTimeline struct {
		RegionID        *uint32
		Date            string
		Count           int64
		InputTokens     int64
		OutputTokens    int64
		CacheHitTokens  int64
		CacheMissTokens int64
		TotalCost       float64
	}

	var rawResults []rawRegionTimeline
	// PostgreSQL-specific SQL (TO_TIMESTAMP); upload_time is epoch millis,
	// hence the / 1000.0 before converting to a timestamp.
	err := query.Select(`
		region_id,
		DATE(TO_TIMESTAMP(upload_time / 1000.0)) as date,
		COUNT(*) as count,
		COALESCE(SUM(input_tokens), 0) as input_tokens,
		COALESCE(SUM(output_tokens), 0) as output_tokens,
		COALESCE(SUM(cache_hit_tokens), 0) as cache_hit_tokens,
		COALESCE(SUM(cache_miss_tokens), 0) as cache_miss_tokens,
		COALESCE(SUM(total_cost), 0) as total_cost
	`).Group("region_id, DATE(TO_TIMESTAMP(upload_time / 1000.0))").Order("region_id, date ASC").Scan(&rawResults).Error

	if err != nil {
		writeError(c, http.StatusInternalServerError, "failed to query timeline statistics")
		return
	}

	// overallMap accumulates all regions' rows into one series keyed by date;
	// regionItemsMap keeps the per-region series (rows arrive date-ascending
	// per region thanks to the ORDER BY above).
	overallMap := make(map[string]*timelineItem)
	regionItemsMap := make(map[string][]timelineItem)
	regionIDs := make([]uint32, 0)
	regionIDSet := make(map[uint32]struct{})

	for _, r := range rawResults {
		if overallMap[r.Date] == nil {
			overallMap[r.Date] = &timelineItem{Date: r.Date}
		}
		overallMap[r.Date].Count += r.Count
		overallMap[r.Date].InputTokens += r.InputTokens
		overallMap[r.Date].OutputTokens += r.OutputTokens
		overallMap[r.Date].CacheHitTokens += r.CacheHitTokens
		overallMap[r.Date].CacheMissTokens += r.CacheMissTokens
		overallMap[r.Date].TotalCost += r.TotalCost

		regionID := uint32(0)
		if r.RegionID != nil {
			regionID = *r.RegionID
		}
		// NOTE(review): this regionIDStr shadows the query-parameter variable
		// of the same name from the top of the function — harmless here, but
		// worth renaming to avoid confusion.
		regionIDStr := strconv.FormatUint(uint64(regionID), 10)
		regionItemsMap[regionIDStr] = append(regionItemsMap[regionIDStr], timelineItem{
			Date:            r.Date,
			Count:           r.Count,
			InputTokens:     r.InputTokens,
			OutputTokens:    r.OutputTokens,
			CacheHitTokens:  r.CacheHitTokens,
			CacheMissTokens: r.CacheMissTokens,
			TotalCost:       r.TotalCost,
		})
		// Track distinct non-zero region ids for the name lookup below.
		if _, ok := regionIDSet[regionID]; !ok && regionID > 0 {
			regionIDSet[regionID] = struct{}{}
			regionIDs = append(regionIDs, regionID)
		}
	}

	// Look up kindergarten names (best-effort: left empty on error).
	kindergartenMap := make(map[uint32]string)
	if len(regionIDs) > 0 {
		var kindergartens []models.Kindergarten
		if err := sc.DB.Where("region_id IN ?", regionIDs).Find(&kindergartens).Error; err == nil {
			for _, k := range kindergartens {
				kindergartenMap[k.RegionID] = k.Name
			}
		}
	}

	// regionTimeline pairs a region's display name with its daily series.
	type regionTimeline struct {
		Name  string         `json:"name"`
		Items []timelineItem `json:"items"`
	}
	regionsMap := make(map[string]regionTimeline)
	for regionIDStr, items := range regionItemsMap {
		name := ""
		if regionID, err := strconv.ParseUint(regionIDStr, 10, 32); err == nil && regionID > 0 {
			name = kindergartenMap[uint32(regionID)]
		}
		regionsMap[regionIDStr] = regionTimeline{
			Name:  name,
			Items: items,
		}
	}

	// Flatten the overall map and sort by date (map iteration is unordered;
	// the "YYYY-MM-DD" string format sorts chronologically).
	var overall []timelineItem
	for _, item := range overallMap {
		overall = append(overall, *item)
	}
	sort.Slice(overall, func(i, j int) bool {
		return overall[i].Date < overall[j].Date
	})

	writeSuccess(c, http.StatusOK, "query success", gin.H{
		"overall": overall,
		"regions": regionsMap,
	})
}
|