2025-12-18 13:50:39 +08:00
package repository
import (
"context"
2025-12-29 10:03:27 +08:00
"database/sql"
"errors"
"fmt"
"strings"
2025-12-25 20:52:47 +08:00
"time"
2025-12-29 10:03:27 +08:00
dbent "github.com/Wei-Shaw/sub2api/ent"
dbaccount "github.com/Wei-Shaw/sub2api/ent/account"
dbapikey "github.com/Wei-Shaw/sub2api/ent/apikey"
dbgroup "github.com/Wei-Shaw/sub2api/ent/group"
dbuser "github.com/Wei-Shaw/sub2api/ent/user"
dbusersub "github.com/Wei-Shaw/sub2api/ent/usersubscription"
2025-12-24 21:07:21 +08:00
"github.com/Wei-Shaw/sub2api/internal/pkg/pagination"
"github.com/Wei-Shaw/sub2api/internal/pkg/timezone"
"github.com/Wei-Shaw/sub2api/internal/pkg/usagestats"
2025-12-29 10:03:27 +08:00
"github.com/Wei-Shaw/sub2api/internal/service"
"github.com/lib/pq"
2025-12-18 13:50:39 +08:00
)
2025-12-29 10:03:27 +08:00
// usageLogSelectColumns is the canonical usage_logs column list for SELECTs;
// its order must match the Scan destinations in scanUsageLog.
const usageLogSelectColumns = "id, user_id, api_key_id, account_id, request_id, model, group_id, subscription_id, input_tokens, output_tokens, cache_creation_tokens, cache_read_tokens, cache_creation_5m_tokens, cache_creation_1h_tokens, input_cost, output_cost, cache_creation_cost, cache_read_cost, total_cost, actual_cost, rate_multiplier, billing_type, stream, duration_ms, first_token_ms, created_at"
2025-12-25 20:52:47 +08:00
// usageLogRepository persists and queries usage_logs rows. It holds the ent
// client plus a raw sqlExecutor backing the hand-written SQL queries below.
type usageLogRepository struct {
	client *dbent.Client // ent client (typed access)
	sql    sqlExecutor   // raw SQL executor used by the queries in this file
}
2025-12-29 10:03:27 +08:00
// NewUsageLogRepository builds the service.UsageLogRepository backed by the
// given ent client and raw *sql.DB.
func NewUsageLogRepository(client *dbent.Client, sqlDB *sql.DB) service.UsageLogRepository {
	repo := newUsageLogRepositoryWithSQL(client, sqlDB)
	return repo
}
func newUsageLogRepositoryWithSQL ( client * dbent . Client , sqlq sqlExecutor ) * usageLogRepository {
return & usageLogRepository { client : client , sql : sqlq }
2025-12-18 13:50:39 +08:00
}
2025-12-24 19:58:33 +08:00
// getPerformanceStats 获取 RPM 和 TPM( 近5分钟平均值, 可选按用户过滤)
2025-12-29 10:03:27 +08:00
func ( r * usageLogRepository ) getPerformanceStats ( ctx context . Context , userID int64 ) ( rpm , tpm int64 , err error ) {
2025-12-24 19:58:33 +08:00
fiveMinutesAgo := time . Now ( ) . Add ( - 5 * time . Minute )
2025-12-29 10:03:27 +08:00
query := `
SELECT
2025-12-24 10:24:02 +08:00
COUNT ( * ) as request_count ,
COALESCE ( SUM ( input_tokens + output_tokens ) , 0 ) as token_count
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE created_at >= $ 1 `
args := [ ] any { fiveMinutesAgo }
2025-12-24 10:24:02 +08:00
if userID > 0 {
2025-12-29 10:03:27 +08:00
query += " AND user_id = $2"
args = append ( args , userID )
2025-12-24 10:24:02 +08:00
}
2025-12-29 10:03:27 +08:00
var requestCount int64
var tokenCount int64
if err := r . sql . QueryRowContext ( ctx , query , args ... ) . Scan ( & requestCount , & tokenCount ) ; err != nil {
return 0 , 0 , err
}
return requestCount / 5 , tokenCount / 5 , nil
2025-12-24 10:24:02 +08:00
}
2025-12-26 15:40:24 +08:00
func ( r * usageLogRepository ) Create ( ctx context . Context , log * service . UsageLog ) error {
2025-12-29 10:03:27 +08:00
if log == nil {
return nil
2025-12-26 15:40:24 +08:00
}
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
createdAt := log . CreatedAt
if createdAt . IsZero ( ) {
createdAt = time . Now ( )
2025-12-18 13:50:39 +08:00
}
2025-12-29 10:03:27 +08:00
rateMultiplier := log . RateMultiplier
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
query := `
INSERT INTO usage_logs (
user_id ,
api_key_id ,
account_id ,
request_id ,
model ,
group_id ,
subscription_id ,
input_tokens ,
output_tokens ,
cache_creation_tokens ,
cache_read_tokens ,
cache_creation_5m_tokens ,
cache_creation_1h_tokens ,
input_cost ,
output_cost ,
cache_creation_cost ,
cache_read_cost ,
total_cost ,
actual_cost ,
rate_multiplier ,
billing_type ,
stream ,
duration_ms ,
first_token_ms ,
created_at
) VALUES (
$ 1 , $ 2 , $ 3 , $ 4 , $ 5 ,
$ 6 , $ 7 ,
$ 8 , $ 9 , $ 10 , $ 11 ,
$ 12 , $ 13 ,
$ 14 , $ 15 , $ 16 , $ 17 , $ 18 , $ 19 ,
$ 20 , $ 21 , $ 22 , $ 23 , $ 24 , $ 25
)
RETURNING id , created_at
`
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
groupID := nullInt64 ( log . GroupID )
subscriptionID := nullInt64 ( log . SubscriptionID )
duration := nullInt ( log . DurationMs )
firstToken := nullInt ( log . FirstTokenMs )
row := r . sql . QueryRowContext (
ctx ,
query ,
log . UserID ,
log . ApiKeyID ,
log . AccountID ,
log . RequestID ,
log . Model ,
groupID ,
subscriptionID ,
log . InputTokens ,
log . OutputTokens ,
log . CacheCreationTokens ,
log . CacheReadTokens ,
log . CacheCreation5mTokens ,
log . CacheCreation1hTokens ,
log . InputCost ,
log . OutputCost ,
log . CacheCreationCost ,
log . CacheReadCost ,
log . TotalCost ,
log . ActualCost ,
rateMultiplier ,
log . BillingType ,
log . Stream ,
duration ,
firstToken ,
createdAt ,
)
if err := row . Scan ( & log . ID , & log . CreatedAt ) ; err != nil {
return err
}
log . RateMultiplier = rateMultiplier
return nil
2025-12-18 13:50:39 +08:00
}
2025-12-29 10:03:27 +08:00
func ( r * usageLogRepository ) GetByID ( ctx context . Context , id int64 ) ( * service . UsageLog , error ) {
query := "SELECT " + usageLogSelectColumns + " FROM usage_logs WHERE id = $1"
log , err := scanUsageLog ( r . sql . QueryRowContext ( ctx , query , id ) )
if err != nil {
if errors . Is ( err , sql . ErrNoRows ) {
return nil , service . ErrUsageLogNotFound
}
return nil , err
2025-12-18 13:50:39 +08:00
}
2025-12-29 10:03:27 +08:00
return log , nil
}
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
// ListByUser returns a page of the user's usage logs.
func (r *usageLogRepository) ListByUser(ctx context.Context, userID int64, params pagination.PaginationParams) ([]service.UsageLog, *pagination.PaginationResult, error) {
	filter := "WHERE user_id = $1"
	return r.listUsageLogsWithPagination(ctx, filter, []any{userID}, params)
}
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
func ( r * usageLogRepository ) ListByApiKey ( ctx context . Context , apiKeyID int64 , params pagination . PaginationParams ) ( [ ] service . UsageLog , * pagination . PaginationResult , error ) {
return r . listUsageLogsWithPagination ( ctx , "WHERE api_key_id = $1" , [ ] any { apiKeyID } , params )
2025-12-18 13:50:39 +08:00
}
// UserStats summarizes a user's usage over a time range (see GetUserStats).
type UserStats struct {
	TotalRequests   int64   `json:"total_requests"`    // number of usage_logs rows
	TotalTokens     int64   `json:"total_tokens"`      // input + output + cache creation + cache read
	TotalCost       float64 `json:"total_cost"`        // summed actual_cost
	InputTokens     int64   `json:"input_tokens"`
	OutputTokens    int64   `json:"output_tokens"`
	CacheReadTokens int64   `json:"cache_read_tokens"`
}
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetUserStats ( ctx context . Context , userID int64 , startTime , endTime time . Time ) ( * UserStats , error ) {
2025-12-29 10:03:27 +08:00
query := `
SELECT
2025-12-18 13:50:39 +08:00
COUNT ( * ) as total_requests ,
COALESCE ( SUM ( input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens ) , 0 ) as total_tokens ,
COALESCE ( SUM ( actual_cost ) , 0 ) as total_cost ,
COALESCE ( SUM ( input_tokens ) , 0 ) as input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as output_tokens ,
COALESCE ( SUM ( cache_read_tokens ) , 0 ) as cache_read_tokens
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE user_id = $ 1 AND created_at >= $ 2 AND created_at < $ 3
`
stats := & UserStats { }
if err := r . sql . QueryRowContext ( ctx , query , userID , startTime , endTime ) .
Scan ( & stats . TotalRequests , & stats . TotalTokens , & stats . TotalCost , & stats . InputTokens , & stats . OutputTokens , & stats . CacheReadTokens ) ; err != nil {
return nil , err
}
return stats , nil
2025-12-18 13:50:39 +08:00
}
// DashboardStats holds the admin dashboard aggregates; aliased from the
// shared usagestats package.
type DashboardStats = usagestats.DashboardStats
2025-12-18 13:50:39 +08:00
2025-12-25 20:52:47 +08:00
// GetDashboardStats assembles the admin dashboard snapshot: user / API key /
// account counts, lifetime and today token+cost totals, and current RPM/TPM.
// It runs five sequential aggregate queries plus getPerformanceStats.
func (r *usageLogRepository) GetDashboardStats(ctx context.Context) (*DashboardStats, error) {
	var stats DashboardStats
	today := timezone.Today()
	now := time.Now()

	// Combined user stats: total, created today, and active (distinct users
	// with usage_logs since midnight, via the correlated subquery).
	userStatsQuery := `
		SELECT
			COUNT(*) as total_users,
			COUNT(CASE WHEN created_at >= $1 THEN 1 END) as today_new_users,
			(SELECT COUNT(DISTINCT user_id) FROM usage_logs WHERE created_at >= $2) as active_users
		FROM users
		WHERE deleted_at IS NULL
	`
	if err := r.sql.QueryRowContext(ctx, userStatsQuery, today, today).
		Scan(&stats.TotalUsers, &stats.TodayNewUsers, &stats.ActiveUsers); err != nil {
		return nil, err
	}

	// Combined API key stats: total and currently-active keys in one query.
	apiKeyStatsQuery := `
		SELECT
			COUNT(*) as total_api_keys,
			COUNT(CASE WHEN status = $1 THEN 1 END) as active_api_keys
		FROM api_keys
		WHERE deleted_at IS NULL
	`
	if err := r.sql.QueryRowContext(ctx, apiKeyStatsQuery, service.StatusActive).
		Scan(&stats.TotalApiKeys, &stats.ActiveApiKeys); err != nil {
		return nil, err
	}

	// Combined account stats: counts by health state. A single account can be
	// counted in more than one bucket (e.g. active+schedulable and rate-limited).
	accountStatsQuery := `
		SELECT
			COUNT(*) as total_accounts,
			COUNT(CASE WHEN status = $1 AND schedulable = true THEN 1 END) as normal_accounts,
			COUNT(CASE WHEN status = $2 THEN 1 END) as error_accounts,
			COUNT(CASE WHEN rate_limited_at IS NOT NULL AND rate_limit_reset_at > $3 THEN 1 END) as ratelimit_accounts,
			COUNT(CASE WHEN overload_until IS NOT NULL AND overload_until > $4 THEN 1 END) as overload_accounts
		FROM accounts
		WHERE deleted_at IS NULL
	`
	if err := r.sql.QueryRowContext(ctx, accountStatsQuery, service.StatusActive, service.StatusError, now, now).
		Scan(&stats.TotalAccounts, &stats.NormalAccounts, &stats.ErrorAccounts, &stats.RateLimitAccounts, &stats.OverloadAccounts); err != nil {
		return nil, err
	}

	// Lifetime token / cost totals across all usage_logs.
	totalStatsQuery := `
		SELECT
			COUNT(*) as total_requests,
			COALESCE(SUM(input_tokens), 0) as total_input_tokens,
			COALESCE(SUM(output_tokens), 0) as total_output_tokens,
			COALESCE(SUM(cache_creation_tokens), 0) as total_cache_creation_tokens,
			COALESCE(SUM(cache_read_tokens), 0) as total_cache_read_tokens,
			COALESCE(SUM(total_cost), 0) as total_cost,
			COALESCE(SUM(actual_cost), 0) as total_actual_cost,
			COALESCE(AVG(duration_ms), 0) as avg_duration_ms
		FROM usage_logs
	`
	if err := r.sql.QueryRowContext(ctx, totalStatsQuery).
		Scan(
			&stats.TotalRequests,
			&stats.TotalInputTokens,
			&stats.TotalOutputTokens,
			&stats.TotalCacheCreationTokens,
			&stats.TotalCacheReadTokens,
			&stats.TotalCost,
			&stats.TotalActualCost,
			&stats.AverageDurationMs,
		); err != nil {
		return nil, err
	}
	stats.TotalTokens = stats.TotalInputTokens + stats.TotalOutputTokens + stats.TotalCacheCreationTokens + stats.TotalCacheReadTokens

	// Today token / cost totals (rows created since local midnight).
	todayStatsQuery := `
		SELECT
			COUNT(*) as today_requests,
			COALESCE(SUM(input_tokens), 0) as today_input_tokens,
			COALESCE(SUM(output_tokens), 0) as today_output_tokens,
			COALESCE(SUM(cache_creation_tokens), 0) as today_cache_creation_tokens,
			COALESCE(SUM(cache_read_tokens), 0) as today_cache_read_tokens,
			COALESCE(SUM(total_cost), 0) as today_cost,
			COALESCE(SUM(actual_cost), 0) as today_actual_cost
		FROM usage_logs
		WHERE created_at >= $1
	`
	if err := r.sql.QueryRowContext(ctx, todayStatsQuery, today).
		Scan(
			&stats.TodayRequests,
			&stats.TodayInputTokens,
			&stats.TodayOutputTokens,
			&stats.TodayCacheCreationTokens,
			&stats.TodayCacheReadTokens,
			&stats.TodayCost,
			&stats.TodayActualCost,
		); err != nil {
		return nil, err
	}
	stats.TodayTokens = stats.TodayInputTokens + stats.TodayOutputTokens + stats.TodayCacheCreationTokens + stats.TodayCacheReadTokens

	// Performance: RPM/TPM averaged over the last 5 minutes, global
	// (userID 0 disables the per-user filter in getPerformanceStats).
	rpm, tpm, err := r.getPerformanceStats(ctx, 0)
	if err != nil {
		return nil, err
	}
	stats.Rpm = rpm
	stats.Tpm = tpm

	return &stats, nil
}
2025-12-26 15:40:24 +08:00
func ( r * usageLogRepository ) ListByAccount ( ctx context . Context , accountID int64 , params pagination . PaginationParams ) ( [ ] service . UsageLog , * pagination . PaginationResult , error ) {
2025-12-29 10:03:27 +08:00
return r . listUsageLogsWithPagination ( ctx , "WHERE account_id = $1" , [ ] any { accountID } , params )
2025-12-18 13:50:39 +08:00
}
2025-12-26 15:40:24 +08:00
func ( r * usageLogRepository ) ListByUserAndTimeRange ( ctx context . Context , userID int64 , startTime , endTime time . Time ) ( [ ] service . UsageLog , * pagination . PaginationResult , error ) {
2025-12-29 10:03:27 +08:00
query := "SELECT " + usageLogSelectColumns + " FROM usage_logs WHERE user_id = $1 AND created_at >= $2 AND created_at < $3 ORDER BY id DESC"
logs , err := r . queryUsageLogs ( ctx , query , userID , startTime , endTime )
return logs , nil , err
2025-12-18 13:50:39 +08:00
}
2025-12-27 16:03:37 +08:00
// GetUserStatsAggregated returns aggregated usage statistics for a user using database-level aggregation
func ( r * usageLogRepository ) GetUserStatsAggregated ( ctx context . Context , userID int64 , startTime , endTime time . Time ) ( * usagestats . UsageStats , error ) {
2025-12-29 10:03:27 +08:00
query := `
SELECT
2025-12-27 16:03:37 +08:00
COUNT ( * ) as total_requests ,
COALESCE ( SUM ( input_tokens ) , 0 ) as total_input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as total_output_tokens ,
COALESCE ( SUM ( cache_creation_tokens + cache_read_tokens ) , 0 ) as total_cache_tokens ,
COALESCE ( SUM ( total_cost ) , 0 ) as total_cost ,
COALESCE ( SUM ( actual_cost ) , 0 ) as total_actual_cost ,
COALESCE ( AVG ( COALESCE ( duration_ms , 0 ) ) , 0 ) as avg_duration_ms
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE user_id = $ 1 AND created_at >= $ 2 AND created_at < $ 3
`
2025-12-27 16:03:37 +08:00
2025-12-29 10:03:27 +08:00
var stats usagestats . UsageStats
if err := r . sql . QueryRowContext ( ctx , query , userID , startTime , endTime ) .
Scan (
& stats . TotalRequests ,
& stats . TotalInputTokens ,
& stats . TotalOutputTokens ,
& stats . TotalCacheTokens ,
& stats . TotalCost ,
& stats . TotalActualCost ,
& stats . AverageDurationMs ,
) ; err != nil {
2025-12-27 16:03:37 +08:00
return nil , err
}
2025-12-29 10:03:27 +08:00
stats . TotalTokens = stats . TotalInputTokens + stats . TotalOutputTokens + stats . TotalCacheTokens
return & stats , nil
2025-12-27 16:03:37 +08:00
}
// GetApiKeyStatsAggregated returns aggregated usage statistics for an API key using database-level aggregation
func ( r * usageLogRepository ) GetApiKeyStatsAggregated ( ctx context . Context , apiKeyID int64 , startTime , endTime time . Time ) ( * usagestats . UsageStats , error ) {
2025-12-29 10:03:27 +08:00
query := `
SELECT
2025-12-27 16:03:37 +08:00
COUNT ( * ) as total_requests ,
COALESCE ( SUM ( input_tokens ) , 0 ) as total_input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as total_output_tokens ,
COALESCE ( SUM ( cache_creation_tokens + cache_read_tokens ) , 0 ) as total_cache_tokens ,
COALESCE ( SUM ( total_cost ) , 0 ) as total_cost ,
COALESCE ( SUM ( actual_cost ) , 0 ) as total_actual_cost ,
COALESCE ( AVG ( COALESCE ( duration_ms , 0 ) ) , 0 ) as avg_duration_ms
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE api_key_id = $ 1 AND created_at >= $ 2 AND created_at < $ 3
`
2025-12-27 16:03:37 +08:00
2025-12-29 10:03:27 +08:00
var stats usagestats . UsageStats
if err := r . sql . QueryRowContext ( ctx , query , apiKeyID , startTime , endTime ) .
Scan (
& stats . TotalRequests ,
& stats . TotalInputTokens ,
& stats . TotalOutputTokens ,
& stats . TotalCacheTokens ,
& stats . TotalCost ,
& stats . TotalActualCost ,
& stats . AverageDurationMs ,
) ; err != nil {
2025-12-27 16:03:37 +08:00
return nil , err
}
2025-12-29 10:03:27 +08:00
stats . TotalTokens = stats . TotalInputTokens + stats . TotalOutputTokens + stats . TotalCacheTokens
return & stats , nil
2025-12-27 16:03:37 +08:00
}
2025-12-26 15:40:24 +08:00
func ( r * usageLogRepository ) ListByApiKeyAndTimeRange ( ctx context . Context , apiKeyID int64 , startTime , endTime time . Time ) ( [ ] service . UsageLog , * pagination . PaginationResult , error ) {
2025-12-29 10:03:27 +08:00
query := "SELECT " + usageLogSelectColumns + " FROM usage_logs WHERE api_key_id = $1 AND created_at >= $2 AND created_at < $3 ORDER BY id DESC"
logs , err := r . queryUsageLogs ( ctx , query , apiKeyID , startTime , endTime )
return logs , nil , err
2025-12-18 13:50:39 +08:00
}
2025-12-26 15:40:24 +08:00
func ( r * usageLogRepository ) ListByAccountAndTimeRange ( ctx context . Context , accountID int64 , startTime , endTime time . Time ) ( [ ] service . UsageLog , * pagination . PaginationResult , error ) {
2025-12-29 10:03:27 +08:00
query := "SELECT " + usageLogSelectColumns + " FROM usage_logs WHERE account_id = $1 AND created_at >= $2 AND created_at < $3 ORDER BY id DESC"
logs , err := r . queryUsageLogs ( ctx , query , accountID , startTime , endTime )
return logs , nil , err
2025-12-18 13:50:39 +08:00
}
2025-12-26 15:40:24 +08:00
func ( r * usageLogRepository ) ListByModelAndTimeRange ( ctx context . Context , modelName string , startTime , endTime time . Time ) ( [ ] service . UsageLog , * pagination . PaginationResult , error ) {
2025-12-29 10:03:27 +08:00
query := "SELECT " + usageLogSelectColumns + " FROM usage_logs WHERE model = $1 AND created_at >= $2 AND created_at < $3 ORDER BY id DESC"
logs , err := r . queryUsageLogs ( ctx , query , modelName , startTime , endTime )
return logs , nil , err
2025-12-18 13:50:39 +08:00
}
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) Delete ( ctx context . Context , id int64 ) error {
2025-12-29 10:03:27 +08:00
_ , err := r . sql . ExecContext ( ctx , "DELETE FROM usage_logs WHERE id = $1" , id )
return err
2025-12-18 13:50:39 +08:00
}
// GetAccountTodayStats 获取账号今日统计
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetAccountTodayStats ( ctx context . Context , accountID int64 ) ( * usagestats . AccountStats , error ) {
2025-12-18 13:50:39 +08:00
today := timezone . Today ( )
2025-12-29 10:03:27 +08:00
query := `
SELECT
2025-12-18 13:50:39 +08:00
COUNT ( * ) as requests ,
COALESCE ( SUM ( input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens ) , 0 ) as tokens ,
COALESCE ( SUM ( actual_cost ) , 0 ) as cost
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE account_id = $ 1 AND created_at >= $ 2
`
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
stats := & usagestats . AccountStats { }
if err := r . sql . QueryRowContext ( ctx , query , accountID , today ) .
Scan ( & stats . Requests , & stats . Tokens , & stats . Cost ) ; err != nil {
2025-12-18 13:50:39 +08:00
return nil , err
}
2025-12-29 10:03:27 +08:00
return stats , nil
2025-12-18 13:50:39 +08:00
}
// GetAccountWindowStats 获取账号时间窗口内的统计
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetAccountWindowStats ( ctx context . Context , accountID int64 , startTime time . Time ) ( * usagestats . AccountStats , error ) {
2025-12-29 10:03:27 +08:00
query := `
SELECT
2025-12-18 13:50:39 +08:00
COUNT ( * ) as requests ,
COALESCE ( SUM ( input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens ) , 0 ) as tokens ,
COALESCE ( SUM ( actual_cost ) , 0 ) as cost
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE account_id = $ 1 AND created_at >= $ 2
`
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
stats := & usagestats . AccountStats { }
if err := r . sql . QueryRowContext ( ctx , query , accountID , startTime ) .
Scan ( & stats . Requests , & stats . Tokens , & stats . Cost ) ; err != nil {
2025-12-18 13:50:39 +08:00
return nil , err
}
2025-12-29 10:03:27 +08:00
return stats , nil
2025-12-18 13:50:39 +08:00
}
// TrendDataPoint represents a single point in trend data.
type TrendDataPoint = usagestats.TrendDataPoint

// ModelStat represents usage statistics for a single model.
type ModelStat = usagestats.ModelStat

// UserUsageTrendPoint represents a user usage trend data point.
type UserUsageTrendPoint = usagestats.UserUsageTrendPoint

// ApiKeyUsageTrendPoint represents an API key usage trend data point.
type ApiKeyUsageTrendPoint = usagestats.ApiKeyUsageTrendPoint
2025-12-18 13:50:39 +08:00
// GetApiKeyUsageTrend returns usage trend data grouped by API key and date
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetApiKeyUsageTrend ( ctx context . Context , startTime , endTime time . Time , granularity string , limit int ) ( [ ] ApiKeyUsageTrendPoint , error ) {
2025-12-29 10:03:27 +08:00
dateFormat := "YYYY-MM-DD"
2025-12-18 13:50:39 +08:00
if granularity == "hour" {
dateFormat = "YYYY-MM-DD HH24:00"
}
2025-12-29 10:03:27 +08:00
query := fmt . Sprintf ( `
2025-12-18 13:50:39 +08:00
WITH top_keys AS (
SELECT api_key_id
FROM usage_logs
2025-12-29 10:03:27 +08:00
WHERE created_at >= $ 1 AND created_at < $ 2
2025-12-18 13:50:39 +08:00
GROUP BY api_key_id
ORDER BY SUM ( input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens ) DESC
2025-12-29 10:03:27 +08:00
LIMIT $ 3
2025-12-18 13:50:39 +08:00
)
SELECT
2025-12-29 10:03:27 +08:00
TO_CHAR ( u . created_at , ' % s ' ) as date ,
2025-12-18 13:50:39 +08:00
u . api_key_id ,
COALESCE ( k . name , ' ' ) as key_name ,
COUNT ( * ) as requests ,
COALESCE ( SUM ( u . input_tokens + u . output_tokens + u . cache_creation_tokens + u . cache_read_tokens ) , 0 ) as tokens
FROM usage_logs u
LEFT JOIN api_keys k ON u . api_key_id = k . id
WHERE u . api_key_id IN ( SELECT api_key_id FROM top_keys )
2025-12-29 10:03:27 +08:00
AND u . created_at >= $ 4 AND u . created_at < $ 5
2025-12-18 13:50:39 +08:00
GROUP BY date , u . api_key_id , k . name
ORDER BY date ASC , tokens DESC
2025-12-29 10:03:27 +08:00
` , dateFormat )
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
rows , err := r . sql . QueryContext ( ctx , query , startTime , endTime , limit , startTime , endTime )
2025-12-18 13:50:39 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
defer rows . Close ( )
results := make ( [ ] ApiKeyUsageTrendPoint , 0 )
for rows . Next ( ) {
var row ApiKeyUsageTrendPoint
if err := rows . Scan ( & row . Date , & row . ApiKeyID , & row . KeyName , & row . Requests , & row . Tokens ) ; err != nil {
return nil , err
}
results = append ( results , row )
}
if err := rows . Err ( ) ; err != nil {
return nil , err
}
2025-12-18 13:50:39 +08:00
return results , nil
}
// GetUserUsageTrend returns usage trend data grouped by user and date
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetUserUsageTrend ( ctx context . Context , startTime , endTime time . Time , granularity string , limit int ) ( [ ] UserUsageTrendPoint , error ) {
2025-12-29 10:03:27 +08:00
dateFormat := "YYYY-MM-DD"
2025-12-18 13:50:39 +08:00
if granularity == "hour" {
dateFormat = "YYYY-MM-DD HH24:00"
}
2025-12-29 10:03:27 +08:00
query := fmt . Sprintf ( `
2025-12-18 13:50:39 +08:00
WITH top_users AS (
SELECT user_id
FROM usage_logs
2025-12-29 10:03:27 +08:00
WHERE created_at >= $ 1 AND created_at < $ 2
2025-12-18 13:50:39 +08:00
GROUP BY user_id
ORDER BY SUM ( input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens ) DESC
2025-12-29 10:03:27 +08:00
LIMIT $ 3
2025-12-18 13:50:39 +08:00
)
SELECT
2025-12-29 10:03:27 +08:00
TO_CHAR ( u . created_at , ' % s ' ) as date ,
2025-12-18 13:50:39 +08:00
u . user_id ,
COALESCE ( us . email , ' ' ) as email ,
COUNT ( * ) as requests ,
COALESCE ( SUM ( u . input_tokens + u . output_tokens + u . cache_creation_tokens + u . cache_read_tokens ) , 0 ) as tokens ,
COALESCE ( SUM ( u . total_cost ) , 0 ) as cost ,
COALESCE ( SUM ( u . actual_cost ) , 0 ) as actual_cost
FROM usage_logs u
LEFT JOIN users us ON u . user_id = us . id
WHERE u . user_id IN ( SELECT user_id FROM top_users )
2025-12-29 10:03:27 +08:00
AND u . created_at >= $ 4 AND u . created_at < $ 5
2025-12-18 13:50:39 +08:00
GROUP BY date , u . user_id , us . email
ORDER BY date ASC , tokens DESC
2025-12-29 10:03:27 +08:00
` , dateFormat )
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
rows , err := r . sql . QueryContext ( ctx , query , startTime , endTime , limit , startTime , endTime )
2025-12-18 13:50:39 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
defer rows . Close ( )
results := make ( [ ] UserUsageTrendPoint , 0 )
for rows . Next ( ) {
var row UserUsageTrendPoint
if err := rows . Scan ( & row . Date , & row . UserID , & row . Email , & row . Requests , & row . Tokens , & row . Cost , & row . ActualCost ) ; err != nil {
return nil , err
}
results = append ( results , row )
}
if err := rows . Err ( ) ; err != nil {
return nil , err
}
2025-12-18 13:50:39 +08:00
return results , nil
}
// UserDashboardStats holds per-user dashboard aggregates; aliased from the
// shared usagestats package.
type UserDashboardStats = usagestats.UserDashboardStats
2025-12-18 13:50:39 +08:00
// GetUserDashboardStats 获取用户专属的仪表盘统计
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetUserDashboardStats ( ctx context . Context , userID int64 ) ( * UserDashboardStats , error ) {
2025-12-29 10:03:27 +08:00
stats := & UserDashboardStats { }
2025-12-18 13:50:39 +08:00
today := timezone . Today ( )
// API Key 统计
2025-12-29 10:03:27 +08:00
if err := r . sql . QueryRowContext ( ctx , "SELECT COUNT(*) FROM api_keys WHERE user_id = $1 AND deleted_at IS NULL" , userID ) .
Scan ( & stats . TotalApiKeys ) ; err != nil {
return nil , err
}
if err := r . sql . QueryRowContext ( ctx , "SELECT COUNT(*) FROM api_keys WHERE user_id = $1 AND status = $2 AND deleted_at IS NULL" , userID , service . StatusActive ) .
Scan ( & stats . ActiveApiKeys ) ; err != nil {
return nil , err
}
2025-12-18 13:50:39 +08:00
// 累计 Token 统计
2025-12-29 10:03:27 +08:00
totalStatsQuery := `
SELECT
2025-12-18 13:50:39 +08:00
COUNT ( * ) as total_requests ,
COALESCE ( SUM ( input_tokens ) , 0 ) as total_input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as total_output_tokens ,
COALESCE ( SUM ( cache_creation_tokens ) , 0 ) as total_cache_creation_tokens ,
COALESCE ( SUM ( cache_read_tokens ) , 0 ) as total_cache_read_tokens ,
COALESCE ( SUM ( total_cost ) , 0 ) as total_cost ,
COALESCE ( SUM ( actual_cost ) , 0 ) as total_actual_cost ,
COALESCE ( AVG ( duration_ms ) , 0 ) as avg_duration_ms
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE user_id = $ 1
`
if err := r . sql . QueryRowContext ( ctx , totalStatsQuery , userID ) .
Scan (
& stats . TotalRequests ,
& stats . TotalInputTokens ,
& stats . TotalOutputTokens ,
& stats . TotalCacheCreationTokens ,
& stats . TotalCacheReadTokens ,
& stats . TotalCost ,
& stats . TotalActualCost ,
& stats . AverageDurationMs ,
) ; err != nil {
return nil , err
}
2025-12-18 13:50:39 +08:00
stats . TotalTokens = stats . TotalInputTokens + stats . TotalOutputTokens + stats . TotalCacheCreationTokens + stats . TotalCacheReadTokens
// 今日 Token 统计
2025-12-29 10:03:27 +08:00
todayStatsQuery := `
SELECT
2025-12-18 13:50:39 +08:00
COUNT ( * ) as today_requests ,
COALESCE ( SUM ( input_tokens ) , 0 ) as today_input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as today_output_tokens ,
COALESCE ( SUM ( cache_creation_tokens ) , 0 ) as today_cache_creation_tokens ,
COALESCE ( SUM ( cache_read_tokens ) , 0 ) as today_cache_read_tokens ,
COALESCE ( SUM ( total_cost ) , 0 ) as today_cost ,
COALESCE ( SUM ( actual_cost ) , 0 ) as today_actual_cost
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE user_id = $ 1 AND created_at >= $ 2
`
if err := r . sql . QueryRowContext ( ctx , todayStatsQuery , userID , today ) .
Scan (
& stats . TodayRequests ,
& stats . TodayInputTokens ,
& stats . TodayOutputTokens ,
& stats . TodayCacheCreationTokens ,
& stats . TodayCacheReadTokens ,
& stats . TodayCost ,
& stats . TodayActualCost ,
) ; err != nil {
return nil , err
}
2025-12-18 13:50:39 +08:00
stats . TodayTokens = stats . TodayInputTokens + stats . TodayOutputTokens + stats . TodayCacheCreationTokens + stats . TodayCacheReadTokens
2025-12-24 10:24:02 +08:00
// 性能指标: RPM 和 TPM( 最近1分钟, 仅统计该用户的请求)
2025-12-29 10:03:27 +08:00
rpm , tpm , err := r . getPerformanceStats ( ctx , userID )
if err != nil {
return nil , err
}
stats . Rpm = rpm
stats . Tpm = tpm
2025-12-24 10:24:02 +08:00
2025-12-29 10:03:27 +08:00
return stats , nil
2025-12-18 13:50:39 +08:00
}
// GetUserUsageTrendByUserID 获取指定用户的使用趋势
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetUserUsageTrendByUserID ( ctx context . Context , userID int64 , startTime , endTime time . Time , granularity string ) ( [ ] TrendDataPoint , error ) {
2025-12-29 10:03:27 +08:00
dateFormat := "YYYY-MM-DD"
2025-12-18 13:50:39 +08:00
if granularity == "hour" {
dateFormat = "YYYY-MM-DD HH24:00"
}
2025-12-29 10:03:27 +08:00
query := fmt . Sprintf ( `
SELECT
TO_CHAR ( created_at , ' % s ' ) as date ,
2025-12-18 13:50:39 +08:00
COUNT ( * ) as requests ,
COALESCE ( SUM ( input_tokens ) , 0 ) as input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as output_tokens ,
COALESCE ( SUM ( cache_creation_tokens + cache_read_tokens ) , 0 ) as cache_tokens ,
COALESCE ( SUM ( input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens ) , 0 ) as total_tokens ,
COALESCE ( SUM ( total_cost ) , 0 ) as cost ,
COALESCE ( SUM ( actual_cost ) , 0 ) as actual_cost
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE user_id = $ 1 AND created_at >= $ 2 AND created_at < $ 3
GROUP BY date
ORDER BY date ASC
` , dateFormat )
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
rows , err := r . sql . QueryContext ( ctx , query , userID , startTime , endTime )
2025-12-18 13:50:39 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
defer rows . Close ( )
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
return scanTrendRows ( rows )
2025-12-18 13:50:39 +08:00
}
// GetUserModelStats 获取指定用户的模型统计
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetUserModelStats ( ctx context . Context , userID int64 , startTime , endTime time . Time ) ( [ ] ModelStat , error ) {
2025-12-29 10:03:27 +08:00
query := `
SELECT
2025-12-18 13:50:39 +08:00
model ,
COUNT ( * ) as requests ,
COALESCE ( SUM ( input_tokens ) , 0 ) as input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as output_tokens ,
COALESCE ( SUM ( input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens ) , 0 ) as total_tokens ,
COALESCE ( SUM ( total_cost ) , 0 ) as cost ,
COALESCE ( SUM ( actual_cost ) , 0 ) as actual_cost
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE user_id = $ 1 AND created_at >= $ 2 AND created_at < $ 3
GROUP BY model
ORDER BY total_tokens DESC
`
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
rows , err := r . sql . QueryContext ( ctx , query , userID , startTime , endTime )
2025-12-18 13:50:39 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
defer rows . Close ( )
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
return scanModelStatsRows ( rows )
2025-12-18 13:50:39 +08:00
}
// UsageLogFilters represents filters for usage log queries; aliased from the
// shared usagestats package.
type UsageLogFilters = usagestats.UsageLogFilters
2025-12-18 13:50:39 +08:00
// ListWithFilters lists usage logs with optional filters (for admin)
2025-12-26 15:40:24 +08:00
func ( r * usageLogRepository ) ListWithFilters ( ctx context . Context , params pagination . PaginationParams , filters UsageLogFilters ) ( [ ] service . UsageLog , * pagination . PaginationResult , error ) {
2025-12-29 10:03:27 +08:00
conditions := make ( [ ] string , 0 , 8 )
args := make ( [ ] any , 0 , 8 )
2025-12-18 13:50:39 +08:00
if filters . UserID > 0 {
2025-12-29 10:03:27 +08:00
conditions = append ( conditions , fmt . Sprintf ( "user_id = $%d" , len ( args ) + 1 ) )
args = append ( args , filters . UserID )
2025-12-18 13:50:39 +08:00
}
if filters . ApiKeyID > 0 {
2025-12-29 10:03:27 +08:00
conditions = append ( conditions , fmt . Sprintf ( "api_key_id = $%d" , len ( args ) + 1 ) )
args = append ( args , filters . ApiKeyID )
2025-12-18 13:50:39 +08:00
}
2025-12-27 10:50:25 +08:00
if filters . AccountID > 0 {
2025-12-29 10:03:27 +08:00
conditions = append ( conditions , fmt . Sprintf ( "account_id = $%d" , len ( args ) + 1 ) )
args = append ( args , filters . AccountID )
2025-12-27 10:50:25 +08:00
}
if filters . GroupID > 0 {
2025-12-29 10:03:27 +08:00
conditions = append ( conditions , fmt . Sprintf ( "group_id = $%d" , len ( args ) + 1 ) )
args = append ( args , filters . GroupID )
2025-12-27 10:50:25 +08:00
}
if filters . Model != "" {
2025-12-29 10:03:27 +08:00
conditions = append ( conditions , fmt . Sprintf ( "model = $%d" , len ( args ) + 1 ) )
args = append ( args , filters . Model )
2025-12-27 10:50:25 +08:00
}
if filters . Stream != nil {
2025-12-29 10:03:27 +08:00
conditions = append ( conditions , fmt . Sprintf ( "stream = $%d" , len ( args ) + 1 ) )
args = append ( args , * filters . Stream )
2025-12-27 10:50:25 +08:00
}
if filters . BillingType != nil {
2025-12-29 10:03:27 +08:00
conditions = append ( conditions , fmt . Sprintf ( "billing_type = $%d" , len ( args ) + 1 ) )
args = append ( args , int16 ( * filters . BillingType ) )
2025-12-27 10:50:25 +08:00
}
2025-12-18 13:50:39 +08:00
if filters . StartTime != nil {
2025-12-29 10:03:27 +08:00
conditions = append ( conditions , fmt . Sprintf ( "created_at >= $%d" , len ( args ) + 1 ) )
args = append ( args , * filters . StartTime )
2025-12-18 13:50:39 +08:00
}
if filters . EndTime != nil {
2025-12-29 10:03:27 +08:00
conditions = append ( conditions , fmt . Sprintf ( "created_at <= $%d" , len ( args ) + 1 ) )
args = append ( args , * filters . EndTime )
2025-12-18 13:50:39 +08:00
}
2025-12-29 10:03:27 +08:00
whereClause := buildWhere ( conditions )
logs , page , err := r . listUsageLogsWithPagination ( ctx , whereClause , args , params )
if err != nil {
2025-12-18 13:50:39 +08:00
return nil , nil , err
}
2025-12-29 10:03:27 +08:00
if err := r . hydrateUsageLogAssociations ( ctx , logs ) ; err != nil {
2025-12-18 13:50:39 +08:00
return nil , nil , err
}
2025-12-29 10:03:27 +08:00
return logs , page , nil
2025-12-18 13:50:39 +08:00
}
// UsageStats represents usage statistics aggregated over a time range.
// Alias of the shared usagestats type.
type UsageStats = usagestats.UsageStats

// BatchUserUsageStats represents usage stats for a single user
// (today's and all-time actual cost). Alias of the shared usagestats type.
type BatchUserUsageStats = usagestats.BatchUserUsageStats
2025-12-18 13:50:39 +08:00
// GetBatchUserUsageStats gets today and total actual_cost for multiple users
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetBatchUserUsageStats ( ctx context . Context , userIDs [ ] int64 ) ( map [ int64 ] * BatchUserUsageStats , error ) {
2025-12-29 10:03:27 +08:00
result := make ( map [ int64 ] * BatchUserUsageStats )
2025-12-18 13:50:39 +08:00
if len ( userIDs ) == 0 {
2025-12-29 10:03:27 +08:00
return result , nil
2025-12-18 13:50:39 +08:00
}
for _ , id := range userIDs {
result [ id ] = & BatchUserUsageStats { UserID : id }
}
2025-12-29 10:03:27 +08:00
query := `
SELECT user_id , COALESCE ( SUM ( actual_cost ) , 0 ) as total_cost
FROM usage_logs
WHERE user_id = ANY ( $ 1 )
GROUP BY user_id
`
rows , err := r . sql . QueryContext ( ctx , query , pq . Array ( userIDs ) )
2025-12-18 13:50:39 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
for rows . Next ( ) {
var userID int64
var total float64
if err := rows . Scan ( & userID , & total ) ; err != nil {
_ = rows . Close ( )
return nil , err
}
if stats , ok := result [ userID ] ; ok {
stats . TotalActualCost = total
2025-12-18 13:50:39 +08:00
}
}
2025-12-29 10:03:27 +08:00
if err := rows . Close ( ) ; err != nil {
return nil , err
2025-12-18 13:50:39 +08:00
}
2025-12-29 10:03:27 +08:00
today := timezone . Today ( )
todayQuery := `
SELECT user_id , COALESCE ( SUM ( actual_cost ) , 0 ) as today_cost
FROM usage_logs
WHERE user_id = ANY ( $ 1 ) AND created_at >= $ 2
GROUP BY user_id
`
rows , err = r . sql . QueryContext ( ctx , todayQuery , pq . Array ( userIDs ) , today )
2025-12-18 13:50:39 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
for rows . Next ( ) {
var userID int64
var total float64
if err := rows . Scan ( & userID , & total ) ; err != nil {
_ = rows . Close ( )
return nil , err
}
if stats , ok := result [ userID ] ; ok {
stats . TodayActualCost = total
2025-12-18 13:50:39 +08:00
}
}
2025-12-29 10:03:27 +08:00
if err := rows . Close ( ) ; err != nil {
return nil , err
}
2025-12-18 13:50:39 +08:00
return result , nil
}
// BatchApiKeyUsageStats represents usage stats for a single API key
// (today's and all-time actual cost). Alias of the shared usagestats type.
type BatchApiKeyUsageStats = usagestats.BatchApiKeyUsageStats
2025-12-18 13:50:39 +08:00
// GetBatchApiKeyUsageStats gets today and total actual_cost for multiple API keys
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetBatchApiKeyUsageStats ( ctx context . Context , apiKeyIDs [ ] int64 ) ( map [ int64 ] * BatchApiKeyUsageStats , error ) {
2025-12-29 10:03:27 +08:00
result := make ( map [ int64 ] * BatchApiKeyUsageStats )
2025-12-18 13:50:39 +08:00
if len ( apiKeyIDs ) == 0 {
2025-12-29 10:03:27 +08:00
return result , nil
2025-12-18 13:50:39 +08:00
}
for _ , id := range apiKeyIDs {
result [ id ] = & BatchApiKeyUsageStats { ApiKeyID : id }
}
2025-12-29 10:03:27 +08:00
query := `
SELECT api_key_id , COALESCE ( SUM ( actual_cost ) , 0 ) as total_cost
FROM usage_logs
WHERE api_key_id = ANY ( $ 1 )
GROUP BY api_key_id
`
rows , err := r . sql . QueryContext ( ctx , query , pq . Array ( apiKeyIDs ) )
2025-12-18 13:50:39 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
for rows . Next ( ) {
var apiKeyID int64
var total float64
if err := rows . Scan ( & apiKeyID , & total ) ; err != nil {
_ = rows . Close ( )
return nil , err
}
if stats , ok := result [ apiKeyID ] ; ok {
stats . TotalActualCost = total
2025-12-18 13:50:39 +08:00
}
}
2025-12-29 10:03:27 +08:00
if err := rows . Close ( ) ; err != nil {
return nil , err
2025-12-18 13:50:39 +08:00
}
2025-12-29 10:03:27 +08:00
today := timezone . Today ( )
todayQuery := `
SELECT api_key_id , COALESCE ( SUM ( actual_cost ) , 0 ) as today_cost
FROM usage_logs
WHERE api_key_id = ANY ( $ 1 ) AND created_at >= $ 2
GROUP BY api_key_id
`
rows , err = r . sql . QueryContext ( ctx , todayQuery , pq . Array ( apiKeyIDs ) , today )
2025-12-18 13:50:39 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
for rows . Next ( ) {
var apiKeyID int64
var total float64
if err := rows . Scan ( & apiKeyID , & total ) ; err != nil {
_ = rows . Close ( )
return nil , err
}
if stats , ok := result [ apiKeyID ] ; ok {
stats . TodayActualCost = total
2025-12-18 13:50:39 +08:00
}
}
2025-12-29 10:03:27 +08:00
if err := rows . Close ( ) ; err != nil {
return nil , err
}
2025-12-18 13:50:39 +08:00
return result , nil
}
2025-12-20 10:06:55 +08:00
// GetUsageTrendWithFilters returns usage trend data with optional user/api_key filters
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetUsageTrendWithFilters ( ctx context . Context , startTime , endTime time . Time , granularity string , userID , apiKeyID int64 ) ( [ ] TrendDataPoint , error ) {
2025-12-29 10:03:27 +08:00
dateFormat := "YYYY-MM-DD"
2025-12-20 10:06:55 +08:00
if granularity == "hour" {
dateFormat = "YYYY-MM-DD HH24:00"
}
2025-12-29 10:03:27 +08:00
query := fmt . Sprintf ( `
SELECT
TO_CHAR ( created_at , ' % s ' ) as date ,
2025-12-20 10:06:55 +08:00
COUNT ( * ) as requests ,
COALESCE ( SUM ( input_tokens ) , 0 ) as input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as output_tokens ,
COALESCE ( SUM ( cache_creation_tokens + cache_read_tokens ) , 0 ) as cache_tokens ,
COALESCE ( SUM ( input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens ) , 0 ) as total_tokens ,
COALESCE ( SUM ( total_cost ) , 0 ) as cost ,
COALESCE ( SUM ( actual_cost ) , 0 ) as actual_cost
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE created_at >= $ 1 AND created_at < $ 2
` , dateFormat )
2025-12-20 10:06:55 +08:00
2025-12-29 10:03:27 +08:00
args := [ ] any { startTime , endTime }
2025-12-20 10:06:55 +08:00
if userID > 0 {
2025-12-29 10:03:27 +08:00
query += fmt . Sprintf ( " AND user_id = $%d" , len ( args ) + 1 )
args = append ( args , userID )
2025-12-20 10:06:55 +08:00
}
if apiKeyID > 0 {
2025-12-29 10:03:27 +08:00
query += fmt . Sprintf ( " AND api_key_id = $%d" , len ( args ) + 1 )
args = append ( args , apiKeyID )
2025-12-20 10:06:55 +08:00
}
2025-12-29 10:03:27 +08:00
query += " GROUP BY date ORDER BY date ASC"
2025-12-20 10:06:55 +08:00
2025-12-29 10:03:27 +08:00
rows , err := r . sql . QueryContext ( ctx , query , args ... )
2025-12-20 10:06:55 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
defer rows . Close ( )
2025-12-20 10:06:55 +08:00
2025-12-29 10:03:27 +08:00
return scanTrendRows ( rows )
2025-12-20 10:06:55 +08:00
}
// GetModelStatsWithFilters returns model statistics with optional user/api_key filters
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetModelStatsWithFilters ( ctx context . Context , startTime , endTime time . Time , userID , apiKeyID , accountID int64 ) ( [ ] ModelStat , error ) {
2025-12-29 10:03:27 +08:00
query := `
SELECT
2025-12-20 10:06:55 +08:00
model ,
COUNT ( * ) as requests ,
COALESCE ( SUM ( input_tokens ) , 0 ) as input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as output_tokens ,
COALESCE ( SUM ( input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens ) , 0 ) as total_tokens ,
COALESCE ( SUM ( total_cost ) , 0 ) as cost ,
COALESCE ( SUM ( actual_cost ) , 0 ) as actual_cost
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE created_at >= $ 1 AND created_at < $ 2
`
2025-12-20 10:06:55 +08:00
2025-12-29 10:03:27 +08:00
args := [ ] any { startTime , endTime }
2025-12-20 10:06:55 +08:00
if userID > 0 {
2025-12-29 10:03:27 +08:00
query += fmt . Sprintf ( " AND user_id = $%d" , len ( args ) + 1 )
args = append ( args , userID )
2025-12-20 10:06:55 +08:00
}
if apiKeyID > 0 {
2025-12-29 10:03:27 +08:00
query += fmt . Sprintf ( " AND api_key_id = $%d" , len ( args ) + 1 )
args = append ( args , apiKeyID )
2025-12-20 10:06:55 +08:00
}
2025-12-23 13:42:33 +08:00
if accountID > 0 {
2025-12-29 10:03:27 +08:00
query += fmt . Sprintf ( " AND account_id = $%d" , len ( args ) + 1 )
args = append ( args , accountID )
2025-12-23 13:42:33 +08:00
}
2025-12-29 10:03:27 +08:00
query += " GROUP BY model ORDER BY total_tokens DESC"
2025-12-20 10:06:55 +08:00
2025-12-29 10:03:27 +08:00
rows , err := r . sql . QueryContext ( ctx , query , args ... )
2025-12-20 10:06:55 +08:00
if err != nil {
return nil , err
}
2025-12-29 10:03:27 +08:00
defer rows . Close ( )
2025-12-20 10:06:55 +08:00
2025-12-29 10:03:27 +08:00
return scanModelStatsRows ( rows )
2025-12-20 10:06:55 +08:00
}
2025-12-18 13:50:39 +08:00
// GetGlobalStats gets usage statistics for all users within a time range
2025-12-25 20:52:47 +08:00
func ( r * usageLogRepository ) GetGlobalStats ( ctx context . Context , startTime , endTime time . Time ) ( * UsageStats , error ) {
2025-12-29 10:03:27 +08:00
query := `
SELECT
2025-12-18 13:50:39 +08:00
COUNT ( * ) as total_requests ,
COALESCE ( SUM ( input_tokens ) , 0 ) as total_input_tokens ,
COALESCE ( SUM ( output_tokens ) , 0 ) as total_output_tokens ,
COALESCE ( SUM ( cache_creation_tokens + cache_read_tokens ) , 0 ) as total_cache_tokens ,
COALESCE ( SUM ( total_cost ) , 0 ) as total_cost ,
COALESCE ( SUM ( actual_cost ) , 0 ) as total_actual_cost ,
COALESCE ( AVG ( duration_ms ) , 0 ) as avg_duration_ms
2025-12-29 10:03:27 +08:00
FROM usage_logs
WHERE created_at >= $ 1 AND created_at <= $ 2
`
2025-12-18 13:50:39 +08:00
2025-12-29 10:03:27 +08:00
stats := & UsageStats { }
if err := r . sql . QueryRowContext ( ctx , query , startTime , endTime ) .
Scan (
& stats . TotalRequests ,
& stats . TotalInputTokens ,
& stats . TotalOutputTokens ,
& stats . TotalCacheTokens ,
& stats . TotalCost ,
& stats . TotalActualCost ,
& stats . AverageDurationMs ,
) ; err != nil {
2025-12-18 13:50:39 +08:00
return nil , err
}
2025-12-29 10:03:27 +08:00
stats . TotalTokens = stats . TotalInputTokens + stats . TotalOutputTokens + stats . TotalCacheTokens
return stats , nil
2025-12-18 13:50:39 +08:00
}
2025-12-23 13:42:33 +08:00
// AccountUsageHistory represents daily usage history for an account.
// Alias of the shared usagestats type.
type AccountUsageHistory = usagestats.AccountUsageHistory

// AccountUsageSummary represents summary statistics for an account.
// Alias of the shared usagestats type.
type AccountUsageSummary = usagestats.AccountUsageSummary

// AccountUsageStatsResponse represents the full usage statistics response for
// an account (daily history, summary, and per-model breakdown).
// Alias of the shared usagestats type.
type AccountUsageStatsResponse = usagestats.AccountUsageStatsResponse
2025-12-23 13:42:33 +08:00
// GetAccountUsageStats returns comprehensive usage statistics for an account
// over a time range: one history point per day with activity, an aggregate
// summary (totals, per-day averages, today's snapshot, highest cost/request
// days), and a per-model breakdown.
func (r *usageLogRepository) GetAccountUsageStats(ctx context.Context, accountID int64, startTime, endTime time.Time) (*AccountUsageStatsResponse, error) {
	// Nominal span of the requested window in days; falls back to 30 when the
	// range is empty or inverted.
	daysCount := int(endTime.Sub(startTime).Hours()/24) + 1
	if daysCount <= 0 {
		daysCount = 30
	}
	// Daily aggregation; days with no rows simply don't appear in the result.
	query := `
		SELECT
			TO_CHAR(created_at, 'YYYY-MM-DD') as date,
			COUNT(*) as requests,
			COALESCE(SUM(input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens), 0) as tokens,
			COALESCE(SUM(total_cost), 0) as cost,
			COALESCE(SUM(actual_cost), 0) as actual_cost
		FROM usage_logs
		WHERE account_id = $1 AND created_at >= $2 AND created_at < $3
		GROUP BY date
		ORDER BY date ASC
	`
	rows, err := r.sql.QueryContext(ctx, query, accountID, startTime, endTime)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	history := make([]AccountUsageHistory, 0)
	for rows.Next() {
		var date string
		var requests int64
		var tokens int64
		var cost float64
		var actualCost float64
		if err := rows.Scan(&date, &requests, &tokens, &cost, &actualCost); err != nil {
			return nil, err
		}
		// Parse error deliberately ignored: date comes from TO_CHAR above and
		// always matches the layout; on failure Label would render "01/01".
		t, _ := time.Parse("2006-01-02", date)
		history = append(history, AccountUsageHistory{
			Date:       date,
			Label:      t.Format("01/02"), // short MM/DD label for charts
			Requests:   requests,
			Tokens:     tokens,
			Cost:       cost,
			ActualCost: actualCost,
		})
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	// Single pass over history: accumulate totals and track the peak
	// cost/request days (pointers into the history slice).
	var totalActualCost, totalStandardCost float64
	var totalRequests, totalTokens int64
	var highestCostDay, highestRequestDay *AccountUsageHistory
	for i := range history {
		h := &history[i]
		totalActualCost += h.ActualCost
		totalStandardCost += h.Cost
		totalRequests += h.Requests
		totalTokens += h.Tokens
		if highestCostDay == nil || h.ActualCost > highestCostDay.ActualCost {
			highestCostDay = h
		}
		if highestRequestDay == nil || h.Requests > highestRequestDay.Requests {
			highestRequestDay = h
		}
	}
	// Guard against division by zero below; averages over an inactive range
	// are reported as if one day were used.
	actualDaysUsed := len(history)
	if actualDaysUsed == 0 {
		actualDaysUsed = 1
	}
	avgQuery := "SELECT COALESCE(AVG(duration_ms), 0) as avg_duration_ms FROM usage_logs WHERE account_id = $1 AND created_at >= $2 AND created_at < $3"
	var avgDuration float64
	if err := r.sql.QueryRowContext(ctx, avgQuery, accountID, startTime, endTime).Scan(&avgDuration); err != nil {
		return nil, err
	}
	summary := AccountUsageSummary{
		Days:              daysCount,
		ActualDaysUsed:    actualDaysUsed,
		TotalCost:         totalActualCost,
		TotalStandardCost: totalStandardCost,
		TotalRequests:     totalRequests,
		TotalTokens:       totalTokens,
		AvgDailyCost:      totalActualCost / float64(actualDaysUsed),
		AvgDailyRequests:  float64(totalRequests) / float64(actualDaysUsed),
		AvgDailyTokens:    float64(totalTokens) / float64(actualDaysUsed),
		AvgDurationMs:     avgDuration,
	}
	// Attach today's snapshot when the history contains the current date
	// (in the configured timezone).
	todayStr := timezone.Now().Format("2006-01-02")
	for i := range history {
		if history[i].Date == todayStr {
			summary.Today = &struct {
				Date     string  `json:"date"`
				Cost     float64 `json:"cost"`
				Requests int64   `json:"requests"`
				Tokens   int64   `json:"tokens"`
			}{
				Date:     history[i].Date,
				Cost:     history[i].ActualCost,
				Requests: history[i].Requests,
				Tokens:   history[i].Tokens,
			}
			break
		}
	}
	if highestCostDay != nil {
		summary.HighestCostDay = &struct {
			Date     string  `json:"date"`
			Label    string  `json:"label"`
			Cost     float64 `json:"cost"`
			Requests int64   `json:"requests"`
		}{
			Date:     highestCostDay.Date,
			Label:    highestCostDay.Label,
			Cost:     highestCostDay.ActualCost,
			Requests: highestCostDay.Requests,
		}
	}
	if highestRequestDay != nil {
		summary.HighestRequestDay = &struct {
			Date     string  `json:"date"`
			Label    string  `json:"label"`
			Requests int64   `json:"requests"`
			Cost     float64 `json:"cost"`
		}{
			Date:     highestRequestDay.Date,
			Label:    highestRequestDay.Label,
			Requests: highestRequestDay.Requests,
			Cost:     highestRequestDay.ActualCost,
		}
	}
	// Per-model breakdown is best-effort: on failure the response still ships
	// with an empty model list rather than erroring the whole endpoint.
	models, err := r.GetModelStatsWithFilters(ctx, startTime, endTime, 0, 0, accountID)
	if err != nil {
		models = []ModelStat{}
	}
	return &AccountUsageStatsResponse{
		History: history,
		Summary: summary,
		Models:  models,
	}, nil
}
2025-12-26 15:40:24 +08:00
2025-12-29 10:03:27 +08:00
func ( r * usageLogRepository ) listUsageLogsWithPagination ( ctx context . Context , whereClause string , args [ ] any , params pagination . PaginationParams ) ( [ ] service . UsageLog , * pagination . PaginationResult , error ) {
countQuery := "SELECT COUNT(*) FROM usage_logs " + whereClause
var total int64
if err := r . sql . QueryRowContext ( ctx , countQuery , args ... ) . Scan ( & total ) ; err != nil {
return nil , nil , err
}
limitPos := len ( args ) + 1
offsetPos := len ( args ) + 2
listArgs := append ( append ( [ ] any { } , args ... ) , params . Limit ( ) , params . Offset ( ) )
query := fmt . Sprintf ( "SELECT %s FROM usage_logs %s ORDER BY id DESC LIMIT $%d OFFSET $%d" , usageLogSelectColumns , whereClause , limitPos , offsetPos )
logs , err := r . queryUsageLogs ( ctx , query , listArgs ... )
if err != nil {
return nil , nil , err
}
return logs , paginationResultFromTotal ( total , params ) , nil
2025-12-26 15:40:24 +08:00
}
2025-12-29 10:03:27 +08:00
func ( r * usageLogRepository ) queryUsageLogs ( ctx context . Context , query string , args ... any ) ( [ ] service . UsageLog , error ) {
rows , err := r . sql . QueryContext ( ctx , query , args ... )
if err != nil {
return nil , err
}
defer rows . Close ( )
2025-12-26 15:40:24 +08:00
2025-12-29 10:03:27 +08:00
logs := make ( [ ] service . UsageLog , 0 )
for rows . Next ( ) {
log , err := scanUsageLog ( rows )
if err != nil {
return nil , err
}
logs = append ( logs , * log )
}
if err := rows . Err ( ) ; err != nil {
return nil , err
}
return logs , nil
}
func ( r * usageLogRepository ) hydrateUsageLogAssociations ( ctx context . Context , logs [ ] service . UsageLog ) error {
// 关联数据使用 Ent 批量加载,避免把复杂 SQL 继续膨胀。
if len ( logs ) == 0 {
2025-12-26 15:40:24 +08:00
return nil
}
2025-12-29 10:03:27 +08:00
ids := collectUsageLogIDs ( logs )
users , err := r . loadUsers ( ctx , ids . userIDs )
if err != nil {
return err
}
apiKeys , err := r . loadApiKeys ( ctx , ids . apiKeyIDs )
if err != nil {
return err
}
accounts , err := r . loadAccounts ( ctx , ids . accountIDs )
if err != nil {
return err
}
groups , err := r . loadGroups ( ctx , ids . groupIDs )
if err != nil {
return err
}
subs , err := r . loadSubscriptions ( ctx , ids . subscriptionIDs )
if err != nil {
return err
}
for i := range logs {
if user , ok := users [ logs [ i ] . UserID ] ; ok {
logs [ i ] . User = user
}
if key , ok := apiKeys [ logs [ i ] . ApiKeyID ] ; ok {
logs [ i ] . ApiKey = key
}
if acc , ok := accounts [ logs [ i ] . AccountID ] ; ok {
logs [ i ] . Account = acc
}
if logs [ i ] . GroupID != nil {
if group , ok := groups [ * logs [ i ] . GroupID ] ; ok {
logs [ i ] . Group = group
}
}
if logs [ i ] . SubscriptionID != nil {
if sub , ok := subs [ * logs [ i ] . SubscriptionID ] ; ok {
logs [ i ] . Subscription = sub
}
}
2025-12-26 15:40:24 +08:00
}
2025-12-29 10:03:27 +08:00
return nil
}
// usageLogIDs carries the distinct foreign-key ID sets referenced by a page
// of usage logs, used to batch-load the associated entities.
type usageLogIDs struct {
	userIDs         []int64
	apiKeyIDs       []int64
	accountIDs      []int64
	groupIDs        []int64 // only IDs from non-nil GroupID fields
	subscriptionIDs []int64 // only IDs from non-nil SubscriptionID fields
}
2025-12-29 10:03:27 +08:00
func collectUsageLogIDs ( logs [ ] service . UsageLog ) usageLogIDs {
idSet := func ( ) map [ int64 ] struct { } { return make ( map [ int64 ] struct { } ) }
userIDs := idSet ( )
apiKeyIDs := idSet ( )
accountIDs := idSet ( )
groupIDs := idSet ( )
subscriptionIDs := idSet ( )
for i := range logs {
userIDs [ logs [ i ] . UserID ] = struct { } { }
apiKeyIDs [ logs [ i ] . ApiKeyID ] = struct { } { }
accountIDs [ logs [ i ] . AccountID ] = struct { } { }
if logs [ i ] . GroupID != nil {
groupIDs [ * logs [ i ] . GroupID ] = struct { } { }
}
if logs [ i ] . SubscriptionID != nil {
subscriptionIDs [ * logs [ i ] . SubscriptionID ] = struct { } { }
2025-12-26 15:40:24 +08:00
}
}
2025-12-29 10:03:27 +08:00
return usageLogIDs {
userIDs : setToSlice ( userIDs ) ,
apiKeyIDs : setToSlice ( apiKeyIDs ) ,
accountIDs : setToSlice ( accountIDs ) ,
groupIDs : setToSlice ( groupIDs ) ,
subscriptionIDs : setToSlice ( subscriptionIDs ) ,
}
2025-12-26 15:40:24 +08:00
}
2025-12-29 10:03:27 +08:00
// loadUsers batch-loads the given user IDs via Ent and maps them by ID.
func (r *usageLogRepository) loadUsers(ctx context.Context, ids []int64) (map[int64]*service.User, error) {
	result := make(map[int64]*service.User, len(ids))
	if len(ids) == 0 {
		return result, nil
	}
	entities, err := r.client.User.Query().Where(dbuser.IDIn(ids...)).All(ctx)
	if err != nil {
		return nil, err
	}
	for _, entity := range entities {
		result[entity.ID] = userEntityToService(entity)
	}
	return result, nil
}
func ( r * usageLogRepository ) loadApiKeys ( ctx context . Context , ids [ ] int64 ) ( map [ int64 ] * service . ApiKey , error ) {
out := make ( map [ int64 ] * service . ApiKey )
if len ( ids ) == 0 {
return out , nil
}
models , err := r . client . ApiKey . Query ( ) . Where ( dbapikey . IDIn ( ids ... ) ) . All ( ctx )
if err != nil {
return nil , err
}
for _ , m := range models {
out [ m . ID ] = apiKeyEntityToService ( m )
2025-12-26 15:40:24 +08:00
}
2025-12-29 10:03:27 +08:00
return out , nil
}
func ( r * usageLogRepository ) loadAccounts ( ctx context . Context , ids [ ] int64 ) ( map [ int64 ] * service . Account , error ) {
out := make ( map [ int64 ] * service . Account )
if len ( ids ) == 0 {
return out , nil
}
models , err := r . client . Account . Query ( ) . Where ( dbaccount . IDIn ( ids ... ) ) . All ( ctx )
if err != nil {
return nil , err
2025-12-26 15:40:24 +08:00
}
2025-12-29 10:03:27 +08:00
for _ , m := range models {
out [ m . ID ] = accountEntityToService ( m )
}
return out , nil
2025-12-26 15:40:24 +08:00
}
2025-12-29 10:03:27 +08:00
func ( r * usageLogRepository ) loadGroups ( ctx context . Context , ids [ ] int64 ) ( map [ int64 ] * service . Group , error ) {
out := make ( map [ int64 ] * service . Group )
if len ( ids ) == 0 {
return out , nil
}
models , err := r . client . Group . Query ( ) . Where ( dbgroup . IDIn ( ids ... ) ) . All ( ctx )
if err != nil {
return nil , err
}
for _ , m := range models {
out [ m . ID ] = groupEntityToService ( m )
2025-12-26 15:40:24 +08:00
}
2025-12-29 10:03:27 +08:00
return out , nil
}
// loadSubscriptions batch-loads the given subscription IDs via Ent and maps
// them by ID.
func (r *usageLogRepository) loadSubscriptions(ctx context.Context, ids []int64) (map[int64]*service.UserSubscription, error) {
	result := make(map[int64]*service.UserSubscription, len(ids))
	if len(ids) == 0 {
		return result, nil
	}
	entities, err := r.client.UserSubscription.Query().Where(dbusersub.IDIn(ids...)).All(ctx)
	if err != nil {
		return nil, err
	}
	for _, entity := range entities {
		result[entity.ID] = userSubscriptionEntityToService(entity)
	}
	return result, nil
}
// scanUsageLog scans one usage_logs row into a service.UsageLog. The scan
// destinations must stay in exact positional sync with usageLogSelectColumns;
// reorder both together or never.
func scanUsageLog(scanner interface{ Scan(...any) error }) (*service.UsageLog, error) {
	// Nullable columns are scanned through sql.Null* wrappers and copied onto
	// the struct's optional fields only when valid.
	var (
		id                  int64
		userID              int64
		apiKeyID            int64
		accountID           int64
		requestID           sql.NullString
		model               string
		groupID             sql.NullInt64
		subscriptionID      sql.NullInt64
		inputTokens         int
		outputTokens        int
		cacheCreationTokens int
		cacheReadTokens     int
		cacheCreation5m     int
		cacheCreation1h     int
		inputCost           float64
		outputCost          float64
		cacheCreationCost   float64
		cacheReadCost       float64
		totalCost           float64
		actualCost          float64
		rateMultiplier      float64
		billingType         int16
		stream              bool
		durationMs          sql.NullInt64
		firstTokenMs        sql.NullInt64
		createdAt           time.Time
	)
	if err := scanner.Scan(
		&id,
		&userID,
		&apiKeyID,
		&accountID,
		&requestID,
		&model,
		&groupID,
		&subscriptionID,
		&inputTokens,
		&outputTokens,
		&cacheCreationTokens,
		&cacheReadTokens,
		&cacheCreation5m,
		&cacheCreation1h,
		&inputCost,
		&outputCost,
		&cacheCreationCost,
		&cacheReadCost,
		&totalCost,
		&actualCost,
		&rateMultiplier,
		&billingType,
		&stream,
		&durationMs,
		&firstTokenMs,
		&createdAt,
	); err != nil {
		return nil, err
	}
	log := &service.UsageLog{
		ID:                    id,
		UserID:                userID,
		ApiKeyID:              apiKeyID,
		AccountID:             accountID,
		Model:                 model,
		InputTokens:           inputTokens,
		OutputTokens:          outputTokens,
		CacheCreationTokens:   cacheCreationTokens,
		CacheReadTokens:       cacheReadTokens,
		CacheCreation5mTokens: cacheCreation5m,
		CacheCreation1hTokens: cacheCreation1h,
		InputCost:             inputCost,
		OutputCost:            outputCost,
		CacheCreationCost:     cacheCreationCost,
		CacheReadCost:         cacheReadCost,
		TotalCost:             totalCost,
		ActualCost:            actualCost,
		RateMultiplier:        rateMultiplier,
		// billing_type is stored as int16 but the service model uses int8;
		// assumes stored values fit in int8 — TODO confirm against the schema.
		BillingType: int8(billingType),
		Stream:      stream,
		CreatedAt:   createdAt,
	}
	if requestID.Valid {
		log.RequestID = requestID.String
	}
	if groupID.Valid {
		value := groupID.Int64
		log.GroupID = &value
	}
	if subscriptionID.Valid {
		value := subscriptionID.Int64
		log.SubscriptionID = &value
	}
	if durationMs.Valid {
		value := int(durationMs.Int64)
		log.DurationMs = &value
	}
	if firstTokenMs.Valid {
		value := int(firstTokenMs.Int64)
		log.FirstTokenMs = &value
	}
	return log, nil
}
// scanTrendRows drains a trend-aggregation result set (date, requests, input/
// output/cache/total tokens, cost, actual cost) into TrendDataPoint values.
// The caller remains responsible for closing rows.
func scanTrendRows(rows *sql.Rows) ([]TrendDataPoint, error) {
	points := make([]TrendDataPoint, 0)
	for rows.Next() {
		var point TrendDataPoint
		dests := []any{
			&point.Date,
			&point.Requests,
			&point.InputTokens,
			&point.OutputTokens,
			&point.CacheTokens,
			&point.TotalTokens,
			&point.Cost,
			&point.ActualCost,
		}
		if err := rows.Scan(dests...); err != nil {
			return nil, err
		}
		points = append(points, point)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return points, nil
}
// scanModelStatsRows drains a per-model aggregation result set (model,
// requests, input/output/total tokens, cost, actual cost) into ModelStat
// values. The caller remains responsible for closing rows.
func scanModelStatsRows(rows *sql.Rows) ([]ModelStat, error) {
	stats := make([]ModelStat, 0)
	for rows.Next() {
		var stat ModelStat
		dests := []any{
			&stat.Model,
			&stat.Requests,
			&stat.InputTokens,
			&stat.OutputTokens,
			&stat.TotalTokens,
			&stat.Cost,
			&stat.ActualCost,
		}
		if err := rows.Scan(dests...); err != nil {
			return nil, err
		}
		stats = append(stats, stat)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return stats, nil
}
// buildWhere joins the given conditions with AND into a "WHERE ..." clause,
// or returns the empty string when there are no conditions.
func buildWhere(conditions []string) string {
	if len(conditions) == 0 {
		return ""
	}
	var clause strings.Builder
	clause.WriteString("WHERE ")
	for i, cond := range conditions {
		if i > 0 {
			clause.WriteString(" AND ")
		}
		clause.WriteString(cond)
	}
	return clause.String()
}
// nullInt64 converts an optional *int64 into its sql.NullInt64 equivalent
// (invalid when v is nil).
func nullInt64(v *int64) sql.NullInt64 {
	var out sql.NullInt64
	if v != nil {
		out.Int64 = *v
		out.Valid = true
	}
	return out
}
// nullInt converts an optional *int into a sql.NullInt64 (invalid when v is
// nil), widening the value to int64.
func nullInt(v *int) sql.NullInt64 {
	var out sql.NullInt64
	if v != nil {
		out.Int64 = int64(*v)
		out.Valid = true
	}
	return out
}
// setToSlice flattens a set of IDs into a slice. Iteration order of the map
// is unspecified, so the result order is too.
func setToSlice(set map[int64]struct{}) []int64 {
	result := make([]int64, 0, len(set))
	for key := range set {
		result = append(result, key)
	}
	return result
}