mirror of
https://gitee.com/wanwujie/sub2api
synced 2026-04-18 22:04:45 +08:00
Merge pull request #1019 from Ethan0x0000/feat/usage-endpoint-distribution
feat: add endpoint metadata and usage endpoint distribution insights
This commit is contained in:
@@ -523,6 +523,8 @@ func usageLogFromServiceUser(l *service.UsageLog) UsageLog {
|
|||||||
Model: l.Model,
|
Model: l.Model,
|
||||||
ServiceTier: l.ServiceTier,
|
ServiceTier: l.ServiceTier,
|
||||||
ReasoningEffort: l.ReasoningEffort,
|
ReasoningEffort: l.ReasoningEffort,
|
||||||
|
InboundEndpoint: l.InboundEndpoint,
|
||||||
|
UpstreamEndpoint: l.UpstreamEndpoint,
|
||||||
GroupID: l.GroupID,
|
GroupID: l.GroupID,
|
||||||
SubscriptionID: l.SubscriptionID,
|
SubscriptionID: l.SubscriptionID,
|
||||||
InputTokens: l.InputTokens,
|
InputTokens: l.InputTokens,
|
||||||
|
|||||||
@@ -76,10 +76,14 @@ func TestUsageLogFromService_IncludesServiceTierForUserAndAdmin(t *testing.T) {
|
|||||||
t.Parallel()
|
t.Parallel()
|
||||||
|
|
||||||
serviceTier := "priority"
|
serviceTier := "priority"
|
||||||
|
inboundEndpoint := "/v1/chat/completions"
|
||||||
|
upstreamEndpoint := "/v1/responses"
|
||||||
log := &service.UsageLog{
|
log := &service.UsageLog{
|
||||||
RequestID: "req_3",
|
RequestID: "req_3",
|
||||||
Model: "gpt-5.4",
|
Model: "gpt-5.4",
|
||||||
ServiceTier: &serviceTier,
|
ServiceTier: &serviceTier,
|
||||||
|
InboundEndpoint: &inboundEndpoint,
|
||||||
|
UpstreamEndpoint: &upstreamEndpoint,
|
||||||
AccountRateMultiplier: f64Ptr(1.5),
|
AccountRateMultiplier: f64Ptr(1.5),
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -88,8 +92,16 @@ func TestUsageLogFromService_IncludesServiceTierForUserAndAdmin(t *testing.T) {
|
|||||||
|
|
||||||
require.NotNil(t, userDTO.ServiceTier)
|
require.NotNil(t, userDTO.ServiceTier)
|
||||||
require.Equal(t, serviceTier, *userDTO.ServiceTier)
|
require.Equal(t, serviceTier, *userDTO.ServiceTier)
|
||||||
|
require.NotNil(t, userDTO.InboundEndpoint)
|
||||||
|
require.Equal(t, inboundEndpoint, *userDTO.InboundEndpoint)
|
||||||
|
require.NotNil(t, userDTO.UpstreamEndpoint)
|
||||||
|
require.Equal(t, upstreamEndpoint, *userDTO.UpstreamEndpoint)
|
||||||
require.NotNil(t, adminDTO.ServiceTier)
|
require.NotNil(t, adminDTO.ServiceTier)
|
||||||
require.Equal(t, serviceTier, *adminDTO.ServiceTier)
|
require.Equal(t, serviceTier, *adminDTO.ServiceTier)
|
||||||
|
require.NotNil(t, adminDTO.InboundEndpoint)
|
||||||
|
require.Equal(t, inboundEndpoint, *adminDTO.InboundEndpoint)
|
||||||
|
require.NotNil(t, adminDTO.UpstreamEndpoint)
|
||||||
|
require.Equal(t, upstreamEndpoint, *adminDTO.UpstreamEndpoint)
|
||||||
require.NotNil(t, adminDTO.AccountRateMultiplier)
|
require.NotNil(t, adminDTO.AccountRateMultiplier)
|
||||||
require.InDelta(t, 1.5, *adminDTO.AccountRateMultiplier, 1e-12)
|
require.InDelta(t, 1.5, *adminDTO.AccountRateMultiplier, 1e-12)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -337,6 +337,10 @@ type UsageLog struct {
|
|||||||
// ReasoningEffort is the request's reasoning effort level (OpenAI Responses API).
|
// ReasoningEffort is the request's reasoning effort level (OpenAI Responses API).
|
||||||
// nil means not provided / not applicable.
|
// nil means not provided / not applicable.
|
||||||
ReasoningEffort *string `json:"reasoning_effort,omitempty"`
|
ReasoningEffort *string `json:"reasoning_effort,omitempty"`
|
||||||
|
// InboundEndpoint is the client-facing API endpoint path, e.g. /v1/chat/completions.
|
||||||
|
InboundEndpoint *string `json:"inbound_endpoint,omitempty"`
|
||||||
|
// UpstreamEndpoint is the normalized upstream endpoint path, e.g. /v1/responses.
|
||||||
|
UpstreamEndpoint *string `json:"upstream_endpoint,omitempty"`
|
||||||
|
|
||||||
GroupID *int64 `json:"group_id"`
|
GroupID *int64 `json:"group_id"`
|
||||||
SubscriptionID *int64 `json:"subscription_id"`
|
SubscriptionID *int64 `json:"subscription_id"`
|
||||||
|
|||||||
@@ -256,14 +256,16 @@ func (h *OpenAIGatewayHandler) ChatCompletions(c *gin.Context) {
|
|||||||
|
|
||||||
h.submitUsageRecordTask(func(ctx context.Context) {
|
h.submitUsageRecordTask(func(ctx context.Context) {
|
||||||
if err := h.gatewayService.RecordUsage(ctx, &service.OpenAIRecordUsageInput{
|
if err := h.gatewayService.RecordUsage(ctx, &service.OpenAIRecordUsageInput{
|
||||||
Result: result,
|
Result: result,
|
||||||
APIKey: apiKey,
|
APIKey: apiKey,
|
||||||
User: apiKey.User,
|
User: apiKey.User,
|
||||||
Account: account,
|
Account: account,
|
||||||
Subscription: subscription,
|
Subscription: subscription,
|
||||||
UserAgent: userAgent,
|
InboundEndpoint: normalizedOpenAIInboundEndpoint(c, openAIInboundEndpointChatCompletions),
|
||||||
IPAddress: clientIP,
|
UpstreamEndpoint: normalizedOpenAIUpstreamEndpoint(c, openAIUpstreamEndpointResponses),
|
||||||
APIKeyService: h.apiKeyService,
|
UserAgent: userAgent,
|
||||||
|
IPAddress: clientIP,
|
||||||
|
APIKeyService: h.apiKeyService,
|
||||||
}); err != nil {
|
}); err != nil {
|
||||||
logger.L().With(
|
logger.L().With(
|
||||||
zap.String("component", "handler.openai_gateway.chat_completions"),
|
zap.String("component", "handler.openai_gateway.chat_completions"),
|
||||||
|
|||||||
@@ -0,0 +1,57 @@
|
|||||||
|
package handler
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"net/http/httptest"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNormalizedOpenAIUpstreamEndpoint(t *testing.T) {
|
||||||
|
gin.SetMode(gin.TestMode)
|
||||||
|
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
path string
|
||||||
|
fallback string
|
||||||
|
want string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "responses root maps to responses upstream",
|
||||||
|
path: "/v1/responses",
|
||||||
|
fallback: openAIUpstreamEndpointResponses,
|
||||||
|
want: "/v1/responses",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "responses compact keeps compact suffix",
|
||||||
|
path: "/openai/v1/responses/compact",
|
||||||
|
fallback: openAIUpstreamEndpointResponses,
|
||||||
|
want: "/v1/responses/compact",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "responses nested suffix preserved",
|
||||||
|
path: "/openai/v1/responses/compact/detail",
|
||||||
|
fallback: openAIUpstreamEndpointResponses,
|
||||||
|
want: "/v1/responses/compact/detail",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "non responses path uses fallback",
|
||||||
|
path: "/v1/messages",
|
||||||
|
fallback: openAIUpstreamEndpointResponses,
|
||||||
|
want: "/v1/responses",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
rec := httptest.NewRecorder()
|
||||||
|
c, _ := gin.CreateTestContext(rec)
|
||||||
|
c.Request = httptest.NewRequest(http.MethodPost, tt.path, nil)
|
||||||
|
|
||||||
|
got := normalizedOpenAIUpstreamEndpoint(c, tt.fallback)
|
||||||
|
require.Equal(t, tt.want, got)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -37,6 +37,13 @@ type OpenAIGatewayHandler struct {
|
|||||||
cfg *config.Config
|
cfg *config.Config
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
openAIInboundEndpointResponses = "/v1/responses"
|
||||||
|
openAIInboundEndpointMessages = "/v1/messages"
|
||||||
|
openAIInboundEndpointChatCompletions = "/v1/chat/completions"
|
||||||
|
openAIUpstreamEndpointResponses = "/v1/responses"
|
||||||
|
)
|
||||||
|
|
||||||
// NewOpenAIGatewayHandler creates a new OpenAIGatewayHandler
|
// NewOpenAIGatewayHandler creates a new OpenAIGatewayHandler
|
||||||
func NewOpenAIGatewayHandler(
|
func NewOpenAIGatewayHandler(
|
||||||
gatewayService *service.OpenAIGatewayService,
|
gatewayService *service.OpenAIGatewayService,
|
||||||
@@ -362,6 +369,8 @@ func (h *OpenAIGatewayHandler) Responses(c *gin.Context) {
|
|||||||
User: apiKey.User,
|
User: apiKey.User,
|
||||||
Account: account,
|
Account: account,
|
||||||
Subscription: subscription,
|
Subscription: subscription,
|
||||||
|
InboundEndpoint: normalizedOpenAIInboundEndpoint(c, openAIInboundEndpointResponses),
|
||||||
|
UpstreamEndpoint: normalizedOpenAIUpstreamEndpoint(c, openAIUpstreamEndpointResponses),
|
||||||
UserAgent: userAgent,
|
UserAgent: userAgent,
|
||||||
IPAddress: clientIP,
|
IPAddress: clientIP,
|
||||||
RequestPayloadHash: requestPayloadHash,
|
RequestPayloadHash: requestPayloadHash,
|
||||||
@@ -738,6 +747,8 @@ func (h *OpenAIGatewayHandler) Messages(c *gin.Context) {
|
|||||||
User: apiKey.User,
|
User: apiKey.User,
|
||||||
Account: account,
|
Account: account,
|
||||||
Subscription: subscription,
|
Subscription: subscription,
|
||||||
|
InboundEndpoint: normalizedOpenAIInboundEndpoint(c, openAIInboundEndpointMessages),
|
||||||
|
UpstreamEndpoint: normalizedOpenAIUpstreamEndpoint(c, openAIUpstreamEndpointResponses),
|
||||||
UserAgent: userAgent,
|
UserAgent: userAgent,
|
||||||
IPAddress: clientIP,
|
IPAddress: clientIP,
|
||||||
RequestPayloadHash: requestPayloadHash,
|
RequestPayloadHash: requestPayloadHash,
|
||||||
@@ -1235,6 +1246,8 @@ func (h *OpenAIGatewayHandler) ResponsesWebSocket(c *gin.Context) {
|
|||||||
User: apiKey.User,
|
User: apiKey.User,
|
||||||
Account: account,
|
Account: account,
|
||||||
Subscription: subscription,
|
Subscription: subscription,
|
||||||
|
InboundEndpoint: normalizedOpenAIInboundEndpoint(c, openAIInboundEndpointResponses),
|
||||||
|
UpstreamEndpoint: normalizedOpenAIUpstreamEndpoint(c, openAIUpstreamEndpointResponses),
|
||||||
UserAgent: userAgent,
|
UserAgent: userAgent,
|
||||||
IPAddress: clientIP,
|
IPAddress: clientIP,
|
||||||
RequestPayloadHash: service.HashUsageRequestPayload(firstMessage),
|
RequestPayloadHash: service.HashUsageRequestPayload(firstMessage),
|
||||||
@@ -1530,6 +1543,62 @@ func openAIWSIngressFallbackSessionSeed(userID, apiKeyID int64, groupID *int64)
|
|||||||
return fmt.Sprintf("openai_ws_ingress:%d:%d:%d", gid, userID, apiKeyID)
|
return fmt.Sprintf("openai_ws_ingress:%d:%d:%d", gid, userID, apiKeyID)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func normalizedOpenAIInboundEndpoint(c *gin.Context, fallback string) string {
|
||||||
|
path := strings.TrimSpace(fallback)
|
||||||
|
if c != nil {
|
||||||
|
if fullPath := strings.TrimSpace(c.FullPath()); fullPath != "" {
|
||||||
|
path = fullPath
|
||||||
|
} else if c.Request != nil && c.Request.URL != nil {
|
||||||
|
if requestPath := strings.TrimSpace(c.Request.URL.Path); requestPath != "" {
|
||||||
|
path = requestPath
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case strings.Contains(path, openAIInboundEndpointChatCompletions):
|
||||||
|
return openAIInboundEndpointChatCompletions
|
||||||
|
case strings.Contains(path, openAIInboundEndpointMessages):
|
||||||
|
return openAIInboundEndpointMessages
|
||||||
|
case strings.Contains(path, openAIInboundEndpointResponses):
|
||||||
|
return openAIInboundEndpointResponses
|
||||||
|
default:
|
||||||
|
return path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func normalizedOpenAIUpstreamEndpoint(c *gin.Context, fallback string) string {
|
||||||
|
base := strings.TrimSpace(fallback)
|
||||||
|
if base == "" {
|
||||||
|
base = openAIUpstreamEndpointResponses
|
||||||
|
}
|
||||||
|
base = strings.TrimRight(base, "/")
|
||||||
|
|
||||||
|
if c == nil || c.Request == nil || c.Request.URL == nil {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
|
||||||
|
path := strings.TrimRight(strings.TrimSpace(c.Request.URL.Path), "/")
|
||||||
|
if path == "" {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
|
||||||
|
idx := strings.LastIndex(path, "/responses")
|
||||||
|
if idx < 0 {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
|
||||||
|
suffix := strings.TrimSpace(path[idx+len("/responses"):])
|
||||||
|
if suffix == "" || suffix == "/" {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
if !strings.HasPrefix(suffix, "/") {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
|
||||||
|
return base + suffix
|
||||||
|
}
|
||||||
|
|
||||||
func isOpenAIWSUpgradeRequest(r *http.Request) bool {
|
func isOpenAIWSUpgradeRequest(r *http.Request) bool {
|
||||||
if r == nil {
|
if r == nil {
|
||||||
return false
|
return false
|
||||||
|
|||||||
@@ -334,6 +334,14 @@ func (s *stubUsageLogRepo) GetUsageTrendWithFilters(ctx context.Context, startTi
|
|||||||
func (s *stubUsageLogRepo) GetModelStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.ModelStat, error) {
|
func (s *stubUsageLogRepo) GetModelStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.ModelStat, error) {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (s *stubUsageLogRepo) GetEndpointStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]usagestats.EndpointStat, error) {
|
||||||
|
return []usagestats.EndpointStat{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *stubUsageLogRepo) GetUpstreamEndpointStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]usagestats.EndpointStat, error) {
|
||||||
|
return []usagestats.EndpointStat{}, nil
|
||||||
|
}
|
||||||
func (s *stubUsageLogRepo) GetGroupStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.GroupStat, error) {
|
func (s *stubUsageLogRepo) GetGroupStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.GroupStat, error) {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -81,6 +81,15 @@ type ModelStat struct {
|
|||||||
ActualCost float64 `json:"actual_cost"` // 实际扣除
|
ActualCost float64 `json:"actual_cost"` // 实际扣除
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// EndpointStat represents usage statistics for a single request endpoint.
|
||||||
|
type EndpointStat struct {
|
||||||
|
Endpoint string `json:"endpoint"`
|
||||||
|
Requests int64 `json:"requests"`
|
||||||
|
TotalTokens int64 `json:"total_tokens"`
|
||||||
|
Cost float64 `json:"cost"` // 标准计费
|
||||||
|
ActualCost float64 `json:"actual_cost"` // 实际扣除
|
||||||
|
}
|
||||||
|
|
||||||
// GroupStat represents usage statistics for a single group
|
// GroupStat represents usage statistics for a single group
|
||||||
type GroupStat struct {
|
type GroupStat struct {
|
||||||
GroupID int64 `json:"group_id"`
|
GroupID int64 `json:"group_id"`
|
||||||
@@ -179,15 +188,18 @@ type UsageLogFilters struct {
|
|||||||
|
|
||||||
// UsageStats represents usage statistics
|
// UsageStats represents usage statistics
|
||||||
type UsageStats struct {
|
type UsageStats struct {
|
||||||
TotalRequests int64 `json:"total_requests"`
|
TotalRequests int64 `json:"total_requests"`
|
||||||
TotalInputTokens int64 `json:"total_input_tokens"`
|
TotalInputTokens int64 `json:"total_input_tokens"`
|
||||||
TotalOutputTokens int64 `json:"total_output_tokens"`
|
TotalOutputTokens int64 `json:"total_output_tokens"`
|
||||||
TotalCacheTokens int64 `json:"total_cache_tokens"`
|
TotalCacheTokens int64 `json:"total_cache_tokens"`
|
||||||
TotalTokens int64 `json:"total_tokens"`
|
TotalTokens int64 `json:"total_tokens"`
|
||||||
TotalCost float64 `json:"total_cost"`
|
TotalCost float64 `json:"total_cost"`
|
||||||
TotalActualCost float64 `json:"total_actual_cost"`
|
TotalActualCost float64 `json:"total_actual_cost"`
|
||||||
TotalAccountCost *float64 `json:"total_account_cost,omitempty"`
|
TotalAccountCost *float64 `json:"total_account_cost,omitempty"`
|
||||||
AverageDurationMs float64 `json:"average_duration_ms"`
|
AverageDurationMs float64 `json:"average_duration_ms"`
|
||||||
|
Endpoints []EndpointStat `json:"endpoints,omitempty"`
|
||||||
|
UpstreamEndpoints []EndpointStat `json:"upstream_endpoints,omitempty"`
|
||||||
|
EndpointPaths []EndpointStat `json:"endpoint_paths,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// BatchUserUsageStats represents usage stats for a single user
|
// BatchUserUsageStats represents usage stats for a single user
|
||||||
@@ -254,7 +266,9 @@ type AccountUsageSummary struct {
|
|||||||
|
|
||||||
// AccountUsageStatsResponse represents the full usage statistics response for an account
|
// AccountUsageStatsResponse represents the full usage statistics response for an account
|
||||||
type AccountUsageStatsResponse struct {
|
type AccountUsageStatsResponse struct {
|
||||||
History []AccountUsageHistory `json:"history"`
|
History []AccountUsageHistory `json:"history"`
|
||||||
Summary AccountUsageSummary `json:"summary"`
|
Summary AccountUsageSummary `json:"summary"`
|
||||||
Models []ModelStat `json:"models"`
|
Models []ModelStat `json:"models"`
|
||||||
|
Endpoints []EndpointStat `json:"endpoints"`
|
||||||
|
UpstreamEndpoints []EndpointStat `json:"upstream_endpoints"`
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -28,7 +28,7 @@ import (
|
|||||||
gocache "github.com/patrickmn/go-cache"
|
gocache "github.com/patrickmn/go-cache"
|
||||||
)
|
)
|
||||||
|
|
||||||
const usageLogSelectColumns = "id, user_id, api_key_id, account_id, request_id, model, group_id, subscription_id, input_tokens, output_tokens, cache_creation_tokens, cache_read_tokens, cache_creation_5m_tokens, cache_creation_1h_tokens, input_cost, output_cost, cache_creation_cost, cache_read_cost, total_cost, actual_cost, rate_multiplier, account_rate_multiplier, billing_type, request_type, stream, openai_ws_mode, duration_ms, first_token_ms, user_agent, ip_address, image_count, image_size, media_type, service_tier, reasoning_effort, cache_ttl_overridden, created_at"
|
const usageLogSelectColumns = "id, user_id, api_key_id, account_id, request_id, model, group_id, subscription_id, input_tokens, output_tokens, cache_creation_tokens, cache_read_tokens, cache_creation_5m_tokens, cache_creation_1h_tokens, input_cost, output_cost, cache_creation_cost, cache_read_cost, total_cost, actual_cost, rate_multiplier, account_rate_multiplier, billing_type, request_type, stream, openai_ws_mode, duration_ms, first_token_ms, user_agent, ip_address, image_count, image_size, media_type, service_tier, reasoning_effort, inbound_endpoint, upstream_endpoint, cache_ttl_overridden, created_at"
|
||||||
|
|
||||||
var usageLogInsertArgTypes = [...]string{
|
var usageLogInsertArgTypes = [...]string{
|
||||||
"bigint",
|
"bigint",
|
||||||
@@ -65,6 +65,8 @@ var usageLogInsertArgTypes = [...]string{
|
|||||||
"text",
|
"text",
|
||||||
"text",
|
"text",
|
||||||
"text",
|
"text",
|
||||||
|
"text",
|
||||||
|
"text",
|
||||||
"boolean",
|
"boolean",
|
||||||
"timestamptz",
|
"timestamptz",
|
||||||
}
|
}
|
||||||
@@ -304,6 +306,8 @@ func (r *usageLogRepository) createSingle(ctx context.Context, sqlq sqlExecutor,
|
|||||||
media_type,
|
media_type,
|
||||||
service_tier,
|
service_tier,
|
||||||
reasoning_effort,
|
reasoning_effort,
|
||||||
|
inbound_endpoint,
|
||||||
|
upstream_endpoint,
|
||||||
cache_ttl_overridden,
|
cache_ttl_overridden,
|
||||||
created_at
|
created_at
|
||||||
) VALUES (
|
) VALUES (
|
||||||
@@ -312,7 +316,7 @@ func (r *usageLogRepository) createSingle(ctx context.Context, sqlq sqlExecutor,
|
|||||||
$8, $9, $10, $11,
|
$8, $9, $10, $11,
|
||||||
$12, $13,
|
$12, $13,
|
||||||
$14, $15, $16, $17, $18, $19,
|
$14, $15, $16, $17, $18, $19,
|
||||||
$20, $21, $22, $23, $24, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36
|
$20, $21, $22, $23, $24, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36, $37, $38
|
||||||
)
|
)
|
||||||
ON CONFLICT (request_id, api_key_id) DO NOTHING
|
ON CONFLICT (request_id, api_key_id) DO NOTHING
|
||||||
RETURNING id, created_at
|
RETURNING id, created_at
|
||||||
@@ -732,11 +736,13 @@ func buildUsageLogBatchInsertQuery(keys []string, preparedByKey map[string]usage
|
|||||||
media_type,
|
media_type,
|
||||||
service_tier,
|
service_tier,
|
||||||
reasoning_effort,
|
reasoning_effort,
|
||||||
|
inbound_endpoint,
|
||||||
|
upstream_endpoint,
|
||||||
cache_ttl_overridden,
|
cache_ttl_overridden,
|
||||||
created_at
|
created_at
|
||||||
) AS (VALUES `)
|
) AS (VALUES `)
|
||||||
|
|
||||||
args := make([]any, 0, len(keys)*37)
|
args := make([]any, 0, len(keys)*38)
|
||||||
argPos := 1
|
argPos := 1
|
||||||
for idx, key := range keys {
|
for idx, key := range keys {
|
||||||
if idx > 0 {
|
if idx > 0 {
|
||||||
@@ -799,6 +805,8 @@ func buildUsageLogBatchInsertQuery(keys []string, preparedByKey map[string]usage
|
|||||||
media_type,
|
media_type,
|
||||||
service_tier,
|
service_tier,
|
||||||
reasoning_effort,
|
reasoning_effort,
|
||||||
|
inbound_endpoint,
|
||||||
|
upstream_endpoint,
|
||||||
cache_ttl_overridden,
|
cache_ttl_overridden,
|
||||||
created_at
|
created_at
|
||||||
)
|
)
|
||||||
@@ -837,6 +845,8 @@ func buildUsageLogBatchInsertQuery(keys []string, preparedByKey map[string]usage
|
|||||||
media_type,
|
media_type,
|
||||||
service_tier,
|
service_tier,
|
||||||
reasoning_effort,
|
reasoning_effort,
|
||||||
|
inbound_endpoint,
|
||||||
|
upstream_endpoint,
|
||||||
cache_ttl_overridden,
|
cache_ttl_overridden,
|
||||||
created_at
|
created_at
|
||||||
FROM input
|
FROM input
|
||||||
@@ -915,11 +925,13 @@ func buildUsageLogBestEffortInsertQuery(preparedList []usageLogInsertPrepared) (
|
|||||||
media_type,
|
media_type,
|
||||||
service_tier,
|
service_tier,
|
||||||
reasoning_effort,
|
reasoning_effort,
|
||||||
|
inbound_endpoint,
|
||||||
|
upstream_endpoint,
|
||||||
cache_ttl_overridden,
|
cache_ttl_overridden,
|
||||||
created_at
|
created_at
|
||||||
) AS (VALUES `)
|
) AS (VALUES `)
|
||||||
|
|
||||||
args := make([]any, 0, len(preparedList)*36)
|
args := make([]any, 0, len(preparedList)*38)
|
||||||
argPos := 1
|
argPos := 1
|
||||||
for idx, prepared := range preparedList {
|
for idx, prepared := range preparedList {
|
||||||
if idx > 0 {
|
if idx > 0 {
|
||||||
@@ -979,6 +991,8 @@ func buildUsageLogBestEffortInsertQuery(preparedList []usageLogInsertPrepared) (
|
|||||||
media_type,
|
media_type,
|
||||||
service_tier,
|
service_tier,
|
||||||
reasoning_effort,
|
reasoning_effort,
|
||||||
|
inbound_endpoint,
|
||||||
|
upstream_endpoint,
|
||||||
cache_ttl_overridden,
|
cache_ttl_overridden,
|
||||||
created_at
|
created_at
|
||||||
)
|
)
|
||||||
@@ -1017,6 +1031,8 @@ func buildUsageLogBestEffortInsertQuery(preparedList []usageLogInsertPrepared) (
|
|||||||
media_type,
|
media_type,
|
||||||
service_tier,
|
service_tier,
|
||||||
reasoning_effort,
|
reasoning_effort,
|
||||||
|
inbound_endpoint,
|
||||||
|
upstream_endpoint,
|
||||||
cache_ttl_overridden,
|
cache_ttl_overridden,
|
||||||
created_at
|
created_at
|
||||||
FROM input
|
FROM input
|
||||||
@@ -1063,6 +1079,8 @@ func execUsageLogInsertNoResult(ctx context.Context, sqlq sqlExecutor, prepared
|
|||||||
media_type,
|
media_type,
|
||||||
service_tier,
|
service_tier,
|
||||||
reasoning_effort,
|
reasoning_effort,
|
||||||
|
inbound_endpoint,
|
||||||
|
upstream_endpoint,
|
||||||
cache_ttl_overridden,
|
cache_ttl_overridden,
|
||||||
created_at
|
created_at
|
||||||
) VALUES (
|
) VALUES (
|
||||||
@@ -1071,7 +1089,7 @@ func execUsageLogInsertNoResult(ctx context.Context, sqlq sqlExecutor, prepared
|
|||||||
$8, $9, $10, $11,
|
$8, $9, $10, $11,
|
||||||
$12, $13,
|
$12, $13,
|
||||||
$14, $15, $16, $17, $18, $19,
|
$14, $15, $16, $17, $18, $19,
|
||||||
$20, $21, $22, $23, $24, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36
|
$20, $21, $22, $23, $24, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36, $37, $38
|
||||||
)
|
)
|
||||||
ON CONFLICT (request_id, api_key_id) DO NOTHING
|
ON CONFLICT (request_id, api_key_id) DO NOTHING
|
||||||
`, prepared.args...)
|
`, prepared.args...)
|
||||||
@@ -1101,6 +1119,8 @@ func prepareUsageLogInsert(log *service.UsageLog) usageLogInsertPrepared {
|
|||||||
mediaType := nullString(log.MediaType)
|
mediaType := nullString(log.MediaType)
|
||||||
serviceTier := nullString(log.ServiceTier)
|
serviceTier := nullString(log.ServiceTier)
|
||||||
reasoningEffort := nullString(log.ReasoningEffort)
|
reasoningEffort := nullString(log.ReasoningEffort)
|
||||||
|
inboundEndpoint := nullString(log.InboundEndpoint)
|
||||||
|
upstreamEndpoint := nullString(log.UpstreamEndpoint)
|
||||||
|
|
||||||
var requestIDArg any
|
var requestIDArg any
|
||||||
if requestID != "" {
|
if requestID != "" {
|
||||||
@@ -1147,6 +1167,8 @@ func prepareUsageLogInsert(log *service.UsageLog) usageLogInsertPrepared {
|
|||||||
mediaType,
|
mediaType,
|
||||||
serviceTier,
|
serviceTier,
|
||||||
reasoningEffort,
|
reasoningEffort,
|
||||||
|
inboundEndpoint,
|
||||||
|
upstreamEndpoint,
|
||||||
log.CacheTTLOverridden,
|
log.CacheTTLOverridden,
|
||||||
createdAt,
|
createdAt,
|
||||||
},
|
},
|
||||||
@@ -2505,7 +2527,7 @@ func (r *usageLogRepository) ListWithFilters(ctx context.Context, params paginat
|
|||||||
args = append(args, *filters.StartTime)
|
args = append(args, *filters.StartTime)
|
||||||
}
|
}
|
||||||
if filters.EndTime != nil {
|
if filters.EndTime != nil {
|
||||||
conditions = append(conditions, fmt.Sprintf("created_at <= $%d", len(args)+1))
|
conditions = append(conditions, fmt.Sprintf("created_at < $%d", len(args)+1))
|
||||||
args = append(args, *filters.EndTime)
|
args = append(args, *filters.EndTime)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -3040,7 +3062,7 @@ func (r *usageLogRepository) GetStatsWithFilters(ctx context.Context, filters Us
|
|||||||
args = append(args, *filters.StartTime)
|
args = append(args, *filters.StartTime)
|
||||||
}
|
}
|
||||||
if filters.EndTime != nil {
|
if filters.EndTime != nil {
|
||||||
conditions = append(conditions, fmt.Sprintf("created_at <= $%d", len(args)+1))
|
conditions = append(conditions, fmt.Sprintf("created_at < $%d", len(args)+1))
|
||||||
args = append(args, *filters.EndTime)
|
args = append(args, *filters.EndTime)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -3080,6 +3102,35 @@ func (r *usageLogRepository) GetStatsWithFilters(ctx context.Context, filters Us
|
|||||||
stats.TotalAccountCost = &totalAccountCost
|
stats.TotalAccountCost = &totalAccountCost
|
||||||
}
|
}
|
||||||
stats.TotalTokens = stats.TotalInputTokens + stats.TotalOutputTokens + stats.TotalCacheTokens
|
stats.TotalTokens = stats.TotalInputTokens + stats.TotalOutputTokens + stats.TotalCacheTokens
|
||||||
|
|
||||||
|
start := time.Unix(0, 0).UTC()
|
||||||
|
if filters.StartTime != nil {
|
||||||
|
start = *filters.StartTime
|
||||||
|
}
|
||||||
|
end := time.Now().UTC()
|
||||||
|
if filters.EndTime != nil {
|
||||||
|
end = *filters.EndTime
|
||||||
|
}
|
||||||
|
|
||||||
|
endpoints, endpointErr := r.GetEndpointStatsWithFilters(ctx, start, end, filters.UserID, filters.APIKeyID, filters.AccountID, filters.GroupID, filters.Model, filters.RequestType, filters.Stream, filters.BillingType)
|
||||||
|
if endpointErr != nil {
|
||||||
|
logger.LegacyPrintf("repository.usage_log", "GetEndpointStatsWithFilters failed in GetStatsWithFilters: %v", endpointErr)
|
||||||
|
endpoints = []EndpointStat{}
|
||||||
|
}
|
||||||
|
upstreamEndpoints, upstreamEndpointErr := r.GetUpstreamEndpointStatsWithFilters(ctx, start, end, filters.UserID, filters.APIKeyID, filters.AccountID, filters.GroupID, filters.Model, filters.RequestType, filters.Stream, filters.BillingType)
|
||||||
|
if upstreamEndpointErr != nil {
|
||||||
|
logger.LegacyPrintf("repository.usage_log", "GetUpstreamEndpointStatsWithFilters failed in GetStatsWithFilters: %v", upstreamEndpointErr)
|
||||||
|
upstreamEndpoints = []EndpointStat{}
|
||||||
|
}
|
||||||
|
endpointPaths, endpointPathErr := r.getEndpointPathStatsWithFilters(ctx, start, end, filters.UserID, filters.APIKeyID, filters.AccountID, filters.GroupID, filters.Model, filters.RequestType, filters.Stream, filters.BillingType)
|
||||||
|
if endpointPathErr != nil {
|
||||||
|
logger.LegacyPrintf("repository.usage_log", "getEndpointPathStatsWithFilters failed in GetStatsWithFilters: %v", endpointPathErr)
|
||||||
|
endpointPaths = []EndpointStat{}
|
||||||
|
}
|
||||||
|
stats.Endpoints = endpoints
|
||||||
|
stats.UpstreamEndpoints = upstreamEndpoints
|
||||||
|
stats.EndpointPaths = endpointPaths
|
||||||
|
|
||||||
return stats, nil
|
return stats, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -3092,6 +3143,163 @@ type AccountUsageSummary = usagestats.AccountUsageSummary
|
|||||||
// AccountUsageStatsResponse represents the full usage statistics response for an account
|
// AccountUsageStatsResponse represents the full usage statistics response for an account
|
||||||
type AccountUsageStatsResponse = usagestats.AccountUsageStatsResponse
|
type AccountUsageStatsResponse = usagestats.AccountUsageStatsResponse
|
||||||
|
|
||||||
|
// EndpointStat represents endpoint usage statistics row.
|
||||||
|
type EndpointStat = usagestats.EndpointStat
|
||||||
|
|
||||||
|
func (r *usageLogRepository) getEndpointStatsByColumnWithFilters(ctx context.Context, endpointColumn string, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) (results []EndpointStat, err error) {
|
||||||
|
actualCostExpr := "COALESCE(SUM(actual_cost), 0) as actual_cost"
|
||||||
|
if accountID > 0 && userID == 0 && apiKeyID == 0 {
|
||||||
|
actualCostExpr = "COALESCE(SUM(total_cost * COALESCE(account_rate_multiplier, 1)), 0) as actual_cost"
|
||||||
|
}
|
||||||
|
|
||||||
|
query := fmt.Sprintf(`
|
||||||
|
SELECT
|
||||||
|
COALESCE(NULLIF(TRIM(%s), ''), 'unknown') AS endpoint,
|
||||||
|
COUNT(*) AS requests,
|
||||||
|
COALESCE(SUM(input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens), 0) AS total_tokens,
|
||||||
|
COALESCE(SUM(total_cost), 0) as cost,
|
||||||
|
%s
|
||||||
|
FROM usage_logs
|
||||||
|
WHERE created_at >= $1 AND created_at < $2
|
||||||
|
`, endpointColumn, actualCostExpr)
|
||||||
|
|
||||||
|
args := []any{startTime, endTime}
|
||||||
|
if userID > 0 {
|
||||||
|
query += fmt.Sprintf(" AND user_id = $%d", len(args)+1)
|
||||||
|
args = append(args, userID)
|
||||||
|
}
|
||||||
|
if apiKeyID > 0 {
|
||||||
|
query += fmt.Sprintf(" AND api_key_id = $%d", len(args)+1)
|
||||||
|
args = append(args, apiKeyID)
|
||||||
|
}
|
||||||
|
if accountID > 0 {
|
||||||
|
query += fmt.Sprintf(" AND account_id = $%d", len(args)+1)
|
||||||
|
args = append(args, accountID)
|
||||||
|
}
|
||||||
|
if groupID > 0 {
|
||||||
|
query += fmt.Sprintf(" AND group_id = $%d", len(args)+1)
|
||||||
|
args = append(args, groupID)
|
||||||
|
}
|
||||||
|
if model != "" {
|
||||||
|
query += fmt.Sprintf(" AND model = $%d", len(args)+1)
|
||||||
|
args = append(args, model)
|
||||||
|
}
|
||||||
|
query, args = appendRequestTypeOrStreamQueryFilter(query, args, requestType, stream)
|
||||||
|
if billingType != nil {
|
||||||
|
query += fmt.Sprintf(" AND billing_type = $%d", len(args)+1)
|
||||||
|
args = append(args, int16(*billingType))
|
||||||
|
}
|
||||||
|
query += " GROUP BY endpoint ORDER BY requests DESC"
|
||||||
|
|
||||||
|
rows, err := r.sql.QueryContext(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if closeErr := rows.Close(); closeErr != nil && err == nil {
|
||||||
|
err = closeErr
|
||||||
|
results = nil
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
results = make([]EndpointStat, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
var row EndpointStat
|
||||||
|
if err := rows.Scan(&row.Endpoint, &row.Requests, &row.TotalTokens, &row.Cost, &row.ActualCost); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
results = append(results, row)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *usageLogRepository) getEndpointPathStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) (results []EndpointStat, err error) {
|
||||||
|
actualCostExpr := "COALESCE(SUM(actual_cost), 0) as actual_cost"
|
||||||
|
if accountID > 0 && userID == 0 && apiKeyID == 0 {
|
||||||
|
actualCostExpr = "COALESCE(SUM(total_cost * COALESCE(account_rate_multiplier, 1)), 0) as actual_cost"
|
||||||
|
}
|
||||||
|
|
||||||
|
query := fmt.Sprintf(`
|
||||||
|
SELECT
|
||||||
|
CONCAT(
|
||||||
|
COALESCE(NULLIF(TRIM(inbound_endpoint), ''), 'unknown'),
|
||||||
|
' -> ',
|
||||||
|
COALESCE(NULLIF(TRIM(upstream_endpoint), ''), 'unknown')
|
||||||
|
) AS endpoint,
|
||||||
|
COUNT(*) AS requests,
|
||||||
|
COALESCE(SUM(input_tokens + output_tokens + cache_creation_tokens + cache_read_tokens), 0) AS total_tokens,
|
||||||
|
COALESCE(SUM(total_cost), 0) as cost,
|
||||||
|
%s
|
||||||
|
FROM usage_logs
|
||||||
|
WHERE created_at >= $1 AND created_at < $2
|
||||||
|
`, actualCostExpr)
|
||||||
|
|
||||||
|
args := []any{startTime, endTime}
|
||||||
|
if userID > 0 {
|
||||||
|
query += fmt.Sprintf(" AND user_id = $%d", len(args)+1)
|
||||||
|
args = append(args, userID)
|
||||||
|
}
|
||||||
|
if apiKeyID > 0 {
|
||||||
|
query += fmt.Sprintf(" AND api_key_id = $%d", len(args)+1)
|
||||||
|
args = append(args, apiKeyID)
|
||||||
|
}
|
||||||
|
if accountID > 0 {
|
||||||
|
query += fmt.Sprintf(" AND account_id = $%d", len(args)+1)
|
||||||
|
args = append(args, accountID)
|
||||||
|
}
|
||||||
|
if groupID > 0 {
|
||||||
|
query += fmt.Sprintf(" AND group_id = $%d", len(args)+1)
|
||||||
|
args = append(args, groupID)
|
||||||
|
}
|
||||||
|
if model != "" {
|
||||||
|
query += fmt.Sprintf(" AND model = $%d", len(args)+1)
|
||||||
|
args = append(args, model)
|
||||||
|
}
|
||||||
|
query, args = appendRequestTypeOrStreamQueryFilter(query, args, requestType, stream)
|
||||||
|
if billingType != nil {
|
||||||
|
query += fmt.Sprintf(" AND billing_type = $%d", len(args)+1)
|
||||||
|
args = append(args, int16(*billingType))
|
||||||
|
}
|
||||||
|
query += " GROUP BY endpoint ORDER BY requests DESC"
|
||||||
|
|
||||||
|
rows, err := r.sql.QueryContext(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if closeErr := rows.Close(); closeErr != nil && err == nil {
|
||||||
|
err = closeErr
|
||||||
|
results = nil
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
results = make([]EndpointStat, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
var row EndpointStat
|
||||||
|
if err := rows.Scan(&row.Endpoint, &row.Requests, &row.TotalTokens, &row.Cost, &row.ActualCost); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
results = append(results, row)
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetEndpointStatsWithFilters returns inbound endpoint statistics with optional filters.
// It delegates to the shared column-based aggregation, grouping on the
// client-facing inbound_endpoint column.
func (r *usageLogRepository) GetEndpointStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]EndpointStat, error) {
	return r.getEndpointStatsByColumnWithFilters(ctx, "inbound_endpoint", startTime, endTime, userID, apiKeyID, accountID, groupID, model, requestType, stream, billingType)
}
|
||||||
|
|
||||||
|
// GetUpstreamEndpointStatsWithFilters returns upstream endpoint statistics with optional filters.
// It delegates to the shared column-based aggregation, grouping on the
// upstream_endpoint column.
func (r *usageLogRepository) GetUpstreamEndpointStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]EndpointStat, error) {
	return r.getEndpointStatsByColumnWithFilters(ctx, "upstream_endpoint", startTime, endTime, userID, apiKeyID, accountID, groupID, model, requestType, stream, billingType)
}
|
||||||
|
|
||||||
// GetAccountUsageStats returns comprehensive usage statistics for an account over a time range
|
// GetAccountUsageStats returns comprehensive usage statistics for an account over a time range
|
||||||
func (r *usageLogRepository) GetAccountUsageStats(ctx context.Context, accountID int64, startTime, endTime time.Time) (resp *AccountUsageStatsResponse, err error) {
|
func (r *usageLogRepository) GetAccountUsageStats(ctx context.Context, accountID int64, startTime, endTime time.Time) (resp *AccountUsageStatsResponse, err error) {
|
||||||
daysCount := int(endTime.Sub(startTime).Hours()/24) + 1
|
daysCount := int(endTime.Sub(startTime).Hours()/24) + 1
|
||||||
@@ -3254,11 +3462,23 @@ func (r *usageLogRepository) GetAccountUsageStats(ctx context.Context, accountID
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
models = []ModelStat{}
|
models = []ModelStat{}
|
||||||
}
|
}
|
||||||
|
endpoints, endpointErr := r.GetEndpointStatsWithFilters(ctx, startTime, endTime, 0, 0, accountID, 0, "", nil, nil, nil)
|
||||||
|
if endpointErr != nil {
|
||||||
|
logger.LegacyPrintf("repository.usage_log", "GetEndpointStatsWithFilters failed in GetAccountUsageStats: %v", endpointErr)
|
||||||
|
endpoints = []EndpointStat{}
|
||||||
|
}
|
||||||
|
upstreamEndpoints, upstreamEndpointErr := r.GetUpstreamEndpointStatsWithFilters(ctx, startTime, endTime, 0, 0, accountID, 0, "", nil, nil, nil)
|
||||||
|
if upstreamEndpointErr != nil {
|
||||||
|
logger.LegacyPrintf("repository.usage_log", "GetUpstreamEndpointStatsWithFilters failed in GetAccountUsageStats: %v", upstreamEndpointErr)
|
||||||
|
upstreamEndpoints = []EndpointStat{}
|
||||||
|
}
|
||||||
|
|
||||||
resp = &AccountUsageStatsResponse{
|
resp = &AccountUsageStatsResponse{
|
||||||
History: history,
|
History: history,
|
||||||
Summary: summary,
|
Summary: summary,
|
||||||
Models: models,
|
Models: models,
|
||||||
|
Endpoints: endpoints,
|
||||||
|
UpstreamEndpoints: upstreamEndpoints,
|
||||||
}
|
}
|
||||||
return resp, nil
|
return resp, nil
|
||||||
}
|
}
|
||||||
@@ -3541,6 +3761,8 @@ func scanUsageLog(scanner interface{ Scan(...any) error }) (*service.UsageLog, e
|
|||||||
mediaType sql.NullString
|
mediaType sql.NullString
|
||||||
serviceTier sql.NullString
|
serviceTier sql.NullString
|
||||||
reasoningEffort sql.NullString
|
reasoningEffort sql.NullString
|
||||||
|
inboundEndpoint sql.NullString
|
||||||
|
upstreamEndpoint sql.NullString
|
||||||
cacheTTLOverridden bool
|
cacheTTLOverridden bool
|
||||||
createdAt time.Time
|
createdAt time.Time
|
||||||
)
|
)
|
||||||
@@ -3581,6 +3803,8 @@ func scanUsageLog(scanner interface{ Scan(...any) error }) (*service.UsageLog, e
|
|||||||
&mediaType,
|
&mediaType,
|
||||||
&serviceTier,
|
&serviceTier,
|
||||||
&reasoningEffort,
|
&reasoningEffort,
|
||||||
|
&inboundEndpoint,
|
||||||
|
&upstreamEndpoint,
|
||||||
&cacheTTLOverridden,
|
&cacheTTLOverridden,
|
||||||
&createdAt,
|
&createdAt,
|
||||||
); err != nil {
|
); err != nil {
|
||||||
@@ -3656,6 +3880,12 @@ func scanUsageLog(scanner interface{ Scan(...any) error }) (*service.UsageLog, e
|
|||||||
if reasoningEffort.Valid {
|
if reasoningEffort.Valid {
|
||||||
log.ReasoningEffort = &reasoningEffort.String
|
log.ReasoningEffort = &reasoningEffort.String
|
||||||
}
|
}
|
||||||
|
if inboundEndpoint.Valid {
|
||||||
|
log.InboundEndpoint = &inboundEndpoint.String
|
||||||
|
}
|
||||||
|
if upstreamEndpoint.Valid {
|
||||||
|
log.UpstreamEndpoint = &upstreamEndpoint.String
|
||||||
|
}
|
||||||
|
|
||||||
return log, nil
|
return log, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -73,6 +73,8 @@ func TestUsageLogRepositoryCreateSyncRequestTypeAndLegacyFields(t *testing.T) {
|
|||||||
sqlmock.AnyArg(), // media_type
|
sqlmock.AnyArg(), // media_type
|
||||||
sqlmock.AnyArg(), // service_tier
|
sqlmock.AnyArg(), // service_tier
|
||||||
sqlmock.AnyArg(), // reasoning_effort
|
sqlmock.AnyArg(), // reasoning_effort
|
||||||
|
sqlmock.AnyArg(), // inbound_endpoint
|
||||||
|
sqlmock.AnyArg(), // upstream_endpoint
|
||||||
log.CacheTTLOverridden,
|
log.CacheTTLOverridden,
|
||||||
createdAt,
|
createdAt,
|
||||||
).
|
).
|
||||||
@@ -141,6 +143,8 @@ func TestUsageLogRepositoryCreate_PersistsServiceTier(t *testing.T) {
|
|||||||
sqlmock.AnyArg(),
|
sqlmock.AnyArg(),
|
||||||
serviceTier,
|
serviceTier,
|
||||||
sqlmock.AnyArg(),
|
sqlmock.AnyArg(),
|
||||||
|
sqlmock.AnyArg(),
|
||||||
|
sqlmock.AnyArg(),
|
||||||
log.CacheTTLOverridden,
|
log.CacheTTLOverridden,
|
||||||
createdAt,
|
createdAt,
|
||||||
).
|
).
|
||||||
@@ -376,6 +380,8 @@ func TestScanUsageLogRequestTypeAndLegacyFallback(t *testing.T) {
|
|||||||
sql.NullString{},
|
sql.NullString{},
|
||||||
sql.NullString{Valid: true, String: "priority"},
|
sql.NullString{Valid: true, String: "priority"},
|
||||||
sql.NullString{},
|
sql.NullString{},
|
||||||
|
sql.NullString{},
|
||||||
|
sql.NullString{},
|
||||||
false,
|
false,
|
||||||
now,
|
now,
|
||||||
}})
|
}})
|
||||||
@@ -415,6 +421,8 @@ func TestScanUsageLogRequestTypeAndLegacyFallback(t *testing.T) {
|
|||||||
sql.NullString{},
|
sql.NullString{},
|
||||||
sql.NullString{Valid: true, String: "flex"},
|
sql.NullString{Valid: true, String: "flex"},
|
||||||
sql.NullString{},
|
sql.NullString{},
|
||||||
|
sql.NullString{},
|
||||||
|
sql.NullString{},
|
||||||
false,
|
false,
|
||||||
now,
|
now,
|
||||||
}})
|
}})
|
||||||
@@ -454,6 +462,8 @@ func TestScanUsageLogRequestTypeAndLegacyFallback(t *testing.T) {
|
|||||||
sql.NullString{},
|
sql.NullString{},
|
||||||
sql.NullString{Valid: true, String: "priority"},
|
sql.NullString{Valid: true, String: "priority"},
|
||||||
sql.NullString{},
|
sql.NullString{},
|
||||||
|
sql.NullString{},
|
||||||
|
sql.NullString{},
|
||||||
false,
|
false,
|
||||||
now,
|
now,
|
||||||
}})
|
}})
|
||||||
|
|||||||
@@ -1624,6 +1624,14 @@ func (r *stubUsageLogRepo) GetModelStatsWithFilters(ctx context.Context, startTi
|
|||||||
return nil, errors.New("not implemented")
|
return nil, errors.New("not implemented")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetEndpointStatsWithFilters is a test stub; it always reports "not implemented".
func (r *stubUsageLogRepo) GetEndpointStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]usagestats.EndpointStat, error) {
	return nil, errors.New("not implemented")
}
|
||||||
|
|
||||||
|
// GetUpstreamEndpointStatsWithFilters is a test stub; it always reports "not implemented".
func (r *stubUsageLogRepo) GetUpstreamEndpointStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]usagestats.EndpointStat, error) {
	return nil, errors.New("not implemented")
}
|
||||||
|
|
||||||
func (r *stubUsageLogRepo) GetGroupStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.GroupStat, error) {
|
func (r *stubUsageLogRepo) GetGroupStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.GroupStat, error) {
|
||||||
return nil, errors.New("not implemented")
|
return nil, errors.New("not implemented")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -45,6 +45,8 @@ type UsageLogRepository interface {
|
|||||||
GetDashboardStats(ctx context.Context) (*usagestats.DashboardStats, error)
|
GetDashboardStats(ctx context.Context) (*usagestats.DashboardStats, error)
|
||||||
GetUsageTrendWithFilters(ctx context.Context, startTime, endTime time.Time, granularity string, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]usagestats.TrendDataPoint, error)
|
GetUsageTrendWithFilters(ctx context.Context, startTime, endTime time.Time, granularity string, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]usagestats.TrendDataPoint, error)
|
||||||
GetModelStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.ModelStat, error)
|
GetModelStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.ModelStat, error)
|
||||||
|
GetEndpointStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]usagestats.EndpointStat, error)
|
||||||
|
GetUpstreamEndpointStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, model string, requestType *int16, stream *bool, billingType *int8) ([]usagestats.EndpointStat, error)
|
||||||
GetGroupStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.GroupStat, error)
|
GetGroupStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) ([]usagestats.GroupStat, error)
|
||||||
GetAPIKeyUsageTrend(ctx context.Context, startTime, endTime time.Time, granularity string, limit int) ([]usagestats.APIKeyUsageTrendPoint, error)
|
GetAPIKeyUsageTrend(ctx context.Context, startTime, endTime time.Time, granularity string, limit int) ([]usagestats.APIKeyUsageTrendPoint, error)
|
||||||
GetUserUsageTrend(ctx context.Context, startTime, endTime time.Time, granularity string, limit int) ([]usagestats.UserUsageTrendPoint, error)
|
GetUserUsageTrend(ctx context.Context, startTime, endTime time.Time, granularity string, limit int) ([]usagestats.UserUsageTrendPoint, error)
|
||||||
|
|||||||
@@ -226,6 +226,41 @@ func TestOpenAIGatewayServiceRecordUsage_UsesUserSpecificGroupRate(t *testing.T)
|
|||||||
require.Equal(t, 1, userRepo.deductCalls)
|
require.Equal(t, 1, userRepo.deductCalls)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TestOpenAIGatewayServiceRecordUsage_IncludesEndpointMetadata verifies that
// RecordUsage copies the inbound/upstream endpoint paths from the input into
// the persisted usage log, trimming surrounding whitespace along the way.
func TestOpenAIGatewayServiceRecordUsage_IncludesEndpointMetadata(t *testing.T) {
	usageRepo := &openAIRecordUsageLogRepoStub{inserted: true}
	userRepo := &openAIRecordUsageUserRepoStub{}
	subRepo := &openAIRecordUsageSubRepoStub{}
	rateRepo := &openAIUserGroupRateRepoStub{}
	svc := newOpenAIRecordUsageServiceForTest(usageRepo, userRepo, subRepo, rateRepo)

	// Endpoints are deliberately padded with spaces to exercise trimming.
	err := svc.RecordUsage(context.Background(), &OpenAIRecordUsageInput{
		Result: &OpenAIForwardResult{
			RequestID: "resp_endpoint_metadata",
			Usage: OpenAIUsage{
				InputTokens:  8,
				OutputTokens: 2,
			},
			Model:    "gpt-5.1",
			Duration: time.Second,
		},
		APIKey: &APIKey{
			ID:    1002,
			Group: &Group{RateMultiplier: 1},
		},
		User:    &User{ID: 2002},
		Account: &Account{ID: 3002},
		InboundEndpoint:  " /v1/chat/completions ",
		UpstreamEndpoint: " /v1/responses ",
	})

	require.NoError(t, err)
	require.NotNil(t, usageRepo.lastLog)
	// The stored values must be non-nil and whitespace-trimmed.
	require.NotNil(t, usageRepo.lastLog.InboundEndpoint)
	require.Equal(t, "/v1/chat/completions", *usageRepo.lastLog.InboundEndpoint)
	require.NotNil(t, usageRepo.lastLog.UpstreamEndpoint)
	require.Equal(t, "/v1/responses", *usageRepo.lastLog.UpstreamEndpoint)
}
|
||||||
|
|
||||||
func TestOpenAIGatewayServiceRecordUsage_FallsBackToGroupDefaultRateOnResolverError(t *testing.T) {
|
func TestOpenAIGatewayServiceRecordUsage_FallsBackToGroupDefaultRateOnResolverError(t *testing.T) {
|
||||||
groupID := int64(12)
|
groupID := int64(12)
|
||||||
groupRate := 1.6
|
groupRate := 1.6
|
||||||
|
|||||||
@@ -4028,6 +4028,8 @@ type OpenAIRecordUsageInput struct {
|
|||||||
User *User
|
User *User
|
||||||
Account *Account
|
Account *Account
|
||||||
Subscription *UserSubscription
|
Subscription *UserSubscription
|
||||||
|
InboundEndpoint string
|
||||||
|
UpstreamEndpoint string
|
||||||
UserAgent string // 请求的 User-Agent
|
UserAgent string // 请求的 User-Agent
|
||||||
IPAddress string // 请求的客户端 IP 地址
|
IPAddress string // 请求的客户端 IP 地址
|
||||||
RequestPayloadHash string
|
RequestPayloadHash string
|
||||||
@@ -4106,6 +4108,8 @@ func (s *OpenAIGatewayService) RecordUsage(ctx context.Context, input *OpenAIRec
|
|||||||
Model: billingModel,
|
Model: billingModel,
|
||||||
ServiceTier: result.ServiceTier,
|
ServiceTier: result.ServiceTier,
|
||||||
ReasoningEffort: result.ReasoningEffort,
|
ReasoningEffort: result.ReasoningEffort,
|
||||||
|
InboundEndpoint: optionalTrimmedStringPtr(input.InboundEndpoint),
|
||||||
|
UpstreamEndpoint: optionalTrimmedStringPtr(input.UpstreamEndpoint),
|
||||||
InputTokens: actualInputTokens,
|
InputTokens: actualInputTokens,
|
||||||
OutputTokens: result.Usage.OutputTokens,
|
OutputTokens: result.Usage.OutputTokens,
|
||||||
CacheCreationTokens: result.Usage.CacheCreationInputTokens,
|
CacheCreationTokens: result.Usage.CacheCreationInputTokens,
|
||||||
@@ -4125,7 +4129,6 @@ func (s *OpenAIGatewayService) RecordUsage(ctx context.Context, input *OpenAIRec
|
|||||||
FirstTokenMs: result.FirstTokenMs,
|
FirstTokenMs: result.FirstTokenMs,
|
||||||
CreatedAt: time.Now(),
|
CreatedAt: time.Now(),
|
||||||
}
|
}
|
||||||
|
|
||||||
// 添加 UserAgent
|
// 添加 UserAgent
|
||||||
if input.UserAgent != "" {
|
if input.UserAgent != "" {
|
||||||
usageLog.UserAgent = &input.UserAgent
|
usageLog.UserAgent = &input.UserAgent
|
||||||
@@ -4668,3 +4671,11 @@ func normalizeOpenAIReasoningEffort(raw string) string {
|
|||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// optionalTrimmedStringPtr returns a pointer to the whitespace-trimmed form of
// raw, or nil when trimming leaves an empty string. This maps "absent or
// blank" inputs to a nil pointer suitable for nullable database columns.
func optionalTrimmedStringPtr(raw string) *string {
	if trimmed := strings.TrimSpace(raw); trimmed != "" {
		return &trimmed
	}
	return nil
}
|
||||||
|
|||||||
@@ -103,6 +103,10 @@ type UsageLog struct {
|
|||||||
// ReasoningEffort is the request's reasoning effort level (OpenAI Responses API),
|
// ReasoningEffort is the request's reasoning effort level (OpenAI Responses API),
|
||||||
// e.g. "low" / "medium" / "high" / "xhigh". Nil means not provided / not applicable.
|
// e.g. "low" / "medium" / "high" / "xhigh". Nil means not provided / not applicable.
|
||||||
ReasoningEffort *string
|
ReasoningEffort *string
|
||||||
|
// InboundEndpoint is the client-facing API endpoint path, e.g. /v1/chat/completions.
|
||||||
|
InboundEndpoint *string
|
||||||
|
// UpstreamEndpoint is the normalized upstream endpoint path, e.g. /v1/responses.
|
||||||
|
UpstreamEndpoint *string
|
||||||
|
|
||||||
GroupID *int64
|
GroupID *int64
|
||||||
SubscriptionID *int64
|
SubscriptionID *int64
|
||||||
|
|||||||
5
backend/migrations/074_add_usage_log_endpoints.sql
Normal file
5
backend/migrations/074_add_usage_log_endpoints.sql
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
-- Add endpoint tracking fields to usage_logs.
-- inbound_endpoint: client-facing API route (e.g. /v1/chat/completions, /v1/messages, /v1/responses)
-- upstream_endpoint: normalized upstream route (e.g. /v1/responses)
-- Both columns are nullable; NULL/blank values are rendered as 'unknown' by
-- the reporting queries. IF NOT EXISTS keeps the migration idempotent.
ALTER TABLE usage_logs ADD COLUMN IF NOT EXISTS inbound_endpoint VARCHAR(128);
ALTER TABLE usage_logs ADD COLUMN IF NOT EXISTS upstream_endpoint VARCHAR(128);
|
||||||
@@ -5,6 +5,7 @@
|
|||||||
|
|
||||||
import { apiClient } from '../client'
|
import { apiClient } from '../client'
|
||||||
import type { AdminUsageLog, UsageQueryParams, PaginatedResponse, UsageRequestType } from '@/types'
|
import type { AdminUsageLog, UsageQueryParams, PaginatedResponse, UsageRequestType } from '@/types'
|
||||||
|
import type { EndpointStat } from '@/types'
|
||||||
|
|
||||||
// ==================== Types ====================
|
// ==================== Types ====================
|
||||||
|
|
||||||
@@ -18,6 +19,9 @@ export interface AdminUsageStatsResponse {
|
|||||||
total_actual_cost: number
|
total_actual_cost: number
|
||||||
total_account_cost?: number
|
total_account_cost?: number
|
||||||
average_duration_ms: number
|
average_duration_ms: number
|
||||||
|
endpoints?: EndpointStat[]
|
||||||
|
upstream_endpoints?: EndpointStat[]
|
||||||
|
endpoint_paths?: EndpointStat[]
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface SimpleUser {
|
export interface SimpleUser {
|
||||||
|
|||||||
@@ -446,6 +446,18 @@
|
|||||||
|
|
||||||
<!-- Model Distribution -->
|
<!-- Model Distribution -->
|
||||||
<ModelDistributionChart :model-stats="stats.models" :loading="false" />
|
<ModelDistributionChart :model-stats="stats.models" :loading="false" />
|
||||||
|
|
||||||
|
<EndpointDistributionChart
|
||||||
|
:endpoint-stats="stats.endpoints || []"
|
||||||
|
:loading="false"
|
||||||
|
:title="t('usage.inboundEndpoint')"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<EndpointDistributionChart
|
||||||
|
:endpoint-stats="stats.upstream_endpoints || []"
|
||||||
|
:loading="false"
|
||||||
|
:title="t('usage.upstreamEndpoint')"
|
||||||
|
/>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
<!-- No Data State -->
|
<!-- No Data State -->
|
||||||
@@ -489,6 +501,7 @@ import { Line } from 'vue-chartjs'
|
|||||||
import BaseDialog from '@/components/common/BaseDialog.vue'
|
import BaseDialog from '@/components/common/BaseDialog.vue'
|
||||||
import LoadingSpinner from '@/components/common/LoadingSpinner.vue'
|
import LoadingSpinner from '@/components/common/LoadingSpinner.vue'
|
||||||
import ModelDistributionChart from '@/components/charts/ModelDistributionChart.vue'
|
import ModelDistributionChart from '@/components/charts/ModelDistributionChart.vue'
|
||||||
|
import EndpointDistributionChart from '@/components/charts/EndpointDistributionChart.vue'
|
||||||
import Icon from '@/components/icons/Icon.vue'
|
import Icon from '@/components/icons/Icon.vue'
|
||||||
import { adminAPI } from '@/api/admin'
|
import { adminAPI } from '@/api/admin'
|
||||||
import type { Account, AccountUsageStatsResponse } from '@/types'
|
import type { Account, AccountUsageStatsResponse } from '@/types'
|
||||||
|
|||||||
@@ -410,6 +410,18 @@
|
|||||||
|
|
||||||
<!-- Model Distribution -->
|
<!-- Model Distribution -->
|
||||||
<ModelDistributionChart :model-stats="stats.models" :loading="false" />
|
<ModelDistributionChart :model-stats="stats.models" :loading="false" />
|
||||||
|
|
||||||
|
<EndpointDistributionChart
|
||||||
|
:endpoint-stats="stats.endpoints || []"
|
||||||
|
:loading="false"
|
||||||
|
:title="t('usage.inboundEndpoint')"
|
||||||
|
/>
|
||||||
|
|
||||||
|
<EndpointDistributionChart
|
||||||
|
:endpoint-stats="stats.upstream_endpoints || []"
|
||||||
|
:loading="false"
|
||||||
|
:title="t('usage.upstreamEndpoint')"
|
||||||
|
/>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
<!-- No Data State -->
|
<!-- No Data State -->
|
||||||
@@ -453,6 +465,7 @@ import { Line } from 'vue-chartjs'
|
|||||||
import BaseDialog from '@/components/common/BaseDialog.vue'
|
import BaseDialog from '@/components/common/BaseDialog.vue'
|
||||||
import LoadingSpinner from '@/components/common/LoadingSpinner.vue'
|
import LoadingSpinner from '@/components/common/LoadingSpinner.vue'
|
||||||
import ModelDistributionChart from '@/components/charts/ModelDistributionChart.vue'
|
import ModelDistributionChart from '@/components/charts/ModelDistributionChart.vue'
|
||||||
|
import EndpointDistributionChart from '@/components/charts/EndpointDistributionChart.vue'
|
||||||
import Icon from '@/components/icons/Icon.vue'
|
import Icon from '@/components/icons/Icon.vue'
|
||||||
import { adminAPI } from '@/api/admin'
|
import { adminAPI } from '@/api/admin'
|
||||||
import type { Account, AccountUsageStatsResponse } from '@/types'
|
import type { Account, AccountUsageStatsResponse } from '@/types'
|
||||||
|
|||||||
@@ -35,6 +35,19 @@
|
|||||||
</span>
|
</span>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
|
<template #cell-endpoint="{ row }">
|
||||||
|
<div class="max-w-[320px] space-y-1 text-xs">
|
||||||
|
<div class="break-all text-gray-700 dark:text-gray-300">
|
||||||
|
<span class="font-medium text-gray-500 dark:text-gray-400">{{ t('usage.inbound') }}:</span>
|
||||||
|
<span class="ml-1">{{ row.inbound_endpoint?.trim() || '-' }}</span>
|
||||||
|
</div>
|
||||||
|
<div class="break-all text-gray-700 dark:text-gray-300">
|
||||||
|
<span class="font-medium text-gray-500 dark:text-gray-400">{{ t('usage.upstream') }}:</span>
|
||||||
|
<span class="ml-1">{{ row.upstream_endpoint?.trim() || '-' }}</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
|
||||||
<template #cell-group="{ row }">
|
<template #cell-group="{ row }">
|
||||||
<span v-if="row.group" class="inline-flex items-center rounded px-2 py-0.5 text-xs font-medium bg-indigo-100 text-indigo-800 dark:bg-indigo-900 dark:text-indigo-200">
|
<span v-if="row.group" class="inline-flex items-center rounded px-2 py-0.5 text-xs font-medium bg-indigo-100 text-indigo-800 dark:bg-indigo-900 dark:text-indigo-200">
|
||||||
{{ row.group.name }}
|
{{ row.group.name }}
|
||||||
@@ -328,6 +341,7 @@ const getRequestTypeBadgeClass = (row: AdminUsageLog): string => {
|
|||||||
if (requestType === 'sync') return 'bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-200'
|
if (requestType === 'sync') return 'bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-200'
|
||||||
return 'bg-amber-100 text-amber-800 dark:bg-amber-900 dark:text-amber-200'
|
return 'bg-amber-100 text-amber-800 dark:bg-amber-900 dark:text-amber-200'
|
||||||
}
|
}
|
||||||
|
|
||||||
const formatCacheTokens = (tokens: number): string => {
|
const formatCacheTokens = (tokens: number): string => {
|
||||||
if (tokens >= 1000000) return `${(tokens / 1000000).toFixed(1)}M`
|
if (tokens >= 1000000) return `${(tokens / 1000000).toFixed(1)}M`
|
||||||
if (tokens >= 1000) return `${(tokens / 1000).toFixed(1)}K`
|
if (tokens >= 1000) return `${(tokens / 1000).toFixed(1)}K`
|
||||||
|
|||||||
257
frontend/src/components/charts/EndpointDistributionChart.vue
Normal file
257
frontend/src/components/charts/EndpointDistributionChart.vue
Normal file
@@ -0,0 +1,257 @@
|
|||||||
|
<template>
|
||||||
|
<div class="card p-4">
|
||||||
|
<div class="mb-4 flex items-start justify-between gap-3">
|
||||||
|
<h3 class="text-sm font-semibold text-gray-900 dark:text-white">
|
||||||
|
{{ title || t('usage.endpointDistribution') }}
|
||||||
|
</h3>
|
||||||
|
<div class="flex flex-col items-end gap-2">
|
||||||
|
<div
|
||||||
|
v-if="showSourceToggle"
|
||||||
|
class="inline-flex rounded-lg border border-gray-200 bg-gray-50 p-0.5 dark:border-gray-700 dark:bg-dark-800"
|
||||||
|
>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
class="rounded-md px-2.5 py-1 text-xs font-medium transition-colors"
|
||||||
|
:class="source === 'inbound'
|
||||||
|
? 'bg-white text-gray-900 shadow-sm dark:bg-dark-700 dark:text-white'
|
||||||
|
: 'text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200'"
|
||||||
|
@click="emit('update:source', 'inbound')"
|
||||||
|
>
|
||||||
|
{{ t('usage.inbound') }}
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
class="rounded-md px-2.5 py-1 text-xs font-medium transition-colors"
|
||||||
|
:class="source === 'upstream'
|
||||||
|
? 'bg-white text-gray-900 shadow-sm dark:bg-dark-700 dark:text-white'
|
||||||
|
: 'text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200'"
|
||||||
|
@click="emit('update:source', 'upstream')"
|
||||||
|
>
|
||||||
|
{{ t('usage.upstream') }}
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
class="rounded-md px-2.5 py-1 text-xs font-medium transition-colors"
|
||||||
|
:class="source === 'path'
|
||||||
|
? 'bg-white text-gray-900 shadow-sm dark:bg-dark-700 dark:text-white'
|
||||||
|
: 'text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200'"
|
||||||
|
@click="emit('update:source', 'path')"
|
||||||
|
>
|
||||||
|
{{ t('usage.path') }}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div
|
||||||
|
v-if="showMetricToggle"
|
||||||
|
class="inline-flex rounded-lg border border-gray-200 bg-gray-50 p-0.5 dark:border-gray-700 dark:bg-dark-800"
|
||||||
|
>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
class="rounded-md px-2.5 py-1 text-xs font-medium transition-colors"
|
||||||
|
:class="metric === 'tokens'
|
||||||
|
? 'bg-white text-gray-900 shadow-sm dark:bg-dark-700 dark:text-white'
|
||||||
|
: 'text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200'"
|
||||||
|
@click="emit('update:metric', 'tokens')"
|
||||||
|
>
|
||||||
|
{{ t('admin.dashboard.metricTokens') }}
|
||||||
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
class="rounded-md px-2.5 py-1 text-xs font-medium transition-colors"
|
||||||
|
:class="metric === 'actual_cost'
|
||||||
|
? 'bg-white text-gray-900 shadow-sm dark:bg-dark-700 dark:text-white'
|
||||||
|
: 'text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200'"
|
||||||
|
@click="emit('update:metric', 'actual_cost')"
|
||||||
|
>
|
||||||
|
{{ t('admin.dashboard.metricActualCost') }}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div v-if="loading" class="flex h-48 items-center justify-center">
|
||||||
|
<LoadingSpinner />
|
||||||
|
</div>
|
||||||
|
<div v-else-if="displayEndpointStats.length > 0 && chartData" class="flex items-center gap-6">
|
||||||
|
<div class="h-48 w-48">
|
||||||
|
<Doughnut :data="chartData" :options="doughnutOptions" />
|
||||||
|
</div>
|
||||||
|
<div class="max-h-48 flex-1 overflow-y-auto">
|
||||||
|
<table class="w-full text-xs">
|
||||||
|
<thead>
|
||||||
|
<tr class="text-gray-500 dark:text-gray-400">
|
||||||
|
<th class="pb-2 text-left">{{ t('usage.endpoint') }}</th>
|
||||||
|
<th class="pb-2 text-right">{{ t('admin.dashboard.requests') }}</th>
|
||||||
|
<th class="pb-2 text-right">{{ t('admin.dashboard.tokens') }}</th>
|
||||||
|
<th class="pb-2 text-right">{{ t('admin.dashboard.actual') }}</th>
|
||||||
|
<th class="pb-2 text-right">{{ t('admin.dashboard.standard') }}</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
<tr
|
||||||
|
v-for="item in displayEndpointStats"
|
||||||
|
:key="item.endpoint"
|
||||||
|
class="border-t border-gray-100 dark:border-gray-700"
|
||||||
|
>
|
||||||
|
<td class="max-w-[180px] truncate py-1.5 font-medium text-gray-900 dark:text-white" :title="item.endpoint">
|
||||||
|
{{ item.endpoint }}
|
||||||
|
</td>
|
||||||
|
<td class="py-1.5 text-right text-gray-600 dark:text-gray-400">
|
||||||
|
{{ formatNumber(item.requests) }}
|
||||||
|
</td>
|
||||||
|
<td class="py-1.5 text-right text-gray-600 dark:text-gray-400">
|
||||||
|
{{ formatTokens(item.total_tokens) }}
|
||||||
|
</td>
|
||||||
|
<td class="py-1.5 text-right text-green-600 dark:text-green-400">
|
||||||
|
${{ formatCost(item.actual_cost) }}
|
||||||
|
</td>
|
||||||
|
<td class="py-1.5 text-right text-gray-400 dark:text-gray-500">
|
||||||
|
${{ formatCost(item.cost) }}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div v-else class="flex h-48 items-center justify-center text-sm text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.dashboard.noDataAvailable') }}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</template>
|
||||||
|
|
||||||
|
<script setup lang="ts">
|
||||||
|
import { computed } from 'vue'
|
||||||
|
import { useI18n } from 'vue-i18n'
|
||||||
|
import { Chart as ChartJS, ArcElement, Tooltip, Legend } from 'chart.js'
|
||||||
|
import { Doughnut } from 'vue-chartjs'
|
||||||
|
import LoadingSpinner from '@/components/common/LoadingSpinner.vue'
|
||||||
|
import type { EndpointStat } from '@/types'
|
||||||
|
|
||||||
|
ChartJS.register(ArcElement, Tooltip, Legend)
|
||||||
|
|
||||||
|
const { t } = useI18n()
|
||||||
|
|
||||||
|
// Which quantity drives the chart: raw token volume or the actually billed cost.
type DistributionMetric = 'tokens' | 'actual_cost'
// Which endpoint dimension to display: inbound (client-facing), upstream
// (provider-facing), or the raw request path.
type EndpointSource = 'inbound' | 'upstream' | 'path'

const props = withDefaults(
  defineProps<{
    // Inbound endpoint aggregates — the default data source.
    endpointStats: EndpointStat[]
    // Upstream endpoint aggregates, shown when source === 'upstream'.
    upstreamEndpointStats?: EndpointStat[]
    // Per-path aggregates, shown when source === 'path'.
    endpointPathStats?: EndpointStat[]
    // True while the parent is fetching stats (drives the spinner state).
    loading?: boolean
    // Card title; empty string renders no custom title.
    title?: string
    // Currently selected metric (v-model:metric).
    metric?: DistributionMetric
    // Currently selected source (v-model:source).
    source?: EndpointSource
    // Show the tokens/cost toggle buttons.
    showMetricToggle?: boolean
    // Show the inbound/upstream/path toggle buttons.
    showSourceToggle?: boolean
  }>(),
  {
    upstreamEndpointStats: () => [],
    endpointPathStats: () => [],
    loading: false,
    title: '',
    metric: 'tokens',
    source: 'inbound',
    showMetricToggle: false,
    showSourceToggle: false
  }
)

// v-model bridges for the metric and source toggles.
const emit = defineEmits<{
  'update:metric': [value: DistributionMetric]
  'update:source': [value: EndpointSource]
}>()
|
||||||
|
|
||||||
|
// Segment palette cycled across doughnut slices (12 distinct hues; slices
// beyond the palette length would be uncolored, so callers keep lists short).
const chartColors = [
  '#3b82f6',
  '#10b981',
  '#f59e0b',
  '#ef4444',
  '#8b5cf6',
  '#ec4899',
  '#14b8a6',
  '#f97316',
  '#6366f1',
  '#84cc16',
  '#06b6d4',
  '#a855f7'
]
|
||||||
|
|
||||||
|
const displayEndpointStats = computed(() => {
|
||||||
|
const sourceStats = props.source === 'upstream'
|
||||||
|
? props.upstreamEndpointStats
|
||||||
|
: props.source === 'path'
|
||||||
|
? props.endpointPathStats
|
||||||
|
: props.endpointStats
|
||||||
|
if (!sourceStats?.length) return []
|
||||||
|
|
||||||
|
const metricKey = props.metric === 'actual_cost' ? 'actual_cost' : 'total_tokens'
|
||||||
|
return [...sourceStats].sort((a, b) => b[metricKey] - a[metricKey])
|
||||||
|
})
|
||||||
|
|
||||||
|
const chartData = computed(() => {
|
||||||
|
if (!displayEndpointStats.value?.length) return null
|
||||||
|
|
||||||
|
return {
|
||||||
|
labels: displayEndpointStats.value.map((item) => item.endpoint),
|
||||||
|
datasets: [
|
||||||
|
{
|
||||||
|
data: displayEndpointStats.value.map((item) =>
|
||||||
|
props.metric === 'actual_cost' ? item.actual_cost : item.total_tokens
|
||||||
|
),
|
||||||
|
backgroundColor: chartColors.slice(0, displayEndpointStats.value.length),
|
||||||
|
borderWidth: 0
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const doughnutOptions = computed(() => ({
|
||||||
|
responsive: true,
|
||||||
|
maintainAspectRatio: false,
|
||||||
|
plugins: {
|
||||||
|
legend: {
|
||||||
|
display: false
|
||||||
|
},
|
||||||
|
tooltip: {
|
||||||
|
callbacks: {
|
||||||
|
label: (context: any) => {
|
||||||
|
const value = context.raw as number
|
||||||
|
const total = context.dataset.data.reduce((a: number, b: number) => a + b, 0)
|
||||||
|
const percentage = total > 0 ? ((value / total) * 100).toFixed(1) : '0.0'
|
||||||
|
const formattedValue = props.metric === 'actual_cost'
|
||||||
|
? `$${formatCost(value)}`
|
||||||
|
: formatTokens(value)
|
||||||
|
return `${context.label}: ${formattedValue} (${percentage}%)`
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
|
||||||
|
const formatTokens = (value: number): string => {
|
||||||
|
if (value >= 1_000_000_000) {
|
||||||
|
return `${(value / 1_000_000_000).toFixed(2)}B`
|
||||||
|
} else if (value >= 1_000_000) {
|
||||||
|
return `${(value / 1_000_000).toFixed(2)}M`
|
||||||
|
} else if (value >= 1_000) {
|
||||||
|
return `${(value / 1_000).toFixed(2)}K`
|
||||||
|
}
|
||||||
|
return value.toLocaleString()
|
||||||
|
}
|
||||||
|
|
||||||
|
const formatNumber = (value: number): string => {
|
||||||
|
return value.toLocaleString()
|
||||||
|
}
|
||||||
|
|
||||||
|
const formatCost = (value: number): string => {
|
||||||
|
if (value >= 1000) {
|
||||||
|
return (value / 1000).toFixed(2) + 'K'
|
||||||
|
} else if (value >= 1) {
|
||||||
|
return value.toFixed(2)
|
||||||
|
} else if (value >= 0.01) {
|
||||||
|
return value.toFixed(3)
|
||||||
|
}
|
||||||
|
return value.toFixed(4)
|
||||||
|
}
|
||||||
|
</script>
|
||||||
@@ -718,6 +718,13 @@ export default {
|
|||||||
preparingExport: 'Preparing export...',
|
preparingExport: 'Preparing export...',
|
||||||
model: 'Model',
|
model: 'Model',
|
||||||
reasoningEffort: 'Reasoning Effort',
|
reasoningEffort: 'Reasoning Effort',
|
||||||
|
endpoint: 'Endpoint',
|
||||||
|
endpointDistribution: 'Endpoint Distribution',
|
||||||
|
inbound: 'Inbound',
|
||||||
|
upstream: 'Upstream',
|
||||||
|
path: 'Path',
|
||||||
|
inboundEndpoint: 'Inbound Endpoint',
|
||||||
|
upstreamEndpoint: 'Upstream Endpoint',
|
||||||
type: 'Type',
|
type: 'Type',
|
||||||
tokens: 'Tokens',
|
tokens: 'Tokens',
|
||||||
cost: 'Cost',
|
cost: 'Cost',
|
||||||
|
|||||||
@@ -723,6 +723,13 @@ export default {
|
|||||||
preparingExport: '正在准备导出...',
|
preparingExport: '正在准备导出...',
|
||||||
model: '模型',
|
model: '模型',
|
||||||
reasoningEffort: '推理强度',
|
reasoningEffort: '推理强度',
|
||||||
|
endpoint: '端点',
|
||||||
|
endpointDistribution: '端点分布',
|
||||||
|
inbound: '入站',
|
||||||
|
upstream: '上游',
|
||||||
|
path: '路径',
|
||||||
|
inboundEndpoint: '入站端点',
|
||||||
|
upstreamEndpoint: '上游端点',
|
||||||
type: '类型',
|
type: '类型',
|
||||||
tokens: 'Token',
|
tokens: 'Token',
|
||||||
cost: '费用',
|
cost: '费用',
|
||||||
|
|||||||
@@ -962,6 +962,8 @@ export interface UsageLog {
|
|||||||
model: string
|
model: string
|
||||||
service_tier?: string | null
|
service_tier?: string | null
|
||||||
reasoning_effort?: string | null
|
reasoning_effort?: string | null
|
||||||
|
inbound_endpoint?: string | null
|
||||||
|
upstream_endpoint?: string | null
|
||||||
|
|
||||||
group_id: number | null
|
group_id: number | null
|
||||||
subscription_id: number | null
|
subscription_id: number | null
|
||||||
@@ -1168,6 +1170,14 @@ export interface ModelStat {
|
|||||||
actual_cost: number // 实际扣除
|
actual_cost: number // 实际扣除
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface EndpointStat {
|
||||||
|
endpoint: string
|
||||||
|
requests: number
|
||||||
|
total_tokens: number
|
||||||
|
cost: number
|
||||||
|
actual_cost: number
|
||||||
|
}
|
||||||
|
|
||||||
export interface GroupStat {
|
export interface GroupStat {
|
||||||
group_id: number
|
group_id: number
|
||||||
group_name: string
|
group_name: string
|
||||||
@@ -1362,6 +1372,8 @@ export interface AccountUsageStatsResponse {
|
|||||||
history: AccountUsageHistory[]
|
history: AccountUsageHistory[]
|
||||||
summary: AccountUsageSummary
|
summary: AccountUsageSummary
|
||||||
models: ModelStat[]
|
models: ModelStat[]
|
||||||
|
endpoints: EndpointStat[]
|
||||||
|
upstream_endpoints: EndpointStat[]
|
||||||
}
|
}
|
||||||
|
|
||||||
// ==================== User Attribute Types ====================
|
// ==================== User Attribute Types ====================
|
||||||
|
|||||||
@@ -26,7 +26,20 @@
|
|||||||
:show-metric-toggle="true"
|
:show-metric-toggle="true"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
<TokenUsageTrend :trend-data="trendData" :loading="chartsLoading" />
|
<div class="grid grid-cols-1 gap-6 lg:grid-cols-2">
|
||||||
|
<EndpointDistributionChart
|
||||||
|
v-model:source="endpointDistributionSource"
|
||||||
|
v-model:metric="endpointDistributionMetric"
|
||||||
|
:endpoint-stats="inboundEndpointStats"
|
||||||
|
:upstream-endpoint-stats="upstreamEndpointStats"
|
||||||
|
:endpoint-path-stats="endpointPathStats"
|
||||||
|
:loading="endpointStatsLoading"
|
||||||
|
:show-source-toggle="true"
|
||||||
|
:show-metric-toggle="true"
|
||||||
|
:title="t('usage.endpointDistribution')"
|
||||||
|
/>
|
||||||
|
<TokenUsageTrend :trend-data="trendData" :loading="chartsLoading" />
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<UsageFilters v-model="filters" v-model:startDate="startDate" v-model:endDate="endDate" :exporting="exporting" @change="applyFilters" @refresh="refreshData" @reset="resetFilters" @cleanup="openCleanupDialog" @export="exportToExcel">
|
<UsageFilters v-model="filters" v-model:startDate="startDate" v-model:endDate="endDate" :exporting="exporting" @change="applyFilters" @refresh="refreshData" @reset="resetFilters" @cleanup="openCleanupDialog" @export="exportToExcel">
|
||||||
<template #after-reset>
|
<template #after-reset>
|
||||||
@@ -99,19 +112,28 @@ import UsageTable from '@/components/admin/usage/UsageTable.vue'; import UsageEx
|
|||||||
import UsageCleanupDialog from '@/components/admin/usage/UsageCleanupDialog.vue'
|
import UsageCleanupDialog from '@/components/admin/usage/UsageCleanupDialog.vue'
|
||||||
import UserBalanceHistoryModal from '@/components/admin/user/UserBalanceHistoryModal.vue'
|
import UserBalanceHistoryModal from '@/components/admin/user/UserBalanceHistoryModal.vue'
|
||||||
import ModelDistributionChart from '@/components/charts/ModelDistributionChart.vue'; import GroupDistributionChart from '@/components/charts/GroupDistributionChart.vue'; import TokenUsageTrend from '@/components/charts/TokenUsageTrend.vue'
|
import ModelDistributionChart from '@/components/charts/ModelDistributionChart.vue'; import GroupDistributionChart from '@/components/charts/GroupDistributionChart.vue'; import TokenUsageTrend from '@/components/charts/TokenUsageTrend.vue'
|
||||||
|
import EndpointDistributionChart from '@/components/charts/EndpointDistributionChart.vue'
|
||||||
import Icon from '@/components/icons/Icon.vue'
|
import Icon from '@/components/icons/Icon.vue'
|
||||||
import type { AdminUsageLog, TrendDataPoint, ModelStat, GroupStat, AdminUser } from '@/types'; import type { AdminUsageStatsResponse, AdminUsageQueryParams } from '@/api/admin/usage'
|
import type { AdminUsageLog, TrendDataPoint, ModelStat, GroupStat, EndpointStat, AdminUser } from '@/types'; import type { AdminUsageStatsResponse, AdminUsageQueryParams } from '@/api/admin/usage'
|
||||||
|
|
||||||
const { t } = useI18n()
|
const { t } = useI18n()
|
||||||
const appStore = useAppStore()
|
const appStore = useAppStore()
|
||||||
type DistributionMetric = 'tokens' | 'actual_cost'
|
type DistributionMetric = 'tokens' | 'actual_cost'
|
||||||
|
type EndpointSource = 'inbound' | 'upstream' | 'path'
|
||||||
const route = useRoute()
|
const route = useRoute()
|
||||||
const usageStats = ref<AdminUsageStatsResponse | null>(null); const usageLogs = ref<AdminUsageLog[]>([]); const loading = ref(false); const exporting = ref(false)
|
const usageStats = ref<AdminUsageStatsResponse | null>(null); const usageLogs = ref<AdminUsageLog[]>([]); const loading = ref(false); const exporting = ref(false)
|
||||||
const trendData = ref<TrendDataPoint[]>([]); const modelStats = ref<ModelStat[]>([]); const groupStats = ref<GroupStat[]>([]); const chartsLoading = ref(false); const granularity = ref<'day' | 'hour'>('day')
|
const trendData = ref<TrendDataPoint[]>([]); const modelStats = ref<ModelStat[]>([]); const groupStats = ref<GroupStat[]>([]); const chartsLoading = ref(false); const granularity = ref<'day' | 'hour'>('day')
|
||||||
const modelDistributionMetric = ref<DistributionMetric>('tokens')
|
const modelDistributionMetric = ref<DistributionMetric>('tokens')
|
||||||
const groupDistributionMetric = ref<DistributionMetric>('tokens')
|
const groupDistributionMetric = ref<DistributionMetric>('tokens')
|
||||||
|
const endpointDistributionMetric = ref<DistributionMetric>('tokens')
|
||||||
|
const endpointDistributionSource = ref<EndpointSource>('inbound')
|
||||||
|
const inboundEndpointStats = ref<EndpointStat[]>([])
|
||||||
|
const upstreamEndpointStats = ref<EndpointStat[]>([])
|
||||||
|
const endpointPathStats = ref<EndpointStat[]>([])
|
||||||
|
const endpointStatsLoading = ref(false)
|
||||||
let abortController: AbortController | null = null; let exportAbortController: AbortController | null = null
|
let abortController: AbortController | null = null; let exportAbortController: AbortController | null = null
|
||||||
let chartReqSeq = 0
|
let chartReqSeq = 0
|
||||||
|
let statsReqSeq = 0
|
||||||
const exportProgress = reactive({ show: false, progress: 0, current: 0, total: 0, estimatedTime: '' })
|
const exportProgress = reactive({ show: false, progress: 0, current: 0, total: 0, estimatedTime: '' })
|
||||||
const cleanupDialogVisible = ref(false)
|
const cleanupDialogVisible = ref(false)
|
||||||
// Balance history modal state
|
// Balance history modal state
|
||||||
@@ -183,13 +205,25 @@ const loadLogs = async () => {
|
|||||||
} catch (error: any) { if(error?.name !== 'AbortError') console.error('Failed to load usage logs:', error) } finally { if(abortController === c) loading.value = false }
|
} catch (error: any) { if(error?.name !== 'AbortError') console.error('Failed to load usage logs:', error) } finally { if(abortController === c) loading.value = false }
|
||||||
}
|
}
|
||||||
const loadStats = async () => {
|
const loadStats = async () => {
|
||||||
|
const seq = ++statsReqSeq
|
||||||
|
endpointStatsLoading.value = true
|
||||||
try {
|
try {
|
||||||
const requestType = filters.value.request_type
|
const requestType = filters.value.request_type
|
||||||
const legacyStream = requestType ? requestTypeToLegacyStream(requestType) : filters.value.stream
|
const legacyStream = requestType ? requestTypeToLegacyStream(requestType) : filters.value.stream
|
||||||
const s = await adminAPI.usage.getStats({ ...filters.value, stream: legacyStream === null ? undefined : legacyStream })
|
const s = await adminAPI.usage.getStats({ ...filters.value, stream: legacyStream === null ? undefined : legacyStream })
|
||||||
|
if (seq !== statsReqSeq) return
|
||||||
usageStats.value = s
|
usageStats.value = s
|
||||||
|
inboundEndpointStats.value = s.endpoints || []
|
||||||
|
upstreamEndpointStats.value = s.upstream_endpoints || []
|
||||||
|
endpointPathStats.value = s.endpoint_paths || []
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
if (seq !== statsReqSeq) return
|
||||||
console.error('Failed to load usage stats:', error)
|
console.error('Failed to load usage stats:', error)
|
||||||
|
inboundEndpointStats.value = []
|
||||||
|
upstreamEndpointStats.value = []
|
||||||
|
endpointPathStats.value = []
|
||||||
|
} finally {
|
||||||
|
if (seq === statsReqSeq) endpointStatsLoading.value = false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
const loadChartData = async () => {
|
const loadChartData = async () => {
|
||||||
@@ -246,6 +280,7 @@ const exportToExcel = async () => {
|
|||||||
const headers = [
|
const headers = [
|
||||||
t('usage.time'), t('admin.usage.user'), t('usage.apiKeyFilter'),
|
t('usage.time'), t('admin.usage.user'), t('usage.apiKeyFilter'),
|
||||||
t('admin.usage.account'), t('usage.model'), t('usage.reasoningEffort'), t('admin.usage.group'),
|
t('admin.usage.account'), t('usage.model'), t('usage.reasoningEffort'), t('admin.usage.group'),
|
||||||
|
t('usage.inboundEndpoint'), t('usage.upstreamEndpoint'),
|
||||||
t('usage.type'),
|
t('usage.type'),
|
||||||
t('admin.usage.inputTokens'), t('admin.usage.outputTokens'),
|
t('admin.usage.inputTokens'), t('admin.usage.outputTokens'),
|
||||||
t('admin.usage.cacheReadTokens'), t('admin.usage.cacheCreationTokens'),
|
t('admin.usage.cacheReadTokens'), t('admin.usage.cacheCreationTokens'),
|
||||||
@@ -263,7 +298,8 @@ const exportToExcel = async () => {
|
|||||||
if (c.signal.aborted) break; if (p === 1) { total = res.total; exportProgress.total = total }
|
if (c.signal.aborted) break; if (p === 1) { total = res.total; exportProgress.total = total }
|
||||||
const rows = (res.items || []).map((log: AdminUsageLog) => [
|
const rows = (res.items || []).map((log: AdminUsageLog) => [
|
||||||
log.created_at, log.user?.email || '', log.api_key?.name || '', log.account?.name || '', log.model,
|
log.created_at, log.user?.email || '', log.api_key?.name || '', log.account?.name || '', log.model,
|
||||||
formatReasoningEffort(log.reasoning_effort), log.group?.name || '', getRequestTypeLabel(log),
|
formatReasoningEffort(log.reasoning_effort), log.group?.name || '',
|
||||||
|
log.inbound_endpoint || '', log.upstream_endpoint || '', getRequestTypeLabel(log),
|
||||||
log.input_tokens, log.output_tokens, log.cache_read_tokens, log.cache_creation_tokens,
|
log.input_tokens, log.output_tokens, log.cache_read_tokens, log.cache_creation_tokens,
|
||||||
log.input_cost?.toFixed(6) || '0.000000', log.output_cost?.toFixed(6) || '0.000000',
|
log.input_cost?.toFixed(6) || '0.000000', log.output_cost?.toFixed(6) || '0.000000',
|
||||||
log.cache_read_cost?.toFixed(6) || '0.000000', log.cache_creation_cost?.toFixed(6) || '0.000000',
|
log.cache_read_cost?.toFixed(6) || '0.000000', log.cache_creation_cost?.toFixed(6) || '0.000000',
|
||||||
@@ -301,6 +337,7 @@ const allColumns = computed(() => [
|
|||||||
{ key: 'account', label: t('admin.usage.account'), sortable: false },
|
{ key: 'account', label: t('admin.usage.account'), sortable: false },
|
||||||
{ key: 'model', label: t('usage.model'), sortable: true },
|
{ key: 'model', label: t('usage.model'), sortable: true },
|
||||||
{ key: 'reasoning_effort', label: t('usage.reasoningEffort'), sortable: false },
|
{ key: 'reasoning_effort', label: t('usage.reasoningEffort'), sortable: false },
|
||||||
|
{ key: 'endpoint', label: t('usage.endpoint'), sortable: false },
|
||||||
{ key: 'group', label: t('admin.usage.group'), sortable: false },
|
{ key: 'group', label: t('admin.usage.group'), sortable: false },
|
||||||
{ key: 'stream', label: t('usage.type'), sortable: false },
|
{ key: 'stream', label: t('usage.type'), sortable: false },
|
||||||
{ key: 'tokens', label: t('usage.tokens'), sortable: false },
|
{ key: 'tokens', label: t('usage.tokens'), sortable: false },
|
||||||
@@ -343,12 +380,18 @@ const loadSavedColumns = () => {
|
|||||||
try {
|
try {
|
||||||
const saved = localStorage.getItem(HIDDEN_COLUMNS_KEY)
|
const saved = localStorage.getItem(HIDDEN_COLUMNS_KEY)
|
||||||
if (saved) {
|
if (saved) {
|
||||||
(JSON.parse(saved) as string[]).forEach(key => hiddenColumns.add(key))
|
(JSON.parse(saved) as string[]).forEach((key) => {
|
||||||
|
hiddenColumns.add(key)
|
||||||
|
})
|
||||||
} else {
|
} else {
|
||||||
DEFAULT_HIDDEN_COLUMNS.forEach(key => hiddenColumns.add(key))
|
DEFAULT_HIDDEN_COLUMNS.forEach((key) => {
|
||||||
|
hiddenColumns.add(key)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
} catch {
|
} catch {
|
||||||
DEFAULT_HIDDEN_COLUMNS.forEach(key => hiddenColumns.add(key))
|
DEFAULT_HIDDEN_COLUMNS.forEach((key) => {
|
||||||
|
hiddenColumns.add(key)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -166,6 +166,12 @@
|
|||||||
</span>
|
</span>
|
||||||
</template>
|
</template>
|
||||||
|
|
||||||
|
<template #cell-endpoint="{ row }">
|
||||||
|
<span class="text-sm text-gray-600 dark:text-gray-300 block max-w-[320px] whitespace-normal break-all">
|
||||||
|
{{ formatUsageEndpoints(row) }}
|
||||||
|
</span>
|
||||||
|
</template>
|
||||||
|
|
||||||
<template #cell-stream="{ row }">
|
<template #cell-stream="{ row }">
|
||||||
<span
|
<span
|
||||||
class="inline-flex items-center rounded px-2 py-0.5 text-xs font-medium"
|
class="inline-flex items-center rounded px-2 py-0.5 text-xs font-medium"
|
||||||
@@ -516,6 +522,7 @@ const columns = computed<Column[]>(() => [
|
|||||||
{ key: 'api_key', label: t('usage.apiKeyFilter'), sortable: false },
|
{ key: 'api_key', label: t('usage.apiKeyFilter'), sortable: false },
|
||||||
{ key: 'model', label: t('usage.model'), sortable: true },
|
{ key: 'model', label: t('usage.model'), sortable: true },
|
||||||
{ key: 'reasoning_effort', label: t('usage.reasoningEffort'), sortable: false },
|
{ key: 'reasoning_effort', label: t('usage.reasoningEffort'), sortable: false },
|
||||||
|
{ key: 'endpoint', label: t('usage.endpoint'), sortable: false },
|
||||||
{ key: 'stream', label: t('usage.type'), sortable: false },
|
{ key: 'stream', label: t('usage.type'), sortable: false },
|
||||||
{ key: 'tokens', label: t('usage.tokens'), sortable: false },
|
{ key: 'tokens', label: t('usage.tokens'), sortable: false },
|
||||||
{ key: 'cost', label: t('usage.cost'), sortable: false },
|
{ key: 'cost', label: t('usage.cost'), sortable: false },
|
||||||
@@ -615,6 +622,11 @@ const getRequestTypeExportText = (log: UsageLog): string => {
|
|||||||
return 'Unknown'
|
return 'Unknown'
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const formatUsageEndpoints = (log: UsageLog): string => {
|
||||||
|
const inbound = log.inbound_endpoint?.trim()
|
||||||
|
return inbound || '-'
|
||||||
|
}
|
||||||
|
|
||||||
const formatTokens = (value: number): string => {
|
const formatTokens = (value: number): string => {
|
||||||
if (value >= 1_000_000_000) {
|
if (value >= 1_000_000_000) {
|
||||||
return `${(value / 1_000_000_000).toFixed(2)}B`
|
return `${(value / 1_000_000_000).toFixed(2)}B`
|
||||||
@@ -789,6 +801,7 @@ const exportToCSV = async () => {
|
|||||||
'API Key Name',
|
'API Key Name',
|
||||||
'Model',
|
'Model',
|
||||||
'Reasoning Effort',
|
'Reasoning Effort',
|
||||||
|
'Inbound Endpoint',
|
||||||
'Type',
|
'Type',
|
||||||
'Input Tokens',
|
'Input Tokens',
|
||||||
'Output Tokens',
|
'Output Tokens',
|
||||||
@@ -806,6 +819,7 @@ const exportToCSV = async () => {
|
|||||||
log.api_key?.name || '',
|
log.api_key?.name || '',
|
||||||
log.model,
|
log.model,
|
||||||
formatReasoningEffort(log.reasoning_effort),
|
formatReasoningEffort(log.reasoning_effort),
|
||||||
|
log.inbound_endpoint || '',
|
||||||
getRequestTypeExportText(log),
|
getRequestTypeExportText(log),
|
||||||
log.input_tokens,
|
log.input_tokens,
|
||||||
log.output_tokens,
|
log.output_tokens,
|
||||||
|
|||||||
Reference in New Issue
Block a user