mirror of
https://gitee.com/wanwujie/sub2api
synced 2026-04-21 23:24:46 +08:00
feat(sora): 新增 Sora 平台支持并修复高危安全和性能问题
新增功能: - 新增 Sora 账号管理和 OAuth 认证 - 新增 Sora 视频/图片生成 API 网关 - 新增 Sora 任务调度和缓存机制 - 新增 Sora 使用统计和计费支持 - 前端增加 Sora 平台配置界面 安全修复(代码审核): - [SEC-001] 限制媒体下载响应体大小(图片 20MB、视频 200MB),防止 DoS 攻击 - [SEC-002] 限制 SDK API 响应大小(1MB),防止内存耗尽 - [SEC-003] 修复 SSRF 风险,添加 URL 验证并强制使用代理配置 BUG 修复(代码审核): - [BUG-001] 修复 for 循环内 defer 累积导致的资源泄漏 - [BUG-002] 修复图片并发槽位获取失败时已持有锁未释放的永久泄漏 性能优化(代码审核): - [PERF-001] 添加 Sentinel Token 缓存(3 分钟有效期),减少 PoW 计算开销 技术细节: - 使用 io.LimitReader 限制所有外部输入的大小 - 添加 urlvalidator 验证防止 SSRF 攻击 - 使用 sync.Map 实现线程安全的包级缓存 - 优化并发槽位管理,添加 releaseAll 模式防止泄漏 影响范围: - 后端:新增 Sora 相关数据模型、服务、网关和管理接口 - 前端:新增 Sora 平台配置、账号管理和监控界面 - 配置:新增 Sora 相关配置项和环境变量 Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -69,6 +69,8 @@ func provideCleanup(
|
|||||||
opsScheduledReport *service.OpsScheduledReportService,
|
opsScheduledReport *service.OpsScheduledReportService,
|
||||||
schedulerSnapshot *service.SchedulerSnapshotService,
|
schedulerSnapshot *service.SchedulerSnapshotService,
|
||||||
tokenRefresh *service.TokenRefreshService,
|
tokenRefresh *service.TokenRefreshService,
|
||||||
|
soraTokenRefresh *service.SoraTokenRefreshService,
|
||||||
|
soraCacheCleanup *service.SoraCacheCleanupService,
|
||||||
accountExpiry *service.AccountExpiryService,
|
accountExpiry *service.AccountExpiryService,
|
||||||
usageCleanup *service.UsageCleanupService,
|
usageCleanup *service.UsageCleanupService,
|
||||||
pricing *service.PricingService,
|
pricing *service.PricingService,
|
||||||
@@ -134,6 +136,18 @@ func provideCleanup(
|
|||||||
tokenRefresh.Stop()
|
tokenRefresh.Stop()
|
||||||
return nil
|
return nil
|
||||||
}},
|
}},
|
||||||
|
{"SoraTokenRefreshService", func() error {
|
||||||
|
if soraTokenRefresh != nil {
|
||||||
|
soraTokenRefresh.Stop()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}},
|
||||||
|
{"SoraCacheCleanupService", func() error {
|
||||||
|
if soraCacheCleanup != nil {
|
||||||
|
soraCacheCleanup.Stop()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}},
|
||||||
{"AccountExpiryService", func() error {
|
{"AccountExpiryService", func() error {
|
||||||
accountExpiry.Stop()
|
accountExpiry.Stop()
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -129,6 +129,9 @@ func initializeApplication(buildInfo handler.BuildInfo) (*Application, error) {
|
|||||||
proxyHandler := admin.NewProxyHandler(adminService)
|
proxyHandler := admin.NewProxyHandler(adminService)
|
||||||
adminRedeemHandler := admin.NewRedeemHandler(adminService)
|
adminRedeemHandler := admin.NewRedeemHandler(adminService)
|
||||||
promoHandler := admin.NewPromoHandler(promoService)
|
promoHandler := admin.NewPromoHandler(promoService)
|
||||||
|
soraAccountRepository := repository.NewSoraAccountRepository(client)
|
||||||
|
soraUsageStatRepository := repository.NewSoraUsageStatRepository(client, db)
|
||||||
|
soraAccountHandler := admin.NewSoraAccountHandler(adminService, soraAccountRepository, soraUsageStatRepository)
|
||||||
opsRepository := repository.NewOpsRepository(db)
|
opsRepository := repository.NewOpsRepository(db)
|
||||||
schedulerOutboxRepository := repository.NewSchedulerOutboxRepository(db)
|
schedulerOutboxRepository := repository.NewSchedulerOutboxRepository(db)
|
||||||
schedulerSnapshotService := service.ProvideSchedulerSnapshotService(schedulerCache, schedulerOutboxRepository, accountRepository, groupRepository, configConfig)
|
schedulerSnapshotService := service.ProvideSchedulerSnapshotService(schedulerCache, schedulerOutboxRepository, accountRepository, groupRepository, configConfig)
|
||||||
@@ -161,11 +164,16 @@ func initializeApplication(buildInfo handler.BuildInfo) (*Application, error) {
|
|||||||
userAttributeValueRepository := repository.NewUserAttributeValueRepository(client)
|
userAttributeValueRepository := repository.NewUserAttributeValueRepository(client)
|
||||||
userAttributeService := service.NewUserAttributeService(userAttributeDefinitionRepository, userAttributeValueRepository)
|
userAttributeService := service.NewUserAttributeService(userAttributeDefinitionRepository, userAttributeValueRepository)
|
||||||
userAttributeHandler := admin.NewUserAttributeHandler(userAttributeService)
|
userAttributeHandler := admin.NewUserAttributeHandler(userAttributeService)
|
||||||
adminHandlers := handler.ProvideAdminHandlers(dashboardHandler, adminUserHandler, groupHandler, accountHandler, oAuthHandler, openAIOAuthHandler, geminiOAuthHandler, antigravityOAuthHandler, proxyHandler, adminRedeemHandler, promoHandler, settingHandler, opsHandler, systemHandler, adminSubscriptionHandler, adminUsageHandler, userAttributeHandler)
|
adminHandlers := handler.ProvideAdminHandlers(dashboardHandler, adminUserHandler, groupHandler, accountHandler, oAuthHandler, openAIOAuthHandler, geminiOAuthHandler, antigravityOAuthHandler, proxyHandler, adminRedeemHandler, promoHandler, soraAccountHandler, settingHandler, opsHandler, systemHandler, adminSubscriptionHandler, adminUsageHandler, userAttributeHandler)
|
||||||
gatewayHandler := handler.NewGatewayHandler(gatewayService, geminiMessagesCompatService, antigravityGatewayService, userService, concurrencyService, billingCacheService, configConfig)
|
gatewayHandler := handler.NewGatewayHandler(gatewayService, geminiMessagesCompatService, antigravityGatewayService, userService, concurrencyService, billingCacheService, configConfig)
|
||||||
openAIGatewayHandler := handler.NewOpenAIGatewayHandler(openAIGatewayService, concurrencyService, billingCacheService, configConfig)
|
openAIGatewayHandler := handler.NewOpenAIGatewayHandler(openAIGatewayService, concurrencyService, billingCacheService, configConfig)
|
||||||
|
soraTaskRepository := repository.NewSoraTaskRepository(client)
|
||||||
|
soraCacheFileRepository := repository.NewSoraCacheFileRepository(client)
|
||||||
|
soraCacheService := service.NewSoraCacheService(configConfig, soraCacheFileRepository, settingService, accountRepository, httpUpstream)
|
||||||
|
soraGatewayService := service.NewSoraGatewayService(accountRepository, soraAccountRepository, soraUsageStatRepository, soraTaskRepository, soraCacheService, settingService, concurrencyService, configConfig, httpUpstream)
|
||||||
|
soraGatewayHandler := handler.NewSoraGatewayHandler(gatewayService, soraGatewayService, concurrencyService, billingCacheService, configConfig)
|
||||||
handlerSettingHandler := handler.ProvideSettingHandler(settingService, buildInfo)
|
handlerSettingHandler := handler.ProvideSettingHandler(settingService, buildInfo)
|
||||||
handlers := handler.ProvideHandlers(authHandler, userHandler, apiKeyHandler, usageHandler, redeemHandler, subscriptionHandler, adminHandlers, gatewayHandler, openAIGatewayHandler, handlerSettingHandler)
|
handlers := handler.ProvideHandlers(authHandler, userHandler, apiKeyHandler, usageHandler, redeemHandler, subscriptionHandler, adminHandlers, gatewayHandler, openAIGatewayHandler, soraGatewayHandler, handlerSettingHandler)
|
||||||
jwtAuthMiddleware := middleware.NewJWTAuthMiddleware(authService, userService)
|
jwtAuthMiddleware := middleware.NewJWTAuthMiddleware(authService, userService)
|
||||||
adminAuthMiddleware := middleware.NewAdminAuthMiddleware(authService, userService, settingService)
|
adminAuthMiddleware := middleware.NewAdminAuthMiddleware(authService, userService, settingService)
|
||||||
apiKeyAuthMiddleware := middleware.NewAPIKeyAuthMiddleware(apiKeyService, subscriptionService, configConfig)
|
apiKeyAuthMiddleware := middleware.NewAPIKeyAuthMiddleware(apiKeyService, subscriptionService, configConfig)
|
||||||
@@ -177,8 +185,10 @@ func initializeApplication(buildInfo handler.BuildInfo) (*Application, error) {
|
|||||||
opsCleanupService := service.ProvideOpsCleanupService(opsRepository, db, redisClient, configConfig)
|
opsCleanupService := service.ProvideOpsCleanupService(opsRepository, db, redisClient, configConfig)
|
||||||
opsScheduledReportService := service.ProvideOpsScheduledReportService(opsService, userService, emailService, redisClient, configConfig)
|
opsScheduledReportService := service.ProvideOpsScheduledReportService(opsService, userService, emailService, redisClient, configConfig)
|
||||||
tokenRefreshService := service.ProvideTokenRefreshService(accountRepository, oAuthService, openAIOAuthService, geminiOAuthService, antigravityOAuthService, compositeTokenCacheInvalidator, configConfig)
|
tokenRefreshService := service.ProvideTokenRefreshService(accountRepository, oAuthService, openAIOAuthService, geminiOAuthService, antigravityOAuthService, compositeTokenCacheInvalidator, configConfig)
|
||||||
|
soraTokenRefreshService := service.ProvideSoraTokenRefreshService(accountRepository, soraAccountRepository, settingService, httpUpstream, configConfig)
|
||||||
|
soraCacheCleanupService := service.ProvideSoraCacheCleanupService(soraCacheFileRepository, settingService, configConfig)
|
||||||
accountExpiryService := service.ProvideAccountExpiryService(accountRepository)
|
accountExpiryService := service.ProvideAccountExpiryService(accountRepository)
|
||||||
v := provideCleanup(client, redisClient, opsMetricsCollector, opsAggregationService, opsAlertEvaluatorService, opsCleanupService, opsScheduledReportService, schedulerSnapshotService, tokenRefreshService, accountExpiryService, usageCleanupService, pricingService, emailQueueService, billingCacheService, oAuthService, openAIOAuthService, geminiOAuthService, antigravityOAuthService)
|
v := provideCleanup(client, redisClient, opsMetricsCollector, opsAggregationService, opsAlertEvaluatorService, opsCleanupService, opsScheduledReportService, schedulerSnapshotService, tokenRefreshService, soraTokenRefreshService, soraCacheCleanupService, accountExpiryService, usageCleanupService, pricingService, emailQueueService, billingCacheService, oAuthService, openAIOAuthService, geminiOAuthService, antigravityOAuthService)
|
||||||
application := &Application{
|
application := &Application{
|
||||||
Server: httpServer,
|
Server: httpServer,
|
||||||
Cleanup: v,
|
Cleanup: v,
|
||||||
@@ -210,6 +220,8 @@ func provideCleanup(
|
|||||||
opsScheduledReport *service.OpsScheduledReportService,
|
opsScheduledReport *service.OpsScheduledReportService,
|
||||||
schedulerSnapshot *service.SchedulerSnapshotService,
|
schedulerSnapshot *service.SchedulerSnapshotService,
|
||||||
tokenRefresh *service.TokenRefreshService,
|
tokenRefresh *service.TokenRefreshService,
|
||||||
|
soraTokenRefresh *service.SoraTokenRefreshService,
|
||||||
|
soraCacheCleanup *service.SoraCacheCleanupService,
|
||||||
accountExpiry *service.AccountExpiryService,
|
accountExpiry *service.AccountExpiryService,
|
||||||
usageCleanup *service.UsageCleanupService,
|
usageCleanup *service.UsageCleanupService,
|
||||||
pricing *service.PricingService,
|
pricing *service.PricingService,
|
||||||
@@ -274,6 +286,18 @@ func provideCleanup(
|
|||||||
tokenRefresh.Stop()
|
tokenRefresh.Stop()
|
||||||
return nil
|
return nil
|
||||||
}},
|
}},
|
||||||
|
{"SoraTokenRefreshService", func() error {
|
||||||
|
if soraTokenRefresh != nil {
|
||||||
|
soraTokenRefresh.Stop()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}},
|
||||||
|
{"SoraCacheCleanupService", func() error {
|
||||||
|
if soraCacheCleanup != nil {
|
||||||
|
soraCacheCleanup.Stop()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}},
|
||||||
{"AccountExpiryService", func() error {
|
{"AccountExpiryService", func() error {
|
||||||
accountExpiry.Stop()
|
accountExpiry.Stop()
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -24,6 +24,10 @@ import (
|
|||||||
"github.com/Wei-Shaw/sub2api/ent/proxy"
|
"github.com/Wei-Shaw/sub2api/ent/proxy"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/redeemcode"
|
"github.com/Wei-Shaw/sub2api/ent/redeemcode"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/setting"
|
"github.com/Wei-Shaw/sub2api/ent/setting"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soraaccount"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soracachefile"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soratask"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/sorausagestat"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/usagecleanuptask"
|
"github.com/Wei-Shaw/sub2api/ent/usagecleanuptask"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/usagelog"
|
"github.com/Wei-Shaw/sub2api/ent/usagelog"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/user"
|
"github.com/Wei-Shaw/sub2api/ent/user"
|
||||||
@@ -58,6 +62,14 @@ type Client struct {
|
|||||||
RedeemCode *RedeemCodeClient
|
RedeemCode *RedeemCodeClient
|
||||||
// Setting is the client for interacting with the Setting builders.
|
// Setting is the client for interacting with the Setting builders.
|
||||||
Setting *SettingClient
|
Setting *SettingClient
|
||||||
|
// SoraAccount is the client for interacting with the SoraAccount builders.
|
||||||
|
SoraAccount *SoraAccountClient
|
||||||
|
// SoraCacheFile is the client for interacting with the SoraCacheFile builders.
|
||||||
|
SoraCacheFile *SoraCacheFileClient
|
||||||
|
// SoraTask is the client for interacting with the SoraTask builders.
|
||||||
|
SoraTask *SoraTaskClient
|
||||||
|
// SoraUsageStat is the client for interacting with the SoraUsageStat builders.
|
||||||
|
SoraUsageStat *SoraUsageStatClient
|
||||||
// UsageCleanupTask is the client for interacting with the UsageCleanupTask builders.
|
// UsageCleanupTask is the client for interacting with the UsageCleanupTask builders.
|
||||||
UsageCleanupTask *UsageCleanupTaskClient
|
UsageCleanupTask *UsageCleanupTaskClient
|
||||||
// UsageLog is the client for interacting with the UsageLog builders.
|
// UsageLog is the client for interacting with the UsageLog builders.
|
||||||
@@ -92,6 +104,10 @@ func (c *Client) init() {
|
|||||||
c.Proxy = NewProxyClient(c.config)
|
c.Proxy = NewProxyClient(c.config)
|
||||||
c.RedeemCode = NewRedeemCodeClient(c.config)
|
c.RedeemCode = NewRedeemCodeClient(c.config)
|
||||||
c.Setting = NewSettingClient(c.config)
|
c.Setting = NewSettingClient(c.config)
|
||||||
|
c.SoraAccount = NewSoraAccountClient(c.config)
|
||||||
|
c.SoraCacheFile = NewSoraCacheFileClient(c.config)
|
||||||
|
c.SoraTask = NewSoraTaskClient(c.config)
|
||||||
|
c.SoraUsageStat = NewSoraUsageStatClient(c.config)
|
||||||
c.UsageCleanupTask = NewUsageCleanupTaskClient(c.config)
|
c.UsageCleanupTask = NewUsageCleanupTaskClient(c.config)
|
||||||
c.UsageLog = NewUsageLogClient(c.config)
|
c.UsageLog = NewUsageLogClient(c.config)
|
||||||
c.User = NewUserClient(c.config)
|
c.User = NewUserClient(c.config)
|
||||||
@@ -200,6 +216,10 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) {
|
|||||||
Proxy: NewProxyClient(cfg),
|
Proxy: NewProxyClient(cfg),
|
||||||
RedeemCode: NewRedeemCodeClient(cfg),
|
RedeemCode: NewRedeemCodeClient(cfg),
|
||||||
Setting: NewSettingClient(cfg),
|
Setting: NewSettingClient(cfg),
|
||||||
|
SoraAccount: NewSoraAccountClient(cfg),
|
||||||
|
SoraCacheFile: NewSoraCacheFileClient(cfg),
|
||||||
|
SoraTask: NewSoraTaskClient(cfg),
|
||||||
|
SoraUsageStat: NewSoraUsageStatClient(cfg),
|
||||||
UsageCleanupTask: NewUsageCleanupTaskClient(cfg),
|
UsageCleanupTask: NewUsageCleanupTaskClient(cfg),
|
||||||
UsageLog: NewUsageLogClient(cfg),
|
UsageLog: NewUsageLogClient(cfg),
|
||||||
User: NewUserClient(cfg),
|
User: NewUserClient(cfg),
|
||||||
@@ -235,6 +255,10 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
|
|||||||
Proxy: NewProxyClient(cfg),
|
Proxy: NewProxyClient(cfg),
|
||||||
RedeemCode: NewRedeemCodeClient(cfg),
|
RedeemCode: NewRedeemCodeClient(cfg),
|
||||||
Setting: NewSettingClient(cfg),
|
Setting: NewSettingClient(cfg),
|
||||||
|
SoraAccount: NewSoraAccountClient(cfg),
|
||||||
|
SoraCacheFile: NewSoraCacheFileClient(cfg),
|
||||||
|
SoraTask: NewSoraTaskClient(cfg),
|
||||||
|
SoraUsageStat: NewSoraUsageStatClient(cfg),
|
||||||
UsageCleanupTask: NewUsageCleanupTaskClient(cfg),
|
UsageCleanupTask: NewUsageCleanupTaskClient(cfg),
|
||||||
UsageLog: NewUsageLogClient(cfg),
|
UsageLog: NewUsageLogClient(cfg),
|
||||||
User: NewUserClient(cfg),
|
User: NewUserClient(cfg),
|
||||||
@@ -272,9 +296,9 @@ func (c *Client) Close() error {
|
|||||||
func (c *Client) Use(hooks ...Hook) {
|
func (c *Client) Use(hooks ...Hook) {
|
||||||
for _, n := range []interface{ Use(...Hook) }{
|
for _, n := range []interface{ Use(...Hook) }{
|
||||||
c.APIKey, c.Account, c.AccountGroup, c.Group, c.PromoCode, c.PromoCodeUsage,
|
c.APIKey, c.Account, c.AccountGroup, c.Group, c.PromoCode, c.PromoCodeUsage,
|
||||||
c.Proxy, c.RedeemCode, c.Setting, c.UsageCleanupTask, c.UsageLog, c.User,
|
c.Proxy, c.RedeemCode, c.Setting, c.SoraAccount, c.SoraCacheFile, c.SoraTask,
|
||||||
c.UserAllowedGroup, c.UserAttributeDefinition, c.UserAttributeValue,
|
c.SoraUsageStat, c.UsageCleanupTask, c.UsageLog, c.User, c.UserAllowedGroup,
|
||||||
c.UserSubscription,
|
c.UserAttributeDefinition, c.UserAttributeValue, c.UserSubscription,
|
||||||
} {
|
} {
|
||||||
n.Use(hooks...)
|
n.Use(hooks...)
|
||||||
}
|
}
|
||||||
@@ -285,9 +309,9 @@ func (c *Client) Use(hooks ...Hook) {
|
|||||||
func (c *Client) Intercept(interceptors ...Interceptor) {
|
func (c *Client) Intercept(interceptors ...Interceptor) {
|
||||||
for _, n := range []interface{ Intercept(...Interceptor) }{
|
for _, n := range []interface{ Intercept(...Interceptor) }{
|
||||||
c.APIKey, c.Account, c.AccountGroup, c.Group, c.PromoCode, c.PromoCodeUsage,
|
c.APIKey, c.Account, c.AccountGroup, c.Group, c.PromoCode, c.PromoCodeUsage,
|
||||||
c.Proxy, c.RedeemCode, c.Setting, c.UsageCleanupTask, c.UsageLog, c.User,
|
c.Proxy, c.RedeemCode, c.Setting, c.SoraAccount, c.SoraCacheFile, c.SoraTask,
|
||||||
c.UserAllowedGroup, c.UserAttributeDefinition, c.UserAttributeValue,
|
c.SoraUsageStat, c.UsageCleanupTask, c.UsageLog, c.User, c.UserAllowedGroup,
|
||||||
c.UserSubscription,
|
c.UserAttributeDefinition, c.UserAttributeValue, c.UserSubscription,
|
||||||
} {
|
} {
|
||||||
n.Intercept(interceptors...)
|
n.Intercept(interceptors...)
|
||||||
}
|
}
|
||||||
@@ -314,6 +338,14 @@ func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) {
|
|||||||
return c.RedeemCode.mutate(ctx, m)
|
return c.RedeemCode.mutate(ctx, m)
|
||||||
case *SettingMutation:
|
case *SettingMutation:
|
||||||
return c.Setting.mutate(ctx, m)
|
return c.Setting.mutate(ctx, m)
|
||||||
|
case *SoraAccountMutation:
|
||||||
|
return c.SoraAccount.mutate(ctx, m)
|
||||||
|
case *SoraCacheFileMutation:
|
||||||
|
return c.SoraCacheFile.mutate(ctx, m)
|
||||||
|
case *SoraTaskMutation:
|
||||||
|
return c.SoraTask.mutate(ctx, m)
|
||||||
|
case *SoraUsageStatMutation:
|
||||||
|
return c.SoraUsageStat.mutate(ctx, m)
|
||||||
case *UsageCleanupTaskMutation:
|
case *UsageCleanupTaskMutation:
|
||||||
return c.UsageCleanupTask.mutate(ctx, m)
|
return c.UsageCleanupTask.mutate(ctx, m)
|
||||||
case *UsageLogMutation:
|
case *UsageLogMutation:
|
||||||
@@ -1857,6 +1889,538 @@ func (c *SettingClient) mutate(ctx context.Context, m *SettingMutation) (Value,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SoraAccountClient is a client for the SoraAccount schema.
|
||||||
|
type SoraAccountClient struct {
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSoraAccountClient returns a client for the SoraAccount from the given config.
|
||||||
|
func NewSoraAccountClient(c config) *SoraAccountClient {
|
||||||
|
return &SoraAccountClient{config: c}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use adds a list of mutation hooks to the hooks stack.
|
||||||
|
// A call to `Use(f, g, h)` equals to `soraaccount.Hooks(f(g(h())))`.
|
||||||
|
func (c *SoraAccountClient) Use(hooks ...Hook) {
|
||||||
|
c.hooks.SoraAccount = append(c.hooks.SoraAccount, hooks...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Intercept adds a list of query interceptors to the interceptors stack.
|
||||||
|
// A call to `Intercept(f, g, h)` equals to `soraaccount.Intercept(f(g(h())))`.
|
||||||
|
func (c *SoraAccountClient) Intercept(interceptors ...Interceptor) {
|
||||||
|
c.inters.SoraAccount = append(c.inters.SoraAccount, interceptors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create returns a builder for creating a SoraAccount entity.
|
||||||
|
func (c *SoraAccountClient) Create() *SoraAccountCreate {
|
||||||
|
mutation := newSoraAccountMutation(c.config, OpCreate)
|
||||||
|
return &SoraAccountCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateBulk returns a builder for creating a bulk of SoraAccount entities.
|
||||||
|
func (c *SoraAccountClient) CreateBulk(builders ...*SoraAccountCreate) *SoraAccountCreateBulk {
|
||||||
|
return &SoraAccountCreateBulk{config: c.config, builders: builders}
|
||||||
|
}
|
||||||
|
|
||||||
|
// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
|
||||||
|
// a builder and applies setFunc on it.
|
||||||
|
func (c *SoraAccountClient) MapCreateBulk(slice any, setFunc func(*SoraAccountCreate, int)) *SoraAccountCreateBulk {
|
||||||
|
rv := reflect.ValueOf(slice)
|
||||||
|
if rv.Kind() != reflect.Slice {
|
||||||
|
return &SoraAccountCreateBulk{err: fmt.Errorf("calling to SoraAccountClient.MapCreateBulk with wrong type %T, need slice", slice)}
|
||||||
|
}
|
||||||
|
builders := make([]*SoraAccountCreate, rv.Len())
|
||||||
|
for i := 0; i < rv.Len(); i++ {
|
||||||
|
builders[i] = c.Create()
|
||||||
|
setFunc(builders[i], i)
|
||||||
|
}
|
||||||
|
return &SoraAccountCreateBulk{config: c.config, builders: builders}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update returns an update builder for SoraAccount.
|
||||||
|
func (c *SoraAccountClient) Update() *SoraAccountUpdate {
|
||||||
|
mutation := newSoraAccountMutation(c.config, OpUpdate)
|
||||||
|
return &SoraAccountUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateOne returns an update builder for the given entity.
|
||||||
|
func (c *SoraAccountClient) UpdateOne(_m *SoraAccount) *SoraAccountUpdateOne {
|
||||||
|
mutation := newSoraAccountMutation(c.config, OpUpdateOne, withSoraAccount(_m))
|
||||||
|
return &SoraAccountUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateOneID returns an update builder for the given id.
|
||||||
|
func (c *SoraAccountClient) UpdateOneID(id int64) *SoraAccountUpdateOne {
|
||||||
|
mutation := newSoraAccountMutation(c.config, OpUpdateOne, withSoraAccountID(id))
|
||||||
|
return &SoraAccountUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete returns a delete builder for SoraAccount.
|
||||||
|
func (c *SoraAccountClient) Delete() *SoraAccountDelete {
|
||||||
|
mutation := newSoraAccountMutation(c.config, OpDelete)
|
||||||
|
return &SoraAccountDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteOne returns a builder for deleting the given entity.
|
||||||
|
func (c *SoraAccountClient) DeleteOne(_m *SoraAccount) *SoraAccountDeleteOne {
|
||||||
|
return c.DeleteOneID(_m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteOneID returns a builder for deleting the given entity by its id.
|
||||||
|
func (c *SoraAccountClient) DeleteOneID(id int64) *SoraAccountDeleteOne {
|
||||||
|
builder := c.Delete().Where(soraaccount.ID(id))
|
||||||
|
builder.mutation.id = &id
|
||||||
|
builder.mutation.op = OpDeleteOne
|
||||||
|
return &SoraAccountDeleteOne{builder}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query returns a query builder for SoraAccount.
|
||||||
|
func (c *SoraAccountClient) Query() *SoraAccountQuery {
|
||||||
|
return &SoraAccountQuery{
|
||||||
|
config: c.config,
|
||||||
|
ctx: &QueryContext{Type: TypeSoraAccount},
|
||||||
|
inters: c.Interceptors(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get returns a SoraAccount entity by its id.
|
||||||
|
func (c *SoraAccountClient) Get(ctx context.Context, id int64) (*SoraAccount, error) {
|
||||||
|
return c.Query().Where(soraaccount.ID(id)).Only(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetX is like Get, but panics if an error occurs.
|
||||||
|
func (c *SoraAccountClient) GetX(ctx context.Context, id int64) *SoraAccount {
|
||||||
|
obj, err := c.Get(ctx, id)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hooks returns the client hooks.
|
||||||
|
func (c *SoraAccountClient) Hooks() []Hook {
|
||||||
|
return c.hooks.SoraAccount
|
||||||
|
}
|
||||||
|
|
||||||
|
// Interceptors returns the client interceptors.
|
||||||
|
func (c *SoraAccountClient) Interceptors() []Interceptor {
|
||||||
|
return c.inters.SoraAccount
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *SoraAccountClient) mutate(ctx context.Context, m *SoraAccountMutation) (Value, error) {
|
||||||
|
switch m.Op() {
|
||||||
|
case OpCreate:
|
||||||
|
return (&SoraAccountCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpUpdate:
|
||||||
|
return (&SoraAccountUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpUpdateOne:
|
||||||
|
return (&SoraAccountUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpDelete, OpDeleteOne:
|
||||||
|
return (&SoraAccountDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("ent: unknown SoraAccount mutation op: %q", m.Op())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraCacheFileClient is a client for the SoraCacheFile schema.
|
||||||
|
type SoraCacheFileClient struct {
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSoraCacheFileClient returns a client for the SoraCacheFile from the given config.
|
||||||
|
func NewSoraCacheFileClient(c config) *SoraCacheFileClient {
|
||||||
|
return &SoraCacheFileClient{config: c}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use adds a list of mutation hooks to the hooks stack.
|
||||||
|
// A call to `Use(f, g, h)` equals to `soracachefile.Hooks(f(g(h())))`.
|
||||||
|
func (c *SoraCacheFileClient) Use(hooks ...Hook) {
|
||||||
|
c.hooks.SoraCacheFile = append(c.hooks.SoraCacheFile, hooks...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Intercept adds a list of query interceptors to the interceptors stack.
|
||||||
|
// A call to `Intercept(f, g, h)` equals to `soracachefile.Intercept(f(g(h())))`.
|
||||||
|
func (c *SoraCacheFileClient) Intercept(interceptors ...Interceptor) {
|
||||||
|
c.inters.SoraCacheFile = append(c.inters.SoraCacheFile, interceptors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create returns a builder for creating a SoraCacheFile entity.
|
||||||
|
func (c *SoraCacheFileClient) Create() *SoraCacheFileCreate {
|
||||||
|
mutation := newSoraCacheFileMutation(c.config, OpCreate)
|
||||||
|
return &SoraCacheFileCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateBulk returns a builder for creating a bulk of SoraCacheFile entities.
|
||||||
|
func (c *SoraCacheFileClient) CreateBulk(builders ...*SoraCacheFileCreate) *SoraCacheFileCreateBulk {
|
||||||
|
return &SoraCacheFileCreateBulk{config: c.config, builders: builders}
|
||||||
|
}
|
||||||
|
|
||||||
|
// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
|
||||||
|
// a builder and applies setFunc on it.
|
||||||
|
func (c *SoraCacheFileClient) MapCreateBulk(slice any, setFunc func(*SoraCacheFileCreate, int)) *SoraCacheFileCreateBulk {
|
||||||
|
rv := reflect.ValueOf(slice)
|
||||||
|
if rv.Kind() != reflect.Slice {
|
||||||
|
return &SoraCacheFileCreateBulk{err: fmt.Errorf("calling to SoraCacheFileClient.MapCreateBulk with wrong type %T, need slice", slice)}
|
||||||
|
}
|
||||||
|
builders := make([]*SoraCacheFileCreate, rv.Len())
|
||||||
|
for i := 0; i < rv.Len(); i++ {
|
||||||
|
builders[i] = c.Create()
|
||||||
|
setFunc(builders[i], i)
|
||||||
|
}
|
||||||
|
return &SoraCacheFileCreateBulk{config: c.config, builders: builders}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update returns an update builder for SoraCacheFile.
|
||||||
|
func (c *SoraCacheFileClient) Update() *SoraCacheFileUpdate {
|
||||||
|
mutation := newSoraCacheFileMutation(c.config, OpUpdate)
|
||||||
|
return &SoraCacheFileUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateOne returns an update builder for the given entity.
|
||||||
|
func (c *SoraCacheFileClient) UpdateOne(_m *SoraCacheFile) *SoraCacheFileUpdateOne {
|
||||||
|
mutation := newSoraCacheFileMutation(c.config, OpUpdateOne, withSoraCacheFile(_m))
|
||||||
|
return &SoraCacheFileUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateOneID returns an update builder for the given id.
|
||||||
|
func (c *SoraCacheFileClient) UpdateOneID(id int64) *SoraCacheFileUpdateOne {
|
||||||
|
mutation := newSoraCacheFileMutation(c.config, OpUpdateOne, withSoraCacheFileID(id))
|
||||||
|
return &SoraCacheFileUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete returns a delete builder for SoraCacheFile.
|
||||||
|
func (c *SoraCacheFileClient) Delete() *SoraCacheFileDelete {
|
||||||
|
mutation := newSoraCacheFileMutation(c.config, OpDelete)
|
||||||
|
return &SoraCacheFileDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteOne returns a builder for deleting the given entity.
|
||||||
|
func (c *SoraCacheFileClient) DeleteOne(_m *SoraCacheFile) *SoraCacheFileDeleteOne {
|
||||||
|
return c.DeleteOneID(_m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteOneID returns a builder for deleting the given entity by its id.
|
||||||
|
func (c *SoraCacheFileClient) DeleteOneID(id int64) *SoraCacheFileDeleteOne {
|
||||||
|
builder := c.Delete().Where(soracachefile.ID(id))
|
||||||
|
builder.mutation.id = &id
|
||||||
|
builder.mutation.op = OpDeleteOne
|
||||||
|
return &SoraCacheFileDeleteOne{builder}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query returns a query builder for SoraCacheFile.
|
||||||
|
func (c *SoraCacheFileClient) Query() *SoraCacheFileQuery {
|
||||||
|
return &SoraCacheFileQuery{
|
||||||
|
config: c.config,
|
||||||
|
ctx: &QueryContext{Type: TypeSoraCacheFile},
|
||||||
|
inters: c.Interceptors(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get returns a SoraCacheFile entity by its id.
|
||||||
|
func (c *SoraCacheFileClient) Get(ctx context.Context, id int64) (*SoraCacheFile, error) {
|
||||||
|
return c.Query().Where(soracachefile.ID(id)).Only(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetX is like Get, but panics if an error occurs.
|
||||||
|
func (c *SoraCacheFileClient) GetX(ctx context.Context, id int64) *SoraCacheFile {
|
||||||
|
obj, err := c.Get(ctx, id)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hooks returns the client hooks.
|
||||||
|
func (c *SoraCacheFileClient) Hooks() []Hook {
|
||||||
|
return c.hooks.SoraCacheFile
|
||||||
|
}
|
||||||
|
|
||||||
|
// Interceptors returns the client interceptors.
|
||||||
|
func (c *SoraCacheFileClient) Interceptors() []Interceptor {
|
||||||
|
return c.inters.SoraCacheFile
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *SoraCacheFileClient) mutate(ctx context.Context, m *SoraCacheFileMutation) (Value, error) {
|
||||||
|
switch m.Op() {
|
||||||
|
case OpCreate:
|
||||||
|
return (&SoraCacheFileCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpUpdate:
|
||||||
|
return (&SoraCacheFileUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpUpdateOne:
|
||||||
|
return (&SoraCacheFileUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpDelete, OpDeleteOne:
|
||||||
|
return (&SoraCacheFileDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("ent: unknown SoraCacheFile mutation op: %q", m.Op())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraTaskClient is a client for the SoraTask schema.
|
||||||
|
type SoraTaskClient struct {
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSoraTaskClient returns a client for the SoraTask from the given config.
|
||||||
|
func NewSoraTaskClient(c config) *SoraTaskClient {
|
||||||
|
return &SoraTaskClient{config: c}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use adds a list of mutation hooks to the hooks stack.
|
||||||
|
// A call to `Use(f, g, h)` equals to `soratask.Hooks(f(g(h())))`.
|
||||||
|
func (c *SoraTaskClient) Use(hooks ...Hook) {
|
||||||
|
c.hooks.SoraTask = append(c.hooks.SoraTask, hooks...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Intercept adds a list of query interceptors to the interceptors stack.
|
||||||
|
// A call to `Intercept(f, g, h)` equals to `soratask.Intercept(f(g(h())))`.
|
||||||
|
func (c *SoraTaskClient) Intercept(interceptors ...Interceptor) {
|
||||||
|
c.inters.SoraTask = append(c.inters.SoraTask, interceptors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create returns a builder for creating a SoraTask entity.
|
||||||
|
func (c *SoraTaskClient) Create() *SoraTaskCreate {
|
||||||
|
mutation := newSoraTaskMutation(c.config, OpCreate)
|
||||||
|
return &SoraTaskCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateBulk returns a builder for creating a bulk of SoraTask entities.
|
||||||
|
func (c *SoraTaskClient) CreateBulk(builders ...*SoraTaskCreate) *SoraTaskCreateBulk {
|
||||||
|
return &SoraTaskCreateBulk{config: c.config, builders: builders}
|
||||||
|
}
|
||||||
|
|
||||||
|
// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
|
||||||
|
// a builder and applies setFunc on it.
|
||||||
|
func (c *SoraTaskClient) MapCreateBulk(slice any, setFunc func(*SoraTaskCreate, int)) *SoraTaskCreateBulk {
|
||||||
|
rv := reflect.ValueOf(slice)
|
||||||
|
if rv.Kind() != reflect.Slice {
|
||||||
|
return &SoraTaskCreateBulk{err: fmt.Errorf("calling to SoraTaskClient.MapCreateBulk with wrong type %T, need slice", slice)}
|
||||||
|
}
|
||||||
|
builders := make([]*SoraTaskCreate, rv.Len())
|
||||||
|
for i := 0; i < rv.Len(); i++ {
|
||||||
|
builders[i] = c.Create()
|
||||||
|
setFunc(builders[i], i)
|
||||||
|
}
|
||||||
|
return &SoraTaskCreateBulk{config: c.config, builders: builders}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update returns an update builder for SoraTask.
|
||||||
|
func (c *SoraTaskClient) Update() *SoraTaskUpdate {
|
||||||
|
mutation := newSoraTaskMutation(c.config, OpUpdate)
|
||||||
|
return &SoraTaskUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateOne returns an update builder for the given entity.
|
||||||
|
func (c *SoraTaskClient) UpdateOne(_m *SoraTask) *SoraTaskUpdateOne {
|
||||||
|
mutation := newSoraTaskMutation(c.config, OpUpdateOne, withSoraTask(_m))
|
||||||
|
return &SoraTaskUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateOneID returns an update builder for the given id.
|
||||||
|
func (c *SoraTaskClient) UpdateOneID(id int64) *SoraTaskUpdateOne {
|
||||||
|
mutation := newSoraTaskMutation(c.config, OpUpdateOne, withSoraTaskID(id))
|
||||||
|
return &SoraTaskUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete returns a delete builder for SoraTask.
|
||||||
|
func (c *SoraTaskClient) Delete() *SoraTaskDelete {
|
||||||
|
mutation := newSoraTaskMutation(c.config, OpDelete)
|
||||||
|
return &SoraTaskDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteOne returns a builder for deleting the given entity.
|
||||||
|
func (c *SoraTaskClient) DeleteOne(_m *SoraTask) *SoraTaskDeleteOne {
|
||||||
|
return c.DeleteOneID(_m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteOneID returns a builder for deleting the given entity by its id.
|
||||||
|
func (c *SoraTaskClient) DeleteOneID(id int64) *SoraTaskDeleteOne {
|
||||||
|
builder := c.Delete().Where(soratask.ID(id))
|
||||||
|
builder.mutation.id = &id
|
||||||
|
builder.mutation.op = OpDeleteOne
|
||||||
|
return &SoraTaskDeleteOne{builder}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query returns a query builder for SoraTask.
|
||||||
|
func (c *SoraTaskClient) Query() *SoraTaskQuery {
|
||||||
|
return &SoraTaskQuery{
|
||||||
|
config: c.config,
|
||||||
|
ctx: &QueryContext{Type: TypeSoraTask},
|
||||||
|
inters: c.Interceptors(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get returns a SoraTask entity by its id.
|
||||||
|
func (c *SoraTaskClient) Get(ctx context.Context, id int64) (*SoraTask, error) {
|
||||||
|
return c.Query().Where(soratask.ID(id)).Only(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetX is like Get, but panics if an error occurs.
|
||||||
|
func (c *SoraTaskClient) GetX(ctx context.Context, id int64) *SoraTask {
|
||||||
|
obj, err := c.Get(ctx, id)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hooks returns the client hooks.
|
||||||
|
func (c *SoraTaskClient) Hooks() []Hook {
|
||||||
|
return c.hooks.SoraTask
|
||||||
|
}
|
||||||
|
|
||||||
|
// Interceptors returns the client interceptors.
|
||||||
|
func (c *SoraTaskClient) Interceptors() []Interceptor {
|
||||||
|
return c.inters.SoraTask
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *SoraTaskClient) mutate(ctx context.Context, m *SoraTaskMutation) (Value, error) {
|
||||||
|
switch m.Op() {
|
||||||
|
case OpCreate:
|
||||||
|
return (&SoraTaskCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpUpdate:
|
||||||
|
return (&SoraTaskUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpUpdateOne:
|
||||||
|
return (&SoraTaskUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpDelete, OpDeleteOne:
|
||||||
|
return (&SoraTaskDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("ent: unknown SoraTask mutation op: %q", m.Op())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraUsageStatClient is a client for the SoraUsageStat schema.
|
||||||
|
type SoraUsageStatClient struct {
|
||||||
|
config
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSoraUsageStatClient returns a client for the SoraUsageStat from the given config.
|
||||||
|
func NewSoraUsageStatClient(c config) *SoraUsageStatClient {
|
||||||
|
return &SoraUsageStatClient{config: c}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use adds a list of mutation hooks to the hooks stack.
|
||||||
|
// A call to `Use(f, g, h)` equals to `sorausagestat.Hooks(f(g(h())))`.
|
||||||
|
func (c *SoraUsageStatClient) Use(hooks ...Hook) {
|
||||||
|
c.hooks.SoraUsageStat = append(c.hooks.SoraUsageStat, hooks...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Intercept adds a list of query interceptors to the interceptors stack.
|
||||||
|
// A call to `Intercept(f, g, h)` equals to `sorausagestat.Intercept(f(g(h())))`.
|
||||||
|
func (c *SoraUsageStatClient) Intercept(interceptors ...Interceptor) {
|
||||||
|
c.inters.SoraUsageStat = append(c.inters.SoraUsageStat, interceptors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create returns a builder for creating a SoraUsageStat entity.
|
||||||
|
func (c *SoraUsageStatClient) Create() *SoraUsageStatCreate {
|
||||||
|
mutation := newSoraUsageStatMutation(c.config, OpCreate)
|
||||||
|
return &SoraUsageStatCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateBulk returns a builder for creating a bulk of SoraUsageStat entities.
|
||||||
|
func (c *SoraUsageStatClient) CreateBulk(builders ...*SoraUsageStatCreate) *SoraUsageStatCreateBulk {
|
||||||
|
return &SoraUsageStatCreateBulk{config: c.config, builders: builders}
|
||||||
|
}
|
||||||
|
|
||||||
|
// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
|
||||||
|
// a builder and applies setFunc on it.
|
||||||
|
func (c *SoraUsageStatClient) MapCreateBulk(slice any, setFunc func(*SoraUsageStatCreate, int)) *SoraUsageStatCreateBulk {
|
||||||
|
rv := reflect.ValueOf(slice)
|
||||||
|
if rv.Kind() != reflect.Slice {
|
||||||
|
return &SoraUsageStatCreateBulk{err: fmt.Errorf("calling to SoraUsageStatClient.MapCreateBulk with wrong type %T, need slice", slice)}
|
||||||
|
}
|
||||||
|
builders := make([]*SoraUsageStatCreate, rv.Len())
|
||||||
|
for i := 0; i < rv.Len(); i++ {
|
||||||
|
builders[i] = c.Create()
|
||||||
|
setFunc(builders[i], i)
|
||||||
|
}
|
||||||
|
return &SoraUsageStatCreateBulk{config: c.config, builders: builders}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update returns an update builder for SoraUsageStat.
|
||||||
|
func (c *SoraUsageStatClient) Update() *SoraUsageStatUpdate {
|
||||||
|
mutation := newSoraUsageStatMutation(c.config, OpUpdate)
|
||||||
|
return &SoraUsageStatUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateOne returns an update builder for the given entity.
|
||||||
|
func (c *SoraUsageStatClient) UpdateOne(_m *SoraUsageStat) *SoraUsageStatUpdateOne {
|
||||||
|
mutation := newSoraUsageStatMutation(c.config, OpUpdateOne, withSoraUsageStat(_m))
|
||||||
|
return &SoraUsageStatUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateOneID returns an update builder for the given id.
|
||||||
|
func (c *SoraUsageStatClient) UpdateOneID(id int64) *SoraUsageStatUpdateOne {
|
||||||
|
mutation := newSoraUsageStatMutation(c.config, OpUpdateOne, withSoraUsageStatID(id))
|
||||||
|
return &SoraUsageStatUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete returns a delete builder for SoraUsageStat.
|
||||||
|
func (c *SoraUsageStatClient) Delete() *SoraUsageStatDelete {
|
||||||
|
mutation := newSoraUsageStatMutation(c.config, OpDelete)
|
||||||
|
return &SoraUsageStatDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteOne returns a builder for deleting the given entity.
|
||||||
|
func (c *SoraUsageStatClient) DeleteOne(_m *SoraUsageStat) *SoraUsageStatDeleteOne {
|
||||||
|
return c.DeleteOneID(_m.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteOneID returns a builder for deleting the given entity by its id.
|
||||||
|
func (c *SoraUsageStatClient) DeleteOneID(id int64) *SoraUsageStatDeleteOne {
|
||||||
|
builder := c.Delete().Where(sorausagestat.ID(id))
|
||||||
|
builder.mutation.id = &id
|
||||||
|
builder.mutation.op = OpDeleteOne
|
||||||
|
return &SoraUsageStatDeleteOne{builder}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query returns a query builder for SoraUsageStat.
|
||||||
|
func (c *SoraUsageStatClient) Query() *SoraUsageStatQuery {
|
||||||
|
return &SoraUsageStatQuery{
|
||||||
|
config: c.config,
|
||||||
|
ctx: &QueryContext{Type: TypeSoraUsageStat},
|
||||||
|
inters: c.Interceptors(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get returns a SoraUsageStat entity by its id.
|
||||||
|
func (c *SoraUsageStatClient) Get(ctx context.Context, id int64) (*SoraUsageStat, error) {
|
||||||
|
return c.Query().Where(sorausagestat.ID(id)).Only(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetX is like Get, but panics if an error occurs.
|
||||||
|
func (c *SoraUsageStatClient) GetX(ctx context.Context, id int64) *SoraUsageStat {
|
||||||
|
obj, err := c.Get(ctx, id)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
|
||||||
|
// Hooks returns the client hooks.
|
||||||
|
func (c *SoraUsageStatClient) Hooks() []Hook {
|
||||||
|
return c.hooks.SoraUsageStat
|
||||||
|
}
|
||||||
|
|
||||||
|
// Interceptors returns the client interceptors.
|
||||||
|
func (c *SoraUsageStatClient) Interceptors() []Interceptor {
|
||||||
|
return c.inters.SoraUsageStat
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *SoraUsageStatClient) mutate(ctx context.Context, m *SoraUsageStatMutation) (Value, error) {
|
||||||
|
switch m.Op() {
|
||||||
|
case OpCreate:
|
||||||
|
return (&SoraUsageStatCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpUpdate:
|
||||||
|
return (&SoraUsageStatUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpUpdateOne:
|
||||||
|
return (&SoraUsageStatUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
|
||||||
|
case OpDelete, OpDeleteOne:
|
||||||
|
return (&SoraUsageStatDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
|
||||||
|
default:
|
||||||
|
return nil, fmt.Errorf("ent: unknown SoraUsageStat mutation op: %q", m.Op())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// UsageCleanupTaskClient is a client for the UsageCleanupTask schema.
|
// UsageCleanupTaskClient is a client for the UsageCleanupTask schema.
|
||||||
type UsageCleanupTaskClient struct {
|
type UsageCleanupTaskClient struct {
|
||||||
config
|
config
|
||||||
@@ -3117,13 +3681,15 @@ func (c *UserSubscriptionClient) mutate(ctx context.Context, m *UserSubscription
|
|||||||
type (
|
type (
|
||||||
hooks struct {
|
hooks struct {
|
||||||
APIKey, Account, AccountGroup, Group, PromoCode, PromoCodeUsage, Proxy,
|
APIKey, Account, AccountGroup, Group, PromoCode, PromoCodeUsage, Proxy,
|
||||||
RedeemCode, Setting, UsageCleanupTask, UsageLog, User, UserAllowedGroup,
|
RedeemCode, Setting, SoraAccount, SoraCacheFile, SoraTask, SoraUsageStat,
|
||||||
UserAttributeDefinition, UserAttributeValue, UserSubscription []ent.Hook
|
UsageCleanupTask, UsageLog, User, UserAllowedGroup, UserAttributeDefinition,
|
||||||
|
UserAttributeValue, UserSubscription []ent.Hook
|
||||||
}
|
}
|
||||||
inters struct {
|
inters struct {
|
||||||
APIKey, Account, AccountGroup, Group, PromoCode, PromoCodeUsage, Proxy,
|
APIKey, Account, AccountGroup, Group, PromoCode, PromoCodeUsage, Proxy,
|
||||||
RedeemCode, Setting, UsageCleanupTask, UsageLog, User, UserAllowedGroup,
|
RedeemCode, Setting, SoraAccount, SoraCacheFile, SoraTask, SoraUsageStat,
|
||||||
UserAttributeDefinition, UserAttributeValue, UserSubscription []ent.Interceptor
|
UsageCleanupTask, UsageLog, User, UserAllowedGroup, UserAttributeDefinition,
|
||||||
|
UserAttributeValue, UserSubscription []ent.Interceptor
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -21,6 +21,10 @@ import (
|
|||||||
"github.com/Wei-Shaw/sub2api/ent/proxy"
|
"github.com/Wei-Shaw/sub2api/ent/proxy"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/redeemcode"
|
"github.com/Wei-Shaw/sub2api/ent/redeemcode"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/setting"
|
"github.com/Wei-Shaw/sub2api/ent/setting"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soraaccount"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soracachefile"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soratask"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/sorausagestat"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/usagecleanuptask"
|
"github.com/Wei-Shaw/sub2api/ent/usagecleanuptask"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/usagelog"
|
"github.com/Wei-Shaw/sub2api/ent/usagelog"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/user"
|
"github.com/Wei-Shaw/sub2api/ent/user"
|
||||||
@@ -97,6 +101,10 @@ func checkColumn(t, c string) error {
|
|||||||
proxy.Table: proxy.ValidColumn,
|
proxy.Table: proxy.ValidColumn,
|
||||||
redeemcode.Table: redeemcode.ValidColumn,
|
redeemcode.Table: redeemcode.ValidColumn,
|
||||||
setting.Table: setting.ValidColumn,
|
setting.Table: setting.ValidColumn,
|
||||||
|
soraaccount.Table: soraaccount.ValidColumn,
|
||||||
|
soracachefile.Table: soracachefile.ValidColumn,
|
||||||
|
soratask.Table: soratask.ValidColumn,
|
||||||
|
sorausagestat.Table: sorausagestat.ValidColumn,
|
||||||
usagecleanuptask.Table: usagecleanuptask.ValidColumn,
|
usagecleanuptask.Table: usagecleanuptask.ValidColumn,
|
||||||
usagelog.Table: usagelog.ValidColumn,
|
usagelog.Table: usagelog.ValidColumn,
|
||||||
user.Table: user.ValidColumn,
|
user.Table: user.ValidColumn,
|
||||||
|
|||||||
@@ -117,6 +117,54 @@ func (f SettingFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, err
|
|||||||
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.SettingMutation", m)
|
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.SettingMutation", m)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// The SoraAccountFunc type is an adapter to allow the use of ordinary
|
||||||
|
// function as SoraAccount mutator.
|
||||||
|
type SoraAccountFunc func(context.Context, *ent.SoraAccountMutation) (ent.Value, error)
|
||||||
|
|
||||||
|
// Mutate calls f(ctx, m).
|
||||||
|
func (f SoraAccountFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
|
||||||
|
if mv, ok := m.(*ent.SoraAccountMutation); ok {
|
||||||
|
return f(ctx, mv)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.SoraAccountMutation", m)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The SoraCacheFileFunc type is an adapter to allow the use of ordinary
|
||||||
|
// function as SoraCacheFile mutator.
|
||||||
|
type SoraCacheFileFunc func(context.Context, *ent.SoraCacheFileMutation) (ent.Value, error)
|
||||||
|
|
||||||
|
// Mutate calls f(ctx, m).
|
||||||
|
func (f SoraCacheFileFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
|
||||||
|
if mv, ok := m.(*ent.SoraCacheFileMutation); ok {
|
||||||
|
return f(ctx, mv)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.SoraCacheFileMutation", m)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The SoraTaskFunc type is an adapter to allow the use of ordinary
|
||||||
|
// function as SoraTask mutator.
|
||||||
|
type SoraTaskFunc func(context.Context, *ent.SoraTaskMutation) (ent.Value, error)
|
||||||
|
|
||||||
|
// Mutate calls f(ctx, m).
|
||||||
|
func (f SoraTaskFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
|
||||||
|
if mv, ok := m.(*ent.SoraTaskMutation); ok {
|
||||||
|
return f(ctx, mv)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.SoraTaskMutation", m)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The SoraUsageStatFunc type is an adapter to allow the use of ordinary
|
||||||
|
// function as SoraUsageStat mutator.
|
||||||
|
type SoraUsageStatFunc func(context.Context, *ent.SoraUsageStatMutation) (ent.Value, error)
|
||||||
|
|
||||||
|
// Mutate calls f(ctx, m).
|
||||||
|
func (f SoraUsageStatFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
|
||||||
|
if mv, ok := m.(*ent.SoraUsageStatMutation); ok {
|
||||||
|
return f(ctx, mv)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.SoraUsageStatMutation", m)
|
||||||
|
}
|
||||||
|
|
||||||
// The UsageCleanupTaskFunc type is an adapter to allow the use of ordinary
|
// The UsageCleanupTaskFunc type is an adapter to allow the use of ordinary
|
||||||
// function as UsageCleanupTask mutator.
|
// function as UsageCleanupTask mutator.
|
||||||
type UsageCleanupTaskFunc func(context.Context, *ent.UsageCleanupTaskMutation) (ent.Value, error)
|
type UsageCleanupTaskFunc func(context.Context, *ent.UsageCleanupTaskMutation) (ent.Value, error)
|
||||||
|
|||||||
@@ -18,6 +18,10 @@ import (
|
|||||||
"github.com/Wei-Shaw/sub2api/ent/proxy"
|
"github.com/Wei-Shaw/sub2api/ent/proxy"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/redeemcode"
|
"github.com/Wei-Shaw/sub2api/ent/redeemcode"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/setting"
|
"github.com/Wei-Shaw/sub2api/ent/setting"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soraaccount"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soracachefile"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soratask"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/sorausagestat"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/usagecleanuptask"
|
"github.com/Wei-Shaw/sub2api/ent/usagecleanuptask"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/usagelog"
|
"github.com/Wei-Shaw/sub2api/ent/usagelog"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/user"
|
"github.com/Wei-Shaw/sub2api/ent/user"
|
||||||
@@ -326,6 +330,114 @@ func (f TraverseSetting) Traverse(ctx context.Context, q ent.Query) error {
|
|||||||
return fmt.Errorf("unexpected query type %T. expect *ent.SettingQuery", q)
|
return fmt.Errorf("unexpected query type %T. expect *ent.SettingQuery", q)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// The SoraAccountFunc type is an adapter to allow the use of ordinary function as a Querier.
|
||||||
|
type SoraAccountFunc func(context.Context, *ent.SoraAccountQuery) (ent.Value, error)
|
||||||
|
|
||||||
|
// Query calls f(ctx, q).
|
||||||
|
func (f SoraAccountFunc) Query(ctx context.Context, q ent.Query) (ent.Value, error) {
|
||||||
|
if q, ok := q.(*ent.SoraAccountQuery); ok {
|
||||||
|
return f(ctx, q)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unexpected query type %T. expect *ent.SoraAccountQuery", q)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The TraverseSoraAccount type is an adapter to allow the use of ordinary function as Traverser.
|
||||||
|
type TraverseSoraAccount func(context.Context, *ent.SoraAccountQuery) error
|
||||||
|
|
||||||
|
// Intercept is a dummy implementation of Intercept that returns the next Querier in the pipeline.
|
||||||
|
func (f TraverseSoraAccount) Intercept(next ent.Querier) ent.Querier {
|
||||||
|
return next
|
||||||
|
}
|
||||||
|
|
||||||
|
// Traverse calls f(ctx, q).
|
||||||
|
func (f TraverseSoraAccount) Traverse(ctx context.Context, q ent.Query) error {
|
||||||
|
if q, ok := q.(*ent.SoraAccountQuery); ok {
|
||||||
|
return f(ctx, q)
|
||||||
|
}
|
||||||
|
return fmt.Errorf("unexpected query type %T. expect *ent.SoraAccountQuery", q)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The SoraCacheFileFunc type is an adapter to allow the use of ordinary function as a Querier.
|
||||||
|
type SoraCacheFileFunc func(context.Context, *ent.SoraCacheFileQuery) (ent.Value, error)
|
||||||
|
|
||||||
|
// Query calls f(ctx, q).
|
||||||
|
func (f SoraCacheFileFunc) Query(ctx context.Context, q ent.Query) (ent.Value, error) {
|
||||||
|
if q, ok := q.(*ent.SoraCacheFileQuery); ok {
|
||||||
|
return f(ctx, q)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unexpected query type %T. expect *ent.SoraCacheFileQuery", q)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The TraverseSoraCacheFile type is an adapter to allow the use of ordinary function as Traverser.
|
||||||
|
type TraverseSoraCacheFile func(context.Context, *ent.SoraCacheFileQuery) error
|
||||||
|
|
||||||
|
// Intercept is a dummy implementation of Intercept that returns the next Querier in the pipeline.
|
||||||
|
func (f TraverseSoraCacheFile) Intercept(next ent.Querier) ent.Querier {
|
||||||
|
return next
|
||||||
|
}
|
||||||
|
|
||||||
|
// Traverse calls f(ctx, q).
|
||||||
|
func (f TraverseSoraCacheFile) Traverse(ctx context.Context, q ent.Query) error {
|
||||||
|
if q, ok := q.(*ent.SoraCacheFileQuery); ok {
|
||||||
|
return f(ctx, q)
|
||||||
|
}
|
||||||
|
return fmt.Errorf("unexpected query type %T. expect *ent.SoraCacheFileQuery", q)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The SoraTaskFunc type is an adapter to allow the use of ordinary function as a Querier.
|
||||||
|
type SoraTaskFunc func(context.Context, *ent.SoraTaskQuery) (ent.Value, error)
|
||||||
|
|
||||||
|
// Query calls f(ctx, q).
|
||||||
|
func (f SoraTaskFunc) Query(ctx context.Context, q ent.Query) (ent.Value, error) {
|
||||||
|
if q, ok := q.(*ent.SoraTaskQuery); ok {
|
||||||
|
return f(ctx, q)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unexpected query type %T. expect *ent.SoraTaskQuery", q)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The TraverseSoraTask type is an adapter to allow the use of ordinary function as Traverser.
|
||||||
|
type TraverseSoraTask func(context.Context, *ent.SoraTaskQuery) error
|
||||||
|
|
||||||
|
// Intercept is a dummy implementation of Intercept that returns the next Querier in the pipeline.
|
||||||
|
func (f TraverseSoraTask) Intercept(next ent.Querier) ent.Querier {
|
||||||
|
return next
|
||||||
|
}
|
||||||
|
|
||||||
|
// Traverse calls f(ctx, q).
|
||||||
|
func (f TraverseSoraTask) Traverse(ctx context.Context, q ent.Query) error {
|
||||||
|
if q, ok := q.(*ent.SoraTaskQuery); ok {
|
||||||
|
return f(ctx, q)
|
||||||
|
}
|
||||||
|
return fmt.Errorf("unexpected query type %T. expect *ent.SoraTaskQuery", q)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The SoraUsageStatFunc type is an adapter to allow the use of ordinary function as a Querier.
|
||||||
|
type SoraUsageStatFunc func(context.Context, *ent.SoraUsageStatQuery) (ent.Value, error)
|
||||||
|
|
||||||
|
// Query calls f(ctx, q).
|
||||||
|
func (f SoraUsageStatFunc) Query(ctx context.Context, q ent.Query) (ent.Value, error) {
|
||||||
|
if q, ok := q.(*ent.SoraUsageStatQuery); ok {
|
||||||
|
return f(ctx, q)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("unexpected query type %T. expect *ent.SoraUsageStatQuery", q)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The TraverseSoraUsageStat type is an adapter to allow the use of ordinary function as Traverser.
|
||||||
|
type TraverseSoraUsageStat func(context.Context, *ent.SoraUsageStatQuery) error
|
||||||
|
|
||||||
|
// Intercept is a dummy implementation of Intercept that returns the next Querier in the pipeline.
|
||||||
|
func (f TraverseSoraUsageStat) Intercept(next ent.Querier) ent.Querier {
|
||||||
|
return next
|
||||||
|
}
|
||||||
|
|
||||||
|
// Traverse calls f(ctx, q).
|
||||||
|
func (f TraverseSoraUsageStat) Traverse(ctx context.Context, q ent.Query) error {
|
||||||
|
if q, ok := q.(*ent.SoraUsageStatQuery); ok {
|
||||||
|
return f(ctx, q)
|
||||||
|
}
|
||||||
|
return fmt.Errorf("unexpected query type %T. expect *ent.SoraUsageStatQuery", q)
|
||||||
|
}
|
||||||
|
|
||||||
// The UsageCleanupTaskFunc type is an adapter to allow the use of ordinary function as a Querier.
|
// The UsageCleanupTaskFunc type is an adapter to allow the use of ordinary function as a Querier.
|
||||||
type UsageCleanupTaskFunc func(context.Context, *ent.UsageCleanupTaskQuery) (ent.Value, error)
|
type UsageCleanupTaskFunc func(context.Context, *ent.UsageCleanupTaskQuery) (ent.Value, error)
|
||||||
|
|
||||||
@@ -536,6 +648,14 @@ func NewQuery(q ent.Query) (Query, error) {
|
|||||||
return &query[*ent.RedeemCodeQuery, predicate.RedeemCode, redeemcode.OrderOption]{typ: ent.TypeRedeemCode, tq: q}, nil
|
return &query[*ent.RedeemCodeQuery, predicate.RedeemCode, redeemcode.OrderOption]{typ: ent.TypeRedeemCode, tq: q}, nil
|
||||||
case *ent.SettingQuery:
|
case *ent.SettingQuery:
|
||||||
return &query[*ent.SettingQuery, predicate.Setting, setting.OrderOption]{typ: ent.TypeSetting, tq: q}, nil
|
return &query[*ent.SettingQuery, predicate.Setting, setting.OrderOption]{typ: ent.TypeSetting, tq: q}, nil
|
||||||
|
case *ent.SoraAccountQuery:
|
||||||
|
return &query[*ent.SoraAccountQuery, predicate.SoraAccount, soraaccount.OrderOption]{typ: ent.TypeSoraAccount, tq: q}, nil
|
||||||
|
case *ent.SoraCacheFileQuery:
|
||||||
|
return &query[*ent.SoraCacheFileQuery, predicate.SoraCacheFile, soracachefile.OrderOption]{typ: ent.TypeSoraCacheFile, tq: q}, nil
|
||||||
|
case *ent.SoraTaskQuery:
|
||||||
|
return &query[*ent.SoraTaskQuery, predicate.SoraTask, soratask.OrderOption]{typ: ent.TypeSoraTask, tq: q}, nil
|
||||||
|
case *ent.SoraUsageStatQuery:
|
||||||
|
return &query[*ent.SoraUsageStatQuery, predicate.SoraUsageStat, sorausagestat.OrderOption]{typ: ent.TypeSoraUsageStat, tq: q}, nil
|
||||||
case *ent.UsageCleanupTaskQuery:
|
case *ent.UsageCleanupTaskQuery:
|
||||||
return &query[*ent.UsageCleanupTaskQuery, predicate.UsageCleanupTask, usagecleanuptask.OrderOption]{typ: ent.TypeUsageCleanupTask, tq: q}, nil
|
return &query[*ent.UsageCleanupTaskQuery, predicate.UsageCleanupTask, usagecleanuptask.OrderOption]{typ: ent.TypeUsageCleanupTask, tq: q}, nil
|
||||||
case *ent.UsageLogQuery:
|
case *ent.UsageLogQuery:
|
||||||
|
|||||||
@@ -434,6 +434,172 @@ var (
|
|||||||
Columns: SettingsColumns,
|
Columns: SettingsColumns,
|
||||||
PrimaryKey: []*schema.Column{SettingsColumns[0]},
|
PrimaryKey: []*schema.Column{SettingsColumns[0]},
|
||||||
}
|
}
|
||||||
|
// SoraAccountsColumns holds the columns for the "sora_accounts" table.
|
||||||
|
SoraAccountsColumns = []*schema.Column{
|
||||||
|
{Name: "id", Type: field.TypeInt64, Increment: true},
|
||||||
|
{Name: "created_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
{Name: "updated_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
{Name: "account_id", Type: field.TypeInt64},
|
||||||
|
{Name: "access_token", Type: field.TypeString, Nullable: true},
|
||||||
|
{Name: "session_token", Type: field.TypeString, Nullable: true},
|
||||||
|
{Name: "refresh_token", Type: field.TypeString, Nullable: true},
|
||||||
|
{Name: "client_id", Type: field.TypeString, Nullable: true},
|
||||||
|
{Name: "email", Type: field.TypeString, Nullable: true},
|
||||||
|
{Name: "username", Type: field.TypeString, Nullable: true},
|
||||||
|
{Name: "remark", Type: field.TypeString, Nullable: true, SchemaType: map[string]string{"postgres": "text"}},
|
||||||
|
{Name: "use_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "plan_type", Type: field.TypeString, Nullable: true},
|
||||||
|
{Name: "plan_title", Type: field.TypeString, Nullable: true},
|
||||||
|
{Name: "subscription_end", Type: field.TypeTime, Nullable: true, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
{Name: "sora_supported", Type: field.TypeBool, Default: false},
|
||||||
|
{Name: "sora_invite_code", Type: field.TypeString, Nullable: true},
|
||||||
|
{Name: "sora_redeemed_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "sora_remaining_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "sora_total_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "sora_cooldown_until", Type: field.TypeTime, Nullable: true, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
{Name: "cooled_until", Type: field.TypeTime, Nullable: true, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
{Name: "image_enabled", Type: field.TypeBool, Default: true},
|
||||||
|
{Name: "video_enabled", Type: field.TypeBool, Default: true},
|
||||||
|
{Name: "image_concurrency", Type: field.TypeInt, Default: -1},
|
||||||
|
{Name: "video_concurrency", Type: field.TypeInt, Default: -1},
|
||||||
|
{Name: "is_expired", Type: field.TypeBool, Default: false},
|
||||||
|
}
|
||||||
|
// SoraAccountsTable holds the schema information for the "sora_accounts" table.
|
||||||
|
SoraAccountsTable = &schema.Table{
|
||||||
|
Name: "sora_accounts",
|
||||||
|
Columns: SoraAccountsColumns,
|
||||||
|
PrimaryKey: []*schema.Column{SoraAccountsColumns[0]},
|
||||||
|
Indexes: []*schema.Index{
|
||||||
|
{
|
||||||
|
Name: "soraaccount_account_id",
|
||||||
|
Unique: true,
|
||||||
|
Columns: []*schema.Column{SoraAccountsColumns[3]},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "soraaccount_plan_type",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraAccountsColumns[12]},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "soraaccount_sora_supported",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraAccountsColumns[15]},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "soraaccount_image_enabled",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraAccountsColumns[22]},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "soraaccount_video_enabled",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraAccountsColumns[23]},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
// SoraCacheFilesColumns holds the columns for the "sora_cache_files" table.
|
||||||
|
SoraCacheFilesColumns = []*schema.Column{
|
||||||
|
{Name: "id", Type: field.TypeInt64, Increment: true},
|
||||||
|
{Name: "task_id", Type: field.TypeString, Nullable: true, Size: 120},
|
||||||
|
{Name: "account_id", Type: field.TypeInt64},
|
||||||
|
{Name: "user_id", Type: field.TypeInt64},
|
||||||
|
{Name: "media_type", Type: field.TypeString, Size: 32},
|
||||||
|
{Name: "original_url", Type: field.TypeString, SchemaType: map[string]string{"postgres": "text"}},
|
||||||
|
{Name: "cache_path", Type: field.TypeString, SchemaType: map[string]string{"postgres": "text"}},
|
||||||
|
{Name: "cache_url", Type: field.TypeString, SchemaType: map[string]string{"postgres": "text"}},
|
||||||
|
{Name: "size_bytes", Type: field.TypeInt64, Default: 0},
|
||||||
|
{Name: "created_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
}
|
||||||
|
// SoraCacheFilesTable holds the schema information for the "sora_cache_files" table.
|
||||||
|
SoraCacheFilesTable = &schema.Table{
|
||||||
|
Name: "sora_cache_files",
|
||||||
|
Columns: SoraCacheFilesColumns,
|
||||||
|
PrimaryKey: []*schema.Column{SoraCacheFilesColumns[0]},
|
||||||
|
Indexes: []*schema.Index{
|
||||||
|
{
|
||||||
|
Name: "soracachefile_account_id",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraCacheFilesColumns[2]},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "soracachefile_user_id",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraCacheFilesColumns[3]},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "soracachefile_media_type",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraCacheFilesColumns[4]},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
// SoraTasksColumns holds the columns for the "sora_tasks" table.
|
||||||
|
SoraTasksColumns = []*schema.Column{
|
||||||
|
{Name: "id", Type: field.TypeInt64, Increment: true},
|
||||||
|
{Name: "task_id", Type: field.TypeString, Unique: true, Size: 120},
|
||||||
|
{Name: "account_id", Type: field.TypeInt64},
|
||||||
|
{Name: "model", Type: field.TypeString, Size: 120},
|
||||||
|
{Name: "prompt", Type: field.TypeString, SchemaType: map[string]string{"postgres": "text"}},
|
||||||
|
{Name: "status", Type: field.TypeString, Size: 32, Default: "processing"},
|
||||||
|
{Name: "progress", Type: field.TypeFloat64, Default: 0},
|
||||||
|
{Name: "result_urls", Type: field.TypeString, Nullable: true, SchemaType: map[string]string{"postgres": "text"}},
|
||||||
|
{Name: "error_message", Type: field.TypeString, Nullable: true, SchemaType: map[string]string{"postgres": "text"}},
|
||||||
|
{Name: "retry_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "created_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
{Name: "completed_at", Type: field.TypeTime, Nullable: true, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
}
|
||||||
|
// SoraTasksTable holds the schema information for the "sora_tasks" table.
|
||||||
|
SoraTasksTable = &schema.Table{
|
||||||
|
Name: "sora_tasks",
|
||||||
|
Columns: SoraTasksColumns,
|
||||||
|
PrimaryKey: []*schema.Column{SoraTasksColumns[0]},
|
||||||
|
Indexes: []*schema.Index{
|
||||||
|
{
|
||||||
|
Name: "soratask_account_id",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraTasksColumns[2]},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "soratask_status",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraTasksColumns[5]},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
// SoraUsageStatsColumns holds the columns for the "sora_usage_stats" table.
|
||||||
|
SoraUsageStatsColumns = []*schema.Column{
|
||||||
|
{Name: "id", Type: field.TypeInt64, Increment: true},
|
||||||
|
{Name: "created_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
{Name: "updated_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
{Name: "account_id", Type: field.TypeInt64},
|
||||||
|
{Name: "image_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "video_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "error_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "last_error_at", Type: field.TypeTime, Nullable: true, SchemaType: map[string]string{"postgres": "timestamptz"}},
|
||||||
|
{Name: "today_image_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "today_video_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "today_error_count", Type: field.TypeInt, Default: 0},
|
||||||
|
{Name: "today_date", Type: field.TypeTime, Nullable: true, SchemaType: map[string]string{"postgres": "date"}},
|
||||||
|
{Name: "consecutive_error_count", Type: field.TypeInt, Default: 0},
|
||||||
|
}
|
||||||
|
// SoraUsageStatsTable holds the schema information for the "sora_usage_stats" table.
|
||||||
|
SoraUsageStatsTable = &schema.Table{
|
||||||
|
Name: "sora_usage_stats",
|
||||||
|
Columns: SoraUsageStatsColumns,
|
||||||
|
PrimaryKey: []*schema.Column{SoraUsageStatsColumns[0]},
|
||||||
|
Indexes: []*schema.Index{
|
||||||
|
{
|
||||||
|
Name: "sorausagestat_account_id",
|
||||||
|
Unique: true,
|
||||||
|
Columns: []*schema.Column{SoraUsageStatsColumns[3]},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "sorausagestat_today_date",
|
||||||
|
Unique: false,
|
||||||
|
Columns: []*schema.Column{SoraUsageStatsColumns[11]},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
// UsageCleanupTasksColumns holds the columns for the "usage_cleanup_tasks" table.
|
// UsageCleanupTasksColumns holds the columns for the "usage_cleanup_tasks" table.
|
||||||
UsageCleanupTasksColumns = []*schema.Column{
|
UsageCleanupTasksColumns = []*schema.Column{
|
||||||
{Name: "id", Type: field.TypeInt64, Increment: true},
|
{Name: "id", Type: field.TypeInt64, Increment: true},
|
||||||
@@ -843,6 +1009,10 @@ var (
|
|||||||
ProxiesTable,
|
ProxiesTable,
|
||||||
RedeemCodesTable,
|
RedeemCodesTable,
|
||||||
SettingsTable,
|
SettingsTable,
|
||||||
|
SoraAccountsTable,
|
||||||
|
SoraCacheFilesTable,
|
||||||
|
SoraTasksTable,
|
||||||
|
SoraUsageStatsTable,
|
||||||
UsageCleanupTasksTable,
|
UsageCleanupTasksTable,
|
||||||
UsageLogsTable,
|
UsageLogsTable,
|
||||||
UsersTable,
|
UsersTable,
|
||||||
@@ -890,6 +1060,18 @@ func init() {
|
|||||||
SettingsTable.Annotation = &entsql.Annotation{
|
SettingsTable.Annotation = &entsql.Annotation{
|
||||||
Table: "settings",
|
Table: "settings",
|
||||||
}
|
}
|
||||||
|
SoraAccountsTable.Annotation = &entsql.Annotation{
|
||||||
|
Table: "sora_accounts",
|
||||||
|
}
|
||||||
|
SoraCacheFilesTable.Annotation = &entsql.Annotation{
|
||||||
|
Table: "sora_cache_files",
|
||||||
|
}
|
||||||
|
SoraTasksTable.Annotation = &entsql.Annotation{
|
||||||
|
Table: "sora_tasks",
|
||||||
|
}
|
||||||
|
SoraUsageStatsTable.Annotation = &entsql.Annotation{
|
||||||
|
Table: "sora_usage_stats",
|
||||||
|
}
|
||||||
UsageCleanupTasksTable.Annotation = &entsql.Annotation{
|
UsageCleanupTasksTable.Annotation = &entsql.Annotation{
|
||||||
Table: "usage_cleanup_tasks",
|
Table: "usage_cleanup_tasks",
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -33,6 +33,18 @@ type RedeemCode func(*sql.Selector)
|
|||||||
// Setting is the predicate function for setting builders.
|
// Setting is the predicate function for setting builders.
|
||||||
type Setting func(*sql.Selector)
|
type Setting func(*sql.Selector)
|
||||||
|
|
||||||
|
// SoraAccount is the predicate function for soraaccount builders.
|
||||||
|
type SoraAccount func(*sql.Selector)
|
||||||
|
|
||||||
|
// SoraCacheFile is the predicate function for soracachefile builders.
|
||||||
|
type SoraCacheFile func(*sql.Selector)
|
||||||
|
|
||||||
|
// SoraTask is the predicate function for soratask builders.
|
||||||
|
type SoraTask func(*sql.Selector)
|
||||||
|
|
||||||
|
// SoraUsageStat is the predicate function for sorausagestat builders.
|
||||||
|
type SoraUsageStat func(*sql.Selector)
|
||||||
|
|
||||||
// UsageCleanupTask is the predicate function for usagecleanuptask builders.
|
// UsageCleanupTask is the predicate function for usagecleanuptask builders.
|
||||||
type UsageCleanupTask func(*sql.Selector)
|
type UsageCleanupTask func(*sql.Selector)
|
||||||
|
|
||||||
|
|||||||
@@ -15,6 +15,10 @@ import (
|
|||||||
"github.com/Wei-Shaw/sub2api/ent/redeemcode"
|
"github.com/Wei-Shaw/sub2api/ent/redeemcode"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/schema"
|
"github.com/Wei-Shaw/sub2api/ent/schema"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/setting"
|
"github.com/Wei-Shaw/sub2api/ent/setting"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soraaccount"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soracachefile"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soratask"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/sorausagestat"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/usagecleanuptask"
|
"github.com/Wei-Shaw/sub2api/ent/usagecleanuptask"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/usagelog"
|
"github.com/Wei-Shaw/sub2api/ent/usagelog"
|
||||||
"github.com/Wei-Shaw/sub2api/ent/user"
|
"github.com/Wei-Shaw/sub2api/ent/user"
|
||||||
@@ -496,6 +500,150 @@ func init() {
|
|||||||
setting.DefaultUpdatedAt = settingDescUpdatedAt.Default.(func() time.Time)
|
setting.DefaultUpdatedAt = settingDescUpdatedAt.Default.(func() time.Time)
|
||||||
// setting.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
|
// setting.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
|
||||||
setting.UpdateDefaultUpdatedAt = settingDescUpdatedAt.UpdateDefault.(func() time.Time)
|
setting.UpdateDefaultUpdatedAt = settingDescUpdatedAt.UpdateDefault.(func() time.Time)
|
||||||
|
soraaccountMixin := schema.SoraAccount{}.Mixin()
|
||||||
|
soraaccountMixinFields0 := soraaccountMixin[0].Fields()
|
||||||
|
_ = soraaccountMixinFields0
|
||||||
|
soraaccountFields := schema.SoraAccount{}.Fields()
|
||||||
|
_ = soraaccountFields
|
||||||
|
// soraaccountDescCreatedAt is the schema descriptor for created_at field.
|
||||||
|
soraaccountDescCreatedAt := soraaccountMixinFields0[0].Descriptor()
|
||||||
|
// soraaccount.DefaultCreatedAt holds the default value on creation for the created_at field.
|
||||||
|
soraaccount.DefaultCreatedAt = soraaccountDescCreatedAt.Default.(func() time.Time)
|
||||||
|
// soraaccountDescUpdatedAt is the schema descriptor for updated_at field.
|
||||||
|
soraaccountDescUpdatedAt := soraaccountMixinFields0[1].Descriptor()
|
||||||
|
// soraaccount.DefaultUpdatedAt holds the default value on creation for the updated_at field.
|
||||||
|
soraaccount.DefaultUpdatedAt = soraaccountDescUpdatedAt.Default.(func() time.Time)
|
||||||
|
// soraaccount.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
|
||||||
|
soraaccount.UpdateDefaultUpdatedAt = soraaccountDescUpdatedAt.UpdateDefault.(func() time.Time)
|
||||||
|
// soraaccountDescUseCount is the schema descriptor for use_count field.
|
||||||
|
soraaccountDescUseCount := soraaccountFields[8].Descriptor()
|
||||||
|
// soraaccount.DefaultUseCount holds the default value on creation for the use_count field.
|
||||||
|
soraaccount.DefaultUseCount = soraaccountDescUseCount.Default.(int)
|
||||||
|
// soraaccountDescSoraSupported is the schema descriptor for sora_supported field.
|
||||||
|
soraaccountDescSoraSupported := soraaccountFields[12].Descriptor()
|
||||||
|
// soraaccount.DefaultSoraSupported holds the default value on creation for the sora_supported field.
|
||||||
|
soraaccount.DefaultSoraSupported = soraaccountDescSoraSupported.Default.(bool)
|
||||||
|
// soraaccountDescSoraRedeemedCount is the schema descriptor for sora_redeemed_count field.
|
||||||
|
soraaccountDescSoraRedeemedCount := soraaccountFields[14].Descriptor()
|
||||||
|
// soraaccount.DefaultSoraRedeemedCount holds the default value on creation for the sora_redeemed_count field.
|
||||||
|
soraaccount.DefaultSoraRedeemedCount = soraaccountDescSoraRedeemedCount.Default.(int)
|
||||||
|
// soraaccountDescSoraRemainingCount is the schema descriptor for sora_remaining_count field.
|
||||||
|
soraaccountDescSoraRemainingCount := soraaccountFields[15].Descriptor()
|
||||||
|
// soraaccount.DefaultSoraRemainingCount holds the default value on creation for the sora_remaining_count field.
|
||||||
|
soraaccount.DefaultSoraRemainingCount = soraaccountDescSoraRemainingCount.Default.(int)
|
||||||
|
// soraaccountDescSoraTotalCount is the schema descriptor for sora_total_count field.
|
||||||
|
soraaccountDescSoraTotalCount := soraaccountFields[16].Descriptor()
|
||||||
|
// soraaccount.DefaultSoraTotalCount holds the default value on creation for the sora_total_count field.
|
||||||
|
soraaccount.DefaultSoraTotalCount = soraaccountDescSoraTotalCount.Default.(int)
|
||||||
|
// soraaccountDescImageEnabled is the schema descriptor for image_enabled field.
|
||||||
|
soraaccountDescImageEnabled := soraaccountFields[19].Descriptor()
|
||||||
|
// soraaccount.DefaultImageEnabled holds the default value on creation for the image_enabled field.
|
||||||
|
soraaccount.DefaultImageEnabled = soraaccountDescImageEnabled.Default.(bool)
|
||||||
|
// soraaccountDescVideoEnabled is the schema descriptor for video_enabled field.
|
||||||
|
soraaccountDescVideoEnabled := soraaccountFields[20].Descriptor()
|
||||||
|
// soraaccount.DefaultVideoEnabled holds the default value on creation for the video_enabled field.
|
||||||
|
soraaccount.DefaultVideoEnabled = soraaccountDescVideoEnabled.Default.(bool)
|
||||||
|
// soraaccountDescImageConcurrency is the schema descriptor for image_concurrency field.
|
||||||
|
soraaccountDescImageConcurrency := soraaccountFields[21].Descriptor()
|
||||||
|
// soraaccount.DefaultImageConcurrency holds the default value on creation for the image_concurrency field.
|
||||||
|
soraaccount.DefaultImageConcurrency = soraaccountDescImageConcurrency.Default.(int)
|
||||||
|
// soraaccountDescVideoConcurrency is the schema descriptor for video_concurrency field.
|
||||||
|
soraaccountDescVideoConcurrency := soraaccountFields[22].Descriptor()
|
||||||
|
// soraaccount.DefaultVideoConcurrency holds the default value on creation for the video_concurrency field.
|
||||||
|
soraaccount.DefaultVideoConcurrency = soraaccountDescVideoConcurrency.Default.(int)
|
||||||
|
// soraaccountDescIsExpired is the schema descriptor for is_expired field.
|
||||||
|
soraaccountDescIsExpired := soraaccountFields[23].Descriptor()
|
||||||
|
// soraaccount.DefaultIsExpired holds the default value on creation for the is_expired field.
|
||||||
|
soraaccount.DefaultIsExpired = soraaccountDescIsExpired.Default.(bool)
|
||||||
|
soracachefileFields := schema.SoraCacheFile{}.Fields()
|
||||||
|
_ = soracachefileFields
|
||||||
|
// soracachefileDescTaskID is the schema descriptor for task_id field.
|
||||||
|
soracachefileDescTaskID := soracachefileFields[0].Descriptor()
|
||||||
|
// soracachefile.TaskIDValidator is a validator for the "task_id" field. It is called by the builders before save.
|
||||||
|
soracachefile.TaskIDValidator = soracachefileDescTaskID.Validators[0].(func(string) error)
|
||||||
|
// soracachefileDescMediaType is the schema descriptor for media_type field.
|
||||||
|
soracachefileDescMediaType := soracachefileFields[3].Descriptor()
|
||||||
|
// soracachefile.MediaTypeValidator is a validator for the "media_type" field. It is called by the builders before save.
|
||||||
|
soracachefile.MediaTypeValidator = soracachefileDescMediaType.Validators[0].(func(string) error)
|
||||||
|
// soracachefileDescSizeBytes is the schema descriptor for size_bytes field.
|
||||||
|
soracachefileDescSizeBytes := soracachefileFields[7].Descriptor()
|
||||||
|
// soracachefile.DefaultSizeBytes holds the default value on creation for the size_bytes field.
|
||||||
|
soracachefile.DefaultSizeBytes = soracachefileDescSizeBytes.Default.(int64)
|
||||||
|
// soracachefileDescCreatedAt is the schema descriptor for created_at field.
|
||||||
|
soracachefileDescCreatedAt := soracachefileFields[8].Descriptor()
|
||||||
|
// soracachefile.DefaultCreatedAt holds the default value on creation for the created_at field.
|
||||||
|
soracachefile.DefaultCreatedAt = soracachefileDescCreatedAt.Default.(func() time.Time)
|
||||||
|
sorataskFields := schema.SoraTask{}.Fields()
|
||||||
|
_ = sorataskFields
|
||||||
|
// sorataskDescTaskID is the schema descriptor for task_id field.
|
||||||
|
sorataskDescTaskID := sorataskFields[0].Descriptor()
|
||||||
|
// soratask.TaskIDValidator is a validator for the "task_id" field. It is called by the builders before save.
|
||||||
|
soratask.TaskIDValidator = sorataskDescTaskID.Validators[0].(func(string) error)
|
||||||
|
// sorataskDescModel is the schema descriptor for model field.
|
||||||
|
sorataskDescModel := sorataskFields[2].Descriptor()
|
||||||
|
// soratask.ModelValidator is a validator for the "model" field. It is called by the builders before save.
|
||||||
|
soratask.ModelValidator = sorataskDescModel.Validators[0].(func(string) error)
|
||||||
|
// sorataskDescStatus is the schema descriptor for status field.
|
||||||
|
sorataskDescStatus := sorataskFields[4].Descriptor()
|
||||||
|
// soratask.DefaultStatus holds the default value on creation for the status field.
|
||||||
|
soratask.DefaultStatus = sorataskDescStatus.Default.(string)
|
||||||
|
// soratask.StatusValidator is a validator for the "status" field. It is called by the builders before save.
|
||||||
|
soratask.StatusValidator = sorataskDescStatus.Validators[0].(func(string) error)
|
||||||
|
// sorataskDescProgress is the schema descriptor for progress field.
|
||||||
|
sorataskDescProgress := sorataskFields[5].Descriptor()
|
||||||
|
// soratask.DefaultProgress holds the default value on creation for the progress field.
|
||||||
|
soratask.DefaultProgress = sorataskDescProgress.Default.(float64)
|
||||||
|
// sorataskDescRetryCount is the schema descriptor for retry_count field.
|
||||||
|
sorataskDescRetryCount := sorataskFields[8].Descriptor()
|
||||||
|
// soratask.DefaultRetryCount holds the default value on creation for the retry_count field.
|
||||||
|
soratask.DefaultRetryCount = sorataskDescRetryCount.Default.(int)
|
||||||
|
// sorataskDescCreatedAt is the schema descriptor for created_at field.
|
||||||
|
sorataskDescCreatedAt := sorataskFields[9].Descriptor()
|
||||||
|
// soratask.DefaultCreatedAt holds the default value on creation for the created_at field.
|
||||||
|
soratask.DefaultCreatedAt = sorataskDescCreatedAt.Default.(func() time.Time)
|
||||||
|
sorausagestatMixin := schema.SoraUsageStat{}.Mixin()
|
||||||
|
sorausagestatMixinFields0 := sorausagestatMixin[0].Fields()
|
||||||
|
_ = sorausagestatMixinFields0
|
||||||
|
sorausagestatFields := schema.SoraUsageStat{}.Fields()
|
||||||
|
_ = sorausagestatFields
|
||||||
|
// sorausagestatDescCreatedAt is the schema descriptor for created_at field.
|
||||||
|
sorausagestatDescCreatedAt := sorausagestatMixinFields0[0].Descriptor()
|
||||||
|
// sorausagestat.DefaultCreatedAt holds the default value on creation for the created_at field.
|
||||||
|
sorausagestat.DefaultCreatedAt = sorausagestatDescCreatedAt.Default.(func() time.Time)
|
||||||
|
// sorausagestatDescUpdatedAt is the schema descriptor for updated_at field.
|
||||||
|
sorausagestatDescUpdatedAt := sorausagestatMixinFields0[1].Descriptor()
|
||||||
|
// sorausagestat.DefaultUpdatedAt holds the default value on creation for the updated_at field.
|
||||||
|
sorausagestat.DefaultUpdatedAt = sorausagestatDescUpdatedAt.Default.(func() time.Time)
|
||||||
|
// sorausagestat.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
|
||||||
|
sorausagestat.UpdateDefaultUpdatedAt = sorausagestatDescUpdatedAt.UpdateDefault.(func() time.Time)
|
||||||
|
// sorausagestatDescImageCount is the schema descriptor for image_count field.
|
||||||
|
sorausagestatDescImageCount := sorausagestatFields[1].Descriptor()
|
||||||
|
// sorausagestat.DefaultImageCount holds the default value on creation for the image_count field.
|
||||||
|
sorausagestat.DefaultImageCount = sorausagestatDescImageCount.Default.(int)
|
||||||
|
// sorausagestatDescVideoCount is the schema descriptor for video_count field.
|
||||||
|
sorausagestatDescVideoCount := sorausagestatFields[2].Descriptor()
|
||||||
|
// sorausagestat.DefaultVideoCount holds the default value on creation for the video_count field.
|
||||||
|
sorausagestat.DefaultVideoCount = sorausagestatDescVideoCount.Default.(int)
|
||||||
|
// sorausagestatDescErrorCount is the schema descriptor for error_count field.
|
||||||
|
sorausagestatDescErrorCount := sorausagestatFields[3].Descriptor()
|
||||||
|
// sorausagestat.DefaultErrorCount holds the default value on creation for the error_count field.
|
||||||
|
sorausagestat.DefaultErrorCount = sorausagestatDescErrorCount.Default.(int)
|
||||||
|
// sorausagestatDescTodayImageCount is the schema descriptor for today_image_count field.
|
||||||
|
sorausagestatDescTodayImageCount := sorausagestatFields[5].Descriptor()
|
||||||
|
// sorausagestat.DefaultTodayImageCount holds the default value on creation for the today_image_count field.
|
||||||
|
sorausagestat.DefaultTodayImageCount = sorausagestatDescTodayImageCount.Default.(int)
|
||||||
|
// sorausagestatDescTodayVideoCount is the schema descriptor for today_video_count field.
|
||||||
|
sorausagestatDescTodayVideoCount := sorausagestatFields[6].Descriptor()
|
||||||
|
// sorausagestat.DefaultTodayVideoCount holds the default value on creation for the today_video_count field.
|
||||||
|
sorausagestat.DefaultTodayVideoCount = sorausagestatDescTodayVideoCount.Default.(int)
|
||||||
|
// sorausagestatDescTodayErrorCount is the schema descriptor for today_error_count field.
|
||||||
|
sorausagestatDescTodayErrorCount := sorausagestatFields[7].Descriptor()
|
||||||
|
// sorausagestat.DefaultTodayErrorCount holds the default value on creation for the today_error_count field.
|
||||||
|
sorausagestat.DefaultTodayErrorCount = sorausagestatDescTodayErrorCount.Default.(int)
|
||||||
|
// sorausagestatDescConsecutiveErrorCount is the schema descriptor for consecutive_error_count field.
|
||||||
|
sorausagestatDescConsecutiveErrorCount := sorausagestatFields[9].Descriptor()
|
||||||
|
// sorausagestat.DefaultConsecutiveErrorCount holds the default value on creation for the consecutive_error_count field.
|
||||||
|
sorausagestat.DefaultConsecutiveErrorCount = sorausagestatDescConsecutiveErrorCount.Default.(int)
|
||||||
usagecleanuptaskMixin := schema.UsageCleanupTask{}.Mixin()
|
usagecleanuptaskMixin := schema.UsageCleanupTask{}.Mixin()
|
||||||
usagecleanuptaskMixinFields0 := usagecleanuptaskMixin[0].Fields()
|
usagecleanuptaskMixinFields0 := usagecleanuptaskMixin[0].Fields()
|
||||||
_ = usagecleanuptaskMixinFields0
|
_ = usagecleanuptaskMixinFields0
|
||||||
|
|||||||
115
backend/ent/schema/sora_account.go
Normal file
115
backend/ent/schema/sora_account.go
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
// Package schema 定义 Ent ORM 的数据库 schema。
|
||||||
|
// 每个文件对应一个数据库实体(表),定义其字段、边(关联)和索引。
|
||||||
|
package schema
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/schema/mixins"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect"
|
||||||
|
"entgo.io/ent/dialect/entsql"
|
||||||
|
"entgo.io/ent/schema"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"entgo.io/ent/schema/index"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraAccount 定义 Sora 账号扩展表。
|
||||||
|
type SoraAccount struct {
|
||||||
|
ent.Schema
|
||||||
|
}
|
||||||
|
|
||||||
|
// Annotations 返回 schema 的注解配置。
|
||||||
|
func (SoraAccount) Annotations() []schema.Annotation {
|
||||||
|
return []schema.Annotation{
|
||||||
|
entsql.Annotation{Table: "sora_accounts"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mixin 返回该 schema 使用的混入组件。
|
||||||
|
func (SoraAccount) Mixin() []ent.Mixin {
|
||||||
|
return []ent.Mixin{
|
||||||
|
mixins.TimeMixin{},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fields 定义 SoraAccount 的字段。
|
||||||
|
func (SoraAccount) Fields() []ent.Field {
|
||||||
|
return []ent.Field{
|
||||||
|
field.Int64("account_id").
|
||||||
|
Comment("关联 accounts 表的 ID"),
|
||||||
|
field.String("access_token").
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.String("session_token").
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.String("refresh_token").
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.String("client_id").
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.String("email").
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.String("username").
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.String("remark").
|
||||||
|
Optional().
|
||||||
|
Nillable().
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "text"}),
|
||||||
|
field.Int("use_count").
|
||||||
|
Default(0),
|
||||||
|
field.String("plan_type").
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.String("plan_title").
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.Time("subscription_end").
|
||||||
|
Optional().
|
||||||
|
Nillable().
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
|
||||||
|
field.Bool("sora_supported").
|
||||||
|
Default(false),
|
||||||
|
field.String("sora_invite_code").
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.Int("sora_redeemed_count").
|
||||||
|
Default(0),
|
||||||
|
field.Int("sora_remaining_count").
|
||||||
|
Default(0),
|
||||||
|
field.Int("sora_total_count").
|
||||||
|
Default(0),
|
||||||
|
field.Time("sora_cooldown_until").
|
||||||
|
Optional().
|
||||||
|
Nillable().
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
|
||||||
|
field.Time("cooled_until").
|
||||||
|
Optional().
|
||||||
|
Nillable().
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
|
||||||
|
field.Bool("image_enabled").
|
||||||
|
Default(true),
|
||||||
|
field.Bool("video_enabled").
|
||||||
|
Default(true),
|
||||||
|
field.Int("image_concurrency").
|
||||||
|
Default(-1),
|
||||||
|
field.Int("video_concurrency").
|
||||||
|
Default(-1),
|
||||||
|
field.Bool("is_expired").
|
||||||
|
Default(false),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Indexes 定义索引。
|
||||||
|
func (SoraAccount) Indexes() []ent.Index {
|
||||||
|
return []ent.Index{
|
||||||
|
index.Fields("account_id").Unique(),
|
||||||
|
index.Fields("plan_type"),
|
||||||
|
index.Fields("sora_supported"),
|
||||||
|
index.Fields("image_enabled"),
|
||||||
|
index.Fields("video_enabled"),
|
||||||
|
}
|
||||||
|
}
|
||||||
60
backend/ent/schema/sora_cache_file.go
Normal file
60
backend/ent/schema/sora_cache_file.go
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
// Package schema 定义 Ent ORM 的数据库 schema。
|
||||||
|
// 每个文件对应一个数据库实体(表),定义其字段、边(关联)和索引。
|
||||||
|
package schema
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect"
|
||||||
|
"entgo.io/ent/dialect/entsql"
|
||||||
|
"entgo.io/ent/schema"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"entgo.io/ent/schema/index"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraCacheFile 定义 Sora 缓存文件表。
|
||||||
|
type SoraCacheFile struct {
|
||||||
|
ent.Schema
|
||||||
|
}
|
||||||
|
|
||||||
|
// Annotations 返回 schema 的注解配置。
|
||||||
|
func (SoraCacheFile) Annotations() []schema.Annotation {
|
||||||
|
return []schema.Annotation{
|
||||||
|
entsql.Annotation{Table: "sora_cache_files"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fields 定义 SoraCacheFile 的字段。
|
||||||
|
func (SoraCacheFile) Fields() []ent.Field {
|
||||||
|
return []ent.Field{
|
||||||
|
field.String("task_id").
|
||||||
|
MaxLen(120).
|
||||||
|
Optional().
|
||||||
|
Nillable(),
|
||||||
|
field.Int64("account_id"),
|
||||||
|
field.Int64("user_id"),
|
||||||
|
field.String("media_type").
|
||||||
|
MaxLen(32),
|
||||||
|
field.String("original_url").
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "text"}),
|
||||||
|
field.String("cache_path").
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "text"}),
|
||||||
|
field.String("cache_url").
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "text"}),
|
||||||
|
field.Int64("size_bytes").
|
||||||
|
Default(0),
|
||||||
|
field.Time("created_at").
|
||||||
|
Default(time.Now).
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Indexes 定义索引。
|
||||||
|
func (SoraCacheFile) Indexes() []ent.Index {
|
||||||
|
return []ent.Index{
|
||||||
|
index.Fields("account_id"),
|
||||||
|
index.Fields("user_id"),
|
||||||
|
index.Fields("media_type"),
|
||||||
|
}
|
||||||
|
}
|
||||||
70
backend/ent/schema/sora_task.go
Normal file
70
backend/ent/schema/sora_task.go
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
// Package schema 定义 Ent ORM 的数据库 schema。
|
||||||
|
// 每个文件对应一个数据库实体(表),定义其字段、边(关联)和索引。
|
||||||
|
package schema
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect"
|
||||||
|
"entgo.io/ent/dialect/entsql"
|
||||||
|
"entgo.io/ent/schema"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"entgo.io/ent/schema/index"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraTask 定义 Sora 任务记录表。
|
||||||
|
type SoraTask struct {
|
||||||
|
ent.Schema
|
||||||
|
}
|
||||||
|
|
||||||
|
// Annotations 返回 schema 的注解配置。
|
||||||
|
func (SoraTask) Annotations() []schema.Annotation {
|
||||||
|
return []schema.Annotation{
|
||||||
|
entsql.Annotation{Table: "sora_tasks"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fields 定义 SoraTask 的字段。
|
||||||
|
func (SoraTask) Fields() []ent.Field {
|
||||||
|
return []ent.Field{
|
||||||
|
field.String("task_id").
|
||||||
|
MaxLen(120).
|
||||||
|
Unique(),
|
||||||
|
field.Int64("account_id"),
|
||||||
|
field.String("model").
|
||||||
|
MaxLen(120),
|
||||||
|
field.String("prompt").
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "text"}),
|
||||||
|
field.String("status").
|
||||||
|
MaxLen(32).
|
||||||
|
Default("processing"),
|
||||||
|
field.Float("progress").
|
||||||
|
Default(0),
|
||||||
|
field.String("result_urls").
|
||||||
|
Optional().
|
||||||
|
Nillable().
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "text"}),
|
||||||
|
field.String("error_message").
|
||||||
|
Optional().
|
||||||
|
Nillable().
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "text"}),
|
||||||
|
field.Int("retry_count").
|
||||||
|
Default(0),
|
||||||
|
field.Time("created_at").
|
||||||
|
Default(time.Now).
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
|
||||||
|
field.Time("completed_at").
|
||||||
|
Optional().
|
||||||
|
Nillable().
|
||||||
|
SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Indexes 定义索引。
|
||||||
|
func (SoraTask) Indexes() []ent.Index {
|
||||||
|
return []ent.Index{
|
||||||
|
index.Fields("account_id"),
|
||||||
|
index.Fields("status"),
|
||||||
|
}
|
||||||
|
}
|
||||||
71
backend/ent/schema/sora_usage_stat.go
Normal file
71
backend/ent/schema/sora_usage_stat.go
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
// Package schema 定义 Ent ORM 的数据库 schema。
|
||||||
|
// 每个文件对应一个数据库实体(表),定义其字段、边(关联)和索引。
|
||||||
|
package schema
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/schema/mixins"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect"
|
||||||
|
"entgo.io/ent/dialect/entsql"
|
||||||
|
"entgo.io/ent/schema"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"entgo.io/ent/schema/index"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraUsageStat defines the per-account Sora usage statistics table.
type SoraUsageStat struct {
	ent.Schema
}
|
||||||
|
|
||||||
|
// Annotations 返回 schema 的注解配置。
|
||||||
|
func (SoraUsageStat) Annotations() []schema.Annotation {
|
||||||
|
return []schema.Annotation{
|
||||||
|
entsql.Annotation{Table: "sora_usage_stats"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mixin attaches shared schema components. TimeMixin presumably contributes
// the created_at/updated_at timestamp columns — confirm in the mixins package.
func (SoraUsageStat) Mixin() []ent.Mixin {
	return []ent.Mixin{
		mixins.TimeMixin{},
	}
}
|
||||||
|
|
||||||
|
// Fields defines the columns of the sora_usage_stats table.
func (SoraUsageStat) Fields() []ent.Field {
	return []ent.Field{
		field.Int64("account_id").
			Comment("关联 accounts 表的 ID"),
		// Lifetime counters.
		field.Int("image_count").
			Default(0),
		field.Int("video_count").
			Default(0),
		field.Int("error_count").
			Default(0),
		field.Time("last_error_at").
			Optional().
			Nillable().
			SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
		// Per-day counters; they refer to the date stored in today_date.
		field.Int("today_image_count").
			Default(0),
		field.Int("today_video_count").
			Default(0),
		field.Int("today_error_count").
			Default(0),
		// Date the today_* counters belong to; nil until first use.
		field.Time("today_date").
			Optional().
			Nillable().
			SchemaType(map[string]string{dialect.Postgres: "date"}),
		// Consecutive failure streak; presumably reset on success — confirm in the service layer.
		field.Int("consecutive_error_count").
			Default(0),
	}
}
|
||||||
|
|
||||||
|
// Indexes 定义索引。
|
||||||
|
func (SoraUsageStat) Indexes() []ent.Index {
|
||||||
|
return []ent.Index{
|
||||||
|
index.Fields("account_id").Unique(),
|
||||||
|
index.Fields("today_date"),
|
||||||
|
}
|
||||||
|
}
|
||||||
422
backend/ent/soraaccount.go
Normal file
422
backend/ent/soraaccount.go
Normal file
@@ -0,0 +1,422 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soraaccount"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraAccount is the model entity for the SoraAccount schema.
// NOTE(review): this struct carries raw OAuth credentials (access/session/refresh
// tokens); avoid logging whole values of this type.
type SoraAccount struct {
	config `json:"-"`
	// ID of the ent.
	ID int64 `json:"id,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// UpdatedAt holds the value of the "updated_at" field.
	UpdatedAt time.Time `json:"updated_at,omitempty"`
	// ID of the related row in the accounts table.
	AccountID int64 `json:"account_id,omitempty"`
	// AccessToken holds the value of the "access_token" field.
	AccessToken *string `json:"access_token,omitempty"`
	// SessionToken holds the value of the "session_token" field.
	SessionToken *string `json:"session_token,omitempty"`
	// RefreshToken holds the value of the "refresh_token" field.
	RefreshToken *string `json:"refresh_token,omitempty"`
	// ClientID holds the value of the "client_id" field.
	ClientID *string `json:"client_id,omitempty"`
	// Email holds the value of the "email" field.
	Email *string `json:"email,omitempty"`
	// Username holds the value of the "username" field.
	Username *string `json:"username,omitempty"`
	// Remark holds the value of the "remark" field.
	Remark *string `json:"remark,omitempty"`
	// UseCount holds the value of the "use_count" field.
	UseCount int `json:"use_count,omitempty"`
	// PlanType holds the value of the "plan_type" field.
	PlanType *string `json:"plan_type,omitempty"`
	// PlanTitle holds the value of the "plan_title" field.
	PlanTitle *string `json:"plan_title,omitempty"`
	// SubscriptionEnd holds the value of the "subscription_end" field.
	SubscriptionEnd *time.Time `json:"subscription_end,omitempty"`
	// SoraSupported holds the value of the "sora_supported" field.
	SoraSupported bool `json:"sora_supported,omitempty"`
	// SoraInviteCode holds the value of the "sora_invite_code" field.
	SoraInviteCode *string `json:"sora_invite_code,omitempty"`
	// SoraRedeemedCount holds the value of the "sora_redeemed_count" field.
	SoraRedeemedCount int `json:"sora_redeemed_count,omitempty"`
	// SoraRemainingCount holds the value of the "sora_remaining_count" field.
	SoraRemainingCount int `json:"sora_remaining_count,omitempty"`
	// SoraTotalCount holds the value of the "sora_total_count" field.
	SoraTotalCount int `json:"sora_total_count,omitempty"`
	// SoraCooldownUntil holds the value of the "sora_cooldown_until" field.
	SoraCooldownUntil *time.Time `json:"sora_cooldown_until,omitempty"`
	// CooledUntil holds the value of the "cooled_until" field.
	CooledUntil *time.Time `json:"cooled_until,omitempty"`
	// ImageEnabled holds the value of the "image_enabled" field.
	ImageEnabled bool `json:"image_enabled,omitempty"`
	// VideoEnabled holds the value of the "video_enabled" field.
	VideoEnabled bool `json:"video_enabled,omitempty"`
	// ImageConcurrency holds the value of the "image_concurrency" field.
	ImageConcurrency int `json:"image_concurrency,omitempty"`
	// VideoConcurrency holds the value of the "video_concurrency" field.
	VideoConcurrency int `json:"video_concurrency,omitempty"`
	// IsExpired holds the value of the "is_expired" field.
	IsExpired bool `json:"is_expired,omitempty"`
	selectValues sql.SelectValues
}
|
||||||
|
|
||||||
|
// scanValues returns the types for scanning values from sql.Rows.
// Generated by ent: each column is mapped to the matching sql.Null* scratch type.
func (*SoraAccount) scanValues(columns []string) ([]any, error) {
	values := make([]any, len(columns))
	for i := range columns {
		switch columns[i] {
		case soraaccount.FieldSoraSupported, soraaccount.FieldImageEnabled, soraaccount.FieldVideoEnabled, soraaccount.FieldIsExpired:
			values[i] = new(sql.NullBool)
		case soraaccount.FieldID, soraaccount.FieldAccountID, soraaccount.FieldUseCount, soraaccount.FieldSoraRedeemedCount, soraaccount.FieldSoraRemainingCount, soraaccount.FieldSoraTotalCount, soraaccount.FieldImageConcurrency, soraaccount.FieldVideoConcurrency:
			values[i] = new(sql.NullInt64)
		case soraaccount.FieldAccessToken, soraaccount.FieldSessionToken, soraaccount.FieldRefreshToken, soraaccount.FieldClientID, soraaccount.FieldEmail, soraaccount.FieldUsername, soraaccount.FieldRemark, soraaccount.FieldPlanType, soraaccount.FieldPlanTitle, soraaccount.FieldSoraInviteCode:
			values[i] = new(sql.NullString)
		case soraaccount.FieldCreatedAt, soraaccount.FieldUpdatedAt, soraaccount.FieldSubscriptionEnd, soraaccount.FieldSoraCooldownUntil, soraaccount.FieldCooledUntil:
			values[i] = new(sql.NullTime)
		default:
			// Columns selected via modifiers land here and are stored untyped.
			values[i] = new(sql.UnknownType)
		}
	}
	return values, nil
}
|
||||||
|
|
||||||
|
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the SoraAccount fields.
// Generated by ent: nullable columns only overwrite the field when Valid is set,
// so pointer fields stay nil for SQL NULLs.
func (_m *SoraAccount) assignValues(columns []string, values []any) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case soraaccount.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			_m.ID = int64(value.Int64)
		case soraaccount.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				_m.CreatedAt = value.Time
			}
		case soraaccount.FieldUpdatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
			} else if value.Valid {
				_m.UpdatedAt = value.Time
			}
		case soraaccount.FieldAccountID:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field account_id", values[i])
			} else if value.Valid {
				_m.AccountID = value.Int64
			}
		case soraaccount.FieldAccessToken:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field access_token", values[i])
			} else if value.Valid {
				_m.AccessToken = new(string)
				*_m.AccessToken = value.String
			}
		case soraaccount.FieldSessionToken:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field session_token", values[i])
			} else if value.Valid {
				_m.SessionToken = new(string)
				*_m.SessionToken = value.String
			}
		case soraaccount.FieldRefreshToken:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field refresh_token", values[i])
			} else if value.Valid {
				_m.RefreshToken = new(string)
				*_m.RefreshToken = value.String
			}
		case soraaccount.FieldClientID:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field client_id", values[i])
			} else if value.Valid {
				_m.ClientID = new(string)
				*_m.ClientID = value.String
			}
		case soraaccount.FieldEmail:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field email", values[i])
			} else if value.Valid {
				_m.Email = new(string)
				*_m.Email = value.String
			}
		case soraaccount.FieldUsername:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field username", values[i])
			} else if value.Valid {
				_m.Username = new(string)
				*_m.Username = value.String
			}
		case soraaccount.FieldRemark:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field remark", values[i])
			} else if value.Valid {
				_m.Remark = new(string)
				*_m.Remark = value.String
			}
		case soraaccount.FieldUseCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field use_count", values[i])
			} else if value.Valid {
				_m.UseCount = int(value.Int64)
			}
		case soraaccount.FieldPlanType:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field plan_type", values[i])
			} else if value.Valid {
				_m.PlanType = new(string)
				*_m.PlanType = value.String
			}
		case soraaccount.FieldPlanTitle:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field plan_title", values[i])
			} else if value.Valid {
				_m.PlanTitle = new(string)
				*_m.PlanTitle = value.String
			}
		case soraaccount.FieldSubscriptionEnd:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field subscription_end", values[i])
			} else if value.Valid {
				_m.SubscriptionEnd = new(time.Time)
				*_m.SubscriptionEnd = value.Time
			}
		case soraaccount.FieldSoraSupported:
			if value, ok := values[i].(*sql.NullBool); !ok {
				return fmt.Errorf("unexpected type %T for field sora_supported", values[i])
			} else if value.Valid {
				_m.SoraSupported = value.Bool
			}
		case soraaccount.FieldSoraInviteCode:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field sora_invite_code", values[i])
			} else if value.Valid {
				_m.SoraInviteCode = new(string)
				*_m.SoraInviteCode = value.String
			}
		case soraaccount.FieldSoraRedeemedCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field sora_redeemed_count", values[i])
			} else if value.Valid {
				_m.SoraRedeemedCount = int(value.Int64)
			}
		case soraaccount.FieldSoraRemainingCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field sora_remaining_count", values[i])
			} else if value.Valid {
				_m.SoraRemainingCount = int(value.Int64)
			}
		case soraaccount.FieldSoraTotalCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field sora_total_count", values[i])
			} else if value.Valid {
				_m.SoraTotalCount = int(value.Int64)
			}
		case soraaccount.FieldSoraCooldownUntil:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field sora_cooldown_until", values[i])
			} else if value.Valid {
				_m.SoraCooldownUntil = new(time.Time)
				*_m.SoraCooldownUntil = value.Time
			}
		case soraaccount.FieldCooledUntil:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field cooled_until", values[i])
			} else if value.Valid {
				_m.CooledUntil = new(time.Time)
				*_m.CooledUntil = value.Time
			}
		case soraaccount.FieldImageEnabled:
			if value, ok := values[i].(*sql.NullBool); !ok {
				return fmt.Errorf("unexpected type %T for field image_enabled", values[i])
			} else if value.Valid {
				_m.ImageEnabled = value.Bool
			}
		case soraaccount.FieldVideoEnabled:
			if value, ok := values[i].(*sql.NullBool); !ok {
				return fmt.Errorf("unexpected type %T for field video_enabled", values[i])
			} else if value.Valid {
				_m.VideoEnabled = value.Bool
			}
		case soraaccount.FieldImageConcurrency:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field image_concurrency", values[i])
			} else if value.Valid {
				_m.ImageConcurrency = int(value.Int64)
			}
		case soraaccount.FieldVideoConcurrency:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field video_concurrency", values[i])
			} else if value.Valid {
				_m.VideoConcurrency = int(value.Int64)
			}
		case soraaccount.FieldIsExpired:
			if value, ok := values[i].(*sql.NullBool); !ok {
				return fmt.Errorf("unexpected type %T for field is_expired", values[i])
			} else if value.Valid {
				_m.IsExpired = value.Bool
			}
		default:
			_m.selectValues.Set(columns[i], values[i])
		}
	}
	return nil
}
|
||||||
|
|
||||||
|
// Value returns the ent.Value that was dynamically selected and assigned to the SoraAccount.
// This includes values selected through modifiers, order, etc.
func (_m *SoraAccount) Value(name string) (ent.Value, error) {
	return _m.selectValues.Get(name)
}
|
||||||
|
|
||||||
|
// Update returns a builder for updating this SoraAccount.
// Note that you need to call SoraAccount.Unwrap() before calling this method if this SoraAccount
// was returned from a transaction, and the transaction was committed or rolled back.
func (_m *SoraAccount) Update() *SoraAccountUpdateOne {
	return NewSoraAccountClient(_m.config).UpdateOne(_m)
}
|
||||||
|
|
||||||
|
// Unwrap unwraps the SoraAccount entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// Panics (by generated-code convention) if the entity is not transactional.
func (_m *SoraAccount) Unwrap() *SoraAccount {
	_tx, ok := _m.config.driver.(*txDriver)
	if !ok {
		panic("ent: SoraAccount is not a transactional entity")
	}
	_m.config.driver = _tx.drv
	return _m
}
|
||||||
|
|
||||||
|
// String implements the fmt.Stringer.
// NOTE(review): this generated method renders access_token, session_token and
// refresh_token verbatim — do not log SoraAccount values in production paths.
func (_m *SoraAccount) String() string {
	var builder strings.Builder
	builder.WriteString("SoraAccount(")
	builder.WriteString(fmt.Sprintf("id=%v, ", _m.ID))
	builder.WriteString("created_at=")
	builder.WriteString(_m.CreatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("updated_at=")
	builder.WriteString(_m.UpdatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("account_id=")
	builder.WriteString(fmt.Sprintf("%v", _m.AccountID))
	builder.WriteString(", ")
	if v := _m.AccessToken; v != nil {
		builder.WriteString("access_token=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	if v := _m.SessionToken; v != nil {
		builder.WriteString("session_token=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	if v := _m.RefreshToken; v != nil {
		builder.WriteString("refresh_token=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	if v := _m.ClientID; v != nil {
		builder.WriteString("client_id=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	if v := _m.Email; v != nil {
		builder.WriteString("email=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	if v := _m.Username; v != nil {
		builder.WriteString("username=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	if v := _m.Remark; v != nil {
		builder.WriteString("remark=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	builder.WriteString("use_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.UseCount))
	builder.WriteString(", ")
	if v := _m.PlanType; v != nil {
		builder.WriteString("plan_type=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	if v := _m.PlanTitle; v != nil {
		builder.WriteString("plan_title=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	if v := _m.SubscriptionEnd; v != nil {
		builder.WriteString("subscription_end=")
		builder.WriteString(v.Format(time.ANSIC))
	}
	builder.WriteString(", ")
	builder.WriteString("sora_supported=")
	builder.WriteString(fmt.Sprintf("%v", _m.SoraSupported))
	builder.WriteString(", ")
	if v := _m.SoraInviteCode; v != nil {
		builder.WriteString("sora_invite_code=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	builder.WriteString("sora_redeemed_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.SoraRedeemedCount))
	builder.WriteString(", ")
	builder.WriteString("sora_remaining_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.SoraRemainingCount))
	builder.WriteString(", ")
	builder.WriteString("sora_total_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.SoraTotalCount))
	builder.WriteString(", ")
	if v := _m.SoraCooldownUntil; v != nil {
		builder.WriteString("sora_cooldown_until=")
		builder.WriteString(v.Format(time.ANSIC))
	}
	builder.WriteString(", ")
	if v := _m.CooledUntil; v != nil {
		builder.WriteString("cooled_until=")
		builder.WriteString(v.Format(time.ANSIC))
	}
	builder.WriteString(", ")
	builder.WriteString("image_enabled=")
	builder.WriteString(fmt.Sprintf("%v", _m.ImageEnabled))
	builder.WriteString(", ")
	builder.WriteString("video_enabled=")
	builder.WriteString(fmt.Sprintf("%v", _m.VideoEnabled))
	builder.WriteString(", ")
	builder.WriteString("image_concurrency=")
	builder.WriteString(fmt.Sprintf("%v", _m.ImageConcurrency))
	builder.WriteString(", ")
	builder.WriteString("video_concurrency=")
	builder.WriteString(fmt.Sprintf("%v", _m.VideoConcurrency))
	builder.WriteString(", ")
	builder.WriteString("is_expired=")
	builder.WriteString(fmt.Sprintf("%v", _m.IsExpired))
	builder.WriteByte(')')
	return builder.String()
}
|
||||||
|
|
||||||
|
// SoraAccounts is a parsable slice of SoraAccount.
type SoraAccounts []*SoraAccount
|
||||||
278
backend/ent/soraaccount/soraaccount.go
Normal file
278
backend/ent/soraaccount/soraaccount.go
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package soraaccount
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Generated column-name constants for the sora_accounts table.
const (
	// Label holds the string label denoting the soraaccount type in the database.
	Label = "sora_account"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldCreatedAt holds the string denoting the created_at field in the database.
	FieldCreatedAt = "created_at"
	// FieldUpdatedAt holds the string denoting the updated_at field in the database.
	FieldUpdatedAt = "updated_at"
	// FieldAccountID holds the string denoting the account_id field in the database.
	FieldAccountID = "account_id"
	// FieldAccessToken holds the string denoting the access_token field in the database.
	FieldAccessToken = "access_token"
	// FieldSessionToken holds the string denoting the session_token field in the database.
	FieldSessionToken = "session_token"
	// FieldRefreshToken holds the string denoting the refresh_token field in the database.
	FieldRefreshToken = "refresh_token"
	// FieldClientID holds the string denoting the client_id field in the database.
	FieldClientID = "client_id"
	// FieldEmail holds the string denoting the email field in the database.
	FieldEmail = "email"
	// FieldUsername holds the string denoting the username field in the database.
	FieldUsername = "username"
	// FieldRemark holds the string denoting the remark field in the database.
	FieldRemark = "remark"
	// FieldUseCount holds the string denoting the use_count field in the database.
	FieldUseCount = "use_count"
	// FieldPlanType holds the string denoting the plan_type field in the database.
	FieldPlanType = "plan_type"
	// FieldPlanTitle holds the string denoting the plan_title field in the database.
	FieldPlanTitle = "plan_title"
	// FieldSubscriptionEnd holds the string denoting the subscription_end field in the database.
	FieldSubscriptionEnd = "subscription_end"
	// FieldSoraSupported holds the string denoting the sora_supported field in the database.
	FieldSoraSupported = "sora_supported"
	// FieldSoraInviteCode holds the string denoting the sora_invite_code field in the database.
	FieldSoraInviteCode = "sora_invite_code"
	// FieldSoraRedeemedCount holds the string denoting the sora_redeemed_count field in the database.
	FieldSoraRedeemedCount = "sora_redeemed_count"
	// FieldSoraRemainingCount holds the string denoting the sora_remaining_count field in the database.
	FieldSoraRemainingCount = "sora_remaining_count"
	// FieldSoraTotalCount holds the string denoting the sora_total_count field in the database.
	FieldSoraTotalCount = "sora_total_count"
	// FieldSoraCooldownUntil holds the string denoting the sora_cooldown_until field in the database.
	FieldSoraCooldownUntil = "sora_cooldown_until"
	// FieldCooledUntil holds the string denoting the cooled_until field in the database.
	FieldCooledUntil = "cooled_until"
	// FieldImageEnabled holds the string denoting the image_enabled field in the database.
	FieldImageEnabled = "image_enabled"
	// FieldVideoEnabled holds the string denoting the video_enabled field in the database.
	FieldVideoEnabled = "video_enabled"
	// FieldImageConcurrency holds the string denoting the image_concurrency field in the database.
	FieldImageConcurrency = "image_concurrency"
	// FieldVideoConcurrency holds the string denoting the video_concurrency field in the database.
	FieldVideoConcurrency = "video_concurrency"
	// FieldIsExpired holds the string denoting the is_expired field in the database.
	FieldIsExpired = "is_expired"
	// Table holds the table name of the soraaccount in the database.
	Table = "sora_accounts"
)
|
||||||
|
|
||||||
|
// Columns holds all SQL columns for soraaccount fields.
var Columns = []string{
	FieldID,
	FieldCreatedAt,
	FieldUpdatedAt,
	FieldAccountID,
	FieldAccessToken,
	FieldSessionToken,
	FieldRefreshToken,
	FieldClientID,
	FieldEmail,
	FieldUsername,
	FieldRemark,
	FieldUseCount,
	FieldPlanType,
	FieldPlanTitle,
	FieldSubscriptionEnd,
	FieldSoraSupported,
	FieldSoraInviteCode,
	FieldSoraRedeemedCount,
	FieldSoraRemainingCount,
	FieldSoraTotalCount,
	FieldSoraCooldownUntil,
	FieldCooledUntil,
	FieldImageEnabled,
	FieldVideoEnabled,
	FieldImageConcurrency,
	FieldVideoConcurrency,
	FieldIsExpired,
}
|
||||||
|
|
||||||
|
// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
	for i := range Columns {
		if column == Columns[i] {
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
// Generated default-value hooks; populated from the schema at runtime by ent.
var (
	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
	DefaultCreatedAt func() time.Time
	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
	DefaultUpdatedAt func() time.Time
	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
	UpdateDefaultUpdatedAt func() time.Time
	// DefaultUseCount holds the default value on creation for the "use_count" field.
	DefaultUseCount int
	// DefaultSoraSupported holds the default value on creation for the "sora_supported" field.
	DefaultSoraSupported bool
	// DefaultSoraRedeemedCount holds the default value on creation for the "sora_redeemed_count" field.
	DefaultSoraRedeemedCount int
	// DefaultSoraRemainingCount holds the default value on creation for the "sora_remaining_count" field.
	DefaultSoraRemainingCount int
	// DefaultSoraTotalCount holds the default value on creation for the "sora_total_count" field.
	DefaultSoraTotalCount int
	// DefaultImageEnabled holds the default value on creation for the "image_enabled" field.
	DefaultImageEnabled bool
	// DefaultVideoEnabled holds the default value on creation for the "video_enabled" field.
	DefaultVideoEnabled bool
	// DefaultImageConcurrency holds the default value on creation for the "image_concurrency" field.
	DefaultImageConcurrency int
	// DefaultVideoConcurrency holds the default value on creation for the "video_concurrency" field.
	DefaultVideoConcurrency int
	// DefaultIsExpired holds the default value on creation for the "is_expired" field.
	DefaultIsExpired bool
)
|
||||||
|
|
||||||
|
// OrderOption defines the ordering options for the SoraAccount queries.
type OrderOption func(*sql.Selector)
|
||||||
|
|
||||||
|
// Generated per-field ordering helpers for SoraAccount queries.

// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldID, opts...).ToFunc()
}

// ByCreatedAt orders the results by the created_at field.
func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
}

// ByUpdatedAt orders the results by the updated_at field.
func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc()
}

// ByAccountID orders the results by the account_id field.
func ByAccountID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldAccountID, opts...).ToFunc()
}

// ByAccessToken orders the results by the access_token field.
func ByAccessToken(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldAccessToken, opts...).ToFunc()
}

// BySessionToken orders the results by the session_token field.
func BySessionToken(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldSessionToken, opts...).ToFunc()
}

// ByRefreshToken orders the results by the refresh_token field.
func ByRefreshToken(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldRefreshToken, opts...).ToFunc()
}

// ByClientID orders the results by the client_id field.
func ByClientID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldClientID, opts...).ToFunc()
}

// ByEmail orders the results by the email field.
func ByEmail(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldEmail, opts...).ToFunc()
}

// ByUsername orders the results by the username field.
func ByUsername(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldUsername, opts...).ToFunc()
}

// ByRemark orders the results by the remark field.
func ByRemark(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldRemark, opts...).ToFunc()
}

// ByUseCount orders the results by the use_count field.
func ByUseCount(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldUseCount, opts...).ToFunc()
}

// ByPlanType orders the results by the plan_type field.
func ByPlanType(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldPlanType, opts...).ToFunc()
}

// ByPlanTitle orders the results by the plan_title field.
func ByPlanTitle(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldPlanTitle, opts...).ToFunc()
}
|
||||||
|
|
||||||
|
// BySubscriptionEnd orders the results by the subscription_end field.
|
||||||
|
func BySubscriptionEnd(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldSubscriptionEnd, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// BySoraSupported orders the results by the sora_supported field.
|
||||||
|
func BySoraSupported(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldSoraSupported, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// BySoraInviteCode orders the results by the sora_invite_code field.
|
||||||
|
func BySoraInviteCode(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldSoraInviteCode, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// BySoraRedeemedCount orders the results by the sora_redeemed_count field.
|
||||||
|
func BySoraRedeemedCount(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldSoraRedeemedCount, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// BySoraRemainingCount orders the results by the sora_remaining_count field.
|
||||||
|
func BySoraRemainingCount(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldSoraRemainingCount, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// BySoraTotalCount orders the results by the sora_total_count field.
|
||||||
|
func BySoraTotalCount(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldSoraTotalCount, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// BySoraCooldownUntil orders the results by the sora_cooldown_until field.
|
||||||
|
func BySoraCooldownUntil(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldSoraCooldownUntil, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByCooledUntil orders the results by the cooled_until field.
|
||||||
|
func ByCooledUntil(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldCooledUntil, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByImageEnabled orders the results by the image_enabled field.
|
||||||
|
func ByImageEnabled(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldImageEnabled, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByVideoEnabled orders the results by the video_enabled field.
|
||||||
|
func ByVideoEnabled(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldVideoEnabled, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByImageConcurrency orders the results by the image_concurrency field.
|
||||||
|
func ByImageConcurrency(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldImageConcurrency, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByVideoConcurrency orders the results by the video_concurrency field.
|
||||||
|
func ByVideoConcurrency(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldVideoConcurrency, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByIsExpired orders the results by the is_expired field.
|
||||||
|
func ByIsExpired(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldIsExpired, opts...).ToFunc()
|
||||||
|
}
|
||||||
1500
backend/ent/soraaccount/where.go
Normal file
1500
backend/ent/soraaccount/where.go
Normal file
File diff suppressed because it is too large
Load Diff
2367
backend/ent/soraaccount_create.go
Normal file
2367
backend/ent/soraaccount_create.go
Normal file
File diff suppressed because it is too large
Load Diff
88
backend/ent/soraaccount_delete.go
Normal file
88
backend/ent/soraaccount_delete.go
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soraaccount"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraAccountDelete is the builder for deleting a SoraAccount entity.
|
||||||
|
type SoraAccountDelete struct {
|
||||||
|
config
|
||||||
|
hooks []Hook
|
||||||
|
mutation *SoraAccountMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraAccountDelete builder.
|
||||||
|
func (_d *SoraAccountDelete) Where(ps ...predicate.SoraAccount) *SoraAccountDelete {
|
||||||
|
_d.mutation.Where(ps...)
|
||||||
|
return _d
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the deletion query and returns how many vertices were deleted.
|
||||||
|
func (_d *SoraAccountDelete) Exec(ctx context.Context) (int, error) {
|
||||||
|
return withHooks(ctx, _d.sqlExec, _d.mutation, _d.hooks)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_d *SoraAccountDelete) ExecX(ctx context.Context) int {
|
||||||
|
n, err := _d.Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_d *SoraAccountDelete) sqlExec(ctx context.Context) (int, error) {
|
||||||
|
_spec := sqlgraph.NewDeleteSpec(soraaccount.Table, sqlgraph.NewFieldSpec(soraaccount.FieldID, field.TypeInt64))
|
||||||
|
if ps := _d.mutation.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
affected, err := sqlgraph.DeleteNodes(ctx, _d.driver, _spec)
|
||||||
|
if err != nil && sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
_d.mutation.done = true
|
||||||
|
return affected, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraAccountDeleteOne is the builder for deleting a single SoraAccount entity.
|
||||||
|
type SoraAccountDeleteOne struct {
|
||||||
|
_d *SoraAccountDelete
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraAccountDelete builder.
|
||||||
|
func (_d *SoraAccountDeleteOne) Where(ps ...predicate.SoraAccount) *SoraAccountDeleteOne {
|
||||||
|
_d._d.mutation.Where(ps...)
|
||||||
|
return _d
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the deletion query.
|
||||||
|
func (_d *SoraAccountDeleteOne) Exec(ctx context.Context) error {
|
||||||
|
n, err := _d._d.Exec(ctx)
|
||||||
|
switch {
|
||||||
|
case err != nil:
|
||||||
|
return err
|
||||||
|
case n == 0:
|
||||||
|
return &NotFoundError{soraaccount.Label}
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_d *SoraAccountDeleteOne) ExecX(ctx context.Context) {
|
||||||
|
if err := _d.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
564
backend/ent/soraaccount_query.go
Normal file
564
backend/ent/soraaccount_query.go
Normal file
@@ -0,0 +1,564 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect"
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soraaccount"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraAccountQuery is the builder for querying SoraAccount entities.
|
||||||
|
type SoraAccountQuery struct {
|
||||||
|
config
|
||||||
|
ctx *QueryContext
|
||||||
|
order []soraaccount.OrderOption
|
||||||
|
inters []Interceptor
|
||||||
|
predicates []predicate.SoraAccount
|
||||||
|
modifiers []func(*sql.Selector)
|
||||||
|
// intermediate query (i.e. traversal path).
|
||||||
|
sql *sql.Selector
|
||||||
|
path func(context.Context) (*sql.Selector, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where adds a new predicate for the SoraAccountQuery builder.
|
||||||
|
func (_q *SoraAccountQuery) Where(ps ...predicate.SoraAccount) *SoraAccountQuery {
|
||||||
|
_q.predicates = append(_q.predicates, ps...)
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Limit the number of records to be returned by this query.
|
||||||
|
func (_q *SoraAccountQuery) Limit(limit int) *SoraAccountQuery {
|
||||||
|
_q.ctx.Limit = &limit
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Offset to start from.
|
||||||
|
func (_q *SoraAccountQuery) Offset(offset int) *SoraAccountQuery {
|
||||||
|
_q.ctx.Offset = &offset
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unique configures the query builder to filter duplicate records on query.
|
||||||
|
// By default, unique is set to true, and can be disabled using this method.
|
||||||
|
func (_q *SoraAccountQuery) Unique(unique bool) *SoraAccountQuery {
|
||||||
|
_q.ctx.Unique = &unique
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Order specifies how the records should be ordered.
|
||||||
|
func (_q *SoraAccountQuery) Order(o ...soraaccount.OrderOption) *SoraAccountQuery {
|
||||||
|
_q.order = append(_q.order, o...)
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// First returns the first SoraAccount entity from the query.
|
||||||
|
// Returns a *NotFoundError when no SoraAccount was found.
|
||||||
|
func (_q *SoraAccountQuery) First(ctx context.Context) (*SoraAccount, error) {
|
||||||
|
nodes, err := _q.Limit(1).All(setContextOp(ctx, _q.ctx, ent.OpQueryFirst))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(nodes) == 0 {
|
||||||
|
return nil, &NotFoundError{soraaccount.Label}
|
||||||
|
}
|
||||||
|
return nodes[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstX is like First, but panics if an error occurs.
|
||||||
|
func (_q *SoraAccountQuery) FirstX(ctx context.Context) *SoraAccount {
|
||||||
|
node, err := _q.First(ctx)
|
||||||
|
if err != nil && !IsNotFound(err) {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstID returns the first SoraAccount ID from the query.
|
||||||
|
// Returns a *NotFoundError when no SoraAccount ID was found.
|
||||||
|
func (_q *SoraAccountQuery) FirstID(ctx context.Context) (id int64, err error) {
|
||||||
|
var ids []int64
|
||||||
|
if ids, err = _q.Limit(1).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryFirstID)); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(ids) == 0 {
|
||||||
|
err = &NotFoundError{soraaccount.Label}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
return ids[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstIDX is like FirstID, but panics if an error occurs.
|
||||||
|
func (_q *SoraAccountQuery) FirstIDX(ctx context.Context) int64 {
|
||||||
|
id, err := _q.FirstID(ctx)
|
||||||
|
if err != nil && !IsNotFound(err) {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only returns a single SoraAccount entity found by the query, ensuring it only returns one.
|
||||||
|
// Returns a *NotSingularError when more than one SoraAccount entity is found.
|
||||||
|
// Returns a *NotFoundError when no SoraAccount entities are found.
|
||||||
|
func (_q *SoraAccountQuery) Only(ctx context.Context) (*SoraAccount, error) {
|
||||||
|
nodes, err := _q.Limit(2).All(setContextOp(ctx, _q.ctx, ent.OpQueryOnly))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
switch len(nodes) {
|
||||||
|
case 1:
|
||||||
|
return nodes[0], nil
|
||||||
|
case 0:
|
||||||
|
return nil, &NotFoundError{soraaccount.Label}
|
||||||
|
default:
|
||||||
|
return nil, &NotSingularError{soraaccount.Label}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyX is like Only, but panics if an error occurs.
|
||||||
|
func (_q *SoraAccountQuery) OnlyX(ctx context.Context) *SoraAccount {
|
||||||
|
node, err := _q.Only(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyID is like Only, but returns the only SoraAccount ID in the query.
|
||||||
|
// Returns a *NotSingularError when more than one SoraAccount ID is found.
|
||||||
|
// Returns a *NotFoundError when no entities are found.
|
||||||
|
func (_q *SoraAccountQuery) OnlyID(ctx context.Context) (id int64, err error) {
|
||||||
|
var ids []int64
|
||||||
|
if ids, err = _q.Limit(2).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryOnlyID)); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch len(ids) {
|
||||||
|
case 1:
|
||||||
|
id = ids[0]
|
||||||
|
case 0:
|
||||||
|
err = &NotFoundError{soraaccount.Label}
|
||||||
|
default:
|
||||||
|
err = &NotSingularError{soraaccount.Label}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyIDX is like OnlyID, but panics if an error occurs.
|
||||||
|
func (_q *SoraAccountQuery) OnlyIDX(ctx context.Context) int64 {
|
||||||
|
id, err := _q.OnlyID(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
// All executes the query and returns a list of SoraAccounts.
|
||||||
|
func (_q *SoraAccountQuery) All(ctx context.Context) ([]*SoraAccount, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryAll)
|
||||||
|
if err := _q.prepareQuery(ctx); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
qr := querierAll[[]*SoraAccount, *SoraAccountQuery]()
|
||||||
|
return withInterceptors[[]*SoraAccount](ctx, _q, qr, _q.inters)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AllX is like All, but panics if an error occurs.
|
||||||
|
func (_q *SoraAccountQuery) AllX(ctx context.Context) []*SoraAccount {
|
||||||
|
nodes, err := _q.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return nodes
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDs executes the query and returns a list of SoraAccount IDs.
|
||||||
|
func (_q *SoraAccountQuery) IDs(ctx context.Context) (ids []int64, err error) {
|
||||||
|
if _q.ctx.Unique == nil && _q.path != nil {
|
||||||
|
_q.Unique(true)
|
||||||
|
}
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryIDs)
|
||||||
|
if err = _q.Select(soraaccount.FieldID).Scan(ctx, &ids); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ids, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDsX is like IDs, but panics if an error occurs.
|
||||||
|
func (_q *SoraAccountQuery) IDsX(ctx context.Context) []int64 {
|
||||||
|
ids, err := _q.IDs(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return ids
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count returns the count of the given query.
|
||||||
|
func (_q *SoraAccountQuery) Count(ctx context.Context) (int, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryCount)
|
||||||
|
if err := _q.prepareQuery(ctx); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return withInterceptors[int](ctx, _q, querierCount[*SoraAccountQuery](), _q.inters)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CountX is like Count, but panics if an error occurs.
|
||||||
|
func (_q *SoraAccountQuery) CountX(ctx context.Context) int {
|
||||||
|
count, err := _q.Count(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return count
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exist returns true if the query has elements in the graph.
|
||||||
|
func (_q *SoraAccountQuery) Exist(ctx context.Context) (bool, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryExist)
|
||||||
|
switch _, err := _q.FirstID(ctx); {
|
||||||
|
case IsNotFound(err):
|
||||||
|
return false, nil
|
||||||
|
case err != nil:
|
||||||
|
return false, fmt.Errorf("ent: check existence: %w", err)
|
||||||
|
default:
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExistX is like Exist, but panics if an error occurs.
|
||||||
|
func (_q *SoraAccountQuery) ExistX(ctx context.Context) bool {
|
||||||
|
exist, err := _q.Exist(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return exist
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clone returns a duplicate of the SoraAccountQuery builder, including all associated steps. It can be
|
||||||
|
// used to prepare common query builders and use them differently after the clone is made.
|
||||||
|
func (_q *SoraAccountQuery) Clone() *SoraAccountQuery {
|
||||||
|
if _q == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &SoraAccountQuery{
|
||||||
|
config: _q.config,
|
||||||
|
ctx: _q.ctx.Clone(),
|
||||||
|
order: append([]soraaccount.OrderOption{}, _q.order...),
|
||||||
|
inters: append([]Interceptor{}, _q.inters...),
|
||||||
|
predicates: append([]predicate.SoraAccount{}, _q.predicates...),
|
||||||
|
// clone intermediate query.
|
||||||
|
sql: _q.sql.Clone(),
|
||||||
|
path: _q.path,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GroupBy is used to group vertices by one or more fields/columns.
|
||||||
|
// It is often used with aggregate functions, like: count, max, mean, min, sum.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// var v []struct {
|
||||||
|
// CreatedAt time.Time `json:"created_at,omitempty"`
|
||||||
|
// Count int `json:"count,omitempty"`
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// client.SoraAccount.Query().
|
||||||
|
// GroupBy(soraaccount.FieldCreatedAt).
|
||||||
|
// Aggregate(ent.Count()).
|
||||||
|
// Scan(ctx, &v)
|
||||||
|
func (_q *SoraAccountQuery) GroupBy(field string, fields ...string) *SoraAccountGroupBy {
|
||||||
|
_q.ctx.Fields = append([]string{field}, fields...)
|
||||||
|
grbuild := &SoraAccountGroupBy{build: _q}
|
||||||
|
grbuild.flds = &_q.ctx.Fields
|
||||||
|
grbuild.label = soraaccount.Label
|
||||||
|
grbuild.scan = grbuild.Scan
|
||||||
|
return grbuild
|
||||||
|
}
|
||||||
|
|
||||||
|
// Select allows the selection one or more fields/columns for the given query,
|
||||||
|
// instead of selecting all fields in the entity.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// var v []struct {
|
||||||
|
// CreatedAt time.Time `json:"created_at,omitempty"`
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// client.SoraAccount.Query().
|
||||||
|
// Select(soraaccount.FieldCreatedAt).
|
||||||
|
// Scan(ctx, &v)
|
||||||
|
func (_q *SoraAccountQuery) Select(fields ...string) *SoraAccountSelect {
|
||||||
|
_q.ctx.Fields = append(_q.ctx.Fields, fields...)
|
||||||
|
sbuild := &SoraAccountSelect{SoraAccountQuery: _q}
|
||||||
|
sbuild.label = soraaccount.Label
|
||||||
|
sbuild.flds, sbuild.scan = &_q.ctx.Fields, sbuild.Scan
|
||||||
|
return sbuild
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate returns a SoraAccountSelect configured with the given aggregations.
|
||||||
|
func (_q *SoraAccountQuery) Aggregate(fns ...AggregateFunc) *SoraAccountSelect {
|
||||||
|
return _q.Select().Aggregate(fns...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraAccountQuery) prepareQuery(ctx context.Context) error {
|
||||||
|
for _, inter := range _q.inters {
|
||||||
|
if inter == nil {
|
||||||
|
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
|
||||||
|
}
|
||||||
|
if trv, ok := inter.(Traverser); ok {
|
||||||
|
if err := trv.Traverse(ctx, _q); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, f := range _q.ctx.Fields {
|
||||||
|
if !soraaccount.ValidColumn(f) {
|
||||||
|
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _q.path != nil {
|
||||||
|
prev, err := _q.path(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_q.sql = prev
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraAccountQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*SoraAccount, error) {
|
||||||
|
var (
|
||||||
|
nodes = []*SoraAccount{}
|
||||||
|
_spec = _q.querySpec()
|
||||||
|
)
|
||||||
|
_spec.ScanValues = func(columns []string) ([]any, error) {
|
||||||
|
return (*SoraAccount).scanValues(nil, columns)
|
||||||
|
}
|
||||||
|
_spec.Assign = func(columns []string, values []any) error {
|
||||||
|
node := &SoraAccount{config: _q.config}
|
||||||
|
nodes = append(nodes, node)
|
||||||
|
return node.assignValues(columns, values)
|
||||||
|
}
|
||||||
|
if len(_q.modifiers) > 0 {
|
||||||
|
_spec.Modifiers = _q.modifiers
|
||||||
|
}
|
||||||
|
for i := range hooks {
|
||||||
|
hooks[i](ctx, _spec)
|
||||||
|
}
|
||||||
|
if err := sqlgraph.QueryNodes(ctx, _q.driver, _spec); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(nodes) == 0 {
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraAccountQuery) sqlCount(ctx context.Context) (int, error) {
|
||||||
|
_spec := _q.querySpec()
|
||||||
|
if len(_q.modifiers) > 0 {
|
||||||
|
_spec.Modifiers = _q.modifiers
|
||||||
|
}
|
||||||
|
_spec.Node.Columns = _q.ctx.Fields
|
||||||
|
if len(_q.ctx.Fields) > 0 {
|
||||||
|
_spec.Unique = _q.ctx.Unique != nil && *_q.ctx.Unique
|
||||||
|
}
|
||||||
|
return sqlgraph.CountNodes(ctx, _q.driver, _spec)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraAccountQuery) querySpec() *sqlgraph.QuerySpec {
|
||||||
|
_spec := sqlgraph.NewQuerySpec(soraaccount.Table, soraaccount.Columns, sqlgraph.NewFieldSpec(soraaccount.FieldID, field.TypeInt64))
|
||||||
|
_spec.From = _q.sql
|
||||||
|
if unique := _q.ctx.Unique; unique != nil {
|
||||||
|
_spec.Unique = *unique
|
||||||
|
} else if _q.path != nil {
|
||||||
|
_spec.Unique = true
|
||||||
|
}
|
||||||
|
if fields := _q.ctx.Fields; len(fields) > 0 {
|
||||||
|
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, soraaccount.FieldID)
|
||||||
|
for i := range fields {
|
||||||
|
if fields[i] != soraaccount.FieldID {
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ps := _q.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if limit := _q.ctx.Limit; limit != nil {
|
||||||
|
_spec.Limit = *limit
|
||||||
|
}
|
||||||
|
if offset := _q.ctx.Offset; offset != nil {
|
||||||
|
_spec.Offset = *offset
|
||||||
|
}
|
||||||
|
if ps := _q.order; len(ps) > 0 {
|
||||||
|
_spec.Order = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return _spec
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraAccountQuery) sqlQuery(ctx context.Context) *sql.Selector {
|
||||||
|
builder := sql.Dialect(_q.driver.Dialect())
|
||||||
|
t1 := builder.Table(soraaccount.Table)
|
||||||
|
columns := _q.ctx.Fields
|
||||||
|
if len(columns) == 0 {
|
||||||
|
columns = soraaccount.Columns
|
||||||
|
}
|
||||||
|
selector := builder.Select(t1.Columns(columns...)...).From(t1)
|
||||||
|
if _q.sql != nil {
|
||||||
|
selector = _q.sql
|
||||||
|
selector.Select(selector.Columns(columns...)...)
|
||||||
|
}
|
||||||
|
if _q.ctx.Unique != nil && *_q.ctx.Unique {
|
||||||
|
selector.Distinct()
|
||||||
|
}
|
||||||
|
for _, m := range _q.modifiers {
|
||||||
|
m(selector)
|
||||||
|
}
|
||||||
|
for _, p := range _q.predicates {
|
||||||
|
p(selector)
|
||||||
|
}
|
||||||
|
for _, p := range _q.order {
|
||||||
|
p(selector)
|
||||||
|
}
|
||||||
|
if offset := _q.ctx.Offset; offset != nil {
|
||||||
|
// limit is mandatory for offset clause. We start
|
||||||
|
// with default value, and override it below if needed.
|
||||||
|
selector.Offset(*offset).Limit(math.MaxInt32)
|
||||||
|
}
|
||||||
|
if limit := _q.ctx.Limit; limit != nil {
|
||||||
|
selector.Limit(*limit)
|
||||||
|
}
|
||||||
|
return selector
|
||||||
|
}
|
||||||
|
|
||||||
|
// ForUpdate locks the selected rows against concurrent updates, and prevent them from being
|
||||||
|
// updated, deleted or "selected ... for update" by other sessions, until the transaction is
|
||||||
|
// either committed or rolled-back.
|
||||||
|
func (_q *SoraAccountQuery) ForUpdate(opts ...sql.LockOption) *SoraAccountQuery {
|
||||||
|
if _q.driver.Dialect() == dialect.Postgres {
|
||||||
|
_q.Unique(false)
|
||||||
|
}
|
||||||
|
_q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
|
||||||
|
s.ForUpdate(opts...)
|
||||||
|
})
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// ForShare behaves similarly to ForUpdate, except that it acquires a shared mode lock
|
||||||
|
// on any rows that are read. Other sessions can read the rows, but cannot modify them
|
||||||
|
// until your transaction commits.
|
||||||
|
func (_q *SoraAccountQuery) ForShare(opts ...sql.LockOption) *SoraAccountQuery {
|
||||||
|
if _q.driver.Dialect() == dialect.Postgres {
|
||||||
|
_q.Unique(false)
|
||||||
|
}
|
||||||
|
_q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
|
||||||
|
s.ForShare(opts...)
|
||||||
|
})
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraAccountGroupBy is the group-by builder for SoraAccount entities.
|
||||||
|
type SoraAccountGroupBy struct {
|
||||||
|
selector
|
||||||
|
build *SoraAccountQuery
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate adds the given aggregation functions to the group-by query.
|
||||||
|
func (_g *SoraAccountGroupBy) Aggregate(fns ...AggregateFunc) *SoraAccountGroupBy {
|
||||||
|
_g.fns = append(_g.fns, fns...)
|
||||||
|
return _g
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan applies the selector query and scans the result into the given value.
|
||||||
|
func (_g *SoraAccountGroupBy) Scan(ctx context.Context, v any) error {
|
||||||
|
ctx = setContextOp(ctx, _g.build.ctx, ent.OpQueryGroupBy)
|
||||||
|
if err := _g.build.prepareQuery(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return scanWithInterceptors[*SoraAccountQuery, *SoraAccountGroupBy](ctx, _g.build, _g, _g.build.inters, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_g *SoraAccountGroupBy) sqlScan(ctx context.Context, root *SoraAccountQuery, v any) error {
|
||||||
|
selector := root.sqlQuery(ctx).Select()
|
||||||
|
aggregation := make([]string, 0, len(_g.fns))
|
||||||
|
for _, fn := range _g.fns {
|
||||||
|
aggregation = append(aggregation, fn(selector))
|
||||||
|
}
|
||||||
|
if len(selector.SelectedColumns()) == 0 {
|
||||||
|
columns := make([]string, 0, len(*_g.flds)+len(_g.fns))
|
||||||
|
for _, f := range *_g.flds {
|
||||||
|
columns = append(columns, selector.C(f))
|
||||||
|
}
|
||||||
|
columns = append(columns, aggregation...)
|
||||||
|
selector.Select(columns...)
|
||||||
|
}
|
||||||
|
selector.GroupBy(selector.Columns(*_g.flds...)...)
|
||||||
|
if err := selector.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
rows := &sql.Rows{}
|
||||||
|
query, args := selector.Query()
|
||||||
|
if err := _g.build.driver.Query(ctx, query, args, rows); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
return sql.ScanSlice(rows, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraAccountSelect is the builder for selecting fields of SoraAccount entities.
|
||||||
|
type SoraAccountSelect struct {
|
||||||
|
*SoraAccountQuery
|
||||||
|
selector
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate adds the given aggregation functions to the selector query.
|
||||||
|
func (_s *SoraAccountSelect) Aggregate(fns ...AggregateFunc) *SoraAccountSelect {
|
||||||
|
_s.fns = append(_s.fns, fns...)
|
||||||
|
return _s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan applies the selector query and scans the result into the given value.
|
||||||
|
func (_s *SoraAccountSelect) Scan(ctx context.Context, v any) error {
|
||||||
|
ctx = setContextOp(ctx, _s.ctx, ent.OpQuerySelect)
|
||||||
|
if err := _s.prepareQuery(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return scanWithInterceptors[*SoraAccountQuery, *SoraAccountSelect](ctx, _s.SoraAccountQuery, _s, _s.inters, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_s *SoraAccountSelect) sqlScan(ctx context.Context, root *SoraAccountQuery, v any) error {
|
||||||
|
selector := root.sqlQuery(ctx)
|
||||||
|
aggregation := make([]string, 0, len(_s.fns))
|
||||||
|
for _, fn := range _s.fns {
|
||||||
|
aggregation = append(aggregation, fn(selector))
|
||||||
|
}
|
||||||
|
switch n := len(*_s.selector.flds); {
|
||||||
|
case n == 0 && len(aggregation) > 0:
|
||||||
|
selector.Select(aggregation...)
|
||||||
|
case n != 0 && len(aggregation) > 0:
|
||||||
|
selector.AppendSelect(aggregation...)
|
||||||
|
}
|
||||||
|
rows := &sql.Rows{}
|
||||||
|
query, args := selector.Query()
|
||||||
|
if err := _s.driver.Query(ctx, query, args, rows); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
return sql.ScanSlice(rows, v)
|
||||||
|
}
|
||||||
1402
backend/ent/soraaccount_update.go
Normal file
1402
backend/ent/soraaccount_update.go
Normal file
File diff suppressed because it is too large
Load Diff
197
backend/ent/soracachefile.go
Normal file
197
backend/ent/soracachefile.go
Normal file
@@ -0,0 +1,197 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soracachefile"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraCacheFile is the model entity for the SoraCacheFile schema.
type SoraCacheFile struct {
	config `json:"-"`
	// ID of the ent.
	ID int64 `json:"id,omitempty"`
	// TaskID holds the value of the "task_id" field.
	TaskID *string `json:"task_id,omitempty"`
	// AccountID holds the value of the "account_id" field.
	AccountID int64 `json:"account_id,omitempty"`
	// UserID holds the value of the "user_id" field.
	UserID int64 `json:"user_id,omitempty"`
	// MediaType holds the value of the "media_type" field.
	MediaType string `json:"media_type,omitempty"`
	// OriginalURL holds the value of the "original_url" field.
	OriginalURL string `json:"original_url,omitempty"`
	// CachePath holds the value of the "cache_path" field.
	CachePath string `json:"cache_path,omitempty"`
	// CacheURL holds the value of the "cache_url" field.
	CacheURL string `json:"cache_url,omitempty"`
	// SizeBytes holds the value of the "size_bytes" field.
	SizeBytes int64 `json:"size_bytes,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// selectValues captures columns selected via modifiers/custom selects
	// that do not map to a declared schema field (see Value).
	selectValues sql.SelectValues
}
|
||||||
|
|
||||||
|
// scanValues returns the types for scanning values from sql.Rows.
func (*SoraCacheFile) scanValues(columns []string) ([]any, error) {
	values := make([]any, len(columns))
	for i := range columns {
		switch columns[i] {
		case soracachefile.FieldID, soracachefile.FieldAccountID, soracachefile.FieldUserID, soracachefile.FieldSizeBytes:
			values[i] = new(sql.NullInt64)
		case soracachefile.FieldTaskID, soracachefile.FieldMediaType, soracachefile.FieldOriginalURL, soracachefile.FieldCachePath, soracachefile.FieldCacheURL:
			values[i] = new(sql.NullString)
		case soracachefile.FieldCreatedAt:
			values[i] = new(sql.NullTime)
		default:
			// Columns outside the declared schema (modifiers/custom selects)
			// are scanned as opaque values and surfaced via selectValues.
			values[i] = new(sql.UnknownType)
		}
	}
	return values, nil
}
|
||||||
|
|
||||||
|
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the SoraCacheFile fields.
func (_m *SoraCacheFile) assignValues(columns []string, values []any) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case soracachefile.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			_m.ID = int64(value.Int64)
		case soracachefile.FieldTaskID:
			// Nullable field: only allocated and assigned when non-NULL.
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field task_id", values[i])
			} else if value.Valid {
				_m.TaskID = new(string)
				*_m.TaskID = value.String
			}
		case soracachefile.FieldAccountID:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field account_id", values[i])
			} else if value.Valid {
				_m.AccountID = value.Int64
			}
		case soracachefile.FieldUserID:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field user_id", values[i])
			} else if value.Valid {
				_m.UserID = value.Int64
			}
		case soracachefile.FieldMediaType:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field media_type", values[i])
			} else if value.Valid {
				_m.MediaType = value.String
			}
		case soracachefile.FieldOriginalURL:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field original_url", values[i])
			} else if value.Valid {
				_m.OriginalURL = value.String
			}
		case soracachefile.FieldCachePath:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field cache_path", values[i])
			} else if value.Valid {
				_m.CachePath = value.String
			}
		case soracachefile.FieldCacheURL:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field cache_url", values[i])
			} else if value.Valid {
				_m.CacheURL = value.String
			}
		case soracachefile.FieldSizeBytes:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field size_bytes", values[i])
			} else if value.Valid {
				_m.SizeBytes = value.Int64
			}
		case soracachefile.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				_m.CreatedAt = value.Time
			}
		default:
			// Unknown columns were scanned as sql.UnknownType in scanValues;
			// stash them for retrieval through Value(name).
			_m.selectValues.Set(columns[i], values[i])
		}
	}
	return nil
}
|
||||||
|
|
||||||
|
// Value returns the ent.Value that was dynamically selected and assigned to the SoraCacheFile.
// This includes values selected through modifiers, order, etc.
func (_m *SoraCacheFile) Value(name string) (ent.Value, error) {
	return _m.selectValues.Get(name)
}

// Update returns a builder for updating this SoraCacheFile.
// Note that you need to call SoraCacheFile.Unwrap() before calling this method if this SoraCacheFile
// was returned from a transaction, and the transaction was committed or rolled back.
func (_m *SoraCacheFile) Update() *SoraCacheFileUpdateOne {
	return NewSoraCacheFileClient(_m.config).UpdateOne(_m)
}

// Unwrap unwraps the SoraCacheFile entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (_m *SoraCacheFile) Unwrap() *SoraCacheFile {
	_tx, ok := _m.config.driver.(*txDriver)
	if !ok {
		panic("ent: SoraCacheFile is not a transactional entity")
	}
	_m.config.driver = _tx.drv
	return _m
}

// String implements the fmt.Stringer.
//
// NOTE(review): when TaskID is nil the output contains an empty segment
// ("id=1, , account_id=..."); this is standard ent-generated behavior for
// nilable fields and is debug-only output.
func (_m *SoraCacheFile) String() string {
	var builder strings.Builder
	builder.WriteString("SoraCacheFile(")
	builder.WriteString(fmt.Sprintf("id=%v, ", _m.ID))
	if v := _m.TaskID; v != nil {
		builder.WriteString("task_id=")
		builder.WriteString(*v)
	}
	builder.WriteString(", ")
	builder.WriteString("account_id=")
	builder.WriteString(fmt.Sprintf("%v", _m.AccountID))
	builder.WriteString(", ")
	builder.WriteString("user_id=")
	builder.WriteString(fmt.Sprintf("%v", _m.UserID))
	builder.WriteString(", ")
	builder.WriteString("media_type=")
	builder.WriteString(_m.MediaType)
	builder.WriteString(", ")
	builder.WriteString("original_url=")
	builder.WriteString(_m.OriginalURL)
	builder.WriteString(", ")
	builder.WriteString("cache_path=")
	builder.WriteString(_m.CachePath)
	builder.WriteString(", ")
	builder.WriteString("cache_url=")
	builder.WriteString(_m.CacheURL)
	builder.WriteString(", ")
	builder.WriteString("size_bytes=")
	builder.WriteString(fmt.Sprintf("%v", _m.SizeBytes))
	builder.WriteString(", ")
	builder.WriteString("created_at=")
	builder.WriteString(_m.CreatedAt.Format(time.ANSIC))
	builder.WriteByte(')')
	return builder.String()
}

// SoraCacheFiles is a parsable slice of SoraCacheFile.
type SoraCacheFiles []*SoraCacheFile
|
||||||
124
backend/ent/soracachefile/soracachefile.go
Normal file
124
backend/ent/soracachefile/soracachefile.go
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package soracachefile
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// Label holds the string label denoting the soracachefile type in the database.
	Label = "sora_cache_file"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldTaskID holds the string denoting the task_id field in the database.
	FieldTaskID = "task_id"
	// FieldAccountID holds the string denoting the account_id field in the database.
	FieldAccountID = "account_id"
	// FieldUserID holds the string denoting the user_id field in the database.
	FieldUserID = "user_id"
	// FieldMediaType holds the string denoting the media_type field in the database.
	FieldMediaType = "media_type"
	// FieldOriginalURL holds the string denoting the original_url field in the database.
	FieldOriginalURL = "original_url"
	// FieldCachePath holds the string denoting the cache_path field in the database.
	FieldCachePath = "cache_path"
	// FieldCacheURL holds the string denoting the cache_url field in the database.
	FieldCacheURL = "cache_url"
	// FieldSizeBytes holds the string denoting the size_bytes field in the database.
	FieldSizeBytes = "size_bytes"
	// FieldCreatedAt holds the string denoting the created_at field in the database.
	FieldCreatedAt = "created_at"
	// Table holds the table name of the soracachefile in the database.
	Table = "sora_cache_files"
)

// Columns holds all SQL columns for soracachefile fields.
var Columns = []string{
	FieldID,
	FieldTaskID,
	FieldAccountID,
	FieldUserID,
	FieldMediaType,
	FieldOriginalURL,
	FieldCachePath,
	FieldCacheURL,
	FieldSizeBytes,
	FieldCreatedAt,
}

// ValidColumn reports if the column name is valid (part of the table columns).
func ValidColumn(column string) bool {
	// Linear scan is fine here: the column set is tiny and fixed.
	for _, known := range Columns {
		if known == column {
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
// NOTE(review): these package-level hooks are presumably assigned from the
// schema descriptors at init time (ent convention, typically in runtime.go) —
// verify against the generated runtime wiring.
var (
	// TaskIDValidator is a validator for the "task_id" field. It is called by the builders before save.
	TaskIDValidator func(string) error
	// MediaTypeValidator is a validator for the "media_type" field. It is called by the builders before save.
	MediaTypeValidator func(string) error
	// DefaultSizeBytes holds the default value on creation for the "size_bytes" field.
	DefaultSizeBytes int64
	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
	DefaultCreatedAt func() time.Time
)
|
||||||
|
|
||||||
|
// OrderOption defines the ordering options for the SoraCacheFile queries.
type OrderOption func(*sql.Selector)

// The By* helpers below each produce an OrderOption that sorts query results
// by a single schema field.

// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldID, opts...).ToFunc()
}

// ByTaskID orders the results by the task_id field.
func ByTaskID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldTaskID, opts...).ToFunc()
}

// ByAccountID orders the results by the account_id field.
func ByAccountID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldAccountID, opts...).ToFunc()
}

// ByUserID orders the results by the user_id field.
func ByUserID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldUserID, opts...).ToFunc()
}

// ByMediaType orders the results by the media_type field.
func ByMediaType(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldMediaType, opts...).ToFunc()
}

// ByOriginalURL orders the results by the original_url field.
func ByOriginalURL(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldOriginalURL, opts...).ToFunc()
}

// ByCachePath orders the results by the cache_path field.
func ByCachePath(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldCachePath, opts...).ToFunc()
}

// ByCacheURL orders the results by the cache_url field.
func ByCacheURL(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldCacheURL, opts...).ToFunc()
}

// BySizeBytes orders the results by the size_bytes field.
func BySizeBytes(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldSizeBytes, opts...).ToFunc()
}

// ByCreatedAt orders the results by the created_at field.
func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
}
|
||||||
610
backend/ent/soracachefile/where.go
Normal file
610
backend/ent/soracachefile/where.go
Normal file
@@ -0,0 +1,610 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package soracachefile
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ID filters vertices based on their ID field.
func ID(id int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldID, id))
}

// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldID, id))
}

// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNEQ(FieldID, id))
}

// IDIn applies the In predicate on the ID field.
func IDIn(ids ...int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldIn(FieldID, ids...))
}

// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNotIn(FieldID, ids...))
}

// IDGT applies the GT predicate on the ID field.
func IDGT(id int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGT(FieldID, id))
}

// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGTE(FieldID, id))
}

// IDLT applies the LT predicate on the ID field.
func IDLT(id int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLT(FieldID, id))
}

// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLTE(FieldID, id))
}

// The helpers below are shorthand equality predicates; each is identical to
// its corresponding *EQ variant and exists for ergonomic call sites.

// TaskID applies equality check predicate on the "task_id" field. It's identical to TaskIDEQ.
func TaskID(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldTaskID, v))
}

// AccountID applies equality check predicate on the "account_id" field. It's identical to AccountIDEQ.
func AccountID(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldAccountID, v))
}

// UserID applies equality check predicate on the "user_id" field. It's identical to UserIDEQ.
func UserID(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldUserID, v))
}

// MediaType applies equality check predicate on the "media_type" field. It's identical to MediaTypeEQ.
func MediaType(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldMediaType, v))
}

// OriginalURL applies equality check predicate on the "original_url" field. It's identical to OriginalURLEQ.
func OriginalURL(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldOriginalURL, v))
}

// CachePath applies equality check predicate on the "cache_path" field. It's identical to CachePathEQ.
func CachePath(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldCachePath, v))
}

// CacheURL applies equality check predicate on the "cache_url" field. It's identical to CacheURLEQ.
func CacheURL(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldCacheURL, v))
}

// SizeBytes applies equality check predicate on the "size_bytes" field. It's identical to SizeBytesEQ.
func SizeBytes(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldSizeBytes, v))
}

// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
func CreatedAt(v time.Time) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldCreatedAt, v))
}
|
||||||
|
|
||||||
|
// Predicates for the nullable "task_id" string field, including SQL
// NULL checks (IsNil/NotNil) and case-insensitive variants (*Fold).

// TaskIDEQ applies the EQ predicate on the "task_id" field.
func TaskIDEQ(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldTaskID, v))
}

// TaskIDNEQ applies the NEQ predicate on the "task_id" field.
func TaskIDNEQ(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNEQ(FieldTaskID, v))
}

// TaskIDIn applies the In predicate on the "task_id" field.
func TaskIDIn(vs ...string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldIn(FieldTaskID, vs...))
}

// TaskIDNotIn applies the NotIn predicate on the "task_id" field.
func TaskIDNotIn(vs ...string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNotIn(FieldTaskID, vs...))
}

// TaskIDGT applies the GT predicate on the "task_id" field.
func TaskIDGT(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGT(FieldTaskID, v))
}

// TaskIDGTE applies the GTE predicate on the "task_id" field.
func TaskIDGTE(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGTE(FieldTaskID, v))
}

// TaskIDLT applies the LT predicate on the "task_id" field.
func TaskIDLT(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLT(FieldTaskID, v))
}

// TaskIDLTE applies the LTE predicate on the "task_id" field.
func TaskIDLTE(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLTE(FieldTaskID, v))
}

// TaskIDContains applies the Contains predicate on the "task_id" field.
func TaskIDContains(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldContains(FieldTaskID, v))
}

// TaskIDHasPrefix applies the HasPrefix predicate on the "task_id" field.
func TaskIDHasPrefix(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldHasPrefix(FieldTaskID, v))
}

// TaskIDHasSuffix applies the HasSuffix predicate on the "task_id" field.
func TaskIDHasSuffix(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldHasSuffix(FieldTaskID, v))
}

// TaskIDIsNil applies the IsNil predicate on the "task_id" field.
func TaskIDIsNil() predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldIsNull(FieldTaskID))
}

// TaskIDNotNil applies the NotNil predicate on the "task_id" field.
func TaskIDNotNil() predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNotNull(FieldTaskID))
}

// TaskIDEqualFold applies the EqualFold predicate on the "task_id" field.
func TaskIDEqualFold(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEqualFold(FieldTaskID, v))
}

// TaskIDContainsFold applies the ContainsFold predicate on the "task_id" field.
func TaskIDContainsFold(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldContainsFold(FieldTaskID, v))
}
|
||||||
|
|
||||||
|
// Numeric comparison predicates for the "account_id" and "user_id" fields.

// AccountIDEQ applies the EQ predicate on the "account_id" field.
func AccountIDEQ(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldAccountID, v))
}

// AccountIDNEQ applies the NEQ predicate on the "account_id" field.
func AccountIDNEQ(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNEQ(FieldAccountID, v))
}

// AccountIDIn applies the In predicate on the "account_id" field.
func AccountIDIn(vs ...int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldIn(FieldAccountID, vs...))
}

// AccountIDNotIn applies the NotIn predicate on the "account_id" field.
func AccountIDNotIn(vs ...int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNotIn(FieldAccountID, vs...))
}

// AccountIDGT applies the GT predicate on the "account_id" field.
func AccountIDGT(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGT(FieldAccountID, v))
}

// AccountIDGTE applies the GTE predicate on the "account_id" field.
func AccountIDGTE(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGTE(FieldAccountID, v))
}

// AccountIDLT applies the LT predicate on the "account_id" field.
func AccountIDLT(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLT(FieldAccountID, v))
}

// AccountIDLTE applies the LTE predicate on the "account_id" field.
func AccountIDLTE(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLTE(FieldAccountID, v))
}

// UserIDEQ applies the EQ predicate on the "user_id" field.
func UserIDEQ(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldUserID, v))
}

// UserIDNEQ applies the NEQ predicate on the "user_id" field.
func UserIDNEQ(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNEQ(FieldUserID, v))
}

// UserIDIn applies the In predicate on the "user_id" field.
func UserIDIn(vs ...int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldIn(FieldUserID, vs...))
}

// UserIDNotIn applies the NotIn predicate on the "user_id" field.
func UserIDNotIn(vs ...int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNotIn(FieldUserID, vs...))
}

// UserIDGT applies the GT predicate on the "user_id" field.
func UserIDGT(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGT(FieldUserID, v))
}

// UserIDGTE applies the GTE predicate on the "user_id" field.
func UserIDGTE(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGTE(FieldUserID, v))
}

// UserIDLT applies the LT predicate on the "user_id" field.
func UserIDLT(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLT(FieldUserID, v))
}

// UserIDLTE applies the LTE predicate on the "user_id" field.
func UserIDLTE(v int64) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLTE(FieldUserID, v))
}
|
||||||
|
|
||||||
|
// String predicates for the "media_type" field.

// MediaTypeEQ applies the EQ predicate on the "media_type" field.
func MediaTypeEQ(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldMediaType, v))
}

// MediaTypeNEQ applies the NEQ predicate on the "media_type" field.
func MediaTypeNEQ(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNEQ(FieldMediaType, v))
}

// MediaTypeIn applies the In predicate on the "media_type" field.
func MediaTypeIn(vs ...string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldIn(FieldMediaType, vs...))
}

// MediaTypeNotIn applies the NotIn predicate on the "media_type" field.
func MediaTypeNotIn(vs ...string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNotIn(FieldMediaType, vs...))
}

// MediaTypeGT applies the GT predicate on the "media_type" field.
func MediaTypeGT(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGT(FieldMediaType, v))
}

// MediaTypeGTE applies the GTE predicate on the "media_type" field.
func MediaTypeGTE(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGTE(FieldMediaType, v))
}

// MediaTypeLT applies the LT predicate on the "media_type" field.
func MediaTypeLT(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLT(FieldMediaType, v))
}

// MediaTypeLTE applies the LTE predicate on the "media_type" field.
func MediaTypeLTE(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLTE(FieldMediaType, v))
}

// MediaTypeContains applies the Contains predicate on the "media_type" field.
func MediaTypeContains(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldContains(FieldMediaType, v))
}

// MediaTypeHasPrefix applies the HasPrefix predicate on the "media_type" field.
func MediaTypeHasPrefix(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldHasPrefix(FieldMediaType, v))
}

// MediaTypeHasSuffix applies the HasSuffix predicate on the "media_type" field.
func MediaTypeHasSuffix(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldHasSuffix(FieldMediaType, v))
}

// MediaTypeEqualFold applies the EqualFold predicate on the "media_type" field.
func MediaTypeEqualFold(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEqualFold(FieldMediaType, v))
}

// MediaTypeContainsFold applies the ContainsFold predicate on the "media_type" field.
func MediaTypeContainsFold(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldContainsFold(FieldMediaType, v))
}
|
||||||
|
|
||||||
|
// String predicates for the "original_url" field.

// OriginalURLEQ applies the EQ predicate on the "original_url" field.
func OriginalURLEQ(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEQ(FieldOriginalURL, v))
}

// OriginalURLNEQ applies the NEQ predicate on the "original_url" field.
func OriginalURLNEQ(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNEQ(FieldOriginalURL, v))
}

// OriginalURLIn applies the In predicate on the "original_url" field.
func OriginalURLIn(vs ...string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldIn(FieldOriginalURL, vs...))
}

// OriginalURLNotIn applies the NotIn predicate on the "original_url" field.
func OriginalURLNotIn(vs ...string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldNotIn(FieldOriginalURL, vs...))
}

// OriginalURLGT applies the GT predicate on the "original_url" field.
func OriginalURLGT(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGT(FieldOriginalURL, v))
}

// OriginalURLGTE applies the GTE predicate on the "original_url" field.
func OriginalURLGTE(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldGTE(FieldOriginalURL, v))
}

// OriginalURLLT applies the LT predicate on the "original_url" field.
func OriginalURLLT(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLT(FieldOriginalURL, v))
}

// OriginalURLLTE applies the LTE predicate on the "original_url" field.
func OriginalURLLTE(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldLTE(FieldOriginalURL, v))
}

// OriginalURLContains applies the Contains predicate on the "original_url" field.
func OriginalURLContains(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldContains(FieldOriginalURL, v))
}

// OriginalURLHasPrefix applies the HasPrefix predicate on the "original_url" field.
func OriginalURLHasPrefix(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldHasPrefix(FieldOriginalURL, v))
}

// OriginalURLHasSuffix applies the HasSuffix predicate on the "original_url" field.
func OriginalURLHasSuffix(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldHasSuffix(FieldOriginalURL, v))
}

// OriginalURLEqualFold applies the EqualFold predicate on the "original_url" field.
func OriginalURLEqualFold(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldEqualFold(FieldOriginalURL, v))
}

// OriginalURLContainsFold applies the ContainsFold predicate on the "original_url" field.
func OriginalURLContainsFold(v string) predicate.SoraCacheFile {
	return predicate.SoraCacheFile(sql.FieldContainsFold(FieldOriginalURL, v))
}
|
||||||
|
|
||||||
|
// CachePathEQ applies the EQ predicate on the "cache_path" field.
|
||||||
|
func CachePathEQ(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldEQ(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathNEQ applies the NEQ predicate on the "cache_path" field.
|
||||||
|
func CachePathNEQ(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldNEQ(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathIn applies the In predicate on the "cache_path" field.
|
||||||
|
func CachePathIn(vs ...string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldIn(FieldCachePath, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathNotIn applies the NotIn predicate on the "cache_path" field.
|
||||||
|
func CachePathNotIn(vs ...string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldNotIn(FieldCachePath, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathGT applies the GT predicate on the "cache_path" field.
|
||||||
|
func CachePathGT(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldGT(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathGTE applies the GTE predicate on the "cache_path" field.
|
||||||
|
func CachePathGTE(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldGTE(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathLT applies the LT predicate on the "cache_path" field.
|
||||||
|
func CachePathLT(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldLT(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathLTE applies the LTE predicate on the "cache_path" field.
|
||||||
|
func CachePathLTE(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldLTE(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathContains applies the Contains predicate on the "cache_path" field.
|
||||||
|
func CachePathContains(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldContains(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathHasPrefix applies the HasPrefix predicate on the "cache_path" field.
|
||||||
|
func CachePathHasPrefix(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldHasPrefix(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathHasSuffix applies the HasSuffix predicate on the "cache_path" field.
|
||||||
|
func CachePathHasSuffix(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldHasSuffix(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathEqualFold applies the EqualFold predicate on the "cache_path" field.
|
||||||
|
func CachePathEqualFold(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldEqualFold(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CachePathContainsFold applies the ContainsFold predicate on the "cache_path" field.
|
||||||
|
func CachePathContainsFold(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldContainsFold(FieldCachePath, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLEQ applies the EQ predicate on the "cache_url" field.
|
||||||
|
func CacheURLEQ(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldEQ(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLNEQ applies the NEQ predicate on the "cache_url" field.
|
||||||
|
func CacheURLNEQ(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldNEQ(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLIn applies the In predicate on the "cache_url" field.
|
||||||
|
func CacheURLIn(vs ...string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldIn(FieldCacheURL, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLNotIn applies the NotIn predicate on the "cache_url" field.
|
||||||
|
func CacheURLNotIn(vs ...string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldNotIn(FieldCacheURL, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLGT applies the GT predicate on the "cache_url" field.
|
||||||
|
func CacheURLGT(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldGT(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLGTE applies the GTE predicate on the "cache_url" field.
|
||||||
|
func CacheURLGTE(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldGTE(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLLT applies the LT predicate on the "cache_url" field.
|
||||||
|
func CacheURLLT(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldLT(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLLTE applies the LTE predicate on the "cache_url" field.
|
||||||
|
func CacheURLLTE(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldLTE(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLContains applies the Contains predicate on the "cache_url" field.
|
||||||
|
func CacheURLContains(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldContains(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLHasPrefix applies the HasPrefix predicate on the "cache_url" field.
|
||||||
|
func CacheURLHasPrefix(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldHasPrefix(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLHasSuffix applies the HasSuffix predicate on the "cache_url" field.
|
||||||
|
func CacheURLHasSuffix(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldHasSuffix(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLEqualFold applies the EqualFold predicate on the "cache_url" field.
|
||||||
|
func CacheURLEqualFold(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldEqualFold(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CacheURLContainsFold applies the ContainsFold predicate on the "cache_url" field.
|
||||||
|
func CacheURLContainsFold(v string) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldContainsFold(FieldCacheURL, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SizeBytesEQ applies the EQ predicate on the "size_bytes" field.
|
||||||
|
func SizeBytesEQ(v int64) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldEQ(FieldSizeBytes, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SizeBytesNEQ applies the NEQ predicate on the "size_bytes" field.
|
||||||
|
func SizeBytesNEQ(v int64) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldNEQ(FieldSizeBytes, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SizeBytesIn applies the In predicate on the "size_bytes" field.
|
||||||
|
func SizeBytesIn(vs ...int64) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldIn(FieldSizeBytes, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SizeBytesNotIn applies the NotIn predicate on the "size_bytes" field.
|
||||||
|
func SizeBytesNotIn(vs ...int64) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldNotIn(FieldSizeBytes, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SizeBytesGT applies the GT predicate on the "size_bytes" field.
|
||||||
|
func SizeBytesGT(v int64) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldGT(FieldSizeBytes, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SizeBytesGTE applies the GTE predicate on the "size_bytes" field.
|
||||||
|
func SizeBytesGTE(v int64) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldGTE(FieldSizeBytes, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SizeBytesLT applies the LT predicate on the "size_bytes" field.
|
||||||
|
func SizeBytesLT(v int64) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldLT(FieldSizeBytes, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// SizeBytesLTE applies the LTE predicate on the "size_bytes" field.
|
||||||
|
func SizeBytesLTE(v int64) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldLTE(FieldSizeBytes, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtEQ applies the EQ predicate on the "created_at" field.
|
||||||
|
func CreatedAtEQ(v time.Time) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldEQ(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
|
||||||
|
func CreatedAtNEQ(v time.Time) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldNEQ(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtIn applies the In predicate on the "created_at" field.
|
||||||
|
func CreatedAtIn(vs ...time.Time) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldIn(FieldCreatedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
|
||||||
|
func CreatedAtNotIn(vs ...time.Time) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldNotIn(FieldCreatedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtGT applies the GT predicate on the "created_at" field.
|
||||||
|
func CreatedAtGT(v time.Time) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldGT(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtGTE applies the GTE predicate on the "created_at" field.
|
||||||
|
func CreatedAtGTE(v time.Time) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldGTE(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtLT applies the LT predicate on the "created_at" field.
|
||||||
|
func CreatedAtLT(v time.Time) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldLT(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtLTE applies the LTE predicate on the "created_at" field.
|
||||||
|
func CreatedAtLTE(v time.Time) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.FieldLTE(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// And groups predicates with the AND operator between them.
|
||||||
|
func And(predicates ...predicate.SoraCacheFile) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.AndPredicates(predicates...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Or groups predicates with the OR operator between them.
|
||||||
|
func Or(predicates ...predicate.SoraCacheFile) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.OrPredicates(predicates...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not applies the not operator on the given predicate.
|
||||||
|
func Not(p predicate.SoraCacheFile) predicate.SoraCacheFile {
|
||||||
|
return predicate.SoraCacheFile(sql.NotPredicates(p))
|
||||||
|
}
|
||||||
1004
backend/ent/soracachefile_create.go
Normal file
1004
backend/ent/soracachefile_create.go
Normal file
File diff suppressed because it is too large
Load Diff
88
backend/ent/soracachefile_delete.go
Normal file
88
backend/ent/soracachefile_delete.go
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soracachefile"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraCacheFileDelete is the builder for deleting a SoraCacheFile entity.
|
||||||
|
type SoraCacheFileDelete struct {
|
||||||
|
config
|
||||||
|
hooks []Hook
|
||||||
|
mutation *SoraCacheFileMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraCacheFileDelete builder.
|
||||||
|
func (_d *SoraCacheFileDelete) Where(ps ...predicate.SoraCacheFile) *SoraCacheFileDelete {
|
||||||
|
_d.mutation.Where(ps...)
|
||||||
|
return _d
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the deletion query and returns how many vertices were deleted.
|
||||||
|
func (_d *SoraCacheFileDelete) Exec(ctx context.Context) (int, error) {
|
||||||
|
return withHooks(ctx, _d.sqlExec, _d.mutation, _d.hooks)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_d *SoraCacheFileDelete) ExecX(ctx context.Context) int {
|
||||||
|
n, err := _d.Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_d *SoraCacheFileDelete) sqlExec(ctx context.Context) (int, error) {
|
||||||
|
_spec := sqlgraph.NewDeleteSpec(soracachefile.Table, sqlgraph.NewFieldSpec(soracachefile.FieldID, field.TypeInt64))
|
||||||
|
if ps := _d.mutation.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
affected, err := sqlgraph.DeleteNodes(ctx, _d.driver, _spec)
|
||||||
|
if err != nil && sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
_d.mutation.done = true
|
||||||
|
return affected, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraCacheFileDeleteOne is the builder for deleting a single SoraCacheFile entity.
|
||||||
|
type SoraCacheFileDeleteOne struct {
|
||||||
|
_d *SoraCacheFileDelete
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraCacheFileDelete builder.
|
||||||
|
func (_d *SoraCacheFileDeleteOne) Where(ps ...predicate.SoraCacheFile) *SoraCacheFileDeleteOne {
|
||||||
|
_d._d.mutation.Where(ps...)
|
||||||
|
return _d
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the deletion query.
|
||||||
|
func (_d *SoraCacheFileDeleteOne) Exec(ctx context.Context) error {
|
||||||
|
n, err := _d._d.Exec(ctx)
|
||||||
|
switch {
|
||||||
|
case err != nil:
|
||||||
|
return err
|
||||||
|
case n == 0:
|
||||||
|
return &NotFoundError{soracachefile.Label}
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_d *SoraCacheFileDeleteOne) ExecX(ctx context.Context) {
|
||||||
|
if err := _d.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
564
backend/ent/soracachefile_query.go
Normal file
564
backend/ent/soracachefile_query.go
Normal file
@@ -0,0 +1,564 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect"
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soracachefile"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraCacheFileQuery is the builder for querying SoraCacheFile entities.
|
||||||
|
type SoraCacheFileQuery struct {
|
||||||
|
config
|
||||||
|
ctx *QueryContext
|
||||||
|
order []soracachefile.OrderOption
|
||||||
|
inters []Interceptor
|
||||||
|
predicates []predicate.SoraCacheFile
|
||||||
|
modifiers []func(*sql.Selector)
|
||||||
|
// intermediate query (i.e. traversal path).
|
||||||
|
sql *sql.Selector
|
||||||
|
path func(context.Context) (*sql.Selector, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where adds a new predicate for the SoraCacheFileQuery builder.
|
||||||
|
func (_q *SoraCacheFileQuery) Where(ps ...predicate.SoraCacheFile) *SoraCacheFileQuery {
|
||||||
|
_q.predicates = append(_q.predicates, ps...)
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Limit the number of records to be returned by this query.
|
||||||
|
func (_q *SoraCacheFileQuery) Limit(limit int) *SoraCacheFileQuery {
|
||||||
|
_q.ctx.Limit = &limit
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Offset to start from.
|
||||||
|
func (_q *SoraCacheFileQuery) Offset(offset int) *SoraCacheFileQuery {
|
||||||
|
_q.ctx.Offset = &offset
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unique configures the query builder to filter duplicate records on query.
|
||||||
|
// By default, unique is set to true, and can be disabled using this method.
|
||||||
|
func (_q *SoraCacheFileQuery) Unique(unique bool) *SoraCacheFileQuery {
|
||||||
|
_q.ctx.Unique = &unique
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Order specifies how the records should be ordered.
|
||||||
|
func (_q *SoraCacheFileQuery) Order(o ...soracachefile.OrderOption) *SoraCacheFileQuery {
|
||||||
|
_q.order = append(_q.order, o...)
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// First returns the first SoraCacheFile entity from the query.
|
||||||
|
// Returns a *NotFoundError when no SoraCacheFile was found.
|
||||||
|
func (_q *SoraCacheFileQuery) First(ctx context.Context) (*SoraCacheFile, error) {
|
||||||
|
nodes, err := _q.Limit(1).All(setContextOp(ctx, _q.ctx, ent.OpQueryFirst))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(nodes) == 0 {
|
||||||
|
return nil, &NotFoundError{soracachefile.Label}
|
||||||
|
}
|
||||||
|
return nodes[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstX is like First, but panics if an error occurs.
|
||||||
|
func (_q *SoraCacheFileQuery) FirstX(ctx context.Context) *SoraCacheFile {
|
||||||
|
node, err := _q.First(ctx)
|
||||||
|
if err != nil && !IsNotFound(err) {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstID returns the first SoraCacheFile ID from the query.
|
||||||
|
// Returns a *NotFoundError when no SoraCacheFile ID was found.
|
||||||
|
func (_q *SoraCacheFileQuery) FirstID(ctx context.Context) (id int64, err error) {
|
||||||
|
var ids []int64
|
||||||
|
if ids, err = _q.Limit(1).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryFirstID)); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(ids) == 0 {
|
||||||
|
err = &NotFoundError{soracachefile.Label}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
return ids[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstIDX is like FirstID, but panics if an error occurs.
|
||||||
|
func (_q *SoraCacheFileQuery) FirstIDX(ctx context.Context) int64 {
|
||||||
|
id, err := _q.FirstID(ctx)
|
||||||
|
if err != nil && !IsNotFound(err) {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only returns a single SoraCacheFile entity found by the query, ensuring it only returns one.
|
||||||
|
// Returns a *NotSingularError when more than one SoraCacheFile entity is found.
|
||||||
|
// Returns a *NotFoundError when no SoraCacheFile entities are found.
|
||||||
|
func (_q *SoraCacheFileQuery) Only(ctx context.Context) (*SoraCacheFile, error) {
|
||||||
|
nodes, err := _q.Limit(2).All(setContextOp(ctx, _q.ctx, ent.OpQueryOnly))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
switch len(nodes) {
|
||||||
|
case 1:
|
||||||
|
return nodes[0], nil
|
||||||
|
case 0:
|
||||||
|
return nil, &NotFoundError{soracachefile.Label}
|
||||||
|
default:
|
||||||
|
return nil, &NotSingularError{soracachefile.Label}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyX is like Only, but panics if an error occurs.
|
||||||
|
func (_q *SoraCacheFileQuery) OnlyX(ctx context.Context) *SoraCacheFile {
|
||||||
|
node, err := _q.Only(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyID is like Only, but returns the only SoraCacheFile ID in the query.
|
||||||
|
// Returns a *NotSingularError when more than one SoraCacheFile ID is found.
|
||||||
|
// Returns a *NotFoundError when no entities are found.
|
||||||
|
func (_q *SoraCacheFileQuery) OnlyID(ctx context.Context) (id int64, err error) {
|
||||||
|
var ids []int64
|
||||||
|
if ids, err = _q.Limit(2).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryOnlyID)); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch len(ids) {
|
||||||
|
case 1:
|
||||||
|
id = ids[0]
|
||||||
|
case 0:
|
||||||
|
err = &NotFoundError{soracachefile.Label}
|
||||||
|
default:
|
||||||
|
err = &NotSingularError{soracachefile.Label}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyIDX is like OnlyID, but panics if an error occurs.
|
||||||
|
func (_q *SoraCacheFileQuery) OnlyIDX(ctx context.Context) int64 {
|
||||||
|
id, err := _q.OnlyID(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
// All executes the query and returns a list of SoraCacheFiles.
|
||||||
|
func (_q *SoraCacheFileQuery) All(ctx context.Context) ([]*SoraCacheFile, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryAll)
|
||||||
|
if err := _q.prepareQuery(ctx); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
qr := querierAll[[]*SoraCacheFile, *SoraCacheFileQuery]()
|
||||||
|
return withInterceptors[[]*SoraCacheFile](ctx, _q, qr, _q.inters)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AllX is like All, but panics if an error occurs.
|
||||||
|
func (_q *SoraCacheFileQuery) AllX(ctx context.Context) []*SoraCacheFile {
|
||||||
|
nodes, err := _q.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return nodes
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDs executes the query and returns a list of SoraCacheFile IDs.
|
||||||
|
func (_q *SoraCacheFileQuery) IDs(ctx context.Context) (ids []int64, err error) {
|
||||||
|
if _q.ctx.Unique == nil && _q.path != nil {
|
||||||
|
_q.Unique(true)
|
||||||
|
}
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryIDs)
|
||||||
|
if err = _q.Select(soracachefile.FieldID).Scan(ctx, &ids); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ids, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDsX is like IDs, but panics if an error occurs.
|
||||||
|
func (_q *SoraCacheFileQuery) IDsX(ctx context.Context) []int64 {
|
||||||
|
ids, err := _q.IDs(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return ids
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count returns the count of the given query.
|
||||||
|
func (_q *SoraCacheFileQuery) Count(ctx context.Context) (int, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryCount)
|
||||||
|
if err := _q.prepareQuery(ctx); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return withInterceptors[int](ctx, _q, querierCount[*SoraCacheFileQuery](), _q.inters)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CountX is like Count, but panics if an error occurs.
|
||||||
|
func (_q *SoraCacheFileQuery) CountX(ctx context.Context) int {
|
||||||
|
count, err := _q.Count(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return count
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exist returns true if the query has elements in the graph.
|
||||||
|
func (_q *SoraCacheFileQuery) Exist(ctx context.Context) (bool, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryExist)
|
||||||
|
switch _, err := _q.FirstID(ctx); {
|
||||||
|
case IsNotFound(err):
|
||||||
|
return false, nil
|
||||||
|
case err != nil:
|
||||||
|
return false, fmt.Errorf("ent: check existence: %w", err)
|
||||||
|
default:
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExistX is like Exist, but panics if an error occurs.
|
||||||
|
func (_q *SoraCacheFileQuery) ExistX(ctx context.Context) bool {
|
||||||
|
exist, err := _q.Exist(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return exist
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clone returns a duplicate of the SoraCacheFileQuery builder, including all associated steps. It can be
|
||||||
|
// used to prepare common query builders and use them differently after the clone is made.
|
||||||
|
func (_q *SoraCacheFileQuery) Clone() *SoraCacheFileQuery {
|
||||||
|
if _q == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &SoraCacheFileQuery{
|
||||||
|
config: _q.config,
|
||||||
|
ctx: _q.ctx.Clone(),
|
||||||
|
order: append([]soracachefile.OrderOption{}, _q.order...),
|
||||||
|
inters: append([]Interceptor{}, _q.inters...),
|
||||||
|
predicates: append([]predicate.SoraCacheFile{}, _q.predicates...),
|
||||||
|
// clone intermediate query.
|
||||||
|
sql: _q.sql.Clone(),
|
||||||
|
path: _q.path,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GroupBy is used to group vertices by one or more fields/columns.
|
||||||
|
// It is often used with aggregate functions, like: count, max, mean, min, sum.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// var v []struct {
|
||||||
|
// TaskID string `json:"task_id,omitempty"`
|
||||||
|
// Count int `json:"count,omitempty"`
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// client.SoraCacheFile.Query().
|
||||||
|
// GroupBy(soracachefile.FieldTaskID).
|
||||||
|
// Aggregate(ent.Count()).
|
||||||
|
// Scan(ctx, &v)
|
||||||
|
func (_q *SoraCacheFileQuery) GroupBy(field string, fields ...string) *SoraCacheFileGroupBy {
|
||||||
|
_q.ctx.Fields = append([]string{field}, fields...)
|
||||||
|
grbuild := &SoraCacheFileGroupBy{build: _q}
|
||||||
|
grbuild.flds = &_q.ctx.Fields
|
||||||
|
grbuild.label = soracachefile.Label
|
||||||
|
grbuild.scan = grbuild.Scan
|
||||||
|
return grbuild
|
||||||
|
}
|
||||||
|
|
||||||
|
// Select allows the selection one or more fields/columns for the given query,
|
||||||
|
// instead of selecting all fields in the entity.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// var v []struct {
|
||||||
|
// TaskID string `json:"task_id,omitempty"`
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// client.SoraCacheFile.Query().
|
||||||
|
// Select(soracachefile.FieldTaskID).
|
||||||
|
// Scan(ctx, &v)
|
||||||
|
func (_q *SoraCacheFileQuery) Select(fields ...string) *SoraCacheFileSelect {
|
||||||
|
_q.ctx.Fields = append(_q.ctx.Fields, fields...)
|
||||||
|
sbuild := &SoraCacheFileSelect{SoraCacheFileQuery: _q}
|
||||||
|
sbuild.label = soracachefile.Label
|
||||||
|
sbuild.flds, sbuild.scan = &_q.ctx.Fields, sbuild.Scan
|
||||||
|
return sbuild
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate returns a SoraCacheFileSelect configured with the given aggregations.
|
||||||
|
func (_q *SoraCacheFileQuery) Aggregate(fns ...AggregateFunc) *SoraCacheFileSelect {
|
||||||
|
return _q.Select().Aggregate(fns...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraCacheFileQuery) prepareQuery(ctx context.Context) error {
|
||||||
|
for _, inter := range _q.inters {
|
||||||
|
if inter == nil {
|
||||||
|
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
|
||||||
|
}
|
||||||
|
if trv, ok := inter.(Traverser); ok {
|
||||||
|
if err := trv.Traverse(ctx, _q); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, f := range _q.ctx.Fields {
|
||||||
|
if !soracachefile.ValidColumn(f) {
|
||||||
|
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _q.path != nil {
|
||||||
|
prev, err := _q.path(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_q.sql = prev
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraCacheFileQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*SoraCacheFile, error) {
|
||||||
|
var (
|
||||||
|
nodes = []*SoraCacheFile{}
|
||||||
|
_spec = _q.querySpec()
|
||||||
|
)
|
||||||
|
_spec.ScanValues = func(columns []string) ([]any, error) {
|
||||||
|
return (*SoraCacheFile).scanValues(nil, columns)
|
||||||
|
}
|
||||||
|
_spec.Assign = func(columns []string, values []any) error {
|
||||||
|
node := &SoraCacheFile{config: _q.config}
|
||||||
|
nodes = append(nodes, node)
|
||||||
|
return node.assignValues(columns, values)
|
||||||
|
}
|
||||||
|
if len(_q.modifiers) > 0 {
|
||||||
|
_spec.Modifiers = _q.modifiers
|
||||||
|
}
|
||||||
|
for i := range hooks {
|
||||||
|
hooks[i](ctx, _spec)
|
||||||
|
}
|
||||||
|
if err := sqlgraph.QueryNodes(ctx, _q.driver, _spec); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(nodes) == 0 {
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraCacheFileQuery) sqlCount(ctx context.Context) (int, error) {
|
||||||
|
_spec := _q.querySpec()
|
||||||
|
if len(_q.modifiers) > 0 {
|
||||||
|
_spec.Modifiers = _q.modifiers
|
||||||
|
}
|
||||||
|
_spec.Node.Columns = _q.ctx.Fields
|
||||||
|
if len(_q.ctx.Fields) > 0 {
|
||||||
|
_spec.Unique = _q.ctx.Unique != nil && *_q.ctx.Unique
|
||||||
|
}
|
||||||
|
return sqlgraph.CountNodes(ctx, _q.driver, _spec)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraCacheFileQuery) querySpec() *sqlgraph.QuerySpec {
|
||||||
|
_spec := sqlgraph.NewQuerySpec(soracachefile.Table, soracachefile.Columns, sqlgraph.NewFieldSpec(soracachefile.FieldID, field.TypeInt64))
|
||||||
|
_spec.From = _q.sql
|
||||||
|
if unique := _q.ctx.Unique; unique != nil {
|
||||||
|
_spec.Unique = *unique
|
||||||
|
} else if _q.path != nil {
|
||||||
|
_spec.Unique = true
|
||||||
|
}
|
||||||
|
if fields := _q.ctx.Fields; len(fields) > 0 {
|
||||||
|
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, soracachefile.FieldID)
|
||||||
|
for i := range fields {
|
||||||
|
if fields[i] != soracachefile.FieldID {
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ps := _q.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if limit := _q.ctx.Limit; limit != nil {
|
||||||
|
_spec.Limit = *limit
|
||||||
|
}
|
||||||
|
if offset := _q.ctx.Offset; offset != nil {
|
||||||
|
_spec.Offset = *offset
|
||||||
|
}
|
||||||
|
if ps := _q.order; len(ps) > 0 {
|
||||||
|
_spec.Order = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return _spec
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraCacheFileQuery) sqlQuery(ctx context.Context) *sql.Selector {
|
||||||
|
builder := sql.Dialect(_q.driver.Dialect())
|
||||||
|
t1 := builder.Table(soracachefile.Table)
|
||||||
|
columns := _q.ctx.Fields
|
||||||
|
if len(columns) == 0 {
|
||||||
|
columns = soracachefile.Columns
|
||||||
|
}
|
||||||
|
selector := builder.Select(t1.Columns(columns...)...).From(t1)
|
||||||
|
if _q.sql != nil {
|
||||||
|
selector = _q.sql
|
||||||
|
selector.Select(selector.Columns(columns...)...)
|
||||||
|
}
|
||||||
|
if _q.ctx.Unique != nil && *_q.ctx.Unique {
|
||||||
|
selector.Distinct()
|
||||||
|
}
|
||||||
|
for _, m := range _q.modifiers {
|
||||||
|
m(selector)
|
||||||
|
}
|
||||||
|
for _, p := range _q.predicates {
|
||||||
|
p(selector)
|
||||||
|
}
|
||||||
|
for _, p := range _q.order {
|
||||||
|
p(selector)
|
||||||
|
}
|
||||||
|
if offset := _q.ctx.Offset; offset != nil {
|
||||||
|
// limit is mandatory for offset clause. We start
|
||||||
|
// with default value, and override it below if needed.
|
||||||
|
selector.Offset(*offset).Limit(math.MaxInt32)
|
||||||
|
}
|
||||||
|
if limit := _q.ctx.Limit; limit != nil {
|
||||||
|
selector.Limit(*limit)
|
||||||
|
}
|
||||||
|
return selector
|
||||||
|
}
|
||||||
|
|
||||||
|
// ForUpdate locks the selected rows against concurrent updates, and prevent them from being
|
||||||
|
// updated, deleted or "selected ... for update" by other sessions, until the transaction is
|
||||||
|
// either committed or rolled-back.
|
||||||
|
func (_q *SoraCacheFileQuery) ForUpdate(opts ...sql.LockOption) *SoraCacheFileQuery {
|
||||||
|
if _q.driver.Dialect() == dialect.Postgres {
|
||||||
|
_q.Unique(false)
|
||||||
|
}
|
||||||
|
_q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
|
||||||
|
s.ForUpdate(opts...)
|
||||||
|
})
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// ForShare behaves similarly to ForUpdate, except that it acquires a shared mode lock
|
||||||
|
// on any rows that are read. Other sessions can read the rows, but cannot modify them
|
||||||
|
// until your transaction commits.
|
||||||
|
func (_q *SoraCacheFileQuery) ForShare(opts ...sql.LockOption) *SoraCacheFileQuery {
|
||||||
|
if _q.driver.Dialect() == dialect.Postgres {
|
||||||
|
_q.Unique(false)
|
||||||
|
}
|
||||||
|
_q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
|
||||||
|
s.ForShare(opts...)
|
||||||
|
})
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraCacheFileGroupBy is the group-by builder for SoraCacheFile entities.
|
||||||
|
type SoraCacheFileGroupBy struct {
|
||||||
|
selector
|
||||||
|
build *SoraCacheFileQuery
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate adds the given aggregation functions to the group-by query.
|
||||||
|
func (_g *SoraCacheFileGroupBy) Aggregate(fns ...AggregateFunc) *SoraCacheFileGroupBy {
|
||||||
|
_g.fns = append(_g.fns, fns...)
|
||||||
|
return _g
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan applies the selector query and scans the result into the given value.
|
||||||
|
func (_g *SoraCacheFileGroupBy) Scan(ctx context.Context, v any) error {
|
||||||
|
ctx = setContextOp(ctx, _g.build.ctx, ent.OpQueryGroupBy)
|
||||||
|
if err := _g.build.prepareQuery(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return scanWithInterceptors[*SoraCacheFileQuery, *SoraCacheFileGroupBy](ctx, _g.build, _g, _g.build.inters, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_g *SoraCacheFileGroupBy) sqlScan(ctx context.Context, root *SoraCacheFileQuery, v any) error {
|
||||||
|
selector := root.sqlQuery(ctx).Select()
|
||||||
|
aggregation := make([]string, 0, len(_g.fns))
|
||||||
|
for _, fn := range _g.fns {
|
||||||
|
aggregation = append(aggregation, fn(selector))
|
||||||
|
}
|
||||||
|
if len(selector.SelectedColumns()) == 0 {
|
||||||
|
columns := make([]string, 0, len(*_g.flds)+len(_g.fns))
|
||||||
|
for _, f := range *_g.flds {
|
||||||
|
columns = append(columns, selector.C(f))
|
||||||
|
}
|
||||||
|
columns = append(columns, aggregation...)
|
||||||
|
selector.Select(columns...)
|
||||||
|
}
|
||||||
|
selector.GroupBy(selector.Columns(*_g.flds...)...)
|
||||||
|
if err := selector.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
rows := &sql.Rows{}
|
||||||
|
query, args := selector.Query()
|
||||||
|
if err := _g.build.driver.Query(ctx, query, args, rows); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
return sql.ScanSlice(rows, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraCacheFileSelect is the builder for selecting fields of SoraCacheFile entities.
|
||||||
|
type SoraCacheFileSelect struct {
|
||||||
|
*SoraCacheFileQuery
|
||||||
|
selector
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate adds the given aggregation functions to the selector query.
|
||||||
|
func (_s *SoraCacheFileSelect) Aggregate(fns ...AggregateFunc) *SoraCacheFileSelect {
|
||||||
|
_s.fns = append(_s.fns, fns...)
|
||||||
|
return _s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan applies the selector query and scans the result into the given value.
|
||||||
|
func (_s *SoraCacheFileSelect) Scan(ctx context.Context, v any) error {
|
||||||
|
ctx = setContextOp(ctx, _s.ctx, ent.OpQuerySelect)
|
||||||
|
if err := _s.prepareQuery(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return scanWithInterceptors[*SoraCacheFileQuery, *SoraCacheFileSelect](ctx, _s.SoraCacheFileQuery, _s, _s.inters, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_s *SoraCacheFileSelect) sqlScan(ctx context.Context, root *SoraCacheFileQuery, v any) error {
|
||||||
|
selector := root.sqlQuery(ctx)
|
||||||
|
aggregation := make([]string, 0, len(_s.fns))
|
||||||
|
for _, fn := range _s.fns {
|
||||||
|
aggregation = append(aggregation, fn(selector))
|
||||||
|
}
|
||||||
|
switch n := len(*_s.selector.flds); {
|
||||||
|
case n == 0 && len(aggregation) > 0:
|
||||||
|
selector.Select(aggregation...)
|
||||||
|
case n != 0 && len(aggregation) > 0:
|
||||||
|
selector.AppendSelect(aggregation...)
|
||||||
|
}
|
||||||
|
rows := &sql.Rows{}
|
||||||
|
query, args := selector.Query()
|
||||||
|
if err := _s.driver.Query(ctx, query, args, rows); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
return sql.ScanSlice(rows, v)
|
||||||
|
}
|
||||||
596
backend/ent/soracachefile_update.go
Normal file
596
backend/ent/soracachefile_update.go
Normal file
@@ -0,0 +1,596 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soracachefile"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraCacheFileUpdate is the builder for updating SoraCacheFile entities.
|
||||||
|
type SoraCacheFileUpdate struct {
|
||||||
|
config
|
||||||
|
hooks []Hook
|
||||||
|
mutation *SoraCacheFileMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraCacheFileUpdate builder.
|
||||||
|
func (_u *SoraCacheFileUpdate) Where(ps ...predicate.SoraCacheFile) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.Where(ps...)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTaskID sets the "task_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetTaskID(v string) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.SetTaskID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTaskID sets the "task_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetNillableTaskID(v *string) *SoraCacheFileUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTaskID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearTaskID clears the value of the "task_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) ClearTaskID() *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.ClearTaskID()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetAccountID sets the "account_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetAccountID(v int64) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.ResetAccountID()
|
||||||
|
_u.mutation.SetAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableAccountID sets the "account_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetNillableAccountID(v *int64) *SoraCacheFileUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetAccountID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddAccountID adds value to the "account_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) AddAccountID(v int64) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.AddAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetUserID sets the "user_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetUserID(v int64) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.ResetUserID()
|
||||||
|
_u.mutation.SetUserID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableUserID sets the "user_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetNillableUserID(v *int64) *SoraCacheFileUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetUserID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddUserID adds value to the "user_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) AddUserID(v int64) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.AddUserID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetMediaType sets the "media_type" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetMediaType(v string) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.SetMediaType(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableMediaType sets the "media_type" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetNillableMediaType(v *string) *SoraCacheFileUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetMediaType(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetOriginalURL sets the "original_url" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetOriginalURL(v string) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.SetOriginalURL(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableOriginalURL sets the "original_url" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetNillableOriginalURL(v *string) *SoraCacheFileUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetOriginalURL(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCachePath sets the "cache_path" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetCachePath(v string) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.SetCachePath(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableCachePath sets the "cache_path" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetNillableCachePath(v *string) *SoraCacheFileUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCachePath(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCacheURL sets the "cache_url" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetCacheURL(v string) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.SetCacheURL(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableCacheURL sets the "cache_url" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetNillableCacheURL(v *string) *SoraCacheFileUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCacheURL(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetSizeBytes sets the "size_bytes" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetSizeBytes(v int64) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.ResetSizeBytes()
|
||||||
|
_u.mutation.SetSizeBytes(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableSizeBytes sets the "size_bytes" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetNillableSizeBytes(v *int64) *SoraCacheFileUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetSizeBytes(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddSizeBytes adds value to the "size_bytes" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) AddSizeBytes(v int64) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.AddSizeBytes(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCreatedAt sets the "created_at" field.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetCreatedAt(v time.Time) *SoraCacheFileUpdate {
|
||||||
|
_u.mutation.SetCreatedAt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdate) SetNillableCreatedAt(v *time.Time) *SoraCacheFileUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCreatedAt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mutation returns the SoraCacheFileMutation object of the builder.
|
||||||
|
func (_u *SoraCacheFileUpdate) Mutation() *SoraCacheFileMutation {
|
||||||
|
return _u.mutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save executes the query and returns the number of nodes affected by the update operation.
|
||||||
|
func (_u *SoraCacheFileUpdate) Save(ctx context.Context) (int, error) {
|
||||||
|
return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SaveX is like Save, but panics if an error occurs.
|
||||||
|
func (_u *SoraCacheFileUpdate) SaveX(ctx context.Context) int {
|
||||||
|
affected, err := _u.Save(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return affected
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the query.
|
||||||
|
func (_u *SoraCacheFileUpdate) Exec(ctx context.Context) error {
|
||||||
|
_, err := _u.Save(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_u *SoraCacheFileUpdate) ExecX(ctx context.Context) {
|
||||||
|
if err := _u.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check runs all checks and user-defined validators on the builder.
|
||||||
|
func (_u *SoraCacheFileUpdate) check() error {
|
||||||
|
if v, ok := _u.mutation.TaskID(); ok {
|
||||||
|
if err := soracachefile.TaskIDValidator(v); err != nil {
|
||||||
|
return &ValidationError{Name: "task_id", err: fmt.Errorf(`ent: validator failed for field "SoraCacheFile.task_id": %w`, err)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if v, ok := _u.mutation.MediaType(); ok {
|
||||||
|
if err := soracachefile.MediaTypeValidator(v); err != nil {
|
||||||
|
return &ValidationError{Name: "media_type", err: fmt.Errorf(`ent: validator failed for field "SoraCacheFile.media_type": %w`, err)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_u *SoraCacheFileUpdate) sqlSave(ctx context.Context) (_node int, err error) {
|
||||||
|
if err := _u.check(); err != nil {
|
||||||
|
return _node, err
|
||||||
|
}
|
||||||
|
_spec := sqlgraph.NewUpdateSpec(soracachefile.Table, soracachefile.Columns, sqlgraph.NewFieldSpec(soracachefile.FieldID, field.TypeInt64))
|
||||||
|
if ps := _u.mutation.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TaskID(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldTaskID, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if _u.mutation.TaskIDCleared() {
|
||||||
|
_spec.ClearField(soracachefile.FieldTaskID, field.TypeString)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AccountID(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldAccountID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedAccountID(); ok {
|
||||||
|
_spec.AddField(soracachefile.FieldAccountID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.UserID(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldUserID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedUserID(); ok {
|
||||||
|
_spec.AddField(soracachefile.FieldUserID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.MediaType(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldMediaType, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.OriginalURL(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldOriginalURL, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.CachePath(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldCachePath, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.CacheURL(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldCacheURL, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.SizeBytes(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldSizeBytes, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedSizeBytes(); ok {
|
||||||
|
_spec.AddField(soracachefile.FieldSizeBytes, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.CreatedAt(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldCreatedAt, field.TypeTime, value)
|
||||||
|
}
|
||||||
|
if _node, err = sqlgraph.UpdateNodes(ctx, _u.driver, _spec); err != nil {
|
||||||
|
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||||
|
err = &NotFoundError{soracachefile.Label}
|
||||||
|
} else if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
_u.mutation.done = true
|
||||||
|
return _node, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraCacheFileUpdateOne is the builder for updating a single SoraCacheFile entity.
|
||||||
|
type SoraCacheFileUpdateOne struct {
|
||||||
|
config
|
||||||
|
fields []string
|
||||||
|
hooks []Hook
|
||||||
|
mutation *SoraCacheFileMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTaskID sets the "task_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetTaskID(v string) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.SetTaskID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTaskID sets the "task_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetNillableTaskID(v *string) *SoraCacheFileUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTaskID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearTaskID clears the value of the "task_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) ClearTaskID() *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.ClearTaskID()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetAccountID sets the "account_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetAccountID(v int64) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.ResetAccountID()
|
||||||
|
_u.mutation.SetAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableAccountID sets the "account_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetNillableAccountID(v *int64) *SoraCacheFileUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetAccountID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddAccountID adds value to the "account_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) AddAccountID(v int64) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.AddAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetUserID sets the "user_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetUserID(v int64) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.ResetUserID()
|
||||||
|
_u.mutation.SetUserID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableUserID sets the "user_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetNillableUserID(v *int64) *SoraCacheFileUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetUserID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddUserID adds value to the "user_id" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) AddUserID(v int64) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.AddUserID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetMediaType sets the "media_type" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetMediaType(v string) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.SetMediaType(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableMediaType sets the "media_type" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetNillableMediaType(v *string) *SoraCacheFileUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetMediaType(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetOriginalURL sets the "original_url" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetOriginalURL(v string) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.SetOriginalURL(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableOriginalURL sets the "original_url" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetNillableOriginalURL(v *string) *SoraCacheFileUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetOriginalURL(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCachePath sets the "cache_path" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetCachePath(v string) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.SetCachePath(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableCachePath sets the "cache_path" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetNillableCachePath(v *string) *SoraCacheFileUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCachePath(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCacheURL sets the "cache_url" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetCacheURL(v string) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.SetCacheURL(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableCacheURL sets the "cache_url" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetNillableCacheURL(v *string) *SoraCacheFileUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCacheURL(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetSizeBytes sets the "size_bytes" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetSizeBytes(v int64) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.ResetSizeBytes()
|
||||||
|
_u.mutation.SetSizeBytes(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableSizeBytes sets the "size_bytes" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetNillableSizeBytes(v *int64) *SoraCacheFileUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetSizeBytes(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddSizeBytes adds value to the "size_bytes" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) AddSizeBytes(v int64) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.AddSizeBytes(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCreatedAt sets the "created_at" field.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetCreatedAt(v time.Time) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.SetCreatedAt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SetNillableCreatedAt(v *time.Time) *SoraCacheFileUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCreatedAt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mutation returns the SoraCacheFileMutation object of the builder.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) Mutation() *SoraCacheFileMutation {
|
||||||
|
return _u.mutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraCacheFileUpdate builder.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) Where(ps ...predicate.SoraCacheFile) *SoraCacheFileUpdateOne {
|
||||||
|
_u.mutation.Where(ps...)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Select allows selecting one or more fields (columns) of the returned entity.
|
||||||
|
// The default is selecting all fields defined in the entity schema.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) Select(field string, fields ...string) *SoraCacheFileUpdateOne {
|
||||||
|
_u.fields = append([]string{field}, fields...)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save executes the query and returns the updated SoraCacheFile entity.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) Save(ctx context.Context) (*SoraCacheFile, error) {
|
||||||
|
return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SaveX is like Save, but panics if an error occurs.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) SaveX(ctx context.Context) *SoraCacheFile {
|
||||||
|
node, err := _u.Save(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the query on the entity.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) Exec(ctx context.Context) error {
|
||||||
|
_, err := _u.Save(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) ExecX(ctx context.Context) {
|
||||||
|
if err := _u.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check runs all checks and user-defined validators on the builder.
|
||||||
|
func (_u *SoraCacheFileUpdateOne) check() error {
|
||||||
|
if v, ok := _u.mutation.TaskID(); ok {
|
||||||
|
if err := soracachefile.TaskIDValidator(v); err != nil {
|
||||||
|
return &ValidationError{Name: "task_id", err: fmt.Errorf(`ent: validator failed for field "SoraCacheFile.task_id": %w`, err)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if v, ok := _u.mutation.MediaType(); ok {
|
||||||
|
if err := soracachefile.MediaTypeValidator(v); err != nil {
|
||||||
|
return &ValidationError{Name: "media_type", err: fmt.Errorf(`ent: validator failed for field "SoraCacheFile.media_type": %w`, err)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_u *SoraCacheFileUpdateOne) sqlSave(ctx context.Context) (_node *SoraCacheFile, err error) {
|
||||||
|
if err := _u.check(); err != nil {
|
||||||
|
return _node, err
|
||||||
|
}
|
||||||
|
_spec := sqlgraph.NewUpdateSpec(soracachefile.Table, soracachefile.Columns, sqlgraph.NewFieldSpec(soracachefile.FieldID, field.TypeInt64))
|
||||||
|
id, ok := _u.mutation.ID()
|
||||||
|
if !ok {
|
||||||
|
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "SoraCacheFile.id" for update`)}
|
||||||
|
}
|
||||||
|
_spec.Node.ID.Value = id
|
||||||
|
if fields := _u.fields; len(fields) > 0 {
|
||||||
|
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, soracachefile.FieldID)
|
||||||
|
for _, f := range fields {
|
||||||
|
if !soracachefile.ValidColumn(f) {
|
||||||
|
return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||||
|
}
|
||||||
|
if f != soracachefile.FieldID {
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ps := _u.mutation.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TaskID(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldTaskID, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if _u.mutation.TaskIDCleared() {
|
||||||
|
_spec.ClearField(soracachefile.FieldTaskID, field.TypeString)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AccountID(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldAccountID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedAccountID(); ok {
|
||||||
|
_spec.AddField(soracachefile.FieldAccountID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.UserID(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldUserID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedUserID(); ok {
|
||||||
|
_spec.AddField(soracachefile.FieldUserID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.MediaType(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldMediaType, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.OriginalURL(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldOriginalURL, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.CachePath(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldCachePath, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.CacheURL(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldCacheURL, field.TypeString, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.SizeBytes(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldSizeBytes, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedSizeBytes(); ok {
|
||||||
|
_spec.AddField(soracachefile.FieldSizeBytes, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.CreatedAt(); ok {
|
||||||
|
_spec.SetField(soracachefile.FieldCreatedAt, field.TypeTime, value)
|
||||||
|
}
|
||||||
|
_node = &SoraCacheFile{config: _u.config}
|
||||||
|
_spec.Assign = _node.assignValues
|
||||||
|
_spec.ScanValues = _node.scanValues
|
||||||
|
if err = sqlgraph.UpdateNode(ctx, _u.driver, _spec); err != nil {
|
||||||
|
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||||
|
err = &NotFoundError{soracachefile.Label}
|
||||||
|
} else if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
_u.mutation.done = true
|
||||||
|
return _node, nil
|
||||||
|
}
|
||||||
227
backend/ent/soratask.go
Normal file
227
backend/ent/soratask.go
Normal file
@@ -0,0 +1,227 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soratask"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraTask is the model entity for the SoraTask schema.
|
||||||
|
type SoraTask struct {
|
||||||
|
config `json:"-"`
|
||||||
|
// ID of the ent.
|
||||||
|
ID int64 `json:"id,omitempty"`
|
||||||
|
// TaskID holds the value of the "task_id" field.
|
||||||
|
TaskID string `json:"task_id,omitempty"`
|
||||||
|
// AccountID holds the value of the "account_id" field.
|
||||||
|
AccountID int64 `json:"account_id,omitempty"`
|
||||||
|
// Model holds the value of the "model" field.
|
||||||
|
Model string `json:"model,omitempty"`
|
||||||
|
// Prompt holds the value of the "prompt" field.
|
||||||
|
Prompt string `json:"prompt,omitempty"`
|
||||||
|
// Status holds the value of the "status" field.
|
||||||
|
Status string `json:"status,omitempty"`
|
||||||
|
// Progress holds the value of the "progress" field.
|
||||||
|
Progress float64 `json:"progress,omitempty"`
|
||||||
|
// ResultUrls holds the value of the "result_urls" field.
|
||||||
|
ResultUrls *string `json:"result_urls,omitempty"`
|
||||||
|
// ErrorMessage holds the value of the "error_message" field.
|
||||||
|
ErrorMessage *string `json:"error_message,omitempty"`
|
||||||
|
// RetryCount holds the value of the "retry_count" field.
|
||||||
|
RetryCount int `json:"retry_count,omitempty"`
|
||||||
|
// CreatedAt holds the value of the "created_at" field.
|
||||||
|
CreatedAt time.Time `json:"created_at,omitempty"`
|
||||||
|
// CompletedAt holds the value of the "completed_at" field.
|
||||||
|
CompletedAt *time.Time `json:"completed_at,omitempty"`
|
||||||
|
selectValues sql.SelectValues
|
||||||
|
}
|
||||||
|
|
||||||
|
// scanValues returns the types for scanning values from sql.Rows.
|
||||||
|
func (*SoraTask) scanValues(columns []string) ([]any, error) {
|
||||||
|
values := make([]any, len(columns))
|
||||||
|
for i := range columns {
|
||||||
|
switch columns[i] {
|
||||||
|
case soratask.FieldProgress:
|
||||||
|
values[i] = new(sql.NullFloat64)
|
||||||
|
case soratask.FieldID, soratask.FieldAccountID, soratask.FieldRetryCount:
|
||||||
|
values[i] = new(sql.NullInt64)
|
||||||
|
case soratask.FieldTaskID, soratask.FieldModel, soratask.FieldPrompt, soratask.FieldStatus, soratask.FieldResultUrls, soratask.FieldErrorMessage:
|
||||||
|
values[i] = new(sql.NullString)
|
||||||
|
case soratask.FieldCreatedAt, soratask.FieldCompletedAt:
|
||||||
|
values[i] = new(sql.NullTime)
|
||||||
|
default:
|
||||||
|
values[i] = new(sql.UnknownType)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return values, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// assignValues assigns the values that were returned from sql.Rows (after scanning)
|
||||||
|
// to the SoraTask fields.
|
||||||
|
func (_m *SoraTask) assignValues(columns []string, values []any) error {
|
||||||
|
if m, n := len(values), len(columns); m < n {
|
||||||
|
return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
|
||||||
|
}
|
||||||
|
for i := range columns {
|
||||||
|
switch columns[i] {
|
||||||
|
case soratask.FieldID:
|
||||||
|
value, ok := values[i].(*sql.NullInt64)
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field id", value)
|
||||||
|
}
|
||||||
|
_m.ID = int64(value.Int64)
|
||||||
|
case soratask.FieldTaskID:
|
||||||
|
if value, ok := values[i].(*sql.NullString); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field task_id", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.TaskID = value.String
|
||||||
|
}
|
||||||
|
case soratask.FieldAccountID:
|
||||||
|
if value, ok := values[i].(*sql.NullInt64); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field account_id", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.AccountID = value.Int64
|
||||||
|
}
|
||||||
|
case soratask.FieldModel:
|
||||||
|
if value, ok := values[i].(*sql.NullString); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field model", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.Model = value.String
|
||||||
|
}
|
||||||
|
case soratask.FieldPrompt:
|
||||||
|
if value, ok := values[i].(*sql.NullString); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field prompt", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.Prompt = value.String
|
||||||
|
}
|
||||||
|
case soratask.FieldStatus:
|
||||||
|
if value, ok := values[i].(*sql.NullString); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field status", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.Status = value.String
|
||||||
|
}
|
||||||
|
case soratask.FieldProgress:
|
||||||
|
if value, ok := values[i].(*sql.NullFloat64); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field progress", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.Progress = value.Float64
|
||||||
|
}
|
||||||
|
case soratask.FieldResultUrls:
|
||||||
|
if value, ok := values[i].(*sql.NullString); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field result_urls", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.ResultUrls = new(string)
|
||||||
|
*_m.ResultUrls = value.String
|
||||||
|
}
|
||||||
|
case soratask.FieldErrorMessage:
|
||||||
|
if value, ok := values[i].(*sql.NullString); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field error_message", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.ErrorMessage = new(string)
|
||||||
|
*_m.ErrorMessage = value.String
|
||||||
|
}
|
||||||
|
case soratask.FieldRetryCount:
|
||||||
|
if value, ok := values[i].(*sql.NullInt64); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field retry_count", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.RetryCount = int(value.Int64)
|
||||||
|
}
|
||||||
|
case soratask.FieldCreatedAt:
|
||||||
|
if value, ok := values[i].(*sql.NullTime); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field created_at", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.CreatedAt = value.Time
|
||||||
|
}
|
||||||
|
case soratask.FieldCompletedAt:
|
||||||
|
if value, ok := values[i].(*sql.NullTime); !ok {
|
||||||
|
return fmt.Errorf("unexpected type %T for field completed_at", values[i])
|
||||||
|
} else if value.Valid {
|
||||||
|
_m.CompletedAt = new(time.Time)
|
||||||
|
*_m.CompletedAt = value.Time
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
_m.selectValues.Set(columns[i], values[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Value returns the ent.Value that was dynamically selected and assigned to the SoraTask.
|
||||||
|
// This includes values selected through modifiers, order, etc.
|
||||||
|
func (_m *SoraTask) Value(name string) (ent.Value, error) {
|
||||||
|
return _m.selectValues.Get(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update returns a builder for updating this SoraTask.
|
||||||
|
// Note that you need to call SoraTask.Unwrap() before calling this method if this SoraTask
|
||||||
|
// was returned from a transaction, and the transaction was committed or rolled back.
|
||||||
|
func (_m *SoraTask) Update() *SoraTaskUpdateOne {
|
||||||
|
return NewSoraTaskClient(_m.config).UpdateOne(_m)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unwrap unwraps the SoraTask entity that was returned from a transaction after it was closed,
|
||||||
|
// so that all future queries will be executed through the driver which created the transaction.
|
||||||
|
func (_m *SoraTask) Unwrap() *SoraTask {
|
||||||
|
_tx, ok := _m.config.driver.(*txDriver)
|
||||||
|
if !ok {
|
||||||
|
panic("ent: SoraTask is not a transactional entity")
|
||||||
|
}
|
||||||
|
_m.config.driver = _tx.drv
|
||||||
|
return _m
|
||||||
|
}
|
||||||
|
|
||||||
|
// String implements the fmt.Stringer.
|
||||||
|
func (_m *SoraTask) String() string {
|
||||||
|
var builder strings.Builder
|
||||||
|
builder.WriteString("SoraTask(")
|
||||||
|
builder.WriteString(fmt.Sprintf("id=%v, ", _m.ID))
|
||||||
|
builder.WriteString("task_id=")
|
||||||
|
builder.WriteString(_m.TaskID)
|
||||||
|
builder.WriteString(", ")
|
||||||
|
builder.WriteString("account_id=")
|
||||||
|
builder.WriteString(fmt.Sprintf("%v", _m.AccountID))
|
||||||
|
builder.WriteString(", ")
|
||||||
|
builder.WriteString("model=")
|
||||||
|
builder.WriteString(_m.Model)
|
||||||
|
builder.WriteString(", ")
|
||||||
|
builder.WriteString("prompt=")
|
||||||
|
builder.WriteString(_m.Prompt)
|
||||||
|
builder.WriteString(", ")
|
||||||
|
builder.WriteString("status=")
|
||||||
|
builder.WriteString(_m.Status)
|
||||||
|
builder.WriteString(", ")
|
||||||
|
builder.WriteString("progress=")
|
||||||
|
builder.WriteString(fmt.Sprintf("%v", _m.Progress))
|
||||||
|
builder.WriteString(", ")
|
||||||
|
if v := _m.ResultUrls; v != nil {
|
||||||
|
builder.WriteString("result_urls=")
|
||||||
|
builder.WriteString(*v)
|
||||||
|
}
|
||||||
|
builder.WriteString(", ")
|
||||||
|
if v := _m.ErrorMessage; v != nil {
|
||||||
|
builder.WriteString("error_message=")
|
||||||
|
builder.WriteString(*v)
|
||||||
|
}
|
||||||
|
builder.WriteString(", ")
|
||||||
|
builder.WriteString("retry_count=")
|
||||||
|
builder.WriteString(fmt.Sprintf("%v", _m.RetryCount))
|
||||||
|
builder.WriteString(", ")
|
||||||
|
builder.WriteString("created_at=")
|
||||||
|
builder.WriteString(_m.CreatedAt.Format(time.ANSIC))
|
||||||
|
builder.WriteString(", ")
|
||||||
|
if v := _m.CompletedAt; v != nil {
|
||||||
|
builder.WriteString("completed_at=")
|
||||||
|
builder.WriteString(v.Format(time.ANSIC))
|
||||||
|
}
|
||||||
|
builder.WriteByte(')')
|
||||||
|
return builder.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraTasks is a parsable slice of SoraTask.
|
||||||
|
type SoraTasks []*SoraTask
|
||||||
146
backend/ent/soratask/soratask.go
Normal file
146
backend/ent/soratask/soratask.go
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package soratask
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// Label holds the string label denoting the soratask type in the database.
|
||||||
|
Label = "sora_task"
|
||||||
|
// FieldID holds the string denoting the id field in the database.
|
||||||
|
FieldID = "id"
|
||||||
|
// FieldTaskID holds the string denoting the task_id field in the database.
|
||||||
|
FieldTaskID = "task_id"
|
||||||
|
// FieldAccountID holds the string denoting the account_id field in the database.
|
||||||
|
FieldAccountID = "account_id"
|
||||||
|
// FieldModel holds the string denoting the model field in the database.
|
||||||
|
FieldModel = "model"
|
||||||
|
// FieldPrompt holds the string denoting the prompt field in the database.
|
||||||
|
FieldPrompt = "prompt"
|
||||||
|
// FieldStatus holds the string denoting the status field in the database.
|
||||||
|
FieldStatus = "status"
|
||||||
|
// FieldProgress holds the string denoting the progress field in the database.
|
||||||
|
FieldProgress = "progress"
|
||||||
|
// FieldResultUrls holds the string denoting the result_urls field in the database.
|
||||||
|
FieldResultUrls = "result_urls"
|
||||||
|
// FieldErrorMessage holds the string denoting the error_message field in the database.
|
||||||
|
FieldErrorMessage = "error_message"
|
||||||
|
// FieldRetryCount holds the string denoting the retry_count field in the database.
|
||||||
|
FieldRetryCount = "retry_count"
|
||||||
|
// FieldCreatedAt holds the string denoting the created_at field in the database.
|
||||||
|
FieldCreatedAt = "created_at"
|
||||||
|
// FieldCompletedAt holds the string denoting the completed_at field in the database.
|
||||||
|
FieldCompletedAt = "completed_at"
|
||||||
|
// Table holds the table name of the soratask in the database.
|
||||||
|
Table = "sora_tasks"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Columns holds all SQL columns for soratask fields.
|
||||||
|
var Columns = []string{
|
||||||
|
FieldID,
|
||||||
|
FieldTaskID,
|
||||||
|
FieldAccountID,
|
||||||
|
FieldModel,
|
||||||
|
FieldPrompt,
|
||||||
|
FieldStatus,
|
||||||
|
FieldProgress,
|
||||||
|
FieldResultUrls,
|
||||||
|
FieldErrorMessage,
|
||||||
|
FieldRetryCount,
|
||||||
|
FieldCreatedAt,
|
||||||
|
FieldCompletedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidColumn reports if the column name is valid (part of the table columns).
|
||||||
|
func ValidColumn(column string) bool {
|
||||||
|
for i := range Columns {
|
||||||
|
if column == Columns[i] {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
// TaskIDValidator is a validator for the "task_id" field. It is called by the builders before save.
|
||||||
|
TaskIDValidator func(string) error
|
||||||
|
// ModelValidator is a validator for the "model" field. It is called by the builders before save.
|
||||||
|
ModelValidator func(string) error
|
||||||
|
// DefaultStatus holds the default value on creation for the "status" field.
|
||||||
|
DefaultStatus string
|
||||||
|
// StatusValidator is a validator for the "status" field. It is called by the builders before save.
|
||||||
|
StatusValidator func(string) error
|
||||||
|
// DefaultProgress holds the default value on creation for the "progress" field.
|
||||||
|
DefaultProgress float64
|
||||||
|
// DefaultRetryCount holds the default value on creation for the "retry_count" field.
|
||||||
|
DefaultRetryCount int
|
||||||
|
// DefaultCreatedAt holds the default value on creation for the "created_at" field.
|
||||||
|
DefaultCreatedAt func() time.Time
|
||||||
|
)
|
||||||
|
|
||||||
|
// OrderOption defines the ordering options for the SoraTask queries.
|
||||||
|
type OrderOption func(*sql.Selector)
|
||||||
|
|
||||||
|
// ByID orders the results by the id field.
|
||||||
|
func ByID(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldID, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByTaskID orders the results by the task_id field.
|
||||||
|
func ByTaskID(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldTaskID, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByAccountID orders the results by the account_id field.
|
||||||
|
func ByAccountID(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldAccountID, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByModel orders the results by the model field.
|
||||||
|
func ByModel(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldModel, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByPrompt orders the results by the prompt field.
|
||||||
|
func ByPrompt(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldPrompt, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByStatus orders the results by the status field.
|
||||||
|
func ByStatus(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldStatus, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByProgress orders the results by the progress field.
|
||||||
|
func ByProgress(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldProgress, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByResultUrls orders the results by the result_urls field.
|
||||||
|
func ByResultUrls(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldResultUrls, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByErrorMessage orders the results by the error_message field.
|
||||||
|
func ByErrorMessage(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldErrorMessage, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByRetryCount orders the results by the retry_count field.
|
||||||
|
func ByRetryCount(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldRetryCount, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByCreatedAt orders the results by the created_at field.
|
||||||
|
func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ByCompletedAt orders the results by the completed_at field.
|
||||||
|
func ByCompletedAt(opts ...sql.OrderTermOption) OrderOption {
|
||||||
|
return sql.OrderByField(FieldCompletedAt, opts...).ToFunc()
|
||||||
|
}
|
||||||
745
backend/ent/soratask/where.go
Normal file
745
backend/ent/soratask/where.go
Normal file
@@ -0,0 +1,745 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package soratask
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ID filters vertices based on their ID field.
|
||||||
|
func ID(id int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldID, id))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDEQ applies the EQ predicate on the ID field.
|
||||||
|
func IDEQ(id int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldID, id))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDNEQ applies the NEQ predicate on the ID field.
|
||||||
|
func IDNEQ(id int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldID, id))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDIn applies the In predicate on the ID field.
|
||||||
|
func IDIn(ids ...int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldID, ids...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDNotIn applies the NotIn predicate on the ID field.
|
||||||
|
func IDNotIn(ids ...int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldID, ids...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDGT applies the GT predicate on the ID field.
|
||||||
|
func IDGT(id int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldID, id))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDGTE applies the GTE predicate on the ID field.
|
||||||
|
func IDGTE(id int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldID, id))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDLT applies the LT predicate on the ID field.
|
||||||
|
func IDLT(id int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldID, id))
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDLTE applies the LTE predicate on the ID field.
|
||||||
|
func IDLTE(id int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldID, id))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskID applies equality check predicate on the "task_id" field. It's identical to TaskIDEQ.
|
||||||
|
func TaskID(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountID applies equality check predicate on the "account_id" field. It's identical to AccountIDEQ.
|
||||||
|
func AccountID(v int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Model applies equality check predicate on the "model" field. It's identical to ModelEQ.
|
||||||
|
func Model(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prompt applies equality check predicate on the "prompt" field. It's identical to PromptEQ.
|
||||||
|
func Prompt(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Status applies equality check predicate on the "status" field. It's identical to StatusEQ.
|
||||||
|
func Status(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Progress applies equality check predicate on the "progress" field. It's identical to ProgressEQ.
|
||||||
|
func Progress(v float64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldProgress, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrls applies equality check predicate on the "result_urls" field. It's identical to ResultUrlsEQ.
|
||||||
|
func ResultUrls(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessage applies equality check predicate on the "error_message" field. It's identical to ErrorMessageEQ.
|
||||||
|
func ErrorMessage(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RetryCount applies equality check predicate on the "retry_count" field. It's identical to RetryCountEQ.
|
||||||
|
func RetryCount(v int) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldRetryCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
|
||||||
|
func CreatedAt(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAt applies equality check predicate on the "completed_at" field. It's identical to CompletedAtEQ.
|
||||||
|
func CompletedAt(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldCompletedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDEQ applies the EQ predicate on the "task_id" field.
|
||||||
|
func TaskIDEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDNEQ applies the NEQ predicate on the "task_id" field.
|
||||||
|
func TaskIDNEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDIn applies the In predicate on the "task_id" field.
|
||||||
|
func TaskIDIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldTaskID, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDNotIn applies the NotIn predicate on the "task_id" field.
|
||||||
|
func TaskIDNotIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldTaskID, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDGT applies the GT predicate on the "task_id" field.
|
||||||
|
func TaskIDGT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDGTE applies the GTE predicate on the "task_id" field.
|
||||||
|
func TaskIDGTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDLT applies the LT predicate on the "task_id" field.
|
||||||
|
func TaskIDLT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDLTE applies the LTE predicate on the "task_id" field.
|
||||||
|
func TaskIDLTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDContains applies the Contains predicate on the "task_id" field.
|
||||||
|
func TaskIDContains(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContains(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDHasPrefix applies the HasPrefix predicate on the "task_id" field.
|
||||||
|
func TaskIDHasPrefix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasPrefix(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDHasSuffix applies the HasSuffix predicate on the "task_id" field.
|
||||||
|
func TaskIDHasSuffix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasSuffix(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDEqualFold applies the EqualFold predicate on the "task_id" field.
|
||||||
|
func TaskIDEqualFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEqualFold(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TaskIDContainsFold applies the ContainsFold predicate on the "task_id" field.
|
||||||
|
func TaskIDContainsFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContainsFold(FieldTaskID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDEQ applies the EQ predicate on the "account_id" field.
|
||||||
|
func AccountIDEQ(v int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDNEQ applies the NEQ predicate on the "account_id" field.
|
||||||
|
func AccountIDNEQ(v int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDIn applies the In predicate on the "account_id" field.
|
||||||
|
func AccountIDIn(vs ...int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldAccountID, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDNotIn applies the NotIn predicate on the "account_id" field.
|
||||||
|
func AccountIDNotIn(vs ...int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldAccountID, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDGT applies the GT predicate on the "account_id" field.
|
||||||
|
func AccountIDGT(v int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDGTE applies the GTE predicate on the "account_id" field.
|
||||||
|
func AccountIDGTE(v int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDLT applies the LT predicate on the "account_id" field.
|
||||||
|
func AccountIDLT(v int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDLTE applies the LTE predicate on the "account_id" field.
|
||||||
|
func AccountIDLTE(v int64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelEQ applies the EQ predicate on the "model" field.
|
||||||
|
func ModelEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelNEQ applies the NEQ predicate on the "model" field.
|
||||||
|
func ModelNEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelIn applies the In predicate on the "model" field.
|
||||||
|
func ModelIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldModel, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelNotIn applies the NotIn predicate on the "model" field.
|
||||||
|
func ModelNotIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldModel, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelGT applies the GT predicate on the "model" field.
|
||||||
|
func ModelGT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelGTE applies the GTE predicate on the "model" field.
|
||||||
|
func ModelGTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelLT applies the LT predicate on the "model" field.
|
||||||
|
func ModelLT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelLTE applies the LTE predicate on the "model" field.
|
||||||
|
func ModelLTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelContains applies the Contains predicate on the "model" field.
|
||||||
|
func ModelContains(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContains(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelHasPrefix applies the HasPrefix predicate on the "model" field.
|
||||||
|
func ModelHasPrefix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasPrefix(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelHasSuffix applies the HasSuffix predicate on the "model" field.
|
||||||
|
func ModelHasSuffix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasSuffix(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelEqualFold applies the EqualFold predicate on the "model" field.
|
||||||
|
func ModelEqualFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEqualFold(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelContainsFold applies the ContainsFold predicate on the "model" field.
|
||||||
|
func ModelContainsFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContainsFold(FieldModel, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptEQ applies the EQ predicate on the "prompt" field.
|
||||||
|
func PromptEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptNEQ applies the NEQ predicate on the "prompt" field.
|
||||||
|
func PromptNEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptIn applies the In predicate on the "prompt" field.
|
||||||
|
func PromptIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldPrompt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptNotIn applies the NotIn predicate on the "prompt" field.
|
||||||
|
func PromptNotIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldPrompt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptGT applies the GT predicate on the "prompt" field.
|
||||||
|
func PromptGT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptGTE applies the GTE predicate on the "prompt" field.
|
||||||
|
func PromptGTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptLT applies the LT predicate on the "prompt" field.
|
||||||
|
func PromptLT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptLTE applies the LTE predicate on the "prompt" field.
|
||||||
|
func PromptLTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptContains applies the Contains predicate on the "prompt" field.
|
||||||
|
func PromptContains(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContains(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptHasPrefix applies the HasPrefix predicate on the "prompt" field.
|
||||||
|
func PromptHasPrefix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasPrefix(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptHasSuffix applies the HasSuffix predicate on the "prompt" field.
|
||||||
|
func PromptHasSuffix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasSuffix(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptEqualFold applies the EqualFold predicate on the "prompt" field.
|
||||||
|
func PromptEqualFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEqualFold(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// PromptContainsFold applies the ContainsFold predicate on the "prompt" field.
|
||||||
|
func PromptContainsFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContainsFold(FieldPrompt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusEQ applies the EQ predicate on the "status" field.
|
||||||
|
func StatusEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusNEQ applies the NEQ predicate on the "status" field.
|
||||||
|
func StatusNEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusIn applies the In predicate on the "status" field.
|
||||||
|
func StatusIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldStatus, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusNotIn applies the NotIn predicate on the "status" field.
|
||||||
|
func StatusNotIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldStatus, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusGT applies the GT predicate on the "status" field.
|
||||||
|
func StatusGT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusGTE applies the GTE predicate on the "status" field.
|
||||||
|
func StatusGTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusLT applies the LT predicate on the "status" field.
|
||||||
|
func StatusLT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusLTE applies the LTE predicate on the "status" field.
|
||||||
|
func StatusLTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusContains applies the Contains predicate on the "status" field.
|
||||||
|
func StatusContains(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContains(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusHasPrefix applies the HasPrefix predicate on the "status" field.
|
||||||
|
func StatusHasPrefix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasPrefix(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusHasSuffix applies the HasSuffix predicate on the "status" field.
|
||||||
|
func StatusHasSuffix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasSuffix(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusEqualFold applies the EqualFold predicate on the "status" field.
|
||||||
|
func StatusEqualFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEqualFold(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// StatusContainsFold applies the ContainsFold predicate on the "status" field.
|
||||||
|
func StatusContainsFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContainsFold(FieldStatus, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProgressEQ applies the EQ predicate on the "progress" field.
|
||||||
|
func ProgressEQ(v float64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldProgress, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProgressNEQ applies the NEQ predicate on the "progress" field.
|
||||||
|
func ProgressNEQ(v float64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldProgress, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProgressIn applies the In predicate on the "progress" field.
|
||||||
|
func ProgressIn(vs ...float64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldProgress, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProgressNotIn applies the NotIn predicate on the "progress" field.
|
||||||
|
func ProgressNotIn(vs ...float64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldProgress, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProgressGT applies the GT predicate on the "progress" field.
|
||||||
|
func ProgressGT(v float64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldProgress, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProgressGTE applies the GTE predicate on the "progress" field.
|
||||||
|
func ProgressGTE(v float64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldProgress, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProgressLT applies the LT predicate on the "progress" field.
|
||||||
|
func ProgressLT(v float64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldProgress, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ProgressLTE applies the LTE predicate on the "progress" field.
|
||||||
|
func ProgressLTE(v float64) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldProgress, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsEQ applies the EQ predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsNEQ applies the NEQ predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsNEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsIn applies the In predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldResultUrls, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsNotIn applies the NotIn predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsNotIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldResultUrls, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsGT applies the GT predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsGT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsGTE applies the GTE predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsGTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsLT applies the LT predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsLT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsLTE applies the LTE predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsLTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsContains applies the Contains predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsContains(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContains(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsHasPrefix applies the HasPrefix predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsHasPrefix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasPrefix(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsHasSuffix applies the HasSuffix predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsHasSuffix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasSuffix(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsIsNil applies the IsNil predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsIsNil() predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIsNull(FieldResultUrls))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsNotNil applies the NotNil predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsNotNil() predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotNull(FieldResultUrls))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsEqualFold applies the EqualFold predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsEqualFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEqualFold(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResultUrlsContainsFold applies the ContainsFold predicate on the "result_urls" field.
|
||||||
|
func ResultUrlsContainsFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContainsFold(FieldResultUrls, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageEQ applies the EQ predicate on the "error_message" field.
|
||||||
|
func ErrorMessageEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageNEQ applies the NEQ predicate on the "error_message" field.
|
||||||
|
func ErrorMessageNEQ(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageIn applies the In predicate on the "error_message" field.
|
||||||
|
func ErrorMessageIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldErrorMessage, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageNotIn applies the NotIn predicate on the "error_message" field.
|
||||||
|
func ErrorMessageNotIn(vs ...string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldErrorMessage, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageGT applies the GT predicate on the "error_message" field.
|
||||||
|
func ErrorMessageGT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageGTE applies the GTE predicate on the "error_message" field.
|
||||||
|
func ErrorMessageGTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageLT applies the LT predicate on the "error_message" field.
|
||||||
|
func ErrorMessageLT(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageLTE applies the LTE predicate on the "error_message" field.
|
||||||
|
func ErrorMessageLTE(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageContains applies the Contains predicate on the "error_message" field.
|
||||||
|
func ErrorMessageContains(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContains(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageHasPrefix applies the HasPrefix predicate on the "error_message" field.
|
||||||
|
func ErrorMessageHasPrefix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasPrefix(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageHasSuffix applies the HasSuffix predicate on the "error_message" field.
|
||||||
|
func ErrorMessageHasSuffix(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldHasSuffix(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageIsNil applies the IsNil predicate on the "error_message" field.
|
||||||
|
func ErrorMessageIsNil() predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIsNull(FieldErrorMessage))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageNotNil applies the NotNil predicate on the "error_message" field.
|
||||||
|
func ErrorMessageNotNil() predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotNull(FieldErrorMessage))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageEqualFold applies the EqualFold predicate on the "error_message" field.
|
||||||
|
func ErrorMessageEqualFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEqualFold(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorMessageContainsFold applies the ContainsFold predicate on the "error_message" field.
|
||||||
|
func ErrorMessageContainsFold(v string) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldContainsFold(FieldErrorMessage, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RetryCountEQ applies the EQ predicate on the "retry_count" field.
|
||||||
|
func RetryCountEQ(v int) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldRetryCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RetryCountNEQ applies the NEQ predicate on the "retry_count" field.
|
||||||
|
func RetryCountNEQ(v int) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldRetryCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RetryCountIn applies the In predicate on the "retry_count" field.
|
||||||
|
func RetryCountIn(vs ...int) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldRetryCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RetryCountNotIn applies the NotIn predicate on the "retry_count" field.
|
||||||
|
func RetryCountNotIn(vs ...int) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldRetryCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RetryCountGT applies the GT predicate on the "retry_count" field.
|
||||||
|
func RetryCountGT(v int) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldRetryCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RetryCountGTE applies the GTE predicate on the "retry_count" field.
|
||||||
|
func RetryCountGTE(v int) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldRetryCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RetryCountLT applies the LT predicate on the "retry_count" field.
|
||||||
|
func RetryCountLT(v int) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldRetryCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// RetryCountLTE applies the LTE predicate on the "retry_count" field.
|
||||||
|
func RetryCountLTE(v int) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldRetryCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtEQ applies the EQ predicate on the "created_at" field.
|
||||||
|
func CreatedAtEQ(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
|
||||||
|
func CreatedAtNEQ(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtIn applies the In predicate on the "created_at" field.
|
||||||
|
func CreatedAtIn(vs ...time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldCreatedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
|
||||||
|
func CreatedAtNotIn(vs ...time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldCreatedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtGT applies the GT predicate on the "created_at" field.
|
||||||
|
func CreatedAtGT(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtGTE applies the GTE predicate on the "created_at" field.
|
||||||
|
func CreatedAtGTE(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtLT applies the LT predicate on the "created_at" field.
|
||||||
|
func CreatedAtLT(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtLTE applies the LTE predicate on the "created_at" field.
|
||||||
|
func CreatedAtLTE(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtEQ applies the EQ predicate on the "completed_at" field.
|
||||||
|
func CompletedAtEQ(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldEQ(FieldCompletedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtNEQ applies the NEQ predicate on the "completed_at" field.
|
||||||
|
func CompletedAtNEQ(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNEQ(FieldCompletedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtIn applies the In predicate on the "completed_at" field.
|
||||||
|
func CompletedAtIn(vs ...time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIn(FieldCompletedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtNotIn applies the NotIn predicate on the "completed_at" field.
|
||||||
|
func CompletedAtNotIn(vs ...time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotIn(FieldCompletedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtGT applies the GT predicate on the "completed_at" field.
|
||||||
|
func CompletedAtGT(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGT(FieldCompletedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtGTE applies the GTE predicate on the "completed_at" field.
|
||||||
|
func CompletedAtGTE(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldGTE(FieldCompletedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtLT applies the LT predicate on the "completed_at" field.
|
||||||
|
func CompletedAtLT(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLT(FieldCompletedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtLTE applies the LTE predicate on the "completed_at" field.
|
||||||
|
func CompletedAtLTE(v time.Time) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldLTE(FieldCompletedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtIsNil applies the IsNil predicate on the "completed_at" field.
|
||||||
|
func CompletedAtIsNil() predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldIsNull(FieldCompletedAt))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CompletedAtNotNil applies the NotNil predicate on the "completed_at" field.
|
||||||
|
func CompletedAtNotNil() predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.FieldNotNull(FieldCompletedAt))
|
||||||
|
}
|
||||||
|
|
||||||
|
// And groups predicates with the AND operator between them.
|
||||||
|
func And(predicates ...predicate.SoraTask) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.AndPredicates(predicates...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Or groups predicates with the OR operator between them.
|
||||||
|
func Or(predicates ...predicate.SoraTask) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.OrPredicates(predicates...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not applies the not operator on the given predicate.
|
||||||
|
func Not(p predicate.SoraTask) predicate.SoraTask {
|
||||||
|
return predicate.SoraTask(sql.NotPredicates(p))
|
||||||
|
}
|
||||||
1189
backend/ent/soratask_create.go
Normal file
1189
backend/ent/soratask_create.go
Normal file
File diff suppressed because it is too large
Load Diff
88
backend/ent/soratask_delete.go
Normal file
88
backend/ent/soratask_delete.go
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soratask"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraTaskDelete is the builder for deleting a SoraTask entity.
|
||||||
|
type SoraTaskDelete struct {
|
||||||
|
config
|
||||||
|
hooks []Hook
|
||||||
|
mutation *SoraTaskMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraTaskDelete builder.
|
||||||
|
func (_d *SoraTaskDelete) Where(ps ...predicate.SoraTask) *SoraTaskDelete {
|
||||||
|
_d.mutation.Where(ps...)
|
||||||
|
return _d
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the deletion query and returns how many vertices were deleted.
|
||||||
|
func (_d *SoraTaskDelete) Exec(ctx context.Context) (int, error) {
|
||||||
|
return withHooks(ctx, _d.sqlExec, _d.mutation, _d.hooks)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_d *SoraTaskDelete) ExecX(ctx context.Context) int {
|
||||||
|
n, err := _d.Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_d *SoraTaskDelete) sqlExec(ctx context.Context) (int, error) {
|
||||||
|
_spec := sqlgraph.NewDeleteSpec(soratask.Table, sqlgraph.NewFieldSpec(soratask.FieldID, field.TypeInt64))
|
||||||
|
if ps := _d.mutation.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
affected, err := sqlgraph.DeleteNodes(ctx, _d.driver, _spec)
|
||||||
|
if err != nil && sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
_d.mutation.done = true
|
||||||
|
return affected, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraTaskDeleteOne is the builder for deleting a single SoraTask entity.
|
||||||
|
type SoraTaskDeleteOne struct {
|
||||||
|
_d *SoraTaskDelete
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraTaskDelete builder.
|
||||||
|
func (_d *SoraTaskDeleteOne) Where(ps ...predicate.SoraTask) *SoraTaskDeleteOne {
|
||||||
|
_d._d.mutation.Where(ps...)
|
||||||
|
return _d
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the deletion query.
|
||||||
|
func (_d *SoraTaskDeleteOne) Exec(ctx context.Context) error {
|
||||||
|
n, err := _d._d.Exec(ctx)
|
||||||
|
switch {
|
||||||
|
case err != nil:
|
||||||
|
return err
|
||||||
|
case n == 0:
|
||||||
|
return &NotFoundError{soratask.Label}
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_d *SoraTaskDeleteOne) ExecX(ctx context.Context) {
|
||||||
|
if err := _d.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
564
backend/ent/soratask_query.go
Normal file
564
backend/ent/soratask_query.go
Normal file
@@ -0,0 +1,564 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect"
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soratask"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraTaskQuery is the builder for querying SoraTask entities.
|
||||||
|
type SoraTaskQuery struct {
|
||||||
|
config
|
||||||
|
ctx *QueryContext
|
||||||
|
order []soratask.OrderOption
|
||||||
|
inters []Interceptor
|
||||||
|
predicates []predicate.SoraTask
|
||||||
|
modifiers []func(*sql.Selector)
|
||||||
|
// intermediate query (i.e. traversal path).
|
||||||
|
sql *sql.Selector
|
||||||
|
path func(context.Context) (*sql.Selector, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where adds a new predicate for the SoraTaskQuery builder.
|
||||||
|
func (_q *SoraTaskQuery) Where(ps ...predicate.SoraTask) *SoraTaskQuery {
|
||||||
|
_q.predicates = append(_q.predicates, ps...)
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Limit the number of records to be returned by this query.
|
||||||
|
func (_q *SoraTaskQuery) Limit(limit int) *SoraTaskQuery {
|
||||||
|
_q.ctx.Limit = &limit
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Offset to start from.
|
||||||
|
func (_q *SoraTaskQuery) Offset(offset int) *SoraTaskQuery {
|
||||||
|
_q.ctx.Offset = &offset
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unique configures the query builder to filter duplicate records on query.
|
||||||
|
// By default, unique is set to true, and can be disabled using this method.
|
||||||
|
func (_q *SoraTaskQuery) Unique(unique bool) *SoraTaskQuery {
|
||||||
|
_q.ctx.Unique = &unique
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Order specifies how the records should be ordered.
|
||||||
|
func (_q *SoraTaskQuery) Order(o ...soratask.OrderOption) *SoraTaskQuery {
|
||||||
|
_q.order = append(_q.order, o...)
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// First returns the first SoraTask entity from the query.
|
||||||
|
// Returns a *NotFoundError when no SoraTask was found.
|
||||||
|
func (_q *SoraTaskQuery) First(ctx context.Context) (*SoraTask, error) {
|
||||||
|
nodes, err := _q.Limit(1).All(setContextOp(ctx, _q.ctx, ent.OpQueryFirst))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(nodes) == 0 {
|
||||||
|
return nil, &NotFoundError{soratask.Label}
|
||||||
|
}
|
||||||
|
return nodes[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstX is like First, but panics if an error occurs.
|
||||||
|
func (_q *SoraTaskQuery) FirstX(ctx context.Context) *SoraTask {
|
||||||
|
node, err := _q.First(ctx)
|
||||||
|
if err != nil && !IsNotFound(err) {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstID returns the first SoraTask ID from the query.
|
||||||
|
// Returns a *NotFoundError when no SoraTask ID was found.
|
||||||
|
func (_q *SoraTaskQuery) FirstID(ctx context.Context) (id int64, err error) {
|
||||||
|
var ids []int64
|
||||||
|
if ids, err = _q.Limit(1).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryFirstID)); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(ids) == 0 {
|
||||||
|
err = &NotFoundError{soratask.Label}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
return ids[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstIDX is like FirstID, but panics if an error occurs.
|
||||||
|
func (_q *SoraTaskQuery) FirstIDX(ctx context.Context) int64 {
|
||||||
|
id, err := _q.FirstID(ctx)
|
||||||
|
if err != nil && !IsNotFound(err) {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only returns a single SoraTask entity found by the query, ensuring it only returns one.
|
||||||
|
// Returns a *NotSingularError when more than one SoraTask entity is found.
|
||||||
|
// Returns a *NotFoundError when no SoraTask entities are found.
|
||||||
|
func (_q *SoraTaskQuery) Only(ctx context.Context) (*SoraTask, error) {
|
||||||
|
nodes, err := _q.Limit(2).All(setContextOp(ctx, _q.ctx, ent.OpQueryOnly))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
switch len(nodes) {
|
||||||
|
case 1:
|
||||||
|
return nodes[0], nil
|
||||||
|
case 0:
|
||||||
|
return nil, &NotFoundError{soratask.Label}
|
||||||
|
default:
|
||||||
|
return nil, &NotSingularError{soratask.Label}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyX is like Only, but panics if an error occurs.
|
||||||
|
func (_q *SoraTaskQuery) OnlyX(ctx context.Context) *SoraTask {
|
||||||
|
node, err := _q.Only(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyID is like Only, but returns the only SoraTask ID in the query.
|
||||||
|
// Returns a *NotSingularError when more than one SoraTask ID is found.
|
||||||
|
// Returns a *NotFoundError when no entities are found.
|
||||||
|
func (_q *SoraTaskQuery) OnlyID(ctx context.Context) (id int64, err error) {
|
||||||
|
var ids []int64
|
||||||
|
if ids, err = _q.Limit(2).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryOnlyID)); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch len(ids) {
|
||||||
|
case 1:
|
||||||
|
id = ids[0]
|
||||||
|
case 0:
|
||||||
|
err = &NotFoundError{soratask.Label}
|
||||||
|
default:
|
||||||
|
err = &NotSingularError{soratask.Label}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyIDX is like OnlyID, but panics if an error occurs.
|
||||||
|
func (_q *SoraTaskQuery) OnlyIDX(ctx context.Context) int64 {
|
||||||
|
id, err := _q.OnlyID(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
// All executes the query and returns a list of SoraTasks.
|
||||||
|
func (_q *SoraTaskQuery) All(ctx context.Context) ([]*SoraTask, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryAll)
|
||||||
|
if err := _q.prepareQuery(ctx); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
qr := querierAll[[]*SoraTask, *SoraTaskQuery]()
|
||||||
|
return withInterceptors[[]*SoraTask](ctx, _q, qr, _q.inters)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AllX is like All, but panics if an error occurs.
|
||||||
|
func (_q *SoraTaskQuery) AllX(ctx context.Context) []*SoraTask {
|
||||||
|
nodes, err := _q.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return nodes
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDs executes the query and returns a list of SoraTask IDs.
|
||||||
|
func (_q *SoraTaskQuery) IDs(ctx context.Context) (ids []int64, err error) {
|
||||||
|
if _q.ctx.Unique == nil && _q.path != nil {
|
||||||
|
_q.Unique(true)
|
||||||
|
}
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryIDs)
|
||||||
|
if err = _q.Select(soratask.FieldID).Scan(ctx, &ids); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ids, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDsX is like IDs, but panics if an error occurs.
|
||||||
|
func (_q *SoraTaskQuery) IDsX(ctx context.Context) []int64 {
|
||||||
|
ids, err := _q.IDs(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return ids
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count returns the count of the given query.
|
||||||
|
func (_q *SoraTaskQuery) Count(ctx context.Context) (int, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryCount)
|
||||||
|
if err := _q.prepareQuery(ctx); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return withInterceptors[int](ctx, _q, querierCount[*SoraTaskQuery](), _q.inters)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CountX is like Count, but panics if an error occurs.
|
||||||
|
func (_q *SoraTaskQuery) CountX(ctx context.Context) int {
|
||||||
|
count, err := _q.Count(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return count
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exist returns true if the query has elements in the graph.
|
||||||
|
func (_q *SoraTaskQuery) Exist(ctx context.Context) (bool, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryExist)
|
||||||
|
switch _, err := _q.FirstID(ctx); {
|
||||||
|
case IsNotFound(err):
|
||||||
|
return false, nil
|
||||||
|
case err != nil:
|
||||||
|
return false, fmt.Errorf("ent: check existence: %w", err)
|
||||||
|
default:
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExistX is like Exist, but panics if an error occurs.
|
||||||
|
func (_q *SoraTaskQuery) ExistX(ctx context.Context) bool {
|
||||||
|
exist, err := _q.Exist(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return exist
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clone returns a duplicate of the SoraTaskQuery builder, including all associated steps. It can be
|
||||||
|
// used to prepare common query builders and use them differently after the clone is made.
|
||||||
|
func (_q *SoraTaskQuery) Clone() *SoraTaskQuery {
|
||||||
|
if _q == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &SoraTaskQuery{
|
||||||
|
config: _q.config,
|
||||||
|
ctx: _q.ctx.Clone(),
|
||||||
|
order: append([]soratask.OrderOption{}, _q.order...),
|
||||||
|
inters: append([]Interceptor{}, _q.inters...),
|
||||||
|
predicates: append([]predicate.SoraTask{}, _q.predicates...),
|
||||||
|
// clone intermediate query.
|
||||||
|
sql: _q.sql.Clone(),
|
||||||
|
path: _q.path,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GroupBy is used to group vertices by one or more fields/columns.
|
||||||
|
// It is often used with aggregate functions, like: count, max, mean, min, sum.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// var v []struct {
|
||||||
|
// TaskID string `json:"task_id,omitempty"`
|
||||||
|
// Count int `json:"count,omitempty"`
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// client.SoraTask.Query().
|
||||||
|
// GroupBy(soratask.FieldTaskID).
|
||||||
|
// Aggregate(ent.Count()).
|
||||||
|
// Scan(ctx, &v)
|
||||||
|
func (_q *SoraTaskQuery) GroupBy(field string, fields ...string) *SoraTaskGroupBy {
|
||||||
|
_q.ctx.Fields = append([]string{field}, fields...)
|
||||||
|
grbuild := &SoraTaskGroupBy{build: _q}
|
||||||
|
grbuild.flds = &_q.ctx.Fields
|
||||||
|
grbuild.label = soratask.Label
|
||||||
|
grbuild.scan = grbuild.Scan
|
||||||
|
return grbuild
|
||||||
|
}
|
||||||
|
|
||||||
|
// Select allows the selection one or more fields/columns for the given query,
|
||||||
|
// instead of selecting all fields in the entity.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// var v []struct {
|
||||||
|
// TaskID string `json:"task_id,omitempty"`
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// client.SoraTask.Query().
|
||||||
|
// Select(soratask.FieldTaskID).
|
||||||
|
// Scan(ctx, &v)
|
||||||
|
func (_q *SoraTaskQuery) Select(fields ...string) *SoraTaskSelect {
|
||||||
|
_q.ctx.Fields = append(_q.ctx.Fields, fields...)
|
||||||
|
sbuild := &SoraTaskSelect{SoraTaskQuery: _q}
|
||||||
|
sbuild.label = soratask.Label
|
||||||
|
sbuild.flds, sbuild.scan = &_q.ctx.Fields, sbuild.Scan
|
||||||
|
return sbuild
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate returns a SoraTaskSelect configured with the given aggregations.
|
||||||
|
func (_q *SoraTaskQuery) Aggregate(fns ...AggregateFunc) *SoraTaskSelect {
|
||||||
|
return _q.Select().Aggregate(fns...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraTaskQuery) prepareQuery(ctx context.Context) error {
|
||||||
|
for _, inter := range _q.inters {
|
||||||
|
if inter == nil {
|
||||||
|
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
|
||||||
|
}
|
||||||
|
if trv, ok := inter.(Traverser); ok {
|
||||||
|
if err := trv.Traverse(ctx, _q); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, f := range _q.ctx.Fields {
|
||||||
|
if !soratask.ValidColumn(f) {
|
||||||
|
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _q.path != nil {
|
||||||
|
prev, err := _q.path(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_q.sql = prev
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraTaskQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*SoraTask, error) {
|
||||||
|
var (
|
||||||
|
nodes = []*SoraTask{}
|
||||||
|
_spec = _q.querySpec()
|
||||||
|
)
|
||||||
|
_spec.ScanValues = func(columns []string) ([]any, error) {
|
||||||
|
return (*SoraTask).scanValues(nil, columns)
|
||||||
|
}
|
||||||
|
_spec.Assign = func(columns []string, values []any) error {
|
||||||
|
node := &SoraTask{config: _q.config}
|
||||||
|
nodes = append(nodes, node)
|
||||||
|
return node.assignValues(columns, values)
|
||||||
|
}
|
||||||
|
if len(_q.modifiers) > 0 {
|
||||||
|
_spec.Modifiers = _q.modifiers
|
||||||
|
}
|
||||||
|
for i := range hooks {
|
||||||
|
hooks[i](ctx, _spec)
|
||||||
|
}
|
||||||
|
if err := sqlgraph.QueryNodes(ctx, _q.driver, _spec); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(nodes) == 0 {
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// sqlCount returns the number of rows matching the query predicates.
// When specific fields are selected, the count honors the query's Unique
// flag (COUNT over distinct selected columns).
func (_q *SoraTaskQuery) sqlCount(ctx context.Context) (int, error) {
	_spec := _q.querySpec()
	if len(_q.modifiers) > 0 {
		_spec.Modifiers = _q.modifiers
	}
	// Counting is performed over the explicitly selected columns (if any).
	_spec.Node.Columns = _q.ctx.Fields
	if len(_q.ctx.Fields) > 0 {
		_spec.Unique = _q.ctx.Unique != nil && *_q.ctx.Unique
	}
	return sqlgraph.CountNodes(ctx, _q.driver, _spec)
}
|
||||||
|
|
||||||
|
func (_q *SoraTaskQuery) querySpec() *sqlgraph.QuerySpec {
|
||||||
|
_spec := sqlgraph.NewQuerySpec(soratask.Table, soratask.Columns, sqlgraph.NewFieldSpec(soratask.FieldID, field.TypeInt64))
|
||||||
|
_spec.From = _q.sql
|
||||||
|
if unique := _q.ctx.Unique; unique != nil {
|
||||||
|
_spec.Unique = *unique
|
||||||
|
} else if _q.path != nil {
|
||||||
|
_spec.Unique = true
|
||||||
|
}
|
||||||
|
if fields := _q.ctx.Fields; len(fields) > 0 {
|
||||||
|
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, soratask.FieldID)
|
||||||
|
for i := range fields {
|
||||||
|
if fields[i] != soratask.FieldID {
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ps := _q.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if limit := _q.ctx.Limit; limit != nil {
|
||||||
|
_spec.Limit = *limit
|
||||||
|
}
|
||||||
|
if offset := _q.ctx.Offset; offset != nil {
|
||||||
|
_spec.Offset = *offset
|
||||||
|
}
|
||||||
|
if ps := _q.order; len(ps) > 0 {
|
||||||
|
_spec.Order = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return _spec
|
||||||
|
}
|
||||||
|
|
||||||
|
// sqlQuery builds the raw SQL selector for the query. Clause application
// order matters here: modifiers, predicates and ordering are applied before
// OFFSET/LIMIT so the final statement is well-formed.
func (_q *SoraTaskQuery) sqlQuery(ctx context.Context) *sql.Selector {
	builder := sql.Dialect(_q.driver.Dialect())
	t1 := builder.Table(soratask.Table)
	columns := _q.ctx.Fields
	if len(columns) == 0 {
		// No explicit selection: fetch every SoraTask column.
		columns = soratask.Columns
	}
	selector := builder.Select(t1.Columns(columns...)...).From(t1)
	if _q.sql != nil {
		// An intermediate selector (from Clone/traversal) replaces the fresh one.
		selector = _q.sql
		selector.Select(selector.Columns(columns...)...)
	}
	if _q.ctx.Unique != nil && *_q.ctx.Unique {
		selector.Distinct()
	}
	for _, m := range _q.modifiers {
		m(selector)
	}
	for _, p := range _q.predicates {
		p(selector)
	}
	for _, p := range _q.order {
		p(selector)
	}
	if offset := _q.ctx.Offset; offset != nil {
		// limit is mandatory for offset clause. We start
		// with default value, and override it below if needed.
		selector.Offset(*offset).Limit(math.MaxInt32)
	}
	if limit := _q.ctx.Limit; limit != nil {
		selector.Limit(*limit)
	}
	return selector
}
|
||||||
|
|
||||||
|
// ForUpdate locks the selected rows against concurrent updates, and prevent them from being
// updated, deleted or "selected ... for update" by other sessions, until the transaction is
// either committed or rolled-back.
func (_q *SoraTaskQuery) ForUpdate(opts ...sql.LockOption) *SoraTaskQuery {
	// NOTE(review): DISTINCT is disabled on Postgres here — presumably because
	// SELECT DISTINCT is incompatible with FOR UPDATE on that backend; confirm
	// against the PostgreSQL locking-clause documentation.
	if _q.driver.Dialect() == dialect.Postgres {
		_q.Unique(false)
	}
	_q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
		s.ForUpdate(opts...)
	})
	return _q
}
|
||||||
|
|
||||||
|
// ForShare behaves similarly to ForUpdate, except that it acquires a shared mode lock
// on any rows that are read. Other sessions can read the rows, but cannot modify them
// until your transaction commits.
func (_q *SoraTaskQuery) ForShare(opts ...sql.LockOption) *SoraTaskQuery {
	// NOTE(review): as with ForUpdate, DISTINCT is disabled on Postgres — the
	// locking clause presumably conflicts with SELECT DISTINCT there.
	if _q.driver.Dialect() == dialect.Postgres {
		_q.Unique(false)
	}
	_q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
		s.ForShare(opts...)
	})
	return _q
}
|
||||||
|
|
||||||
|
// SoraTaskGroupBy is the group-by builder for SoraTask entities.
type SoraTaskGroupBy struct {
	// selector carries the shared group-by/aggregation state (fields, fns, scan).
	selector
	// build is the underlying query whose matched rows are grouped.
	build *SoraTaskQuery
}
|
||||||
|
|
||||||
|
// Aggregate adds the given aggregation functions to the group-by query.
|
||||||
|
func (_g *SoraTaskGroupBy) Aggregate(fns ...AggregateFunc) *SoraTaskGroupBy {
|
||||||
|
_g.fns = append(_g.fns, fns...)
|
||||||
|
return _g
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan applies the selector query and scans the result into the given value.
|
||||||
|
func (_g *SoraTaskGroupBy) Scan(ctx context.Context, v any) error {
|
||||||
|
ctx = setContextOp(ctx, _g.build.ctx, ent.OpQueryGroupBy)
|
||||||
|
if err := _g.build.prepareQuery(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return scanWithInterceptors[*SoraTaskQuery, *SoraTaskGroupBy](ctx, _g.build, _g, _g.build.inters, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// sqlScan builds the GROUP BY statement (grouped columns plus aggregations),
// executes it, and scans the rows into v.
func (_g *SoraTaskGroupBy) sqlScan(ctx context.Context, root *SoraTaskQuery, v any) error {
	selector := root.sqlQuery(ctx).Select()
	aggregation := make([]string, 0, len(_g.fns))
	for _, fn := range _g.fns {
		aggregation = append(aggregation, fn(selector))
	}
	if len(selector.SelectedColumns()) == 0 {
		// Default selection: the grouped fields followed by the aggregations.
		columns := make([]string, 0, len(*_g.flds)+len(_g.fns))
		for _, f := range *_g.flds {
			columns = append(columns, selector.C(f))
		}
		columns = append(columns, aggregation...)
		selector.Select(columns...)
	}
	selector.GroupBy(selector.Columns(*_g.flds...)...)
	// Surface any deferred selector-construction error before hitting the driver.
	if err := selector.Err(); err != nil {
		return err
	}
	rows := &sql.Rows{}
	query, args := selector.Query()
	if err := _g.build.driver.Query(ctx, query, args, rows); err != nil {
		return err
	}
	defer rows.Close()
	return sql.ScanSlice(rows, v)
}
|
||||||
|
|
||||||
|
// SoraTaskSelect is the builder for selecting fields of SoraTask entities.
type SoraTaskSelect struct {
	// The embedded query supplies predicates, ordering and execution.
	*SoraTaskQuery
	// selector carries the shared field-selection/aggregation state.
	selector
}
|
||||||
|
|
||||||
|
// Aggregate adds the given aggregation functions to the selector query.
|
||||||
|
func (_s *SoraTaskSelect) Aggregate(fns ...AggregateFunc) *SoraTaskSelect {
|
||||||
|
_s.fns = append(_s.fns, fns...)
|
||||||
|
return _s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan applies the selector query and scans the result into the given value.
|
||||||
|
func (_s *SoraTaskSelect) Scan(ctx context.Context, v any) error {
|
||||||
|
ctx = setContextOp(ctx, _s.ctx, ent.OpQuerySelect)
|
||||||
|
if err := _s.prepareQuery(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return scanWithInterceptors[*SoraTaskQuery, *SoraTaskSelect](ctx, _s.SoraTaskQuery, _s, _s.inters, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// sqlScan executes the field-selection query (optionally extended with
// aggregations) and scans the rows into v.
func (_s *SoraTaskSelect) sqlScan(ctx context.Context, root *SoraTaskQuery, v any) error {
	selector := root.sqlQuery(ctx)
	aggregation := make([]string, 0, len(_s.fns))
	for _, fn := range _s.fns {
		aggregation = append(aggregation, fn(selector))
	}
	// With no selected fields, aggregations replace the selection entirely;
	// otherwise they are appended after the selected columns.
	switch n := len(*_s.selector.flds); {
	case n == 0 && len(aggregation) > 0:
		selector.Select(aggregation...)
	case n != 0 && len(aggregation) > 0:
		selector.AppendSelect(aggregation...)
	}
	rows := &sql.Rows{}
	query, args := selector.Query()
	if err := _s.driver.Query(ctx, query, args, rows); err != nil {
		return err
	}
	defer rows.Close()
	return sql.ScanSlice(rows, v)
}
|
||||||
710
backend/ent/soratask_update.go
Normal file
710
backend/ent/soratask_update.go
Normal file
@@ -0,0 +1,710 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/soratask"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraTaskUpdate is the builder for updating SoraTask entities.
type SoraTaskUpdate struct {
	config
	// hooks run around the update operation.
	hooks []Hook
	// mutation records the pending field changes and predicates.
	mutation *SoraTaskMutation
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraTaskUpdate builder.
// Only rows matching every predicate are updated by Save/Exec.
func (_u *SoraTaskUpdate) Where(ps ...predicate.SoraTask) *SoraTaskUpdate {
	_u.mutation.Where(ps...)
	return _u
}
|
||||||
|
|
||||||
|
// SetTaskID sets the "task_id" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetTaskID(v string) *SoraTaskUpdate {
|
||||||
|
_u.mutation.SetTaskID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTaskID sets the "task_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableTaskID(v *string) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTaskID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetAccountID sets the "account_id" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetAccountID(v int64) *SoraTaskUpdate {
|
||||||
|
_u.mutation.ResetAccountID()
|
||||||
|
_u.mutation.SetAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableAccountID sets the "account_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableAccountID(v *int64) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetAccountID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddAccountID adds value to the "account_id" field.
|
||||||
|
func (_u *SoraTaskUpdate) AddAccountID(v int64) *SoraTaskUpdate {
|
||||||
|
_u.mutation.AddAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetModel sets the "model" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetModel(v string) *SoraTaskUpdate {
|
||||||
|
_u.mutation.SetModel(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableModel sets the "model" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableModel(v *string) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetModel(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetPrompt sets the "prompt" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetPrompt(v string) *SoraTaskUpdate {
|
||||||
|
_u.mutation.SetPrompt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillablePrompt sets the "prompt" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillablePrompt(v *string) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetPrompt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetStatus sets the "status" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetStatus(v string) *SoraTaskUpdate {
|
||||||
|
_u.mutation.SetStatus(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableStatus sets the "status" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableStatus(v *string) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetStatus(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetProgress sets the "progress" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetProgress(v float64) *SoraTaskUpdate {
|
||||||
|
_u.mutation.ResetProgress()
|
||||||
|
_u.mutation.SetProgress(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableProgress sets the "progress" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableProgress(v *float64) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetProgress(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddProgress adds value to the "progress" field.
|
||||||
|
func (_u *SoraTaskUpdate) AddProgress(v float64) *SoraTaskUpdate {
|
||||||
|
_u.mutation.AddProgress(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetResultUrls sets the "result_urls" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetResultUrls(v string) *SoraTaskUpdate {
|
||||||
|
_u.mutation.SetResultUrls(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableResultUrls sets the "result_urls" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableResultUrls(v *string) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetResultUrls(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearResultUrls clears the value of the "result_urls" field.
|
||||||
|
func (_u *SoraTaskUpdate) ClearResultUrls() *SoraTaskUpdate {
|
||||||
|
_u.mutation.ClearResultUrls()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetErrorMessage sets the "error_message" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetErrorMessage(v string) *SoraTaskUpdate {
|
||||||
|
_u.mutation.SetErrorMessage(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableErrorMessage sets the "error_message" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableErrorMessage(v *string) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetErrorMessage(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearErrorMessage clears the value of the "error_message" field.
|
||||||
|
func (_u *SoraTaskUpdate) ClearErrorMessage() *SoraTaskUpdate {
|
||||||
|
_u.mutation.ClearErrorMessage()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetRetryCount sets the "retry_count" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetRetryCount(v int) *SoraTaskUpdate {
|
||||||
|
_u.mutation.ResetRetryCount()
|
||||||
|
_u.mutation.SetRetryCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableRetryCount sets the "retry_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableRetryCount(v *int) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetRetryCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddRetryCount adds value to the "retry_count" field.
|
||||||
|
func (_u *SoraTaskUpdate) AddRetryCount(v int) *SoraTaskUpdate {
|
||||||
|
_u.mutation.AddRetryCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCreatedAt sets the "created_at" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetCreatedAt(v time.Time) *SoraTaskUpdate {
|
||||||
|
_u.mutation.SetCreatedAt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableCreatedAt(v *time.Time) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCreatedAt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCompletedAt sets the "completed_at" field.
|
||||||
|
func (_u *SoraTaskUpdate) SetCompletedAt(v time.Time) *SoraTaskUpdate {
|
||||||
|
_u.mutation.SetCompletedAt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableCompletedAt sets the "completed_at" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdate) SetNillableCompletedAt(v *time.Time) *SoraTaskUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCompletedAt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearCompletedAt clears the value of the "completed_at" field.
|
||||||
|
func (_u *SoraTaskUpdate) ClearCompletedAt() *SoraTaskUpdate {
|
||||||
|
_u.mutation.ClearCompletedAt()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mutation returns the SoraTaskMutation object of the builder.
// It exposes the recorded changes, e.g. for use inside hooks.
func (_u *SoraTaskUpdate) Mutation() *SoraTaskMutation {
	return _u.mutation
}
|
||||||
|
|
||||||
|
// Save executes the query and returns the number of nodes affected by the update operation.
// Registered hooks are chained around sqlSave via withHooks.
func (_u *SoraTaskUpdate) Save(ctx context.Context) (int, error) {
	return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
}
|
||||||
|
|
||||||
|
// SaveX is like Save, but panics if an error occurs.
|
||||||
|
func (_u *SoraTaskUpdate) SaveX(ctx context.Context) int {
|
||||||
|
affected, err := _u.Save(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return affected
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the query.
|
||||||
|
func (_u *SoraTaskUpdate) Exec(ctx context.Context) error {
|
||||||
|
_, err := _u.Save(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_u *SoraTaskUpdate) ExecX(ctx context.Context) {
|
||||||
|
if err := _u.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check runs all checks and user-defined validators on the builder.
// Only fields actually set on the mutation are validated; validation
// happens before any SQL is issued.
func (_u *SoraTaskUpdate) check() error {
	if v, ok := _u.mutation.TaskID(); ok {
		if err := soratask.TaskIDValidator(v); err != nil {
			return &ValidationError{Name: "task_id", err: fmt.Errorf(`ent: validator failed for field "SoraTask.task_id": %w`, err)}
		}
	}
	if v, ok := _u.mutation.Model(); ok {
		if err := soratask.ModelValidator(v); err != nil {
			return &ValidationError{Name: "model", err: fmt.Errorf(`ent: validator failed for field "SoraTask.model": %w`, err)}
		}
	}
	if v, ok := _u.mutation.Status(); ok {
		if err := soratask.StatusValidator(v); err != nil {
			return &ValidationError{Name: "status", err: fmt.Errorf(`ent: validator failed for field "SoraTask.status": %w`, err)}
		}
	}
	return nil
}
|
||||||
|
|
||||||
|
// sqlSave validates the mutation, translates it into an UpdateSpec
// (set/add/clear per touched field), executes it, and returns the number
// of affected rows. Driver errors are mapped to ent's NotFoundError and
// ConstraintError types so callers can match on them.
func (_u *SoraTaskUpdate) sqlSave(ctx context.Context) (_node int, err error) {
	if err := _u.check(); err != nil {
		return _node, err
	}
	_spec := sqlgraph.NewUpdateSpec(soratask.Table, soratask.Columns, sqlgraph.NewFieldSpec(soratask.FieldID, field.TypeInt64))
	if ps := _u.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	// Plain assignments recorded on the mutation.
	if value, ok := _u.mutation.TaskID(); ok {
		_spec.SetField(soratask.FieldTaskID, field.TypeString, value)
	}
	if value, ok := _u.mutation.AccountID(); ok {
		_spec.SetField(soratask.FieldAccountID, field.TypeInt64, value)
	}
	// AddField emits an arithmetic update (column = column + value).
	if value, ok := _u.mutation.AddedAccountID(); ok {
		_spec.AddField(soratask.FieldAccountID, field.TypeInt64, value)
	}
	if value, ok := _u.mutation.Model(); ok {
		_spec.SetField(soratask.FieldModel, field.TypeString, value)
	}
	if value, ok := _u.mutation.Prompt(); ok {
		_spec.SetField(soratask.FieldPrompt, field.TypeString, value)
	}
	if value, ok := _u.mutation.Status(); ok {
		_spec.SetField(soratask.FieldStatus, field.TypeString, value)
	}
	if value, ok := _u.mutation.Progress(); ok {
		_spec.SetField(soratask.FieldProgress, field.TypeFloat64, value)
	}
	if value, ok := _u.mutation.AddedProgress(); ok {
		_spec.AddField(soratask.FieldProgress, field.TypeFloat64, value)
	}
	if value, ok := _u.mutation.ResultUrls(); ok {
		_spec.SetField(soratask.FieldResultUrls, field.TypeString, value)
	}
	// ClearField sets the nullable column back to NULL.
	if _u.mutation.ResultUrlsCleared() {
		_spec.ClearField(soratask.FieldResultUrls, field.TypeString)
	}
	if value, ok := _u.mutation.ErrorMessage(); ok {
		_spec.SetField(soratask.FieldErrorMessage, field.TypeString, value)
	}
	if _u.mutation.ErrorMessageCleared() {
		_spec.ClearField(soratask.FieldErrorMessage, field.TypeString)
	}
	if value, ok := _u.mutation.RetryCount(); ok {
		_spec.SetField(soratask.FieldRetryCount, field.TypeInt, value)
	}
	if value, ok := _u.mutation.AddedRetryCount(); ok {
		_spec.AddField(soratask.FieldRetryCount, field.TypeInt, value)
	}
	if value, ok := _u.mutation.CreatedAt(); ok {
		_spec.SetField(soratask.FieldCreatedAt, field.TypeTime, value)
	}
	if value, ok := _u.mutation.CompletedAt(); ok {
		_spec.SetField(soratask.FieldCompletedAt, field.TypeTime, value)
	}
	if _u.mutation.CompletedAtCleared() {
		_spec.ClearField(soratask.FieldCompletedAt, field.TypeTime)
	}
	if _node, err = sqlgraph.UpdateNodes(ctx, _u.driver, _spec); err != nil {
		if _, ok := err.(*sqlgraph.NotFoundError); ok {
			err = &NotFoundError{soratask.Label}
		} else if sqlgraph.IsConstraintError(err) {
			err = &ConstraintError{msg: err.Error(), wrap: err}
		}
		return 0, err
	}
	// Mark the mutation as consumed so it cannot be replayed.
	_u.mutation.done = true
	return _node, nil
}
|
||||||
|
|
||||||
|
// SoraTaskUpdateOne is the builder for updating a single SoraTask entity.
|
||||||
|
type SoraTaskUpdateOne struct {
|
||||||
|
config
|
||||||
|
fields []string
|
||||||
|
hooks []Hook
|
||||||
|
mutation *SoraTaskMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTaskID sets the "task_id" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetTaskID(v string) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.SetTaskID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTaskID sets the "task_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableTaskID(v *string) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTaskID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetAccountID sets the "account_id" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetAccountID(v int64) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.ResetAccountID()
|
||||||
|
_u.mutation.SetAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableAccountID sets the "account_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableAccountID(v *int64) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetAccountID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddAccountID adds value to the "account_id" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) AddAccountID(v int64) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.AddAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetModel sets the "model" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetModel(v string) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.SetModel(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableModel sets the "model" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableModel(v *string) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetModel(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetPrompt sets the "prompt" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetPrompt(v string) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.SetPrompt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillablePrompt sets the "prompt" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillablePrompt(v *string) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetPrompt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetStatus sets the "status" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetStatus(v string) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.SetStatus(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableStatus sets the "status" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableStatus(v *string) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetStatus(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetProgress sets the "progress" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetProgress(v float64) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.ResetProgress()
|
||||||
|
_u.mutation.SetProgress(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableProgress sets the "progress" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableProgress(v *float64) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetProgress(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddProgress adds value to the "progress" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) AddProgress(v float64) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.AddProgress(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetResultUrls sets the "result_urls" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetResultUrls(v string) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.SetResultUrls(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableResultUrls sets the "result_urls" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableResultUrls(v *string) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetResultUrls(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearResultUrls clears the value of the "result_urls" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) ClearResultUrls() *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.ClearResultUrls()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetErrorMessage sets the "error_message" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetErrorMessage(v string) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.SetErrorMessage(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableErrorMessage sets the "error_message" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableErrorMessage(v *string) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetErrorMessage(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearErrorMessage clears the value of the "error_message" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) ClearErrorMessage() *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.ClearErrorMessage()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetRetryCount sets the "retry_count" field.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetRetryCount(v int) *SoraTaskUpdateOne {
|
||||||
|
_u.mutation.ResetRetryCount()
|
||||||
|
_u.mutation.SetRetryCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableRetryCount sets the "retry_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableRetryCount(v *int) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetRetryCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddRetryCount adds value to the "retry_count" field.
// The increment is applied atomically in SQL (column = column + v) on Save.
func (_u *SoraTaskUpdateOne) AddRetryCount(v int) *SoraTaskUpdateOne {
	_u.mutation.AddRetryCount(v)
	return _u
}
|
||||||
|
|
||||||
|
// SetCreatedAt sets the "created_at" field.
// The value is recorded on the underlying mutation and applied on Save.
func (_u *SoraTaskUpdateOne) SetCreatedAt(v time.Time) *SoraTaskUpdateOne {
	_u.mutation.SetCreatedAt(v)
	return _u
}
|
||||||
|
|
||||||
|
// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableCreatedAt(v *time.Time) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCreatedAt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCompletedAt sets the "completed_at" field.
// The value is recorded on the underlying mutation and applied on Save.
func (_u *SoraTaskUpdateOne) SetCompletedAt(v time.Time) *SoraTaskUpdateOne {
	_u.mutation.SetCompletedAt(v)
	return _u
}
|
||||||
|
|
||||||
|
// SetNillableCompletedAt sets the "completed_at" field if the given value is not nil.
|
||||||
|
func (_u *SoraTaskUpdateOne) SetNillableCompletedAt(v *time.Time) *SoraTaskUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetCompletedAt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearCompletedAt clears the value of the "completed_at" field.
// On Save this translates into setting the column to NULL.
func (_u *SoraTaskUpdateOne) ClearCompletedAt() *SoraTaskUpdateOne {
	_u.mutation.ClearCompletedAt()
	return _u
}
|
||||||
|
|
||||||
|
// Mutation returns the SoraTaskMutation object of the builder.
// It exposes the raw mutation for hooks and interceptors.
func (_u *SoraTaskUpdateOne) Mutation() *SoraTaskMutation {
	return _u.mutation
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraTaskUpdate builder.
// Predicates are ANDed together with the entity-ID match when the row
// is updated, guarding against concurrent state changes.
func (_u *SoraTaskUpdateOne) Where(ps ...predicate.SoraTask) *SoraTaskUpdateOne {
	_u.mutation.Where(ps...)
	return _u
}
|
||||||
|
|
||||||
|
// Select allows selecting one or more fields (columns) of the returned entity.
|
||||||
|
// The default is selecting all fields defined in the entity schema.
|
||||||
|
func (_u *SoraTaskUpdateOne) Select(field string, fields ...string) *SoraTaskUpdateOne {
|
||||||
|
_u.fields = append([]string{field}, fields...)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save executes the query and returns the updated SoraTask entity.
// All registered hooks run around the underlying sqlSave call.
func (_u *SoraTaskUpdateOne) Save(ctx context.Context) (*SoraTask, error) {
	return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
}
|
||||||
|
|
||||||
|
// SaveX is like Save, but panics if an error occurs.
|
||||||
|
func (_u *SoraTaskUpdateOne) SaveX(ctx context.Context) *SoraTask {
|
||||||
|
node, err := _u.Save(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the query on the entity.
|
||||||
|
func (_u *SoraTaskUpdateOne) Exec(ctx context.Context) error {
|
||||||
|
_, err := _u.Save(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_u *SoraTaskUpdateOne) ExecX(ctx context.Context) {
|
||||||
|
if err := _u.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check runs all checks and user-defined validators on the builder.
// Only fields actually set on the mutation are validated; unset fields
// are skipped. The first failing validator aborts with a ValidationError
// naming the offending field.
func (_u *SoraTaskUpdateOne) check() error {
	if v, ok := _u.mutation.TaskID(); ok {
		if err := soratask.TaskIDValidator(v); err != nil {
			return &ValidationError{Name: "task_id", err: fmt.Errorf(`ent: validator failed for field "SoraTask.task_id": %w`, err)}
		}
	}
	if v, ok := _u.mutation.Model(); ok {
		if err := soratask.ModelValidator(v); err != nil {
			return &ValidationError{Name: "model", err: fmt.Errorf(`ent: validator failed for field "SoraTask.model": %w`, err)}
		}
	}
	if v, ok := _u.mutation.Status(); ok {
		if err := soratask.StatusValidator(v); err != nil {
			return &ValidationError{Name: "status", err: fmt.Errorf(`ent: validator failed for field "SoraTask.status": %w`, err)}
		}
	}
	return nil
}
|
||||||
|
|
||||||
|
// sqlSave validates the builder, translates the recorded mutation into an
// SQL update spec, and executes it against the driver, returning the
// freshly scanned updated entity.
func (_u *SoraTaskUpdateOne) sqlSave(ctx context.Context) (_node *SoraTask, err error) {
	if err := _u.check(); err != nil {
		return _node, err
	}
	_spec := sqlgraph.NewUpdateSpec(soratask.Table, soratask.Columns, sqlgraph.NewFieldSpec(soratask.FieldID, field.TypeInt64))
	// UpdateOne requires a concrete entity ID on the mutation.
	id, ok := _u.mutation.ID()
	if !ok {
		return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "SoraTask.id" for update`)}
	}
	_spec.Node.ID.Value = id
	// Honor a custom Select(...) projection; the ID column is always included.
	if fields := _u.fields; len(fields) > 0 {
		_spec.Node.Columns = make([]string, 0, len(fields))
		_spec.Node.Columns = append(_spec.Node.Columns, soratask.FieldID)
		for _, f := range fields {
			if !soratask.ValidColumn(f) {
				return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
			}
			if f != soratask.FieldID {
				_spec.Node.Columns = append(_spec.Node.Columns, f)
			}
		}
	}
	// Apply any Where(...) predicates to the update statement.
	if ps := _u.mutation.predicates; len(ps) > 0 {
		_spec.Predicate = func(selector *sql.Selector) {
			for i := range ps {
				ps[i](selector)
			}
		}
	}
	// Copy every recorded field operation (set / add / clear) onto the spec.
	if value, ok := _u.mutation.TaskID(); ok {
		_spec.SetField(soratask.FieldTaskID, field.TypeString, value)
	}
	if value, ok := _u.mutation.AccountID(); ok {
		_spec.SetField(soratask.FieldAccountID, field.TypeInt64, value)
	}
	if value, ok := _u.mutation.AddedAccountID(); ok {
		_spec.AddField(soratask.FieldAccountID, field.TypeInt64, value)
	}
	if value, ok := _u.mutation.Model(); ok {
		_spec.SetField(soratask.FieldModel, field.TypeString, value)
	}
	if value, ok := _u.mutation.Prompt(); ok {
		_spec.SetField(soratask.FieldPrompt, field.TypeString, value)
	}
	if value, ok := _u.mutation.Status(); ok {
		_spec.SetField(soratask.FieldStatus, field.TypeString, value)
	}
	if value, ok := _u.mutation.Progress(); ok {
		_spec.SetField(soratask.FieldProgress, field.TypeFloat64, value)
	}
	if value, ok := _u.mutation.AddedProgress(); ok {
		_spec.AddField(soratask.FieldProgress, field.TypeFloat64, value)
	}
	if value, ok := _u.mutation.ResultUrls(); ok {
		_spec.SetField(soratask.FieldResultUrls, field.TypeString, value)
	}
	if _u.mutation.ResultUrlsCleared() {
		_spec.ClearField(soratask.FieldResultUrls, field.TypeString)
	}
	if value, ok := _u.mutation.ErrorMessage(); ok {
		_spec.SetField(soratask.FieldErrorMessage, field.TypeString, value)
	}
	if _u.mutation.ErrorMessageCleared() {
		_spec.ClearField(soratask.FieldErrorMessage, field.TypeString)
	}
	if value, ok := _u.mutation.RetryCount(); ok {
		_spec.SetField(soratask.FieldRetryCount, field.TypeInt, value)
	}
	if value, ok := _u.mutation.AddedRetryCount(); ok {
		_spec.AddField(soratask.FieldRetryCount, field.TypeInt, value)
	}
	if value, ok := _u.mutation.CreatedAt(); ok {
		_spec.SetField(soratask.FieldCreatedAt, field.TypeTime, value)
	}
	if value, ok := _u.mutation.CompletedAt(); ok {
		_spec.SetField(soratask.FieldCompletedAt, field.TypeTime, value)
	}
	if _u.mutation.CompletedAtCleared() {
		_spec.ClearField(soratask.FieldCompletedAt, field.TypeTime)
	}
	// Wire the returned row back into a new SoraTask entity.
	_node = &SoraTask{config: _u.config}
	_spec.Assign = _node.assignValues
	_spec.ScanValues = _node.scanValues
	if err = sqlgraph.UpdateNode(ctx, _u.driver, _spec); err != nil {
		// Normalize driver errors into the package's typed errors.
		if _, ok := err.(*sqlgraph.NotFoundError); ok {
			err = &NotFoundError{soratask.Label}
		} else if sqlgraph.IsConstraintError(err) {
			err = &ConstraintError{msg: err.Error(), wrap: err}
		}
		return nil, err
	}
	_u.mutation.done = true
	return _node, nil
}
|
||||||
231
backend/ent/sorausagestat.go
Normal file
231
backend/ent/sorausagestat.go
Normal file
@@ -0,0 +1,231 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/sorausagestat"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraUsageStat is the model entity for the SoraUsageStat schema.
// Nullable columns are represented as pointer fields (nil means NULL).
type SoraUsageStat struct {
	config `json:"-"`
	// ID of the ent.
	ID int64 `json:"id,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// UpdatedAt holds the value of the "updated_at" field.
	UpdatedAt time.Time `json:"updated_at,omitempty"`
	// AccountID is the ID referencing the accounts table.
	AccountID int64 `json:"account_id,omitempty"`
	// ImageCount holds the value of the "image_count" field.
	ImageCount int `json:"image_count,omitempty"`
	// VideoCount holds the value of the "video_count" field.
	VideoCount int `json:"video_count,omitempty"`
	// ErrorCount holds the value of the "error_count" field.
	ErrorCount int `json:"error_count,omitempty"`
	// LastErrorAt holds the value of the "last_error_at" field.
	LastErrorAt *time.Time `json:"last_error_at,omitempty"`
	// TodayImageCount holds the value of the "today_image_count" field.
	TodayImageCount int `json:"today_image_count,omitempty"`
	// TodayVideoCount holds the value of the "today_video_count" field.
	TodayVideoCount int `json:"today_video_count,omitempty"`
	// TodayErrorCount holds the value of the "today_error_count" field.
	TodayErrorCount int `json:"today_error_count,omitempty"`
	// TodayDate holds the value of the "today_date" field.
	TodayDate *time.Time `json:"today_date,omitempty"`
	// ConsecutiveErrorCount holds the value of the "consecutive_error_count" field.
	ConsecutiveErrorCount int `json:"consecutive_error_count,omitempty"`
	// selectValues stores extra columns selected via modifiers/order.
	selectValues sql.SelectValues
}
|
||||||
|
|
||||||
|
// scanValues returns the types for scanning values from sql.Rows.
// Each requested column gets a matching sql.Null* destination; columns
// not known to the schema fall back to sql.UnknownType and are captured
// by selectValues in assignValues.
func (*SoraUsageStat) scanValues(columns []string) ([]any, error) {
	values := make([]any, len(columns))
	for i := range columns {
		switch columns[i] {
		case sorausagestat.FieldID, sorausagestat.FieldAccountID, sorausagestat.FieldImageCount, sorausagestat.FieldVideoCount, sorausagestat.FieldErrorCount, sorausagestat.FieldTodayImageCount, sorausagestat.FieldTodayVideoCount, sorausagestat.FieldTodayErrorCount, sorausagestat.FieldConsecutiveErrorCount:
			values[i] = new(sql.NullInt64)
		case sorausagestat.FieldCreatedAt, sorausagestat.FieldUpdatedAt, sorausagestat.FieldLastErrorAt, sorausagestat.FieldTodayDate:
			values[i] = new(sql.NullTime)
		default:
			values[i] = new(sql.UnknownType)
		}
	}
	return values, nil
}
|
||||||
|
|
||||||
|
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the SoraUsageStat fields. Invalid (NULL) scan values leave the Go
// zero value in place for non-pointer fields; pointer fields (LastErrorAt,
// TodayDate) are only allocated when the column is non-NULL. Columns not
// declared in the schema are stashed in selectValues for Value lookups.
func (_m *SoraUsageStat) assignValues(columns []string, values []any) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case sorausagestat.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			_m.ID = int64(value.Int64)
		case sorausagestat.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				_m.CreatedAt = value.Time
			}
		case sorausagestat.FieldUpdatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
			} else if value.Valid {
				_m.UpdatedAt = value.Time
			}
		case sorausagestat.FieldAccountID:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field account_id", values[i])
			} else if value.Valid {
				_m.AccountID = value.Int64
			}
		case sorausagestat.FieldImageCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field image_count", values[i])
			} else if value.Valid {
				_m.ImageCount = int(value.Int64)
			}
		case sorausagestat.FieldVideoCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field video_count", values[i])
			} else if value.Valid {
				_m.VideoCount = int(value.Int64)
			}
		case sorausagestat.FieldErrorCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field error_count", values[i])
			} else if value.Valid {
				_m.ErrorCount = int(value.Int64)
			}
		case sorausagestat.FieldLastErrorAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field last_error_at", values[i])
			} else if value.Valid {
				_m.LastErrorAt = new(time.Time)
				*_m.LastErrorAt = value.Time
			}
		case sorausagestat.FieldTodayImageCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field today_image_count", values[i])
			} else if value.Valid {
				_m.TodayImageCount = int(value.Int64)
			}
		case sorausagestat.FieldTodayVideoCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field today_video_count", values[i])
			} else if value.Valid {
				_m.TodayVideoCount = int(value.Int64)
			}
		case sorausagestat.FieldTodayErrorCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field today_error_count", values[i])
			} else if value.Valid {
				_m.TodayErrorCount = int(value.Int64)
			}
		case sorausagestat.FieldTodayDate:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field today_date", values[i])
			} else if value.Valid {
				_m.TodayDate = new(time.Time)
				*_m.TodayDate = value.Time
			}
		case sorausagestat.FieldConsecutiveErrorCount:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field consecutive_error_count", values[i])
			} else if value.Valid {
				_m.ConsecutiveErrorCount = int(value.Int64)
			}
		default:
			_m.selectValues.Set(columns[i], values[i])
		}
	}
	return nil
}
|
||||||
|
|
||||||
|
// Value returns the ent.Value that was dynamically selected and assigned to the SoraUsageStat.
// This includes values selected through modifiers, order, etc.
func (_m *SoraUsageStat) Value(name string) (ent.Value, error) {
	return _m.selectValues.Get(name)
}
|
||||||
|
|
||||||
|
// Update returns a builder for updating this SoraUsageStat.
// Note that you need to call SoraUsageStat.Unwrap() before calling this method if this SoraUsageStat
// was returned from a transaction, and the transaction was committed or rolled back.
func (_m *SoraUsageStat) Update() *SoraUsageStatUpdateOne {
	return NewSoraUsageStatClient(_m.config).UpdateOne(_m)
}
|
||||||
|
|
||||||
|
// Unwrap unwraps the SoraUsageStat entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// It panics if the entity was not loaded through a transaction (programmer error).
func (_m *SoraUsageStat) Unwrap() *SoraUsageStat {
	_tx, ok := _m.config.driver.(*txDriver)
	if !ok {
		panic("ent: SoraUsageStat is not a transactional entity")
	}
	_m.config.driver = _tx.drv
	return _m
}
|
||||||
|
|
||||||
|
// String implements the fmt.Stringer.
// Nil pointer fields (last_error_at, today_date) are rendered as empty;
// all timestamps use the time.ANSIC layout.
func (_m *SoraUsageStat) String() string {
	var builder strings.Builder
	builder.WriteString("SoraUsageStat(")
	builder.WriteString(fmt.Sprintf("id=%v, ", _m.ID))
	builder.WriteString("created_at=")
	builder.WriteString(_m.CreatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("updated_at=")
	builder.WriteString(_m.UpdatedAt.Format(time.ANSIC))
	builder.WriteString(", ")
	builder.WriteString("account_id=")
	builder.WriteString(fmt.Sprintf("%v", _m.AccountID))
	builder.WriteString(", ")
	builder.WriteString("image_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.ImageCount))
	builder.WriteString(", ")
	builder.WriteString("video_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.VideoCount))
	builder.WriteString(", ")
	builder.WriteString("error_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.ErrorCount))
	builder.WriteString(", ")
	if v := _m.LastErrorAt; v != nil {
		builder.WriteString("last_error_at=")
		builder.WriteString(v.Format(time.ANSIC))
	}
	builder.WriteString(", ")
	builder.WriteString("today_image_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.TodayImageCount))
	builder.WriteString(", ")
	builder.WriteString("today_video_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.TodayVideoCount))
	builder.WriteString(", ")
	builder.WriteString("today_error_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.TodayErrorCount))
	builder.WriteString(", ")
	if v := _m.TodayDate; v != nil {
		builder.WriteString("today_date=")
		builder.WriteString(v.Format(time.ANSIC))
	}
	builder.WriteString(", ")
	builder.WriteString("consecutive_error_count=")
	builder.WriteString(fmt.Sprintf("%v", _m.ConsecutiveErrorCount))
	builder.WriteByte(')')
	return builder.String()
}
|
||||||
|
|
||||||
|
// SoraUsageStats is a parsable slice of SoraUsageStat.
// It is the element type returned by multi-row queries.
type SoraUsageStats []*SoraUsageStat
|
||||||
160
backend/ent/sorausagestat/sorausagestat.go
Normal file
160
backend/ent/sorausagestat/sorausagestat.go
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package sorausagestat
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Database identifiers (label, table, and column names) for the
// SoraUsageStat schema.
const (
	// Label holds the string label denoting the sorausagestat type in the database.
	Label = "sora_usage_stat"
	// FieldID holds the string denoting the id field in the database.
	FieldID = "id"
	// FieldCreatedAt holds the string denoting the created_at field in the database.
	FieldCreatedAt = "created_at"
	// FieldUpdatedAt holds the string denoting the updated_at field in the database.
	FieldUpdatedAt = "updated_at"
	// FieldAccountID holds the string denoting the account_id field in the database.
	FieldAccountID = "account_id"
	// FieldImageCount holds the string denoting the image_count field in the database.
	FieldImageCount = "image_count"
	// FieldVideoCount holds the string denoting the video_count field in the database.
	FieldVideoCount = "video_count"
	// FieldErrorCount holds the string denoting the error_count field in the database.
	FieldErrorCount = "error_count"
	// FieldLastErrorAt holds the string denoting the last_error_at field in the database.
	FieldLastErrorAt = "last_error_at"
	// FieldTodayImageCount holds the string denoting the today_image_count field in the database.
	FieldTodayImageCount = "today_image_count"
	// FieldTodayVideoCount holds the string denoting the today_video_count field in the database.
	FieldTodayVideoCount = "today_video_count"
	// FieldTodayErrorCount holds the string denoting the today_error_count field in the database.
	FieldTodayErrorCount = "today_error_count"
	// FieldTodayDate holds the string denoting the today_date field in the database.
	FieldTodayDate = "today_date"
	// FieldConsecutiveErrorCount holds the string denoting the consecutive_error_count field in the database.
	FieldConsecutiveErrorCount = "consecutive_error_count"
	// Table holds the table name of the sorausagestat in the database.
	Table = "sora_usage_stats"
)
|
||||||
|
|
||||||
|
// Columns holds all SQL columns for sorausagestat fields.
// The order here is the canonical column order used by scan/assign.
var Columns = []string{
	FieldID,
	FieldCreatedAt,
	FieldUpdatedAt,
	FieldAccountID,
	FieldImageCount,
	FieldVideoCount,
	FieldErrorCount,
	FieldLastErrorAt,
	FieldTodayImageCount,
	FieldTodayVideoCount,
	FieldTodayErrorCount,
	FieldTodayDate,
	FieldConsecutiveErrorCount,
}
|
||||||
|
|
||||||
|
// ValidColumn reports if the column name is valid (part of the table columns).
|
||||||
|
func ValidColumn(column string) bool {
|
||||||
|
for i := range Columns {
|
||||||
|
if column == Columns[i] {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Schema-level default hooks and values; populated by runtime.go from the
// schema descriptors at init time.
var (
	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
	DefaultCreatedAt func() time.Time
	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
	DefaultUpdatedAt func() time.Time
	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
	UpdateDefaultUpdatedAt func() time.Time
	// DefaultImageCount holds the default value on creation for the "image_count" field.
	DefaultImageCount int
	// DefaultVideoCount holds the default value on creation for the "video_count" field.
	DefaultVideoCount int
	// DefaultErrorCount holds the default value on creation for the "error_count" field.
	DefaultErrorCount int
	// DefaultTodayImageCount holds the default value on creation for the "today_image_count" field.
	DefaultTodayImageCount int
	// DefaultTodayVideoCount holds the default value on creation for the "today_video_count" field.
	DefaultTodayVideoCount int
	// DefaultTodayErrorCount holds the default value on creation for the "today_error_count" field.
	DefaultTodayErrorCount int
	// DefaultConsecutiveErrorCount holds the default value on creation for the "consecutive_error_count" field.
	DefaultConsecutiveErrorCount int
)
|
||||||
|
|
||||||
|
// OrderOption defines the ordering options for the SoraUsageStat queries.
type OrderOption func(*sql.Selector)

// Per-field ordering helpers; each returns an OrderOption sorting by the
// named column with the given term options (asc/desc, nulls placement).

// ByID orders the results by the id field.
func ByID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldID, opts...).ToFunc()
}

// ByCreatedAt orders the results by the created_at field.
func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
}

// ByUpdatedAt orders the results by the updated_at field.
func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc()
}

// ByAccountID orders the results by the account_id field.
func ByAccountID(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldAccountID, opts...).ToFunc()
}

// ByImageCount orders the results by the image_count field.
func ByImageCount(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldImageCount, opts...).ToFunc()
}

// ByVideoCount orders the results by the video_count field.
func ByVideoCount(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldVideoCount, opts...).ToFunc()
}

// ByErrorCount orders the results by the error_count field.
func ByErrorCount(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldErrorCount, opts...).ToFunc()
}

// ByLastErrorAt orders the results by the last_error_at field.
func ByLastErrorAt(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldLastErrorAt, opts...).ToFunc()
}

// ByTodayImageCount orders the results by the today_image_count field.
func ByTodayImageCount(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldTodayImageCount, opts...).ToFunc()
}

// ByTodayVideoCount orders the results by the today_video_count field.
func ByTodayVideoCount(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldTodayVideoCount, opts...).ToFunc()
}

// ByTodayErrorCount orders the results by the today_error_count field.
func ByTodayErrorCount(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldTodayErrorCount, opts...).ToFunc()
}

// ByTodayDate orders the results by the today_date field.
func ByTodayDate(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldTodayDate, opts...).ToFunc()
}

// ByConsecutiveErrorCount orders the results by the consecutive_error_count field.
func ByConsecutiveErrorCount(opts ...sql.OrderTermOption) OrderOption {
	return sql.OrderByField(FieldConsecutiveErrorCount, opts...).ToFunc()
}
|
||||||
630
backend/ent/sorausagestat/where.go
Normal file
630
backend/ent/sorausagestat/where.go
Normal file
@@ -0,0 +1,630 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package sorausagestat
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ID filters vertices based on their ID field.
// The ID* helpers below wrap the generic sql.Field* predicates for the
// primary-key column; the per-field helpers are equality shorthands.
func ID(id int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldID, id))
}

// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldID, id))
}

// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldNEQ(FieldID, id))
}

// IDIn applies the In predicate on the ID field.
func IDIn(ids ...int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldIn(FieldID, ids...))
}

// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldNotIn(FieldID, ids...))
}

// IDGT applies the GT predicate on the ID field.
func IDGT(id int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldGT(FieldID, id))
}

// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldGTE(FieldID, id))
}

// IDLT applies the LT predicate on the ID field.
func IDLT(id int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldLT(FieldID, id))
}

// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldLTE(FieldID, id))
}

// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
func CreatedAt(v time.Time) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldCreatedAt, v))
}

// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
func UpdatedAt(v time.Time) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldUpdatedAt, v))
}

// AccountID applies equality check predicate on the "account_id" field. It's identical to AccountIDEQ.
func AccountID(v int64) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldAccountID, v))
}

// ImageCount applies equality check predicate on the "image_count" field. It's identical to ImageCountEQ.
func ImageCount(v int) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldImageCount, v))
}

// VideoCount applies equality check predicate on the "video_count" field. It's identical to VideoCountEQ.
func VideoCount(v int) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldVideoCount, v))
}

// ErrorCount applies equality check predicate on the "error_count" field. It's identical to ErrorCountEQ.
func ErrorCount(v int) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldErrorCount, v))
}

// LastErrorAt applies equality check predicate on the "last_error_at" field. It's identical to LastErrorAtEQ.
func LastErrorAt(v time.Time) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldLastErrorAt, v))
}

// TodayImageCount applies equality check predicate on the "today_image_count" field. It's identical to TodayImageCountEQ.
func TodayImageCount(v int) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldTodayImageCount, v))
}

// TodayVideoCount applies equality check predicate on the "today_video_count" field. It's identical to TodayVideoCountEQ.
func TodayVideoCount(v int) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldTodayVideoCount, v))
}

// TodayErrorCount applies equality check predicate on the "today_error_count" field. It's identical to TodayErrorCountEQ.
func TodayErrorCount(v int) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldTodayErrorCount, v))
}

// TodayDate applies equality check predicate on the "today_date" field. It's identical to TodayDateEQ.
func TodayDate(v time.Time) predicate.SoraUsageStat {
	return predicate.SoraUsageStat(sql.FieldEQ(FieldTodayDate, v))
}
|
||||||
|
|
||||||
|
// ConsecutiveErrorCount applies equality check predicate on the "consecutive_error_count" field. It's identical to ConsecutiveErrorCountEQ.
|
||||||
|
func ConsecutiveErrorCount(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldConsecutiveErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtEQ applies the EQ predicate on the "created_at" field.
|
||||||
|
func CreatedAtEQ(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
|
||||||
|
func CreatedAtNEQ(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtIn applies the In predicate on the "created_at" field.
|
||||||
|
func CreatedAtIn(vs ...time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldCreatedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
|
||||||
|
func CreatedAtNotIn(vs ...time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldCreatedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtGT applies the GT predicate on the "created_at" field.
|
||||||
|
func CreatedAtGT(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtGTE applies the GTE predicate on the "created_at" field.
|
||||||
|
func CreatedAtGTE(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtLT applies the LT predicate on the "created_at" field.
|
||||||
|
func CreatedAtLT(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreatedAtLTE applies the LTE predicate on the "created_at" field.
|
||||||
|
func CreatedAtLTE(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldCreatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
|
||||||
|
func UpdatedAtEQ(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldUpdatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
|
||||||
|
func UpdatedAtNEQ(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldUpdatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatedAtIn applies the In predicate on the "updated_at" field.
|
||||||
|
func UpdatedAtIn(vs ...time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldUpdatedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
|
||||||
|
func UpdatedAtNotIn(vs ...time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldUpdatedAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatedAtGT applies the GT predicate on the "updated_at" field.
|
||||||
|
func UpdatedAtGT(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldUpdatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
|
||||||
|
func UpdatedAtGTE(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldUpdatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatedAtLT applies the LT predicate on the "updated_at" field.
|
||||||
|
func UpdatedAtLT(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldUpdatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
|
||||||
|
func UpdatedAtLTE(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldUpdatedAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDEQ applies the EQ predicate on the "account_id" field.
|
||||||
|
func AccountIDEQ(v int64) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDNEQ applies the NEQ predicate on the "account_id" field.
|
||||||
|
func AccountIDNEQ(v int64) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDIn applies the In predicate on the "account_id" field.
|
||||||
|
func AccountIDIn(vs ...int64) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldAccountID, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDNotIn applies the NotIn predicate on the "account_id" field.
|
||||||
|
func AccountIDNotIn(vs ...int64) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldAccountID, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDGT applies the GT predicate on the "account_id" field.
|
||||||
|
func AccountIDGT(v int64) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDGTE applies the GTE predicate on the "account_id" field.
|
||||||
|
func AccountIDGTE(v int64) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDLT applies the LT predicate on the "account_id" field.
|
||||||
|
func AccountIDLT(v int64) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// AccountIDLTE applies the LTE predicate on the "account_id" field.
|
||||||
|
func AccountIDLTE(v int64) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldAccountID, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageCountEQ applies the EQ predicate on the "image_count" field.
|
||||||
|
func ImageCountEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageCountNEQ applies the NEQ predicate on the "image_count" field.
|
||||||
|
func ImageCountNEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageCountIn applies the In predicate on the "image_count" field.
|
||||||
|
func ImageCountIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldImageCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageCountNotIn applies the NotIn predicate on the "image_count" field.
|
||||||
|
func ImageCountNotIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldImageCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageCountGT applies the GT predicate on the "image_count" field.
|
||||||
|
func ImageCountGT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageCountGTE applies the GTE predicate on the "image_count" field.
|
||||||
|
func ImageCountGTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageCountLT applies the LT predicate on the "image_count" field.
|
||||||
|
func ImageCountLT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ImageCountLTE applies the LTE predicate on the "image_count" field.
|
||||||
|
func ImageCountLTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// VideoCountEQ applies the EQ predicate on the "video_count" field.
|
||||||
|
func VideoCountEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// VideoCountNEQ applies the NEQ predicate on the "video_count" field.
|
||||||
|
func VideoCountNEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// VideoCountIn applies the In predicate on the "video_count" field.
|
||||||
|
func VideoCountIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldVideoCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// VideoCountNotIn applies the NotIn predicate on the "video_count" field.
|
||||||
|
func VideoCountNotIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldVideoCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// VideoCountGT applies the GT predicate on the "video_count" field.
|
||||||
|
func VideoCountGT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// VideoCountGTE applies the GTE predicate on the "video_count" field.
|
||||||
|
func VideoCountGTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// VideoCountLT applies the LT predicate on the "video_count" field.
|
||||||
|
func VideoCountLT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// VideoCountLTE applies the LTE predicate on the "video_count" field.
|
||||||
|
func VideoCountLTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCountEQ applies the EQ predicate on the "error_count" field.
|
||||||
|
func ErrorCountEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCountNEQ applies the NEQ predicate on the "error_count" field.
|
||||||
|
func ErrorCountNEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCountIn applies the In predicate on the "error_count" field.
|
||||||
|
func ErrorCountIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldErrorCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCountNotIn applies the NotIn predicate on the "error_count" field.
|
||||||
|
func ErrorCountNotIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldErrorCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCountGT applies the GT predicate on the "error_count" field.
|
||||||
|
func ErrorCountGT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCountGTE applies the GTE predicate on the "error_count" field.
|
||||||
|
func ErrorCountGTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCountLT applies the LT predicate on the "error_count" field.
|
||||||
|
func ErrorCountLT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorCountLTE applies the LTE predicate on the "error_count" field.
|
||||||
|
func ErrorCountLTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtEQ applies the EQ predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtEQ(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldLastErrorAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtNEQ applies the NEQ predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtNEQ(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldLastErrorAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtIn applies the In predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtIn(vs ...time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldLastErrorAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtNotIn applies the NotIn predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtNotIn(vs ...time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldLastErrorAt, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtGT applies the GT predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtGT(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldLastErrorAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtGTE applies the GTE predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtGTE(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldLastErrorAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtLT applies the LT predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtLT(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldLastErrorAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtLTE applies the LTE predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtLTE(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldLastErrorAt, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtIsNil applies the IsNil predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtIsNil() predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIsNull(FieldLastErrorAt))
|
||||||
|
}
|
||||||
|
|
||||||
|
// LastErrorAtNotNil applies the NotNil predicate on the "last_error_at" field.
|
||||||
|
func LastErrorAtNotNil() predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotNull(FieldLastErrorAt))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayImageCountEQ applies the EQ predicate on the "today_image_count" field.
|
||||||
|
func TodayImageCountEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldTodayImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayImageCountNEQ applies the NEQ predicate on the "today_image_count" field.
|
||||||
|
func TodayImageCountNEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldTodayImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayImageCountIn applies the In predicate on the "today_image_count" field.
|
||||||
|
func TodayImageCountIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldTodayImageCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayImageCountNotIn applies the NotIn predicate on the "today_image_count" field.
|
||||||
|
func TodayImageCountNotIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldTodayImageCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayImageCountGT applies the GT predicate on the "today_image_count" field.
|
||||||
|
func TodayImageCountGT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldTodayImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayImageCountGTE applies the GTE predicate on the "today_image_count" field.
|
||||||
|
func TodayImageCountGTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldTodayImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayImageCountLT applies the LT predicate on the "today_image_count" field.
|
||||||
|
func TodayImageCountLT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldTodayImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayImageCountLTE applies the LTE predicate on the "today_image_count" field.
|
||||||
|
func TodayImageCountLTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldTodayImageCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayVideoCountEQ applies the EQ predicate on the "today_video_count" field.
|
||||||
|
func TodayVideoCountEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldTodayVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayVideoCountNEQ applies the NEQ predicate on the "today_video_count" field.
|
||||||
|
func TodayVideoCountNEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldTodayVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayVideoCountIn applies the In predicate on the "today_video_count" field.
|
||||||
|
func TodayVideoCountIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldTodayVideoCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayVideoCountNotIn applies the NotIn predicate on the "today_video_count" field.
|
||||||
|
func TodayVideoCountNotIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldTodayVideoCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayVideoCountGT applies the GT predicate on the "today_video_count" field.
|
||||||
|
func TodayVideoCountGT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldTodayVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayVideoCountGTE applies the GTE predicate on the "today_video_count" field.
|
||||||
|
func TodayVideoCountGTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldTodayVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayVideoCountLT applies the LT predicate on the "today_video_count" field.
|
||||||
|
func TodayVideoCountLT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldTodayVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayVideoCountLTE applies the LTE predicate on the "today_video_count" field.
|
||||||
|
func TodayVideoCountLTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldTodayVideoCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayErrorCountEQ applies the EQ predicate on the "today_error_count" field.
|
||||||
|
func TodayErrorCountEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldTodayErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayErrorCountNEQ applies the NEQ predicate on the "today_error_count" field.
|
||||||
|
func TodayErrorCountNEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldTodayErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayErrorCountIn applies the In predicate on the "today_error_count" field.
|
||||||
|
func TodayErrorCountIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldTodayErrorCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayErrorCountNotIn applies the NotIn predicate on the "today_error_count" field.
|
||||||
|
func TodayErrorCountNotIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldTodayErrorCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayErrorCountGT applies the GT predicate on the "today_error_count" field.
|
||||||
|
func TodayErrorCountGT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldTodayErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayErrorCountGTE applies the GTE predicate on the "today_error_count" field.
|
||||||
|
func TodayErrorCountGTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldTodayErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayErrorCountLT applies the LT predicate on the "today_error_count" field.
|
||||||
|
func TodayErrorCountLT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldTodayErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayErrorCountLTE applies the LTE predicate on the "today_error_count" field.
|
||||||
|
func TodayErrorCountLTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldTodayErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateEQ applies the EQ predicate on the "today_date" field.
|
||||||
|
func TodayDateEQ(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldTodayDate, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateNEQ applies the NEQ predicate on the "today_date" field.
|
||||||
|
func TodayDateNEQ(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldTodayDate, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateIn applies the In predicate on the "today_date" field.
|
||||||
|
func TodayDateIn(vs ...time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldTodayDate, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateNotIn applies the NotIn predicate on the "today_date" field.
|
||||||
|
func TodayDateNotIn(vs ...time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldTodayDate, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateGT applies the GT predicate on the "today_date" field.
|
||||||
|
func TodayDateGT(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldTodayDate, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateGTE applies the GTE predicate on the "today_date" field.
|
||||||
|
func TodayDateGTE(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldTodayDate, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateLT applies the LT predicate on the "today_date" field.
|
||||||
|
func TodayDateLT(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldTodayDate, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateLTE applies the LTE predicate on the "today_date" field.
|
||||||
|
func TodayDateLTE(v time.Time) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldTodayDate, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateIsNil applies the IsNil predicate on the "today_date" field.
|
||||||
|
func TodayDateIsNil() predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIsNull(FieldTodayDate))
|
||||||
|
}
|
||||||
|
|
||||||
|
// TodayDateNotNil applies the NotNil predicate on the "today_date" field.
|
||||||
|
func TodayDateNotNil() predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotNull(FieldTodayDate))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConsecutiveErrorCountEQ applies the EQ predicate on the "consecutive_error_count" field.
|
||||||
|
func ConsecutiveErrorCountEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldEQ(FieldConsecutiveErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConsecutiveErrorCountNEQ applies the NEQ predicate on the "consecutive_error_count" field.
|
||||||
|
func ConsecutiveErrorCountNEQ(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNEQ(FieldConsecutiveErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConsecutiveErrorCountIn applies the In predicate on the "consecutive_error_count" field.
|
||||||
|
func ConsecutiveErrorCountIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldIn(FieldConsecutiveErrorCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConsecutiveErrorCountNotIn applies the NotIn predicate on the "consecutive_error_count" field.
|
||||||
|
func ConsecutiveErrorCountNotIn(vs ...int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldNotIn(FieldConsecutiveErrorCount, vs...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConsecutiveErrorCountGT applies the GT predicate on the "consecutive_error_count" field.
|
||||||
|
func ConsecutiveErrorCountGT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGT(FieldConsecutiveErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConsecutiveErrorCountGTE applies the GTE predicate on the "consecutive_error_count" field.
|
||||||
|
func ConsecutiveErrorCountGTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldGTE(FieldConsecutiveErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConsecutiveErrorCountLT applies the LT predicate on the "consecutive_error_count" field.
|
||||||
|
func ConsecutiveErrorCountLT(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLT(FieldConsecutiveErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConsecutiveErrorCountLTE applies the LTE predicate on the "consecutive_error_count" field.
|
||||||
|
func ConsecutiveErrorCountLTE(v int) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.FieldLTE(FieldConsecutiveErrorCount, v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// And groups predicates with the AND operator between them.
|
||||||
|
func And(predicates ...predicate.SoraUsageStat) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.AndPredicates(predicates...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Or groups predicates with the OR operator between them.
|
||||||
|
func Or(predicates ...predicate.SoraUsageStat) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.OrPredicates(predicates...))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Not applies the not operator on the given predicate.
|
||||||
|
func Not(p predicate.SoraUsageStat) predicate.SoraUsageStat {
|
||||||
|
return predicate.SoraUsageStat(sql.NotPredicates(p))
|
||||||
|
}
|
||||||
1334
backend/ent/sorausagestat_create.go
Normal file
1334
backend/ent/sorausagestat_create.go
Normal file
File diff suppressed because it is too large
Load Diff
88
backend/ent/sorausagestat_delete.go
Normal file
88
backend/ent/sorausagestat_delete.go
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/sorausagestat"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraUsageStatDelete is the builder for deleting a SoraUsageStat entity.
|
||||||
|
type SoraUsageStatDelete struct {
|
||||||
|
config
|
||||||
|
hooks []Hook
|
||||||
|
mutation *SoraUsageStatMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraUsageStatDelete builder.
|
||||||
|
func (_d *SoraUsageStatDelete) Where(ps ...predicate.SoraUsageStat) *SoraUsageStatDelete {
|
||||||
|
_d.mutation.Where(ps...)
|
||||||
|
return _d
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the deletion query and returns how many vertices were deleted.
|
||||||
|
func (_d *SoraUsageStatDelete) Exec(ctx context.Context) (int, error) {
|
||||||
|
return withHooks(ctx, _d.sqlExec, _d.mutation, _d.hooks)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_d *SoraUsageStatDelete) ExecX(ctx context.Context) int {
|
||||||
|
n, err := _d.Exec(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_d *SoraUsageStatDelete) sqlExec(ctx context.Context) (int, error) {
|
||||||
|
_spec := sqlgraph.NewDeleteSpec(sorausagestat.Table, sqlgraph.NewFieldSpec(sorausagestat.FieldID, field.TypeInt64))
|
||||||
|
if ps := _d.mutation.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
affected, err := sqlgraph.DeleteNodes(ctx, _d.driver, _spec)
|
||||||
|
if err != nil && sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
_d.mutation.done = true
|
||||||
|
return affected, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraUsageStatDeleteOne is the builder for deleting a single SoraUsageStat entity.
|
||||||
|
type SoraUsageStatDeleteOne struct {
|
||||||
|
_d *SoraUsageStatDelete
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraUsageStatDelete builder.
|
||||||
|
func (_d *SoraUsageStatDeleteOne) Where(ps ...predicate.SoraUsageStat) *SoraUsageStatDeleteOne {
|
||||||
|
_d._d.mutation.Where(ps...)
|
||||||
|
return _d
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the deletion query.
|
||||||
|
func (_d *SoraUsageStatDeleteOne) Exec(ctx context.Context) error {
|
||||||
|
n, err := _d._d.Exec(ctx)
|
||||||
|
switch {
|
||||||
|
case err != nil:
|
||||||
|
return err
|
||||||
|
case n == 0:
|
||||||
|
return &NotFoundError{sorausagestat.Label}
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_d *SoraUsageStatDeleteOne) ExecX(ctx context.Context) {
|
||||||
|
if err := _d.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
564
backend/ent/sorausagestat_query.go
Normal file
564
backend/ent/sorausagestat_query.go
Normal file
@@ -0,0 +1,564 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
|
||||||
|
"entgo.io/ent"
|
||||||
|
"entgo.io/ent/dialect"
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/sorausagestat"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraUsageStatQuery is the builder for querying SoraUsageStat entities.
|
||||||
|
type SoraUsageStatQuery struct {
|
||||||
|
config
|
||||||
|
ctx *QueryContext
|
||||||
|
order []sorausagestat.OrderOption
|
||||||
|
inters []Interceptor
|
||||||
|
predicates []predicate.SoraUsageStat
|
||||||
|
modifiers []func(*sql.Selector)
|
||||||
|
// intermediate query (i.e. traversal path).
|
||||||
|
sql *sql.Selector
|
||||||
|
path func(context.Context) (*sql.Selector, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where adds a new predicate for the SoraUsageStatQuery builder.
|
||||||
|
func (_q *SoraUsageStatQuery) Where(ps ...predicate.SoraUsageStat) *SoraUsageStatQuery {
|
||||||
|
_q.predicates = append(_q.predicates, ps...)
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Limit the number of records to be returned by this query.
|
||||||
|
func (_q *SoraUsageStatQuery) Limit(limit int) *SoraUsageStatQuery {
|
||||||
|
_q.ctx.Limit = &limit
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Offset to start from.
|
||||||
|
func (_q *SoraUsageStatQuery) Offset(offset int) *SoraUsageStatQuery {
|
||||||
|
_q.ctx.Offset = &offset
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unique configures the query builder to filter duplicate records on query.
|
||||||
|
// By default, unique is set to true, and can be disabled using this method.
|
||||||
|
func (_q *SoraUsageStatQuery) Unique(unique bool) *SoraUsageStatQuery {
|
||||||
|
_q.ctx.Unique = &unique
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// Order specifies how the records should be ordered.
|
||||||
|
func (_q *SoraUsageStatQuery) Order(o ...sorausagestat.OrderOption) *SoraUsageStatQuery {
|
||||||
|
_q.order = append(_q.order, o...)
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// First returns the first SoraUsageStat entity from the query.
|
||||||
|
// Returns a *NotFoundError when no SoraUsageStat was found.
|
||||||
|
func (_q *SoraUsageStatQuery) First(ctx context.Context) (*SoraUsageStat, error) {
|
||||||
|
nodes, err := _q.Limit(1).All(setContextOp(ctx, _q.ctx, ent.OpQueryFirst))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(nodes) == 0 {
|
||||||
|
return nil, &NotFoundError{sorausagestat.Label}
|
||||||
|
}
|
||||||
|
return nodes[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstX is like First, but panics if an error occurs.
|
||||||
|
func (_q *SoraUsageStatQuery) FirstX(ctx context.Context) *SoraUsageStat {
|
||||||
|
node, err := _q.First(ctx)
|
||||||
|
if err != nil && !IsNotFound(err) {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstID returns the first SoraUsageStat ID from the query.
|
||||||
|
// Returns a *NotFoundError when no SoraUsageStat ID was found.
|
||||||
|
func (_q *SoraUsageStatQuery) FirstID(ctx context.Context) (id int64, err error) {
|
||||||
|
var ids []int64
|
||||||
|
if ids, err = _q.Limit(1).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryFirstID)); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(ids) == 0 {
|
||||||
|
err = &NotFoundError{sorausagestat.Label}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
return ids[0], nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstIDX is like FirstID, but panics if an error occurs.
|
||||||
|
func (_q *SoraUsageStatQuery) FirstIDX(ctx context.Context) int64 {
|
||||||
|
id, err := _q.FirstID(ctx)
|
||||||
|
if err != nil && !IsNotFound(err) {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only returns a single SoraUsageStat entity found by the query, ensuring it only returns one.
|
||||||
|
// Returns a *NotSingularError when more than one SoraUsageStat entity is found.
|
||||||
|
// Returns a *NotFoundError when no SoraUsageStat entities are found.
|
||||||
|
func (_q *SoraUsageStatQuery) Only(ctx context.Context) (*SoraUsageStat, error) {
|
||||||
|
nodes, err := _q.Limit(2).All(setContextOp(ctx, _q.ctx, ent.OpQueryOnly))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
switch len(nodes) {
|
||||||
|
case 1:
|
||||||
|
return nodes[0], nil
|
||||||
|
case 0:
|
||||||
|
return nil, &NotFoundError{sorausagestat.Label}
|
||||||
|
default:
|
||||||
|
return nil, &NotSingularError{sorausagestat.Label}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyX is like Only, but panics if an error occurs.
|
||||||
|
func (_q *SoraUsageStatQuery) OnlyX(ctx context.Context) *SoraUsageStat {
|
||||||
|
node, err := _q.Only(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyID is like Only, but returns the only SoraUsageStat ID in the query.
|
||||||
|
// Returns a *NotSingularError when more than one SoraUsageStat ID is found.
|
||||||
|
// Returns a *NotFoundError when no entities are found.
|
||||||
|
func (_q *SoraUsageStatQuery) OnlyID(ctx context.Context) (id int64, err error) {
|
||||||
|
var ids []int64
|
||||||
|
if ids, err = _q.Limit(2).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryOnlyID)); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch len(ids) {
|
||||||
|
case 1:
|
||||||
|
id = ids[0]
|
||||||
|
case 0:
|
||||||
|
err = &NotFoundError{sorausagestat.Label}
|
||||||
|
default:
|
||||||
|
err = &NotSingularError{sorausagestat.Label}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// OnlyIDX is like OnlyID, but panics if an error occurs.
|
||||||
|
func (_q *SoraUsageStatQuery) OnlyIDX(ctx context.Context) int64 {
|
||||||
|
id, err := _q.OnlyID(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return id
|
||||||
|
}
|
||||||
|
|
||||||
|
// All executes the query and returns a list of SoraUsageStats.
|
||||||
|
func (_q *SoraUsageStatQuery) All(ctx context.Context) ([]*SoraUsageStat, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryAll)
|
||||||
|
if err := _q.prepareQuery(ctx); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
qr := querierAll[[]*SoraUsageStat, *SoraUsageStatQuery]()
|
||||||
|
return withInterceptors[[]*SoraUsageStat](ctx, _q, qr, _q.inters)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AllX is like All, but panics if an error occurs.
|
||||||
|
func (_q *SoraUsageStatQuery) AllX(ctx context.Context) []*SoraUsageStat {
|
||||||
|
nodes, err := _q.All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return nodes
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDs executes the query and returns a list of SoraUsageStat IDs.
|
||||||
|
func (_q *SoraUsageStatQuery) IDs(ctx context.Context) (ids []int64, err error) {
|
||||||
|
if _q.ctx.Unique == nil && _q.path != nil {
|
||||||
|
_q.Unique(true)
|
||||||
|
}
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryIDs)
|
||||||
|
if err = _q.Select(sorausagestat.FieldID).Scan(ctx, &ids); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return ids, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// IDsX is like IDs, but panics if an error occurs.
|
||||||
|
func (_q *SoraUsageStatQuery) IDsX(ctx context.Context) []int64 {
|
||||||
|
ids, err := _q.IDs(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return ids
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count returns the count of the given query.
|
||||||
|
func (_q *SoraUsageStatQuery) Count(ctx context.Context) (int, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryCount)
|
||||||
|
if err := _q.prepareQuery(ctx); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return withInterceptors[int](ctx, _q, querierCount[*SoraUsageStatQuery](), _q.inters)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CountX is like Count, but panics if an error occurs.
|
||||||
|
func (_q *SoraUsageStatQuery) CountX(ctx context.Context) int {
|
||||||
|
count, err := _q.Count(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return count
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exist returns true if the query has elements in the graph.
|
||||||
|
func (_q *SoraUsageStatQuery) Exist(ctx context.Context) (bool, error) {
|
||||||
|
ctx = setContextOp(ctx, _q.ctx, ent.OpQueryExist)
|
||||||
|
switch _, err := _q.FirstID(ctx); {
|
||||||
|
case IsNotFound(err):
|
||||||
|
return false, nil
|
||||||
|
case err != nil:
|
||||||
|
return false, fmt.Errorf("ent: check existence: %w", err)
|
||||||
|
default:
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExistX is like Exist, but panics if an error occurs.
|
||||||
|
func (_q *SoraUsageStatQuery) ExistX(ctx context.Context) bool {
|
||||||
|
exist, err := _q.Exist(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return exist
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clone returns a duplicate of the SoraUsageStatQuery builder, including all associated steps. It can be
|
||||||
|
// used to prepare common query builders and use them differently after the clone is made.
|
||||||
|
func (_q *SoraUsageStatQuery) Clone() *SoraUsageStatQuery {
|
||||||
|
if _q == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &SoraUsageStatQuery{
|
||||||
|
config: _q.config,
|
||||||
|
ctx: _q.ctx.Clone(),
|
||||||
|
order: append([]sorausagestat.OrderOption{}, _q.order...),
|
||||||
|
inters: append([]Interceptor{}, _q.inters...),
|
||||||
|
predicates: append([]predicate.SoraUsageStat{}, _q.predicates...),
|
||||||
|
// clone intermediate query.
|
||||||
|
sql: _q.sql.Clone(),
|
||||||
|
path: _q.path,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// GroupBy is used to group vertices by one or more fields/columns.
|
||||||
|
// It is often used with aggregate functions, like: count, max, mean, min, sum.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// var v []struct {
|
||||||
|
// CreatedAt time.Time `json:"created_at,omitempty"`
|
||||||
|
// Count int `json:"count,omitempty"`
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// client.SoraUsageStat.Query().
|
||||||
|
// GroupBy(sorausagestat.FieldCreatedAt).
|
||||||
|
// Aggregate(ent.Count()).
|
||||||
|
// Scan(ctx, &v)
|
||||||
|
func (_q *SoraUsageStatQuery) GroupBy(field string, fields ...string) *SoraUsageStatGroupBy {
|
||||||
|
_q.ctx.Fields = append([]string{field}, fields...)
|
||||||
|
grbuild := &SoraUsageStatGroupBy{build: _q}
|
||||||
|
grbuild.flds = &_q.ctx.Fields
|
||||||
|
grbuild.label = sorausagestat.Label
|
||||||
|
grbuild.scan = grbuild.Scan
|
||||||
|
return grbuild
|
||||||
|
}
|
||||||
|
|
||||||
|
// Select allows the selection one or more fields/columns for the given query,
|
||||||
|
// instead of selecting all fields in the entity.
|
||||||
|
//
|
||||||
|
// Example:
|
||||||
|
//
|
||||||
|
// var v []struct {
|
||||||
|
// CreatedAt time.Time `json:"created_at,omitempty"`
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// client.SoraUsageStat.Query().
|
||||||
|
// Select(sorausagestat.FieldCreatedAt).
|
||||||
|
// Scan(ctx, &v)
|
||||||
|
func (_q *SoraUsageStatQuery) Select(fields ...string) *SoraUsageStatSelect {
|
||||||
|
_q.ctx.Fields = append(_q.ctx.Fields, fields...)
|
||||||
|
sbuild := &SoraUsageStatSelect{SoraUsageStatQuery: _q}
|
||||||
|
sbuild.label = sorausagestat.Label
|
||||||
|
sbuild.flds, sbuild.scan = &_q.ctx.Fields, sbuild.Scan
|
||||||
|
return sbuild
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate returns a SoraUsageStatSelect configured with the given aggregations.
|
||||||
|
func (_q *SoraUsageStatQuery) Aggregate(fns ...AggregateFunc) *SoraUsageStatSelect {
|
||||||
|
return _q.Select().Aggregate(fns...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraUsageStatQuery) prepareQuery(ctx context.Context) error {
|
||||||
|
for _, inter := range _q.inters {
|
||||||
|
if inter == nil {
|
||||||
|
return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
|
||||||
|
}
|
||||||
|
if trv, ok := inter.(Traverser); ok {
|
||||||
|
if err := trv.Traverse(ctx, _q); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, f := range _q.ctx.Fields {
|
||||||
|
if !sorausagestat.ValidColumn(f) {
|
||||||
|
return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if _q.path != nil {
|
||||||
|
prev, err := _q.path(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_q.sql = prev
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraUsageStatQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*SoraUsageStat, error) {
|
||||||
|
var (
|
||||||
|
nodes = []*SoraUsageStat{}
|
||||||
|
_spec = _q.querySpec()
|
||||||
|
)
|
||||||
|
_spec.ScanValues = func(columns []string) ([]any, error) {
|
||||||
|
return (*SoraUsageStat).scanValues(nil, columns)
|
||||||
|
}
|
||||||
|
_spec.Assign = func(columns []string, values []any) error {
|
||||||
|
node := &SoraUsageStat{config: _q.config}
|
||||||
|
nodes = append(nodes, node)
|
||||||
|
return node.assignValues(columns, values)
|
||||||
|
}
|
||||||
|
if len(_q.modifiers) > 0 {
|
||||||
|
_spec.Modifiers = _q.modifiers
|
||||||
|
}
|
||||||
|
for i := range hooks {
|
||||||
|
hooks[i](ctx, _spec)
|
||||||
|
}
|
||||||
|
if err := sqlgraph.QueryNodes(ctx, _q.driver, _spec); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(nodes) == 0 {
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
return nodes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraUsageStatQuery) sqlCount(ctx context.Context) (int, error) {
|
||||||
|
_spec := _q.querySpec()
|
||||||
|
if len(_q.modifiers) > 0 {
|
||||||
|
_spec.Modifiers = _q.modifiers
|
||||||
|
}
|
||||||
|
_spec.Node.Columns = _q.ctx.Fields
|
||||||
|
if len(_q.ctx.Fields) > 0 {
|
||||||
|
_spec.Unique = _q.ctx.Unique != nil && *_q.ctx.Unique
|
||||||
|
}
|
||||||
|
return sqlgraph.CountNodes(ctx, _q.driver, _spec)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraUsageStatQuery) querySpec() *sqlgraph.QuerySpec {
|
||||||
|
_spec := sqlgraph.NewQuerySpec(sorausagestat.Table, sorausagestat.Columns, sqlgraph.NewFieldSpec(sorausagestat.FieldID, field.TypeInt64))
|
||||||
|
_spec.From = _q.sql
|
||||||
|
if unique := _q.ctx.Unique; unique != nil {
|
||||||
|
_spec.Unique = *unique
|
||||||
|
} else if _q.path != nil {
|
||||||
|
_spec.Unique = true
|
||||||
|
}
|
||||||
|
if fields := _q.ctx.Fields; len(fields) > 0 {
|
||||||
|
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, sorausagestat.FieldID)
|
||||||
|
for i := range fields {
|
||||||
|
if fields[i] != sorausagestat.FieldID {
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, fields[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ps := _q.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if limit := _q.ctx.Limit; limit != nil {
|
||||||
|
_spec.Limit = *limit
|
||||||
|
}
|
||||||
|
if offset := _q.ctx.Offset; offset != nil {
|
||||||
|
_spec.Offset = *offset
|
||||||
|
}
|
||||||
|
if ps := _q.order; len(ps) > 0 {
|
||||||
|
_spec.Order = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return _spec
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_q *SoraUsageStatQuery) sqlQuery(ctx context.Context) *sql.Selector {
|
||||||
|
builder := sql.Dialect(_q.driver.Dialect())
|
||||||
|
t1 := builder.Table(sorausagestat.Table)
|
||||||
|
columns := _q.ctx.Fields
|
||||||
|
if len(columns) == 0 {
|
||||||
|
columns = sorausagestat.Columns
|
||||||
|
}
|
||||||
|
selector := builder.Select(t1.Columns(columns...)...).From(t1)
|
||||||
|
if _q.sql != nil {
|
||||||
|
selector = _q.sql
|
||||||
|
selector.Select(selector.Columns(columns...)...)
|
||||||
|
}
|
||||||
|
if _q.ctx.Unique != nil && *_q.ctx.Unique {
|
||||||
|
selector.Distinct()
|
||||||
|
}
|
||||||
|
for _, m := range _q.modifiers {
|
||||||
|
m(selector)
|
||||||
|
}
|
||||||
|
for _, p := range _q.predicates {
|
||||||
|
p(selector)
|
||||||
|
}
|
||||||
|
for _, p := range _q.order {
|
||||||
|
p(selector)
|
||||||
|
}
|
||||||
|
if offset := _q.ctx.Offset; offset != nil {
|
||||||
|
// limit is mandatory for offset clause. We start
|
||||||
|
// with default value, and override it below if needed.
|
||||||
|
selector.Offset(*offset).Limit(math.MaxInt32)
|
||||||
|
}
|
||||||
|
if limit := _q.ctx.Limit; limit != nil {
|
||||||
|
selector.Limit(*limit)
|
||||||
|
}
|
||||||
|
return selector
|
||||||
|
}
|
||||||
|
|
||||||
|
// ForUpdate locks the selected rows against concurrent updates, and prevent them from being
|
||||||
|
// updated, deleted or "selected ... for update" by other sessions, until the transaction is
|
||||||
|
// either committed or rolled-back.
|
||||||
|
func (_q *SoraUsageStatQuery) ForUpdate(opts ...sql.LockOption) *SoraUsageStatQuery {
|
||||||
|
if _q.driver.Dialect() == dialect.Postgres {
|
||||||
|
_q.Unique(false)
|
||||||
|
}
|
||||||
|
_q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
|
||||||
|
s.ForUpdate(opts...)
|
||||||
|
})
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// ForShare behaves similarly to ForUpdate, except that it acquires a shared mode lock
|
||||||
|
// on any rows that are read. Other sessions can read the rows, but cannot modify them
|
||||||
|
// until your transaction commits.
|
||||||
|
func (_q *SoraUsageStatQuery) ForShare(opts ...sql.LockOption) *SoraUsageStatQuery {
|
||||||
|
if _q.driver.Dialect() == dialect.Postgres {
|
||||||
|
_q.Unique(false)
|
||||||
|
}
|
||||||
|
_q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
|
||||||
|
s.ForShare(opts...)
|
||||||
|
})
|
||||||
|
return _q
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraUsageStatGroupBy is the group-by builder for SoraUsageStat entities.
|
||||||
|
type SoraUsageStatGroupBy struct {
|
||||||
|
selector
|
||||||
|
build *SoraUsageStatQuery
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate adds the given aggregation functions to the group-by query.
|
||||||
|
func (_g *SoraUsageStatGroupBy) Aggregate(fns ...AggregateFunc) *SoraUsageStatGroupBy {
|
||||||
|
_g.fns = append(_g.fns, fns...)
|
||||||
|
return _g
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan applies the selector query and scans the result into the given value.
|
||||||
|
func (_g *SoraUsageStatGroupBy) Scan(ctx context.Context, v any) error {
|
||||||
|
ctx = setContextOp(ctx, _g.build.ctx, ent.OpQueryGroupBy)
|
||||||
|
if err := _g.build.prepareQuery(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return scanWithInterceptors[*SoraUsageStatQuery, *SoraUsageStatGroupBy](ctx, _g.build, _g, _g.build.inters, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_g *SoraUsageStatGroupBy) sqlScan(ctx context.Context, root *SoraUsageStatQuery, v any) error {
|
||||||
|
selector := root.sqlQuery(ctx).Select()
|
||||||
|
aggregation := make([]string, 0, len(_g.fns))
|
||||||
|
for _, fn := range _g.fns {
|
||||||
|
aggregation = append(aggregation, fn(selector))
|
||||||
|
}
|
||||||
|
if len(selector.SelectedColumns()) == 0 {
|
||||||
|
columns := make([]string, 0, len(*_g.flds)+len(_g.fns))
|
||||||
|
for _, f := range *_g.flds {
|
||||||
|
columns = append(columns, selector.C(f))
|
||||||
|
}
|
||||||
|
columns = append(columns, aggregation...)
|
||||||
|
selector.Select(columns...)
|
||||||
|
}
|
||||||
|
selector.GroupBy(selector.Columns(*_g.flds...)...)
|
||||||
|
if err := selector.Err(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
rows := &sql.Rows{}
|
||||||
|
query, args := selector.Query()
|
||||||
|
if err := _g.build.driver.Query(ctx, query, args, rows); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
return sql.ScanSlice(rows, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraUsageStatSelect is the builder for selecting fields of SoraUsageStat entities.
|
||||||
|
type SoraUsageStatSelect struct {
|
||||||
|
*SoraUsageStatQuery
|
||||||
|
selector
|
||||||
|
}
|
||||||
|
|
||||||
|
// Aggregate adds the given aggregation functions to the selector query.
|
||||||
|
func (_s *SoraUsageStatSelect) Aggregate(fns ...AggregateFunc) *SoraUsageStatSelect {
|
||||||
|
_s.fns = append(_s.fns, fns...)
|
||||||
|
return _s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan applies the selector query and scans the result into the given value.
|
||||||
|
func (_s *SoraUsageStatSelect) Scan(ctx context.Context, v any) error {
|
||||||
|
ctx = setContextOp(ctx, _s.ctx, ent.OpQuerySelect)
|
||||||
|
if err := _s.prepareQuery(ctx); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return scanWithInterceptors[*SoraUsageStatQuery, *SoraUsageStatSelect](ctx, _s.SoraUsageStatQuery, _s, _s.inters, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_s *SoraUsageStatSelect) sqlScan(ctx context.Context, root *SoraUsageStatQuery, v any) error {
|
||||||
|
selector := root.sqlQuery(ctx)
|
||||||
|
aggregation := make([]string, 0, len(_s.fns))
|
||||||
|
for _, fn := range _s.fns {
|
||||||
|
aggregation = append(aggregation, fn(selector))
|
||||||
|
}
|
||||||
|
switch n := len(*_s.selector.flds); {
|
||||||
|
case n == 0 && len(aggregation) > 0:
|
||||||
|
selector.Select(aggregation...)
|
||||||
|
case n != 0 && len(aggregation) > 0:
|
||||||
|
selector.AppendSelect(aggregation...)
|
||||||
|
}
|
||||||
|
rows := &sql.Rows{}
|
||||||
|
query, args := selector.Query()
|
||||||
|
if err := _s.driver.Query(ctx, query, args, rows); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
return sql.ScanSlice(rows, v)
|
||||||
|
}
|
||||||
748
backend/ent/sorausagestat_update.go
Normal file
748
backend/ent/sorausagestat_update.go
Normal file
@@ -0,0 +1,748 @@
|
|||||||
|
// Code generated by ent, DO NOT EDIT.
|
||||||
|
|
||||||
|
package ent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"entgo.io/ent/dialect/sql"
|
||||||
|
"entgo.io/ent/dialect/sql/sqlgraph"
|
||||||
|
"entgo.io/ent/schema/field"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/predicate"
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent/sorausagestat"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraUsageStatUpdate is the builder for updating SoraUsageStat entities.
|
||||||
|
type SoraUsageStatUpdate struct {
|
||||||
|
config
|
||||||
|
hooks []Hook
|
||||||
|
mutation *SoraUsageStatMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraUsageStatUpdate builder.
|
||||||
|
func (_u *SoraUsageStatUpdate) Where(ps ...predicate.SoraUsageStat) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.Where(ps...)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetUpdatedAt sets the "updated_at" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetUpdatedAt(v time.Time) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.SetUpdatedAt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetAccountID sets the "account_id" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetAccountID(v int64) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ResetAccountID()
|
||||||
|
_u.mutation.SetAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableAccountID sets the "account_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableAccountID(v *int64) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetAccountID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddAccountID adds value to the "account_id" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) AddAccountID(v int64) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.AddAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetImageCount sets the "image_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetImageCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ResetImageCount()
|
||||||
|
_u.mutation.SetImageCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableImageCount sets the "image_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableImageCount(v *int) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetImageCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddImageCount adds value to the "image_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) AddImageCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.AddImageCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetVideoCount sets the "video_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetVideoCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ResetVideoCount()
|
||||||
|
_u.mutation.SetVideoCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableVideoCount sets the "video_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableVideoCount(v *int) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetVideoCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddVideoCount adds value to the "video_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) AddVideoCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.AddVideoCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetErrorCount sets the "error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetErrorCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ResetErrorCount()
|
||||||
|
_u.mutation.SetErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableErrorCount sets the "error_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableErrorCount(v *int) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetErrorCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddErrorCount adds value to the "error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) AddErrorCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.AddErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetLastErrorAt sets the "last_error_at" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetLastErrorAt(v time.Time) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.SetLastErrorAt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableLastErrorAt sets the "last_error_at" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableLastErrorAt(v *time.Time) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetLastErrorAt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearLastErrorAt clears the value of the "last_error_at" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) ClearLastErrorAt() *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ClearLastErrorAt()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTodayImageCount sets the "today_image_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetTodayImageCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ResetTodayImageCount()
|
||||||
|
_u.mutation.SetTodayImageCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTodayImageCount sets the "today_image_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableTodayImageCount(v *int) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTodayImageCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTodayImageCount adds value to the "today_image_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) AddTodayImageCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.AddTodayImageCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTodayVideoCount sets the "today_video_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetTodayVideoCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ResetTodayVideoCount()
|
||||||
|
_u.mutation.SetTodayVideoCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTodayVideoCount sets the "today_video_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableTodayVideoCount(v *int) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTodayVideoCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTodayVideoCount adds value to the "today_video_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) AddTodayVideoCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.AddTodayVideoCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTodayErrorCount sets the "today_error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetTodayErrorCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ResetTodayErrorCount()
|
||||||
|
_u.mutation.SetTodayErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTodayErrorCount sets the "today_error_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableTodayErrorCount(v *int) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTodayErrorCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTodayErrorCount adds value to the "today_error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) AddTodayErrorCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.AddTodayErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTodayDate sets the "today_date" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetTodayDate(v time.Time) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.SetTodayDate(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTodayDate sets the "today_date" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableTodayDate(v *time.Time) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTodayDate(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearTodayDate clears the value of the "today_date" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) ClearTodayDate() *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ClearTodayDate()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetConsecutiveErrorCount sets the "consecutive_error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetConsecutiveErrorCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.ResetConsecutiveErrorCount()
|
||||||
|
_u.mutation.SetConsecutiveErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableConsecutiveErrorCount sets the "consecutive_error_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdate) SetNillableConsecutiveErrorCount(v *int) *SoraUsageStatUpdate {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetConsecutiveErrorCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddConsecutiveErrorCount adds value to the "consecutive_error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdate) AddConsecutiveErrorCount(v int) *SoraUsageStatUpdate {
|
||||||
|
_u.mutation.AddConsecutiveErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mutation returns the SoraUsageStatMutation object of the builder.
|
||||||
|
func (_u *SoraUsageStatUpdate) Mutation() *SoraUsageStatMutation {
|
||||||
|
return _u.mutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save executes the query and returns the number of nodes affected by the update operation.
|
||||||
|
func (_u *SoraUsageStatUpdate) Save(ctx context.Context) (int, error) {
|
||||||
|
_u.defaults()
|
||||||
|
return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SaveX is like Save, but panics if an error occurs.
|
||||||
|
func (_u *SoraUsageStatUpdate) SaveX(ctx context.Context) int {
|
||||||
|
affected, err := _u.Save(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return affected
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the query.
|
||||||
|
func (_u *SoraUsageStatUpdate) Exec(ctx context.Context) error {
|
||||||
|
_, err := _u.Save(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_u *SoraUsageStatUpdate) ExecX(ctx context.Context) {
|
||||||
|
if err := _u.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// defaults sets the default values of the builder before save.
|
||||||
|
func (_u *SoraUsageStatUpdate) defaults() {
|
||||||
|
if _, ok := _u.mutation.UpdatedAt(); !ok {
|
||||||
|
v := sorausagestat.UpdateDefaultUpdatedAt()
|
||||||
|
_u.mutation.SetUpdatedAt(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_u *SoraUsageStatUpdate) sqlSave(ctx context.Context) (_node int, err error) {
|
||||||
|
_spec := sqlgraph.NewUpdateSpec(sorausagestat.Table, sorausagestat.Columns, sqlgraph.NewFieldSpec(sorausagestat.FieldID, field.TypeInt64))
|
||||||
|
if ps := _u.mutation.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.UpdatedAt(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldUpdatedAt, field.TypeTime, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AccountID(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldAccountID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedAccountID(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldAccountID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.ImageCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldImageCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedImageCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldImageCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.VideoCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldVideoCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedVideoCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldVideoCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.ErrorCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedErrorCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.LastErrorAt(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldLastErrorAt, field.TypeTime, value)
|
||||||
|
}
|
||||||
|
if _u.mutation.LastErrorAtCleared() {
|
||||||
|
_spec.ClearField(sorausagestat.FieldLastErrorAt, field.TypeTime)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TodayImageCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldTodayImageCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedTodayImageCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldTodayImageCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TodayVideoCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldTodayVideoCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedTodayVideoCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldTodayVideoCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TodayErrorCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldTodayErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedTodayErrorCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldTodayErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TodayDate(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldTodayDate, field.TypeTime, value)
|
||||||
|
}
|
||||||
|
if _u.mutation.TodayDateCleared() {
|
||||||
|
_spec.ClearField(sorausagestat.FieldTodayDate, field.TypeTime)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.ConsecutiveErrorCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldConsecutiveErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedConsecutiveErrorCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldConsecutiveErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if _node, err = sqlgraph.UpdateNodes(ctx, _u.driver, _spec); err != nil {
|
||||||
|
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||||
|
err = &NotFoundError{sorausagestat.Label}
|
||||||
|
} else if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
_u.mutation.done = true
|
||||||
|
return _node, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraUsageStatUpdateOne is the builder for updating a single SoraUsageStat entity.
|
||||||
|
type SoraUsageStatUpdateOne struct {
|
||||||
|
config
|
||||||
|
fields []string
|
||||||
|
hooks []Hook
|
||||||
|
mutation *SoraUsageStatMutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetUpdatedAt sets the "updated_at" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetUpdatedAt(v time.Time) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.SetUpdatedAt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetAccountID sets the "account_id" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetAccountID(v int64) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ResetAccountID()
|
||||||
|
_u.mutation.SetAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableAccountID sets the "account_id" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableAccountID(v *int64) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetAccountID(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddAccountID adds value to the "account_id" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) AddAccountID(v int64) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.AddAccountID(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetImageCount sets the "image_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetImageCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ResetImageCount()
|
||||||
|
_u.mutation.SetImageCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableImageCount sets the "image_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableImageCount(v *int) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetImageCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddImageCount adds value to the "image_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) AddImageCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.AddImageCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetVideoCount sets the "video_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetVideoCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ResetVideoCount()
|
||||||
|
_u.mutation.SetVideoCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableVideoCount sets the "video_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableVideoCount(v *int) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetVideoCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddVideoCount adds value to the "video_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) AddVideoCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.AddVideoCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetErrorCount sets the "error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetErrorCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ResetErrorCount()
|
||||||
|
_u.mutation.SetErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableErrorCount sets the "error_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableErrorCount(v *int) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetErrorCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddErrorCount adds value to the "error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) AddErrorCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.AddErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetLastErrorAt sets the "last_error_at" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetLastErrorAt(v time.Time) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.SetLastErrorAt(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableLastErrorAt sets the "last_error_at" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableLastErrorAt(v *time.Time) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetLastErrorAt(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearLastErrorAt clears the value of the "last_error_at" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) ClearLastErrorAt() *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ClearLastErrorAt()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTodayImageCount sets the "today_image_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetTodayImageCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ResetTodayImageCount()
|
||||||
|
_u.mutation.SetTodayImageCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTodayImageCount sets the "today_image_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableTodayImageCount(v *int) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTodayImageCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTodayImageCount adds value to the "today_image_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) AddTodayImageCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.AddTodayImageCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTodayVideoCount sets the "today_video_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetTodayVideoCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ResetTodayVideoCount()
|
||||||
|
_u.mutation.SetTodayVideoCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTodayVideoCount sets the "today_video_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableTodayVideoCount(v *int) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTodayVideoCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTodayVideoCount adds value to the "today_video_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) AddTodayVideoCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.AddTodayVideoCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTodayErrorCount sets the "today_error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetTodayErrorCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ResetTodayErrorCount()
|
||||||
|
_u.mutation.SetTodayErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTodayErrorCount sets the "today_error_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableTodayErrorCount(v *int) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTodayErrorCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddTodayErrorCount adds value to the "today_error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) AddTodayErrorCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.AddTodayErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetTodayDate sets the "today_date" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetTodayDate(v time.Time) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.SetTodayDate(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableTodayDate sets the "today_date" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableTodayDate(v *time.Time) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetTodayDate(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearTodayDate clears the value of the "today_date" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) ClearTodayDate() *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ClearTodayDate()
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetConsecutiveErrorCount sets the "consecutive_error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetConsecutiveErrorCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.ResetConsecutiveErrorCount()
|
||||||
|
_u.mutation.SetConsecutiveErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetNillableConsecutiveErrorCount sets the "consecutive_error_count" field if the given value is not nil.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SetNillableConsecutiveErrorCount(v *int) *SoraUsageStatUpdateOne {
|
||||||
|
if v != nil {
|
||||||
|
_u.SetConsecutiveErrorCount(*v)
|
||||||
|
}
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddConsecutiveErrorCount adds value to the "consecutive_error_count" field.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) AddConsecutiveErrorCount(v int) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.AddConsecutiveErrorCount(v)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Mutation returns the SoraUsageStatMutation object of the builder.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) Mutation() *SoraUsageStatMutation {
|
||||||
|
return _u.mutation
|
||||||
|
}
|
||||||
|
|
||||||
|
// Where appends a list predicates to the SoraUsageStatUpdate builder.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) Where(ps ...predicate.SoraUsageStat) *SoraUsageStatUpdateOne {
|
||||||
|
_u.mutation.Where(ps...)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Select allows selecting one or more fields (columns) of the returned entity.
|
||||||
|
// The default is selecting all fields defined in the entity schema.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) Select(field string, fields ...string) *SoraUsageStatUpdateOne {
|
||||||
|
_u.fields = append([]string{field}, fields...)
|
||||||
|
return _u
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save executes the query and returns the updated SoraUsageStat entity.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) Save(ctx context.Context) (*SoraUsageStat, error) {
|
||||||
|
_u.defaults()
|
||||||
|
return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
|
||||||
|
}
|
||||||
|
|
||||||
|
// SaveX is like Save, but panics if an error occurs.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) SaveX(ctx context.Context) *SoraUsageStat {
|
||||||
|
node, err := _u.Save(ctx)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
// Exec executes the query on the entity.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) Exec(ctx context.Context) error {
|
||||||
|
_, err := _u.Save(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExecX is like Exec, but panics if an error occurs.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) ExecX(ctx context.Context) {
|
||||||
|
if err := _u.Exec(ctx); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// defaults sets the default values of the builder before save.
|
||||||
|
func (_u *SoraUsageStatUpdateOne) defaults() {
|
||||||
|
if _, ok := _u.mutation.UpdatedAt(); !ok {
|
||||||
|
v := sorausagestat.UpdateDefaultUpdatedAt()
|
||||||
|
_u.mutation.SetUpdatedAt(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_u *SoraUsageStatUpdateOne) sqlSave(ctx context.Context) (_node *SoraUsageStat, err error) {
|
||||||
|
_spec := sqlgraph.NewUpdateSpec(sorausagestat.Table, sorausagestat.Columns, sqlgraph.NewFieldSpec(sorausagestat.FieldID, field.TypeInt64))
|
||||||
|
id, ok := _u.mutation.ID()
|
||||||
|
if !ok {
|
||||||
|
return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "SoraUsageStat.id" for update`)}
|
||||||
|
}
|
||||||
|
_spec.Node.ID.Value = id
|
||||||
|
if fields := _u.fields; len(fields) > 0 {
|
||||||
|
_spec.Node.Columns = make([]string, 0, len(fields))
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, sorausagestat.FieldID)
|
||||||
|
for _, f := range fields {
|
||||||
|
if !sorausagestat.ValidColumn(f) {
|
||||||
|
return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
|
||||||
|
}
|
||||||
|
if f != sorausagestat.FieldID {
|
||||||
|
_spec.Node.Columns = append(_spec.Node.Columns, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ps := _u.mutation.predicates; len(ps) > 0 {
|
||||||
|
_spec.Predicate = func(selector *sql.Selector) {
|
||||||
|
for i := range ps {
|
||||||
|
ps[i](selector)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.UpdatedAt(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldUpdatedAt, field.TypeTime, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AccountID(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldAccountID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedAccountID(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldAccountID, field.TypeInt64, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.ImageCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldImageCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedImageCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldImageCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.VideoCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldVideoCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedVideoCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldVideoCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.ErrorCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedErrorCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.LastErrorAt(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldLastErrorAt, field.TypeTime, value)
|
||||||
|
}
|
||||||
|
if _u.mutation.LastErrorAtCleared() {
|
||||||
|
_spec.ClearField(sorausagestat.FieldLastErrorAt, field.TypeTime)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TodayImageCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldTodayImageCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedTodayImageCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldTodayImageCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TodayVideoCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldTodayVideoCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedTodayVideoCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldTodayVideoCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TodayErrorCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldTodayErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedTodayErrorCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldTodayErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.TodayDate(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldTodayDate, field.TypeTime, value)
|
||||||
|
}
|
||||||
|
if _u.mutation.TodayDateCleared() {
|
||||||
|
_spec.ClearField(sorausagestat.FieldTodayDate, field.TypeTime)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.ConsecutiveErrorCount(); ok {
|
||||||
|
_spec.SetField(sorausagestat.FieldConsecutiveErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
if value, ok := _u.mutation.AddedConsecutiveErrorCount(); ok {
|
||||||
|
_spec.AddField(sorausagestat.FieldConsecutiveErrorCount, field.TypeInt, value)
|
||||||
|
}
|
||||||
|
_node = &SoraUsageStat{config: _u.config}
|
||||||
|
_spec.Assign = _node.assignValues
|
||||||
|
_spec.ScanValues = _node.scanValues
|
||||||
|
if err = sqlgraph.UpdateNode(ctx, _u.driver, _spec); err != nil {
|
||||||
|
if _, ok := err.(*sqlgraph.NotFoundError); ok {
|
||||||
|
err = &NotFoundError{sorausagestat.Label}
|
||||||
|
} else if sqlgraph.IsConstraintError(err) {
|
||||||
|
err = &ConstraintError{msg: err.Error(), wrap: err}
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
_u.mutation.done = true
|
||||||
|
return _node, nil
|
||||||
|
}
|
||||||
@@ -32,6 +32,14 @@ type Tx struct {
|
|||||||
RedeemCode *RedeemCodeClient
|
RedeemCode *RedeemCodeClient
|
||||||
// Setting is the client for interacting with the Setting builders.
|
// Setting is the client for interacting with the Setting builders.
|
||||||
Setting *SettingClient
|
Setting *SettingClient
|
||||||
|
// SoraAccount is the client for interacting with the SoraAccount builders.
|
||||||
|
SoraAccount *SoraAccountClient
|
||||||
|
// SoraCacheFile is the client for interacting with the SoraCacheFile builders.
|
||||||
|
SoraCacheFile *SoraCacheFileClient
|
||||||
|
// SoraTask is the client for interacting with the SoraTask builders.
|
||||||
|
SoraTask *SoraTaskClient
|
||||||
|
// SoraUsageStat is the client for interacting with the SoraUsageStat builders.
|
||||||
|
SoraUsageStat *SoraUsageStatClient
|
||||||
// UsageCleanupTask is the client for interacting with the UsageCleanupTask builders.
|
// UsageCleanupTask is the client for interacting with the UsageCleanupTask builders.
|
||||||
UsageCleanupTask *UsageCleanupTaskClient
|
UsageCleanupTask *UsageCleanupTaskClient
|
||||||
// UsageLog is the client for interacting with the UsageLog builders.
|
// UsageLog is the client for interacting with the UsageLog builders.
|
||||||
@@ -186,6 +194,10 @@ func (tx *Tx) init() {
|
|||||||
tx.Proxy = NewProxyClient(tx.config)
|
tx.Proxy = NewProxyClient(tx.config)
|
||||||
tx.RedeemCode = NewRedeemCodeClient(tx.config)
|
tx.RedeemCode = NewRedeemCodeClient(tx.config)
|
||||||
tx.Setting = NewSettingClient(tx.config)
|
tx.Setting = NewSettingClient(tx.config)
|
||||||
|
tx.SoraAccount = NewSoraAccountClient(tx.config)
|
||||||
|
tx.SoraCacheFile = NewSoraCacheFileClient(tx.config)
|
||||||
|
tx.SoraTask = NewSoraTaskClient(tx.config)
|
||||||
|
tx.SoraUsageStat = NewSoraUsageStatClient(tx.config)
|
||||||
tx.UsageCleanupTask = NewUsageCleanupTaskClient(tx.config)
|
tx.UsageCleanupTask = NewUsageCleanupTaskClient(tx.config)
|
||||||
tx.UsageLog = NewUsageLogClient(tx.config)
|
tx.UsageLog = NewUsageLogClient(tx.config)
|
||||||
tx.User = NewUserClient(tx.config)
|
tx.User = NewUserClient(tx.config)
|
||||||
|
|||||||
@@ -58,6 +58,7 @@ type Config struct {
|
|||||||
UsageCleanup UsageCleanupConfig `mapstructure:"usage_cleanup"`
|
UsageCleanup UsageCleanupConfig `mapstructure:"usage_cleanup"`
|
||||||
Concurrency ConcurrencyConfig `mapstructure:"concurrency"`
|
Concurrency ConcurrencyConfig `mapstructure:"concurrency"`
|
||||||
TokenRefresh TokenRefreshConfig `mapstructure:"token_refresh"`
|
TokenRefresh TokenRefreshConfig `mapstructure:"token_refresh"`
|
||||||
|
Sora SoraConfig `mapstructure:"sora"`
|
||||||
RunMode string `mapstructure:"run_mode" yaml:"run_mode"`
|
RunMode string `mapstructure:"run_mode" yaml:"run_mode"`
|
||||||
Timezone string `mapstructure:"timezone"` // e.g. "Asia/Shanghai", "UTC"
|
Timezone string `mapstructure:"timezone"` // e.g. "Asia/Shanghai", "UTC"
|
||||||
Gemini GeminiConfig `mapstructure:"gemini"`
|
Gemini GeminiConfig `mapstructure:"gemini"`
|
||||||
@@ -69,6 +70,38 @@ type GeminiConfig struct {
|
|||||||
Quota GeminiQuotaConfig `mapstructure:"quota"`
|
Quota GeminiQuotaConfig `mapstructure:"quota"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type SoraConfig struct {
|
||||||
|
BaseURL string `mapstructure:"base_url"`
|
||||||
|
Timeout int `mapstructure:"timeout"`
|
||||||
|
MaxRetries int `mapstructure:"max_retries"`
|
||||||
|
PollInterval float64 `mapstructure:"poll_interval"`
|
||||||
|
CallLogicMode string `mapstructure:"call_logic_mode"`
|
||||||
|
Cache SoraCacheConfig `mapstructure:"cache"`
|
||||||
|
WatermarkFree SoraWatermarkFreeConfig `mapstructure:"watermark_free"`
|
||||||
|
TokenRefresh SoraTokenRefreshConfig `mapstructure:"token_refresh"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SoraCacheConfig struct {
|
||||||
|
Enabled bool `mapstructure:"enabled"`
|
||||||
|
BaseDir string `mapstructure:"base_dir"`
|
||||||
|
VideoDir string `mapstructure:"video_dir"`
|
||||||
|
MaxBytes int64 `mapstructure:"max_bytes"`
|
||||||
|
AllowedHosts []string `mapstructure:"allowed_hosts"`
|
||||||
|
UserDirEnabled bool `mapstructure:"user_dir_enabled"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SoraWatermarkFreeConfig struct {
|
||||||
|
Enabled bool `mapstructure:"enabled"`
|
||||||
|
ParseMethod string `mapstructure:"parse_method"`
|
||||||
|
CustomParseURL string `mapstructure:"custom_parse_url"`
|
||||||
|
CustomParseToken string `mapstructure:"custom_parse_token"`
|
||||||
|
FallbackOnFailure bool `mapstructure:"fallback_on_failure"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type SoraTokenRefreshConfig struct {
|
||||||
|
Enabled bool `mapstructure:"enabled"`
|
||||||
|
}
|
||||||
|
|
||||||
type GeminiOAuthConfig struct {
|
type GeminiOAuthConfig struct {
|
||||||
ClientID string `mapstructure:"client_id"`
|
ClientID string `mapstructure:"client_id"`
|
||||||
ClientSecret string `mapstructure:"client_secret"`
|
ClientSecret string `mapstructure:"client_secret"`
|
||||||
@@ -862,6 +895,24 @@ func setDefaults() {
|
|||||||
viper.SetDefault("token_refresh.max_retries", 3) // 最多重试3次
|
viper.SetDefault("token_refresh.max_retries", 3) // 最多重试3次
|
||||||
viper.SetDefault("token_refresh.retry_backoff_seconds", 2) // 重试退避基础2秒
|
viper.SetDefault("token_refresh.retry_backoff_seconds", 2) // 重试退避基础2秒
|
||||||
|
|
||||||
|
viper.SetDefault("sora.base_url", "https://sora.chatgpt.com/backend")
|
||||||
|
viper.SetDefault("sora.timeout", 120)
|
||||||
|
viper.SetDefault("sora.max_retries", 3)
|
||||||
|
viper.SetDefault("sora.poll_interval", 2.5)
|
||||||
|
viper.SetDefault("sora.call_logic_mode", "default")
|
||||||
|
viper.SetDefault("sora.cache.enabled", false)
|
||||||
|
viper.SetDefault("sora.cache.base_dir", "tmp/sora")
|
||||||
|
viper.SetDefault("sora.cache.video_dir", "data/video")
|
||||||
|
viper.SetDefault("sora.cache.max_bytes", int64(0))
|
||||||
|
viper.SetDefault("sora.cache.allowed_hosts", []string{})
|
||||||
|
viper.SetDefault("sora.cache.user_dir_enabled", true)
|
||||||
|
viper.SetDefault("sora.watermark_free.enabled", false)
|
||||||
|
viper.SetDefault("sora.watermark_free.parse_method", "third_party")
|
||||||
|
viper.SetDefault("sora.watermark_free.custom_parse_url", "")
|
||||||
|
viper.SetDefault("sora.watermark_free.custom_parse_token", "")
|
||||||
|
viper.SetDefault("sora.watermark_free.fallback_on_failure", true)
|
||||||
|
viper.SetDefault("sora.token_refresh.enabled", false)
|
||||||
|
|
||||||
// Gemini OAuth - configure via environment variables or config file
|
// Gemini OAuth - configure via environment variables or config file
|
||||||
// GEMINI_OAUTH_CLIENT_ID and GEMINI_OAUTH_CLIENT_SECRET
|
// GEMINI_OAUTH_CLIENT_ID and GEMINI_OAUTH_CLIENT_SECRET
|
||||||
// Default: uses Gemini CLI public credentials (set via environment)
|
// Default: uses Gemini CLI public credentials (set via environment)
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ func NewGroupHandler(adminService service.AdminService) *GroupHandler {
|
|||||||
type CreateGroupRequest struct {
|
type CreateGroupRequest struct {
|
||||||
Name string `json:"name" binding:"required"`
|
Name string `json:"name" binding:"required"`
|
||||||
Description string `json:"description"`
|
Description string `json:"description"`
|
||||||
Platform string `json:"platform" binding:"omitempty,oneof=anthropic openai gemini antigravity"`
|
Platform string `json:"platform" binding:"omitempty,oneof=anthropic openai gemini antigravity sora"`
|
||||||
RateMultiplier float64 `json:"rate_multiplier"`
|
RateMultiplier float64 `json:"rate_multiplier"`
|
||||||
IsExclusive bool `json:"is_exclusive"`
|
IsExclusive bool `json:"is_exclusive"`
|
||||||
SubscriptionType string `json:"subscription_type" binding:"omitempty,oneof=standard subscription"`
|
SubscriptionType string `json:"subscription_type" binding:"omitempty,oneof=standard subscription"`
|
||||||
@@ -49,7 +49,7 @@ type CreateGroupRequest struct {
|
|||||||
type UpdateGroupRequest struct {
|
type UpdateGroupRequest struct {
|
||||||
Name string `json:"name"`
|
Name string `json:"name"`
|
||||||
Description string `json:"description"`
|
Description string `json:"description"`
|
||||||
Platform string `json:"platform" binding:"omitempty,oneof=anthropic openai gemini antigravity"`
|
Platform string `json:"platform" binding:"omitempty,oneof=anthropic openai gemini antigravity sora"`
|
||||||
RateMultiplier *float64 `json:"rate_multiplier"`
|
RateMultiplier *float64 `json:"rate_multiplier"`
|
||||||
IsExclusive *bool `json:"is_exclusive"`
|
IsExclusive *bool `json:"is_exclusive"`
|
||||||
Status string `json:"status" binding:"omitempty,oneof=active inactive"`
|
Status string `json:"status" binding:"omitempty,oneof=active inactive"`
|
||||||
|
|||||||
@@ -79,6 +79,23 @@ func (h *SettingHandler) GetSettings(c *gin.Context) {
|
|||||||
FallbackModelAntigravity: settings.FallbackModelAntigravity,
|
FallbackModelAntigravity: settings.FallbackModelAntigravity,
|
||||||
EnableIdentityPatch: settings.EnableIdentityPatch,
|
EnableIdentityPatch: settings.EnableIdentityPatch,
|
||||||
IdentityPatchPrompt: settings.IdentityPatchPrompt,
|
IdentityPatchPrompt: settings.IdentityPatchPrompt,
|
||||||
|
SoraBaseURL: settings.SoraBaseURL,
|
||||||
|
SoraTimeout: settings.SoraTimeout,
|
||||||
|
SoraMaxRetries: settings.SoraMaxRetries,
|
||||||
|
SoraPollInterval: settings.SoraPollInterval,
|
||||||
|
SoraCallLogicMode: settings.SoraCallLogicMode,
|
||||||
|
SoraCacheEnabled: settings.SoraCacheEnabled,
|
||||||
|
SoraCacheBaseDir: settings.SoraCacheBaseDir,
|
||||||
|
SoraCacheVideoDir: settings.SoraCacheVideoDir,
|
||||||
|
SoraCacheMaxBytes: settings.SoraCacheMaxBytes,
|
||||||
|
SoraCacheAllowedHosts: settings.SoraCacheAllowedHosts,
|
||||||
|
SoraCacheUserDirEnabled: settings.SoraCacheUserDirEnabled,
|
||||||
|
SoraWatermarkFreeEnabled: settings.SoraWatermarkFreeEnabled,
|
||||||
|
SoraWatermarkFreeParseMethod: settings.SoraWatermarkFreeParseMethod,
|
||||||
|
SoraWatermarkFreeCustomParseURL: settings.SoraWatermarkFreeCustomParseURL,
|
||||||
|
SoraWatermarkFreeCustomParseToken: settings.SoraWatermarkFreeCustomParseToken,
|
||||||
|
SoraWatermarkFreeFallbackOnFailure: settings.SoraWatermarkFreeFallbackOnFailure,
|
||||||
|
SoraTokenRefreshEnabled: settings.SoraTokenRefreshEnabled,
|
||||||
OpsMonitoringEnabled: opsEnabled && settings.OpsMonitoringEnabled,
|
OpsMonitoringEnabled: opsEnabled && settings.OpsMonitoringEnabled,
|
||||||
OpsRealtimeMonitoringEnabled: settings.OpsRealtimeMonitoringEnabled,
|
OpsRealtimeMonitoringEnabled: settings.OpsRealtimeMonitoringEnabled,
|
||||||
OpsQueryModeDefault: settings.OpsQueryModeDefault,
|
OpsQueryModeDefault: settings.OpsQueryModeDefault,
|
||||||
@@ -138,6 +155,25 @@ type UpdateSettingsRequest struct {
|
|||||||
EnableIdentityPatch bool `json:"enable_identity_patch"`
|
EnableIdentityPatch bool `json:"enable_identity_patch"`
|
||||||
IdentityPatchPrompt string `json:"identity_patch_prompt"`
|
IdentityPatchPrompt string `json:"identity_patch_prompt"`
|
||||||
|
|
||||||
|
// Sora configuration
|
||||||
|
SoraBaseURL string `json:"sora_base_url"`
|
||||||
|
SoraTimeout int `json:"sora_timeout"`
|
||||||
|
SoraMaxRetries int `json:"sora_max_retries"`
|
||||||
|
SoraPollInterval float64 `json:"sora_poll_interval"`
|
||||||
|
SoraCallLogicMode string `json:"sora_call_logic_mode"`
|
||||||
|
SoraCacheEnabled bool `json:"sora_cache_enabled"`
|
||||||
|
SoraCacheBaseDir string `json:"sora_cache_base_dir"`
|
||||||
|
SoraCacheVideoDir string `json:"sora_cache_video_dir"`
|
||||||
|
SoraCacheMaxBytes int64 `json:"sora_cache_max_bytes"`
|
||||||
|
SoraCacheAllowedHosts []string `json:"sora_cache_allowed_hosts"`
|
||||||
|
SoraCacheUserDirEnabled bool `json:"sora_cache_user_dir_enabled"`
|
||||||
|
SoraWatermarkFreeEnabled bool `json:"sora_watermark_free_enabled"`
|
||||||
|
SoraWatermarkFreeParseMethod string `json:"sora_watermark_free_parse_method"`
|
||||||
|
SoraWatermarkFreeCustomParseURL string `json:"sora_watermark_free_custom_parse_url"`
|
||||||
|
SoraWatermarkFreeCustomParseToken string `json:"sora_watermark_free_custom_parse_token"`
|
||||||
|
SoraWatermarkFreeFallbackOnFailure bool `json:"sora_watermark_free_fallback_on_failure"`
|
||||||
|
SoraTokenRefreshEnabled bool `json:"sora_token_refresh_enabled"`
|
||||||
|
|
||||||
// Ops monitoring (vNext)
|
// Ops monitoring (vNext)
|
||||||
OpsMonitoringEnabled *bool `json:"ops_monitoring_enabled"`
|
OpsMonitoringEnabled *bool `json:"ops_monitoring_enabled"`
|
||||||
OpsRealtimeMonitoringEnabled *bool `json:"ops_realtime_monitoring_enabled"`
|
OpsRealtimeMonitoringEnabled *bool `json:"ops_realtime_monitoring_enabled"`
|
||||||
@@ -227,6 +263,32 @@ func (h *SettingHandler) UpdateSettings(c *gin.Context) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Sora 参数校验与清理
|
||||||
|
req.SoraBaseURL = strings.TrimSpace(req.SoraBaseURL)
|
||||||
|
if req.SoraBaseURL == "" {
|
||||||
|
req.SoraBaseURL = previousSettings.SoraBaseURL
|
||||||
|
}
|
||||||
|
if req.SoraBaseURL != "" {
|
||||||
|
if err := config.ValidateAbsoluteHTTPURL(req.SoraBaseURL); err != nil {
|
||||||
|
response.BadRequest(c, "Sora Base URL must be an absolute http(s) URL")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if req.SoraTimeout <= 0 {
|
||||||
|
req.SoraTimeout = previousSettings.SoraTimeout
|
||||||
|
}
|
||||||
|
if req.SoraMaxRetries < 0 {
|
||||||
|
req.SoraMaxRetries = previousSettings.SoraMaxRetries
|
||||||
|
}
|
||||||
|
if req.SoraPollInterval <= 0 {
|
||||||
|
req.SoraPollInterval = previousSettings.SoraPollInterval
|
||||||
|
}
|
||||||
|
if req.SoraCacheMaxBytes < 0 {
|
||||||
|
req.SoraCacheMaxBytes = 0
|
||||||
|
}
|
||||||
|
req.SoraCacheAllowedHosts = normalizeStringList(req.SoraCacheAllowedHosts)
|
||||||
|
req.SoraWatermarkFreeCustomParseURL = strings.TrimSpace(req.SoraWatermarkFreeCustomParseURL)
|
||||||
|
|
||||||
// Ops metrics collector interval validation (seconds).
|
// Ops metrics collector interval validation (seconds).
|
||||||
if req.OpsMetricsIntervalSeconds != nil {
|
if req.OpsMetricsIntervalSeconds != nil {
|
||||||
v := *req.OpsMetricsIntervalSeconds
|
v := *req.OpsMetricsIntervalSeconds
|
||||||
@@ -240,40 +302,57 @@ func (h *SettingHandler) UpdateSettings(c *gin.Context) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
settings := &service.SystemSettings{
|
settings := &service.SystemSettings{
|
||||||
RegistrationEnabled: req.RegistrationEnabled,
|
RegistrationEnabled: req.RegistrationEnabled,
|
||||||
EmailVerifyEnabled: req.EmailVerifyEnabled,
|
EmailVerifyEnabled: req.EmailVerifyEnabled,
|
||||||
PromoCodeEnabled: req.PromoCodeEnabled,
|
PromoCodeEnabled: req.PromoCodeEnabled,
|
||||||
SMTPHost: req.SMTPHost,
|
SMTPHost: req.SMTPHost,
|
||||||
SMTPPort: req.SMTPPort,
|
SMTPPort: req.SMTPPort,
|
||||||
SMTPUsername: req.SMTPUsername,
|
SMTPUsername: req.SMTPUsername,
|
||||||
SMTPPassword: req.SMTPPassword,
|
SMTPPassword: req.SMTPPassword,
|
||||||
SMTPFrom: req.SMTPFrom,
|
SMTPFrom: req.SMTPFrom,
|
||||||
SMTPFromName: req.SMTPFromName,
|
SMTPFromName: req.SMTPFromName,
|
||||||
SMTPUseTLS: req.SMTPUseTLS,
|
SMTPUseTLS: req.SMTPUseTLS,
|
||||||
TurnstileEnabled: req.TurnstileEnabled,
|
TurnstileEnabled: req.TurnstileEnabled,
|
||||||
TurnstileSiteKey: req.TurnstileSiteKey,
|
TurnstileSiteKey: req.TurnstileSiteKey,
|
||||||
TurnstileSecretKey: req.TurnstileSecretKey,
|
TurnstileSecretKey: req.TurnstileSecretKey,
|
||||||
LinuxDoConnectEnabled: req.LinuxDoConnectEnabled,
|
LinuxDoConnectEnabled: req.LinuxDoConnectEnabled,
|
||||||
LinuxDoConnectClientID: req.LinuxDoConnectClientID,
|
LinuxDoConnectClientID: req.LinuxDoConnectClientID,
|
||||||
LinuxDoConnectClientSecret: req.LinuxDoConnectClientSecret,
|
LinuxDoConnectClientSecret: req.LinuxDoConnectClientSecret,
|
||||||
LinuxDoConnectRedirectURL: req.LinuxDoConnectRedirectURL,
|
LinuxDoConnectRedirectURL: req.LinuxDoConnectRedirectURL,
|
||||||
SiteName: req.SiteName,
|
SiteName: req.SiteName,
|
||||||
SiteLogo: req.SiteLogo,
|
SiteLogo: req.SiteLogo,
|
||||||
SiteSubtitle: req.SiteSubtitle,
|
SiteSubtitle: req.SiteSubtitle,
|
||||||
APIBaseURL: req.APIBaseURL,
|
APIBaseURL: req.APIBaseURL,
|
||||||
ContactInfo: req.ContactInfo,
|
ContactInfo: req.ContactInfo,
|
||||||
DocURL: req.DocURL,
|
DocURL: req.DocURL,
|
||||||
HomeContent: req.HomeContent,
|
HomeContent: req.HomeContent,
|
||||||
HideCcsImportButton: req.HideCcsImportButton,
|
HideCcsImportButton: req.HideCcsImportButton,
|
||||||
DefaultConcurrency: req.DefaultConcurrency,
|
DefaultConcurrency: req.DefaultConcurrency,
|
||||||
DefaultBalance: req.DefaultBalance,
|
DefaultBalance: req.DefaultBalance,
|
||||||
EnableModelFallback: req.EnableModelFallback,
|
EnableModelFallback: req.EnableModelFallback,
|
||||||
FallbackModelAnthropic: req.FallbackModelAnthropic,
|
FallbackModelAnthropic: req.FallbackModelAnthropic,
|
||||||
FallbackModelOpenAI: req.FallbackModelOpenAI,
|
FallbackModelOpenAI: req.FallbackModelOpenAI,
|
||||||
FallbackModelGemini: req.FallbackModelGemini,
|
FallbackModelGemini: req.FallbackModelGemini,
|
||||||
FallbackModelAntigravity: req.FallbackModelAntigravity,
|
FallbackModelAntigravity: req.FallbackModelAntigravity,
|
||||||
EnableIdentityPatch: req.EnableIdentityPatch,
|
EnableIdentityPatch: req.EnableIdentityPatch,
|
||||||
IdentityPatchPrompt: req.IdentityPatchPrompt,
|
IdentityPatchPrompt: req.IdentityPatchPrompt,
|
||||||
|
SoraBaseURL: req.SoraBaseURL,
|
||||||
|
SoraTimeout: req.SoraTimeout,
|
||||||
|
SoraMaxRetries: req.SoraMaxRetries,
|
||||||
|
SoraPollInterval: req.SoraPollInterval,
|
||||||
|
SoraCallLogicMode: req.SoraCallLogicMode,
|
||||||
|
SoraCacheEnabled: req.SoraCacheEnabled,
|
||||||
|
SoraCacheBaseDir: req.SoraCacheBaseDir,
|
||||||
|
SoraCacheVideoDir: req.SoraCacheVideoDir,
|
||||||
|
SoraCacheMaxBytes: req.SoraCacheMaxBytes,
|
||||||
|
SoraCacheAllowedHosts: req.SoraCacheAllowedHosts,
|
||||||
|
SoraCacheUserDirEnabled: req.SoraCacheUserDirEnabled,
|
||||||
|
SoraWatermarkFreeEnabled: req.SoraWatermarkFreeEnabled,
|
||||||
|
SoraWatermarkFreeParseMethod: req.SoraWatermarkFreeParseMethod,
|
||||||
|
SoraWatermarkFreeCustomParseURL: req.SoraWatermarkFreeCustomParseURL,
|
||||||
|
SoraWatermarkFreeCustomParseToken: req.SoraWatermarkFreeCustomParseToken,
|
||||||
|
SoraWatermarkFreeFallbackOnFailure: req.SoraWatermarkFreeFallbackOnFailure,
|
||||||
|
SoraTokenRefreshEnabled: req.SoraTokenRefreshEnabled,
|
||||||
OpsMonitoringEnabled: func() bool {
|
OpsMonitoringEnabled: func() bool {
|
||||||
if req.OpsMonitoringEnabled != nil {
|
if req.OpsMonitoringEnabled != nil {
|
||||||
return *req.OpsMonitoringEnabled
|
return *req.OpsMonitoringEnabled
|
||||||
@@ -349,6 +428,23 @@ func (h *SettingHandler) UpdateSettings(c *gin.Context) {
|
|||||||
FallbackModelAntigravity: updatedSettings.FallbackModelAntigravity,
|
FallbackModelAntigravity: updatedSettings.FallbackModelAntigravity,
|
||||||
EnableIdentityPatch: updatedSettings.EnableIdentityPatch,
|
EnableIdentityPatch: updatedSettings.EnableIdentityPatch,
|
||||||
IdentityPatchPrompt: updatedSettings.IdentityPatchPrompt,
|
IdentityPatchPrompt: updatedSettings.IdentityPatchPrompt,
|
||||||
|
SoraBaseURL: updatedSettings.SoraBaseURL,
|
||||||
|
SoraTimeout: updatedSettings.SoraTimeout,
|
||||||
|
SoraMaxRetries: updatedSettings.SoraMaxRetries,
|
||||||
|
SoraPollInterval: updatedSettings.SoraPollInterval,
|
||||||
|
SoraCallLogicMode: updatedSettings.SoraCallLogicMode,
|
||||||
|
SoraCacheEnabled: updatedSettings.SoraCacheEnabled,
|
||||||
|
SoraCacheBaseDir: updatedSettings.SoraCacheBaseDir,
|
||||||
|
SoraCacheVideoDir: updatedSettings.SoraCacheVideoDir,
|
||||||
|
SoraCacheMaxBytes: updatedSettings.SoraCacheMaxBytes,
|
||||||
|
SoraCacheAllowedHosts: updatedSettings.SoraCacheAllowedHosts,
|
||||||
|
SoraCacheUserDirEnabled: updatedSettings.SoraCacheUserDirEnabled,
|
||||||
|
SoraWatermarkFreeEnabled: updatedSettings.SoraWatermarkFreeEnabled,
|
||||||
|
SoraWatermarkFreeParseMethod: updatedSettings.SoraWatermarkFreeParseMethod,
|
||||||
|
SoraWatermarkFreeCustomParseURL: updatedSettings.SoraWatermarkFreeCustomParseURL,
|
||||||
|
SoraWatermarkFreeCustomParseToken: updatedSettings.SoraWatermarkFreeCustomParseToken,
|
||||||
|
SoraWatermarkFreeFallbackOnFailure: updatedSettings.SoraWatermarkFreeFallbackOnFailure,
|
||||||
|
SoraTokenRefreshEnabled: updatedSettings.SoraTokenRefreshEnabled,
|
||||||
OpsMonitoringEnabled: updatedSettings.OpsMonitoringEnabled,
|
OpsMonitoringEnabled: updatedSettings.OpsMonitoringEnabled,
|
||||||
OpsRealtimeMonitoringEnabled: updatedSettings.OpsRealtimeMonitoringEnabled,
|
OpsRealtimeMonitoringEnabled: updatedSettings.OpsRealtimeMonitoringEnabled,
|
||||||
OpsQueryModeDefault: updatedSettings.OpsQueryModeDefault,
|
OpsQueryModeDefault: updatedSettings.OpsQueryModeDefault,
|
||||||
@@ -477,6 +573,57 @@ func diffSettings(before *service.SystemSettings, after *service.SystemSettings,
|
|||||||
if before.IdentityPatchPrompt != after.IdentityPatchPrompt {
|
if before.IdentityPatchPrompt != after.IdentityPatchPrompt {
|
||||||
changed = append(changed, "identity_patch_prompt")
|
changed = append(changed, "identity_patch_prompt")
|
||||||
}
|
}
|
||||||
|
if before.SoraBaseURL != after.SoraBaseURL {
|
||||||
|
changed = append(changed, "sora_base_url")
|
||||||
|
}
|
||||||
|
if before.SoraTimeout != after.SoraTimeout {
|
||||||
|
changed = append(changed, "sora_timeout")
|
||||||
|
}
|
||||||
|
if before.SoraMaxRetries != after.SoraMaxRetries {
|
||||||
|
changed = append(changed, "sora_max_retries")
|
||||||
|
}
|
||||||
|
if before.SoraPollInterval != after.SoraPollInterval {
|
||||||
|
changed = append(changed, "sora_poll_interval")
|
||||||
|
}
|
||||||
|
if before.SoraCallLogicMode != after.SoraCallLogicMode {
|
||||||
|
changed = append(changed, "sora_call_logic_mode")
|
||||||
|
}
|
||||||
|
if before.SoraCacheEnabled != after.SoraCacheEnabled {
|
||||||
|
changed = append(changed, "sora_cache_enabled")
|
||||||
|
}
|
||||||
|
if before.SoraCacheBaseDir != after.SoraCacheBaseDir {
|
||||||
|
changed = append(changed, "sora_cache_base_dir")
|
||||||
|
}
|
||||||
|
if before.SoraCacheVideoDir != after.SoraCacheVideoDir {
|
||||||
|
changed = append(changed, "sora_cache_video_dir")
|
||||||
|
}
|
||||||
|
if before.SoraCacheMaxBytes != after.SoraCacheMaxBytes {
|
||||||
|
changed = append(changed, "sora_cache_max_bytes")
|
||||||
|
}
|
||||||
|
if strings.Join(before.SoraCacheAllowedHosts, ",") != strings.Join(after.SoraCacheAllowedHosts, ",") {
|
||||||
|
changed = append(changed, "sora_cache_allowed_hosts")
|
||||||
|
}
|
||||||
|
if before.SoraCacheUserDirEnabled != after.SoraCacheUserDirEnabled {
|
||||||
|
changed = append(changed, "sora_cache_user_dir_enabled")
|
||||||
|
}
|
||||||
|
if before.SoraWatermarkFreeEnabled != after.SoraWatermarkFreeEnabled {
|
||||||
|
changed = append(changed, "sora_watermark_free_enabled")
|
||||||
|
}
|
||||||
|
if before.SoraWatermarkFreeParseMethod != after.SoraWatermarkFreeParseMethod {
|
||||||
|
changed = append(changed, "sora_watermark_free_parse_method")
|
||||||
|
}
|
||||||
|
if before.SoraWatermarkFreeCustomParseURL != after.SoraWatermarkFreeCustomParseURL {
|
||||||
|
changed = append(changed, "sora_watermark_free_custom_parse_url")
|
||||||
|
}
|
||||||
|
if before.SoraWatermarkFreeCustomParseToken != after.SoraWatermarkFreeCustomParseToken {
|
||||||
|
changed = append(changed, "sora_watermark_free_custom_parse_token")
|
||||||
|
}
|
||||||
|
if before.SoraWatermarkFreeFallbackOnFailure != after.SoraWatermarkFreeFallbackOnFailure {
|
||||||
|
changed = append(changed, "sora_watermark_free_fallback_on_failure")
|
||||||
|
}
|
||||||
|
if before.SoraTokenRefreshEnabled != after.SoraTokenRefreshEnabled {
|
||||||
|
changed = append(changed, "sora_token_refresh_enabled")
|
||||||
|
}
|
||||||
if before.OpsMonitoringEnabled != after.OpsMonitoringEnabled {
|
if before.OpsMonitoringEnabled != after.OpsMonitoringEnabled {
|
||||||
changed = append(changed, "ops_monitoring_enabled")
|
changed = append(changed, "ops_monitoring_enabled")
|
||||||
}
|
}
|
||||||
@@ -492,6 +639,19 @@ func diffSettings(before *service.SystemSettings, after *service.SystemSettings,
|
|||||||
return changed
|
return changed
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func normalizeStringList(values []string) []string {
|
||||||
|
if len(values) == 0 {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
normalized := make([]string, 0, len(values))
|
||||||
|
for _, value := range values {
|
||||||
|
if trimmed := strings.TrimSpace(value); trimmed != "" {
|
||||||
|
normalized = append(normalized, trimmed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return normalized
|
||||||
|
}
|
||||||
|
|
||||||
// TestSMTPRequest 测试SMTP连接请求
|
// TestSMTPRequest 测试SMTP连接请求
|
||||||
type TestSMTPRequest struct {
|
type TestSMTPRequest struct {
|
||||||
SMTPHost string `json:"smtp_host" binding:"required"`
|
SMTPHost string `json:"smtp_host" binding:"required"`
|
||||||
|
|||||||
355
backend/internal/handler/admin/sora_account_handler.go
Normal file
355
backend/internal/handler/admin/sora_account_handler.go
Normal file
@@ -0,0 +1,355 @@
|
|||||||
|
package admin
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/handler/dto"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/pkg/pagination"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/pkg/response"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/service"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraAccountHandler Sora 账号扩展管理
|
||||||
|
// 提供 Sora 扩展表的查询与更新能力。
|
||||||
|
type SoraAccountHandler struct {
|
||||||
|
adminService service.AdminService
|
||||||
|
soraAccountRepo service.SoraAccountRepository
|
||||||
|
usageRepo service.SoraUsageStatRepository
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewSoraAccountHandler 创建 SoraAccountHandler
|
||||||
|
func NewSoraAccountHandler(adminService service.AdminService, soraAccountRepo service.SoraAccountRepository, usageRepo service.SoraUsageStatRepository) *SoraAccountHandler {
|
||||||
|
return &SoraAccountHandler{
|
||||||
|
adminService: adminService,
|
||||||
|
soraAccountRepo: soraAccountRepo,
|
||||||
|
usageRepo: usageRepo,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraAccountUpdateRequest 更新/创建 Sora 账号扩展请求
|
||||||
|
// 使用指针类型区分未提供与设置为空值。
|
||||||
|
type SoraAccountUpdateRequest struct {
|
||||||
|
AccessToken *string `json:"access_token"`
|
||||||
|
SessionToken *string `json:"session_token"`
|
||||||
|
RefreshToken *string `json:"refresh_token"`
|
||||||
|
ClientID *string `json:"client_id"`
|
||||||
|
Email *string `json:"email"`
|
||||||
|
Username *string `json:"username"`
|
||||||
|
Remark *string `json:"remark"`
|
||||||
|
UseCount *int `json:"use_count"`
|
||||||
|
PlanType *string `json:"plan_type"`
|
||||||
|
PlanTitle *string `json:"plan_title"`
|
||||||
|
SubscriptionEnd *int64 `json:"subscription_end"`
|
||||||
|
SoraSupported *bool `json:"sora_supported"`
|
||||||
|
SoraInviteCode *string `json:"sora_invite_code"`
|
||||||
|
SoraRedeemedCount *int `json:"sora_redeemed_count"`
|
||||||
|
SoraRemainingCount *int `json:"sora_remaining_count"`
|
||||||
|
SoraTotalCount *int `json:"sora_total_count"`
|
||||||
|
SoraCooldownUntil *int64 `json:"sora_cooldown_until"`
|
||||||
|
CooledUntil *int64 `json:"cooled_until"`
|
||||||
|
ImageEnabled *bool `json:"image_enabled"`
|
||||||
|
VideoEnabled *bool `json:"video_enabled"`
|
||||||
|
ImageConcurrency *int `json:"image_concurrency"`
|
||||||
|
VideoConcurrency *int `json:"video_concurrency"`
|
||||||
|
IsExpired *bool `json:"is_expired"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraAccountBatchRequest 批量导入请求
|
||||||
|
// accounts 支持批量 upsert。
|
||||||
|
type SoraAccountBatchRequest struct {
|
||||||
|
Accounts []SoraAccountBatchItem `json:"accounts"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraAccountBatchItem 批量导入条目
|
||||||
|
type SoraAccountBatchItem struct {
|
||||||
|
AccountID int64 `json:"account_id"`
|
||||||
|
SoraAccountUpdateRequest
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraAccountBatchResult 批量导入结果
|
||||||
|
// 仅返回成功/失败数量与明细。
|
||||||
|
type SoraAccountBatchResult struct {
|
||||||
|
Success int `json:"success"`
|
||||||
|
Failed int `json:"failed"`
|
||||||
|
Results []SoraAccountBatchItemResult `json:"results"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// SoraAccountBatchItemResult 批量导入单条结果
|
||||||
|
type SoraAccountBatchItemResult struct {
|
||||||
|
AccountID int64 `json:"account_id"`
|
||||||
|
Success bool `json:"success"`
|
||||||
|
Error string `json:"error,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// List 获取 Sora 账号扩展列表
|
||||||
|
// GET /api/v1/admin/sora/accounts
|
||||||
|
func (h *SoraAccountHandler) List(c *gin.Context) {
|
||||||
|
page, pageSize := response.ParsePagination(c)
|
||||||
|
search := strings.TrimSpace(c.Query("search"))
|
||||||
|
|
||||||
|
accounts, total, err := h.adminService.ListAccounts(c.Request.Context(), page, pageSize, service.PlatformSora, "", "", search)
|
||||||
|
if err != nil {
|
||||||
|
response.ErrorFrom(c, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
accountIDs := make([]int64, 0, len(accounts))
|
||||||
|
for i := range accounts {
|
||||||
|
accountIDs = append(accountIDs, accounts[i].ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
soraMap := map[int64]*service.SoraAccount{}
|
||||||
|
if h.soraAccountRepo != nil {
|
||||||
|
soraMap, _ = h.soraAccountRepo.GetByAccountIDs(c.Request.Context(), accountIDs)
|
||||||
|
}
|
||||||
|
|
||||||
|
usageMap := map[int64]*service.SoraUsageStat{}
|
||||||
|
if h.usageRepo != nil {
|
||||||
|
usageMap, _ = h.usageRepo.GetByAccountIDs(c.Request.Context(), accountIDs)
|
||||||
|
}
|
||||||
|
|
||||||
|
result := make([]dto.SoraAccount, 0, len(accounts))
|
||||||
|
for i := range accounts {
|
||||||
|
acc := accounts[i]
|
||||||
|
item := dto.SoraAccountFromService(&acc, soraMap[acc.ID], usageMap[acc.ID])
|
||||||
|
if item != nil {
|
||||||
|
result = append(result, *item)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Paginated(c, result, total, page, pageSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get 获取单个 Sora 账号扩展
|
||||||
|
// GET /api/v1/admin/sora/accounts/:id
|
||||||
|
func (h *SoraAccountHandler) Get(c *gin.Context) {
|
||||||
|
accountID, err := strconv.ParseInt(c.Param("id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
response.BadRequest(c, "账号 ID 无效")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
account, err := h.adminService.GetAccount(c.Request.Context(), accountID)
|
||||||
|
if err != nil {
|
||||||
|
response.ErrorFrom(c, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if account.Platform != service.PlatformSora {
|
||||||
|
response.BadRequest(c, "账号不是 Sora 平台")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var soraAcc *service.SoraAccount
|
||||||
|
if h.soraAccountRepo != nil {
|
||||||
|
soraAcc, _ = h.soraAccountRepo.GetByAccountID(c.Request.Context(), accountID)
|
||||||
|
}
|
||||||
|
var usage *service.SoraUsageStat
|
||||||
|
if h.usageRepo != nil {
|
||||||
|
usage, _ = h.usageRepo.GetByAccountID(c.Request.Context(), accountID)
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, dto.SoraAccountFromService(account, soraAcc, usage))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert 更新或创建 Sora 账号扩展
|
||||||
|
// PUT /api/v1/admin/sora/accounts/:id
|
||||||
|
func (h *SoraAccountHandler) Upsert(c *gin.Context) {
|
||||||
|
accountID, err := strconv.ParseInt(c.Param("id"), 10, 64)
|
||||||
|
if err != nil {
|
||||||
|
response.BadRequest(c, "账号 ID 无效")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var req SoraAccountUpdateRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.BadRequest(c, "请求参数无效: "+err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
account, err := h.adminService.GetAccount(c.Request.Context(), accountID)
|
||||||
|
if err != nil {
|
||||||
|
response.ErrorFrom(c, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if account.Platform != service.PlatformSora {
|
||||||
|
response.BadRequest(c, "账号不是 Sora 平台")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
updates := buildSoraAccountUpdates(&req)
|
||||||
|
if h.soraAccountRepo != nil && len(updates) > 0 {
|
||||||
|
if err := h.soraAccountRepo.Upsert(c.Request.Context(), accountID, updates); err != nil {
|
||||||
|
response.ErrorFrom(c, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var soraAcc *service.SoraAccount
|
||||||
|
if h.soraAccountRepo != nil {
|
||||||
|
soraAcc, _ = h.soraAccountRepo.GetByAccountID(c.Request.Context(), accountID)
|
||||||
|
}
|
||||||
|
var usage *service.SoraUsageStat
|
||||||
|
if h.usageRepo != nil {
|
||||||
|
usage, _ = h.usageRepo.GetByAccountID(c.Request.Context(), accountID)
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, dto.SoraAccountFromService(account, soraAcc, usage))
|
||||||
|
}
|
||||||
|
|
||||||
|
// BatchUpsert 批量导入 Sora 账号扩展
|
||||||
|
// POST /api/v1/admin/sora/accounts/import
|
||||||
|
func (h *SoraAccountHandler) BatchUpsert(c *gin.Context) {
|
||||||
|
var req SoraAccountBatchRequest
|
||||||
|
if err := c.ShouldBindJSON(&req); err != nil {
|
||||||
|
response.BadRequest(c, "请求参数无效: "+err.Error())
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(req.Accounts) == 0 {
|
||||||
|
response.BadRequest(c, "accounts 不能为空")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ids := make([]int64, 0, len(req.Accounts))
|
||||||
|
for _, item := range req.Accounts {
|
||||||
|
if item.AccountID > 0 {
|
||||||
|
ids = append(ids, item.AccountID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
accountMap := make(map[int64]*service.Account, len(ids))
|
||||||
|
if len(ids) > 0 {
|
||||||
|
accounts, _ := h.adminService.GetAccountsByIDs(c.Request.Context(), ids)
|
||||||
|
for _, acc := range accounts {
|
||||||
|
if acc != nil {
|
||||||
|
accountMap[acc.ID] = acc
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result := SoraAccountBatchResult{
|
||||||
|
Results: make([]SoraAccountBatchItemResult, 0, len(req.Accounts)),
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, item := range req.Accounts {
|
||||||
|
entry := SoraAccountBatchItemResult{AccountID: item.AccountID}
|
||||||
|
acc := accountMap[item.AccountID]
|
||||||
|
if acc == nil {
|
||||||
|
entry.Error = "账号不存在"
|
||||||
|
result.Results = append(result.Results, entry)
|
||||||
|
result.Failed++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if acc.Platform != service.PlatformSora {
|
||||||
|
entry.Error = "账号不是 Sora 平台"
|
||||||
|
result.Results = append(result.Results, entry)
|
||||||
|
result.Failed++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
updates := buildSoraAccountUpdates(&item.SoraAccountUpdateRequest)
|
||||||
|
if h.soraAccountRepo != nil && len(updates) > 0 {
|
||||||
|
if err := h.soraAccountRepo.Upsert(c.Request.Context(), item.AccountID, updates); err != nil {
|
||||||
|
entry.Error = err.Error()
|
||||||
|
result.Results = append(result.Results, entry)
|
||||||
|
result.Failed++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
entry.Success = true
|
||||||
|
result.Results = append(result.Results, entry)
|
||||||
|
result.Success++
|
||||||
|
}
|
||||||
|
|
||||||
|
response.Success(c, result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListUsage 获取 Sora 调用统计
|
||||||
|
// GET /api/v1/admin/sora/usage
|
||||||
|
func (h *SoraAccountHandler) ListUsage(c *gin.Context) {
|
||||||
|
page, pageSize := response.ParsePagination(c)
|
||||||
|
params := pagination.PaginationParams{Page: page, PageSize: pageSize}
|
||||||
|
if h.usageRepo == nil {
|
||||||
|
response.Paginated(c, []dto.SoraUsageStat{}, 0, page, pageSize)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
stats, paginationResult, err := h.usageRepo.List(c.Request.Context(), params)
|
||||||
|
if err != nil {
|
||||||
|
response.ErrorFrom(c, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
result := make([]dto.SoraUsageStat, 0, len(stats))
|
||||||
|
for _, stat := range stats {
|
||||||
|
item := dto.SoraUsageStatFromService(stat)
|
||||||
|
if item != nil {
|
||||||
|
result = append(result, *item)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
response.Paginated(c, result, paginationResult.Total, paginationResult.Page, paginationResult.PageSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildSoraAccountUpdates(req *SoraAccountUpdateRequest) map[string]any {
|
||||||
|
if req == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
updates := make(map[string]any)
|
||||||
|
setString := func(key string, value *string) {
|
||||||
|
if value == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
updates[key] = strings.TrimSpace(*value)
|
||||||
|
}
|
||||||
|
setString("access_token", req.AccessToken)
|
||||||
|
setString("session_token", req.SessionToken)
|
||||||
|
setString("refresh_token", req.RefreshToken)
|
||||||
|
setString("client_id", req.ClientID)
|
||||||
|
setString("email", req.Email)
|
||||||
|
setString("username", req.Username)
|
||||||
|
setString("remark", req.Remark)
|
||||||
|
setString("plan_type", req.PlanType)
|
||||||
|
setString("plan_title", req.PlanTitle)
|
||||||
|
setString("sora_invite_code", req.SoraInviteCode)
|
||||||
|
|
||||||
|
if req.UseCount != nil {
|
||||||
|
updates["use_count"] = *req.UseCount
|
||||||
|
}
|
||||||
|
if req.SoraSupported != nil {
|
||||||
|
updates["sora_supported"] = *req.SoraSupported
|
||||||
|
}
|
||||||
|
if req.SoraRedeemedCount != nil {
|
||||||
|
updates["sora_redeemed_count"] = *req.SoraRedeemedCount
|
||||||
|
}
|
||||||
|
if req.SoraRemainingCount != nil {
|
||||||
|
updates["sora_remaining_count"] = *req.SoraRemainingCount
|
||||||
|
}
|
||||||
|
if req.SoraTotalCount != nil {
|
||||||
|
updates["sora_total_count"] = *req.SoraTotalCount
|
||||||
|
}
|
||||||
|
if req.ImageEnabled != nil {
|
||||||
|
updates["image_enabled"] = *req.ImageEnabled
|
||||||
|
}
|
||||||
|
if req.VideoEnabled != nil {
|
||||||
|
updates["video_enabled"] = *req.VideoEnabled
|
||||||
|
}
|
||||||
|
if req.ImageConcurrency != nil {
|
||||||
|
updates["image_concurrency"] = *req.ImageConcurrency
|
||||||
|
}
|
||||||
|
if req.VideoConcurrency != nil {
|
||||||
|
updates["video_concurrency"] = *req.VideoConcurrency
|
||||||
|
}
|
||||||
|
if req.IsExpired != nil {
|
||||||
|
updates["is_expired"] = *req.IsExpired
|
||||||
|
}
|
||||||
|
if req.SubscriptionEnd != nil && *req.SubscriptionEnd > 0 {
|
||||||
|
updates["subscription_end"] = time.Unix(*req.SubscriptionEnd, 0).UTC()
|
||||||
|
}
|
||||||
|
if req.SoraCooldownUntil != nil && *req.SoraCooldownUntil > 0 {
|
||||||
|
updates["sora_cooldown_until"] = time.Unix(*req.SoraCooldownUntil, 0).UTC()
|
||||||
|
}
|
||||||
|
if req.CooledUntil != nil && *req.CooledUntil > 0 {
|
||||||
|
updates["cooled_until"] = time.Unix(*req.CooledUntil, 0).UTC()
|
||||||
|
}
|
||||||
|
return updates
|
||||||
|
}
|
||||||
@@ -287,6 +287,72 @@ func ProxyWithAccountCountFromService(p *service.ProxyWithAccountCount) *ProxyWi
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func SoraUsageStatFromService(stat *service.SoraUsageStat) *SoraUsageStat {
|
||||||
|
if stat == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &SoraUsageStat{
|
||||||
|
AccountID: stat.AccountID,
|
||||||
|
ImageCount: stat.ImageCount,
|
||||||
|
VideoCount: stat.VideoCount,
|
||||||
|
ErrorCount: stat.ErrorCount,
|
||||||
|
LastErrorAt: timeToUnixSeconds(stat.LastErrorAt),
|
||||||
|
TodayImageCount: stat.TodayImageCount,
|
||||||
|
TodayVideoCount: stat.TodayVideoCount,
|
||||||
|
TodayErrorCount: stat.TodayErrorCount,
|
||||||
|
TodayDate: timeToUnixSeconds(stat.TodayDate),
|
||||||
|
ConsecutiveErrorCount: stat.ConsecutiveErrorCount,
|
||||||
|
CreatedAt: stat.CreatedAt,
|
||||||
|
UpdatedAt: stat.UpdatedAt,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func SoraAccountFromService(account *service.Account, soraAcc *service.SoraAccount, usage *service.SoraUsageStat) *SoraAccount {
|
||||||
|
if account == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
out := &SoraAccount{
|
||||||
|
AccountID: account.ID,
|
||||||
|
AccountName: account.Name,
|
||||||
|
AccountStatus: account.Status,
|
||||||
|
AccountType: account.Type,
|
||||||
|
AccountConcurrency: account.Concurrency,
|
||||||
|
ProxyID: account.ProxyID,
|
||||||
|
Usage: SoraUsageStatFromService(usage),
|
||||||
|
CreatedAt: account.CreatedAt,
|
||||||
|
UpdatedAt: account.UpdatedAt,
|
||||||
|
}
|
||||||
|
if soraAcc == nil {
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
out.AccessToken = soraAcc.AccessToken
|
||||||
|
out.SessionToken = soraAcc.SessionToken
|
||||||
|
out.RefreshToken = soraAcc.RefreshToken
|
||||||
|
out.ClientID = soraAcc.ClientID
|
||||||
|
out.Email = soraAcc.Email
|
||||||
|
out.Username = soraAcc.Username
|
||||||
|
out.Remark = soraAcc.Remark
|
||||||
|
out.UseCount = soraAcc.UseCount
|
||||||
|
out.PlanType = soraAcc.PlanType
|
||||||
|
out.PlanTitle = soraAcc.PlanTitle
|
||||||
|
out.SubscriptionEnd = timeToUnixSeconds(soraAcc.SubscriptionEnd)
|
||||||
|
out.SoraSupported = soraAcc.SoraSupported
|
||||||
|
out.SoraInviteCode = soraAcc.SoraInviteCode
|
||||||
|
out.SoraRedeemedCount = soraAcc.SoraRedeemedCount
|
||||||
|
out.SoraRemainingCount = soraAcc.SoraRemainingCount
|
||||||
|
out.SoraTotalCount = soraAcc.SoraTotalCount
|
||||||
|
out.SoraCooldownUntil = timeToUnixSeconds(soraAcc.SoraCooldownUntil)
|
||||||
|
out.CooledUntil = timeToUnixSeconds(soraAcc.CooledUntil)
|
||||||
|
out.ImageEnabled = soraAcc.ImageEnabled
|
||||||
|
out.VideoEnabled = soraAcc.VideoEnabled
|
||||||
|
out.ImageConcurrency = soraAcc.ImageConcurrency
|
||||||
|
out.VideoConcurrency = soraAcc.VideoConcurrency
|
||||||
|
out.IsExpired = soraAcc.IsExpired
|
||||||
|
out.CreatedAt = soraAcc.CreatedAt
|
||||||
|
out.UpdatedAt = soraAcc.UpdatedAt
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
func ProxyAccountSummaryFromService(a *service.ProxyAccountSummary) *ProxyAccountSummary {
|
func ProxyAccountSummaryFromService(a *service.ProxyAccountSummary) *ProxyAccountSummary {
|
||||||
if a == nil {
|
if a == nil {
|
||||||
return nil
|
return nil
|
||||||
|
|||||||
@@ -46,6 +46,25 @@ type SystemSettings struct {
|
|||||||
EnableIdentityPatch bool `json:"enable_identity_patch"`
|
EnableIdentityPatch bool `json:"enable_identity_patch"`
|
||||||
IdentityPatchPrompt string `json:"identity_patch_prompt"`
|
IdentityPatchPrompt string `json:"identity_patch_prompt"`
|
||||||
|
|
||||||
|
// Sora configuration
|
||||||
|
SoraBaseURL string `json:"sora_base_url"`
|
||||||
|
SoraTimeout int `json:"sora_timeout"`
|
||||||
|
SoraMaxRetries int `json:"sora_max_retries"`
|
||||||
|
SoraPollInterval float64 `json:"sora_poll_interval"`
|
||||||
|
SoraCallLogicMode string `json:"sora_call_logic_mode"`
|
||||||
|
SoraCacheEnabled bool `json:"sora_cache_enabled"`
|
||||||
|
SoraCacheBaseDir string `json:"sora_cache_base_dir"`
|
||||||
|
SoraCacheVideoDir string `json:"sora_cache_video_dir"`
|
||||||
|
SoraCacheMaxBytes int64 `json:"sora_cache_max_bytes"`
|
||||||
|
SoraCacheAllowedHosts []string `json:"sora_cache_allowed_hosts"`
|
||||||
|
SoraCacheUserDirEnabled bool `json:"sora_cache_user_dir_enabled"`
|
||||||
|
SoraWatermarkFreeEnabled bool `json:"sora_watermark_free_enabled"`
|
||||||
|
SoraWatermarkFreeParseMethod string `json:"sora_watermark_free_parse_method"`
|
||||||
|
SoraWatermarkFreeCustomParseURL string `json:"sora_watermark_free_custom_parse_url"`
|
||||||
|
SoraWatermarkFreeCustomParseToken string `json:"sora_watermark_free_custom_parse_token"`
|
||||||
|
SoraWatermarkFreeFallbackOnFailure bool `json:"sora_watermark_free_fallback_on_failure"`
|
||||||
|
SoraTokenRefreshEnabled bool `json:"sora_token_refresh_enabled"`
|
||||||
|
|
||||||
// Ops monitoring (vNext)
|
// Ops monitoring (vNext)
|
||||||
OpsMonitoringEnabled bool `json:"ops_monitoring_enabled"`
|
OpsMonitoringEnabled bool `json:"ops_monitoring_enabled"`
|
||||||
OpsRealtimeMonitoringEnabled bool `json:"ops_realtime_monitoring_enabled"`
|
OpsRealtimeMonitoringEnabled bool `json:"ops_realtime_monitoring_enabled"`
|
||||||
|
|||||||
@@ -141,6 +141,56 @@ type Account struct {
|
|||||||
Groups []*Group `json:"groups,omitempty"`
|
Groups []*Group `json:"groups,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SoraUsageStat is the API (JSON) representation of per-account Sora usage
// counters, produced by SoraUsageStatFromService.
//
// Lifetime counters accumulate indefinitely; the Today* counters presumably
// cover the day identified by TodayDate — confirm reset semantics in the
// service layer. Optional timestamps are Unix seconds (*int64, nil when unset).
type SoraUsageStat struct {
	AccountID int64 `json:"account_id"` // owning Sora account
	ImageCount int `json:"image_count"` // lifetime image generations
	VideoCount int `json:"video_count"` // lifetime video generations
	ErrorCount int `json:"error_count"` // lifetime errors
	LastErrorAt *int64 `json:"last_error_at"` // Unix seconds of most recent error; nil if none
	TodayImageCount int `json:"today_image_count"` // image generations for TodayDate
	TodayVideoCount int `json:"today_video_count"` // video generations for TodayDate
	TodayErrorCount int `json:"today_error_count"` // errors for TodayDate
	TodayDate *int64 `json:"today_date"` // Unix seconds marking which day the Today* counters cover
	ConsecutiveErrorCount int `json:"consecutive_error_count"` // errors in a row; presumably reset on success — confirm
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}
|
||||||
|
|
||||||
|
// SoraAccount is the API (JSON) view of a Sora-capable account: base account
// metadata merged with Sora-specific credentials, plan/quota state, feature
// toggles, and an optional usage snapshot (see SoraAccountFromService).
//
// Optional timestamps are Unix seconds (*int64, nil when unset).
type SoraAccount struct {
	// Base account fields (copied from service.Account).
	AccountID int64 `json:"account_id"`
	AccountName string `json:"account_name"`
	AccountStatus string `json:"account_status"`
	AccountType string `json:"account_type"`
	AccountConcurrency int `json:"account_concurrency"`
	ProxyID *int64 `json:"proxy_id"` // nil when no proxy is assigned

	// OAuth / session credentials (from service.SoraAccount).
	AccessToken string `json:"access_token"`
	SessionToken string `json:"session_token"`
	RefreshToken string `json:"refresh_token"`
	ClientID string `json:"client_id"`

	// Profile metadata.
	Email string `json:"email"`
	Username string `json:"username"`
	Remark string `json:"remark"`
	UseCount int `json:"use_count"`

	// Subscription / plan state.
	PlanType string `json:"plan_type"`
	PlanTitle string `json:"plan_title"`
	SubscriptionEnd *int64 `json:"subscription_end"` // Unix seconds; nil when unknown

	// Sora availability and invite quota.
	SoraSupported bool `json:"sora_supported"`
	SoraInviteCode string `json:"sora_invite_code"`
	SoraRedeemedCount int `json:"sora_redeemed_count"`
	SoraRemainingCount int `json:"sora_remaining_count"`
	SoraTotalCount int `json:"sora_total_count"`

	// Cooldown windows (Unix seconds; nil when not cooling down).
	SoraCooldownUntil *int64 `json:"sora_cooldown_until"`
	CooledUntil *int64 `json:"cooled_until"`

	// Feature toggles and per-media concurrency limits.
	ImageEnabled bool `json:"image_enabled"`
	VideoEnabled bool `json:"video_enabled"`
	ImageConcurrency int `json:"image_concurrency"`
	VideoConcurrency int `json:"video_concurrency"`

	IsExpired bool `json:"is_expired"`
	Usage *SoraUsageStat `json:"usage,omitempty"` // usage snapshot; omitted when unavailable

	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}
|
||||||
|
|
||||||
type AccountGroup struct {
|
type AccountGroup struct {
|
||||||
AccountID int64 `json:"account_id"`
|
AccountID int64 `json:"account_id"`
|
||||||
GroupID int64 `json:"group_id"`
|
GroupID int64 `json:"group_id"`
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ import (
|
|||||||
pkgerrors "github.com/Wei-Shaw/sub2api/internal/pkg/errors"
|
pkgerrors "github.com/Wei-Shaw/sub2api/internal/pkg/errors"
|
||||||
"github.com/Wei-Shaw/sub2api/internal/pkg/ip"
|
"github.com/Wei-Shaw/sub2api/internal/pkg/ip"
|
||||||
"github.com/Wei-Shaw/sub2api/internal/pkg/openai"
|
"github.com/Wei-Shaw/sub2api/internal/pkg/openai"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/pkg/sora"
|
||||||
middleware2 "github.com/Wei-Shaw/sub2api/internal/server/middleware"
|
middleware2 "github.com/Wei-Shaw/sub2api/internal/server/middleware"
|
||||||
"github.com/Wei-Shaw/sub2api/internal/service"
|
"github.com/Wei-Shaw/sub2api/internal/service"
|
||||||
|
|
||||||
@@ -508,6 +509,13 @@ func (h *GatewayHandler) Models(c *gin.Context) {
|
|||||||
})
|
})
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
if platform == service.PlatformSora {
|
||||||
|
c.JSON(http.StatusOK, gin.H{
|
||||||
|
"object": "list",
|
||||||
|
"data": sora.ListModels(),
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
c.JSON(http.StatusOK, gin.H{
|
c.JSON(http.StatusOK, gin.H{
|
||||||
"object": "list",
|
"object": "list",
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ type AdminHandlers struct {
|
|||||||
Proxy *admin.ProxyHandler
|
Proxy *admin.ProxyHandler
|
||||||
Redeem *admin.RedeemHandler
|
Redeem *admin.RedeemHandler
|
||||||
Promo *admin.PromoHandler
|
Promo *admin.PromoHandler
|
||||||
|
SoraAccount *admin.SoraAccountHandler
|
||||||
Setting *admin.SettingHandler
|
Setting *admin.SettingHandler
|
||||||
Ops *admin.OpsHandler
|
Ops *admin.OpsHandler
|
||||||
System *admin.SystemHandler
|
System *admin.SystemHandler
|
||||||
@@ -36,6 +37,7 @@ type Handlers struct {
|
|||||||
Admin *AdminHandlers
|
Admin *AdminHandlers
|
||||||
Gateway *GatewayHandler
|
Gateway *GatewayHandler
|
||||||
OpenAIGateway *OpenAIGatewayHandler
|
OpenAIGateway *OpenAIGatewayHandler
|
||||||
|
SoraGateway *SoraGatewayHandler
|
||||||
Setting *SettingHandler
|
Setting *SettingHandler
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -814,6 +814,8 @@ func guessPlatformFromPath(path string) string {
|
|||||||
return service.PlatformAntigravity
|
return service.PlatformAntigravity
|
||||||
case strings.HasPrefix(p, "/v1beta/"):
|
case strings.HasPrefix(p, "/v1beta/"):
|
||||||
return service.PlatformGemini
|
return service.PlatformGemini
|
||||||
|
case strings.Contains(p, "/chat/completions"):
|
||||||
|
return service.PlatformSora
|
||||||
case strings.Contains(p, "/responses"):
|
case strings.Contains(p, "/responses"):
|
||||||
return service.PlatformOpenAI
|
return service.PlatformOpenAI
|
||||||
default:
|
default:
|
||||||
|
|||||||
364
backend/internal/handler/sora_gateway_handler.go
Normal file
364
backend/internal/handler/sora_gateway_handler.go
Normal file
@@ -0,0 +1,364 @@
|
|||||||
|
package handler
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/config"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/pkg/sora"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/server/middleware"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/service"
|
||||||
|
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraGatewayHandler handles Sora OpenAI compatible endpoints.
|
||||||
|
// SoraGatewayHandler handles Sora OpenAI compatible endpoints.
//
// It adapts /v1/chat/completions requests into Sora generation calls:
// account selection comes from gatewayService, generation from
// soraGatewayService, and per-user wait/slot accounting from
// concurrencyHelper.
type SoraGatewayHandler struct {
	gatewayService *service.GatewayService // load-aware account selection
	soraGatewayService *service.SoraGatewayService // Sora generation backend
	billingCacheService *service.BillingCacheService // injected; not referenced in the handler paths visible here — confirm usage
	concurrencyHelper *ConcurrencyHelper // user slot acquisition + wait counting + SSE pings
	maxAccountSwitches int // max failover attempts across accounts per request
}
|
||||||
|
|
||||||
|
// NewSoraGatewayHandler creates a new SoraGatewayHandler.
|
||||||
|
func NewSoraGatewayHandler(
|
||||||
|
gatewayService *service.GatewayService,
|
||||||
|
soraGatewayService *service.SoraGatewayService,
|
||||||
|
concurrencyService *service.ConcurrencyService,
|
||||||
|
billingCacheService *service.BillingCacheService,
|
||||||
|
cfg *config.Config,
|
||||||
|
) *SoraGatewayHandler {
|
||||||
|
pingInterval := time.Duration(0)
|
||||||
|
maxAccountSwitches := 3
|
||||||
|
if cfg != nil {
|
||||||
|
pingInterval = time.Duration(cfg.Concurrency.PingInterval) * time.Second
|
||||||
|
if cfg.Gateway.MaxAccountSwitches > 0 {
|
||||||
|
maxAccountSwitches = cfg.Gateway.MaxAccountSwitches
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return &SoraGatewayHandler{
|
||||||
|
gatewayService: gatewayService,
|
||||||
|
soraGatewayService: soraGatewayService,
|
||||||
|
billingCacheService: billingCacheService,
|
||||||
|
concurrencyHelper: NewConcurrencyHelper(concurrencyService, SSEPingFormatComment, pingInterval),
|
||||||
|
maxAccountSwitches: maxAccountSwitches,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ChatCompletions handles Sora OpenAI-compatible chat completions endpoint.
|
||||||
|
// POST /v1/chat/completions
|
||||||
|
// ChatCompletions handles Sora OpenAI-compatible chat completions endpoint.
// POST /v1/chat/completions
//
// Flow:
//  1. Authenticate (API key and auth subject from middleware).
//  2. Parse the OpenAI-style body into prompt / image / video / remix target.
//  3. Apply per-user wait-count and concurrency-slot limits.
//  4. Try up to maxAccountSwitches accounts, failing over on account-level
//     token/eligibility errors, and render the result as an OpenAI chat
//     completion (streaming or non-streaming).
func (h *SoraGatewayHandler) ChatCompletions(c *gin.Context) {
	apiKey, ok := middleware.GetAPIKeyFromContext(c)
	if !ok {
		h.errorResponse(c, http.StatusUnauthorized, "authentication_error", "Invalid API key")
		return
	}
	subject, ok := middleware.GetAuthSubjectFromContext(c)
	if !ok {
		h.errorResponse(c, http.StatusInternalServerError, "api_error", "User context not found")
		return
	}

	body, err := io.ReadAll(c.Request.Body)
	if err != nil {
		// A MaxBytesReader installed upstream surfaces here; report 413 with the limit.
		if maxErr, ok := extractMaxBytesError(err); ok {
			h.errorResponse(c, http.StatusRequestEntityTooLarge, "invalid_request_error", buildBodyTooLargeMessage(maxErr.Limit))
			return
		}
		h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", "Failed to read request body")
		return
	}
	if len(body) == 0 {
		h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", "Request body is empty")
		return
	}

	var reqBody map[string]any
	if err := json.Unmarshal(body, &reqBody); err != nil {
		h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", "Failed to parse request body")
		return
	}

	model, _ := reqBody["model"].(string)
	if strings.TrimSpace(model) == "" {
		h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", "model is required")
		return
	}
	stream, _ := reqBody["stream"].(bool)

	prompt, imageData, videoData, remixID, err := parseSoraPrompt(reqBody)
	if err != nil {
		h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", err.Error())
		return
	}
	// The remix target may come from the explicit field or be embedded in the
	// prompt text; once resolved, strip it out of the prompt.
	if remixID == "" {
		remixID = sora.ExtractRemixID(prompt)
	}
	if remixID != "" {
		prompt = strings.ReplaceAll(prompt, remixID, "")
	}

	// Reject keys whose group is bound to a non-Sora platform.
	if apiKey.Group != nil && apiKey.Group.Platform != service.PlatformSora {
		h.errorResponse(c, http.StatusBadRequest, "invalid_request_error", "当前分组不支持 Sora 平台")
		return
	}

	streamStarted := false
	// Admission control: cap how many of this user's requests may queue for a slot.
	maxWait := service.CalculateMaxWait(subject.Concurrency)
	canWait, err := h.concurrencyHelper.IncrementWaitCount(c.Request.Context(), subject.UserID, maxWait)
	waitCounted := false
	if err == nil && canWait {
		waitCounted = true
	}
	if err == nil && !canWait {
		h.errorResponse(c, http.StatusTooManyRequests, "rate_limit_error", "Too many pending requests, please retry later")
		return
	}
	// Safety net: release the wait count on every exit path that still holds it.
	defer func() {
		if waitCounted {
			h.concurrencyHelper.DecrementWaitCount(c.Request.Context(), subject.UserID)
		}
	}()

	userReleaseFunc, err := h.concurrencyHelper.AcquireUserSlotWithWait(c, subject.UserID, subject.Concurrency, stream, &streamStarted)
	if err != nil {
		h.handleConcurrencyError(c, err, "user", streamStarted)
		return
	}
	// Slot acquired: we are no longer "waiting", so decrement eagerly.
	if waitCounted {
		h.concurrencyHelper.DecrementWaitCount(c.Request.Context(), subject.UserID)
		waitCounted = false
	}
	userReleaseFunc = wrapReleaseOnDone(c.Request.Context(), userReleaseFunc)
	if userReleaseFunc != nil {
		defer userReleaseFunc()
	}

	failedAccountIDs := make(map[int64]struct{})
	maxSwitches := h.maxAccountSwitches
	// "native" call-logic mode disables account failover: one attempt only.
	if mode := h.soraGatewayService.CallLogicMode(c.Request.Context()); strings.EqualFold(mode, "native") {
		maxSwitches = 1
	}

	for switchCount := 0; switchCount < maxSwitches; switchCount++ {
		selection, err := h.gatewayService.SelectAccountWithLoadAwareness(c.Request.Context(), apiKey.GroupID, "", model, failedAccountIDs, "")
		if err != nil {
			h.errorResponse(c, http.StatusServiceUnavailable, "server_error", err.Error())
			return
		}
		account := selection.Account
		releaseFunc := selection.ReleaseFunc

		result, err := h.soraGatewayService.Generate(c.Request.Context(), account, service.SoraGenerationRequest{
			Model:         model,
			Prompt:        prompt,
			Image:         imageData,
			Video:         videoData,
			RemixTargetID: remixID,
			Stream:        stream,
			UserID:        subject.UserID,
		})
		if err != nil {
			// Failure path: release the account slot immediately instead of
			// deferring, so failover iterations do not accumulate held slots.
			if releaseFunc != nil {
				releaseFunc()
			}

			// Account-level problems are retried on another account; anything
			// else is terminal for this request.
			if errors.Is(err, service.ErrSoraAccountMissingToken) || errors.Is(err, service.ErrSoraAccountNotEligible) {
				failedAccountIDs[account.ID] = struct{}{}
				continue
			}
			h.handleStreamingAwareError(c, http.StatusBadGateway, "server_error", err.Error(), streamStarted)
			return
		}

		// Success path: defer the release until the handler returns.
		if releaseFunc != nil {
			defer releaseFunc()
		}

		h.respondCompletion(c, model, result, stream)
		return
	}

	h.handleFailoverExhausted(c, http.StatusServiceUnavailable, streamStarted)
}
|
||||||
|
|
||||||
|
func (h *SoraGatewayHandler) respondCompletion(c *gin.Context, model string, result *service.SoraGenerationResult, stream bool) {
|
||||||
|
if result == nil {
|
||||||
|
h.errorResponse(c, http.StatusInternalServerError, "api_error", "Empty response")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if stream {
|
||||||
|
c.Header("Content-Type", "text/event-stream")
|
||||||
|
c.Header("Cache-Control", "no-cache")
|
||||||
|
c.Header("Connection", "keep-alive")
|
||||||
|
first := buildSoraStreamChunk(model, "", true, "")
|
||||||
|
if _, err := c.Writer.WriteString(first); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
final := buildSoraStreamChunk(model, result.Content, false, "stop")
|
||||||
|
if _, err := c.Writer.WriteString(final); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
_, _ = c.Writer.WriteString("data: [DONE]\n\n")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
c.JSON(http.StatusOK, buildSoraNonStreamResponse(model, result.Content))
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildSoraStreamChunk builds one SSE "data:" frame in the OpenAI
// chat.completion.chunk format.
//
// model is echoed back; content becomes the delta text (JSON null when empty);
// isFirst adds the assistant role to the delta. finishReason is emitted as-is
// when non-empty, and as JSON null otherwise — OpenAI-compatible streams carry
// finish_reason: null on intermediate chunks and a string ("stop", ...) only
// on the final chunk. Previously an empty reason serialized as "", which
// strict OpenAI clients can misread as a terminated stream.
func buildSoraStreamChunk(model, content string, isFirst bool, finishReason string) string {
	chunkID := fmt.Sprintf("chatcmpl-%d", time.Now().UnixMilli())
	delta := map[string]any{}
	if isFirst {
		delta["role"] = "assistant"
	}
	if content != "" {
		delta["content"] = content
	} else {
		delta["content"] = nil
	}
	// Emit null (not "") for intermediate chunks, matching the OpenAI API.
	var finish any
	if finishReason != "" {
		finish = finishReason
	}
	response := map[string]any{
		"id":      chunkID,
		"object":  "chat.completion.chunk",
		"created": time.Now().Unix(),
		"model":   model,
		"choices": []any{
			map[string]any{
				"index":         0,
				"delta":         delta,
				"finish_reason": finish,
			},
		},
	}
	payload, _ := json.Marshal(response)
	return "data: " + string(payload) + "\n\n"
}
|
||||||
|
|
||||||
|
// buildSoraNonStreamResponse builds a complete (non-streaming) OpenAI
// chat.completion payload with a single assistant message holding content.
func buildSoraNonStreamResponse(model, content string) map[string]any {
	now := time.Now()
	message := map[string]any{
		"role":    "assistant",
		"content": content,
	}
	choice := map[string]any{
		"index":         0,
		"message":       message,
		"finish_reason": "stop",
	}
	return map[string]any{
		"id":      fmt.Sprintf("chatcmpl-%d", now.UnixMilli()),
		"object":  "chat.completion",
		"created": now.Unix(),
		"model":   model,
		"choices": []any{choice},
	}
}
|
||||||
|
|
||||||
|
// parseSoraPrompt extracts the generation inputs from an OpenAI-style request
// body: the text prompt, optional image/video attachments, and an optional
// remix target ID.
//
// Only the LAST message is inspected. Its content may be a plain string or an
// OpenAI multimodal parts array (text / image_url / video_url); when several
// parts of the same type appear, the last one wins. Top-level "image",
// "video" and "remix_target_id" fields are read first and may be overridden
// by matching content parts. An error is returned when messages/content are
// missing, content has an unknown shape, or both prompt and video are blank.
func parseSoraPrompt(req map[string]any) (prompt, imageData, videoData, remixID string, err error) {
	messages, ok := req["messages"].([]any)
	if !ok || len(messages) == 0 {
		return "", "", "", "", fmt.Errorf("messages is required")
	}
	msg, ok := messages[len(messages)-1].(map[string]any)
	if !ok {
		return "", "", "", "", fmt.Errorf("invalid message format")
	}
	content, hasContent := msg["content"]
	if !hasContent {
		return "", "", "", "", fmt.Errorf("content is required")
	}

	// Top-level attachment fields (content parts below may override them).
	if s, _ := req["image"].(string); s != "" {
		imageData = s
	}
	if s, _ := req["video"].(string); s != "" {
		videoData = s
	}
	remixID, _ = req["remix_target_id"].(string)

	switch value := content.(type) {
	case string:
		prompt = value
	case []any:
		for _, item := range value {
			part, isMap := item.(map[string]any)
			if !isMap {
				continue
			}
			switch part["type"] {
			case "text":
				if text, isStr := part["text"].(string); isStr {
					prompt = text
				}
			case "image_url":
				if wrapper, okW := part["image_url"].(map[string]any); okW {
					if u, okU := wrapper["url"].(string); okU {
						imageData = u
					}
				}
			case "video_url":
				if wrapper, okW := part["video_url"].(map[string]any); okW {
					if u, okU := wrapper["url"].(string); okU {
						videoData = u
					}
				}
			}
		}
	default:
		return "", "", "", "", fmt.Errorf("invalid content format")
	}

	// A request must carry either usable text or a video to work from.
	if strings.TrimSpace(prompt) == "" && strings.TrimSpace(videoData) == "" {
		return "", "", "", "", fmt.Errorf("prompt is required")
	}
	return prompt, imageData, videoData, remixID, nil
}
|
||||||
|
|
||||||
|
// looksLikeURL reports whether value, ignoring surrounding whitespace and
// letter case, starts with an http:// or https:// scheme.
func looksLikeURL(value string) bool {
	normalized := strings.ToLower(strings.TrimSpace(value))
	for _, scheme := range [...]string{"http://", "https://"} {
		if strings.HasPrefix(normalized, scheme) {
			return true
		}
	}
	return false
}
|
||||||
|
|
||||||
|
func (h *SoraGatewayHandler) handleConcurrencyError(c *gin.Context, err error, slotType string, streamStarted bool) {
|
||||||
|
if streamStarted {
|
||||||
|
h.handleStreamingAwareError(c, http.StatusTooManyRequests, "rate_limit_error", err.Error(), true)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
c.JSON(http.StatusTooManyRequests, gin.H{"error": err.Error()})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *SoraGatewayHandler) handleFailoverExhausted(c *gin.Context, statusCode int, streamStarted bool) {
|
||||||
|
message := "No available Sora accounts"
|
||||||
|
h.handleStreamingAwareError(c, statusCode, "server_error", message, streamStarted)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *SoraGatewayHandler) handleStreamingAwareError(c *gin.Context, status int, errType, message string, streamStarted bool) {
|
||||||
|
if streamStarted {
|
||||||
|
payload := map[string]any{"error": map[string]any{"message": message, "type": errType, "param": nil, "code": nil}}
|
||||||
|
data, _ := json.Marshal(payload)
|
||||||
|
_, _ = c.Writer.WriteString("data: " + string(data) + "\n\n")
|
||||||
|
_, _ = c.Writer.WriteString("data: [DONE]\n\n")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
h.errorResponse(c, status, errType, message)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *SoraGatewayHandler) errorResponse(c *gin.Context, status int, errType, message string) {
|
||||||
|
c.JSON(status, gin.H{
|
||||||
|
"error": gin.H{
|
||||||
|
"message": message,
|
||||||
|
"type": errType,
|
||||||
|
"param": nil,
|
||||||
|
"code": nil,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
@@ -20,6 +20,7 @@ func ProvideAdminHandlers(
|
|||||||
proxyHandler *admin.ProxyHandler,
|
proxyHandler *admin.ProxyHandler,
|
||||||
redeemHandler *admin.RedeemHandler,
|
redeemHandler *admin.RedeemHandler,
|
||||||
promoHandler *admin.PromoHandler,
|
promoHandler *admin.PromoHandler,
|
||||||
|
soraAccountHandler *admin.SoraAccountHandler,
|
||||||
settingHandler *admin.SettingHandler,
|
settingHandler *admin.SettingHandler,
|
||||||
opsHandler *admin.OpsHandler,
|
opsHandler *admin.OpsHandler,
|
||||||
systemHandler *admin.SystemHandler,
|
systemHandler *admin.SystemHandler,
|
||||||
@@ -39,6 +40,7 @@ func ProvideAdminHandlers(
|
|||||||
Proxy: proxyHandler,
|
Proxy: proxyHandler,
|
||||||
Redeem: redeemHandler,
|
Redeem: redeemHandler,
|
||||||
Promo: promoHandler,
|
Promo: promoHandler,
|
||||||
|
SoraAccount: soraAccountHandler,
|
||||||
Setting: settingHandler,
|
Setting: settingHandler,
|
||||||
Ops: opsHandler,
|
Ops: opsHandler,
|
||||||
System: systemHandler,
|
System: systemHandler,
|
||||||
@@ -69,6 +71,7 @@ func ProvideHandlers(
|
|||||||
adminHandlers *AdminHandlers,
|
adminHandlers *AdminHandlers,
|
||||||
gatewayHandler *GatewayHandler,
|
gatewayHandler *GatewayHandler,
|
||||||
openaiGatewayHandler *OpenAIGatewayHandler,
|
openaiGatewayHandler *OpenAIGatewayHandler,
|
||||||
|
soraGatewayHandler *SoraGatewayHandler,
|
||||||
settingHandler *SettingHandler,
|
settingHandler *SettingHandler,
|
||||||
) *Handlers {
|
) *Handlers {
|
||||||
return &Handlers{
|
return &Handlers{
|
||||||
@@ -81,6 +84,7 @@ func ProvideHandlers(
|
|||||||
Admin: adminHandlers,
|
Admin: adminHandlers,
|
||||||
Gateway: gatewayHandler,
|
Gateway: gatewayHandler,
|
||||||
OpenAIGateway: openaiGatewayHandler,
|
OpenAIGateway: openaiGatewayHandler,
|
||||||
|
SoraGateway: soraGatewayHandler,
|
||||||
Setting: settingHandler,
|
Setting: settingHandler,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -96,6 +100,7 @@ var ProviderSet = wire.NewSet(
|
|||||||
NewSubscriptionHandler,
|
NewSubscriptionHandler,
|
||||||
NewGatewayHandler,
|
NewGatewayHandler,
|
||||||
NewOpenAIGatewayHandler,
|
NewOpenAIGatewayHandler,
|
||||||
|
NewSoraGatewayHandler,
|
||||||
ProvideSettingHandler,
|
ProvideSettingHandler,
|
||||||
|
|
||||||
// Admin handlers
|
// Admin handlers
|
||||||
@@ -110,6 +115,7 @@ var ProviderSet = wire.NewSet(
|
|||||||
admin.NewProxyHandler,
|
admin.NewProxyHandler,
|
||||||
admin.NewRedeemHandler,
|
admin.NewRedeemHandler,
|
||||||
admin.NewPromoHandler,
|
admin.NewPromoHandler,
|
||||||
|
admin.NewSoraAccountHandler,
|
||||||
admin.NewSettingHandler,
|
admin.NewSettingHandler,
|
||||||
admin.NewOpsHandler,
|
admin.NewOpsHandler,
|
||||||
ProvideSystemHandler,
|
ProvideSystemHandler,
|
||||||
|
|||||||
148
backend/internal/pkg/sora/character.go
Normal file
148
backend/internal/pkg/sora/character.go
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
package sora
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"mime/multipart"
|
||||||
|
"net/http"
|
||||||
|
"net/textproto"
|
||||||
|
)
|
||||||
|
|
||||||
|
// UploadCharacterVideo uploads a character video and returns cameo ID.
|
||||||
|
func (c *Client) UploadCharacterVideo(ctx context.Context, opts RequestOptions, data []byte) (string, error) {
|
||||||
|
if len(data) == 0 {
|
||||||
|
return "", errors.New("video data empty")
|
||||||
|
}
|
||||||
|
var buf bytes.Buffer
|
||||||
|
writer := multipart.NewWriter(&buf)
|
||||||
|
if err := writeMultipartFile(writer, "file", "video.mp4", "video/mp4", data); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if err := writer.WriteField("timestamps", "0,3"); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if err := writer.Close(); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/characters/upload", opts, &buf, writer.FormDataContentType(), false)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return stringFromJSON(resp, "id"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetCameoStatus returns cameo processing status.
|
||||||
|
func (c *Client) GetCameoStatus(ctx context.Context, opts RequestOptions, cameoID string) (map[string]any, error) {
|
||||||
|
if cameoID == "" {
|
||||||
|
return nil, errors.New("cameo id empty")
|
||||||
|
}
|
||||||
|
return c.doRequest(ctx, "GET", "/project_y/cameos/in_progress/"+cameoID, opts, nil, "", false)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DownloadCharacterImage downloads character avatar image data.
|
||||||
|
func (c *Client) DownloadCharacterImage(ctx context.Context, opts RequestOptions, imageURL string) ([]byte, error) {
|
||||||
|
if c.upstream == nil {
|
||||||
|
return nil, errors.New("upstream is nil")
|
||||||
|
}
|
||||||
|
req, err := http.NewRequestWithContext(ctx, "GET", imageURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req.Header.Set("User-Agent", defaultDesktopUA)
|
||||||
|
resp, err := c.upstream.DoWithTLS(req, opts.ProxyURL, opts.AccountID, opts.AccountConcurrency, c.enableTLSFingerprint)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
|
||||||
|
return nil, fmt.Errorf("download image failed: %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
return io.ReadAll(resp.Body)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UploadCharacterImage uploads character avatar and returns asset pointer.
|
||||||
|
func (c *Client) UploadCharacterImage(ctx context.Context, opts RequestOptions, data []byte) (string, error) {
|
||||||
|
if len(data) == 0 {
|
||||||
|
return "", errors.New("image data empty")
|
||||||
|
}
|
||||||
|
var buf bytes.Buffer
|
||||||
|
writer := multipart.NewWriter(&buf)
|
||||||
|
if err := writeMultipartFile(writer, "file", "profile.webp", "image/webp", data); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if err := writer.WriteField("use_case", "profile"); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if err := writer.Close(); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/project_y/file/upload", opts, &buf, writer.FormDataContentType(), false)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return stringFromJSON(resp, "asset_pointer"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// FinalizeCharacter finalizes character creation and returns character ID.
|
||||||
|
func (c *Client) FinalizeCharacter(ctx context.Context, opts RequestOptions, cameoID, username, displayName, assetPointer string) (string, error) {
|
||||||
|
payload := map[string]any{
|
||||||
|
"cameo_id": cameoID,
|
||||||
|
"username": username,
|
||||||
|
"display_name": displayName,
|
||||||
|
"profile_asset_pointer": assetPointer,
|
||||||
|
"instruction_set": nil,
|
||||||
|
"safety_instruction_set": nil,
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/characters/finalize", opts, bytes.NewReader(body), "application/json", false)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if character, ok := resp["character"].(map[string]any); ok {
|
||||||
|
if id, ok := character["character_id"].(string); ok {
|
||||||
|
return id, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetCharacterPublic marks character as public.
|
||||||
|
func (c *Client) SetCharacterPublic(ctx context.Context, opts RequestOptions, cameoID string) error {
|
||||||
|
payload := map[string]any{"visibility": "public"}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = c.doRequest(ctx, "POST", "/project_y/cameos/by_id/"+cameoID+"/update_v2", opts, bytes.NewReader(body), "application/json", false)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteCharacter deletes a character by ID.
|
||||||
|
func (c *Client) DeleteCharacter(ctx context.Context, opts RequestOptions, characterID string) error {
|
||||||
|
if characterID == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
_, err := c.doRequest(ctx, "DELETE", "/project_y/characters/"+characterID, opts, nil, "", false)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func writeMultipartFile(writer *multipart.Writer, field, filename, contentType string, data []byte) error {
|
||||||
|
header := make(textproto.MIMEHeader)
|
||||||
|
header.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, field, filename))
|
||||||
|
if contentType != "" {
|
||||||
|
header.Set("Content-Type", contentType)
|
||||||
|
}
|
||||||
|
part, err := writer.CreatePart(header)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = part.Write(data)
|
||||||
|
return err
|
||||||
|
}
|
||||||
612
backend/internal/pkg/sora/client.go
Normal file
612
backend/internal/pkg/sora/client.go
Normal file
@@ -0,0 +1,612 @@
|
|||||||
|
package sora
|
||||||
|
|
||||||
|
import (
	"bytes"
	"context"
	"crypto/sha3"
	"encoding/base64"
	"encoding/hex"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"mime/multipart"
	"net/http"
	"strings"
	"sync"
	"time"

	"github.com/google/uuid"
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
chatGPTBaseURL = "https://chatgpt.com"
|
||||||
|
sentinelFlow = "sora_2_create_task"
|
||||||
|
maxAPIResponseSize = 1 * 1024 * 1024 // 1MB
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
defaultMobileUA = "Sora/1.2026.007 (Android 15; Pixel 8 Pro; build 2600700)"
|
||||||
|
defaultDesktopUA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36"
|
||||||
|
sentinelCache sync.Map // 包级缓存,存储 Sentinel Token,key 为 accountID
|
||||||
|
)
|
||||||
|
|
||||||
|
// sentinelCacheEntry 是 Sentinel Token 缓存条目
|
||||||
|
type sentinelCacheEntry struct {
|
||||||
|
token string
|
||||||
|
expiresAt time.Time
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpstreamClient defines the HTTP client interface for Sora requests.
|
||||||
|
type UpstreamClient interface {
|
||||||
|
Do(req *http.Request, proxyURL string, accountID int64, accountConcurrency int) (*http.Response, error)
|
||||||
|
DoWithTLS(req *http.Request, proxyURL string, accountID int64, accountConcurrency int, enableTLSFingerprint bool) (*http.Response, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Client is a minimal Sora API client.
|
||||||
|
type Client struct {
|
||||||
|
baseURL string
|
||||||
|
timeout time.Duration
|
||||||
|
upstream UpstreamClient
|
||||||
|
enableTLSFingerprint bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// RequestOptions configures per-request context.
|
||||||
|
type RequestOptions struct {
|
||||||
|
AccountID int64
|
||||||
|
AccountConcurrency int
|
||||||
|
ProxyURL string
|
||||||
|
AccessToken string
|
||||||
|
}
|
||||||
|
|
||||||
|
// getCachedSentinel returns the cached sentinel token for accountID when a
// non-expired entry exists. Stale entries are evicted lazily on access.
func getCachedSentinel(accountID int64) (string, bool) {
	v, ok := sentinelCache.Load(accountID)
	if !ok {
		return "", false
	}
	entry := v.(*sentinelCacheEntry)
	if time.Now().After(entry.expiresAt) {
		// Expired: drop the entry so the caller recomputes a fresh token.
		sentinelCache.Delete(accountID)
		return "", false
	}
	return entry.token, true
}
|
||||||
|
|
||||||
|
// cacheSentinel stores a sentinel token for accountID with a 3-minute TTL,
// amortizing the proof-of-work cost across requests from the same account.
func cacheSentinel(accountID int64, token string) {
	sentinelCache.Store(accountID, &sentinelCacheEntry{
		token:     token,
		expiresAt: time.Now().Add(3 * time.Minute), // 3-minute validity window
	})
}
|
||||||
|
|
||||||
|
// NewClient creates a Sora client.
|
||||||
|
func NewClient(baseURL string, timeout time.Duration, upstream UpstreamClient, enableTLSFingerprint bool) *Client {
|
||||||
|
return &Client{
|
||||||
|
baseURL: strings.TrimRight(baseURL, "/"),
|
||||||
|
timeout: timeout,
|
||||||
|
upstream: upstream,
|
||||||
|
enableTLSFingerprint: enableTLSFingerprint,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UploadImage uploads an image and returns media ID.
|
||||||
|
func (c *Client) UploadImage(ctx context.Context, opts RequestOptions, data []byte, filename string) (string, error) {
|
||||||
|
if filename == "" {
|
||||||
|
filename = "image.png"
|
||||||
|
}
|
||||||
|
var buf bytes.Buffer
|
||||||
|
writer := multipart.NewWriter(&buf)
|
||||||
|
part, err := writer.CreateFormFile("file", filename)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if _, err := part.Write(data); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if err := writer.WriteField("file_name", filename); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if err := writer.Close(); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/uploads", opts, &buf, writer.FormDataContentType(), false)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return stringFromJSON(resp, "id"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GenerateImage creates an image generation task.
|
||||||
|
func (c *Client) GenerateImage(ctx context.Context, opts RequestOptions, prompt string, width, height int, mediaID string) (string, error) {
|
||||||
|
operation := "simple_compose"
|
||||||
|
var inpaint []map[string]any
|
||||||
|
if mediaID != "" {
|
||||||
|
operation = "remix"
|
||||||
|
inpaint = []map[string]any{
|
||||||
|
{
|
||||||
|
"type": "image",
|
||||||
|
"frame_index": 0,
|
||||||
|
"upload_media_id": mediaID,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
payload := map[string]any{
|
||||||
|
"type": "image_gen",
|
||||||
|
"operation": operation,
|
||||||
|
"prompt": prompt,
|
||||||
|
"width": width,
|
||||||
|
"height": height,
|
||||||
|
"n_variants": 1,
|
||||||
|
"n_frames": 1,
|
||||||
|
"inpaint_items": inpaint,
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/video_gen", opts, bytes.NewReader(body), "application/json", true)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return stringFromJSON(resp, "id"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GenerateVideo creates a video generation task.
|
||||||
|
func (c *Client) GenerateVideo(ctx context.Context, opts RequestOptions, prompt, orientation string, nFrames int, mediaID, styleID, model, size string) (string, error) {
|
||||||
|
var inpaint []map[string]any
|
||||||
|
if mediaID != "" {
|
||||||
|
inpaint = []map[string]any{{"kind": "upload", "upload_id": mediaID}}
|
||||||
|
}
|
||||||
|
payload := map[string]any{
|
||||||
|
"kind": "video",
|
||||||
|
"prompt": prompt,
|
||||||
|
"orientation": orientation,
|
||||||
|
"size": size,
|
||||||
|
"n_frames": nFrames,
|
||||||
|
"model": model,
|
||||||
|
"inpaint_items": inpaint,
|
||||||
|
"style_id": styleID,
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/nf/create", opts, bytes.NewReader(body), "application/json", true)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return stringFromJSON(resp, "id"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GenerateStoryboard creates a storyboard video task.
|
||||||
|
func (c *Client) GenerateStoryboard(ctx context.Context, opts RequestOptions, prompt, orientation string, nFrames int, mediaID, styleID string) (string, error) {
|
||||||
|
var inpaint []map[string]any
|
||||||
|
if mediaID != "" {
|
||||||
|
inpaint = []map[string]any{{"kind": "upload", "upload_id": mediaID}}
|
||||||
|
}
|
||||||
|
payload := map[string]any{
|
||||||
|
"kind": "video",
|
||||||
|
"prompt": prompt,
|
||||||
|
"title": "Draft your video",
|
||||||
|
"orientation": orientation,
|
||||||
|
"size": "small",
|
||||||
|
"n_frames": nFrames,
|
||||||
|
"storyboard_id": nil,
|
||||||
|
"inpaint_items": inpaint,
|
||||||
|
"remix_target_id": nil,
|
||||||
|
"model": "sy_8",
|
||||||
|
"metadata": nil,
|
||||||
|
"style_id": styleID,
|
||||||
|
"cameo_ids": nil,
|
||||||
|
"cameo_replacements": nil,
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/nf/create/storyboard", opts, bytes.NewReader(body), "application/json", true)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return stringFromJSON(resp, "id"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// RemixVideo creates a remix task.
|
||||||
|
func (c *Client) RemixVideo(ctx context.Context, opts RequestOptions, remixTargetID, prompt, orientation string, nFrames int, styleID string) (string, error) {
|
||||||
|
payload := map[string]any{
|
||||||
|
"kind": "video",
|
||||||
|
"prompt": prompt,
|
||||||
|
"inpaint_items": []map[string]any{},
|
||||||
|
"remix_target_id": remixTargetID,
|
||||||
|
"cameo_ids": []string{},
|
||||||
|
"cameo_replacements": map[string]any{},
|
||||||
|
"model": "sy_8",
|
||||||
|
"orientation": orientation,
|
||||||
|
"n_frames": nFrames,
|
||||||
|
"style_id": styleID,
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/nf/create", opts, bytes.NewReader(body), "application/json", true)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return stringFromJSON(resp, "id"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetImageTasks returns the 20 most recent image-generation tasks for the
// authenticated account, as the raw JSON object from the API.
func (c *Client) GetImageTasks(ctx context.Context, opts RequestOptions) (map[string]any, error) {
	return c.doRequest(ctx, "GET", "/v2/recent_tasks?limit=20", opts, nil, "", false)
}
|
||||||
|
|
||||||
|
// GetPendingTasks returns pending video tasks, normalizing the endpoint's
// variable response shape — a bare JSON array, {"items": [...]}, or
// {"data": [...]} — into a flat list of objects.
func (c *Client) GetPendingTasks(ctx context.Context, opts RequestOptions) ([]map[string]any, error) {
	resp, err := c.doRequestAny(ctx, "GET", "/nf/pending/v2", opts, nil, "", false)
	if err != nil {
		return nil, err
	}
	switch v := resp.(type) {
	case []any:
		return convertList(v), nil
	case map[string]any:
		if list, ok := v["items"].([]any); ok {
			return convertList(list), nil
		}
		if arr, ok := v["data"].([]any); ok {
			return convertList(arr), nil
		}
		// Fallback: shared helper re-checks "items" and yields nil for any
		// other object shape.
		return convertListFromAny(v), nil
	default:
		// Unexpected scalar/null payload: treat as "no pending tasks".
		return nil, nil
	}
}
|
||||||
|
|
||||||
|
// GetVideoDrafts returns up to 15 recent video drafts for the authenticated
// account, as the raw JSON object from the API.
func (c *Client) GetVideoDrafts(ctx context.Context, opts RequestOptions) (map[string]any, error) {
	return c.doRequest(ctx, "GET", "/project_y/profile/drafts?limit=15", opts, nil, "", false)
}
|
||||||
|
|
||||||
|
// EnhancePrompt calls prompt enhancement API.
|
||||||
|
func (c *Client) EnhancePrompt(ctx context.Context, opts RequestOptions, prompt, expansionLevel string, durationS int) (string, error) {
|
||||||
|
payload := map[string]any{
|
||||||
|
"prompt": prompt,
|
||||||
|
"expansion_level": expansionLevel,
|
||||||
|
"duration_s": durationS,
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/editor/enhance_prompt", opts, bytes.NewReader(body), "application/json", false)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return stringFromJSON(resp, "enhanced_prompt"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// PostVideoForWatermarkFree publishes a video for watermark-free parsing.
|
||||||
|
func (c *Client) PostVideoForWatermarkFree(ctx context.Context, opts RequestOptions, generationID string) (string, error) {
|
||||||
|
payload := map[string]any{
|
||||||
|
"attachments_to_create": []map[string]any{{
|
||||||
|
"generation_id": generationID,
|
||||||
|
"kind": "sora",
|
||||||
|
}},
|
||||||
|
"post_text": "",
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
resp, err := c.doRequest(ctx, "POST", "/project_y/post", opts, bytes.NewReader(body), "application/json", true)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
post, _ := resp["post"].(map[string]any)
|
||||||
|
if post == nil {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
return stringFromJSON(post, "id"), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeletePost deletes a Sora post by ID. A blank ID is a no-op rather than an
// error so callers may pass through optional/absent IDs.
func (c *Client) DeletePost(ctx context.Context, opts RequestOptions, postID string) error {
	if postID == "" {
		return nil
	}
	_, err := c.doRequest(ctx, "DELETE", "/project_y/post/"+postID, opts, nil, "", false)
	return err
}
|
||||||
|
|
||||||
|
// doRequest wraps doRequestAny for endpoints whose JSON response is expected
// to be an object; any other top-level shape is reported as an error.
func (c *Client) doRequest(ctx context.Context, method, endpoint string, opts RequestOptions, body io.Reader, contentType string, addSentinel bool) (map[string]any, error) {
	resp, err := c.doRequestAny(ctx, method, endpoint, opts, body, contentType, addSentinel)
	if err != nil {
		return nil, err
	}
	parsed, ok := resp.(map[string]any)
	if !ok {
		return nil, errors.New("unexpected response format")
	}
	return parsed, nil
}
|
||||||
|
|
||||||
|
// doRequestAny sends one HTTP request to the Sora API (baseURL+endpoint) and
// returns the decoded JSON response as an untyped value (object or array).
//
// The response body is capped at maxAPIResponseSize to bound memory use.
// When addSentinel is true an "openai-sentinel-token" header is generated
// (or served from the per-account cache) before sending.
func (c *Client) doRequestAny(ctx context.Context, method, endpoint string, opts RequestOptions, body io.Reader, contentType string, addSentinel bool) (any, error) {
	if c.upstream == nil {
		return nil, errors.New("upstream is nil")
	}
	url := c.baseURL + endpoint
	req, err := http.NewRequestWithContext(ctx, method, url, body)
	if err != nil {
		return nil, err
	}
	if contentType != "" {
		req.Header.Set("Content-Type", contentType)
	}
	if opts.AccessToken != "" {
		req.Header.Set("Authorization", "Bearer "+opts.AccessToken)
	}
	// Data-plane calls present the mobile app UA (contrast with the desktop
	// UA used for sentinel requests).
	req.Header.Set("User-Agent", defaultMobileUA)
	if addSentinel {
		sentinel, err := c.generateSentinelToken(ctx, opts)
		if err != nil {
			return nil, err
		}
		req.Header.Set("openai-sentinel-token", sentinel)
	}
	resp, err := c.upstream.DoWithTLS(req, opts.ProxyURL, opts.AccountID, opts.AccountConcurrency, c.enableTLSFingerprint)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// Cap the read to guard against DoS via oversized bodies. One extra byte
	// is read so an exactly-at-limit body can be told apart from an
	// oversized one.
	limitedReader := io.LimitReader(resp.Body, maxAPIResponseSize+1)
	data, err := io.ReadAll(limitedReader)
	if err != nil {
		return nil, err
	}

	// More than maxAPIResponseSize bytes arrived: reject the response.
	if int64(len(data)) > maxAPIResponseSize {
		return nil, fmt.Errorf("API 响应过大 (最大 %d 字节)", maxAPIResponseSize)
	}

	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return nil, fmt.Errorf("sora api error: %d %s", resp.StatusCode, strings.TrimSpace(string(data)))
	}
	if len(data) == 0 {
		// Some endpoints return an empty body on success.
		return map[string]any{}, nil
	}
	var parsed any
	if err := json.Unmarshal(data, &parsed); err != nil {
		return nil, err
	}
	return parsed, nil
}
|
||||||
|
|
||||||
|
// generateSentinelToken returns a value for the "openai-sentinel-token"
// header, serving from the per-account cache when a fresh entry exists.
//
// On a miss it solves a local proof-of-work, posts it to the ChatGPT
// sentinel endpoint (with a capped response size), folds the server's answer
// into the final token via buildSentinelToken, and caches the result.
func (c *Client) generateSentinelToken(ctx context.Context, opts RequestOptions) (string, error) {
	// Fast path: reuse a recently generated token for this account.
	if token, ok := getCachedSentinel(opts.AccountID); ok {
		return token, nil
	}

	reqID := uuid.New().String()
	powToken, err := generatePowToken(defaultDesktopUA)
	if err != nil {
		return "", err
	}
	payload := map[string]any{"p": powToken, "flow": sentinelFlow, "id": reqID}
	body, err := json.Marshal(payload)
	if err != nil {
		return "", err
	}
	url := chatGPTBaseURL + "/backend-api/sentinel/req"
	req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewReader(body))
	if err != nil {
		return "", err
	}
	// The sentinel endpoint is given browser-like headers (desktop UA, Sora
	// web origin), unlike the mobile UA used on data-plane requests.
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Origin", "https://sora.chatgpt.com")
	req.Header.Set("Referer", "https://sora.chatgpt.com/")
	req.Header.Set("User-Agent", defaultDesktopUA)
	if opts.AccessToken != "" {
		req.Header.Set("Authorization", "Bearer "+opts.AccessToken)
	}
	resp, err := c.upstream.DoWithTLS(req, opts.ProxyURL, opts.AccountID, opts.AccountConcurrency, c.enableTLSFingerprint)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	// Cap the read to guard against DoS via oversized bodies.
	limitedReader := io.LimitReader(resp.Body, maxAPIResponseSize+1)
	data, err := io.ReadAll(limitedReader)
	if err != nil {
		return "", err
	}

	// More than maxAPIResponseSize bytes arrived: reject the response.
	if int64(len(data)) > maxAPIResponseSize {
		return "", fmt.Errorf("API 响应过大 (最大 %d 字节)", maxAPIResponseSize)
	}

	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return "", fmt.Errorf("sentinel request failed: %d %s", resp.StatusCode, strings.TrimSpace(string(data)))
	}
	var parsed map[string]any
	if err := json.Unmarshal(data, &parsed); err != nil {
		return "", err
	}
	token := buildSentinelToken(reqID, powToken, parsed)

	// Cache for subsequent requests from this account (3-minute TTL).
	cacheSentinel(opts.AccountID, token)

	return token, nil
}
|
||||||
|
|
||||||
|
// buildSentinelToken assembles the final sentinel token JSON from the
// server's /sentinel/req response.
//
// If the response demands another proof-of-work ("proofofwork" with
// required=true), it is solved here with the server-issued seed/difficulty
// and, prefixed "gAAAAAB", replaces the locally generated token. A "~S"
// suffix is always ensured on the pow value. Turnstile data ("t") and the
// server continuation token ("c") are copied through when present.
func buildSentinelToken(reqID, powToken string, resp map[string]any) string {
	finalPow := powToken
	pow, _ := resp["proofofwork"].(map[string]any)
	if pow != nil {
		required, _ := pow["required"].(bool)
		if required {
			seed, _ := pow["seed"].(string)
			difficulty, _ := pow["difficulty"].(string)
			if seed != "" && difficulty != "" {
				// Best effort: on solve failure keep the local pow token.
				candidate, _ := solvePow(seed, difficulty, defaultDesktopUA)
				if candidate != "" {
					finalPow = "gAAAAAB" + candidate
				}
			}
		}
	}
	if !strings.HasSuffix(finalPow, "~S") {
		finalPow += "~S"
	}
	turnstile := ""
	if t, ok := resp["turnstile"].(map[string]any); ok {
		turnstile, _ = t["dx"].(string)
	}
	token := ""
	if v, ok := resp["token"].(string); ok {
		token = v
	}
	payload := map[string]any{
		"p":    finalPow,
		"t":    turnstile,
		"c":    token,
		"id":   reqID,
		"flow": sentinelFlow,
	}
	// Marshal of a map of plain strings cannot fail; error deliberately ignored.
	data, _ := json.Marshal(payload)
	return string(data)
}
|
||||||
|
|
||||||
|
func generatePowToken(userAgent string) (string, error) {
|
||||||
|
seed := fmt.Sprintf("%f", float64(time.Now().UnixNano())/1e9)
|
||||||
|
candidate, _ := solvePow(seed, "0fffff", userAgent)
|
||||||
|
if candidate == "" {
|
||||||
|
return "", errors.New("pow generation failed")
|
||||||
|
}
|
||||||
|
return "gAAAAAC" + candidate, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func solvePow(seed, difficulty, userAgent string) (string, bool) {
|
||||||
|
config := powConfig(userAgent)
|
||||||
|
seedBytes := []byte(seed)
|
||||||
|
diffBytes, err := hexDecode(difficulty)
|
||||||
|
if err != nil {
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
configBytes, err := json.Marshal(config)
|
||||||
|
if err != nil {
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
prefix := configBytes[:len(configBytes)-1]
|
||||||
|
for i := 0; i < 500000; i++ {
|
||||||
|
payload := append(prefix, []byte(fmt.Sprintf(",%d,%d]", i, i>>1))...)
|
||||||
|
b64 := base64.StdEncoding.EncodeToString(payload)
|
||||||
|
h := sha3.Sum512(append(seedBytes, []byte(b64)...))
|
||||||
|
if bytes.Compare(h[:len(diffBytes)], diffBytes) <= 0 {
|
||||||
|
return b64, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "", false
|
||||||
|
}
|
||||||
|
|
||||||
|
// powConfig builds the positional array encoded into the proof-of-work
// payload. The entries resemble browser fingerprint data (screen/heap
// numbers, locale strings, UA, timestamps, a random UUID); the exact
// meaning of each slot is dictated by the upstream sentinel format —
// NOTE(review): do not reorder or retype entries without verifying against
// the upstream implementation.
func powConfig(userAgent string) []any {
	return []any{
		3000,
		formatPowTime(),
		4294705152,
		0,
		userAgent,
		"",
		nil,
		"en-US",
		"en-US,es-US,en,es",
		0,
		"webdriver-false",
		"location",
		"window",
		time.Now().UnixMilli(),
		uuid.New().String(),
		"",
		16,
		float64(time.Now().UnixMilli()),
	}
}
|
||||||
|
|
||||||
|
func formatPowTime() string {
|
||||||
|
loc := time.FixedZone("EST", -5*60*60)
|
||||||
|
return time.Now().In(loc).Format("Mon Jan 02 2006 15:04:05") + " GMT-0500 (Eastern Standard Time)"
|
||||||
|
}
|
||||||
|
|
||||||
|
// hexDecode decodes a hex string (upper- or lower-case digits) into bytes.
// Delegates to the standard library instead of hand-rolled nibble parsing;
// odd-length or non-hex input yields an error, as before (callers only test
// err != nil).
func hexDecode(s string) ([]byte, error) {
	return hex.DecodeString(s)
}
|
||||||
|
|
||||||
|
// hexPair converts a two-character hex string into the byte it encodes.
// Both upper- and lower-case digits are accepted.
func hexPair(pair string) (byte, error) {
	var out byte
	for _, c := range []byte(pair[:2]) {
		var nibble byte
		switch {
		case '0' <= c && c <= '9':
			nibble = c - '0'
		case 'a' <= c && c <= 'f':
			nibble = c - 'a' + 10
		case 'A' <= c && c <= 'F':
			nibble = c - 'A' + 10
		default:
			return 0, errors.New("invalid hex")
		}
		out = out<<4 | nibble
	}
	return out, nil
}
|
||||||
|
|
||||||
|
// stringFromJSON returns data[key] when it holds a string, and "" otherwise.
// Indexing a nil map yields the zero value, so a nil data is handled by the
// same type assertion.
func stringFromJSON(data map[string]any, key string) string {
	s, ok := data[key].(string)
	if !ok {
		return ""
	}
	return s
}
|
||||||
|
|
||||||
|
// convertList keeps only the JSON-object elements of list, silently
// discarding entries of any other type.
func convertList(list []any) []map[string]any {
	out := make([]map[string]any, 0, len(list))
	for _, elem := range list {
		m, ok := elem.(map[string]any)
		if !ok {
			continue
		}
		out = append(out, m)
	}
	return out
}
|
||||||
|
|
||||||
|
// convertListFromAny extracts the "items" array from a JSON object and
// returns its object-typed elements; nil when data is nil or has no
// "items" array.
func convertListFromAny(data map[string]any) []map[string]any {
	if data == nil {
		return nil
	}
	items, ok := data["items"].([]any)
	if !ok {
		return nil
	}
	out := make([]map[string]any, 0, len(items))
	for _, elem := range items {
		if m, ok := elem.(map[string]any); ok {
			out = append(out, m)
		}
	}
	return out
}
|
||||||
263
backend/internal/pkg/sora/models.go
Normal file
263
backend/internal/pkg/sora/models.go
Normal file
@@ -0,0 +1,263 @@
|
|||||||
|
package sora
|
||||||
|
|
||||||
|
// ModelConfig 定义 Sora 模型配置。
|
||||||
|
type ModelConfig struct {
|
||||||
|
Type string
|
||||||
|
Width int
|
||||||
|
Height int
|
||||||
|
Orientation string
|
||||||
|
NFrames int
|
||||||
|
Model string
|
||||||
|
Size string
|
||||||
|
RequirePro bool
|
||||||
|
ExpansionLevel string
|
||||||
|
DurationS int
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelConfigs 定义所有模型配置。
|
||||||
|
var ModelConfigs = map[string]ModelConfig{
|
||||||
|
"gpt-image": {
|
||||||
|
Type: "image",
|
||||||
|
Width: 360,
|
||||||
|
Height: 360,
|
||||||
|
},
|
||||||
|
"gpt-image-landscape": {
|
||||||
|
Type: "image",
|
||||||
|
Width: 540,
|
||||||
|
Height: 360,
|
||||||
|
},
|
||||||
|
"gpt-image-portrait": {
|
||||||
|
Type: "image",
|
||||||
|
Width: 360,
|
||||||
|
Height: 540,
|
||||||
|
},
|
||||||
|
"sora2-landscape-10s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "landscape",
|
||||||
|
NFrames: 300,
|
||||||
|
},
|
||||||
|
"sora2-portrait-10s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "portrait",
|
||||||
|
NFrames: 300,
|
||||||
|
},
|
||||||
|
"sora2-landscape-15s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "landscape",
|
||||||
|
NFrames: 450,
|
||||||
|
},
|
||||||
|
"sora2-portrait-15s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "portrait",
|
||||||
|
NFrames: 450,
|
||||||
|
},
|
||||||
|
"sora2-landscape-25s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "landscape",
|
||||||
|
NFrames: 750,
|
||||||
|
Model: "sy_8",
|
||||||
|
Size: "small",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2-portrait-25s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "portrait",
|
||||||
|
NFrames: 750,
|
||||||
|
Model: "sy_8",
|
||||||
|
Size: "small",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-landscape-10s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "landscape",
|
||||||
|
NFrames: 300,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "small",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-portrait-10s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "portrait",
|
||||||
|
NFrames: 300,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "small",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-landscape-15s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "landscape",
|
||||||
|
NFrames: 450,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "small",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-portrait-15s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "portrait",
|
||||||
|
NFrames: 450,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "small",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-landscape-25s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "landscape",
|
||||||
|
NFrames: 750,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "small",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-portrait-25s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "portrait",
|
||||||
|
NFrames: 750,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "small",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-hd-landscape-10s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "landscape",
|
||||||
|
NFrames: 300,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "large",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-hd-portrait-10s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "portrait",
|
||||||
|
NFrames: 300,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "large",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-hd-landscape-15s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "landscape",
|
||||||
|
NFrames: 450,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "large",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"sora2pro-hd-portrait-15s": {
|
||||||
|
Type: "video",
|
||||||
|
Orientation: "portrait",
|
||||||
|
NFrames: 450,
|
||||||
|
Model: "sy_ore",
|
||||||
|
Size: "large",
|
||||||
|
RequirePro: true,
|
||||||
|
},
|
||||||
|
"prompt-enhance-short-10s": {
|
||||||
|
Type: "prompt_enhance",
|
||||||
|
ExpansionLevel: "short",
|
||||||
|
DurationS: 10,
|
||||||
|
},
|
||||||
|
"prompt-enhance-short-15s": {
|
||||||
|
Type: "prompt_enhance",
|
||||||
|
ExpansionLevel: "short",
|
||||||
|
DurationS: 15,
|
||||||
|
},
|
||||||
|
"prompt-enhance-short-20s": {
|
||||||
|
Type: "prompt_enhance",
|
||||||
|
ExpansionLevel: "short",
|
||||||
|
DurationS: 20,
|
||||||
|
},
|
||||||
|
"prompt-enhance-medium-10s": {
|
||||||
|
Type: "prompt_enhance",
|
||||||
|
ExpansionLevel: "medium",
|
||||||
|
DurationS: 10,
|
||||||
|
},
|
||||||
|
"prompt-enhance-medium-15s": {
|
||||||
|
Type: "prompt_enhance",
|
||||||
|
ExpansionLevel: "medium",
|
||||||
|
DurationS: 15,
|
||||||
|
},
|
||||||
|
"prompt-enhance-medium-20s": {
|
||||||
|
Type: "prompt_enhance",
|
||||||
|
ExpansionLevel: "medium",
|
||||||
|
DurationS: 20,
|
||||||
|
},
|
||||||
|
"prompt-enhance-long-10s": {
|
||||||
|
Type: "prompt_enhance",
|
||||||
|
ExpansionLevel: "long",
|
||||||
|
DurationS: 10,
|
||||||
|
},
|
||||||
|
"prompt-enhance-long-15s": {
|
||||||
|
Type: "prompt_enhance",
|
||||||
|
ExpansionLevel: "long",
|
||||||
|
DurationS: 15,
|
||||||
|
},
|
||||||
|
"prompt-enhance-long-20s": {
|
||||||
|
Type: "prompt_enhance",
|
||||||
|
ExpansionLevel: "long",
|
||||||
|
DurationS: 20,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// ModelListItem 返回模型列表条目。
|
||||||
|
type ModelListItem struct {
|
||||||
|
ID string `json:"id"`
|
||||||
|
Object string `json:"object"`
|
||||||
|
OwnedBy string `json:"owned_by"`
|
||||||
|
Description string `json:"description"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ListModels 生成模型列表。
|
||||||
|
func ListModels() []ModelListItem {
|
||||||
|
models := make([]ModelListItem, 0, len(ModelConfigs))
|
||||||
|
for id, cfg := range ModelConfigs {
|
||||||
|
description := ""
|
||||||
|
switch cfg.Type {
|
||||||
|
case "image":
|
||||||
|
description = "Image generation"
|
||||||
|
if cfg.Width > 0 && cfg.Height > 0 {
|
||||||
|
description += " - " + itoa(cfg.Width) + "x" + itoa(cfg.Height)
|
||||||
|
}
|
||||||
|
case "video":
|
||||||
|
description = "Video generation"
|
||||||
|
if cfg.Orientation != "" {
|
||||||
|
description += " - " + cfg.Orientation
|
||||||
|
}
|
||||||
|
case "prompt_enhance":
|
||||||
|
description = "Prompt enhancement"
|
||||||
|
if cfg.ExpansionLevel != "" {
|
||||||
|
description += " - " + cfg.ExpansionLevel
|
||||||
|
}
|
||||||
|
if cfg.DurationS > 0 {
|
||||||
|
description += " (" + itoa(cfg.DurationS) + "s)"
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
description = "Sora model"
|
||||||
|
}
|
||||||
|
models = append(models, ModelListItem{
|
||||||
|
ID: id,
|
||||||
|
Object: "model",
|
||||||
|
OwnedBy: "sora",
|
||||||
|
Description: description,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return models
|
||||||
|
}
|
||||||
|
|
||||||
|
// itoa converts an int to its decimal string form (this file deliberately
// avoids imports, so strconv is not used).
//
// Fixes two latent bugs in the previous version: the 12-byte buffer
// overflowed (index panic) for values with more than 12 characters, and
// `val = -val` wrapped for the minimum int, producing garbage. The magnitude
// is now held in a uint64 — negating the minimum int wraps to itself, but
// the uint64 conversion still yields the correct absolute value.
func itoa(val int) string {
	if val == 0 {
		return "0"
	}
	neg := val < 0
	mag := uint64(val)
	if neg {
		mag = uint64(-val) // wraps for the minimum int; magnitude is still correct in uint64
	}
	var buf [21]byte // 20 digits max for uint64 plus a sign
	i := len(buf)
	for mag > 0 {
		i--
		buf[i] = byte('0' + mag%10)
		mag /= 10
	}
	if neg {
		i--
		buf[i] = '-'
	}
	return string(buf[i:])
}
|
||||||
63
backend/internal/pkg/sora/prompt.go
Normal file
63
backend/internal/pkg/sora/prompt.go
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
package sora
|
||||||
|
|
||||||
|
import (
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// storyboardRe matches a shot timestamp marker such as "[2.5s]" or "[10s]".
var storyboardRe = regexp.MustCompile(`\[(\d+(?:\.\d+)?)s\]`)

// IsStoryboardPrompt reports whether prompt contains at least one "[Ns]"
// shot marker and should therefore be handled as a storyboard prompt.
// Whitespace-only prompts are never storyboards.
func IsStoryboardPrompt(prompt string) bool {
	trimmed := strings.TrimSpace(prompt)
	return trimmed != "" && storyboardRe.MatchString(prompt)
}
|
||||||
|
|
||||||
|
// FormatStoryboardPrompt 将分镜提示词转换为 API 需要的格式。
|
||||||
|
func FormatStoryboardPrompt(prompt string) string {
|
||||||
|
prompt = strings.TrimSpace(prompt)
|
||||||
|
if prompt == "" {
|
||||||
|
return prompt
|
||||||
|
}
|
||||||
|
matches := storyboardRe.FindAllStringSubmatchIndex(prompt, -1)
|
||||||
|
if len(matches) == 0 {
|
||||||
|
return prompt
|
||||||
|
}
|
||||||
|
firstIdx := matches[0][0]
|
||||||
|
instructions := strings.TrimSpace(prompt[:firstIdx])
|
||||||
|
|
||||||
|
shotPattern := regexp.MustCompile(`\[(\d+(?:\.\d+)?)s\]\s*([^\[]+)`)
|
||||||
|
shotMatches := shotPattern.FindAllStringSubmatch(prompt, -1)
|
||||||
|
if len(shotMatches) == 0 {
|
||||||
|
return prompt
|
||||||
|
}
|
||||||
|
|
||||||
|
shots := make([]string, 0, len(shotMatches))
|
||||||
|
for i, sm := range shotMatches {
|
||||||
|
if len(sm) < 3 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
duration := strings.TrimSpace(sm[1])
|
||||||
|
scene := strings.TrimSpace(sm[2])
|
||||||
|
shots = append(shots, "Shot "+itoa(i+1)+":\nduration: "+duration+"sec\nScene: "+scene)
|
||||||
|
}
|
||||||
|
|
||||||
|
timeline := strings.Join(shots, "\n\n")
|
||||||
|
if instructions != "" {
|
||||||
|
return "current timeline:\n" + timeline + "\n\ninstructions:\n" + instructions
|
||||||
|
}
|
||||||
|
return timeline
|
||||||
|
}
|
||||||
|
|
||||||
|
// remixIDRe matches Sora share/remix identifiers: the literal prefix
// "s_" followed by exactly 32 lowercase hex characters.
//
// PERF (code review): previously compiled inside ExtractRemixID on every
// call; hoisted to package scope so the pattern is compiled once.
var remixIDRe = regexp.MustCompile(`s_[a-f0-9]{32}`)

// ExtractRemixID extracts the remix ID embedded in a share link or free
// text. It returns the first match, or "" when text is blank or contains
// no identifier.
func ExtractRemixID(text string) string {
	text = strings.TrimSpace(text)
	if text == "" {
		return ""
	}
	return remixIDRe.FindString(text)
}
|
||||||
31
backend/internal/pkg/uuidv7/uuidv7.go
Normal file
31
backend/internal/pkg/uuidv7/uuidv7.go
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
package uuidv7
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// New returns a UUIDv7 string: a big-endian 48-bit Unix-millisecond
// timestamp in bytes 0-5, version bits 0b0111 in byte 6, the RFC 4122
// variant in byte 8, and crypto/rand randomness everywhere else.
func New() (string, error) {
	var b [16]byte
	if _, err := rand.Read(b[:]); err != nil {
		return "", err
	}
	// Write the millisecond timestamp big-endian into the first 6 bytes,
	// overwriting the random fill there.
	ms := uint64(time.Now().UnixMilli())
	for i := 5; i >= 0; i-- {
		b[i] = byte(ms)
		ms >>= 8
	}
	b[6] = (b[6] & 0x0f) | 0x70 // version 7
	b[8] = (b[8] & 0x3f) | 0x80 // variant 10xx
	// %x on a byte slice emits exactly two hex digits per byte, so the
	// five groups come out 8-4-4-4-12 characters wide.
	return fmt.Sprintf("%x-%x-%x-%x-%x", b[0:4], b[4:6], b[6:8], b[8:10], b[10:16]), nil
}
|
||||||
498
backend/internal/repository/sora_repo.go
Normal file
498
backend/internal/repository/sora_repo.go
Normal file
@@ -0,0 +1,498 @@
|
|||||||
|
package repository
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"errors"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/Wei-Shaw/sub2api/ent"
|
||||||
|
dbsoraaccount "github.com/Wei-Shaw/sub2api/ent/soraaccount"
|
||||||
|
dbsoracachefile "github.com/Wei-Shaw/sub2api/ent/soracachefile"
|
||||||
|
dbsoratask "github.com/Wei-Shaw/sub2api/ent/soratask"
|
||||||
|
dbsorausagestat "github.com/Wei-Shaw/sub2api/ent/sorausagestat"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/pkg/pagination"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/service"
|
||||||
|
|
||||||
|
entsql "entgo.io/ent/dialect/sql"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraAccount
|
||||||
|
|
||||||
|
type soraAccountRepository struct {
|
||||||
|
client *ent.Client
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewSoraAccountRepository(client *ent.Client) service.SoraAccountRepository {
|
||||||
|
return &soraAccountRepository{client: client}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *soraAccountRepository) GetByAccountID(ctx context.Context, accountID int64) (*service.SoraAccount, error) {
|
||||||
|
if accountID <= 0 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
acc, err := r.client.SoraAccount.Query().Where(dbsoraaccount.AccountIDEQ(accountID)).Only(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if ent.IsNotFound(err) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return mapSoraAccount(acc), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *soraAccountRepository) GetByAccountIDs(ctx context.Context, accountIDs []int64) (map[int64]*service.SoraAccount, error) {
|
||||||
|
if len(accountIDs) == 0 {
|
||||||
|
return map[int64]*service.SoraAccount{}, nil
|
||||||
|
}
|
||||||
|
records, err := r.client.SoraAccount.Query().Where(dbsoraaccount.AccountIDIn(accountIDs...)).All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
result := make(map[int64]*service.SoraAccount, len(records))
|
||||||
|
for _, record := range records {
|
||||||
|
if record == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
result[record.AccountID] = mapSoraAccount(record)
|
||||||
|
}
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Upsert creates the Sora extension row for accountID if it does not
// exist yet, otherwise applies the given column updates to the existing
// row. Keys in updates are interpreted by applySoraAccountUpdates.
//
// NOTE(review): this is a read-then-write, not an atomic upsert; two
// concurrent calls for the same new accountID can both observe NotFound
// and race on Create. Confirm a unique constraint on account_id exists
// (or switch to ent's OnConflict upsert) so the loser fails loudly.
func (r *soraAccountRepository) Upsert(ctx context.Context, accountID int64, updates map[string]any) error {
	if accountID <= 0 {
		return errors.New("invalid account_id")
	}
	// NotFound is expected here and selects the create path below.
	acc, err := r.client.SoraAccount.Query().Where(dbsoraaccount.AccountIDEQ(accountID)).Only(ctx)
	if err != nil && !ent.IsNotFound(err) {
		return err
	}
	if acc == nil {
		builder := r.client.SoraAccount.Create().SetAccountID(accountID)
		applySoraAccountUpdates(builder.Mutation(), updates)
		return builder.Exec(ctx)
	}
	updater := r.client.SoraAccount.UpdateOneID(acc.ID)
	applySoraAccountUpdates(updater.Mutation(), updates)
	return updater.Exec(ctx)
}
|
||||||
|
|
||||||
|
func applySoraAccountUpdates(m *ent.SoraAccountMutation, updates map[string]any) {
|
||||||
|
if updates == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for key, val := range updates {
|
||||||
|
switch key {
|
||||||
|
case "access_token":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetAccessToken(v)
|
||||||
|
}
|
||||||
|
case "session_token":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetSessionToken(v)
|
||||||
|
}
|
||||||
|
case "refresh_token":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetRefreshToken(v)
|
||||||
|
}
|
||||||
|
case "client_id":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetClientID(v)
|
||||||
|
}
|
||||||
|
case "email":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetEmail(v)
|
||||||
|
}
|
||||||
|
case "username":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetUsername(v)
|
||||||
|
}
|
||||||
|
case "remark":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetRemark(v)
|
||||||
|
}
|
||||||
|
case "plan_type":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetPlanType(v)
|
||||||
|
}
|
||||||
|
case "plan_title":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetPlanTitle(v)
|
||||||
|
}
|
||||||
|
case "subscription_end":
|
||||||
|
if v, ok := val.(time.Time); ok {
|
||||||
|
m.SetSubscriptionEnd(v)
|
||||||
|
}
|
||||||
|
if v, ok := val.(*time.Time); ok && v != nil {
|
||||||
|
m.SetSubscriptionEnd(*v)
|
||||||
|
}
|
||||||
|
case "sora_supported":
|
||||||
|
if v, ok := val.(bool); ok {
|
||||||
|
m.SetSoraSupported(v)
|
||||||
|
}
|
||||||
|
case "sora_invite_code":
|
||||||
|
if v, ok := val.(string); ok {
|
||||||
|
m.SetSoraInviteCode(v)
|
||||||
|
}
|
||||||
|
case "sora_redeemed_count":
|
||||||
|
if v, ok := val.(int); ok {
|
||||||
|
m.SetSoraRedeemedCount(v)
|
||||||
|
}
|
||||||
|
case "sora_remaining_count":
|
||||||
|
if v, ok := val.(int); ok {
|
||||||
|
m.SetSoraRemainingCount(v)
|
||||||
|
}
|
||||||
|
case "sora_total_count":
|
||||||
|
if v, ok := val.(int); ok {
|
||||||
|
m.SetSoraTotalCount(v)
|
||||||
|
}
|
||||||
|
case "sora_cooldown_until":
|
||||||
|
if v, ok := val.(time.Time); ok {
|
||||||
|
m.SetSoraCooldownUntil(v)
|
||||||
|
}
|
||||||
|
if v, ok := val.(*time.Time); ok && v != nil {
|
||||||
|
m.SetSoraCooldownUntil(*v)
|
||||||
|
}
|
||||||
|
case "cooled_until":
|
||||||
|
if v, ok := val.(time.Time); ok {
|
||||||
|
m.SetCooledUntil(v)
|
||||||
|
}
|
||||||
|
if v, ok := val.(*time.Time); ok && v != nil {
|
||||||
|
m.SetCooledUntil(*v)
|
||||||
|
}
|
||||||
|
case "image_enabled":
|
||||||
|
if v, ok := val.(bool); ok {
|
||||||
|
m.SetImageEnabled(v)
|
||||||
|
}
|
||||||
|
case "video_enabled":
|
||||||
|
if v, ok := val.(bool); ok {
|
||||||
|
m.SetVideoEnabled(v)
|
||||||
|
}
|
||||||
|
case "image_concurrency":
|
||||||
|
if v, ok := val.(int); ok {
|
||||||
|
m.SetImageConcurrency(v)
|
||||||
|
}
|
||||||
|
case "video_concurrency":
|
||||||
|
if v, ok := val.(int); ok {
|
||||||
|
m.SetVideoConcurrency(v)
|
||||||
|
}
|
||||||
|
case "is_expired":
|
||||||
|
if v, ok := val.(bool); ok {
|
||||||
|
m.SetIsExpired(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// mapSoraAccount converts an ent SoraAccount row into the service-layer
// DTO. Nil-safe: returns nil for a nil input. Nullable string columns
// are collapsed from *string to "" via derefString.
func mapSoraAccount(acc *ent.SoraAccount) *service.SoraAccount {
	if acc == nil {
		return nil
	}
	return &service.SoraAccount{
		AccountID:          acc.AccountID,
		AccessToken:        derefString(acc.AccessToken),
		SessionToken:       derefString(acc.SessionToken),
		RefreshToken:       derefString(acc.RefreshToken),
		ClientID:           derefString(acc.ClientID),
		Email:              derefString(acc.Email),
		Username:           derefString(acc.Username),
		Remark:             derefString(acc.Remark),
		UseCount:           acc.UseCount,
		PlanType:           derefString(acc.PlanType),
		PlanTitle:          derefString(acc.PlanTitle),
		SubscriptionEnd:    acc.SubscriptionEnd,
		SoraSupported:      acc.SoraSupported,
		SoraInviteCode:     derefString(acc.SoraInviteCode),
		SoraRedeemedCount:  acc.SoraRedeemedCount,
		SoraRemainingCount: acc.SoraRemainingCount,
		SoraTotalCount:     acc.SoraTotalCount,
		SoraCooldownUntil:  acc.SoraCooldownUntil,
		CooledUntil:        acc.CooledUntil,
		ImageEnabled:       acc.ImageEnabled,
		VideoEnabled:       acc.VideoEnabled,
		ImageConcurrency:   acc.ImageConcurrency,
		VideoConcurrency:   acc.VideoConcurrency,
		IsExpired:          acc.IsExpired,
		CreatedAt:          acc.CreatedAt,
		UpdatedAt:          acc.UpdatedAt,
	}
}
|
||||||
|
|
||||||
|
// mapSoraUsageStat converts an ent SoraUsageStat row into the
// service-layer DTO. Nil-safe: returns nil for a nil input.
func mapSoraUsageStat(stat *ent.SoraUsageStat) *service.SoraUsageStat {
	if stat == nil {
		return nil
	}
	return &service.SoraUsageStat{
		AccountID:             stat.AccountID,
		ImageCount:            stat.ImageCount,
		VideoCount:            stat.VideoCount,
		ErrorCount:            stat.ErrorCount,
		LastErrorAt:           stat.LastErrorAt,
		TodayImageCount:       stat.TodayImageCount,
		TodayVideoCount:       stat.TodayVideoCount,
		TodayErrorCount:       stat.TodayErrorCount,
		TodayDate:             stat.TodayDate,
		ConsecutiveErrorCount: stat.ConsecutiveErrorCount,
		CreatedAt:             stat.CreatedAt,
		UpdatedAt:             stat.UpdatedAt,
	}
}
|
||||||
|
|
||||||
|
// mapSoraCacheFile converts an ent SoraCacheFile row into the
// service-layer DTO. Nil-safe: returns nil for a nil input. The ent ID
// is widened to int64 for the service layer.
func mapSoraCacheFile(file *ent.SoraCacheFile) *service.SoraCacheFile {
	if file == nil {
		return nil
	}
	return &service.SoraCacheFile{
		ID:          int64(file.ID),
		TaskID:      derefString(file.TaskID),
		AccountID:   file.AccountID,
		UserID:      file.UserID,
		MediaType:   file.MediaType,
		OriginalURL: file.OriginalURL,
		CachePath:   file.CachePath,
		CacheURL:    file.CacheURL,
		SizeBytes:   file.SizeBytes,
		CreatedAt:   file.CreatedAt,
	}
}
|
||||||
|
|
||||||
|
// SoraUsageStat
|
||||||
|
|
||||||
|
type soraUsageStatRepository struct {
|
||||||
|
client *ent.Client
|
||||||
|
sql sqlExecutor
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewSoraUsageStatRepository(client *ent.Client, sqlDB *sql.DB) service.SoraUsageStatRepository {
|
||||||
|
return &soraUsageStatRepository{client: client, sql: sqlDB}
|
||||||
|
}
|
||||||
|
|
||||||
|
// RecordSuccess atomically upserts the usage counters after a successful
// generation: it bumps the lifetime image/video counter, bumps (or
// resets, when today_date is stale) the per-day counter, and clears the
// consecutive-error streak. Invalid account IDs are a silent no-op.
//
// The column names concatenated into the SQL come only from the two
// local constants below, never from caller input, so this is not an
// injection risk. NOTE(review): $1/$2 placeholders and ON CONFLICT
// assume PostgreSQL — confirm no other dialect is wired to sqlExecutor.
func (r *soraUsageStatRepository) RecordSuccess(ctx context.Context, accountID int64, isVideo bool) error {
	if accountID <= 0 {
		return nil
	}
	field := "image_count"
	todayField := "today_image_count"
	if isVideo {
		field = "video_count"
		todayField = "today_video_count"
	}
	// Day bucket in UTC so daily counters roll over consistently.
	today := time.Now().UTC().Truncate(24 * time.Hour)
	query := "INSERT INTO sora_usage_stats (account_id, " + field + ", " + todayField + ", today_date, consecutive_error_count, created_at, updated_at) " +
		"VALUES ($1, 1, 1, $2, 0, NOW(), NOW()) " +
		"ON CONFLICT (account_id) DO UPDATE SET " +
		field + " = sora_usage_stats." + field + " + 1, " +
		todayField + " = CASE WHEN sora_usage_stats.today_date = $2 THEN sora_usage_stats." + todayField + " + 1 ELSE 1 END, " +
		"today_date = $2, consecutive_error_count = 0, updated_at = NOW()"
	_, err := r.sql.ExecContext(ctx, query, accountID, today)
	return err
}
|
||||||
|
|
||||||
|
// RecordError atomically upserts the error counters for an account and
// returns the post-increment consecutive-error streak (via RETURNING),
// which callers use for cool-down decisions. Invalid IDs return (0, nil).
//
// NOTE(review): $1/$2 placeholders, ON CONFLICT and RETURNING assume
// PostgreSQL — confirm the sqlExecutor is only ever backed by Postgres.
func (r *soraUsageStatRepository) RecordError(ctx context.Context, accountID int64) (int, error) {
	if accountID <= 0 {
		return 0, nil
	}
	// UTC day bucket; today_error_count resets when the stored date is stale.
	today := time.Now().UTC().Truncate(24 * time.Hour)
	query := "INSERT INTO sora_usage_stats (account_id, error_count, today_error_count, today_date, consecutive_error_count, last_error_at, created_at, updated_at) " +
		"VALUES ($1, 1, 1, $2, 1, NOW(), NOW(), NOW()) " +
		"ON CONFLICT (account_id) DO UPDATE SET " +
		"error_count = sora_usage_stats.error_count + 1, " +
		"today_error_count = CASE WHEN sora_usage_stats.today_date = $2 THEN sora_usage_stats.today_error_count + 1 ELSE 1 END, " +
		"today_date = $2, consecutive_error_count = sora_usage_stats.consecutive_error_count + 1, last_error_at = NOW(), updated_at = NOW() " +
		"RETURNING consecutive_error_count"
	var consecutive int
	err := scanSingleRow(ctx, r.sql, query, []any{accountID, today}, &consecutive)
	if err != nil {
		return 0, err
	}
	return consecutive, nil
}
|
||||||
|
|
||||||
|
// ResetConsecutiveErrors zeroes the consecutive-error streak for the
// account. Invalid IDs and missing rows are treated as success.
//
// NOTE(review): a bulk Update().Where().Exec normally reports zero
// matched rows without an IsNotFound error, so the check below is likely
// dead (but harmless) — verify against the generated ent code.
func (r *soraUsageStatRepository) ResetConsecutiveErrors(ctx context.Context, accountID int64) error {
	if accountID <= 0 {
		return nil
	}
	err := r.client.SoraUsageStat.Update().Where(dbsorausagestat.AccountIDEQ(accountID)).
		SetConsecutiveErrorCount(0).
		Exec(ctx)
	if ent.IsNotFound(err) {
		return nil
	}
	return err
}
|
||||||
|
|
||||||
|
func (r *soraUsageStatRepository) GetByAccountID(ctx context.Context, accountID int64) (*service.SoraUsageStat, error) {
|
||||||
|
if accountID <= 0 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
stat, err := r.client.SoraUsageStat.Query().Where(dbsorausagestat.AccountIDEQ(accountID)).Only(ctx)
|
||||||
|
if err != nil {
|
||||||
|
if ent.IsNotFound(err) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return mapSoraUsageStat(stat), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *soraUsageStatRepository) GetByAccountIDs(ctx context.Context, accountIDs []int64) (map[int64]*service.SoraUsageStat, error) {
|
||||||
|
if len(accountIDs) == 0 {
|
||||||
|
return map[int64]*service.SoraUsageStat{}, nil
|
||||||
|
}
|
||||||
|
stats, err := r.client.SoraUsageStat.Query().Where(dbsorausagestat.AccountIDIn(accountIDs...)).All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
result := make(map[int64]*service.SoraUsageStat, len(stats))
|
||||||
|
for _, stat := range stats {
|
||||||
|
if stat == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
result[stat.AccountID] = mapSoraUsageStat(stat)
|
||||||
|
}
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// List pages through all usage-stat rows, most recently updated first,
// and returns the pagination envelope computed from the total count.
//
// NOTE(review): the same query builder is reused for Count and the
// subsequent ordered fetch — confirm the ent version in use tolerates
// builder reuse after Count.
func (r *soraUsageStatRepository) List(ctx context.Context, params pagination.PaginationParams) ([]*service.SoraUsageStat, *pagination.PaginationResult, error) {
	query := r.client.SoraUsageStat.Query()
	total, err := query.Count(ctx)
	if err != nil {
		return nil, nil, err
	}
	stats, err := query.Order(ent.Desc(dbsorausagestat.FieldUpdatedAt)).
		Limit(params.Limit()).
		Offset(params.Offset()).
		All(ctx)
	if err != nil {
		return nil, nil, err
	}
	result := make([]*service.SoraUsageStat, 0, len(stats))
	for _, stat := range stats {
		result = append(result, mapSoraUsageStat(stat))
	}
	return result, paginationResultFromTotal(int64(total), params), nil
}
|
||||||
|
|
||||||
|
// SoraTask
|
||||||
|
|
||||||
|
type soraTaskRepository struct {
|
||||||
|
client *ent.Client
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewSoraTaskRepository(client *ent.Client) service.SoraTaskRepository {
|
||||||
|
return &soraTaskRepository{client: client}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create persists a new Sora task row. A nil task is a silent no-op.
// Optional text columns (result URLs, error message) are written only
// when non-empty; a zero CreatedAt is defaulted to time.Now(), and
// CompletedAt is set only when provided.
func (r *soraTaskRepository) Create(ctx context.Context, task *service.SoraTask) error {
	if task == nil {
		return nil
	}
	// ent Create builders mutate in place, so the conditional Set calls
	// below accumulate onto the same builder.
	builder := r.client.SoraTask.Create().
		SetTaskID(task.TaskID).
		SetAccountID(task.AccountID).
		SetModel(task.Model).
		SetPrompt(task.Prompt).
		SetStatus(task.Status).
		SetProgress(task.Progress).
		SetRetryCount(task.RetryCount)
	if task.ResultURLs != "" {
		builder.SetResultUrls(task.ResultURLs)
	}
	if task.ErrorMessage != "" {
		builder.SetErrorMessage(task.ErrorMessage)
	}
	if task.CreatedAt.IsZero() {
		builder.SetCreatedAt(time.Now())
	} else {
		builder.SetCreatedAt(task.CreatedAt)
	}
	if task.CompletedAt != nil {
		builder.SetCompletedAt(*task.CompletedAt)
	}
	return builder.Exec(ctx)
}
|
||||||
|
|
||||||
|
// UpdateStatus updates the mutable fields of the task addressed by its
// external task ID. An empty taskID is a silent no-op.
//
// NOTE(review): empty resultURLs / errorMessage leave the stored values
// untouched — this API cannot clear those columns back to empty. Also, a
// bulk update that matches no rows normally yields count 0 rather than
// IsNotFound, so the swallow below is likely dead but harmless.
func (r *soraTaskRepository) UpdateStatus(ctx context.Context, taskID string, status string, progress float64, resultURLs string, errorMessage string, completedAt *time.Time) error {
	if taskID == "" {
		return nil
	}
	builder := r.client.SoraTask.Update().Where(dbsoratask.TaskIDEQ(taskID)).
		SetStatus(status).
		SetProgress(progress)
	if resultURLs != "" {
		builder.SetResultUrls(resultURLs)
	}
	if errorMessage != "" {
		builder.SetErrorMessage(errorMessage)
	}
	if completedAt != nil {
		builder.SetCompletedAt(*completedAt)
	}
	_, err := builder.Save(ctx)
	if ent.IsNotFound(err) {
		return nil
	}
	return err
}
|
||||||
|
|
||||||
|
// SoraCacheFile
|
||||||
|
|
||||||
|
type soraCacheFileRepository struct {
|
||||||
|
client *ent.Client
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewSoraCacheFileRepository(client *ent.Client) service.SoraCacheFileRepository {
|
||||||
|
return &soraCacheFileRepository{client: client}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create persists a new cache-file record for a downloaded Sora media
// asset. A nil file is a silent no-op; TaskID is written only when
// non-empty, and a zero CreatedAt is defaulted to time.Now().
func (r *soraCacheFileRepository) Create(ctx context.Context, file *service.SoraCacheFile) error {
	if file == nil {
		return nil
	}
	builder := r.client.SoraCacheFile.Create().
		SetAccountID(file.AccountID).
		SetUserID(file.UserID).
		SetMediaType(file.MediaType).
		SetOriginalURL(file.OriginalURL).
		SetCachePath(file.CachePath).
		SetCacheURL(file.CacheURL).
		SetSizeBytes(file.SizeBytes)
	if file.TaskID != "" {
		builder.SetTaskID(file.TaskID)
	}
	if file.CreatedAt.IsZero() {
		builder.SetCreatedAt(time.Now())
	} else {
		builder.SetCreatedAt(file.CreatedAt)
	}
	return builder.Exec(ctx)
}
|
||||||
|
|
||||||
|
func (r *soraCacheFileRepository) ListOldest(ctx context.Context, limit int) ([]*service.SoraCacheFile, error) {
|
||||||
|
if limit <= 0 {
|
||||||
|
return []*service.SoraCacheFile{}, nil
|
||||||
|
}
|
||||||
|
records, err := r.client.SoraCacheFile.Query().
|
||||||
|
Order(dbsoracachefile.ByCreatedAt(entsql.OrderAsc())).
|
||||||
|
Limit(limit).
|
||||||
|
All(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
result := make([]*service.SoraCacheFile, 0, len(records))
|
||||||
|
for _, record := range records {
|
||||||
|
if record == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
result = append(result, mapSoraCacheFile(record))
|
||||||
|
}
|
||||||
|
return result, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteByIDs bulk-deletes cache-file rows by primary key. An empty
// input is a no-op.
//
// NOTE(review): ids are int64 while mapSoraCacheFile widens the ent ID
// via int64(file.ID) — confirm the generated IDIn predicate's parameter
// type matches int64 here.
func (r *soraCacheFileRepository) DeleteByIDs(ctx context.Context, ids []int64) error {
	if len(ids) == 0 {
		return nil
	}
	_, err := r.client.SoraCacheFile.Delete().Where(dbsoracachefile.IDIn(ids...)).Exec(ctx)
	return err
}
|
||||||
@@ -64,6 +64,10 @@ var ProviderSet = wire.NewSet(
|
|||||||
NewUserSubscriptionRepository,
|
NewUserSubscriptionRepository,
|
||||||
NewUserAttributeDefinitionRepository,
|
NewUserAttributeDefinitionRepository,
|
||||||
NewUserAttributeValueRepository,
|
NewUserAttributeValueRepository,
|
||||||
|
NewSoraAccountRepository,
|
||||||
|
NewSoraUsageStatRepository,
|
||||||
|
NewSoraTaskRepository,
|
||||||
|
NewSoraCacheFileRepository,
|
||||||
|
|
||||||
// Cache implementations
|
// Cache implementations
|
||||||
NewGatewayCache,
|
NewGatewayCache,
|
||||||
|
|||||||
@@ -1,7 +1,10 @@
|
|||||||
package server
|
package server
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"log"
|
"log"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
"github.com/Wei-Shaw/sub2api/internal/config"
|
"github.com/Wei-Shaw/sub2api/internal/config"
|
||||||
"github.com/Wei-Shaw/sub2api/internal/handler"
|
"github.com/Wei-Shaw/sub2api/internal/handler"
|
||||||
@@ -46,6 +49,22 @@ func SetupRouter(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Serve Sora cached videos when enabled
|
||||||
|
cacheVideoDir := ""
|
||||||
|
cacheEnabled := false
|
||||||
|
if settingService != nil {
|
||||||
|
soraCfg := settingService.GetSoraConfig(context.Background())
|
||||||
|
cacheEnabled = soraCfg.Cache.Enabled
|
||||||
|
cacheVideoDir = strings.TrimSpace(soraCfg.Cache.VideoDir)
|
||||||
|
} else if cfg != nil {
|
||||||
|
cacheEnabled = cfg.Sora.Cache.Enabled
|
||||||
|
cacheVideoDir = strings.TrimSpace(cfg.Sora.Cache.VideoDir)
|
||||||
|
}
|
||||||
|
if cacheEnabled && cacheVideoDir != "" {
|
||||||
|
videoDir := filepath.Clean(cacheVideoDir)
|
||||||
|
r.Static("/data/video", videoDir)
|
||||||
|
}
|
||||||
|
|
||||||
// 注册路由
|
// 注册路由
|
||||||
registerRoutes(r, handlers, jwtAuth, adminAuth, apiKeyAuth, apiKeyService, subscriptionService, opsService, cfg, redisClient)
|
registerRoutes(r, handlers, jwtAuth, adminAuth, apiKeyAuth, apiKeyService, subscriptionService, opsService, cfg, redisClient)
|
||||||
|
|
||||||
|
|||||||
@@ -29,6 +29,9 @@ func RegisterAdminRoutes(
|
|||||||
// 账号管理
|
// 账号管理
|
||||||
registerAccountRoutes(admin, h)
|
registerAccountRoutes(admin, h)
|
||||||
|
|
||||||
|
// Sora 账号扩展
|
||||||
|
registerSoraRoutes(admin, h)
|
||||||
|
|
||||||
// OpenAI OAuth
|
// OpenAI OAuth
|
||||||
registerOpenAIOAuthRoutes(admin, h)
|
registerOpenAIOAuthRoutes(admin, h)
|
||||||
|
|
||||||
@@ -229,6 +232,17 @@ func registerAccountRoutes(admin *gin.RouterGroup, h *handler.Handlers) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// registerSoraRoutes mounts the admin endpoints for Sora account
// management and usage statistics under the /sora group.
func registerSoraRoutes(admin *gin.RouterGroup, h *handler.Handlers) {
	sora := admin.Group("/sora")
	{
		sora.GET("/accounts", h.Admin.SoraAccount.List)                // paged account listing
		sora.GET("/accounts/:id", h.Admin.SoraAccount.Get)             // single account detail
		sora.PUT("/accounts/:id", h.Admin.SoraAccount.Upsert)          // create-or-update one account
		sora.POST("/accounts/import", h.Admin.SoraAccount.BatchUpsert) // bulk import
		sora.GET("/usage", h.Admin.SoraAccount.ListUsage)              // usage statistics
	}
}
|
||||||
|
|
||||||
func registerOpenAIOAuthRoutes(admin *gin.RouterGroup, h *handler.Handlers) {
|
func registerOpenAIOAuthRoutes(admin *gin.RouterGroup, h *handler.Handlers) {
|
||||||
openai := admin.Group("/openai")
|
openai := admin.Group("/openai")
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -33,6 +33,7 @@ func RegisterGatewayRoutes(
|
|||||||
gateway.POST("/messages", h.Gateway.Messages)
|
gateway.POST("/messages", h.Gateway.Messages)
|
||||||
gateway.POST("/messages/count_tokens", h.Gateway.CountTokens)
|
gateway.POST("/messages/count_tokens", h.Gateway.CountTokens)
|
||||||
gateway.GET("/models", h.Gateway.Models)
|
gateway.GET("/models", h.Gateway.Models)
|
||||||
|
gateway.POST("/chat/completions", h.SoraGateway.ChatCompletions)
|
||||||
gateway.GET("/usage", h.Gateway.Usage)
|
gateway.GET("/usage", h.Gateway.Usage)
|
||||||
// OpenAI Responses API
|
// OpenAI Responses API
|
||||||
gateway.POST("/responses", h.OpenAIGateway.Responses)
|
gateway.POST("/responses", h.OpenAIGateway.Responses)
|
||||||
|
|||||||
@@ -22,6 +22,7 @@ const (
|
|||||||
PlatformOpenAI = "openai"
|
PlatformOpenAI = "openai"
|
||||||
PlatformGemini = "gemini"
|
PlatformGemini = "gemini"
|
||||||
PlatformAntigravity = "antigravity"
|
PlatformAntigravity = "antigravity"
|
||||||
|
PlatformSora = "sora"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Account type constants
|
// Account type constants
|
||||||
@@ -124,6 +125,28 @@ const (
|
|||||||
SettingKeyEnableIdentityPatch = "enable_identity_patch"
|
SettingKeyEnableIdentityPatch = "enable_identity_patch"
|
||||||
SettingKeyIdentityPatchPrompt = "identity_patch_prompt"
|
SettingKeyIdentityPatchPrompt = "identity_patch_prompt"
|
||||||
|
|
||||||
|
// =========================
|
||||||
|
// Sora Settings
|
||||||
|
// =========================
|
||||||
|
|
||||||
|
SettingKeySoraBaseURL = "sora_base_url"
|
||||||
|
SettingKeySoraTimeout = "sora_timeout"
|
||||||
|
SettingKeySoraMaxRetries = "sora_max_retries"
|
||||||
|
SettingKeySoraPollInterval = "sora_poll_interval"
|
||||||
|
SettingKeySoraCallLogicMode = "sora_call_logic_mode"
|
||||||
|
SettingKeySoraCacheEnabled = "sora_cache_enabled"
|
||||||
|
SettingKeySoraCacheBaseDir = "sora_cache_base_dir"
|
||||||
|
SettingKeySoraCacheVideoDir = "sora_cache_video_dir"
|
||||||
|
SettingKeySoraCacheMaxBytes = "sora_cache_max_bytes"
|
||||||
|
SettingKeySoraCacheAllowedHosts = "sora_cache_allowed_hosts"
|
||||||
|
SettingKeySoraCacheUserDirEnabled = "sora_cache_user_dir_enabled"
|
||||||
|
SettingKeySoraWatermarkFreeEnabled = "sora_watermark_free_enabled"
|
||||||
|
SettingKeySoraWatermarkFreeParseMethod = "sora_watermark_free_parse_method"
|
||||||
|
SettingKeySoraWatermarkFreeCustomParseURL = "sora_watermark_free_custom_parse_url"
|
||||||
|
SettingKeySoraWatermarkFreeCustomParseToken = "sora_watermark_free_custom_parse_token"
|
||||||
|
SettingKeySoraWatermarkFreeFallbackOnFailure = "sora_watermark_free_fallback_on_failure"
|
||||||
|
SettingKeySoraTokenRefreshEnabled = "sora_token_refresh_enabled"
|
||||||
|
|
||||||
// =========================
|
// =========================
|
||||||
// Ops Monitoring (vNext)
|
// Ops Monitoring (vNext)
|
||||||
// =========================
|
// =========================
|
||||||
|
|||||||
@@ -378,7 +378,7 @@ func (s *SchedulerSnapshotService) rebuildByGroupIDs(ctx context.Context, groupI
|
|||||||
if len(groupIDs) == 0 {
|
if len(groupIDs) == 0 {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
platforms := []string{PlatformAnthropic, PlatformGemini, PlatformOpenAI, PlatformAntigravity}
|
platforms := []string{PlatformAnthropic, PlatformGemini, PlatformOpenAI, PlatformSora, PlatformAntigravity}
|
||||||
var firstErr error
|
var firstErr error
|
||||||
for _, platform := range platforms {
|
for _, platform := range platforms {
|
||||||
if err := s.rebuildBucketsForPlatform(ctx, platform, groupIDs, reason); err != nil && firstErr == nil {
|
if err := s.rebuildBucketsForPlatform(ctx, platform, groupIDs, reason); err != nil && firstErr == nil {
|
||||||
@@ -661,7 +661,7 @@ func (s *SchedulerSnapshotService) fullRebuildInterval() time.Duration {
|
|||||||
|
|
||||||
func (s *SchedulerSnapshotService) defaultBuckets(ctx context.Context) ([]SchedulerBucket, error) {
|
func (s *SchedulerSnapshotService) defaultBuckets(ctx context.Context) ([]SchedulerBucket, error) {
|
||||||
buckets := make([]SchedulerBucket, 0)
|
buckets := make([]SchedulerBucket, 0)
|
||||||
platforms := []string{PlatformAnthropic, PlatformGemini, PlatformOpenAI, PlatformAntigravity}
|
platforms := []string{PlatformAnthropic, PlatformGemini, PlatformOpenAI, PlatformSora, PlatformAntigravity}
|
||||||
for _, platform := range platforms {
|
for _, platform := range platforms {
|
||||||
buckets = append(buckets, SchedulerBucket{GroupID: 0, Platform: platform, Mode: SchedulerModeSingle})
|
buckets = append(buckets, SchedulerBucket{GroupID: 0, Platform: platform, Mode: SchedulerModeSingle})
|
||||||
buckets = append(buckets, SchedulerBucket{GroupID: 0, Platform: platform, Mode: SchedulerModeForced})
|
buckets = append(buckets, SchedulerBucket{GroupID: 0, Platform: platform, Mode: SchedulerModeForced})
|
||||||
|
|||||||
@@ -219,6 +219,29 @@ func (s *SettingService) UpdateSettings(ctx context.Context, settings *SystemSet
|
|||||||
updates[SettingKeyEnableIdentityPatch] = strconv.FormatBool(settings.EnableIdentityPatch)
|
updates[SettingKeyEnableIdentityPatch] = strconv.FormatBool(settings.EnableIdentityPatch)
|
||||||
updates[SettingKeyIdentityPatchPrompt] = settings.IdentityPatchPrompt
|
updates[SettingKeyIdentityPatchPrompt] = settings.IdentityPatchPrompt
|
||||||
|
|
||||||
|
// Sora settings
|
||||||
|
updates[SettingKeySoraBaseURL] = strings.TrimSpace(settings.SoraBaseURL)
|
||||||
|
updates[SettingKeySoraTimeout] = strconv.Itoa(settings.SoraTimeout)
|
||||||
|
updates[SettingKeySoraMaxRetries] = strconv.Itoa(settings.SoraMaxRetries)
|
||||||
|
updates[SettingKeySoraPollInterval] = strconv.FormatFloat(settings.SoraPollInterval, 'f', -1, 64)
|
||||||
|
updates[SettingKeySoraCallLogicMode] = settings.SoraCallLogicMode
|
||||||
|
updates[SettingKeySoraCacheEnabled] = strconv.FormatBool(settings.SoraCacheEnabled)
|
||||||
|
updates[SettingKeySoraCacheBaseDir] = settings.SoraCacheBaseDir
|
||||||
|
updates[SettingKeySoraCacheVideoDir] = settings.SoraCacheVideoDir
|
||||||
|
updates[SettingKeySoraCacheMaxBytes] = strconv.FormatInt(settings.SoraCacheMaxBytes, 10)
|
||||||
|
allowedHostsRaw, err := marshalStringSliceSetting(settings.SoraCacheAllowedHosts)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("marshal sora cache allowed hosts: %w", err)
|
||||||
|
}
|
||||||
|
updates[SettingKeySoraCacheAllowedHosts] = allowedHostsRaw
|
||||||
|
updates[SettingKeySoraCacheUserDirEnabled] = strconv.FormatBool(settings.SoraCacheUserDirEnabled)
|
||||||
|
updates[SettingKeySoraWatermarkFreeEnabled] = strconv.FormatBool(settings.SoraWatermarkFreeEnabled)
|
||||||
|
updates[SettingKeySoraWatermarkFreeParseMethod] = settings.SoraWatermarkFreeParseMethod
|
||||||
|
updates[SettingKeySoraWatermarkFreeCustomParseURL] = strings.TrimSpace(settings.SoraWatermarkFreeCustomParseURL)
|
||||||
|
updates[SettingKeySoraWatermarkFreeCustomParseToken] = settings.SoraWatermarkFreeCustomParseToken
|
||||||
|
updates[SettingKeySoraWatermarkFreeFallbackOnFailure] = strconv.FormatBool(settings.SoraWatermarkFreeFallbackOnFailure)
|
||||||
|
updates[SettingKeySoraTokenRefreshEnabled] = strconv.FormatBool(settings.SoraTokenRefreshEnabled)
|
||||||
|
|
||||||
// Ops monitoring (vNext)
|
// Ops monitoring (vNext)
|
||||||
updates[SettingKeyOpsMonitoringEnabled] = strconv.FormatBool(settings.OpsMonitoringEnabled)
|
updates[SettingKeyOpsMonitoringEnabled] = strconv.FormatBool(settings.OpsMonitoringEnabled)
|
||||||
updates[SettingKeyOpsRealtimeMonitoringEnabled] = strconv.FormatBool(settings.OpsRealtimeMonitoringEnabled)
|
updates[SettingKeyOpsRealtimeMonitoringEnabled] = strconv.FormatBool(settings.OpsRealtimeMonitoringEnabled)
|
||||||
@@ -227,7 +250,7 @@ func (s *SettingService) UpdateSettings(ctx context.Context, settings *SystemSet
|
|||||||
updates[SettingKeyOpsMetricsIntervalSeconds] = strconv.Itoa(settings.OpsMetricsIntervalSeconds)
|
updates[SettingKeyOpsMetricsIntervalSeconds] = strconv.Itoa(settings.OpsMetricsIntervalSeconds)
|
||||||
}
|
}
|
||||||
|
|
||||||
err := s.settingRepo.SetMultiple(ctx, updates)
|
err = s.settingRepo.SetMultiple(ctx, updates)
|
||||||
if err == nil && s.onUpdate != nil {
|
if err == nil && s.onUpdate != nil {
|
||||||
s.onUpdate() // Invalidate cache after settings update
|
s.onUpdate() // Invalidate cache after settings update
|
||||||
}
|
}
|
||||||
@@ -295,6 +318,41 @@ func (s *SettingService) GetDefaultBalance(ctx context.Context) float64 {
|
|||||||
return s.cfg.Default.UserBalance
|
return s.cfg.Default.UserBalance
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GetSoraConfig 获取 Sora 配置(优先读取 DB 设置,回退 config.yaml)
|
||||||
|
func (s *SettingService) GetSoraConfig(ctx context.Context) config.SoraConfig {
|
||||||
|
base := config.SoraConfig{}
|
||||||
|
if s.cfg != nil {
|
||||||
|
base = s.cfg.Sora
|
||||||
|
}
|
||||||
|
if s.settingRepo == nil {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
keys := []string{
|
||||||
|
SettingKeySoraBaseURL,
|
||||||
|
SettingKeySoraTimeout,
|
||||||
|
SettingKeySoraMaxRetries,
|
||||||
|
SettingKeySoraPollInterval,
|
||||||
|
SettingKeySoraCallLogicMode,
|
||||||
|
SettingKeySoraCacheEnabled,
|
||||||
|
SettingKeySoraCacheBaseDir,
|
||||||
|
SettingKeySoraCacheVideoDir,
|
||||||
|
SettingKeySoraCacheMaxBytes,
|
||||||
|
SettingKeySoraCacheAllowedHosts,
|
||||||
|
SettingKeySoraCacheUserDirEnabled,
|
||||||
|
SettingKeySoraWatermarkFreeEnabled,
|
||||||
|
SettingKeySoraWatermarkFreeParseMethod,
|
||||||
|
SettingKeySoraWatermarkFreeCustomParseURL,
|
||||||
|
SettingKeySoraWatermarkFreeCustomParseToken,
|
||||||
|
SettingKeySoraWatermarkFreeFallbackOnFailure,
|
||||||
|
SettingKeySoraTokenRefreshEnabled,
|
||||||
|
}
|
||||||
|
values, err := s.settingRepo.GetMultiple(ctx, keys)
|
||||||
|
if err != nil {
|
||||||
|
return base
|
||||||
|
}
|
||||||
|
return mergeSoraConfig(base, values)
|
||||||
|
}
|
||||||
|
|
||||||
// InitializeDefaultSettings 初始化默认设置
|
// InitializeDefaultSettings 初始化默认设置
|
||||||
func (s *SettingService) InitializeDefaultSettings(ctx context.Context) error {
|
func (s *SettingService) InitializeDefaultSettings(ctx context.Context) error {
|
||||||
// 检查是否已有设置
|
// 检查是否已有设置
|
||||||
@@ -308,6 +366,12 @@ func (s *SettingService) InitializeDefaultSettings(ctx context.Context) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 初始化默认设置
|
// 初始化默认设置
|
||||||
|
soraCfg := config.SoraConfig{}
|
||||||
|
if s.cfg != nil {
|
||||||
|
soraCfg = s.cfg.Sora
|
||||||
|
}
|
||||||
|
allowedHostsRaw, _ := marshalStringSliceSetting(soraCfg.Cache.AllowedHosts)
|
||||||
|
|
||||||
defaults := map[string]string{
|
defaults := map[string]string{
|
||||||
SettingKeyRegistrationEnabled: "true",
|
SettingKeyRegistrationEnabled: "true",
|
||||||
SettingKeyEmailVerifyEnabled: "false",
|
SettingKeyEmailVerifyEnabled: "false",
|
||||||
@@ -328,6 +392,25 @@ func (s *SettingService) InitializeDefaultSettings(ctx context.Context) error {
|
|||||||
SettingKeyEnableIdentityPatch: "true",
|
SettingKeyEnableIdentityPatch: "true",
|
||||||
SettingKeyIdentityPatchPrompt: "",
|
SettingKeyIdentityPatchPrompt: "",
|
||||||
|
|
||||||
|
// Sora defaults
|
||||||
|
SettingKeySoraBaseURL: soraCfg.BaseURL,
|
||||||
|
SettingKeySoraTimeout: strconv.Itoa(soraCfg.Timeout),
|
||||||
|
SettingKeySoraMaxRetries: strconv.Itoa(soraCfg.MaxRetries),
|
||||||
|
SettingKeySoraPollInterval: strconv.FormatFloat(soraCfg.PollInterval, 'f', -1, 64),
|
||||||
|
SettingKeySoraCallLogicMode: soraCfg.CallLogicMode,
|
||||||
|
SettingKeySoraCacheEnabled: strconv.FormatBool(soraCfg.Cache.Enabled),
|
||||||
|
SettingKeySoraCacheBaseDir: soraCfg.Cache.BaseDir,
|
||||||
|
SettingKeySoraCacheVideoDir: soraCfg.Cache.VideoDir,
|
||||||
|
SettingKeySoraCacheMaxBytes: strconv.FormatInt(soraCfg.Cache.MaxBytes, 10),
|
||||||
|
SettingKeySoraCacheAllowedHosts: allowedHostsRaw,
|
||||||
|
SettingKeySoraCacheUserDirEnabled: strconv.FormatBool(soraCfg.Cache.UserDirEnabled),
|
||||||
|
SettingKeySoraWatermarkFreeEnabled: strconv.FormatBool(soraCfg.WatermarkFree.Enabled),
|
||||||
|
SettingKeySoraWatermarkFreeParseMethod: soraCfg.WatermarkFree.ParseMethod,
|
||||||
|
SettingKeySoraWatermarkFreeCustomParseURL: soraCfg.WatermarkFree.CustomParseURL,
|
||||||
|
SettingKeySoraWatermarkFreeCustomParseToken: soraCfg.WatermarkFree.CustomParseToken,
|
||||||
|
SettingKeySoraWatermarkFreeFallbackOnFailure: strconv.FormatBool(soraCfg.WatermarkFree.FallbackOnFailure),
|
||||||
|
SettingKeySoraTokenRefreshEnabled: strconv.FormatBool(soraCfg.TokenRefresh.Enabled),
|
||||||
|
|
||||||
// Ops monitoring defaults (vNext)
|
// Ops monitoring defaults (vNext)
|
||||||
SettingKeyOpsMonitoringEnabled: "true",
|
SettingKeyOpsMonitoringEnabled: "true",
|
||||||
SettingKeyOpsRealtimeMonitoringEnabled: "true",
|
SettingKeyOpsRealtimeMonitoringEnabled: "true",
|
||||||
@@ -434,6 +517,26 @@ func (s *SettingService) parseSettings(settings map[string]string) *SystemSettin
|
|||||||
}
|
}
|
||||||
result.IdentityPatchPrompt = settings[SettingKeyIdentityPatchPrompt]
|
result.IdentityPatchPrompt = settings[SettingKeyIdentityPatchPrompt]
|
||||||
|
|
||||||
|
// Sora settings
|
||||||
|
soraCfg := s.parseSoraConfig(settings)
|
||||||
|
result.SoraBaseURL = soraCfg.BaseURL
|
||||||
|
result.SoraTimeout = soraCfg.Timeout
|
||||||
|
result.SoraMaxRetries = soraCfg.MaxRetries
|
||||||
|
result.SoraPollInterval = soraCfg.PollInterval
|
||||||
|
result.SoraCallLogicMode = soraCfg.CallLogicMode
|
||||||
|
result.SoraCacheEnabled = soraCfg.Cache.Enabled
|
||||||
|
result.SoraCacheBaseDir = soraCfg.Cache.BaseDir
|
||||||
|
result.SoraCacheVideoDir = soraCfg.Cache.VideoDir
|
||||||
|
result.SoraCacheMaxBytes = soraCfg.Cache.MaxBytes
|
||||||
|
result.SoraCacheAllowedHosts = soraCfg.Cache.AllowedHosts
|
||||||
|
result.SoraCacheUserDirEnabled = soraCfg.Cache.UserDirEnabled
|
||||||
|
result.SoraWatermarkFreeEnabled = soraCfg.WatermarkFree.Enabled
|
||||||
|
result.SoraWatermarkFreeParseMethod = soraCfg.WatermarkFree.ParseMethod
|
||||||
|
result.SoraWatermarkFreeCustomParseURL = soraCfg.WatermarkFree.CustomParseURL
|
||||||
|
result.SoraWatermarkFreeCustomParseToken = soraCfg.WatermarkFree.CustomParseToken
|
||||||
|
result.SoraWatermarkFreeFallbackOnFailure = soraCfg.WatermarkFree.FallbackOnFailure
|
||||||
|
result.SoraTokenRefreshEnabled = soraCfg.TokenRefresh.Enabled
|
||||||
|
|
||||||
// Ops monitoring settings (default: enabled, fail-open)
|
// Ops monitoring settings (default: enabled, fail-open)
|
||||||
result.OpsMonitoringEnabled = !isFalseSettingValue(settings[SettingKeyOpsMonitoringEnabled])
|
result.OpsMonitoringEnabled = !isFalseSettingValue(settings[SettingKeyOpsMonitoringEnabled])
|
||||||
result.OpsRealtimeMonitoringEnabled = !isFalseSettingValue(settings[SettingKeyOpsRealtimeMonitoringEnabled])
|
result.OpsRealtimeMonitoringEnabled = !isFalseSettingValue(settings[SettingKeyOpsRealtimeMonitoringEnabled])
|
||||||
@@ -471,6 +574,131 @@ func (s *SettingService) getStringOrDefault(settings map[string]string, key, def
|
|||||||
return defaultValue
|
return defaultValue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (s *SettingService) parseSoraConfig(settings map[string]string) config.SoraConfig {
|
||||||
|
base := config.SoraConfig{}
|
||||||
|
if s.cfg != nil {
|
||||||
|
base = s.cfg.Sora
|
||||||
|
}
|
||||||
|
return mergeSoraConfig(base, settings)
|
||||||
|
}
|
||||||
|
|
||||||
|
func mergeSoraConfig(base config.SoraConfig, settings map[string]string) config.SoraConfig {
|
||||||
|
cfg := base
|
||||||
|
if settings == nil {
|
||||||
|
return cfg
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraBaseURL]; ok {
|
||||||
|
if trimmed := strings.TrimSpace(raw); trimmed != "" {
|
||||||
|
cfg.BaseURL = trimmed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraTimeout]; ok {
|
||||||
|
if v, err := strconv.Atoi(strings.TrimSpace(raw)); err == nil && v > 0 {
|
||||||
|
cfg.Timeout = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraMaxRetries]; ok {
|
||||||
|
if v, err := strconv.Atoi(strings.TrimSpace(raw)); err == nil && v >= 0 {
|
||||||
|
cfg.MaxRetries = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraPollInterval]; ok {
|
||||||
|
if v, err := strconv.ParseFloat(strings.TrimSpace(raw), 64); err == nil && v > 0 {
|
||||||
|
cfg.PollInterval = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraCallLogicMode]; ok && strings.TrimSpace(raw) != "" {
|
||||||
|
cfg.CallLogicMode = strings.TrimSpace(raw)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraCacheEnabled]; ok {
|
||||||
|
cfg.Cache.Enabled = parseBoolSetting(raw, cfg.Cache.Enabled)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraCacheBaseDir]; ok && strings.TrimSpace(raw) != "" {
|
||||||
|
cfg.Cache.BaseDir = strings.TrimSpace(raw)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraCacheVideoDir]; ok && strings.TrimSpace(raw) != "" {
|
||||||
|
cfg.Cache.VideoDir = strings.TrimSpace(raw)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraCacheMaxBytes]; ok {
|
||||||
|
if v, err := strconv.ParseInt(strings.TrimSpace(raw), 10, 64); err == nil && v >= 0 {
|
||||||
|
cfg.Cache.MaxBytes = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraCacheAllowedHosts]; ok {
|
||||||
|
cfg.Cache.AllowedHosts = parseStringSliceSetting(raw)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraCacheUserDirEnabled]; ok {
|
||||||
|
cfg.Cache.UserDirEnabled = parseBoolSetting(raw, cfg.Cache.UserDirEnabled)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraWatermarkFreeEnabled]; ok {
|
||||||
|
cfg.WatermarkFree.Enabled = parseBoolSetting(raw, cfg.WatermarkFree.Enabled)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraWatermarkFreeParseMethod]; ok && strings.TrimSpace(raw) != "" {
|
||||||
|
cfg.WatermarkFree.ParseMethod = strings.TrimSpace(raw)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraWatermarkFreeCustomParseURL]; ok && strings.TrimSpace(raw) != "" {
|
||||||
|
cfg.WatermarkFree.CustomParseURL = strings.TrimSpace(raw)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraWatermarkFreeCustomParseToken]; ok {
|
||||||
|
cfg.WatermarkFree.CustomParseToken = raw
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraWatermarkFreeFallbackOnFailure]; ok {
|
||||||
|
cfg.WatermarkFree.FallbackOnFailure = parseBoolSetting(raw, cfg.WatermarkFree.FallbackOnFailure)
|
||||||
|
}
|
||||||
|
if raw, ok := settings[SettingKeySoraTokenRefreshEnabled]; ok {
|
||||||
|
cfg.TokenRefresh.Enabled = parseBoolSetting(raw, cfg.TokenRefresh.Enabled)
|
||||||
|
}
|
||||||
|
return cfg
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseBoolSetting interprets raw as a boolean setting value, returning
// fallback when the value is blank or not a valid strconv.ParseBool input.
func parseBoolSetting(raw string, fallback bool) bool {
	v, err := strconv.ParseBool(strings.TrimSpace(raw))
	if err != nil {
		return fallback
	}
	return v
}
|
||||||
|
|
||||||
|
func parseStringSliceSetting(raw string) []string {
|
||||||
|
trimmed := strings.TrimSpace(raw)
|
||||||
|
if trimmed == "" {
|
||||||
|
return []string{}
|
||||||
|
}
|
||||||
|
var values []string
|
||||||
|
if err := json.Unmarshal([]byte(trimmed), &values); err == nil {
|
||||||
|
return normalizeStringSlice(values)
|
||||||
|
}
|
||||||
|
parts := strings.FieldsFunc(trimmed, func(r rune) bool {
|
||||||
|
return r == ',' || r == '\n' || r == ';'
|
||||||
|
})
|
||||||
|
return normalizeStringSlice(parts)
|
||||||
|
}
|
||||||
|
|
||||||
|
func marshalStringSliceSetting(values []string) (string, error) {
|
||||||
|
normalized := normalizeStringSlice(values)
|
||||||
|
data, err := json.Marshal(normalized)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return string(data), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// normalizeStringSlice trims every element and drops blanks, always returning
// a non-nil slice (so JSON encoding yields [] rather than null).
func normalizeStringSlice(values []string) []string {
	out := make([]string, 0, len(values))
	for _, v := range values {
		if trimmed := strings.TrimSpace(v); trimmed != "" {
			out = append(out, trimmed)
		}
	}
	return out
}
|
||||||
|
|
||||||
// IsTurnstileEnabled 检查是否启用 Turnstile 验证
|
// IsTurnstileEnabled 检查是否启用 Turnstile 验证
|
||||||
func (s *SettingService) IsTurnstileEnabled(ctx context.Context) bool {
|
func (s *SettingService) IsTurnstileEnabled(ctx context.Context) bool {
|
||||||
value, err := s.settingRepo.GetValue(ctx, SettingKeyTurnstileEnabled)
|
value, err := s.settingRepo.GetValue(ctx, SettingKeyTurnstileEnabled)
|
||||||
|
|||||||
@@ -49,6 +49,25 @@ type SystemSettings struct {
|
|||||||
EnableIdentityPatch bool `json:"enable_identity_patch"`
|
EnableIdentityPatch bool `json:"enable_identity_patch"`
|
||||||
IdentityPatchPrompt string `json:"identity_patch_prompt"`
|
IdentityPatchPrompt string `json:"identity_patch_prompt"`
|
||||||
|
|
||||||
|
// Sora configuration
|
||||||
|
SoraBaseURL string
|
||||||
|
SoraTimeout int
|
||||||
|
SoraMaxRetries int
|
||||||
|
SoraPollInterval float64
|
||||||
|
SoraCallLogicMode string
|
||||||
|
SoraCacheEnabled bool
|
||||||
|
SoraCacheBaseDir string
|
||||||
|
SoraCacheVideoDir string
|
||||||
|
SoraCacheMaxBytes int64
|
||||||
|
SoraCacheAllowedHosts []string
|
||||||
|
SoraCacheUserDirEnabled bool
|
||||||
|
SoraWatermarkFreeEnabled bool
|
||||||
|
SoraWatermarkFreeParseMethod string
|
||||||
|
SoraWatermarkFreeCustomParseURL string
|
||||||
|
SoraWatermarkFreeCustomParseToken string
|
||||||
|
SoraWatermarkFreeFallbackOnFailure bool
|
||||||
|
SoraTokenRefreshEnabled bool
|
||||||
|
|
||||||
// Ops monitoring (vNext)
|
// Ops monitoring (vNext)
|
||||||
OpsMonitoringEnabled bool
|
OpsMonitoringEnabled bool
|
||||||
OpsRealtimeMonitoringEnabled bool
|
OpsRealtimeMonitoringEnabled bool
|
||||||
|
|||||||
156
backend/internal/service/sora_cache_cleanup_service.go
Normal file
156
backend/internal/service/sora_cache_cleanup_service.go
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
package service
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// soraCacheCleanupInterval is how often the cleanup loop re-checks the
	// cache directory size.
	soraCacheCleanupInterval = time.Hour
	// soraCacheCleanupBatch is the number of oldest cache records fetched
	// (and deleted) per eviction round.
	soraCacheCleanupBatch = 200
)
|
||||||
|
|
||||||
|
// SoraCacheCleanupService periodically evicts cached Sora video files so the
// cache directory stays within its configured byte budget. Eviction is
// oldest-first, driven by the cache-file records in cacheRepo.
type SoraCacheCleanupService struct {
	cacheRepo      SoraCacheFileRepository // DB records of cached files; source of oldest-first eviction order
	settingService *SettingService         // preferred source for the effective Sora config; may be nil
	cfg            *config.Config          // static config fallback when settingService is nil
	stopCh         chan struct{}           // closed by Stop to terminate cleanupLoop
	stopOnce       sync.Once               // ensures stopCh is closed at most once
}
|
||||||
|
|
||||||
|
// NewSoraCacheCleanupService wires a cleanup service. The background loop does
// not run until Start is called.
func NewSoraCacheCleanupService(cacheRepo SoraCacheFileRepository, settingService *SettingService, cfg *config.Config) *SoraCacheCleanupService {
	return &SoraCacheCleanupService{
		cacheRepo:      cacheRepo,
		settingService: settingService,
		cfg:            cfg,
		stopCh:         make(chan struct{}),
	}
}
|
||||||
|
|
||||||
|
// Start launches the background cleanup loop in its own goroutine.
// It is a no-op on a nil receiver or when no cache repository is configured.
func (s *SoraCacheCleanupService) Start() {
	if s == nil || s.cacheRepo == nil {
		return
	}
	go s.cleanupLoop()
}
|
||||||
|
|
||||||
|
// Stop signals the cleanup loop to exit. It is safe to call multiple times
// and on a nil receiver.
func (s *SoraCacheCleanupService) Stop() {
	if s == nil {
		return
	}
	s.stopOnce.Do(func() {
		close(s.stopCh)
	})
}
|
||||||
|
|
||||||
|
func (s *SoraCacheCleanupService) cleanupLoop() {
|
||||||
|
ticker := time.NewTicker(soraCacheCleanupInterval)
|
||||||
|
defer ticker.Stop()
|
||||||
|
|
||||||
|
s.cleanupOnce()
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-ticker.C:
|
||||||
|
s.cleanupOnce()
|
||||||
|
case <-s.stopCh:
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *SoraCacheCleanupService) cleanupOnce() {
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 15*time.Minute)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
if s.cacheRepo == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg := s.getSoraConfig(ctx)
|
||||||
|
videoDir := strings.TrimSpace(cfg.Cache.VideoDir)
|
||||||
|
if videoDir == "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
maxBytes := cfg.Cache.MaxBytes
|
||||||
|
if maxBytes <= 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
size, err := dirSize(videoDir)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("[SoraCacheCleanup] 计算目录大小失败: %v", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if size <= maxBytes {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for size > maxBytes {
|
||||||
|
entries, err := s.cacheRepo.ListOldest(ctx, soraCacheCleanupBatch)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("[SoraCacheCleanup] 读取缓存记录失败: %v", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(entries) == 0 {
|
||||||
|
log.Printf("[SoraCacheCleanup] 无缓存记录但目录仍超限: size=%d max=%d", size, maxBytes)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ids := make([]int64, 0, len(entries))
|
||||||
|
for _, entry := range entries {
|
||||||
|
if entry == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
removedSize := entry.SizeBytes
|
||||||
|
if entry.CachePath != "" {
|
||||||
|
if info, err := os.Stat(entry.CachePath); err == nil {
|
||||||
|
if removedSize <= 0 {
|
||||||
|
removedSize = info.Size()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := os.Remove(entry.CachePath); err != nil && !os.IsNotExist(err) {
|
||||||
|
log.Printf("[SoraCacheCleanup] 删除缓存文件失败: path=%s err=%v", entry.CachePath, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if entry.ID > 0 {
|
||||||
|
ids = append(ids, entry.ID)
|
||||||
|
}
|
||||||
|
if removedSize > 0 {
|
||||||
|
size -= removedSize
|
||||||
|
if size < 0 {
|
||||||
|
size = 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(ids) > 0 {
|
||||||
|
if err := s.cacheRepo.DeleteByIDs(ctx, ids); err != nil {
|
||||||
|
log.Printf("[SoraCacheCleanup] 删除缓存记录失败: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if size > maxBytes {
|
||||||
|
if refreshed, err := dirSize(videoDir); err == nil {
|
||||||
|
size = refreshed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *SoraCacheCleanupService) getSoraConfig(ctx context.Context) config.SoraConfig {
|
||||||
|
if s.settingService != nil {
|
||||||
|
return s.settingService.GetSoraConfig(ctx)
|
||||||
|
}
|
||||||
|
if s.cfg != nil {
|
||||||
|
return s.cfg.Sora
|
||||||
|
}
|
||||||
|
return config.SoraConfig{}
|
||||||
|
}
|
||||||
246
backend/internal/service/sora_cache_service.go
Normal file
246
backend/internal/service/sora_cache_service.go
Normal file
@@ -0,0 +1,246 @@
|
|||||||
|
package service
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/config"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/pkg/uuidv7"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/util/urlvalidator"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraCacheService downloads generated Sora media to local disk and records
// each cached file so it can be served from this host instead of the upstream.
type SoraCacheService struct {
	cfg            *config.Config          // static config (URL allowlist, TLS fingerprint, Sora fallback)
	cacheRepo      SoraCacheFileRepository // persistence for cache-file records; may be nil
	settingService *SettingService         // dynamic Sora config source; may be nil
	accountRepo    AccountRepository       // resolves per-account proxy/concurrency for downloads
	httpUpstream   HTTPUpstream            // proxied upstream transport; a plain http.Client is used when nil
}
|
||||||
|
|
||||||
|
// NewSoraCacheService constructs a SoraCacheService with its dependencies.
// Any dependency may be nil; the service degrades gracefully (see field docs).
func NewSoraCacheService(cfg *config.Config, cacheRepo SoraCacheFileRepository, settingService *SettingService, accountRepo AccountRepository, httpUpstream HTTPUpstream) *SoraCacheService {
	return &SoraCacheService{
		cfg:            cfg,
		cacheRepo:      cacheRepo,
		settingService: settingService,
		accountRepo:    accountRepo,
		httpUpstream:   httpUpstream,
	}
}
|
||||||
|
|
||||||
|
func (s *SoraCacheService) CacheVideo(ctx context.Context, accountID, userID int64, taskID, mediaURL string) (*SoraCacheFile, error) {
|
||||||
|
cfg := s.getSoraConfig(ctx)
|
||||||
|
if !cfg.Cache.Enabled {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
trimmed := strings.TrimSpace(mediaURL)
|
||||||
|
if trimmed == "" {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
allowedHosts := cfg.Cache.AllowedHosts
|
||||||
|
useAllowlist := true
|
||||||
|
if len(allowedHosts) == 0 {
|
||||||
|
if s.cfg != nil {
|
||||||
|
allowedHosts = s.cfg.Security.URLAllowlist.UpstreamHosts
|
||||||
|
useAllowlist = s.cfg.Security.URLAllowlist.Enabled
|
||||||
|
} else {
|
||||||
|
useAllowlist = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if useAllowlist {
|
||||||
|
if _, err := urlvalidator.ValidateHTTPSURL(trimmed, urlvalidator.ValidationOptions{
|
||||||
|
AllowedHosts: allowedHosts,
|
||||||
|
RequireAllowlist: true,
|
||||||
|
AllowPrivate: s.cfg != nil && s.cfg.Security.URLAllowlist.AllowPrivateHosts,
|
||||||
|
}); err != nil {
|
||||||
|
return nil, fmt.Errorf("缓存下载地址不合法: %w", err)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
allowInsecure := false
|
||||||
|
if s.cfg != nil {
|
||||||
|
allowInsecure = s.cfg.Security.URLAllowlist.AllowInsecureHTTP
|
||||||
|
}
|
||||||
|
if _, err := urlvalidator.ValidateURLFormat(trimmed, allowInsecure); err != nil {
|
||||||
|
return nil, fmt.Errorf("缓存下载地址不合法: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
videoDir := strings.TrimSpace(cfg.Cache.VideoDir)
|
||||||
|
if videoDir == "" {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if cfg.Cache.MaxBytes > 0 {
|
||||||
|
size, err := dirSize(videoDir)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if size >= cfg.Cache.MaxBytes {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
relativeDir := ""
|
||||||
|
if cfg.Cache.UserDirEnabled && userID > 0 {
|
||||||
|
relativeDir = fmt.Sprintf("u_%d", userID)
|
||||||
|
}
|
||||||
|
|
||||||
|
targetDir := filepath.Join(videoDir, relativeDir)
|
||||||
|
if err := os.MkdirAll(targetDir, 0o755); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
uuid, err := uuidv7.New()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
name := deriveFileName(trimmed)
|
||||||
|
if name == "" {
|
||||||
|
name = "video.mp4"
|
||||||
|
}
|
||||||
|
name = sanitizeFileName(name)
|
||||||
|
filename := uuid + "_" + name
|
||||||
|
cachePath := filepath.Join(targetDir, filename)
|
||||||
|
|
||||||
|
resp, err := s.downloadMedia(ctx, accountID, trimmed, time.Duration(cfg.Timeout)*time.Second)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
|
||||||
|
return nil, fmt.Errorf("缓存下载失败: %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
|
||||||
|
out, err := os.Create(cachePath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer out.Close()
|
||||||
|
|
||||||
|
written, err := io.Copy(out, resp.Body)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
cacheURL := buildCacheURL(relativeDir, filename)
|
||||||
|
|
||||||
|
record := &SoraCacheFile{
|
||||||
|
TaskID: taskID,
|
||||||
|
AccountID: accountID,
|
||||||
|
UserID: userID,
|
||||||
|
MediaType: "video",
|
||||||
|
OriginalURL: trimmed,
|
||||||
|
CachePath: cachePath,
|
||||||
|
CacheURL: cacheURL,
|
||||||
|
SizeBytes: written,
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
}
|
||||||
|
if s.cacheRepo != nil {
|
||||||
|
if err := s.cacheRepo.Create(ctx, record); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return record, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildCacheURL maps a cached file to its public URL under /data/video,
// optionally nested in a per-user relative directory.
func buildCacheURL(relativeDir, filename string) string {
	segments := []string{"/data/video"}
	if relativeDir != "" {
		segments = append(segments, relativeDir)
	}
	segments = append(segments, filename)
	return path.Join(segments...)
}
|
||||||
|
|
||||||
|
func (s *SoraCacheService) getSoraConfig(ctx context.Context) config.SoraConfig {
|
||||||
|
if s.settingService != nil {
|
||||||
|
return s.settingService.GetSoraConfig(ctx)
|
||||||
|
}
|
||||||
|
if s.cfg != nil {
|
||||||
|
return s.cfg.Sora
|
||||||
|
}
|
||||||
|
return config.SoraConfig{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *SoraCacheService) downloadMedia(ctx context.Context, accountID int64, mediaURL string, timeout time.Duration) (*http.Response, error) {
|
||||||
|
if timeout <= 0 {
|
||||||
|
timeout = 120 * time.Second
|
||||||
|
}
|
||||||
|
ctx, cancel := context.WithTimeout(ctx, timeout)
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
req, err := http.NewRequestWithContext(ctx, "GET", mediaURL, nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36")
|
||||||
|
|
||||||
|
if s.httpUpstream == nil {
|
||||||
|
client := &http.Client{Timeout: timeout}
|
||||||
|
return client.Do(req)
|
||||||
|
}
|
||||||
|
|
||||||
|
var accountConcurrency int
|
||||||
|
proxyURL := ""
|
||||||
|
if s.accountRepo != nil && accountID > 0 {
|
||||||
|
account, err := s.accountRepo.GetByID(ctx, accountID)
|
||||||
|
if err == nil && account != nil {
|
||||||
|
accountConcurrency = account.Concurrency
|
||||||
|
if account.Proxy != nil {
|
||||||
|
proxyURL = account.Proxy.URL()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
enableTLS := false
|
||||||
|
if s.cfg != nil {
|
||||||
|
enableTLS = s.cfg.Gateway.TLSFingerprint.Enabled
|
||||||
|
}
|
||||||
|
return s.httpUpstream.DoWithTLS(req, proxyURL, accountID, accountConcurrency, enableTLS)
|
||||||
|
}
|
||||||
|
|
||||||
|
// deriveFileName extracts the final path segment of rawURL, or "" when the
// URL is unparsable or has no meaningful file component.
func deriveFileName(rawURL string) string {
	parsed, err := url.Parse(rawURL)
	if err != nil {
		return ""
	}
	base := path.Base(parsed.Path)
	switch base {
	case "/", ".":
		return ""
	}
	return base
}
|
||||||
|
|
||||||
|
// sanitizeFileName keeps only [A-Za-z0-9._-], converts spaces to underscores,
// drops every other rune, and strips leading dots so the result can never be
// a hidden or relative ("..") name. A blank input yields "".
func sanitizeFileName(name string) string {
	trimmed := strings.TrimSpace(name)
	if trimmed == "" {
		return ""
	}
	var b strings.Builder
	b.Grow(len(trimmed))
	for _, r := range trimmed {
		switch {
		case r >= 'a' && r <= 'z', r >= 'A' && r <= 'Z', r >= '0' && r <= '9',
			r == '-', r == '_', r == '.':
			b.WriteRune(r)
		case r == ' ':
			b.WriteByte('_')
		}
	}
	return strings.TrimLeft(b.String(), ".")
}
|
||||||
28
backend/internal/service/sora_cache_utils.go
Normal file
28
backend/internal/service/sora_cache_utils.go
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
package service
|
||||||
|
|
||||||
|
import (
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
)
|
||||||
|
|
||||||
|
func dirSize(root string) (int64, error) {
|
||||||
|
var size int64
|
||||||
|
err := filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if d.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
info, err := d.Info()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
size += info.Size()
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
if err != nil && os.IsNotExist(err) {
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
return size, err
|
||||||
|
}
|
||||||
853
backend/internal/service/sora_gateway_service.go
Normal file
853
backend/internal/service/sora_gateway_service.go
Normal file
@@ -0,0 +1,853 @@
|
|||||||
|
package service
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"math/rand"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/config"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/pkg/sora"
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/util/urlvalidator"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// soraErrorDisableThreshold is the consecutive-error count used to take
	// an account out of rotation (consumers are outside this excerpt —
	// TODO confirm against the rest of the file).
	soraErrorDisableThreshold = 5
	// Hard caps on downloaded media sizes, bounding memory/disk use when
	// fetching upstream responses.
	maxImageDownloadSize = 20 * 1024 * 1024  // 20MB
	maxVideoDownloadSize = 200 * 1024 * 1024 // 200MB
)

var (
	// ErrSoraAccountMissingToken indicates the account record has no usable
	// access token for the Sora API.
	ErrSoraAccountMissingToken = errors.New("sora account missing access token")
	// ErrSoraAccountNotEligible indicates the account cannot serve the
	// requested model (cooldown active, Pro required, media type disabled,
	// or the account is expired).
	ErrSoraAccountNotEligible = errors.New("sora account not eligible")
)
|
||||||
|
|
||||||
|
// SoraGenerationRequest describes one Sora generation call.
type SoraGenerationRequest struct {
	Model         string // model identifier; must exist in sora.ModelConfigs
	Prompt        string // text prompt
	Image         string // optional image input
	Video         string // optional video input
	RemixTargetID string // optional ID of an existing generation to remix
	Stream        bool   // whether the caller expects a streamed response
	UserID        int64  // requesting user, used for attribution/caching
}

// SoraGenerationResult is the outcome of a Sora generation call.
type SoraGenerationResult struct {
	Content    string   // textual content (e.g. enhanced prompt output)
	MediaType  string   // kind of media produced
	ResultURLs []string // URLs of the generated media
	TaskID     string   // upstream task identifier
}

// SoraGatewayService orchestrates the Sora generation flow: account/token
// selection, eligibility checks, concurrency slots, upstream calls, caching,
// and usage accounting.
type SoraGatewayService struct {
	accountRepo     AccountRepository       // base account records (proxy, concurrency)
	soraAccountRepo SoraAccountRepository   // Sora-specific account state
	usageRepo       SoraUsageStatRepository // usage statistics sink
	taskRepo        SoraTaskRepository      // task persistence
	cacheService    *SoraCacheService       // optional media caching
	settingService  *SettingService         // dynamic Sora configuration
	concurrency     *ConcurrencyService     // per-account concurrency slots
	cfg             *config.Config          // static configuration
	httpUpstream    HTTPUpstream            // proxied upstream transport
}
|
||||||
|
|
||||||
|
// NewSoraGatewayService constructs a SoraGatewayService with its dependencies.
func NewSoraGatewayService(
	accountRepo AccountRepository,
	soraAccountRepo SoraAccountRepository,
	usageRepo SoraUsageStatRepository,
	taskRepo SoraTaskRepository,
	cacheService *SoraCacheService,
	settingService *SettingService,
	concurrencyService *ConcurrencyService,
	cfg *config.Config,
	httpUpstream HTTPUpstream,
) *SoraGatewayService {
	return &SoraGatewayService{
		accountRepo:     accountRepo,
		soraAccountRepo: soraAccountRepo,
		usageRepo:       usageRepo,
		taskRepo:        taskRepo,
		cacheService:    cacheService,
		settingService:  settingService,
		concurrency:     concurrencyService,
		cfg:             cfg,
		httpUpstream:    httpUpstream,
	}
}
|
||||||
|
|
||||||
|
// ListModels returns the static catalogue of supported Sora models.
func (s *SoraGatewayService) ListModels() []sora.ModelListItem {
	return sora.ListModels()
}
|
||||||
|
|
||||||
|
// Generate runs one full Sora generation: it resolves the client and access
// token, checks account eligibility for the requested model, reserves
// concurrency slots, uploads input media, submits the task upstream, polls
// for the result, and records usage/task state.
//
// Returns ErrSoraAccountNotEligible when the account cannot serve this model
// (cooldown, missing Pro plan, disabled capability, or expired).
func (s *SoraGatewayService) Generate(ctx context.Context, account *Account, req SoraGenerationRequest) (*SoraGenerationResult, error) {
	client, cfg := s.getClient(ctx)
	if client == nil {
		return nil, errors.New("sora client is not configured")
	}
	modelCfg, ok := sora.ModelConfigs[req.Model]
	if !ok {
		return nil, fmt.Errorf("unsupported model: %s", req.Model)
	}
	accessToken, soraAcc, err := s.getAccessToken(ctx, account)
	if err != nil {
		return nil, err
	}
	// Eligibility gates: cooldown, Pro requirement, capability flags, expiry.
	if soraAcc != nil && soraAcc.SoraCooldownUntil != nil && time.Now().Before(*soraAcc.SoraCooldownUntil) {
		return nil, ErrSoraAccountNotEligible
	}
	if modelCfg.RequirePro && !isSoraProAccount(soraAcc) {
		return nil, ErrSoraAccountNotEligible
	}
	if modelCfg.Type == "video" && soraAcc != nil {
		if !soraAcc.VideoEnabled || !soraAcc.SoraSupported || soraAcc.IsExpired {
			return nil, ErrSoraAccountNotEligible
		}
	}
	if modelCfg.Type == "image" && soraAcc != nil {
		if !soraAcc.ImageEnabled || soraAcc.IsExpired {
			return nil, ErrSoraAccountNotEligible
		}
	}

	// Per-request options forwarded to every upstream call.
	opts := sora.RequestOptions{
		AccountID:          account.ID,
		AccountConcurrency: account.Concurrency,
		AccessToken:        accessToken,
	}
	if account.Proxy != nil {
		opts.ProxyURL = account.Proxy.URL()
	}

	// Reserve concurrency slots for the whole generation; released on return.
	releaseFunc, err := s.acquireSoraSlots(ctx, account, soraAcc, modelCfg.Type == "video")
	if err != nil {
		return nil, err
	}
	if releaseFunc != nil {
		defer releaseFunc()
	}

	// Prompt enhancement is synchronous: no task, no polling.
	if modelCfg.Type == "prompt_enhance" {
		content, err := client.EnhancePrompt(ctx, opts, req.Prompt, modelCfg.ExpansionLevel, modelCfg.DurationS)
		if err != nil {
			return nil, err
		}
		return &SoraGenerationResult{Content: content, MediaType: "text"}, nil
	}

	// Optional image input: download/decode, then upload to obtain a media id.
	var mediaID string
	if req.Image != "" {
		data, err := s.loadImageBytes(ctx, opts, req.Image)
		if err != nil {
			return nil, err
		}
		mediaID, err = client.UploadImage(ctx, opts, data, "image.png")
		if err != nil {
			return nil, err
		}
	}
	if req.Video != "" && modelCfg.Type != "video" {
		return nil, errors.New("视频输入仅支持视频模型")
	}
	if req.Video != "" && req.Image != "" {
		return nil, errors.New("不能同时传入 image 与 video")
	}

	// Video input (non-remix) creates a temporary character referenced as
	// @username in the prompt; the character is deleted after generation.
	var cleanupCharacter func()
	if req.Video != "" && req.RemixTargetID == "" {
		username, characterID, err := s.createCharacter(ctx, client, opts, req.Video)
		if err != nil {
			return nil, err
		}
		// An empty prompt means the caller only wanted the character created.
		if strings.TrimSpace(req.Prompt) == "" {
			return &SoraGenerationResult{
				Content:   fmt.Sprintf("角色创建成功,角色名@%s", username),
				MediaType: "text",
			}, nil
		}
		if username != "" {
			req.Prompt = fmt.Sprintf("@%s %s", username, strings.TrimSpace(req.Prompt))
		}
		if characterID != "" {
			cleanupCharacter = func() {
				// Fresh background context: cleanup must run even when the
				// request context is already cancelled.
				ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
				defer cancel()
				_ = client.DeleteCharacter(ctx, opts, characterID)
			}
		}
	}
	if cleanupCharacter != nil {
		defer cleanupCharacter()
	}

	// Submit the task upstream; the branch chosen depends on model type,
	// remix target, and storyboard-style prompts.
	var taskID string
	if modelCfg.Type == "image" {
		taskID, err = client.GenerateImage(ctx, opts, req.Prompt, modelCfg.Width, modelCfg.Height, mediaID)
	} else {
		orientation := modelCfg.Orientation
		if orientation == "" {
			orientation = "landscape"
		}
		modelName := modelCfg.Model
		if modelName == "" {
			modelName = "sy_8"
		}
		size := modelCfg.Size
		if size == "" {
			size = "small"
		}
		if req.RemixTargetID != "" {
			taskID, err = client.RemixVideo(ctx, opts, req.RemixTargetID, req.Prompt, orientation, modelCfg.NFrames, "")
		} else if sora.IsStoryboardPrompt(req.Prompt) {
			formatted := sora.FormatStoryboardPrompt(req.Prompt)
			taskID, err = client.GenerateStoryboard(ctx, opts, formatted, orientation, modelCfg.NFrames, mediaID, "")
		} else {
			taskID, err = client.GenerateVideo(ctx, opts, req.Prompt, orientation, modelCfg.NFrames, mediaID, "", modelName, size)
		}
	}
	if err != nil {
		return nil, err
	}

	// Best-effort task record; errors are deliberately ignored.
	if s.taskRepo != nil {
		_ = s.taskRepo.Create(ctx, &SoraTask{
			TaskID:    taskID,
			AccountID: account.ID,
			Model:     req.Model,
			Prompt:    req.Prompt,
			Status:    "processing",
			Progress:  0,
			CreatedAt: time.Now(),
		})
	}

	result, err := s.pollResult(ctx, client, cfg, opts, taskID, modelCfg.Type == "video", req)
	if err != nil {
		if s.taskRepo != nil {
			_ = s.taskRepo.UpdateStatus(ctx, taskID, "failed", 0, "", err.Error(), timePtr(time.Now()))
		}
		// Count consecutive errors; too many auto-disables the account.
		consecutive := 0
		if s.usageRepo != nil {
			consecutive, _ = s.usageRepo.RecordError(ctx, account.ID)
		}
		if consecutive >= soraErrorDisableThreshold {
			_ = s.accountRepo.SetError(ctx, account.ID, "Sora 连续错误次数过多,已自动禁用")
		}
		return nil, err
	}

	if s.taskRepo != nil {
		payload, _ := json.Marshal(result.ResultURLs)
		_ = s.taskRepo.UpdateStatus(ctx, taskID, "completed", 100, string(payload), "", timePtr(time.Now()))
	}
	if s.usageRepo != nil {
		_ = s.usageRepo.RecordSuccess(ctx, account.ID, modelCfg.Type == "video")
	}
	return result, nil
}
|
||||||
|
|
||||||
|
func (s *SoraGatewayService) pollResult(ctx context.Context, client *sora.Client, cfg config.SoraConfig, opts sora.RequestOptions, taskID string, isVideo bool, req SoraGenerationRequest) (*SoraGenerationResult, error) {
|
||||||
|
if taskID == "" {
|
||||||
|
return nil, errors.New("missing task id")
|
||||||
|
}
|
||||||
|
pollInterval := 2 * time.Second
|
||||||
|
if cfg.PollInterval > 0 {
|
||||||
|
pollInterval = time.Duration(cfg.PollInterval*1000) * time.Millisecond
|
||||||
|
}
|
||||||
|
timeout := 300 * time.Second
|
||||||
|
if cfg.Timeout > 0 {
|
||||||
|
timeout = time.Duration(cfg.Timeout) * time.Second
|
||||||
|
}
|
||||||
|
deadline := time.Now().Add(timeout)
|
||||||
|
|
||||||
|
for time.Now().Before(deadline) {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return nil, ctx.Err()
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
if isVideo {
|
||||||
|
pending, err := client.GetPendingTasks(ctx, opts)
|
||||||
|
if err == nil {
|
||||||
|
for _, task := range pending {
|
||||||
|
if stringFromMap(task, "id") == taskID {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
drafts, err := client.GetVideoDrafts(ctx, opts)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
items, _ := drafts["items"].([]any)
|
||||||
|
for _, item := range items {
|
||||||
|
entry, ok := item.(map[string]any)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if stringFromMap(entry, "task_id") != taskID {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
url := firstNonEmpty(stringFromMap(entry, "downloadable_url"), stringFromMap(entry, "url"))
|
||||||
|
reason := stringFromMap(entry, "reason_str")
|
||||||
|
if url == "" {
|
||||||
|
if reason == "" {
|
||||||
|
reason = "视频生成失败"
|
||||||
|
}
|
||||||
|
return nil, errors.New(reason)
|
||||||
|
}
|
||||||
|
finalURL, err := s.handleWatermark(ctx, client, cfg, opts, url, entry, req, opts.AccountID, taskID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &SoraGenerationResult{
|
||||||
|
Content: buildVideoMarkdown(finalURL),
|
||||||
|
MediaType: "video",
|
||||||
|
ResultURLs: []string{finalURL},
|
||||||
|
TaskID: taskID,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
resp, err := client.GetImageTasks(ctx, opts)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
tasks, _ := resp["task_responses"].([]any)
|
||||||
|
for _, item := range tasks {
|
||||||
|
entry, ok := item.(map[string]any)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if stringFromMap(entry, "id") != taskID {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
status := stringFromMap(entry, "status")
|
||||||
|
switch status {
|
||||||
|
case "succeeded":
|
||||||
|
urls := extractImageURLs(entry)
|
||||||
|
if len(urls) == 0 {
|
||||||
|
return nil, errors.New("image urls empty")
|
||||||
|
}
|
||||||
|
content := buildImageMarkdown(urls)
|
||||||
|
return &SoraGenerationResult{
|
||||||
|
Content: content,
|
||||||
|
MediaType: "image",
|
||||||
|
ResultURLs: urls,
|
||||||
|
TaskID: taskID,
|
||||||
|
}, nil
|
||||||
|
case "failed":
|
||||||
|
message := stringFromMap(entry, "error_message")
|
||||||
|
if message == "" {
|
||||||
|
message = "image generation failed"
|
||||||
|
}
|
||||||
|
return nil, errors.New(message)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
time.Sleep(pollInterval)
|
||||||
|
}
|
||||||
|
return nil, errors.New("generation timeout")
|
||||||
|
}
|
||||||
|
|
||||||
|
// handleWatermark resolves the final (ideally watermark-free) video URL for
// a finished draft entry, then caches it locally.
//
// When watermark-free mode is disabled or the generation id is missing, the
// original URL is cached and returned as-is. Otherwise the draft is posted
// publicly, parsed into a clean URL (via the custom parse service or the
// default CDN pattern), cached, and the temporary post is deleted
// (best effort). Each failure point falls back to the watermarked URL when
// FallbackOnFailure is set; otherwise it returns the error.
func (s *SoraGatewayService) handleWatermark(ctx context.Context, client *sora.Client, cfg config.SoraConfig, opts sora.RequestOptions, url string, entry map[string]any, req SoraGenerationRequest, accountID int64, taskID string) (string, error) {
	if !cfg.WatermarkFree.Enabled {
		return s.cacheVideo(ctx, url, req, accountID, taskID), nil
	}
	generationID := stringFromMap(entry, "id")
	if generationID == "" {
		return s.cacheVideo(ctx, url, req, accountID, taskID), nil
	}
	// Publish the generation; the resulting post id is what parsers consume.
	postID, err := client.PostVideoForWatermarkFree(ctx, opts, generationID)
	if err != nil {
		if cfg.WatermarkFree.FallbackOnFailure {
			return s.cacheVideo(ctx, url, req, accountID, taskID), nil
		}
		return "", err
	}
	if postID == "" {
		if cfg.WatermarkFree.FallbackOnFailure {
			return s.cacheVideo(ctx, url, req, accountID, taskID), nil
		}
		return "", errors.New("watermark-free post id empty")
	}
	var parsedURL string
	if cfg.WatermarkFree.ParseMethod == "custom" {
		if cfg.WatermarkFree.CustomParseURL == "" || cfg.WatermarkFree.CustomParseToken == "" {
			return "", errors.New("custom parse 未配置")
		}
		parsedURL, err = s.fetchCustomWatermarkURL(ctx, cfg.WatermarkFree.CustomParseURL, cfg.WatermarkFree.CustomParseToken, postID)
		if err != nil {
			if cfg.WatermarkFree.FallbackOnFailure {
				return s.cacheVideo(ctx, url, req, accountID, taskID), nil
			}
			return "", err
		}
	} else {
		// Default parse method: third-party CDN mirrors the post as MP4.
		parsedURL = fmt.Sprintf("https://oscdn2.dyysy.com/MP4/%s.mp4", postID)
	}
	// Cache first, then delete the temporary public post (best effort).
	cached := s.cacheVideo(ctx, parsedURL, req, accountID, taskID)
	_ = client.DeletePost(ctx, opts, postID)
	return cached, nil
}
|
||||||
|
|
||||||
|
func (s *SoraGatewayService) cacheVideo(ctx context.Context, url string, req SoraGenerationRequest, accountID int64, taskID string) string {
|
||||||
|
if s.cacheService == nil {
|
||||||
|
return url
|
||||||
|
}
|
||||||
|
file, err := s.cacheService.CacheVideo(ctx, accountID, req.UserID, taskID, url)
|
||||||
|
if err != nil || file == nil {
|
||||||
|
return url
|
||||||
|
}
|
||||||
|
return file.CacheURL
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *SoraGatewayService) getAccessToken(ctx context.Context, account *Account) (string, *SoraAccount, error) {
|
||||||
|
if account == nil {
|
||||||
|
return "", nil, errors.New("account is nil")
|
||||||
|
}
|
||||||
|
var soraAcc *SoraAccount
|
||||||
|
if s.soraAccountRepo != nil {
|
||||||
|
soraAcc, _ = s.soraAccountRepo.GetByAccountID(ctx, account.ID)
|
||||||
|
}
|
||||||
|
if soraAcc != nil && soraAcc.AccessToken != "" {
|
||||||
|
return soraAcc.AccessToken, soraAcc, nil
|
||||||
|
}
|
||||||
|
if account.Credentials != nil {
|
||||||
|
if v, ok := account.Credentials["access_token"].(string); ok && v != "" {
|
||||||
|
return v, soraAcc, nil
|
||||||
|
}
|
||||||
|
if v, ok := account.Credentials["token"].(string); ok && v != "" {
|
||||||
|
return v, soraAcc, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "", soraAcc, ErrSoraAccountMissingToken
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *SoraGatewayService) getClient(ctx context.Context) (*sora.Client, config.SoraConfig) {
|
||||||
|
cfg := s.getSoraConfig(ctx)
|
||||||
|
if s.httpUpstream == nil {
|
||||||
|
return nil, cfg
|
||||||
|
}
|
||||||
|
baseURL := strings.TrimSpace(cfg.BaseURL)
|
||||||
|
if baseURL == "" {
|
||||||
|
return nil, cfg
|
||||||
|
}
|
||||||
|
timeout := time.Duration(cfg.Timeout) * time.Second
|
||||||
|
if cfg.Timeout <= 0 {
|
||||||
|
timeout = 120 * time.Second
|
||||||
|
}
|
||||||
|
enableTLS := false
|
||||||
|
if s.cfg != nil {
|
||||||
|
enableTLS = s.cfg.Gateway.TLSFingerprint.Enabled
|
||||||
|
}
|
||||||
|
return sora.NewClient(baseURL, timeout, s.httpUpstream, enableTLS), cfg
|
||||||
|
}
|
||||||
|
|
||||||
|
// decodeBase64 decodes a standard-base64 payload, tolerating a data-URI
// prefix ("data:...;base64,") by discarding everything up to and including
// the first "base64," marker before decoding.
func decodeBase64(raw string) ([]byte, error) {
	payload := raw
	if _, after, found := strings.Cut(raw, "base64,"); found {
		payload = after
	}
	return base64.StdEncoding.DecodeString(payload)
}
|
||||||
|
|
||||||
|
// extractImageURLs collects the non-empty "url" fields from the entry's
// "generations" array, silently skipping malformed elements.
func extractImageURLs(entry map[string]any) []string {
	generations, _ := entry["generations"].([]any)
	urls := make([]string, 0, len(generations))
	for _, raw := range generations {
		gen, isMap := raw.(map[string]any)
		if !isMap {
			continue
		}
		u, isStr := gen["url"].(string)
		if isStr && u != "" {
			urls = append(urls, u)
		}
	}
	return urls
}
|
||||||
|
|
||||||
|
// buildImageMarkdown renders each image URL as a Markdown image tag, one per
// line, for display in chat clients.
func buildImageMarkdown(urls []string) string {
	parts := make([]string, 0, len(urls))
	for _, u := range urls {
		// BUG FIX: the format string was empty ("" with an unused argument —
		// a go vet error), which silently dropped every URL from the output.
		// Emit a Markdown image reference instead.
		parts = append(parts, fmt.Sprintf("![image](%s)", u))
	}
	return strings.Join(parts, "\n")
}
|
||||||
|
|
||||||
|
// buildVideoMarkdown wraps the video URL in an HTML5 <video> tag inside a
// fenced html code block, matching the chat-client rendering convention.
func buildVideoMarkdown(url string) string {
	const frame = "```html\n<video src='%s' controls></video>\n```"
	return fmt.Sprintf(frame, url)
}
|
||||||
|
|
||||||
|
// stringFromMap returns m[key] when it holds a string; otherwise "" (nil
// map, missing key, or non-string value all yield the empty string).
func stringFromMap(m map[string]any, key string) string {
	if m == nil {
		return ""
	}
	value, _ := m[key].(string)
	return value
}
|
||||||
|
|
||||||
|
// firstNonEmpty returns the first argument that is not blank after trimming
// whitespace. The winning value is returned as-is (untrimmed); "" when every
// argument is blank.
func firstNonEmpty(values ...string) string {
	for _, candidate := range values {
		if strings.TrimSpace(candidate) == "" {
			continue
		}
		return candidate
	}
	return ""
}
|
||||||
|
|
||||||
|
func isSoraProAccount(acc *SoraAccount) bool {
|
||||||
|
if acc == nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return strings.EqualFold(acc.PlanType, "chatgpt_pro")
|
||||||
|
}
|
||||||
|
|
||||||
|
// timePtr returns a pointer to t — a small helper for populating optional
// *time.Time fields (e.g. SoraTask.CompletedAt).
func timePtr(t time.Time) *time.Time {
	return &t
}
|
||||||
|
|
||||||
|
// fetchCustomWatermarkURL 使用自定义解析服务获取无水印视频 URL
|
||||||
|
func (s *SoraGatewayService) fetchCustomWatermarkURL(ctx context.Context, parseURL, parseToken, postID string) (string, error) {
|
||||||
|
// 使用项目的 URL 校验器验证 parseURL 格式,防止 SSRF 攻击
|
||||||
|
if _, err := urlvalidator.ValidateHTTPSURL(parseURL, urlvalidator.ValidationOptions{}); err != nil {
|
||||||
|
return "", fmt.Errorf("无效的解析服务地址: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
payload := map[string]any{
|
||||||
|
"url": fmt.Sprintf("https://sora.chatgpt.com/p/%s", postID),
|
||||||
|
"token": parseToken,
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
req, err := http.NewRequestWithContext(ctx, "POST", strings.TrimRight(parseURL, "/")+"/get-sora-link", strings.NewReader(string(body)))
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
|
||||||
|
// 复用 httpUpstream,遵守代理和 TLS 配置
|
||||||
|
enableTLS := false
|
||||||
|
if s.cfg != nil {
|
||||||
|
enableTLS = s.cfg.Gateway.TLSFingerprint.Enabled
|
||||||
|
}
|
||||||
|
resp, err := s.httpUpstream.DoWithTLS(req, "", 0, 1, enableTLS)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
|
||||||
|
return "", fmt.Errorf("custom parse failed: %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
var parsed map[string]any
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&parsed); err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if errMsg, ok := parsed["error"].(string); ok && errMsg != "" {
|
||||||
|
return "", errors.New(errMsg)
|
||||||
|
}
|
||||||
|
if link, ok := parsed["download_link"].(string); ok {
|
||||||
|
return link, nil
|
||||||
|
}
|
||||||
|
return "", errors.New("custom parse response missing download_link")
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
	// Slot-type discriminators mixed into soraConcurrencyAccountID so the
	// image lock, image limit, and video limit counters occupy distinct keys
	// in the concurrency service.
	soraSlotImageLock  int64 = 1
	soraSlotImageLimit int64 = 2
	soraSlotVideoLimit int64 = 3
	// soraDefaultUsername is the fallback character username used when the
	// upstream username hint is empty.
	soraDefaultUsername = "character"
)
|
||||||
|
|
||||||
|
// CallLogicMode returns the configured Sora call-logic mode, trimmed of
// surrounding whitespace.
func (s *SoraGatewayService) CallLogicMode(ctx context.Context) string {
	return strings.TrimSpace(s.getSoraConfig(ctx).CallLogicMode)
}
|
||||||
|
|
||||||
|
func (s *SoraGatewayService) getSoraConfig(ctx context.Context) config.SoraConfig {
|
||||||
|
if s.settingService != nil {
|
||||||
|
return s.settingService.GetSoraConfig(ctx)
|
||||||
|
}
|
||||||
|
if s.cfg != nil {
|
||||||
|
return s.cfg.Sora
|
||||||
|
}
|
||||||
|
return config.SoraConfig{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// acquireSoraSlots reserves the concurrency slots needed for one Sora
// generation on the given account.
//
// Video: one slot against the per-account video limit (only when
// VideoConcurrency > 0). Image: an exclusive per-account image lock
// (max concurrency 1) first, then one slot against the image limit
// (only when ImageConcurrency > 0).
//
// Returns a release func that frees every acquired slot, or nil when nothing
// was acquired. On any acquisition failure all slots acquired so far are
// released before the error is returned, preventing a permanent leak of the
// already-held image lock.
func (s *SoraGatewayService) acquireSoraSlots(ctx context.Context, account *Account, soraAcc *SoraAccount, isVideo bool) (func(), error) {
	if s.concurrency == nil || account == nil || soraAcc == nil {
		return nil, nil
	}
	releases := make([]func(), 0, 2)
	appendRelease := func(release func()) {
		if release != nil {
			releases = append(releases, release)
		}
	}
	// Release everything acquired so far (used on error paths).
	releaseAll := func() {
		for _, r := range releases {
			r()
		}
	}

	if isVideo {
		if soraAcc.VideoConcurrency > 0 {
			release, err := s.acquireSoraSlot(ctx, account.ID, soraAcc.VideoConcurrency, soraSlotVideoLimit)
			if err != nil {
				releaseAll()
				return nil, err
			}
			appendRelease(release)
		}
	} else {
		// Exclusive image lock: only one image generation per account at a time.
		release, err := s.acquireSoraSlot(ctx, account.ID, 1, soraSlotImageLock)
		if err != nil {
			releaseAll()
			return nil, err
		}
		appendRelease(release)
		if soraAcc.ImageConcurrency > 0 {
			release, err := s.acquireSoraSlot(ctx, account.ID, soraAcc.ImageConcurrency, soraSlotImageLimit)
			if err != nil {
				releaseAll() // release the already-held soraSlotImageLock
				return nil, err
			}
			appendRelease(release)
		}
	}

	if len(releases) == 0 {
		return nil, nil
	}
	return func() {
		for _, release := range releases {
			release()
		}
	}, nil
}
|
||||||
|
|
||||||
|
func (s *SoraGatewayService) acquireSoraSlot(ctx context.Context, accountID int64, maxConcurrency int, slotType int64) (func(), error) {
|
||||||
|
if s.concurrency == nil || maxConcurrency <= 0 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
derivedID := soraConcurrencyAccountID(accountID, slotType)
|
||||||
|
result, err := s.concurrency.AcquireAccountSlot(ctx, derivedID, maxConcurrency)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if !result.Acquired {
|
||||||
|
return nil, ErrSoraAccountNotEligible
|
||||||
|
}
|
||||||
|
return result.ReleaseFunc, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// soraConcurrencyAccountID derives a negative synthetic account ID for Sora
// slot accounting — -(|accountID|*10 + slotType) — so each slot type gets a
// distinct key that can never collide with a real (positive) account ID.
func soraConcurrencyAccountID(accountID int64, slotType int64) int64 {
	abs := accountID
	if abs < 0 {
		abs = -abs
	}
	return -(abs*10 + slotType)
}
|
||||||
|
|
||||||
|
// createCharacter builds a reusable Sora character ("cameo") from the raw
// video input and publishes it so prompts can reference it as @username.
// Pipeline: upload video → poll cameo processing → re-host the generated
// avatar (download then upload) → finalize → set public.
// Returns (username, characterID).
func (s *SoraGatewayService) createCharacter(ctx context.Context, client *sora.Client, opts sora.RequestOptions, rawVideo string) (string, string, error) {
	videoBytes, err := s.loadVideoBytes(ctx, opts, rawVideo)
	if err != nil {
		return "", "", err
	}
	cameoID, err := client.UploadCharacterVideo(ctx, opts, videoBytes)
	if err != nil {
		return "", "", err
	}
	// Block until upstream has finished processing the uploaded video.
	status, err := s.pollCameoStatus(ctx, client, opts, cameoID)
	if err != nil {
		return "", "", err
	}
	// Username/display-name hints come from the cameo status; fall back to
	// defaults when upstream provides none.
	username := processCharacterUsername(stringFromMap(status, "username_hint"))
	if username == "" {
		username = soraDefaultUsername
	}
	displayName := stringFromMap(status, "display_name_hint")
	if displayName == "" {
		displayName = "Character"
	}
	profileURL := stringFromMap(status, "profile_asset_url")
	if profileURL == "" {
		return "", "", errors.New("profile asset url missing")
	}
	// Re-host the generated avatar as the character's profile asset.
	avatarData, err := client.DownloadCharacterImage(ctx, opts, profileURL)
	if err != nil {
		return "", "", err
	}
	assetPointer, err := client.UploadCharacterImage(ctx, opts, avatarData)
	if err != nil {
		return "", "", err
	}
	characterID, err := client.FinalizeCharacter(ctx, opts, cameoID, username, displayName, assetPointer)
	if err != nil {
		return "", "", err
	}
	// The character must be public before other prompts can reference it.
	if err := client.SetCharacterPublic(ctx, opts, cameoID); err != nil {
		return "", "", err
	}
	return username, characterID, nil
}
|
||||||
|
|
||||||
|
func (s *SoraGatewayService) pollCameoStatus(ctx context.Context, client *sora.Client, opts sora.RequestOptions, cameoID string) (map[string]any, error) {
|
||||||
|
if cameoID == "" {
|
||||||
|
return nil, errors.New("cameo id empty")
|
||||||
|
}
|
||||||
|
timeout := 600 * time.Second
|
||||||
|
pollInterval := 5 * time.Second
|
||||||
|
deadline := time.Now().Add(timeout)
|
||||||
|
consecutiveErrors := 0
|
||||||
|
maxConsecutiveErrors := 3
|
||||||
|
|
||||||
|
for time.Now().Before(deadline) {
|
||||||
|
select {
|
||||||
|
case <-ctx.Done():
|
||||||
|
return nil, ctx.Err()
|
||||||
|
default:
|
||||||
|
}
|
||||||
|
time.Sleep(pollInterval)
|
||||||
|
status, err := client.GetCameoStatus(ctx, opts, cameoID)
|
||||||
|
if err != nil {
|
||||||
|
consecutiveErrors++
|
||||||
|
if consecutiveErrors >= maxConsecutiveErrors {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
consecutiveErrors = 0
|
||||||
|
statusValue := stringFromMap(status, "status")
|
||||||
|
statusMessage := stringFromMap(status, "status_message")
|
||||||
|
if statusValue == "failed" {
|
||||||
|
if statusMessage == "" {
|
||||||
|
statusMessage = "角色创建失败"
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("角色创建失败: %s", statusMessage)
|
||||||
|
}
|
||||||
|
if strings.EqualFold(statusMessage, "Completed") || strings.EqualFold(statusValue, "finalized") {
|
||||||
|
return status, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, errors.New("角色创建超时")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *SoraGatewayService) loadVideoBytes(ctx context.Context, opts sora.RequestOptions, rawVideo string) ([]byte, error) {
|
||||||
|
trimmed := strings.TrimSpace(rawVideo)
|
||||||
|
if trimmed == "" {
|
||||||
|
return nil, errors.New("video data is empty")
|
||||||
|
}
|
||||||
|
if looksLikeURL(trimmed) {
|
||||||
|
if err := s.validateMediaURL(trimmed); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return s.downloadMedia(ctx, opts, trimmed, maxVideoDownloadSize)
|
||||||
|
}
|
||||||
|
return decodeBase64(trimmed)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *SoraGatewayService) loadImageBytes(ctx context.Context, opts sora.RequestOptions, rawImage string) ([]byte, error) {
|
||||||
|
trimmed := strings.TrimSpace(rawImage)
|
||||||
|
if trimmed == "" {
|
||||||
|
return nil, errors.New("image data is empty")
|
||||||
|
}
|
||||||
|
if looksLikeURL(trimmed) {
|
||||||
|
if err := s.validateMediaURL(trimmed); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return s.downloadMedia(ctx, opts, trimmed, maxImageDownloadSize)
|
||||||
|
}
|
||||||
|
return decodeBase64(trimmed)
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateMediaURL checks a user-supplied media URL before download (SSRF
// guard). With the URL allowlist enabled it enforces HTTPS plus the
// configured upstream-host allowlist; otherwise only the URL format is
// checked (optionally permitting plain HTTP). A nil config skips validation
// entirely.
func (s *SoraGatewayService) validateMediaURL(rawURL string) error {
	cfg := s.cfg
	if cfg == nil {
		return nil
	}
	if cfg.Security.URLAllowlist.Enabled {
		_, err := urlvalidator.ValidateHTTPSURL(rawURL, urlvalidator.ValidationOptions{
			AllowedHosts:     cfg.Security.URLAllowlist.UpstreamHosts,
			RequireAllowlist: true,
			AllowPrivate:     cfg.Security.URLAllowlist.AllowPrivateHosts,
		})
		if err != nil {
			return fmt.Errorf("媒体地址不合法: %w", err)
		}
		return nil
	}
	// Allowlist disabled: format-only validation (scheme/host sanity).
	if _, err := urlvalidator.ValidateURLFormat(rawURL, cfg.Security.URLAllowlist.AllowInsecureHTTP); err != nil {
		return fmt.Errorf("媒体地址不合法: %w", err)
	}
	return nil
}
|
||||||
|
|
||||||
|
// downloadMedia fetches a remote media file through the shared HTTP upstream
// (honoring the account's proxy and the TLS-fingerprint setting) and returns
// its bytes. The read is capped at maxSize; anything larger is rejected to
// prevent memory-exhaustion DoS.
func (s *SoraGatewayService) downloadMedia(ctx context.Context, opts sora.RequestOptions, mediaURL string, maxSize int64) ([]byte, error) {
	if s.httpUpstream == nil {
		return nil, errors.New("upstream is nil")
	}
	req, err := http.NewRequestWithContext(ctx, "GET", mediaURL, nil)
	if err != nil {
		return nil, err
	}
	// Browser-like User-Agent; some media hosts reject requests without one.
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36")
	enableTLS := false
	if s.cfg != nil {
		enableTLS = s.cfg.Gateway.TLSFingerprint.Enabled
	}
	resp, err := s.httpUpstream.DoWithTLS(req, opts.ProxyURL, opts.AccountID, opts.AccountConcurrency, enableTLS)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return nil, fmt.Errorf("下载失败: %d", resp.StatusCode)
	}

	// Read at most maxSize+1 bytes so an oversized body is detectable
	// without buffering the whole thing (DoS guard).
	limitedReader := io.LimitReader(resp.Body, maxSize+1)
	data, err := io.ReadAll(limitedReader)
	if err != nil {
		return nil, fmt.Errorf("读取响应失败: %w", err)
	}

	// The sentinel extra byte means the body exceeded the limit.
	if int64(len(data)) > maxSize {
		return nil, fmt.Errorf("媒体文件过大 (最大 %d 字节, 实际 %d 字节)", maxSize, len(data))
	}

	return data, nil
}
|
||||||
|
|
||||||
|
// processCharacterUsername derives a character username from the upstream
// hint: only the segment after the last "." is kept, and a random
// three-digit suffix (100-999) is appended to reduce collision risk.
// A blank hint yields "".
func processCharacterUsername(usernameHint string) string {
	hint := strings.TrimSpace(usernameHint)
	if hint == "" {
		return ""
	}
	base := hint
	if dot := strings.LastIndex(hint, "."); dot != -1 && dot+1 < len(hint) {
		base = hint[dot+1:]
	}
	src := rand.New(rand.NewSource(time.Now().UnixNano()))
	return fmt.Sprintf("%s%d", base, src.Intn(900)+100)
}
|
||||||
|
|
||||||
|
// looksLikeURL reports whether the trimmed, lower-cased value starts with an
// http:// or https:// scheme.
func looksLikeURL(value string) bool {
	v := strings.ToLower(strings.TrimSpace(value))
	for _, scheme := range []string{"http://", "https://"} {
		if strings.HasPrefix(v, scheme) {
			return true
		}
	}
	return false
}
|
||||||
113
backend/internal/service/sora_repository.go
Normal file
113
backend/internal/service/sora_repository.go
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
package service
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/Wei-Shaw/sub2api/internal/pkg/pagination"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SoraAccount carries the Sora-specific extension record for an account.
type SoraAccount struct {
	AccountID int64 // owning Account.ID
	// Token material used against the Sora API.
	AccessToken  string
	SessionToken string
	RefreshToken string
	ClientID     string
	// Profile metadata.
	Email    string
	Username string
	Remark   string
	UseCount int
	// Subscription info; PlanType "chatgpt_pro" unlocks Pro-only models
	// (see isSoraProAccount).
	PlanType        string
	PlanTitle       string
	SubscriptionEnd *time.Time
	// Sora enrollment / invite state.
	SoraSupported      bool
	SoraInviteCode     string
	SoraRedeemedCount  int
	SoraRemainingCount int
	SoraTotalCount     int
	SoraCooldownUntil  *time.Time // account is rejected for Sora until this time
	CooledUntil        *time.Time
	// Capability flags and per-account limits; concurrency values are only
	// enforced when > 0 (see acquireSoraSlots).
	ImageEnabled     bool
	VideoEnabled     bool
	ImageConcurrency int
	VideoConcurrency int
	IsExpired        bool
	CreatedAt        time.Time
	UpdatedAt        time.Time
}
|
||||||
|
|
||||||
|
// SoraUsageStat holds per-account Sora call statistics.
type SoraUsageStat struct {
	AccountID int64
	// Lifetime counters.
	ImageCount  int
	VideoCount  int
	ErrorCount  int
	LastErrorAt *time.Time
	// Rolling same-day counters; TodayDate marks which day they cover.
	TodayImageCount int
	TodayVideoCount int
	TodayErrorCount int
	TodayDate       *time.Time
	// Consecutive failures; compared against soraErrorDisableThreshold to
	// auto-disable the account (see SoraGatewayService.Generate).
	ConsecutiveErrorCount int
	CreatedAt             time.Time
	UpdatedAt             time.Time
}
|
||||||
|
|
||||||
|
// SoraTask is a persisted record of one Sora generation task.
type SoraTask struct {
	TaskID    string // upstream Sora task id
	AccountID int64
	Model     string
	Prompt    string
	Status    string  // "processing", "completed", or "failed" (set by the gateway)
	Progress  float64 // 0-100
	// ResultURLs is stored as a JSON-encoded string array.
	ResultURLs   string
	ErrorMessage string
	RetryCount   int
	CreatedAt    time.Time
	CompletedAt  *time.Time // set when the task reaches a terminal status
}
|
||||||
|
|
||||||
|
// SoraCacheFile records one locally cached Sora media file.
type SoraCacheFile struct {
	ID        int64
	TaskID    string
	AccountID int64
	UserID    int64
	MediaType string // assumed "image"/"video" — confirm against the cache service
	// OriginalURL is the upstream source; CachePath/CacheURL point at the
	// local mirror served to clients.
	OriginalURL string
	CachePath   string
	CacheURL    string
	SizeBytes   int64
	CreatedAt   time.Time
}
|
||||||
|
|
||||||
|
// SoraAccountRepository is the persistence interface for Sora account
// extension records.
type SoraAccountRepository interface {
	// GetByAccountID returns the record for one account.
	GetByAccountID(ctx context.Context, accountID int64) (*SoraAccount, error)
	// GetByAccountIDs batch-loads records, keyed by account ID.
	GetByAccountIDs(ctx context.Context, accountIDs []int64) (map[int64]*SoraAccount, error)
	// Upsert creates or partially updates the record with the given fields.
	Upsert(ctx context.Context, accountID int64, updates map[string]any) error
}
|
||||||
|
|
||||||
|
// SoraUsageStatRepository is the persistence interface for Sora usage
// statistics.
type SoraUsageStatRepository interface {
	// RecordSuccess increments the image or video success counters.
	RecordSuccess(ctx context.Context, accountID int64, isVideo bool) error
	// RecordError increments error counters and returns the updated
	// consecutive-error count (used for auto-disable decisions).
	RecordError(ctx context.Context, accountID int64) (int, error)
	// ResetConsecutiveErrors zeroes the consecutive-error counter.
	ResetConsecutiveErrors(ctx context.Context, accountID int64) error
	GetByAccountID(ctx context.Context, accountID int64) (*SoraUsageStat, error)
	GetByAccountIDs(ctx context.Context, accountIDs []int64) (map[int64]*SoraUsageStat, error)
	// List returns a paginated view of all usage stats.
	List(ctx context.Context, params pagination.PaginationParams) ([]*SoraUsageStat, *pagination.PaginationResult, error)
}
|
||||||
|
|
||||||
|
// SoraTaskRepository persists Sora task records.
type SoraTaskRepository interface {
	// Create inserts a new task row.
	Create(ctx context.Context, task *SoraTask) error
	// UpdateStatus updates the mutable task fields by task ID as the task
	// progresses; completedAt stays nil while the task is running.
	UpdateStatus(ctx context.Context, taskID string, status string, progress float64, resultURLs string, errorMessage string, completedAt *time.Time) error
}
|
||||||
|
|
||||||
|
// SoraCacheFileRepository persists cached-media bookkeeping rows.
type SoraCacheFileRepository interface {
	// Create records a newly cached file.
	Create(ctx context.Context, file *SoraCacheFile) error
	// ListOldest returns up to limit rows ordered oldest-first — used by the
	// cleanup service to evict when the cache exceeds its size budget.
	ListOldest(ctx context.Context, limit int) ([]*SoraCacheFile, error)
	// DeleteByIDs removes bookkeeping rows by primary key.
	DeleteByIDs(ctx context.Context, ids []int64) error
}
|
||||||
313
backend/internal/service/sora_token_refresh_service.go
Normal file
313
backend/internal/service/sora_token_refresh_service.go
Normal file
@@ -0,0 +1,313 @@
|
|||||||
|
package service
|
||||||
|
|
||||||
|
import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log"
	"net/http"
	"strings"
	"sync"
	"time"

	"github.com/Wei-Shaw/sub2api/internal/config"
)
|
||||||
|
|
||||||
|
// defaultSoraClientID is the OAuth client ID used for refresh-token exchange
// when the account record does not carry its own client ID.
const defaultSoraClientID = "app_LlGpXReQgckcGGUo2JrYvtJK"
|
||||||
|
|
||||||
|
// SoraTokenRefreshService handles Sora access token refresh.
// It runs a once-per-day background loop (refreshLoop) that renews access
// tokens for all Sora platform accounts, using the session-token path first
// and falling back to the OAuth refresh-token path.
type SoraTokenRefreshService struct {
	accountRepo     AccountRepository     // platform account lookup
	soraAccountRepo SoraAccountRepository // Sora-specific extras (tokens, client IDs)
	settingService  *SettingService       // dynamic settings; takes precedence in isEnabled
	httpUpstream    HTTPUpstream          // outbound HTTP with proxy/TLS-fingerprint support
	cfg             *config.Config        // static config (timezone, TLS fingerprint, fallback toggle)
	stopCh          chan struct{}         // closed by Stop to terminate the loop
	stopOnce        sync.Once             // guards stopCh against a double close
}
|
||||||
|
|
||||||
|
func NewSoraTokenRefreshService(
|
||||||
|
accountRepo AccountRepository,
|
||||||
|
soraAccountRepo SoraAccountRepository,
|
||||||
|
settingService *SettingService,
|
||||||
|
httpUpstream HTTPUpstream,
|
||||||
|
cfg *config.Config,
|
||||||
|
) *SoraTokenRefreshService {
|
||||||
|
return &SoraTokenRefreshService{
|
||||||
|
accountRepo: accountRepo,
|
||||||
|
soraAccountRepo: soraAccountRepo,
|
||||||
|
settingService: settingService,
|
||||||
|
httpUpstream: httpUpstream,
|
||||||
|
cfg: cfg,
|
||||||
|
stopCh: make(chan struct{}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start launches the daily refresh loop in a background goroutine.
// Calling Start on a nil service is a no-op.
func (s *SoraTokenRefreshService) Start() {
	if s == nil {
		return
	}
	go s.refreshLoop()
}
|
||||||
|
|
||||||
|
// Stop signals the refresh loop to exit. It is safe to call multiple times
// (sync.Once guards the channel close) and safe on a nil service.
func (s *SoraTokenRefreshService) Stop() {
	if s == nil {
		return
	}
	s.stopOnce.Do(func() {
		close(s.stopCh)
	})
}
|
||||||
|
|
||||||
|
func (s *SoraTokenRefreshService) refreshLoop() {
|
||||||
|
for {
|
||||||
|
wait := s.nextRunDelay()
|
||||||
|
timer := time.NewTimer(wait)
|
||||||
|
select {
|
||||||
|
case <-timer.C:
|
||||||
|
s.refreshOnce()
|
||||||
|
case <-s.stopCh:
|
||||||
|
timer.Stop()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// refreshOnce performs a single refresh pass over every Sora platform account
// and logs a success/failed/skipped summary. Invoked by refreshLoop once a day.
func (s *SoraTokenRefreshService) refreshOnce() {
	// Generous timeout: one pass may refresh many accounts sequentially.
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Minute)
	defer cancel()

	if !s.isEnabled(ctx) {
		log.Println("[SoraTokenRefresh] disabled by settings")
		return
	}
	if s.accountRepo == nil || s.soraAccountRepo == nil {
		log.Println("[SoraTokenRefresh] repository not configured")
		return
	}

	accounts, err := s.accountRepo.ListByPlatform(ctx, PlatformSora)
	if err != nil {
		log.Printf("[SoraTokenRefresh] list accounts failed: %v", err)
		return
	}
	if len(accounts) == 0 {
		log.Println("[SoraTokenRefresh] no sora accounts")
		return
	}
	ids := make([]int64, 0, len(accounts))
	accountMap := make(map[int64]*Account, len(accounts))
	for i := range accounts {
		// Copy into a per-iteration variable so the pointer stored in the
		// map does not alias the slice element.
		acc := accounts[i]
		ids = append(ids, acc.ID)
		accountMap[acc.ID] = &acc
	}
	// Batch-load the Sora extras (session/refresh tokens, client IDs).
	accountExtras, err := s.soraAccountRepo.GetByAccountIDs(ctx, ids)
	if err != nil {
		log.Printf("[SoraTokenRefresh] load sora accounts failed: %v", err)
		return
	}

	success := 0
	failed := 0
	skipped := 0
	for accountID, account := range accountMap {
		extra := accountExtras[accountID]
		if extra == nil {
			// No Sora extras row for this account; nothing to refresh.
			skipped++
			continue
		}
		result, err := s.refreshForAccount(ctx, account, extra)
		if err != nil {
			failed++
			log.Printf("[SoraTokenRefresh] account %d refresh failed: %v", accountID, err)
			continue
		}
		if result == nil {
			// (nil, nil) means the account has no usable credentials.
			skipped++
			continue
		}

		// Persist the new token; refresh token and email only when provided
		// (the session path yields no refresh token, the OAuth path no email).
		updates := map[string]any{
			"access_token": result.AccessToken,
		}
		if result.RefreshToken != "" {
			updates["refresh_token"] = result.RefreshToken
		}
		if result.Email != "" {
			updates["email"] = result.Email
		}
		if err := s.soraAccountRepo.Upsert(ctx, accountID, updates); err != nil {
			failed++
			log.Printf("[SoraTokenRefresh] account %d update failed: %v", accountID, err)
			continue
		}
		success++
	}
	log.Printf("[SoraTokenRefresh] done: success=%d failed=%d skipped=%d", success, failed, skipped)
}
|
||||||
|
|
||||||
|
// refreshForAccount obtains a new access token for one account. It tries the
// browser session-token path first and falls back to the OAuth refresh-token
// path. Returns (nil, nil) when the account has no credentials to refresh with.
func (s *SoraTokenRefreshService) refreshForAccount(ctx context.Context, account *Account, extra *SoraAccount) (*soraRefreshResult, error) {
	if extra == nil {
		return nil, nil
	}
	if strings.TrimSpace(extra.SessionToken) == "" && strings.TrimSpace(extra.RefreshToken) == "" {
		// Neither credential present; caller counts this as skipped.
		return nil, nil
	}

	if extra.SessionToken != "" {
		result, err := s.refreshWithSessionToken(ctx, account, extra.SessionToken)
		if err == nil && result != nil && result.AccessToken != "" {
			return result, nil
		}
		// Session path failed or returned no token: only surface the error
		// when there is no refresh token left to fall back to.
		if strings.TrimSpace(extra.RefreshToken) == "" {
			return nil, err
		}
	}

	clientID := strings.TrimSpace(extra.ClientID)
	if clientID == "" {
		clientID = defaultSoraClientID
	}
	return s.refreshWithRefreshToken(ctx, account, extra.RefreshToken, clientID)
}
|
||||||
|
|
||||||
|
// soraRefreshResult carries the outcome of a successful token refresh.
type soraRefreshResult struct {
	AccessToken  string // new bearer token; always non-empty on success
	RefreshToken string // rotated refresh token; empty on the session-token path
	Email        string // account email; only populated by the session-token path
}
|
||||||
|
|
||||||
|
// soraSessionResponse mirrors the JSON body returned by
// GET https://sora.chatgpt.com/api/auth/session.
type soraSessionResponse struct {
	AccessToken string `json:"accessToken"`
	User        struct {
		Email string `json:"email"`
	} `json:"user"`
}
|
||||||
|
|
||||||
|
// soraRefreshResponse mirrors the JSON body returned by the OAuth token
// endpoint (https://auth.openai.com/oauth/token) for grant_type=refresh_token.
type soraRefreshResponse struct {
	AccessToken  string `json:"access_token"`
	RefreshToken string `json:"refresh_token"`
}
|
||||||
|
|
||||||
|
func (s *SoraTokenRefreshService) refreshWithSessionToken(ctx context.Context, account *Account, sessionToken string) (*soraRefreshResult, error) {
|
||||||
|
if s.httpUpstream == nil {
|
||||||
|
return nil, fmt.Errorf("upstream not configured")
|
||||||
|
}
|
||||||
|
req, err := http.NewRequestWithContext(ctx, "GET", "https://sora.chatgpt.com/api/auth/session", nil)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req.Header.Set("Cookie", "__Secure-next-auth.session-token="+sessionToken)
|
||||||
|
req.Header.Set("Accept", "application/json")
|
||||||
|
req.Header.Set("Origin", "https://sora.chatgpt.com")
|
||||||
|
req.Header.Set("Referer", "https://sora.chatgpt.com/")
|
||||||
|
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36")
|
||||||
|
|
||||||
|
enableTLS := false
|
||||||
|
if s.cfg != nil {
|
||||||
|
enableTLS = s.cfg.Gateway.TLSFingerprint.Enabled
|
||||||
|
}
|
||||||
|
proxyURL := ""
|
||||||
|
accountConcurrency := 0
|
||||||
|
accountID := int64(0)
|
||||||
|
if account != nil {
|
||||||
|
accountID = account.ID
|
||||||
|
accountConcurrency = account.Concurrency
|
||||||
|
if account.Proxy != nil {
|
||||||
|
proxyURL = account.Proxy.URL()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
resp, err := s.httpUpstream.DoWithTLS(req, proxyURL, accountID, accountConcurrency, enableTLS)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
|
||||||
|
return nil, fmt.Errorf("session refresh failed: %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
var payload soraSessionResponse
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if payload.AccessToken == "" {
|
||||||
|
return nil, errors.New("session refresh missing access token")
|
||||||
|
}
|
||||||
|
return &soraRefreshResult{AccessToken: payload.AccessToken, Email: payload.User.Email}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *SoraTokenRefreshService) refreshWithRefreshToken(ctx context.Context, account *Account, refreshToken, clientID string) (*soraRefreshResult, error) {
|
||||||
|
if s.httpUpstream == nil {
|
||||||
|
return nil, fmt.Errorf("upstream not configured")
|
||||||
|
}
|
||||||
|
payload := map[string]any{
|
||||||
|
"client_id": clientID,
|
||||||
|
"grant_type": "refresh_token",
|
||||||
|
"redirect_uri": "com.openai.chat://auth0.openai.com/ios/com.openai.chat/callback",
|
||||||
|
"refresh_token": refreshToken,
|
||||||
|
}
|
||||||
|
body, err := json.Marshal(payload)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req, err := http.NewRequestWithContext(ctx, "POST", "https://auth.openai.com/oauth/token", bytes.NewReader(body))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req.Header.Set("Content-Type", "application/json")
|
||||||
|
req.Header.Set("Accept", "application/json")
|
||||||
|
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36")
|
||||||
|
|
||||||
|
enableTLS := false
|
||||||
|
if s.cfg != nil {
|
||||||
|
enableTLS = s.cfg.Gateway.TLSFingerprint.Enabled
|
||||||
|
}
|
||||||
|
proxyURL := ""
|
||||||
|
accountConcurrency := 0
|
||||||
|
accountID := int64(0)
|
||||||
|
if account != nil {
|
||||||
|
accountID = account.ID
|
||||||
|
accountConcurrency = account.Concurrency
|
||||||
|
if account.Proxy != nil {
|
||||||
|
proxyURL = account.Proxy.URL()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
resp, err := s.httpUpstream.DoWithTLS(req, proxyURL, accountID, accountConcurrency, enableTLS)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer resp.Body.Close()
|
||||||
|
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
|
||||||
|
return nil, fmt.Errorf("refresh token failed: %d", resp.StatusCode)
|
||||||
|
}
|
||||||
|
var payloadResp soraRefreshResponse
|
||||||
|
if err := json.NewDecoder(resp.Body).Decode(&payloadResp); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if payloadResp.AccessToken == "" {
|
||||||
|
return nil, errors.New("refresh token missing access token")
|
||||||
|
}
|
||||||
|
return &soraRefreshResult{AccessToken: payloadResp.AccessToken, RefreshToken: payloadResp.RefreshToken}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *SoraTokenRefreshService) nextRunDelay() time.Duration {
|
||||||
|
location := time.Local
|
||||||
|
if s.cfg != nil && strings.TrimSpace(s.cfg.Timezone) != "" {
|
||||||
|
if tz, err := time.LoadLocation(strings.TrimSpace(s.cfg.Timezone)); err == nil {
|
||||||
|
location = tz
|
||||||
|
}
|
||||||
|
}
|
||||||
|
now := time.Now().In(location)
|
||||||
|
next := time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, location).Add(24 * time.Hour)
|
||||||
|
return time.Until(next)
|
||||||
|
}
|
||||||
|
|
||||||
|
// isEnabled reports whether periodic token refresh is turned on. Dynamic
// settings take precedence; the static config toggle is only consulted when
// no SettingService is wired in.
func (s *SoraTokenRefreshService) isEnabled(ctx context.Context) bool {
	if s.settingService == nil {
		return s.cfg != nil && s.cfg.Sora.TokenRefresh.Enabled
	}
	cfg := s.settingService.GetSoraConfig(ctx)
	return cfg.TokenRefresh.Enabled
}
|
||||||
@@ -51,6 +51,30 @@ func ProvideTokenRefreshService(
|
|||||||
return svc
|
return svc
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ProvideSoraTokenRefreshService creates and starts SoraTokenRefreshService.
// Shutdown is handled elsewhere by the cleanup provider calling Stop.
func ProvideSoraTokenRefreshService(
	accountRepo AccountRepository,
	soraAccountRepo SoraAccountRepository,
	settingService *SettingService,
	httpUpstream HTTPUpstream,
	cfg *config.Config,
) *SoraTokenRefreshService {
	svc := NewSoraTokenRefreshService(accountRepo, soraAccountRepo, settingService, httpUpstream, cfg)
	svc.Start()
	return svc
}
|
||||||
|
|
||||||
|
// ProvideSoraCacheCleanupService creates and starts SoraCacheCleanupService.
// Shutdown is handled elsewhere by the cleanup provider calling Stop.
func ProvideSoraCacheCleanupService(
	cacheRepo SoraCacheFileRepository,
	settingService *SettingService,
	cfg *config.Config,
) *SoraCacheCleanupService {
	svc := NewSoraCacheCleanupService(cacheRepo, settingService, cfg)
	svc.Start()
	return svc
}
|
||||||
|
|
||||||
// ProvideDashboardAggregationService 创建并启动仪表盘聚合服务
|
// ProvideDashboardAggregationService 创建并启动仪表盘聚合服务
|
||||||
func ProvideDashboardAggregationService(repo DashboardAggregationRepository, timingWheel *TimingWheelService, cfg *config.Config) *DashboardAggregationService {
|
func ProvideDashboardAggregationService(repo DashboardAggregationRepository, timingWheel *TimingWheelService, cfg *config.Config) *DashboardAggregationService {
|
||||||
svc := NewDashboardAggregationService(repo, timingWheel, cfg)
|
svc := NewDashboardAggregationService(repo, timingWheel, cfg)
|
||||||
@@ -222,6 +246,8 @@ var ProviderSet = wire.NewSet(
|
|||||||
NewAdminService,
|
NewAdminService,
|
||||||
NewGatewayService,
|
NewGatewayService,
|
||||||
NewOpenAIGatewayService,
|
NewOpenAIGatewayService,
|
||||||
|
NewSoraCacheService,
|
||||||
|
NewSoraGatewayService,
|
||||||
NewOAuthService,
|
NewOAuthService,
|
||||||
NewOpenAIOAuthService,
|
NewOpenAIOAuthService,
|
||||||
NewGeminiOAuthService,
|
NewGeminiOAuthService,
|
||||||
@@ -255,6 +281,8 @@ var ProviderSet = wire.NewSet(
|
|||||||
NewCRSSyncService,
|
NewCRSSyncService,
|
||||||
ProvideUpdateService,
|
ProvideUpdateService,
|
||||||
ProvideTokenRefreshService,
|
ProvideTokenRefreshService,
|
||||||
|
ProvideSoraTokenRefreshService,
|
||||||
|
ProvideSoraCacheCleanupService,
|
||||||
ProvideAccountExpiryService,
|
ProvideAccountExpiryService,
|
||||||
ProvideTimingWheelService,
|
ProvideTimingWheelService,
|
||||||
ProvideDashboardAggregationService,
|
ProvideDashboardAggregationService,
|
||||||
|
|||||||
94
backend/migrations/044_add_sora_tables.sql
Normal file
94
backend/migrations/044_add_sora_tables.sql
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
-- Add Sora platform tables

-- Per-account Sora extras: credentials, plan/quota info and feature toggles.
-- One row per platform account (account_id is UNIQUE).
CREATE TABLE IF NOT EXISTS sora_accounts (
    id BIGSERIAL PRIMARY KEY,
    account_id BIGINT NOT NULL UNIQUE,
    access_token TEXT,
    session_token TEXT,
    refresh_token TEXT,
    client_id TEXT,
    email TEXT,
    username TEXT,
    remark TEXT,
    use_count INT DEFAULT 0,
    plan_type TEXT,
    plan_title TEXT,
    subscription_end TIMESTAMPTZ,
    sora_supported BOOLEAN DEFAULT FALSE,
    sora_invite_code TEXT,
    sora_redeemed_count INT DEFAULT 0,
    sora_remaining_count INT DEFAULT 0,
    sora_total_count INT DEFAULT 0,
    sora_cooldown_until TIMESTAMPTZ,
    cooled_until TIMESTAMPTZ,
    image_enabled BOOLEAN DEFAULT TRUE,
    video_enabled BOOLEAN DEFAULT TRUE,
    -- -1 presumably means "no per-account concurrency cap" — confirm against
    -- the scheduler's slot management.
    image_concurrency INT DEFAULT -1,
    video_concurrency INT DEFAULT -1,
    is_expired BOOLEAN DEFAULT FALSE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    FOREIGN KEY (account_id) REFERENCES accounts(id)
);

CREATE INDEX IF NOT EXISTS idx_sora_accounts_plan_type ON sora_accounts (plan_type);
CREATE INDEX IF NOT EXISTS idx_sora_accounts_sora_supported ON sora_accounts (sora_supported);
CREATE INDEX IF NOT EXISTS idx_sora_accounts_image_enabled ON sora_accounts (image_enabled);
CREATE INDEX IF NOT EXISTS idx_sora_accounts_video_enabled ON sora_accounts (video_enabled);

-- Per-account usage counters: lifetime totals plus per-day (today_*) counters
-- keyed by today_date, and a consecutive-error counter for health tracking.
CREATE TABLE IF NOT EXISTS sora_usage_stats (
    id BIGSERIAL PRIMARY KEY,
    account_id BIGINT NOT NULL UNIQUE,
    image_count INT DEFAULT 0,
    video_count INT DEFAULT 0,
    error_count INT DEFAULT 0,
    last_error_at TIMESTAMPTZ,
    today_image_count INT DEFAULT 0,
    today_video_count INT DEFAULT 0,
    today_error_count INT DEFAULT 0,
    today_date DATE,
    consecutive_error_count INT DEFAULT 0,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    FOREIGN KEY (account_id) REFERENCES accounts(id)
);

CREATE INDEX IF NOT EXISTS idx_sora_usage_stats_today_date ON sora_usage_stats (today_date);

-- Generation task records; task_id is the upstream identifier.
CREATE TABLE IF NOT EXISTS sora_tasks (
    id BIGSERIAL PRIMARY KEY,
    task_id TEXT NOT NULL UNIQUE,
    account_id BIGINT NOT NULL,
    model TEXT NOT NULL,
    prompt TEXT NOT NULL,
    status TEXT NOT NULL DEFAULT 'processing',
    progress DOUBLE PRECISION DEFAULT 0,
    result_urls TEXT,
    error_message TEXT,
    retry_count INT DEFAULT 0,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    -- NULL while the task is still running
    completed_at TIMESTAMPTZ,
    FOREIGN KEY (account_id) REFERENCES accounts(id)
);

CREATE INDEX IF NOT EXISTS idx_sora_tasks_account_id ON sora_tasks (account_id);
CREATE INDEX IF NOT EXISTS idx_sora_tasks_status ON sora_tasks (status);

-- Bookkeeping for locally cached media files (used by the cleanup service to
-- evict oldest-first when the cache exceeds its size budget).
CREATE TABLE IF NOT EXISTS sora_cache_files (
    id BIGSERIAL PRIMARY KEY,
    task_id TEXT,
    account_id BIGINT NOT NULL,
    user_id BIGINT NOT NULL,
    media_type TEXT NOT NULL,
    original_url TEXT NOT NULL,
    cache_path TEXT NOT NULL,
    cache_url TEXT NOT NULL,
    size_bytes BIGINT DEFAULT 0,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    FOREIGN KEY (account_id) REFERENCES accounts(id),
    FOREIGN KEY (user_id) REFERENCES users(id)
);

CREATE INDEX IF NOT EXISTS idx_sora_cache_files_account_id ON sora_cache_files (account_id);
CREATE INDEX IF NOT EXISTS idx_sora_cache_files_user_id ON sora_cache_files (user_id);
CREATE INDEX IF NOT EXISTS idx_sora_cache_files_media_type ON sora_cache_files (media_type);
||||||
60
config.yaml
60
config.yaml
@@ -525,3 +525,63 @@ gemini:
|
|||||||
# Cooldown time (minutes) after hitting quota
|
# Cooldown time (minutes) after hitting quota
|
||||||
# 达到配额后的冷却时间(分钟)
|
# 达到配额后的冷却时间(分钟)
|
||||||
cooldown_minutes: 5
|
cooldown_minutes: 5
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Sora
|
||||||
|
# Sora 配置
|
||||||
|
# =============================================================================
|
||||||
|
sora:
|
||||||
|
# Sora Backend API base URL
|
||||||
|
# Sora 后端 API 基础地址
|
||||||
|
base_url: "https://sora.chatgpt.com/backend"
|
||||||
|
# Request timeout in seconds
|
||||||
|
# 请求超时时间(秒)
|
||||||
|
timeout: 120
|
||||||
|
# Max retry attempts for upstream requests
|
||||||
|
# 上游请求最大重试次数
|
||||||
|
max_retries: 3
|
||||||
|
# Poll interval in seconds for task status
|
||||||
|
# 任务状态轮询间隔(秒)
|
||||||
|
poll_interval: 2.5
|
||||||
|
# Call logic mode: default/native/proxy (default keeps current behavior)
|
||||||
|
# 调用模式:default/native/proxy(default 保持当前默认策略)
|
||||||
|
call_logic_mode: "default"
|
||||||
|
cache:
|
||||||
|
# Enable media caching
|
||||||
|
# 是否启用媒体缓存
|
||||||
|
enabled: false
|
||||||
|
# Base cache directory (temporary files, intermediate downloads)
|
||||||
|
# 缓存根目录(临时文件、中间下载)
|
||||||
|
base_dir: "tmp/sora"
|
||||||
|
# Video cache directory (separated from images)
|
||||||
|
# 视频缓存目录(与图片分离)
|
||||||
|
video_dir: "data/video"
|
||||||
|
# Max bytes for cache dir (0 = unlimited)
|
||||||
|
# 缓存目录最大字节数(0 = 不限制)
|
||||||
|
max_bytes: 0
|
||||||
|
# Allowed hosts for cache download (empty -> fallback to global allowlist)
|
||||||
|
# 缓存下载白名单域名(为空则回退全局 allowlist)
|
||||||
|
allowed_hosts: []
|
||||||
|
# Enable user directory isolation (data/video/u_{user_id})
|
||||||
|
# 是否按用户隔离目录(data/video/u_{user_id})
|
||||||
|
user_dir_enabled: true
|
||||||
|
watermark_free:
|
||||||
|
# Enable watermark-free flow
|
||||||
|
# 是否启用去水印流程
|
||||||
|
enabled: false
|
||||||
|
# Parse method: third_party/custom
|
||||||
|
# 解析方式:third_party/custom
|
||||||
|
parse_method: "third_party"
|
||||||
|
# Custom parse server URL
|
||||||
|
# 自定义解析服务 URL
|
||||||
|
custom_parse_url: ""
|
||||||
|
# Custom parse token
|
||||||
|
# 自定义解析 token
|
||||||
|
custom_parse_token: ""
|
||||||
|
# Fallback to watermark video when parse fails
|
||||||
|
# 去水印失败时是否回退原视频
|
||||||
|
fallback_on_failure: true
|
||||||
|
token_refresh:
|
||||||
|
# Enable periodic token refresh
|
||||||
|
# 是否启用定时刷新
|
||||||
|
enabled: false
|
||||||
|
|||||||
@@ -194,6 +194,28 @@ GEMINI_OAUTH_SCOPES=
|
|||||||
# GEMINI_QUOTA_POLICY={"tiers":{"LEGACY":{"pro_rpd":50,"flash_rpd":1500,"cooldown_minutes":30},"PRO":{"pro_rpd":1500,"flash_rpd":4000,"cooldown_minutes":5},"ULTRA":{"pro_rpd":2000,"flash_rpd":0,"cooldown_minutes":5}}}
|
# GEMINI_QUOTA_POLICY={"tiers":{"LEGACY":{"pro_rpd":50,"flash_rpd":1500,"cooldown_minutes":30},"PRO":{"pro_rpd":1500,"flash_rpd":4000,"cooldown_minutes":5},"ULTRA":{"pro_rpd":2000,"flash_rpd":0,"cooldown_minutes":5}}}
|
||||||
GEMINI_QUOTA_POLICY=
|
GEMINI_QUOTA_POLICY=
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Sora Configuration (OPTIONAL)
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
SORA_BASE_URL=https://sora.chatgpt.com/backend
|
||||||
|
SORA_TIMEOUT=120
|
||||||
|
SORA_MAX_RETRIES=3
|
||||||
|
SORA_POLL_INTERVAL=2.5
|
||||||
|
SORA_CALL_LOGIC_MODE=default
|
||||||
|
SORA_CACHE_ENABLED=false
|
||||||
|
SORA_CACHE_BASE_DIR=tmp/sora
|
||||||
|
SORA_CACHE_VIDEO_DIR=data/video
|
||||||
|
SORA_CACHE_MAX_BYTES=0
|
||||||
|
# Comma-separated hosts (leave empty to use global allowlist)
|
||||||
|
SORA_CACHE_ALLOWED_HOSTS=
|
||||||
|
SORA_CACHE_USER_DIR_ENABLED=true
|
||||||
|
SORA_WATERMARK_FREE_ENABLED=false
|
||||||
|
SORA_WATERMARK_FREE_PARSE_METHOD=third_party
|
||||||
|
SORA_WATERMARK_FREE_CUSTOM_PARSE_URL=
|
||||||
|
SORA_WATERMARK_FREE_CUSTOM_PARSE_TOKEN=
|
||||||
|
SORA_WATERMARK_FREE_FALLBACK_ON_FAILURE=true
|
||||||
|
SORA_TOKEN_REFRESH_ENABLED=false
|
||||||
|
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
# Ops Monitoring Configuration (运维监控配置)
|
# Ops Monitoring Configuration (运维监控配置)
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -583,6 +583,66 @@ gemini:
|
|||||||
# 达到配额后的冷却时间(分钟)
|
# 达到配额后的冷却时间(分钟)
|
||||||
cooldown_minutes: 5
|
cooldown_minutes: 5
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Sora
|
||||||
|
# Sora 配置
|
||||||
|
# =============================================================================
|
||||||
|
sora:
|
||||||
|
# Sora Backend API base URL
|
||||||
|
# Sora 后端 API 基础地址
|
||||||
|
base_url: "https://sora.chatgpt.com/backend"
|
||||||
|
# Request timeout in seconds
|
||||||
|
# 请求超时时间(秒)
|
||||||
|
timeout: 120
|
||||||
|
# Max retry attempts for upstream requests
|
||||||
|
# 上游请求最大重试次数
|
||||||
|
max_retries: 3
|
||||||
|
# Poll interval in seconds for task status
|
||||||
|
# 任务状态轮询间隔(秒)
|
||||||
|
poll_interval: 2.5
|
||||||
|
# Call logic mode: default/native/proxy (default keeps current behavior)
|
||||||
|
# 调用模式:default/native/proxy(default 保持当前默认策略)
|
||||||
|
call_logic_mode: "default"
|
||||||
|
cache:
|
||||||
|
# Enable media caching
|
||||||
|
# 是否启用媒体缓存
|
||||||
|
enabled: false
|
||||||
|
# Base cache directory (temporary files, intermediate downloads)
|
||||||
|
# 缓存根目录(临时文件、中间下载)
|
||||||
|
base_dir: "tmp/sora"
|
||||||
|
# Video cache directory (separated from images)
|
||||||
|
# 视频缓存目录(与图片分离)
|
||||||
|
video_dir: "data/video"
|
||||||
|
# Max bytes for cache dir (0 = unlimited)
|
||||||
|
# 缓存目录最大字节数(0 = 不限制)
|
||||||
|
max_bytes: 0
|
||||||
|
# Allowed hosts for cache download (empty -> fallback to global allowlist)
|
||||||
|
# 缓存下载白名单域名(为空则回退全局 allowlist)
|
||||||
|
allowed_hosts: []
|
||||||
|
# Enable user directory isolation (data/video/u_{user_id})
|
||||||
|
# 是否按用户隔离目录(data/video/u_{user_id})
|
||||||
|
user_dir_enabled: true
|
||||||
|
watermark_free:
|
||||||
|
# Enable watermark-free flow
|
||||||
|
# 是否启用去水印流程
|
||||||
|
enabled: false
|
||||||
|
# Parse method: third_party/custom
|
||||||
|
# 解析方式:third_party/custom
|
||||||
|
parse_method: "third_party"
|
||||||
|
# Custom parse server URL
|
||||||
|
# 自定义解析服务 URL
|
||||||
|
custom_parse_url: ""
|
||||||
|
# Custom parse token
|
||||||
|
# 自定义解析 token
|
||||||
|
custom_parse_token: ""
|
||||||
|
# Fallback to watermark video when parse fails
|
||||||
|
# 去水印失败时是否回退原视频
|
||||||
|
fallback_on_failure: true
|
||||||
|
token_refresh:
|
||||||
|
# Enable periodic token refresh
|
||||||
|
# 是否启用定时刷新
|
||||||
|
enabled: false
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# Update Configuration (在线更新配置)
|
# Update Configuration (在线更新配置)
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
|
|||||||
@@ -55,6 +55,25 @@ export interface SystemSettings {
|
|||||||
enable_identity_patch: boolean
|
enable_identity_patch: boolean
|
||||||
identity_patch_prompt: string
|
identity_patch_prompt: string
|
||||||
|
|
||||||
|
// Sora configuration
|
||||||
|
sora_base_url: string
|
||||||
|
sora_timeout: number
|
||||||
|
sora_max_retries: number
|
||||||
|
sora_poll_interval: number
|
||||||
|
sora_call_logic_mode: string
|
||||||
|
sora_cache_enabled: boolean
|
||||||
|
sora_cache_base_dir: string
|
||||||
|
sora_cache_video_dir: string
|
||||||
|
sora_cache_max_bytes: number
|
||||||
|
sora_cache_allowed_hosts: string[]
|
||||||
|
sora_cache_user_dir_enabled: boolean
|
||||||
|
sora_watermark_free_enabled: boolean
|
||||||
|
sora_watermark_free_parse_method: string
|
||||||
|
sora_watermark_free_custom_parse_url: string
|
||||||
|
sora_watermark_free_custom_parse_token: string
|
||||||
|
sora_watermark_free_fallback_on_failure: boolean
|
||||||
|
sora_token_refresh_enabled: boolean
|
||||||
|
|
||||||
// Ops Monitoring (vNext)
|
// Ops Monitoring (vNext)
|
||||||
ops_monitoring_enabled: boolean
|
ops_monitoring_enabled: boolean
|
||||||
ops_realtime_monitoring_enabled: boolean
|
ops_realtime_monitoring_enabled: boolean
|
||||||
@@ -97,6 +116,23 @@ export interface UpdateSettingsRequest {
|
|||||||
fallback_model_antigravity?: string
|
fallback_model_antigravity?: string
|
||||||
enable_identity_patch?: boolean
|
enable_identity_patch?: boolean
|
||||||
identity_patch_prompt?: string
|
identity_patch_prompt?: string
|
||||||
|
sora_base_url?: string
|
||||||
|
sora_timeout?: number
|
||||||
|
sora_max_retries?: number
|
||||||
|
sora_poll_interval?: number
|
||||||
|
sora_call_logic_mode?: string
|
||||||
|
sora_cache_enabled?: boolean
|
||||||
|
sora_cache_base_dir?: string
|
||||||
|
sora_cache_video_dir?: string
|
||||||
|
sora_cache_max_bytes?: number
|
||||||
|
sora_cache_allowed_hosts?: string[]
|
||||||
|
sora_cache_user_dir_enabled?: boolean
|
||||||
|
sora_watermark_free_enabled?: boolean
|
||||||
|
sora_watermark_free_parse_method?: string
|
||||||
|
sora_watermark_free_custom_parse_url?: string
|
||||||
|
sora_watermark_free_custom_parse_token?: string
|
||||||
|
sora_watermark_free_fallback_on_failure?: boolean
|
||||||
|
sora_token_refresh_enabled?: boolean
|
||||||
ops_monitoring_enabled?: boolean
|
ops_monitoring_enabled?: boolean
|
||||||
ops_realtime_monitoring_enabled?: boolean
|
ops_realtime_monitoring_enabled?: boolean
|
||||||
ops_query_mode_default?: 'auto' | 'raw' | 'preagg' | string
|
ops_query_mode_default?: 'auto' | 'raw' | 'preagg' | string
|
||||||
|
|||||||
@@ -147,6 +147,19 @@
|
|||||||
<Icon name="cloud" size="sm" />
|
<Icon name="cloud" size="sm" />
|
||||||
Antigravity
|
Antigravity
|
||||||
</button>
|
</button>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
@click="form.platform = 'sora'"
|
||||||
|
:class="[
|
||||||
|
'flex flex-1 items-center justify-center gap-2 rounded-md px-4 py-2.5 text-sm font-medium transition-all',
|
||||||
|
form.platform === 'sora'
|
||||||
|
? 'bg-white text-rose-600 shadow-sm dark:bg-dark-600 dark:text-rose-400'
|
||||||
|
: 'text-gray-600 hover:text-gray-900 dark:text-gray-400 dark:hover:text-gray-200'
|
||||||
|
]"
|
||||||
|
>
|
||||||
|
<Icon name="play" size="sm" />
|
||||||
|
Sora
|
||||||
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -672,6 +685,8 @@
|
|||||||
? 'https://api.openai.com'
|
? 'https://api.openai.com'
|
||||||
: form.platform === 'gemini'
|
: form.platform === 'gemini'
|
||||||
? 'https://generativelanguage.googleapis.com'
|
? 'https://generativelanguage.googleapis.com'
|
||||||
|
: form.platform === 'sora'
|
||||||
|
? 'https://sora.chatgpt.com/backend'
|
||||||
: 'https://api.anthropic.com'
|
: 'https://api.anthropic.com'
|
||||||
"
|
"
|
||||||
/>
|
/>
|
||||||
@@ -689,6 +704,8 @@
|
|||||||
? 'sk-proj-...'
|
? 'sk-proj-...'
|
||||||
: form.platform === 'gemini'
|
: form.platform === 'gemini'
|
||||||
? 'AIza...'
|
? 'AIza...'
|
||||||
|
: form.platform === 'sora'
|
||||||
|
? 'access-token...'
|
||||||
: 'sk-ant-...'
|
: 'sk-ant-...'
|
||||||
"
|
"
|
||||||
/>
|
/>
|
||||||
@@ -1850,12 +1867,14 @@ const oauthStepTitle = computed(() => {
|
|||||||
const baseUrlHint = computed(() => {
|
const baseUrlHint = computed(() => {
|
||||||
if (form.platform === 'openai') return t('admin.accounts.openai.baseUrlHint')
|
if (form.platform === 'openai') return t('admin.accounts.openai.baseUrlHint')
|
||||||
if (form.platform === 'gemini') return t('admin.accounts.gemini.baseUrlHint')
|
if (form.platform === 'gemini') return t('admin.accounts.gemini.baseUrlHint')
|
||||||
|
if (form.platform === 'sora') return t('admin.accounts.sora.baseUrlHint')
|
||||||
return t('admin.accounts.baseUrlHint')
|
return t('admin.accounts.baseUrlHint')
|
||||||
})
|
})
|
||||||
|
|
||||||
const apiKeyHint = computed(() => {
|
const apiKeyHint = computed(() => {
|
||||||
if (form.platform === 'openai') return t('admin.accounts.openai.apiKeyHint')
|
if (form.platform === 'openai') return t('admin.accounts.openai.apiKeyHint')
|
||||||
if (form.platform === 'gemini') return t('admin.accounts.gemini.apiKeyHint')
|
if (form.platform === 'gemini') return t('admin.accounts.gemini.apiKeyHint')
|
||||||
|
if (form.platform === 'sora') return t('admin.accounts.sora.apiKeyHint')
|
||||||
return t('admin.accounts.apiKeyHint')
|
return t('admin.accounts.apiKeyHint')
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -2100,7 +2119,9 @@ watch(
|
|||||||
? 'https://api.openai.com'
|
? 'https://api.openai.com'
|
||||||
: newPlatform === 'gemini'
|
: newPlatform === 'gemini'
|
||||||
? 'https://generativelanguage.googleapis.com'
|
? 'https://generativelanguage.googleapis.com'
|
||||||
: 'https://api.anthropic.com'
|
: newPlatform === 'sora'
|
||||||
|
? 'https://sora.chatgpt.com/backend'
|
||||||
|
: 'https://api.anthropic.com'
|
||||||
// Clear model-related settings
|
// Clear model-related settings
|
||||||
allowedModels.value = []
|
allowedModels.value = []
|
||||||
modelMappings.value = []
|
modelMappings.value = []
|
||||||
@@ -2112,6 +2133,9 @@ watch(
|
|||||||
if (newPlatform === 'antigravity') {
|
if (newPlatform === 'antigravity') {
|
||||||
accountCategory.value = 'oauth-based'
|
accountCategory.value = 'oauth-based'
|
||||||
}
|
}
|
||||||
|
if (newPlatform === 'sora') {
|
||||||
|
accountCategory.value = 'apikey'
|
||||||
|
}
|
||||||
// Reset OAuth states
|
// Reset OAuth states
|
||||||
oauth.resetState()
|
oauth.resetState()
|
||||||
openaiOAuth.resetState()
|
openaiOAuth.resetState()
|
||||||
@@ -2383,12 +2407,17 @@ const handleSubmit = async () => {
|
|||||||
? 'https://api.openai.com'
|
? 'https://api.openai.com'
|
||||||
: form.platform === 'gemini'
|
: form.platform === 'gemini'
|
||||||
? 'https://generativelanguage.googleapis.com'
|
? 'https://generativelanguage.googleapis.com'
|
||||||
: 'https://api.anthropic.com'
|
: form.platform === 'sora'
|
||||||
|
? 'https://sora.chatgpt.com/backend'
|
||||||
|
: 'https://api.anthropic.com'
|
||||||
|
|
||||||
// Build credentials with optional model mapping
|
// Build credentials with optional model mapping
|
||||||
const credentials: Record<string, unknown> = {
|
const credentials: Record<string, unknown> = {}
|
||||||
base_url: apiKeyBaseUrl.value.trim() || defaultBaseUrl,
|
if (form.platform === 'sora') {
|
||||||
api_key: apiKeyValue.value.trim()
|
credentials.access_token = apiKeyValue.value.trim()
|
||||||
|
} else {
|
||||||
|
credentials.base_url = apiKeyBaseUrl.value.trim() || defaultBaseUrl
|
||||||
|
credentials.api_key = apiKeyValue.value.trim()
|
||||||
}
|
}
|
||||||
if (form.platform === 'gemini') {
|
if (form.platform === 'gemini') {
|
||||||
credentials.tier_id = geminiTierAIStudio.value
|
credentials.tier_id = geminiTierAIStudio.value
|
||||||
|
|||||||
@@ -39,6 +39,8 @@
|
|||||||
? 'https://api.openai.com'
|
? 'https://api.openai.com'
|
||||||
: account.platform === 'gemini'
|
: account.platform === 'gemini'
|
||||||
? 'https://generativelanguage.googleapis.com'
|
? 'https://generativelanguage.googleapis.com'
|
||||||
|
: account.platform === 'sora'
|
||||||
|
? 'https://sora.chatgpt.com/backend'
|
||||||
: 'https://api.anthropic.com'
|
: 'https://api.anthropic.com'
|
||||||
"
|
"
|
||||||
/>
|
/>
|
||||||
@@ -55,6 +57,8 @@
|
|||||||
? 'sk-proj-...'
|
? 'sk-proj-...'
|
||||||
: account.platform === 'gemini'
|
: account.platform === 'gemini'
|
||||||
? 'AIza...'
|
? 'AIza...'
|
||||||
|
: account.platform === 'sora'
|
||||||
|
? 'access-token...'
|
||||||
: 'sk-ant-...'
|
: 'sk-ant-...'
|
||||||
"
|
"
|
||||||
/>
|
/>
|
||||||
@@ -919,6 +923,7 @@ const baseUrlHint = computed(() => {
|
|||||||
if (!props.account) return t('admin.accounts.baseUrlHint')
|
if (!props.account) return t('admin.accounts.baseUrlHint')
|
||||||
if (props.account.platform === 'openai') return t('admin.accounts.openai.baseUrlHint')
|
if (props.account.platform === 'openai') return t('admin.accounts.openai.baseUrlHint')
|
||||||
if (props.account.platform === 'gemini') return t('admin.accounts.gemini.baseUrlHint')
|
if (props.account.platform === 'gemini') return t('admin.accounts.gemini.baseUrlHint')
|
||||||
|
if (props.account.platform === 'sora') return t('admin.accounts.sora.baseUrlHint')
|
||||||
return t('admin.accounts.baseUrlHint')
|
return t('admin.accounts.baseUrlHint')
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -997,6 +1002,7 @@ const tempUnschedPresets = computed(() => [
|
|||||||
const defaultBaseUrl = computed(() => {
|
const defaultBaseUrl = computed(() => {
|
||||||
if (props.account?.platform === 'openai') return 'https://api.openai.com'
|
if (props.account?.platform === 'openai') return 'https://api.openai.com'
|
||||||
if (props.account?.platform === 'gemini') return 'https://generativelanguage.googleapis.com'
|
if (props.account?.platform === 'gemini') return 'https://generativelanguage.googleapis.com'
|
||||||
|
if (props.account?.platform === 'sora') return 'https://sora.chatgpt.com/backend'
|
||||||
return 'https://api.anthropic.com'
|
return 'https://api.anthropic.com'
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -1061,7 +1067,9 @@ watch(
|
|||||||
? 'https://api.openai.com'
|
? 'https://api.openai.com'
|
||||||
: newAccount.platform === 'gemini'
|
: newAccount.platform === 'gemini'
|
||||||
? 'https://generativelanguage.googleapis.com'
|
? 'https://generativelanguage.googleapis.com'
|
||||||
: 'https://api.anthropic.com'
|
: newAccount.platform === 'sora'
|
||||||
|
? 'https://sora.chatgpt.com/backend'
|
||||||
|
: 'https://api.anthropic.com'
|
||||||
editBaseUrl.value = (credentials.base_url as string) || platformDefaultUrl
|
editBaseUrl.value = (credentials.base_url as string) || platformDefaultUrl
|
||||||
|
|
||||||
// Load model mappings and detect mode
|
// Load model mappings and detect mode
|
||||||
@@ -1104,7 +1112,9 @@ watch(
|
|||||||
? 'https://api.openai.com'
|
? 'https://api.openai.com'
|
||||||
: newAccount.platform === 'gemini'
|
: newAccount.platform === 'gemini'
|
||||||
? 'https://generativelanguage.googleapis.com'
|
? 'https://generativelanguage.googleapis.com'
|
||||||
: 'https://api.anthropic.com'
|
: newAccount.platform === 'sora'
|
||||||
|
? 'https://sora.chatgpt.com/backend'
|
||||||
|
: 'https://api.anthropic.com'
|
||||||
editBaseUrl.value = platformDefaultUrl
|
editBaseUrl.value = platformDefaultUrl
|
||||||
modelRestrictionMode.value = 'whitelist'
|
modelRestrictionMode.value = 'whitelist'
|
||||||
modelMappings.value = []
|
modelMappings.value = []
|
||||||
@@ -1381,17 +1391,32 @@ const handleSubmit = async () => {
|
|||||||
if (props.account.type === 'apikey') {
|
if (props.account.type === 'apikey') {
|
||||||
const currentCredentials = (props.account.credentials as Record<string, unknown>) || {}
|
const currentCredentials = (props.account.credentials as Record<string, unknown>) || {}
|
||||||
const newBaseUrl = editBaseUrl.value.trim() || defaultBaseUrl.value
|
const newBaseUrl = editBaseUrl.value.trim() || defaultBaseUrl.value
|
||||||
|
const isSora = props.account.platform === 'sora'
|
||||||
const modelMapping = buildModelMappingObject(modelRestrictionMode.value, allowedModels.value, modelMappings.value)
|
const modelMapping = buildModelMappingObject(modelRestrictionMode.value, allowedModels.value, modelMappings.value)
|
||||||
|
|
||||||
// Always update credentials for apikey type to handle model mapping changes
|
// Always update credentials for apikey type to handle model mapping changes
|
||||||
const newCredentials: Record<string, unknown> = {
|
const newCredentials: Record<string, unknown> = {}
|
||||||
base_url: newBaseUrl
|
if (!isSora) {
|
||||||
|
newCredentials.base_url = newBaseUrl
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle API key
|
// Handle API key
|
||||||
if (editApiKey.value.trim()) {
|
if (editApiKey.value.trim()) {
|
||||||
// User provided a new API key
|
// User provided a new API key
|
||||||
newCredentials.api_key = editApiKey.value.trim()
|
if (isSora) {
|
||||||
|
newCredentials.access_token = editApiKey.value.trim()
|
||||||
|
} else {
|
||||||
|
newCredentials.api_key = editApiKey.value.trim()
|
||||||
|
}
|
||||||
|
} else if (isSora) {
|
||||||
|
const existingToken = (currentCredentials.access_token || currentCredentials.token) as string | undefined
|
||||||
|
if (existingToken) {
|
||||||
|
newCredentials.access_token = existingToken
|
||||||
|
} else {
|
||||||
|
appStore.showError(t('admin.accounts.apiKeyIsRequired'))
|
||||||
|
submitting.value = false
|
||||||
|
return
|
||||||
|
}
|
||||||
} else if (currentCredentials.api_key) {
|
} else if (currentCredentials.api_key) {
|
||||||
// Preserve existing api_key
|
// Preserve existing api_key
|
||||||
newCredentials.api_key = currentCredentials.api_key
|
newCredentials.api_key = currentCredentials.api_key
|
||||||
|
|||||||
@@ -428,7 +428,7 @@ interface Props {
|
|||||||
allowMultiple?: boolean
|
allowMultiple?: boolean
|
||||||
methodLabel?: string
|
methodLabel?: string
|
||||||
showCookieOption?: boolean // Whether to show cookie auto-auth option
|
showCookieOption?: boolean // Whether to show cookie auto-auth option
|
||||||
platform?: 'anthropic' | 'openai' | 'gemini' | 'antigravity' // Platform type for different UI/text
|
platform?: 'anthropic' | 'openai' | 'gemini' | 'antigravity' | 'sora' // Platform type for different UI/text
|
||||||
showProjectId?: boolean // New prop to control project ID visibility
|
showProjectId?: boolean // New prop to control project ID visibility
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ const props = defineProps(['searchQuery', 'filters']); const emit = defineEmits(
|
|||||||
const updatePlatform = (value: string | number | boolean | null) => { emit('update:filters', { ...props.filters, platform: value }) }
|
const updatePlatform = (value: string | number | boolean | null) => { emit('update:filters', { ...props.filters, platform: value }) }
|
||||||
const updateType = (value: string | number | boolean | null) => { emit('update:filters', { ...props.filters, type: value }) }
|
const updateType = (value: string | number | boolean | null) => { emit('update:filters', { ...props.filters, type: value }) }
|
||||||
const updateStatus = (value: string | number | boolean | null) => { emit('update:filters', { ...props.filters, status: value }) }
|
const updateStatus = (value: string | number | boolean | null) => { emit('update:filters', { ...props.filters, status: value }) }
|
||||||
const pOpts = computed(() => [{ value: '', label: t('admin.accounts.allPlatforms') }, { value: 'anthropic', label: 'Anthropic' }, { value: 'openai', label: 'OpenAI' }, { value: 'gemini', label: 'Gemini' }, { value: 'antigravity', label: 'Antigravity' }])
|
const pOpts = computed(() => [{ value: '', label: t('admin.accounts.allPlatforms') }, { value: 'anthropic', label: 'Anthropic' }, { value: 'openai', label: 'OpenAI' }, { value: 'gemini', label: 'Gemini' }, { value: 'sora', label: 'Sora' }, { value: 'antigravity', label: 'Antigravity' }])
|
||||||
const tOpts = computed(() => [{ value: '', label: t('admin.accounts.allTypes') }, { value: 'oauth', label: t('admin.accounts.oauthType') }, { value: 'setup-token', label: t('admin.accounts.setupToken') }, { value: 'apikey', label: t('admin.accounts.apiKey') }])
|
const tOpts = computed(() => [{ value: '', label: t('admin.accounts.allTypes') }, { value: 'oauth', label: t('admin.accounts.oauthType') }, { value: 'setup-token', label: t('admin.accounts.setupToken') }, { value: 'apikey', label: t('admin.accounts.apiKey') }])
|
||||||
const sOpts = computed(() => [{ value: '', label: t('admin.accounts.allStatus') }, { value: 'active', label: t('admin.accounts.status.active') }, { value: 'inactive', label: t('admin.accounts.status.inactive') }, { value: 'error', label: t('admin.accounts.status.error') }])
|
const sOpts = computed(() => [{ value: '', label: t('admin.accounts.allStatus') }, { value: 'active', label: t('admin.accounts.status.active') }, { value: 'inactive', label: t('admin.accounts.status.inactive') }, { value: 'error', label: t('admin.accounts.status.error') }])
|
||||||
</script>
|
</script>
|
||||||
|
|||||||
@@ -97,6 +97,9 @@ const labelClass = computed(() => {
|
|||||||
if (props.platform === 'gemini') {
|
if (props.platform === 'gemini') {
|
||||||
return `${base} bg-blue-200/60 text-blue-800 dark:bg-blue-800/40 dark:text-blue-300`
|
return `${base} bg-blue-200/60 text-blue-800 dark:bg-blue-800/40 dark:text-blue-300`
|
||||||
}
|
}
|
||||||
|
if (props.platform === 'sora') {
|
||||||
|
return `${base} bg-rose-200/60 text-rose-800 dark:bg-rose-800/40 dark:text-rose-300`
|
||||||
|
}
|
||||||
return `${base} bg-violet-200/60 text-violet-800 dark:bg-violet-800/40 dark:text-violet-300`
|
return `${base} bg-violet-200/60 text-violet-800 dark:bg-violet-800/40 dark:text-violet-300`
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -118,6 +121,11 @@ const badgeClass = computed(() => {
|
|||||||
? 'bg-blue-100 text-blue-700 dark:bg-blue-900/30 dark:text-blue-400'
|
? 'bg-blue-100 text-blue-700 dark:bg-blue-900/30 dark:text-blue-400'
|
||||||
: 'bg-sky-50 text-sky-700 dark:bg-sky-900/20 dark:text-sky-400'
|
: 'bg-sky-50 text-sky-700 dark:bg-sky-900/20 dark:text-sky-400'
|
||||||
}
|
}
|
||||||
|
if (props.platform === 'sora') {
|
||||||
|
return isSubscription.value
|
||||||
|
? 'bg-rose-100 text-rose-700 dark:bg-rose-900/30 dark:text-rose-400'
|
||||||
|
: 'bg-rose-50 text-rose-700 dark:bg-rose-900/20 dark:text-rose-400'
|
||||||
|
}
|
||||||
// Fallback: original colors
|
// Fallback: original colors
|
||||||
return isSubscription.value
|
return isSubscription.value
|
||||||
? 'bg-violet-100 text-violet-700 dark:bg-violet-900/30 dark:text-violet-400'
|
? 'bg-violet-100 text-violet-700 dark:bg-violet-900/30 dark:text-violet-400'
|
||||||
|
|||||||
@@ -19,6 +19,10 @@
|
|||||||
<svg v-else-if="platform === 'antigravity'" :class="sizeClass" viewBox="0 0 24 24" fill="currentColor">
|
<svg v-else-if="platform === 'antigravity'" :class="sizeClass" viewBox="0 0 24 24" fill="currentColor">
|
||||||
<path d="M19.35 10.04C18.67 6.59 15.64 4 12 4 9.11 4 6.6 5.64 5.35 8.04 2.34 8.36 0 10.91 0 14c0 3.31 2.69 6 6 6h13c2.76 0 5-2.24 5-5 0-2.64-2.05-4.78-4.65-4.96z" />
|
<path d="M19.35 10.04C18.67 6.59 15.64 4 12 4 9.11 4 6.6 5.64 5.35 8.04 2.34 8.36 0 10.91 0 14c0 3.31 2.69 6 6 6h13c2.76 0 5-2.24 5-5 0-2.64-2.05-4.78-4.65-4.96z" />
|
||||||
</svg>
|
</svg>
|
||||||
|
<!-- Sora logo (play icon) -->
|
||||||
|
<svg v-else-if="platform === 'sora'" :class="sizeClass" viewBox="0 0 24 24" fill="currentColor">
|
||||||
|
<path d="M12 2C6.486 2 2 6.486 2 12s4.486 10 10 10 10-4.486 10-10S17.514 2 12 2zm-1 6 6 4-6 4V8z" />
|
||||||
|
</svg>
|
||||||
<!-- Fallback: generic platform icon -->
|
<!-- Fallback: generic platform icon -->
|
||||||
<svg v-else :class="sizeClass" fill="currentColor" viewBox="0 0 24 24">
|
<svg v-else :class="sizeClass" fill="currentColor" viewBox="0 0 24 24">
|
||||||
<path
|
<path
|
||||||
|
|||||||
@@ -48,6 +48,7 @@ const platformLabel = computed(() => {
|
|||||||
if (props.platform === 'anthropic') return 'Anthropic'
|
if (props.platform === 'anthropic') return 'Anthropic'
|
||||||
if (props.platform === 'openai') return 'OpenAI'
|
if (props.platform === 'openai') return 'OpenAI'
|
||||||
if (props.platform === 'antigravity') return 'Antigravity'
|
if (props.platform === 'antigravity') return 'Antigravity'
|
||||||
|
if (props.platform === 'sora') return 'Sora'
|
||||||
return 'Gemini'
|
return 'Gemini'
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -74,6 +75,9 @@ const platformClass = computed(() => {
|
|||||||
if (props.platform === 'antigravity') {
|
if (props.platform === 'antigravity') {
|
||||||
return 'bg-purple-100 text-purple-700 dark:bg-purple-900/30 dark:text-purple-400'
|
return 'bg-purple-100 text-purple-700 dark:bg-purple-900/30 dark:text-purple-400'
|
||||||
}
|
}
|
||||||
|
if (props.platform === 'sora') {
|
||||||
|
return 'bg-rose-100 text-rose-700 dark:bg-rose-900/30 dark:text-rose-400'
|
||||||
|
}
|
||||||
return 'bg-blue-100 text-blue-700 dark:bg-blue-900/30 dark:text-blue-400'
|
return 'bg-blue-100 text-blue-700 dark:bg-blue-900/30 dark:text-blue-400'
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -87,6 +91,9 @@ const typeClass = computed(() => {
|
|||||||
if (props.platform === 'antigravity') {
|
if (props.platform === 'antigravity') {
|
||||||
return 'bg-purple-100 text-purple-600 dark:bg-purple-900/30 dark:text-purple-400'
|
return 'bg-purple-100 text-purple-600 dark:bg-purple-900/30 dark:text-purple-400'
|
||||||
}
|
}
|
||||||
|
if (props.platform === 'sora') {
|
||||||
|
return 'bg-rose-100 text-rose-600 dark:bg-rose-900/30 dark:text-rose-400'
|
||||||
|
}
|
||||||
return 'bg-blue-100 text-blue-600 dark:bg-blue-900/30 dark:text-blue-400'
|
return 'bg-blue-100 text-blue-600 dark:bg-blue-900/30 dark:text-blue-400'
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|||||||
@@ -180,6 +180,8 @@ const defaultClientTab = computed(() => {
|
|||||||
switch (props.platform) {
|
switch (props.platform) {
|
||||||
case 'openai':
|
case 'openai':
|
||||||
return 'codex'
|
return 'codex'
|
||||||
|
case 'sora':
|
||||||
|
return 'codex'
|
||||||
case 'gemini':
|
case 'gemini':
|
||||||
return 'gemini'
|
return 'gemini'
|
||||||
case 'antigravity':
|
case 'antigravity':
|
||||||
@@ -266,6 +268,7 @@ const clientTabs = computed((): TabConfig[] => {
|
|||||||
if (!props.platform) return []
|
if (!props.platform) return []
|
||||||
switch (props.platform) {
|
switch (props.platform) {
|
||||||
case 'openai':
|
case 'openai':
|
||||||
|
case 'sora':
|
||||||
return [
|
return [
|
||||||
{ id: 'codex', label: t('keys.useKeyModal.cliTabs.codexCli'), icon: TerminalIcon },
|
{ id: 'codex', label: t('keys.useKeyModal.cliTabs.codexCli'), icon: TerminalIcon },
|
||||||
{ id: 'opencode', label: t('keys.useKeyModal.cliTabs.opencode'), icon: TerminalIcon }
|
{ id: 'opencode', label: t('keys.useKeyModal.cliTabs.opencode'), icon: TerminalIcon }
|
||||||
@@ -306,7 +309,7 @@ const showShellTabs = computed(() => activeClientTab.value !== 'opencode')
|
|||||||
|
|
||||||
const currentTabs = computed(() => {
|
const currentTabs = computed(() => {
|
||||||
if (!showShellTabs.value) return []
|
if (!showShellTabs.value) return []
|
||||||
if (props.platform === 'openai') {
|
if (props.platform === 'openai' || props.platform === 'sora') {
|
||||||
return openaiTabs
|
return openaiTabs
|
||||||
}
|
}
|
||||||
return shellTabs
|
return shellTabs
|
||||||
@@ -315,6 +318,7 @@ const currentTabs = computed(() => {
|
|||||||
const platformDescription = computed(() => {
|
const platformDescription = computed(() => {
|
||||||
switch (props.platform) {
|
switch (props.platform) {
|
||||||
case 'openai':
|
case 'openai':
|
||||||
|
case 'sora':
|
||||||
return t('keys.useKeyModal.openai.description')
|
return t('keys.useKeyModal.openai.description')
|
||||||
case 'gemini':
|
case 'gemini':
|
||||||
return t('keys.useKeyModal.gemini.description')
|
return t('keys.useKeyModal.gemini.description')
|
||||||
@@ -328,6 +332,7 @@ const platformDescription = computed(() => {
|
|||||||
const platformNote = computed(() => {
|
const platformNote = computed(() => {
|
||||||
switch (props.platform) {
|
switch (props.platform) {
|
||||||
case 'openai':
|
case 'openai':
|
||||||
|
case 'sora':
|
||||||
return activeTab.value === 'windows'
|
return activeTab.value === 'windows'
|
||||||
? t('keys.useKeyModal.openai.noteWindows')
|
? t('keys.useKeyModal.openai.noteWindows')
|
||||||
: t('keys.useKeyModal.openai.note')
|
: t('keys.useKeyModal.openai.note')
|
||||||
@@ -386,6 +391,7 @@ const currentFiles = computed((): FileConfig[] => {
|
|||||||
case 'anthropic':
|
case 'anthropic':
|
||||||
return [generateOpenCodeConfig('anthropic', apiBase, apiKey)]
|
return [generateOpenCodeConfig('anthropic', apiBase, apiKey)]
|
||||||
case 'openai':
|
case 'openai':
|
||||||
|
case 'sora':
|
||||||
return [generateOpenCodeConfig('openai', apiBase, apiKey)]
|
return [generateOpenCodeConfig('openai', apiBase, apiKey)]
|
||||||
case 'gemini':
|
case 'gemini':
|
||||||
return [generateOpenCodeConfig('gemini', geminiBase, apiKey)]
|
return [generateOpenCodeConfig('gemini', geminiBase, apiKey)]
|
||||||
@@ -401,6 +407,7 @@ const currentFiles = computed((): FileConfig[] => {
|
|||||||
|
|
||||||
switch (props.platform) {
|
switch (props.platform) {
|
||||||
case 'openai':
|
case 'openai':
|
||||||
|
case 'sora':
|
||||||
return generateOpenAIFiles(baseUrl, apiKey)
|
return generateOpenAIFiles(baseUrl, apiKey)
|
||||||
case 'gemini':
|
case 'gemini':
|
||||||
return [generateGeminiCliContent(baseUrl, apiKey)]
|
return [generateGeminiCliContent(baseUrl, apiKey)]
|
||||||
|
|||||||
@@ -52,6 +52,38 @@ const geminiModels = [
|
|||||||
'gemini-3-pro-preview'
|
'gemini-3-pro-preview'
|
||||||
]
|
]
|
||||||
|
|
||||||
|
// OpenAI Sora
|
||||||
|
const soraModels = [
|
||||||
|
'gpt-image',
|
||||||
|
'gpt-image-landscape',
|
||||||
|
'gpt-image-portrait',
|
||||||
|
'sora2-landscape-10s',
|
||||||
|
'sora2-portrait-10s',
|
||||||
|
'sora2-landscape-15s',
|
||||||
|
'sora2-portrait-15s',
|
||||||
|
'sora2-landscape-25s',
|
||||||
|
'sora2-portrait-25s',
|
||||||
|
'sora2pro-landscape-10s',
|
||||||
|
'sora2pro-portrait-10s',
|
||||||
|
'sora2pro-landscape-15s',
|
||||||
|
'sora2pro-portrait-15s',
|
||||||
|
'sora2pro-landscape-25s',
|
||||||
|
'sora2pro-portrait-25s',
|
||||||
|
'sora2pro-hd-landscape-10s',
|
||||||
|
'sora2pro-hd-portrait-10s',
|
||||||
|
'sora2pro-hd-landscape-15s',
|
||||||
|
'sora2pro-hd-portrait-15s',
|
||||||
|
'prompt-enhance-short-10s',
|
||||||
|
'prompt-enhance-short-15s',
|
||||||
|
'prompt-enhance-short-20s',
|
||||||
|
'prompt-enhance-medium-10s',
|
||||||
|
'prompt-enhance-medium-15s',
|
||||||
|
'prompt-enhance-medium-20s',
|
||||||
|
'prompt-enhance-long-10s',
|
||||||
|
'prompt-enhance-long-15s',
|
||||||
|
'prompt-enhance-long-20s'
|
||||||
|
]
|
||||||
|
|
||||||
// 智谱 GLM
|
// 智谱 GLM
|
||||||
const zhipuModels = [
|
const zhipuModels = [
|
||||||
'glm-4', 'glm-4v', 'glm-4-plus', 'glm-4-0520',
|
'glm-4', 'glm-4v', 'glm-4-plus', 'glm-4-0520',
|
||||||
@@ -182,6 +214,7 @@ const allModelsList: string[] = [
|
|||||||
...openaiModels,
|
...openaiModels,
|
||||||
...claudeModels,
|
...claudeModels,
|
||||||
...geminiModels,
|
...geminiModels,
|
||||||
|
...soraModels,
|
||||||
...zhipuModels,
|
...zhipuModels,
|
||||||
...qwenModels,
|
...qwenModels,
|
||||||
...deepseekModels,
|
...deepseekModels,
|
||||||
@@ -258,6 +291,7 @@ export function getModelsByPlatform(platform: string): string[] {
|
|||||||
case 'anthropic':
|
case 'anthropic':
|
||||||
case 'claude': return claudeModels
|
case 'claude': return claudeModels
|
||||||
case 'gemini': return geminiModels
|
case 'gemini': return geminiModels
|
||||||
|
case 'sora': return soraModels
|
||||||
case 'zhipu': return zhipuModels
|
case 'zhipu': return zhipuModels
|
||||||
case 'qwen': return qwenModels
|
case 'qwen': return qwenModels
|
||||||
case 'deepseek': return deepseekModels
|
case 'deepseek': return deepseekModels
|
||||||
@@ -281,6 +315,7 @@ export function getModelsByPlatform(platform: string): string[] {
|
|||||||
export function getPresetMappingsByPlatform(platform: string) {
|
export function getPresetMappingsByPlatform(platform: string) {
|
||||||
if (platform === 'openai') return openaiPresetMappings
|
if (platform === 'openai') return openaiPresetMappings
|
||||||
if (platform === 'gemini') return geminiPresetMappings
|
if (platform === 'gemini') return geminiPresetMappings
|
||||||
|
if (platform === 'sora') return []
|
||||||
return anthropicPresetMappings
|
return anthropicPresetMappings
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -895,6 +895,7 @@ export default {
|
|||||||
anthropic: 'Anthropic',
|
anthropic: 'Anthropic',
|
||||||
openai: 'OpenAI',
|
openai: 'OpenAI',
|
||||||
gemini: 'Gemini',
|
gemini: 'Gemini',
|
||||||
|
sora: 'Sora',
|
||||||
antigravity: 'Antigravity'
|
antigravity: 'Antigravity'
|
||||||
},
|
},
|
||||||
deleteConfirm:
|
deleteConfirm:
|
||||||
@@ -1079,6 +1080,7 @@ export default {
|
|||||||
claude: 'Claude',
|
claude: 'Claude',
|
||||||
openai: 'OpenAI',
|
openai: 'OpenAI',
|
||||||
gemini: 'Gemini',
|
gemini: 'Gemini',
|
||||||
|
sora: 'Sora',
|
||||||
antigravity: 'Antigravity'
|
antigravity: 'Antigravity'
|
||||||
},
|
},
|
||||||
types: {
|
types: {
|
||||||
@@ -1247,6 +1249,11 @@ export default {
|
|||||||
baseUrlHint: 'Leave default for official OpenAI API',
|
baseUrlHint: 'Leave default for official OpenAI API',
|
||||||
apiKeyHint: 'Your OpenAI API Key'
|
apiKeyHint: 'Your OpenAI API Key'
|
||||||
},
|
},
|
||||||
|
// Sora specific hints
|
||||||
|
sora: {
|
||||||
|
baseUrlHint: 'Leave empty to use global Sora Base URL',
|
||||||
|
apiKeyHint: 'Your Sora access token'
|
||||||
|
},
|
||||||
modelRestriction: 'Model Restriction (Optional)',
|
modelRestriction: 'Model Restriction (Optional)',
|
||||||
modelWhitelist: 'Model Whitelist',
|
modelWhitelist: 'Model Whitelist',
|
||||||
modelMapping: 'Model Mapping',
|
modelMapping: 'Model Mapping',
|
||||||
@@ -2784,6 +2791,47 @@ export default {
|
|||||||
defaultConcurrency: 'Default Concurrency',
|
defaultConcurrency: 'Default Concurrency',
|
||||||
defaultConcurrencyHint: 'Maximum concurrent requests for new users'
|
defaultConcurrencyHint: 'Maximum concurrent requests for new users'
|
||||||
},
|
},
|
||||||
|
sora: {
|
||||||
|
title: 'Sora Settings',
|
||||||
|
description: 'Configure Sora upstream requests, cache, and watermark-free flow',
|
||||||
|
baseUrl: 'Sora Base URL',
|
||||||
|
baseUrlPlaceholder: 'https://sora.chatgpt.com/backend',
|
||||||
|
baseUrlHint: 'Base URL for the Sora backend API',
|
||||||
|
callLogicMode: 'Call Mode',
|
||||||
|
callLogicModeDefault: 'Default',
|
||||||
|
callLogicModeNative: 'Native',
|
||||||
|
callLogicModeProxy: 'Proxy',
|
||||||
|
callLogicModeHint: 'Default keeps the existing behavior',
|
||||||
|
timeout: 'Timeout (seconds)',
|
||||||
|
timeoutHint: 'Timeout for single request',
|
||||||
|
maxRetries: 'Max Retries',
|
||||||
|
maxRetriesHint: 'Retry count for upstream failures',
|
||||||
|
pollInterval: 'Poll Interval (seconds)',
|
||||||
|
pollIntervalHint: 'Polling interval for task status',
|
||||||
|
cacheEnabled: 'Enable Cache',
|
||||||
|
cacheEnabledHint: 'Cache generated media for local downloads',
|
||||||
|
cacheBaseDir: 'Cache Base Dir',
|
||||||
|
cacheVideoDir: 'Video Cache Dir',
|
||||||
|
cacheMaxBytes: 'Cache Size (bytes)',
|
||||||
|
cacheMaxBytesHint: '0 means unlimited',
|
||||||
|
cacheUserDirEnabled: 'User Directory Isolation',
|
||||||
|
cacheUserDirEnabledHint: 'Create per-user subdirectories',
|
||||||
|
cacheAllowedHosts: 'Cache Allowlist',
|
||||||
|
cacheAllowedHostsPlaceholder: 'One host per line, e.g. oscdn2.dyysy.com',
|
||||||
|
cacheAllowedHostsHint: 'Empty falls back to the global URL allowlist',
|
||||||
|
watermarkFreeEnabled: 'Enable Watermark-Free',
|
||||||
|
watermarkFreeEnabledHint: 'Try to resolve watermark-free videos',
|
||||||
|
watermarkFreeParseMethod: 'Parse Method',
|
||||||
|
watermarkFreeParseMethodThirdParty: 'Third-party',
|
||||||
|
watermarkFreeParseMethodCustom: 'Custom',
|
||||||
|
watermarkFreeParseMethodHint: 'Select the watermark-free parse method',
|
||||||
|
watermarkFreeCustomParseUrl: 'Custom Parse URL',
|
||||||
|
watermarkFreeCustomParseToken: 'Custom Parse Token',
|
||||||
|
watermarkFreeFallback: 'Fallback on Failure',
|
||||||
|
watermarkFreeFallbackHint: 'Return the original video on failure',
|
||||||
|
tokenRefreshEnabled: 'Enable Token Refresh',
|
||||||
|
tokenRefreshEnabledHint: 'Periodic token refresh (requires scheduler)'
|
||||||
|
},
|
||||||
site: {
|
site: {
|
||||||
title: 'Site Settings',
|
title: 'Site Settings',
|
||||||
description: 'Customize site branding',
|
description: 'Customize site branding',
|
||||||
|
|||||||
@@ -941,6 +941,7 @@ export default {
|
|||||||
anthropic: 'Anthropic',
|
anthropic: 'Anthropic',
|
||||||
openai: 'OpenAI',
|
openai: 'OpenAI',
|
||||||
gemini: 'Gemini',
|
gemini: 'Gemini',
|
||||||
|
sora: 'Sora',
|
||||||
antigravity: 'Antigravity'
|
antigravity: 'Antigravity'
|
||||||
},
|
},
|
||||||
saving: '保存中...',
|
saving: '保存中...',
|
||||||
@@ -1199,6 +1200,7 @@ export default {
|
|||||||
openai: 'OpenAI',
|
openai: 'OpenAI',
|
||||||
anthropic: 'Anthropic',
|
anthropic: 'Anthropic',
|
||||||
gemini: 'Gemini',
|
gemini: 'Gemini',
|
||||||
|
sora: 'Sora',
|
||||||
antigravity: 'Antigravity'
|
antigravity: 'Antigravity'
|
||||||
},
|
},
|
||||||
types: {
|
types: {
|
||||||
@@ -1382,6 +1384,11 @@ export default {
|
|||||||
baseUrlHint: '留空使用官方 OpenAI API',
|
baseUrlHint: '留空使用官方 OpenAI API',
|
||||||
apiKeyHint: '您的 OpenAI API Key'
|
apiKeyHint: '您的 OpenAI API Key'
|
||||||
},
|
},
|
||||||
|
// Sora specific hints
|
||||||
|
sora: {
|
||||||
|
baseUrlHint: '留空使用全局 Sora Base URL',
|
||||||
|
apiKeyHint: '您的 Sora access token'
|
||||||
|
},
|
||||||
modelRestriction: '模型限制(可选)',
|
modelRestriction: '模型限制(可选)',
|
||||||
modelWhitelist: '模型白名单',
|
modelWhitelist: '模型白名单',
|
||||||
modelMapping: '模型映射',
|
modelMapping: '模型映射',
|
||||||
@@ -2936,6 +2943,47 @@ export default {
|
|||||||
defaultConcurrency: '默认并发数',
|
defaultConcurrency: '默认并发数',
|
||||||
defaultConcurrencyHint: '新用户的最大并发请求数'
|
defaultConcurrencyHint: '新用户的最大并发请求数'
|
||||||
},
|
},
|
||||||
|
sora: {
|
||||||
|
title: 'Sora 设置',
|
||||||
|
description: '配置 Sora 上游请求、缓存与去水印策略',
|
||||||
|
baseUrl: 'Sora Base URL',
|
||||||
|
baseUrlPlaceholder: 'https://sora.chatgpt.com/backend',
|
||||||
|
baseUrlHint: 'Sora 后端 API 基础地址',
|
||||||
|
callLogicMode: '调用模式',
|
||||||
|
callLogicModeDefault: '默认',
|
||||||
|
callLogicModeNative: '原生',
|
||||||
|
callLogicModeProxy: '代理',
|
||||||
|
callLogicModeHint: '默认保持当前策略',
|
||||||
|
timeout: '请求超时(秒)',
|
||||||
|
timeoutHint: '单次任务超时控制',
|
||||||
|
maxRetries: '最大重试次数',
|
||||||
|
maxRetriesHint: '上游请求失败时的重试次数',
|
||||||
|
pollInterval: '轮询间隔(秒)',
|
||||||
|
pollIntervalHint: '任务状态轮询间隔',
|
||||||
|
cacheEnabled: '启用缓存',
|
||||||
|
cacheEnabledHint: '启用生成结果缓存并提供本地下载',
|
||||||
|
cacheBaseDir: '缓存根目录',
|
||||||
|
cacheVideoDir: '视频缓存目录',
|
||||||
|
cacheMaxBytes: '缓存容量(字节)',
|
||||||
|
cacheMaxBytesHint: '0 表示不限制',
|
||||||
|
cacheUserDirEnabled: '按用户隔离缓存目录',
|
||||||
|
cacheUserDirEnabledHint: '开启后按用户创建子目录',
|
||||||
|
cacheAllowedHosts: '缓存下载白名单',
|
||||||
|
cacheAllowedHostsPlaceholder: '每行一个域名,例如: oscdn2.dyysy.com',
|
||||||
|
cacheAllowedHostsHint: '为空时回退全局 URL 白名单',
|
||||||
|
watermarkFreeEnabled: '启用去水印',
|
||||||
|
watermarkFreeEnabledHint: '尝试通过解析服务获取无水印视频',
|
||||||
|
watermarkFreeParseMethod: '解析方式',
|
||||||
|
watermarkFreeParseMethodThirdParty: '第三方解析',
|
||||||
|
watermarkFreeParseMethodCustom: '自定义解析',
|
||||||
|
watermarkFreeParseMethodHint: '选择去水印解析方式',
|
||||||
|
watermarkFreeCustomParseUrl: '自定义解析地址',
|
||||||
|
watermarkFreeCustomParseToken: '自定义解析 Token',
|
||||||
|
watermarkFreeFallback: '解析失败降级',
|
||||||
|
watermarkFreeFallbackHint: '失败时返回原视频',
|
||||||
|
tokenRefreshEnabled: '启用 Token 刷新',
|
||||||
|
tokenRefreshEnabledHint: '定时刷新 Sora Token(需配置调度)'
|
||||||
|
},
|
||||||
site: {
|
site: {
|
||||||
title: '站点设置',
|
title: '站点设置',
|
||||||
description: '自定义站点品牌',
|
description: '自定义站点品牌',
|
||||||
|
|||||||
@@ -252,7 +252,7 @@ export interface PaginationConfig {
|
|||||||
|
|
||||||
// ==================== API Key & Group Types ====================
|
// ==================== API Key & Group Types ====================
|
||||||
|
|
||||||
export type GroupPlatform = 'anthropic' | 'openai' | 'gemini' | 'antigravity'
|
export type GroupPlatform = 'anthropic' | 'openai' | 'gemini' | 'antigravity' | 'sora'
|
||||||
|
|
||||||
export type SubscriptionType = 'standard' | 'subscription'
|
export type SubscriptionType = 'standard' | 'subscription'
|
||||||
|
|
||||||
@@ -355,7 +355,7 @@ export interface UpdateGroupRequest {
|
|||||||
|
|
||||||
// ==================== Account & Proxy Types ====================
|
// ==================== Account & Proxy Types ====================
|
||||||
|
|
||||||
export type AccountPlatform = 'anthropic' | 'openai' | 'gemini' | 'antigravity'
|
export type AccountPlatform = 'anthropic' | 'openai' | 'gemini' | 'antigravity' | 'sora'
|
||||||
export type AccountType = 'oauth' | 'setup-token' | 'apikey'
|
export type AccountType = 'oauth' | 'setup-token' | 'apikey'
|
||||||
export type OAuthAddMethod = 'oauth' | 'setup-token'
|
export type OAuthAddMethod = 'oauth' | 'setup-token'
|
||||||
export type ProxyProtocol = 'http' | 'https' | 'socks5' | 'socks5h'
|
export type ProxyProtocol = 'http' | 'https' | 'socks5' | 'socks5h'
|
||||||
|
|||||||
@@ -1152,6 +1152,7 @@ const platformOptions = computed(() => [
|
|||||||
{ value: 'anthropic', label: 'Anthropic' },
|
{ value: 'anthropic', label: 'Anthropic' },
|
||||||
{ value: 'openai', label: 'OpenAI' },
|
{ value: 'openai', label: 'OpenAI' },
|
||||||
{ value: 'gemini', label: 'Gemini' },
|
{ value: 'gemini', label: 'Gemini' },
|
||||||
|
{ value: 'sora', label: 'Sora' },
|
||||||
{ value: 'antigravity', label: 'Antigravity' }
|
{ value: 'antigravity', label: 'Antigravity' }
|
||||||
])
|
])
|
||||||
|
|
||||||
@@ -1160,6 +1161,7 @@ const platformFilterOptions = computed(() => [
|
|||||||
{ value: 'anthropic', label: 'Anthropic' },
|
{ value: 'anthropic', label: 'Anthropic' },
|
||||||
{ value: 'openai', label: 'OpenAI' },
|
{ value: 'openai', label: 'OpenAI' },
|
||||||
{ value: 'gemini', label: 'Gemini' },
|
{ value: 'gemini', label: 'Gemini' },
|
||||||
|
{ value: 'sora', label: 'Sora' },
|
||||||
{ value: 'antigravity', label: 'Antigravity' }
|
{ value: 'antigravity', label: 'Antigravity' }
|
||||||
])
|
])
|
||||||
|
|
||||||
|
|||||||
@@ -561,6 +561,221 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Sora Settings -->
|
||||||
|
<div class="card">
|
||||||
|
<div class="border-b border-gray-100 px-6 py-4 dark:border-dark-700">
|
||||||
|
<h2 class="text-lg font-semibold text-gray-900 dark:text-white">
|
||||||
|
{{ t('admin.settings.sora.title') }}
|
||||||
|
</h2>
|
||||||
|
<p class="mt-1 text-sm text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.description') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div class="space-y-6 p-6">
|
||||||
|
<div class="grid grid-cols-1 gap-6 md:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.baseUrl') }}
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
v-model="form.sora_base_url"
|
||||||
|
type="text"
|
||||||
|
class="input font-mono text-sm"
|
||||||
|
:placeholder="t('admin.settings.sora.baseUrlPlaceholder')"
|
||||||
|
/>
|
||||||
|
<p class="mt-1.5 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.baseUrlHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.callLogicMode') }}
|
||||||
|
</label>
|
||||||
|
<select v-model="form.sora_call_logic_mode" class="input">
|
||||||
|
<option value="default">{{ t('admin.settings.sora.callLogicModeDefault') }}</option>
|
||||||
|
<option value="native">{{ t('admin.settings.sora.callLogicModeNative') }}</option>
|
||||||
|
<option value="proxy">{{ t('admin.settings.sora.callLogicModeProxy') }}</option>
|
||||||
|
</select>
|
||||||
|
<p class="mt-1.5 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.callLogicModeHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="grid grid-cols-1 gap-6 md:grid-cols-3">
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.timeout') }}
|
||||||
|
</label>
|
||||||
|
<input v-model.number="form.sora_timeout" type="number" min="1" class="input" />
|
||||||
|
<p class="mt-1.5 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.timeoutHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.maxRetries') }}
|
||||||
|
</label>
|
||||||
|
<input v-model.number="form.sora_max_retries" type="number" min="0" class="input" />
|
||||||
|
<p class="mt-1.5 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.maxRetriesHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.pollInterval') }}
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
v-model.number="form.sora_poll_interval"
|
||||||
|
type="number"
|
||||||
|
min="0.5"
|
||||||
|
step="0.1"
|
||||||
|
class="input"
|
||||||
|
/>
|
||||||
|
<p class="mt-1.5 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.pollIntervalHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="space-y-4 border-t border-gray-100 pt-4 dark:border-dark-700">
|
||||||
|
<div class="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<label class="font-medium text-gray-900 dark:text-white">{{
|
||||||
|
t('admin.settings.sora.cacheEnabled')
|
||||||
|
}}</label>
|
||||||
|
<p class="text-sm text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.cacheEnabledHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<Toggle v-model="form.sora_cache_enabled" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div v-if="form.sora_cache_enabled" class="space-y-4">
|
||||||
|
<div class="grid grid-cols-1 gap-6 md:grid-cols-3">
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.cacheBaseDir') }}
|
||||||
|
</label>
|
||||||
|
<input v-model="form.sora_cache_base_dir" type="text" class="input font-mono text-sm" />
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.cacheVideoDir') }}
|
||||||
|
</label>
|
||||||
|
<input v-model="form.sora_cache_video_dir" type="text" class="input font-mono text-sm" />
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.cacheMaxBytes') }}
|
||||||
|
</label>
|
||||||
|
<input v-model.number="form.sora_cache_max_bytes" type="number" min="0" class="input" />
|
||||||
|
<p class="mt-1.5 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.cacheMaxBytesHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<label class="font-medium text-gray-900 dark:text-white">{{
|
||||||
|
t('admin.settings.sora.cacheUserDirEnabled')
|
||||||
|
}}</label>
|
||||||
|
<p class="text-sm text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.cacheUserDirEnabledHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<Toggle v-model="form.sora_cache_user_dir_enabled" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.cacheAllowedHosts') }}
|
||||||
|
</label>
|
||||||
|
<textarea
|
||||||
|
v-model="form.sora_cache_allowed_hosts_text"
|
||||||
|
rows="3"
|
||||||
|
class="input font-mono text-sm"
|
||||||
|
:placeholder="t('admin.settings.sora.cacheAllowedHostsPlaceholder')"
|
||||||
|
></textarea>
|
||||||
|
<p class="mt-1.5 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.cacheAllowedHostsHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="space-y-4 border-t border-gray-100 pt-4 dark:border-dark-700">
|
||||||
|
<div class="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<label class="font-medium text-gray-900 dark:text-white">{{
|
||||||
|
t('admin.settings.sora.watermarkFreeEnabled')
|
||||||
|
}}</label>
|
||||||
|
<p class="text-sm text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.watermarkFreeEnabledHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<Toggle v-model="form.sora_watermark_free_enabled" />
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div v-if="form.sora_watermark_free_enabled" class="space-y-4">
|
||||||
|
<div class="grid grid-cols-1 gap-6 md:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.watermarkFreeParseMethod') }}
|
||||||
|
</label>
|
||||||
|
<select v-model="form.sora_watermark_free_parse_method" class="input">
|
||||||
|
<option value="third_party">{{ t('admin.settings.sora.watermarkFreeParseMethodThirdParty') }}</option>
|
||||||
|
<option value="custom">{{ t('admin.settings.sora.watermarkFreeParseMethodCustom') }}</option>
|
||||||
|
</select>
|
||||||
|
<p class="mt-1.5 text-xs text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.watermarkFreeParseMethodHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div class="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<label class="font-medium text-gray-900 dark:text-white">{{
|
||||||
|
t('admin.settings.sora.watermarkFreeFallback')
|
||||||
|
}}</label>
|
||||||
|
<p class="text-sm text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.watermarkFreeFallbackHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<Toggle v-model="form.sora_watermark_free_fallback_on_failure" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div v-if="form.sora_watermark_free_parse_method === 'custom'" class="grid grid-cols-1 gap-6 md:grid-cols-2">
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.watermarkFreeCustomParseUrl') }}
|
||||||
|
</label>
|
||||||
|
<input v-model="form.sora_watermark_free_custom_parse_url" type="text" class="input font-mono text-sm" />
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label class="mb-2 block text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||||
|
{{ t('admin.settings.sora.watermarkFreeCustomParseToken') }}
|
||||||
|
</label>
|
||||||
|
<input v-model="form.sora_watermark_free_custom_parse_token" type="password" class="input font-mono text-sm" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="flex items-center justify-between border-t border-gray-100 pt-4 dark:border-dark-700">
|
||||||
|
<div>
|
||||||
|
<label class="font-medium text-gray-900 dark:text-white">{{
|
||||||
|
t('admin.settings.sora.tokenRefreshEnabled')
|
||||||
|
}}</label>
|
||||||
|
<p class="text-sm text-gray-500 dark:text-gray-400">
|
||||||
|
{{ t('admin.settings.sora.tokenRefreshEnabledHint') }}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<Toggle v-model="form.sora_token_refresh_enabled" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<!-- Site Settings -->
|
<!-- Site Settings -->
|
||||||
<div class="card">
|
<div class="card">
|
||||||
<div class="border-b border-gray-100 px-6 py-4 dark:border-dark-700">
|
<div class="border-b border-gray-100 px-6 py-4 dark:border-dark-700">
|
||||||
@@ -1023,6 +1238,7 @@ type SettingsForm = SystemSettings & {
|
|||||||
smtp_password: string
|
smtp_password: string
|
||||||
turnstile_secret_key: string
|
turnstile_secret_key: string
|
||||||
linuxdo_connect_client_secret: string
|
linuxdo_connect_client_secret: string
|
||||||
|
sora_cache_allowed_hosts_text: string
|
||||||
}
|
}
|
||||||
|
|
||||||
const form = reactive<SettingsForm>({
|
const form = reactive<SettingsForm>({
|
||||||
@@ -1067,6 +1283,25 @@ const form = reactive<SettingsForm>({
|
|||||||
// Identity patch (Claude -> Gemini)
|
// Identity patch (Claude -> Gemini)
|
||||||
enable_identity_patch: true,
|
enable_identity_patch: true,
|
||||||
identity_patch_prompt: '',
|
identity_patch_prompt: '',
|
||||||
|
// Sora
|
||||||
|
sora_base_url: 'https://sora.chatgpt.com/backend',
|
||||||
|
sora_timeout: 120,
|
||||||
|
sora_max_retries: 3,
|
||||||
|
sora_poll_interval: 2.5,
|
||||||
|
sora_call_logic_mode: 'default',
|
||||||
|
sora_cache_enabled: false,
|
||||||
|
sora_cache_base_dir: 'tmp/sora',
|
||||||
|
sora_cache_video_dir: 'data/video',
|
||||||
|
sora_cache_max_bytes: 0,
|
||||||
|
sora_cache_allowed_hosts: [],
|
||||||
|
sora_cache_user_dir_enabled: true,
|
||||||
|
sora_watermark_free_enabled: false,
|
||||||
|
sora_watermark_free_parse_method: 'third_party',
|
||||||
|
sora_watermark_free_custom_parse_url: '',
|
||||||
|
sora_watermark_free_custom_parse_token: '',
|
||||||
|
sora_watermark_free_fallback_on_failure: true,
|
||||||
|
sora_token_refresh_enabled: false,
|
||||||
|
sora_cache_allowed_hosts_text: '',
|
||||||
// Ops monitoring (vNext)
|
// Ops monitoring (vNext)
|
||||||
ops_monitoring_enabled: true,
|
ops_monitoring_enabled: true,
|
||||||
ops_realtime_monitoring_enabled: true,
|
ops_realtime_monitoring_enabled: true,
|
||||||
@@ -1136,6 +1371,7 @@ async function loadSettings() {
|
|||||||
form.smtp_password = ''
|
form.smtp_password = ''
|
||||||
form.turnstile_secret_key = ''
|
form.turnstile_secret_key = ''
|
||||||
form.linuxdo_connect_client_secret = ''
|
form.linuxdo_connect_client_secret = ''
|
||||||
|
form.sora_cache_allowed_hosts_text = (settings.sora_cache_allowed_hosts || []).join('\n')
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
appStore.showError(
|
appStore.showError(
|
||||||
t('admin.settings.failedToLoad') + ': ' + (error.message || t('common.unknownError'))
|
t('admin.settings.failedToLoad') + ': ' + (error.message || t('common.unknownError'))
|
||||||
@@ -1148,6 +1384,11 @@ async function loadSettings() {
|
|||||||
async function saveSettings() {
|
async function saveSettings() {
|
||||||
saving.value = true
|
saving.value = true
|
||||||
try {
|
try {
|
||||||
|
const soraAllowedHosts = form.sora_cache_allowed_hosts_text
|
||||||
|
.split(/\r?\n/)
|
||||||
|
.map((value) => value.trim())
|
||||||
|
.filter((value) => value.length > 0)
|
||||||
|
|
||||||
const payload: UpdateSettingsRequest = {
|
const payload: UpdateSettingsRequest = {
|
||||||
registration_enabled: form.registration_enabled,
|
registration_enabled: form.registration_enabled,
|
||||||
email_verify_enabled: form.email_verify_enabled,
|
email_verify_enabled: form.email_verify_enabled,
|
||||||
@@ -1182,13 +1423,31 @@ async function saveSettings() {
|
|||||||
fallback_model_gemini: form.fallback_model_gemini,
|
fallback_model_gemini: form.fallback_model_gemini,
|
||||||
fallback_model_antigravity: form.fallback_model_antigravity,
|
fallback_model_antigravity: form.fallback_model_antigravity,
|
||||||
enable_identity_patch: form.enable_identity_patch,
|
enable_identity_patch: form.enable_identity_patch,
|
||||||
identity_patch_prompt: form.identity_patch_prompt
|
identity_patch_prompt: form.identity_patch_prompt,
|
||||||
|
sora_base_url: form.sora_base_url,
|
||||||
|
sora_timeout: form.sora_timeout,
|
||||||
|
sora_max_retries: form.sora_max_retries,
|
||||||
|
sora_poll_interval: form.sora_poll_interval,
|
||||||
|
sora_call_logic_mode: form.sora_call_logic_mode,
|
||||||
|
sora_cache_enabled: form.sora_cache_enabled,
|
||||||
|
sora_cache_base_dir: form.sora_cache_base_dir,
|
||||||
|
sora_cache_video_dir: form.sora_cache_video_dir,
|
||||||
|
sora_cache_max_bytes: form.sora_cache_max_bytes,
|
||||||
|
sora_cache_allowed_hosts: soraAllowedHosts,
|
||||||
|
sora_cache_user_dir_enabled: form.sora_cache_user_dir_enabled,
|
||||||
|
sora_watermark_free_enabled: form.sora_watermark_free_enabled,
|
||||||
|
sora_watermark_free_parse_method: form.sora_watermark_free_parse_method,
|
||||||
|
sora_watermark_free_custom_parse_url: form.sora_watermark_free_custom_parse_url,
|
||||||
|
sora_watermark_free_custom_parse_token: form.sora_watermark_free_custom_parse_token,
|
||||||
|
sora_watermark_free_fallback_on_failure: form.sora_watermark_free_fallback_on_failure,
|
||||||
|
sora_token_refresh_enabled: form.sora_token_refresh_enabled
|
||||||
}
|
}
|
||||||
const updated = await adminAPI.settings.updateSettings(payload)
|
const updated = await adminAPI.settings.updateSettings(payload)
|
||||||
Object.assign(form, updated)
|
Object.assign(form, updated)
|
||||||
form.smtp_password = ''
|
form.smtp_password = ''
|
||||||
form.turnstile_secret_key = ''
|
form.turnstile_secret_key = ''
|
||||||
form.linuxdo_connect_client_secret = ''
|
form.linuxdo_connect_client_secret = ''
|
||||||
|
form.sora_cache_allowed_hosts_text = (updated.sora_cache_allowed_hosts || []).join('\n')
|
||||||
// Refresh cached public settings so sidebar/header update immediately
|
// Refresh cached public settings so sidebar/header update immediately
|
||||||
await appStore.fetchPublicSettings(true)
|
await appStore.fetchPublicSettings(true)
|
||||||
appStore.showSuccess(t('admin.settings.settingsSaved'))
|
appStore.showSuccess(t('admin.settings.settingsSaved'))
|
||||||
|
|||||||
@@ -111,6 +111,7 @@ const platformOptions = computed(() => [
|
|||||||
{ value: 'openai', label: 'OpenAI' },
|
{ value: 'openai', label: 'OpenAI' },
|
||||||
{ value: 'anthropic', label: 'Anthropic' },
|
{ value: 'anthropic', label: 'Anthropic' },
|
||||||
{ value: 'gemini', label: 'Gemini' },
|
{ value: 'gemini', label: 'Gemini' },
|
||||||
|
{ value: 'sora', label: 'Sora' },
|
||||||
{ value: 'antigravity', label: 'Antigravity' }
|
{ value: 'antigravity', label: 'Antigravity' }
|
||||||
])
|
])
|
||||||
|
|
||||||
|
|||||||
@@ -916,6 +916,7 @@ const executeCcsImport = (row: ApiKey, clientType: 'claude' | 'gemini') => {
|
|||||||
} else {
|
} else {
|
||||||
switch (platform) {
|
switch (platform) {
|
||||||
case 'openai':
|
case 'openai':
|
||||||
|
case 'sora':
|
||||||
app = 'codex'
|
app = 'codex'
|
||||||
endpoint = baseUrl
|
endpoint = baseUrl
|
||||||
break
|
break
|
||||||
|
|||||||
Reference in New Issue
Block a user