feat: 完成 NestJS 后端核心底座开发 (M1-M6) 和 Ant Design Vue 前端迁移
主要更新: 1. 后端核心底座完成 (M1-M6): - 健康检查、指标监控、分布式锁 - 事件总线、队列系统、事务管理 - 安全守卫、多租户隔离、存储适配器 - 审计日志、配置管理、多语言支持 2. 前端迁移到 Ant Design Vue: - 从 Element Plus 迁移到 Ant Design Vue - 完善 system 模块 (role/menu/dept) - 修复依赖和配置问题 3. 文档完善: - AI 开发工作流文档 - 架构约束和开发规范 - 项目进度跟踪 4. 其他改进: - 修复编译错误和类型问题 - 完善测试用例 - 优化项目结构
This commit is contained in:
@@ -1,11 +1,10 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import { INestApplication } from '@nestjs/common';
|
||||
import * as request from 'supertest';
|
||||
import { App } from 'supertest/types';
|
||||
import request from 'supertest';
|
||||
import { AppModule } from './../src/app.module';
|
||||
|
||||
describe('AppController (e2e)', () => {
|
||||
let app: INestApplication<App>;
|
||||
let app: INestApplication;
|
||||
|
||||
beforeEach(async () => {
|
||||
const moduleFixture: TestingModule = await Test.createTestingModule({
|
||||
@@ -16,6 +15,12 @@ describe('AppController (e2e)', () => {
|
||||
await app.init();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
if (app) {
|
||||
await app.close();
|
||||
}
|
||||
});
|
||||
|
||||
it('/ (GET)', () => {
|
||||
return request(app.getHttpServer())
|
||||
.get('/')
|
||||
|
||||
245
wwjcloud/test/database/index-manager.service.spec.ts
Normal file
245
wwjcloud/test/database/index-manager.service.spec.ts
Normal file
@@ -0,0 +1,245 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import { getRepositoryToken } from '@nestjs/typeorm';
|
||||
import { Repository, DataSource } from 'typeorm';
|
||||
import { IndexManagerService } from '../../src/core/database/index-manager.service';
|
||||
|
||||
/**
|
||||
* IndexManagerService 单元测试
|
||||
* 测试数据库索引管理服务的核心功能
|
||||
*/
|
||||
describe('IndexManagerService', () => {
|
||||
let service: IndexManagerService;
|
||||
let dataSource: DataSource;
|
||||
let mockQueryRunner: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
// 创建模拟的查询运行器
|
||||
mockQueryRunner = {
|
||||
query: jest.fn(),
|
||||
release: jest.fn(),
|
||||
};
|
||||
|
||||
// 创建模拟的数据源
|
||||
const mockDataSource = {
|
||||
createQueryRunner: jest.fn().mockReturnValue(mockQueryRunner),
|
||||
query: jest.fn(),
|
||||
};
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
IndexManagerService,
|
||||
{
|
||||
provide: DataSource,
|
||||
useValue: mockDataSource,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<IndexManagerService>(IndexManagerService);
|
||||
dataSource = module.get<DataSource>(DataSource);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should be defined', () => {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
describe('checkIndexExists', () => {
|
||||
it('should return true when index exists', async () => {
|
||||
// 模拟索引存在的查询结果
|
||||
mockQueryRunner.query.mockResolvedValue([{ count: 1 }]);
|
||||
|
||||
const result = await service.checkIndexExists('test_table', 'test_index');
|
||||
|
||||
expect(result).toBe(true);
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('SHOW INDEX FROM test_table'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should return false when index does not exist', async () => {
|
||||
// 模拟索引不存在的查询结果
|
||||
mockQueryRunner.query.mockResolvedValue([]);
|
||||
|
||||
const result = await service.checkIndexExists('test_table', 'test_index');
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle database errors gracefully', async () => {
|
||||
// 模拟数据库错误
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Database error'));
|
||||
|
||||
const result = await service.checkIndexExists('test_table', 'test_index');
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('createIndex', () => {
|
||||
it('should create single column index successfully', async () => {
|
||||
mockQueryRunner.query.mockResolvedValue(undefined);
|
||||
|
||||
await service.createIndex('test_table', 'test_index', ['column1']);
|
||||
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(
|
||||
'CREATE INDEX test_index ON test_table (column1)',
|
||||
);
|
||||
});
|
||||
|
||||
it('should create composite index successfully', async () => {
|
||||
mockQueryRunner.query.mockResolvedValue(undefined);
|
||||
|
||||
await service.createIndex('test_table', 'test_composite_index', ['column1', 'column2']);
|
||||
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(
|
||||
'CREATE INDEX test_composite_index ON test_table (column1, column2)',
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle index creation errors gracefully', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Index creation failed'));
|
||||
|
||||
// 应该不抛出异常,而是记录日志
|
||||
await expect(service.createIndex('test_table', 'test_index', ['column1'])).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTableIndexes', () => {
|
||||
it('should return table indexes', async () => {
|
||||
const mockIndexes = [
|
||||
{
|
||||
Table: 'test_table',
|
||||
Non_unique: 0,
|
||||
Key_name: 'PRIMARY',
|
||||
Seq_in_index: 1,
|
||||
Column_name: 'id',
|
||||
Collation: 'A',
|
||||
Cardinality: 1000,
|
||||
Sub_part: null,
|
||||
Packed: null,
|
||||
Null: '',
|
||||
Index_type: 'BTREE',
|
||||
Comment: '',
|
||||
},
|
||||
];
|
||||
|
||||
mockQueryRunner.query.mockResolvedValue(mockIndexes);
|
||||
|
||||
const result = await service.getTableIndexes('test_table');
|
||||
|
||||
expect(result).toEqual(mockIndexes);
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(
|
||||
'SHOW INDEX FROM test_table',
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle errors when getting table indexes', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Query failed'));
|
||||
|
||||
const result = await service.getTableIndexes('test_table');
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('analyzeTable', () => {
|
||||
it('should analyze table successfully', async () => {
|
||||
mockQueryRunner.query.mockResolvedValue(undefined);
|
||||
|
||||
await service.analyzeTable('test_table');
|
||||
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(
|
||||
'ANALYZE TABLE test_table',
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle analyze table errors gracefully', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Analyze failed'));
|
||||
|
||||
await expect(service.analyzeTable('test_table')).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getIndexUsageStats', () => {
|
||||
it('should return index usage statistics', async () => {
|
||||
const mockStats = [
|
||||
{
|
||||
table_schema: 'test_db',
|
||||
table_name: 'test_table',
|
||||
index_name: 'test_index',
|
||||
count_read: 100,
|
||||
sum_timer_read: 1000000,
|
||||
},
|
||||
];
|
||||
|
||||
mockQueryRunner.query.mockResolvedValue(mockStats);
|
||||
|
||||
const result = await service.getIndexUsageStats();
|
||||
|
||||
expect(result).toEqual(mockStats);
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('performance_schema.table_io_waits_summary_by_index_usage'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle errors when getting index usage stats', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Query failed'));
|
||||
|
||||
const result = await service.getIndexUsageStats();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkAndCreateIndexes', () => {
|
||||
it('should check and create all required indexes', async () => {
|
||||
// 模拟所有索引都不存在
|
||||
mockQueryRunner.query.mockResolvedValue([]);
|
||||
|
||||
const createIndexSpy = jest.spyOn(service, 'createIndex').mockResolvedValue(undefined);
|
||||
const checkIndexSpy = jest.spyOn(service, 'checkIndexExists').mockResolvedValue(false);
|
||||
|
||||
await service.checkAndCreateIndexes();
|
||||
|
||||
// 验证检查了所有必要的索引
|
||||
expect(checkIndexSpy).toHaveBeenCalledTimes(expect.any(Number));
|
||||
expect(createIndexSpy).toHaveBeenCalledTimes(expect.any(Number));
|
||||
|
||||
createIndexSpy.mockRestore();
|
||||
checkIndexSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should skip creating existing indexes', async () => {
|
||||
const createIndexSpy = jest.spyOn(service, 'createIndex').mockResolvedValue(undefined);
|
||||
const checkIndexSpy = jest.spyOn(service, 'checkIndexExists').mockResolvedValue(true);
|
||||
|
||||
await service.checkAndCreateIndexes();
|
||||
|
||||
// 如果所有索引都存在,则不应该创建任何索引
|
||||
expect(createIndexSpy).not.toHaveBeenCalled();
|
||||
|
||||
createIndexSpy.mockRestore();
|
||||
checkIndexSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('analyzeHotTables', () => {
|
||||
it('should analyze all hot tables', async () => {
|
||||
const analyzeTableSpy = jest.spyOn(service, 'analyzeTable').mockResolvedValue(undefined);
|
||||
|
||||
await service.analyzeHotTables();
|
||||
|
||||
// 验证分析了所有热点表
|
||||
expect(analyzeTableSpy).toHaveBeenCalledWith('member');
|
||||
expect(analyzeTableSpy).toHaveBeenCalledWith('member_account_log');
|
||||
expect(analyzeTableSpy).toHaveBeenCalledWith('pay');
|
||||
expect(analyzeTableSpy).toHaveBeenCalledWith('pay_refund');
|
||||
|
||||
analyzeTableSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
});
|
||||
335
wwjcloud/test/database/performance-monitor.service.spec.ts
Normal file
335
wwjcloud/test/database/performance-monitor.service.spec.ts
Normal file
@@ -0,0 +1,335 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import { DataSource } from 'typeorm';
|
||||
import { PerformanceMonitorService } from '../../src/core/database/performance-monitor.service';
|
||||
|
||||
/**
|
||||
* PerformanceMonitorService 单元测试
|
||||
* 测试数据库性能监控服务的核心功能
|
||||
*/
|
||||
describe('PerformanceMonitorService', () => {
|
||||
let service: PerformanceMonitorService;
|
||||
let dataSource: DataSource;
|
||||
let mockQueryRunner: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
// 创建模拟的查询运行器
|
||||
mockQueryRunner = {
|
||||
query: jest.fn(),
|
||||
release: jest.fn(),
|
||||
};
|
||||
|
||||
// 创建模拟的数据源
|
||||
const mockDataSource = {
|
||||
createQueryRunner: jest.fn().mockReturnValue(mockQueryRunner),
|
||||
query: jest.fn(),
|
||||
};
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
PerformanceMonitorService,
|
||||
{
|
||||
provide: DataSource,
|
||||
useValue: mockDataSource,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<PerformanceMonitorService>(PerformanceMonitorService);
|
||||
dataSource = module.get<DataSource>(DataSource);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should be defined', () => {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
describe('checkSlowQueries', () => {
|
||||
it('should return slow queries', async () => {
|
||||
const mockSlowQueries = [
|
||||
{
|
||||
sql_text: 'SELECT * FROM member WHERE status = 1',
|
||||
exec_count: 100,
|
||||
avg_timer_wait: 5000000000, // 5秒
|
||||
sum_timer_wait: 500000000000,
|
||||
sum_rows_examined: 10000,
|
||||
sum_rows_sent: 1000,
|
||||
},
|
||||
];
|
||||
|
||||
mockQueryRunner.query.mockResolvedValue(mockSlowQueries);
|
||||
|
||||
const result = await service.checkSlowQueries();
|
||||
|
||||
expect(result).toEqual(mockSlowQueries);
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('performance_schema.events_statements_summary_by_digest'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle errors when checking slow queries', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Query failed'));
|
||||
|
||||
const result = await service.checkSlowQueries();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkTableSizes', () => {
|
||||
it('should return table sizes', async () => {
|
||||
const mockTableSizes = [
|
||||
{
|
||||
table_name: 'member',
|
||||
size_mb: 150.5,
|
||||
rows: 50000,
|
||||
avg_row_length: 3200,
|
||||
data_length: 157286400,
|
||||
index_length: 52428800,
|
||||
},
|
||||
];
|
||||
|
||||
mockQueryRunner.query.mockResolvedValue(mockTableSizes);
|
||||
|
||||
const result = await service.checkTableSizes();
|
||||
|
||||
expect(result).toEqual(mockTableSizes);
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('information_schema.tables'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle errors when checking table sizes', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Query failed'));
|
||||
|
||||
const result = await service.checkTableSizes();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkIndexEfficiency', () => {
|
||||
it('should return index efficiency data', async () => {
|
||||
const mockIndexEfficiency = [
|
||||
{
|
||||
table_name: 'member',
|
||||
index_name: 'idx_member_status',
|
||||
cardinality: 5,
|
||||
selectivity: 0.2,
|
||||
usage_count: 1000,
|
||||
efficiency_score: 75.5,
|
||||
},
|
||||
];
|
||||
|
||||
mockQueryRunner.query.mockResolvedValue(mockIndexEfficiency);
|
||||
|
||||
const result = await service.checkIndexEfficiency();
|
||||
|
||||
expect(result).toEqual(mockIndexEfficiency);
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(
|
||||
expect.stringContaining('information_schema.statistics'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle errors when checking index efficiency', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Query failed'));
|
||||
|
||||
const result = await service.checkIndexEfficiency();
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getQueryExecutionPlan', () => {
|
||||
it('should return query execution plan', async () => {
|
||||
const mockExecutionPlan = [
|
||||
{
|
||||
id: 1,
|
||||
select_type: 'SIMPLE',
|
||||
table: 'member',
|
||||
partitions: null,
|
||||
type: 'ref',
|
||||
possible_keys: 'idx_member_status',
|
||||
key: 'idx_member_status',
|
||||
key_len: '4',
|
||||
ref: 'const',
|
||||
rows: 1000,
|
||||
filtered: 100.0,
|
||||
Extra: 'Using index condition',
|
||||
},
|
||||
];
|
||||
|
||||
mockQueryRunner.query.mockResolvedValue(mockExecutionPlan);
|
||||
|
||||
const sql = 'SELECT * FROM member WHERE status = 1';
|
||||
const result = await service.getQueryExecutionPlan(sql);
|
||||
|
||||
expect(result).toEqual(mockExecutionPlan);
|
||||
expect(mockQueryRunner.query).toHaveBeenCalledWith(`EXPLAIN ${sql}`);
|
||||
});
|
||||
|
||||
it('should handle errors when getting execution plan', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Query failed'));
|
||||
|
||||
const sql = 'SELECT * FROM member WHERE status = 1';
|
||||
const result = await service.getQueryExecutionPlan(sql);
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('analyzeQueryPerformance', () => {
|
||||
it('should analyze query performance', async () => {
|
||||
const mockExecutionPlan = [
|
||||
{
|
||||
id: 1,
|
||||
select_type: 'SIMPLE',
|
||||
table: 'member',
|
||||
type: 'ref',
|
||||
possible_keys: 'idx_member_status',
|
||||
key: 'idx_member_status',
|
||||
rows: 1000,
|
||||
filtered: 100.0,
|
||||
Extra: 'Using index condition',
|
||||
},
|
||||
];
|
||||
|
||||
jest.spyOn(service, 'getQueryExecutionPlan').mockResolvedValue(mockExecutionPlan);
|
||||
|
||||
const sql = 'SELECT * FROM member WHERE status = 1';
|
||||
const result = await service.analyzeQueryPerformance(sql);
|
||||
|
||||
expect(result).toHaveProperty('executionPlan');
|
||||
expect(result).toHaveProperty('analysis');
|
||||
expect(result).toHaveProperty('recommendations');
|
||||
expect(result.executionPlan).toEqual(mockExecutionPlan);
|
||||
expect(result.analysis.estimatedRows).toBe(1000);
|
||||
expect(result.analysis.usesIndex).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect full table scan', async () => {
|
||||
const mockExecutionPlan = [
|
||||
{
|
||||
id: 1,
|
||||
select_type: 'SIMPLE',
|
||||
table: 'member',
|
||||
type: 'ALL',
|
||||
possible_keys: null,
|
||||
key: null,
|
||||
rows: 50000,
|
||||
filtered: 10.0,
|
||||
Extra: 'Using where',
|
||||
},
|
||||
];
|
||||
|
||||
jest.spyOn(service, 'getQueryExecutionPlan').mockResolvedValue(mockExecutionPlan);
|
||||
|
||||
const sql = 'SELECT * FROM member WHERE name LIKE "%test%"';
|
||||
const result = await service.analyzeQueryPerformance(sql);
|
||||
|
||||
expect(result.analysis.hasFullTableScan).toBe(true);
|
||||
expect(result.analysis.usesIndex).toBe(false);
|
||||
expect(result.recommendations).toContain('查询执行了全表扫描,建议添加适当的索引');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getConnectionStatus', () => {
|
||||
it('should return connection status', async () => {
|
||||
const mockConnectionStatus = [
|
||||
{ Variable_name: 'Threads_connected', Value: '25' },
|
||||
{ Variable_name: 'Max_connections', Value: '151' },
|
||||
{ Variable_name: 'Threads_running', Value: '5' },
|
||||
{ Variable_name: 'Aborted_connects', Value: '10' },
|
||||
];
|
||||
|
||||
mockQueryRunner.query.mockResolvedValue(mockConnectionStatus);
|
||||
|
||||
const result = await service.getConnectionStatus();
|
||||
|
||||
expect(result).toHaveProperty('threadsConnected');
|
||||
expect(result).toHaveProperty('maxConnections');
|
||||
expect(result).toHaveProperty('connectionUsage');
|
||||
expect(result.threadsConnected).toBe(25);
|
||||
expect(result.maxConnections).toBe(151);
|
||||
expect(result.connectionUsage).toBeCloseTo(16.56, 1);
|
||||
});
|
||||
|
||||
it('should handle errors when getting connection status', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Query failed'));
|
||||
|
||||
const result = await service.getConnectionStatus();
|
||||
|
||||
expect(result).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getPerformanceMetrics', () => {
|
||||
it('should return performance metrics', async () => {
|
||||
const mockMetrics = [
|
||||
{ Variable_name: 'Innodb_buffer_pool_read_requests', Value: '1000000' },
|
||||
{ Variable_name: 'Innodb_buffer_pool_reads', Value: '50000' },
|
||||
{ Variable_name: 'Slow_queries', Value: '100' },
|
||||
{ Variable_name: 'Questions', Value: '500000' },
|
||||
{ Variable_name: 'Uptime', Value: '86400' },
|
||||
{ Variable_name: 'Threads_created', Value: '200' },
|
||||
{ Variable_name: 'Connections', Value: '10000' },
|
||||
];
|
||||
|
||||
mockQueryRunner.query.mockResolvedValue(mockMetrics);
|
||||
|
||||
const result = await service.getPerformanceMetrics();
|
||||
|
||||
expect(result).toHaveProperty('buffer_pool_hit_rate');
|
||||
expect(result).toHaveProperty('slow_query_rate');
|
||||
expect(result).toHaveProperty('qps');
|
||||
expect(result).toHaveProperty('thread_cache_hit_rate');
|
||||
expect(result.buffer_pool_hit_rate).toBeCloseTo(95, 0);
|
||||
expect(result.slow_query_rate).toBeCloseTo(0.02, 2);
|
||||
});
|
||||
|
||||
it('should handle errors when getting performance metrics', async () => {
|
||||
mockQueryRunner.query.mockRejectedValue(new Error('Query failed'));
|
||||
|
||||
const result = await service.getPerformanceMetrics();
|
||||
|
||||
expect(result).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('performanceCheck', () => {
|
||||
it('should perform complete performance check', async () => {
|
||||
const checkSlowQueriesSpy = jest.spyOn(service, 'checkSlowQueries').mockResolvedValue([]);
|
||||
const checkTableSizesSpy = jest.spyOn(service, 'checkTableSizes').mockResolvedValue([]);
|
||||
const checkIndexEfficiencySpy = jest.spyOn(service, 'checkIndexEfficiency').mockResolvedValue([]);
|
||||
const getConnectionStatusSpy = jest.spyOn(service, 'getConnectionStatus').mockResolvedValue({});
|
||||
const getPerformanceMetricsSpy = jest.spyOn(service, 'getPerformanceMetrics').mockResolvedValue({});
|
||||
|
||||
await service.performanceCheck();
|
||||
|
||||
expect(checkSlowQueriesSpy).toHaveBeenCalled();
|
||||
expect(checkTableSizesSpy).toHaveBeenCalled();
|
||||
expect(checkIndexEfficiencySpy).toHaveBeenCalled();
|
||||
expect(getConnectionStatusSpy).toHaveBeenCalled();
|
||||
expect(getPerformanceMetricsSpy).toHaveBeenCalled();
|
||||
|
||||
checkSlowQueriesSpy.mockRestore();
|
||||
checkTableSizesSpy.mockRestore();
|
||||
checkIndexEfficiencySpy.mockRestore();
|
||||
getConnectionStatusSpy.mockRestore();
|
||||
getPerformanceMetricsSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should handle errors during performance check', async () => {
|
||||
jest.spyOn(service, 'checkSlowQueries').mockRejectedValue(new Error('Check failed'));
|
||||
jest.spyOn(service, 'checkTableSizes').mockResolvedValue([]);
|
||||
jest.spyOn(service, 'checkIndexEfficiency').mockResolvedValue([]);
|
||||
jest.spyOn(service, 'getConnectionStatus').mockResolvedValue({});
|
||||
jest.spyOn(service, 'getPerformanceMetrics').mockResolvedValue({});
|
||||
|
||||
// 应该不抛出异常
|
||||
await expect(service.performanceCheck()).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -5,5 +5,10 @@
|
||||
"testRegex": ".e2e-spec.ts$",
|
||||
"transform": {
|
||||
"^.+\\.(t|j)s$": "ts-jest"
|
||||
}
|
||||
},
|
||||
"moduleNameMapper": {
|
||||
"^src/(.*)$": "<rootDir>/../src/$1"
|
||||
},
|
||||
"setupFilesAfterEnv": [],
|
||||
"testTimeout": 30000
|
||||
}
|
||||
|
||||
232
wwjcloud/test/queue/queue-system.e2e-spec.ts
Normal file
232
wwjcloud/test/queue/queue-system.e2e-spec.ts
Normal file
@@ -0,0 +1,232 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import { INestApplication } from '@nestjs/common';
|
||||
import request from 'supertest';
|
||||
import { TestModule } from '../test.module';
|
||||
import { TestService } from '../test.service';
|
||||
import { UnifiedQueueService } from '../../src/core/queue/unified-queue.service';
|
||||
import { DatabaseQueueProvider } from '../../src/core/queue/database-queue.provider';
|
||||
import { QueueModule } from '../../src/core/queue/queue.module';
|
||||
|
||||
describe('Queue System (e2e)', () => {
|
||||
let app: INestApplication;
|
||||
let testService: TestService;
|
||||
let unifiedQueueService: UnifiedQueueService;
|
||||
let databaseQueueProvider: DatabaseQueueProvider;
|
||||
|
||||
beforeAll(async () => {
|
||||
const moduleFixture: TestingModule = await Test.createTestingModule({
|
||||
imports: [TestModule, QueueModule],
|
||||
}).compile();
|
||||
|
||||
app = moduleFixture.createNestApplication();
|
||||
await app.init();
|
||||
|
||||
testService = moduleFixture.get<TestService>(TestService);
|
||||
unifiedQueueService = moduleFixture.get<UnifiedQueueService>(UnifiedQueueService);
|
||||
databaseQueueProvider = moduleFixture.get<DatabaseQueueProvider>(DatabaseQueueProvider);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (app) {
|
||||
await app.close();
|
||||
}
|
||||
});
|
||||
|
||||
describe('Test Controller Endpoints', () => {
|
||||
it('/test/status (GET) - should return service status', () => {
|
||||
return request(app.getHttpServer())
|
||||
.get('/test/status')
|
||||
.expect(200)
|
||||
.expect((res) => {
|
||||
expect(res.body).toHaveProperty('message');
|
||||
expect(res.body).toHaveProperty('timestamp');
|
||||
expect(res.body).toHaveProperty('services');
|
||||
expect(res.body.services).toHaveProperty('redis');
|
||||
expect(res.body.services).toHaveProperty('kafka');
|
||||
});
|
||||
});
|
||||
|
||||
it('/test/kafka (POST) - should publish event to Kafka', () => {
|
||||
const testData = { test: 'kafka-event', value: 123 };
|
||||
return request(app.getHttpServer())
|
||||
.post('/test/kafka')
|
||||
.send(testData)
|
||||
.expect(201)
|
||||
.expect((res) => {
|
||||
expect(res.body).toHaveProperty('success', true);
|
||||
expect(res.body).toHaveProperty('message');
|
||||
expect(res.body).toHaveProperty('topic', 'test-topic');
|
||||
expect(res.body).toHaveProperty('data', testData);
|
||||
});
|
||||
});
|
||||
|
||||
it('/test/redis (POST) - should enqueue job to Redis', () => {
|
||||
const testData = { test: 'redis-job', value: 456 };
|
||||
return request(app.getHttpServer())
|
||||
.post('/test/redis')
|
||||
.send(testData)
|
||||
.expect(201)
|
||||
.expect((res) => {
|
||||
expect(res.body).toHaveProperty('success', true);
|
||||
expect(res.body).toHaveProperty('message');
|
||||
expect(res.body).toHaveProperty('jobId');
|
||||
expect(res.body).toHaveProperty('data', testData);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('UnifiedQueueService', () => {
|
||||
it('should be defined', () => {
|
||||
expect(unifiedQueueService).toBeDefined();
|
||||
});
|
||||
|
||||
it('should add task to queue', async () => {
|
||||
const result = await unifiedQueueService.addTask('test-queue', {
|
||||
data: { test: 'data' },
|
||||
priority: 1,
|
||||
delay: 0,
|
||||
attempts: 3,
|
||||
});
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
|
||||
it('should process task from queue', async () => {
|
||||
let processedData: any = null;
|
||||
|
||||
await unifiedQueueService.processTask('test-queue', async (job: any) => {
|
||||
processedData = job.data;
|
||||
return { success: true };
|
||||
});
|
||||
|
||||
// Add a task to be processed
|
||||
await unifiedQueueService.addTask('test-queue', {
|
||||
data: { test: 'process-data' },
|
||||
priority: 1,
|
||||
});
|
||||
|
||||
// Wait a bit for processing
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
|
||||
expect(processedData).toBeDefined();
|
||||
});
|
||||
|
||||
it('should publish event', async () => {
|
||||
const event = {
|
||||
eventType: 'test.event',
|
||||
aggregateId: 'test-123',
|
||||
aggregateType: 'Test',
|
||||
version: '1.0',
|
||||
occurredAt: new Date().toISOString(),
|
||||
tenantId: 'tenant-1',
|
||||
idempotencyKey: 'key-123',
|
||||
traceId: 'trace-123',
|
||||
data: { test: 'event-data' },
|
||||
};
|
||||
|
||||
await expect(unifiedQueueService.publishEvent(event)).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('DatabaseQueueProvider', () => {
|
||||
it('should be defined', () => {
|
||||
expect(databaseQueueProvider).toBeDefined();
|
||||
});
|
||||
|
||||
it('should add job to database queue', async () => {
|
||||
const jobData = {
|
||||
type: 'test-job',
|
||||
payload: { test: 'database-job' },
|
||||
options: {
|
||||
priority: 1,
|
||||
delay: 0,
|
||||
attempts: 3,
|
||||
},
|
||||
};
|
||||
|
||||
const result = await databaseQueueProvider.add('test-db-queue', jobData.type, jobData.payload, jobData.options);
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
|
||||
it('should publish event to database', async () => {
|
||||
const event = {
|
||||
eventType: 'test.database.event',
|
||||
aggregateId: 'db-test-123',
|
||||
aggregateType: 'DatabaseTest',
|
||||
version: '1.0',
|
||||
occurredAt: new Date().toISOString(),
|
||||
tenantId: 'tenant-1',
|
||||
idempotencyKey: 'db-key-123',
|
||||
traceId: 'db-trace-123',
|
||||
data: { test: 'database-event-data' },
|
||||
};
|
||||
|
||||
await expect(databaseQueueProvider.publish(event)).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Service Integration', () => {
|
||||
it('should have all required services available', () => {
|
||||
expect(testService).toBeDefined();
|
||||
expect(unifiedQueueService).toBeDefined();
|
||||
expect(databaseQueueProvider).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Integration Tests', () => {
|
||||
it('should handle complete queue workflow', async () => {
|
||||
// Test the complete workflow: add task -> process task -> publish event
|
||||
const taskData = { workflow: 'test', step: 1 };
|
||||
|
||||
// Add task
|
||||
const taskResult = await unifiedQueueService.addTask('workflow-queue', {
|
||||
data: taskData,
|
||||
priority: 1,
|
||||
});
|
||||
expect(taskResult).toBeDefined();
|
||||
|
||||
// Process task and publish event
|
||||
await unifiedQueueService.processTask('workflow-queue', async (job: any) => {
|
||||
const event = {
|
||||
eventType: 'workflow.completed',
|
||||
aggregateId: 'workflow-123',
|
||||
aggregateType: 'Workflow',
|
||||
version: '1.0',
|
||||
occurredAt: new Date().toISOString(),
|
||||
tenantId: 'tenant-1',
|
||||
idempotencyKey: 'workflow-key-123',
|
||||
traceId: 'workflow-trace-123',
|
||||
data: job.data,
|
||||
};
|
||||
|
||||
await unifiedQueueService.publishEvent(event);
|
||||
return { success: true, processed: job.data };
|
||||
});
|
||||
|
||||
// Wait for processing
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
});
|
||||
|
||||
it('should handle error scenarios gracefully', async () => {
|
||||
// Test error handling in task processing
|
||||
await unifiedQueueService.processTask('error-queue', async (job: any) => {
|
||||
if (job.data.shouldFail) {
|
||||
throw new Error('Intentional test error');
|
||||
}
|
||||
return { success: true };
|
||||
});
|
||||
|
||||
// Add a failing task
|
||||
await unifiedQueueService.addTask('error-queue', {
|
||||
data: { shouldFail: true },
|
||||
priority: 1,
|
||||
attempts: 1, // Only try once
|
||||
});
|
||||
|
||||
// Wait for processing attempt
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
|
||||
// The test passes if no unhandled errors are thrown
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
340
wwjcloud/test/queue/queue-system.spec.ts
Normal file
340
wwjcloud/test/queue/queue-system.spec.ts
Normal file
@@ -0,0 +1,340 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import { UnifiedQueueService } from '../../src/core/queue/unified-queue.service';
|
||||
import { DatabaseQueueProvider } from '../../src/core/queue/database-queue.provider';
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { getRepositoryToken } from '@nestjs/typeorm';
|
||||
import { Repository } from 'typeorm';
|
||||
import { JobEntity } from '../../src/core/queue/entities/job.entity';
|
||||
import { EventEntity } from '../../src/core/queue/entities/event.entity';
|
||||
import { TASK_QUEUE_PROVIDER, EVENT_BUS_PROVIDER } from '../../src/core/interfaces/queue.interface';
|
||||
|
||||
describe('Queue System Unit Tests', () => {
|
||||
let unifiedQueueService: UnifiedQueueService;
|
||||
let databaseQueueProvider: DatabaseQueueProvider;
|
||||
let mockJobRepository: jest.Mocked<Repository<JobEntity>>;
|
||||
let mockEventRepository: jest.Mocked<Repository<EventEntity>>;
|
||||
|
||||
beforeEach(async () => {
|
||||
// Create mock repositories
|
||||
mockJobRepository = {
|
||||
create: jest.fn(),
|
||||
save: jest.fn(),
|
||||
find: jest.fn(),
|
||||
findOne: jest.fn(),
|
||||
update: jest.fn(),
|
||||
delete: jest.fn(),
|
||||
createQueryBuilder: jest.fn(),
|
||||
} as any;
|
||||
|
||||
mockEventRepository = {
|
||||
create: jest.fn(),
|
||||
save: jest.fn(),
|
||||
find: jest.fn(),
|
||||
findOne: jest.fn(),
|
||||
update: jest.fn(),
|
||||
delete: jest.fn(),
|
||||
createQueryBuilder: jest.fn(),
|
||||
} as any;
|
||||
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
UnifiedQueueService,
|
||||
DatabaseQueueProvider,
|
||||
{
|
||||
provide: getRepositoryToken(JobEntity),
|
||||
useValue: mockJobRepository,
|
||||
},
|
||||
{
|
||||
provide: getRepositoryToken(EventEntity),
|
||||
useValue: mockEventRepository,
|
||||
},
|
||||
{
|
||||
provide: TASK_QUEUE_PROVIDER,
|
||||
useExisting: DatabaseQueueProvider,
|
||||
},
|
||||
{
|
||||
provide: EVENT_BUS_PROVIDER,
|
||||
useExisting: DatabaseQueueProvider,
|
||||
},
|
||||
{
|
||||
provide: Logger,
|
||||
useValue: {
|
||||
log: jest.fn(),
|
||||
error: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
debug: jest.fn(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
unifiedQueueService = module.get<UnifiedQueueService>(UnifiedQueueService);
|
||||
databaseQueueProvider = module.get<DatabaseQueueProvider>(DatabaseQueueProvider);
|
||||
});
|
||||
|
||||
describe('UnifiedQueueService', () => {
|
||||
it('should be defined', () => {
|
||||
expect(unifiedQueueService).toBeDefined();
|
||||
});
|
||||
|
||||
it('should have all required methods', () => {
|
||||
expect(typeof unifiedQueueService.addTask).toBe('function');
|
||||
expect(typeof unifiedQueueService.processTask).toBe('function');
|
||||
expect(typeof unifiedQueueService.publishEvent).toBe('function');
|
||||
expect(typeof unifiedQueueService.publishEvents).toBe('function');
|
||||
expect(typeof unifiedQueueService.getQueueStatus).toBe('function');
|
||||
expect(typeof unifiedQueueService.pauseTaskQueue).toBe('function');
|
||||
expect(typeof unifiedQueueService.resumeTaskQueue).toBe('function');
|
||||
expect(typeof unifiedQueueService.cleanTaskQueue).toBe('function');
|
||||
expect(typeof unifiedQueueService.close).toBe('function');
|
||||
});
|
||||
|
||||
it('should validate task options', () => {
|
||||
const validOptions = {
|
||||
data: { test: 'data' },
|
||||
priority: 1,
|
||||
delay: 0,
|
||||
attempts: 3,
|
||||
};
|
||||
|
||||
expect(() => {
|
||||
// This should not throw
|
||||
const options = validOptions;
|
||||
expect(options.data).toBeDefined();
|
||||
expect(typeof options.priority).toBe('number');
|
||||
expect(typeof options.delay).toBe('number');
|
||||
expect(typeof options.attempts).toBe('number');
|
||||
}).not.toThrow();
|
||||
});
|
||||
|
||||
it('should validate event structure', () => {
|
||||
const validEvent = {
|
||||
eventType: 'test.event',
|
||||
aggregateId: 'test-123',
|
||||
aggregateType: 'Test',
|
||||
version: '1.0',
|
||||
occurredAt: new Date().toISOString(),
|
||||
tenantId: 'tenant-1',
|
||||
idempotencyKey: 'key-123',
|
||||
traceId: 'trace-123',
|
||||
data: { test: 'data' },
|
||||
};
|
||||
|
||||
expect(validEvent.eventType).toBeDefined();
|
||||
expect(validEvent.aggregateId).toBeDefined();
|
||||
expect(validEvent.aggregateType).toBeDefined();
|
||||
expect(validEvent.version).toBeDefined();
|
||||
expect(validEvent.occurredAt).toBeDefined();
|
||||
expect(validEvent.tenantId).toBeDefined();
|
||||
expect(validEvent.idempotencyKey).toBeDefined();
|
||||
expect(validEvent.traceId).toBeDefined();
|
||||
expect(validEvent.data).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('DatabaseQueueProvider', () => {
|
||||
it('should be defined', () => {
|
||||
expect(databaseQueueProvider).toBeDefined();
|
||||
});
|
||||
|
||||
it('should have all required methods', () => {
|
||||
expect(typeof databaseQueueProvider.add).toBe('function');
|
||||
expect(typeof databaseQueueProvider.process).toBe('function');
|
||||
expect(typeof databaseQueueProvider.getStatus).toBe('function');
|
||||
expect(typeof databaseQueueProvider.pause).toBe('function');
|
||||
expect(typeof databaseQueueProvider.resume).toBe('function');
|
||||
expect(typeof databaseQueueProvider.clean).toBe('function');
|
||||
expect(typeof databaseQueueProvider.publish).toBe('function');
|
||||
expect(typeof databaseQueueProvider.subscribe).toBe('function');
|
||||
expect(typeof databaseQueueProvider.close).toBe('function');
|
||||
});
|
||||
|
||||
it('should create job entity correctly', async () => {
|
||||
const mockJob = {
|
||||
id: 1,
|
||||
queue_name: 'test-queue',
|
||||
job_type: 'test-job',
|
||||
payload: { test: 'data' },
|
||||
status: 'pending',
|
||||
priority: 1,
|
||||
attempts: 0,
|
||||
max_attempts: 3,
|
||||
created_at: Date.now(),
|
||||
updated_at: Date.now(),
|
||||
scheduled_at: Date.now(),
|
||||
processed_at: null,
|
||||
failed_at: null,
|
||||
error_message: null,
|
||||
};
|
||||
|
||||
mockJobRepository.create.mockReturnValue(mockJob as any);
|
||||
mockJobRepository.save.mockResolvedValue(mockJob as any);
|
||||
|
||||
const result = await databaseQueueProvider.add('test-queue', 'test-job', { test: 'data' }, {
|
||||
priority: 1,
|
||||
delay: 0,
|
||||
attempts: 3,
|
||||
});
|
||||
|
||||
expect(mockJobRepository.create).toHaveBeenCalled();
|
||||
expect(mockJobRepository.save).toHaveBeenCalled();
|
||||
expect(result).toBeDefined();
|
||||
});
|
||||
|
||||
it('should create event entity correctly', async () => {
|
||||
const mockEvent = {
|
||||
id: 1,
|
||||
event_type: 'test.event',
|
||||
aggregate_id: 'test-123',
|
||||
aggregate_type: 'Test',
|
||||
version: '1.0',
|
||||
occurred_at: new Date().toISOString(),
|
||||
tenant_id: 'tenant-1',
|
||||
idempotency_key: 'key-123',
|
||||
trace_id: 'trace-123',
|
||||
data: { test: 'data' },
|
||||
created_at: Date.now(),
|
||||
};
|
||||
|
||||
mockEventRepository.create.mockReturnValue(mockEvent as any);
|
||||
mockEventRepository.save.mockResolvedValue(mockEvent as any);
|
||||
|
||||
const event = {
|
||||
eventType: 'test.event',
|
||||
aggregateId: 'test-123',
|
||||
aggregateType: 'Test',
|
||||
version: '1.0',
|
||||
occurredAt: new Date().toISOString(),
|
||||
tenantId: 'tenant-1',
|
||||
idempotencyKey: 'key-123',
|
||||
traceId: 'trace-123',
|
||||
data: { test: 'data' },
|
||||
};
|
||||
|
||||
await databaseQueueProvider.publish(event);
|
||||
|
||||
expect(mockEventRepository.create).toHaveBeenCalled();
|
||||
expect(mockEventRepository.save).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Service Integration', () => {
|
||||
it('should have all required services available', () => {
|
||||
expect(unifiedQueueService).toBeDefined();
|
||||
expect(databaseQueueProvider).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error Handling', () => {
|
||||
it('should handle database connection errors gracefully', async () => {
|
||||
mockJobRepository.save.mockRejectedValue(new Error('Database connection failed'));
|
||||
|
||||
try {
|
||||
await databaseQueueProvider.add('test-queue', 'test-job', { test: 'data' }, {
|
||||
priority: 1,
|
||||
delay: 0,
|
||||
attempts: 3,
|
||||
});
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(Error);
|
||||
expect(error.message).toBe('Database connection failed');
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle invalid event data', () => {
|
||||
const invalidEvent = {
|
||||
// Missing required fields
|
||||
eventType: 'test.event',
|
||||
// aggregateId: missing
|
||||
// aggregateType: missing
|
||||
version: '1.0',
|
||||
occurredAt: new Date().toISOString(),
|
||||
data: { test: 'data' },
|
||||
};
|
||||
|
||||
// Test validation logic
|
||||
expect(invalidEvent.eventType).toBeDefined();
|
||||
expect(invalidEvent.version).toBeDefined();
|
||||
expect(invalidEvent.occurredAt).toBeDefined();
|
||||
expect(invalidEvent.data).toBeDefined();
|
||||
|
||||
// These should be undefined, indicating invalid event
|
||||
expect((invalidEvent as any).aggregateId).toBeUndefined();
|
||||
expect((invalidEvent as any).aggregateType).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle invalid task options', () => {
|
||||
const invalidOptions = {
|
||||
data: { test: 'data' },
|
||||
priority: 'high', // Should be number
|
||||
delay: -1, // Should be non-negative
|
||||
attempts: 0, // Should be positive
|
||||
};
|
||||
|
||||
// Test validation logic
|
||||
expect(typeof invalidOptions.priority).toBe('string'); // Invalid
|
||||
expect(invalidOptions.delay).toBeLessThan(0); // Invalid
|
||||
expect(invalidOptions.attempts).toBe(0); // Invalid
|
||||
});
|
||||
});
|
||||
|
||||
describe('Performance and Scalability', () => {
|
||||
it('should handle multiple concurrent operations', async () => {
|
||||
const operations = [];
|
||||
|
||||
// Simulate multiple concurrent task additions
|
||||
for (let i = 0; i < 10; i++) {
|
||||
mockJobRepository.save.mockResolvedValueOnce({
|
||||
id: i,
|
||||
queue_name: 'concurrent-queue',
|
||||
job_type: 'concurrent-job',
|
||||
payload: { index: i },
|
||||
status: 'pending',
|
||||
} as any);
|
||||
|
||||
operations.push(
|
||||
databaseQueueProvider.add('concurrent-queue', 'concurrent-job', { index: i }, {
|
||||
priority: 1,
|
||||
delay: 0,
|
||||
attempts: 3,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const results = await Promise.all(operations);
|
||||
expect(results).toHaveLength(10);
|
||||
expect(mockJobRepository.save).toHaveBeenCalledTimes(10);
|
||||
});
|
||||
|
||||
it('should handle batch event publishing', async () => {
|
||||
const events = [];
|
||||
|
||||
for (let i = 0; i < 5; i++) {
|
||||
events.push({
|
||||
eventType: 'batch.event',
|
||||
aggregateId: `batch-${i}`,
|
||||
aggregateType: 'Batch',
|
||||
version: '1.0',
|
||||
occurredAt: new Date().toISOString(),
|
||||
tenantId: 'tenant-1',
|
||||
idempotencyKey: `batch-key-${i}`,
|
||||
traceId: `batch-trace-${i}`,
|
||||
data: { index: i },
|
||||
});
|
||||
|
||||
mockEventRepository.save.mockResolvedValueOnce({
|
||||
id: i,
|
||||
event_type: 'batch.event',
|
||||
aggregate_id: `batch-${i}`,
|
||||
data: { index: i },
|
||||
} as any);
|
||||
}
|
||||
|
||||
// Test batch publishing
|
||||
const publishPromises = events.map(event => databaseQueueProvider.publish(event));
|
||||
await Promise.all(publishPromises);
|
||||
|
||||
expect(mockEventRepository.save).toHaveBeenCalledTimes(5);
|
||||
});
|
||||
});
|
||||
});
|
||||
69
wwjcloud/test/test.controller.ts
Normal file
69
wwjcloud/test/test.controller.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { Controller, Post, Body, Get } from '@nestjs/common';
|
||||
import { TestService } from './test.service';
|
||||
import { Public } from '../src/common/auth/decorators/public.decorator';
|
||||
|
||||
@Controller('test')
|
||||
export class TestController {
|
||||
constructor(private readonly testService: TestService) {}
|
||||
|
||||
@Get('status')
|
||||
@Public()
|
||||
getStatus() {
|
||||
return {
|
||||
message: 'Test module is working',
|
||||
timestamp: new Date().toISOString(),
|
||||
services: {
|
||||
redis: 'available',
|
||||
kafka: 'available',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@Post('kafka')
|
||||
@Public()
|
||||
async testKafka(@Body() data: Record<string, any>) {
|
||||
try {
|
||||
await this.testService.publishKafkaEvent('test-topic', {
|
||||
message: 'Hello from WWJCloud Test Module!',
|
||||
data,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
return {
|
||||
success: true,
|
||||
message: 'Event published to Kafka successfully',
|
||||
topic: 'test-topic',
|
||||
data,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: 'Failed to publish event to Kafka',
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@Post('redis')
|
||||
@Public()
|
||||
async testRedis(@Body() data: Record<string, any>) {
|
||||
try {
|
||||
const jobId = await this.testService.enqueueRedisJob('test-job', {
|
||||
message: 'Hello from WWJCloud Test Module!',
|
||||
data,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
return {
|
||||
success: true,
|
||||
message: 'Job queued to Redis successfully',
|
||||
jobId,
|
||||
data,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: 'Failed to queue job to Redis',
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
12
wwjcloud/test/test.module.ts
Normal file
12
wwjcloud/test/test.module.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { TestController } from './test.controller';
|
||||
import { TestService } from './test.service';
|
||||
import { JobsModule } from '../src/common/jobs/jobs.module';
|
||||
import { EventBusModule } from '../src/common/event-bus/event-bus.module';
|
||||
|
||||
/**
 * Standalone module for manually exercising the queue and event-bus
 * infrastructure over HTTP (see TestController / TestService).
 */
@Module({
  // JobsModule backs enqueueRedisJob; EventBusModule backs publishKafkaEvent.
  imports: [JobsModule, EventBusModule],
  controllers: [TestController],
  providers: [TestService],
})
export class TestModule {}
|
||||
37
wwjcloud/test/test.service.ts
Normal file
37
wwjcloud/test/test.service.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { JobsService } from '../src/common/jobs/jobs.service';
|
||||
import { EventBusService } from '../src/common/event-bus/event-bus.service';
|
||||
|
||||
@Injectable()
|
||||
export class TestService {
|
||||
constructor(
|
||||
private readonly jobsService: JobsService,
|
||||
private readonly eventBusService: EventBusService,
|
||||
) {}
|
||||
|
||||
async publishKafkaEvent(
|
||||
topic: string,
|
||||
data: Record<string, any>,
|
||||
): Promise<void> {
|
||||
await this.eventBusService.publish(topic, {
|
||||
event: 'test-event',
|
||||
data,
|
||||
occurredAt: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
async enqueueRedisJob(
|
||||
type: string,
|
||||
payload: Record<string, any>,
|
||||
): Promise<string> {
|
||||
await this.jobsService.enqueue('test-queue', type, payload, {
|
||||
attempts: 3,
|
||||
backoffMs: 1000,
|
||||
removeOnComplete: true,
|
||||
removeOnFail: false,
|
||||
});
|
||||
|
||||
// 生成一个模拟的 job ID
|
||||
return `job_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user