@friggframework/core 2.0.0--canary.461.2ca8c89.0 → 2.0.0--canary.461.3a5f072.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/database/use-cases/get-migration-status-use-case.test.js +215 -0
- package/database/use-cases/trigger-database-migration-use-case.js +9 -9
- package/database/use-cases/trigger-database-migration-use-case.test.js +265 -0
- package/package.json +5 -5
- package/handlers/workers/db-migration.test.js +0 -437
package/database/use-cases/get-migration-status-use-case.test.js
ADDED

@@ -0,0 +1,215 @@
+/**
+ * Tests for GetMigrationStatusUseCase
+ */
+
+const {
+    GetMigrationStatusUseCase,
+    ValidationError,
+    NotFoundError,
+} = require('./get-migration-status-use-case');
+
+describe('GetMigrationStatusUseCase', () => {
+    let useCase;
+    let mockProcessRepository;
+
+    beforeEach(() => {
+        // Create mock repository
+        mockProcessRepository = {
+            findById: jest.fn(),
+        };
+
+        // Create use case with mock
+        useCase = new GetMigrationStatusUseCase({
+            processRepository: mockProcessRepository,
+        });
+    });
+
+    afterEach(() => {
+        jest.clearAllMocks();
+    });
+
+    describe('constructor', () => {
+        it('should throw error if processRepository not provided', () => {
+            expect(() => {
+                new GetMigrationStatusUseCase({});
+            }).toThrow('processRepository dependency is required');
+        });
+    });
+
+    describe('execute', () => {
+        it('should return migration status for COMPLETED process', async () => {
+            const mockProcess = {
+                id: 'process-123',
+                type: 'DATABASE_MIGRATION',
+                state: 'COMPLETED',
+                context: {
+                    dbType: 'postgresql',
+                    stage: 'production',
+                    migrationCommand: 'migrate deploy',
+                },
+                results: {
+                    success: true,
+                    duration: '2341ms',
+                    timestamp: '2025-10-18T10:30:00Z',
+                },
+                createdAt: new Date('2025-10-18T10:29:55Z'),
+                updatedAt: new Date('2025-10-18T10:30:02Z'),
+            };
+
+            mockProcessRepository.findById.mockResolvedValue(mockProcess);
+
+            const result = await useCase.execute({ processId: 'process-123' });
+
+            expect(mockProcessRepository.findById).toHaveBeenCalledWith('process-123');
+            expect(result).toEqual({
+                processId: 'process-123',
+                type: 'DATABASE_MIGRATION',
+                state: 'COMPLETED',
+                context: mockProcess.context,
+                results: mockProcess.results,
+                createdAt: mockProcess.createdAt,
+                updatedAt: mockProcess.updatedAt,
+            });
+        });
+
+        it('should return migration status for RUNNING process', async () => {
+            const mockProcess = {
+                id: 'process-456',
+                type: 'DATABASE_MIGRATION',
+                state: 'RUNNING',
+                context: {
+                    dbType: 'mongodb',
+                    stage: 'dev',
+                    startedAt: '2025-10-18T10:30:00Z',
+                },
+                results: {},
+                createdAt: new Date('2025-10-18T10:29:55Z'),
+                updatedAt: new Date('2025-10-18T10:30:00Z'),
+            };
+
+            mockProcessRepository.findById.mockResolvedValue(mockProcess);
+
+            const result = await useCase.execute({ processId: 'process-456' });
+
+            expect(result.state).toBe('RUNNING');
+            expect(result.context.dbType).toBe('mongodb');
+        });
+
+        it('should return migration status for FAILED process', async () => {
+            const mockProcess = {
+                id: 'process-789',
+                type: 'DATABASE_MIGRATION',
+                state: 'FAILED',
+                context: {
+                    dbType: 'postgresql',
+                    stage: 'production',
+                    failedAt: '2025-10-18T10:30:00Z',
+                },
+                results: {
+                    error: 'Migration failed: syntax error',
+                    errorType: 'MigrationError',
+                },
+                createdAt: new Date('2025-10-18T10:29:55Z'),
+                updatedAt: new Date('2025-10-18T10:30:00Z'),
+            };
+
+            mockProcessRepository.findById.mockResolvedValue(mockProcess);
+
+            const result = await useCase.execute({ processId: 'process-789' });
+
+            expect(result.state).toBe('FAILED');
+            expect(result.results.error).toContain('Migration failed');
+        });
+
+        it('should handle empty context and results', async () => {
+            const mockProcess = {
+                id: 'process-999',
+                type: 'DATABASE_MIGRATION',
+                state: 'INITIALIZING',
+                createdAt: new Date(),
+                updatedAt: new Date(),
+            };
+
+            mockProcessRepository.findById.mockResolvedValue(mockProcess);
+
+            const result = await useCase.execute({ processId: 'process-999' });
+
+            expect(result.context).toEqual({});
+            expect(result.results).toEqual({});
+        });
+
+        it('should throw NotFoundError if process does not exist', async () => {
+            mockProcessRepository.findById.mockResolvedValue(null);
+
+            await expect(
+                useCase.execute({ processId: 'nonexistent-123' })
+            ).rejects.toThrow(NotFoundError);
+
+            await expect(
+                useCase.execute({ processId: 'nonexistent-123' })
+            ).rejects.toThrow('Migration process not found: nonexistent-123');
+        });
+
+        it('should throw error if process is not a migration process', async () => {
+            const nonMigrationProcess = {
+                id: 'process-999',
+                type: 'CRM_SYNC', // Not a migration
+                state: 'RUNNING',
+                context: {},
+                results: {},
+                createdAt: new Date(),
+                updatedAt: new Date(),
+            };
+
+            mockProcessRepository.findById.mockResolvedValue(nonMigrationProcess);
+
+            await expect(
+                useCase.execute({ processId: 'process-999' })
+            ).rejects.toThrow('Process process-999 is not a migration process (type: CRM_SYNC)');
+        });
+
+        it('should throw ValidationError if processId is missing', async () => {
+            await expect(
+                useCase.execute({})
+            ).rejects.toThrow(ValidationError);
+
+            await expect(
+                useCase.execute({})
+            ).rejects.toThrow('processId is required');
+        });
+
+        it('should throw ValidationError if processId is not a string', async () => {
+            await expect(
+                useCase.execute({ processId: 123 })
+            ).rejects.toThrow('processId must be a string');
+        });
+
+        it('should handle repository errors', async () => {
+            mockProcessRepository.findById.mockRejectedValue(new Error('Database connection failed'));
+
+            await expect(
+                useCase.execute({ processId: 'process-123' })
+            ).rejects.toThrow('Database connection failed');
+        });
+    });
+
+    describe('NotFoundError', () => {
+        it('should have correct properties', () => {
+            const error = new NotFoundError('test message');
+            expect(error.name).toBe('NotFoundError');
+            expect(error.message).toBe('test message');
+            expect(error.statusCode).toBe(404);
+            expect(error instanceof Error).toBe(true);
+        });
+    });
+
+    describe('ValidationError', () => {
+        it('should have correct name', () => {
+            const error = new ValidationError('test message');
+            expect(error.name).toBe('ValidationError');
+            expect(error.message).toBe('test message');
+            expect(error instanceof Error).toBe(true);
+        });
+    });
+});
+
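The implementation under test is not part of this diff. As a reading aid, the following is a minimal sketch of a GetMigrationStatusUseCase that would satisfy the assertions above; the class body and error shapes are inferred from the test expectations and are assumptions, not the published source.

// Hypothetical sketch reconstructed from the test expectations above; not the published implementation.
class ValidationError extends Error {
    constructor(message) {
        super(message);
        this.name = 'ValidationError';
    }
}

class NotFoundError extends Error {
    constructor(message) {
        super(message);
        this.name = 'NotFoundError';
        this.statusCode = 404;
    }
}

class GetMigrationStatusUseCase {
    constructor({ processRepository } = {}) {
        if (!processRepository) {
            throw new Error('processRepository dependency is required');
        }
        this.processRepository = processRepository;
    }

    async execute({ processId } = {}) {
        if (!processId) throw new ValidationError('processId is required');
        if (typeof processId !== 'string') throw new ValidationError('processId must be a string');

        const processRecord = await this.processRepository.findById(processId);
        if (!processRecord) {
            throw new NotFoundError(`Migration process not found: ${processId}`);
        }
        if (processRecord.type !== 'DATABASE_MIGRATION') {
            throw new Error(
                `Process ${processRecord.id} is not a migration process (type: ${processRecord.type})`
            );
        }

        // Default missing context/results to empty objects, as the tests expect.
        return {
            processId: processRecord.id,
            type: processRecord.type,
            state: processRecord.state,
            context: processRecord.context || {},
            results: processRecord.results || {},
            createdAt: processRecord.createdAt,
            updatedAt: processRecord.updatedAt,
        };
    }
}

module.exports = { GetMigrationStatusUseCase, ValidationError, NotFoundError };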
package/database/use-cases/trigger-database-migration-use-case.js
CHANGED

@@ -49,7 +49,7 @@ class TriggerDatabaseMigrationUseCase {
         this._validateParams({ userId, dbType, stage });
 
         // Create Process record for tracking
-        const
+        const migrationProcess = await this.processRepository.create({
             userId,
             integrationId: null, // System operation, not tied to integration
             name: 'database-migration',
@@ -63,7 +63,7 @@ class TriggerDatabaseMigrationUseCase {
             results: {},
         });
 
-        console.log(`Created migration process: ${
+        console.log(`Created migration process: ${migrationProcess.id}`);
 
         // Get queue URL from environment
         const queueUrl = process.env.DB_MIGRATION_QUEUE_URL;
@@ -78,20 +78,20 @@ class TriggerDatabaseMigrationUseCase {
         try {
             await this.queuerUtil.send(
                 {
-                    processId:
+                    processId: migrationProcess.id,
                     dbType,
                     stage,
                 },
                 queueUrl
            );
 
-            console.log(`Sent migration job to queue for process: ${
+            console.log(`Sent migration job to queue for process: ${migrationProcess.id}`);
         } catch (error) {
             console.error(`Failed to send migration to queue:`, error);
-
+
             // Update process state to FAILED
             await this.processRepository.updateState(
-
+                migrationProcess.id,
                 'FAILED',
                 {
                     error: 'Failed to queue migration job',
@@ -107,9 +107,9 @@ class TriggerDatabaseMigrationUseCase {
         // Return process info immediately (don't wait for migration completion)
         return {
             success: true,
-            processId:
-            state:
-            statusUrl: `/db-migrate/${
+            processId: migrationProcess.id,
+            state: migrationProcess.state,
+            statusUrl: `/db-migrate/${migrationProcess.id}`,
             message: 'Database migration queued successfully',
         };
     }
package/database/use-cases/trigger-database-migration-use-case.test.js
ADDED

@@ -0,0 +1,265 @@
+/**
+ * Tests for TriggerDatabaseMigrationUseCase
+ */
+
+const {
+    TriggerDatabaseMigrationUseCase,
+    ValidationError,
+} = require('./trigger-database-migration-use-case');
+
+describe('TriggerDatabaseMigrationUseCase', () => {
+    let useCase;
+    let mockProcessRepository;
+    let mockQueuerUtil;
+    let originalEnv;
+
+    beforeEach(() => {
+        // Save original environment value
+        originalEnv = process.env.DB_MIGRATION_QUEUE_URL;
+
+        // Set test environment
+        process.env.DB_MIGRATION_QUEUE_URL = 'https://sqs.us-east-1.amazonaws.com/123456789/test-queue';
+
+        // Create mock repository
+        mockProcessRepository = {
+            create: jest.fn().mockResolvedValue({
+                id: 'process-123',
+                userId: 'user-456',
+                integrationId: null,
+                name: 'database-migration',
+                type: 'DATABASE_MIGRATION',
+                state: 'INITIALIZING',
+                context: {
+                    dbType: 'postgresql',
+                    stage: 'production',
+                },
+                results: {},
+                createdAt: new Date(),
+                updatedAt: new Date(),
+            }),
+            updateState: jest.fn().mockResolvedValue(true),
+        };
+
+        // Create mock queuer util
+        mockQueuerUtil = {
+            send: jest.fn().mockResolvedValue({ MessageId: 'msg-123' }),
+        };
+
+        // Create use case with mocks
+        useCase = new TriggerDatabaseMigrationUseCase({
+            processRepository: mockProcessRepository,
+            queuerUtil: mockQueuerUtil,
+        });
+    });
+
+    afterEach(() => {
+        // Restore original environment
+        if (originalEnv !== undefined) {
+            process.env.DB_MIGRATION_QUEUE_URL = originalEnv;
+        } else {
+            delete process.env.DB_MIGRATION_QUEUE_URL;
+        }
+        jest.clearAllMocks();
+    });
+
+    describe('constructor', () => {
+        it('should throw error if processRepository not provided', () => {
+            expect(() => {
+                new TriggerDatabaseMigrationUseCase({});
+            }).toThrow('processRepository dependency is required');
+        });
+
+        it('should accept custom queuerUtil', () => {
+            const customQueuer = { send: jest.fn() };
+            const instance = new TriggerDatabaseMigrationUseCase({
+                processRepository: mockProcessRepository,
+                queuerUtil: customQueuer,
+            });
+
+            expect(instance.queuerUtil).toBe(customQueuer);
+        });
+    });
+
+    describe('execute', () => {
+        it('should create process and queue migration job', async () => {
+            const result = await useCase.execute({
+                userId: 'user-456',
+                dbType: 'postgresql',
+                stage: 'production',
+            });
+
+            // Verify process creation
+            expect(mockProcessRepository.create).toHaveBeenCalledWith({
+                userId: 'user-456',
+                integrationId: null,
+                name: 'database-migration',
+                type: 'DATABASE_MIGRATION',
+                state: 'INITIALIZING',
+                context: expect.objectContaining({
+                    dbType: 'postgresql',
+                    stage: 'production',
+                }),
+                results: {},
+            });
+
+            // Verify SQS message sent
+            expect(mockQueuerUtil.send).toHaveBeenCalledWith(
+                {
+                    processId: 'process-123',
+                    dbType: 'postgresql',
+                    stage: 'production',
+                },
+                'https://sqs.us-east-1.amazonaws.com/123456789/test-queue'
+            );
+
+            // Verify response
+            expect(result).toEqual({
+                success: true,
+                processId: 'process-123',
+                state: 'INITIALIZING',
+                statusUrl: '/db-migrate/process-123',
+                message: 'Database migration queued successfully',
+            });
+        });
+
+        it('should handle MongoDB dbType', async () => {
+            await useCase.execute({
+                userId: 'user-456',
+                dbType: 'mongodb',
+                stage: 'dev',
+            });
+
+            expect(mockProcessRepository.create).toHaveBeenCalledWith(
+                expect.objectContaining({
+                    context: expect.objectContaining({
+                        dbType: 'mongodb',
+                        stage: 'dev',
+                    }),
+                })
+            );
+        });
+
+        it('should throw ValidationError if userId is missing', async () => {
+            await expect(
+                useCase.execute({
+                    dbType: 'postgresql',
+                    stage: 'production',
+                })
+            ).rejects.toThrow(ValidationError);
+
+            await expect(
+                useCase.execute({
+                    dbType: 'postgresql',
+                    stage: 'production',
+                })
+            ).rejects.toThrow('userId is required');
+        });
+
+        it('should throw ValidationError if userId is not a string', async () => {
+            await expect(
+                useCase.execute({
+                    userId: 123,
+                    dbType: 'postgresql',
+                    stage: 'production',
+                })
+            ).rejects.toThrow('userId must be a string');
+        });
+
+        it('should throw ValidationError if dbType is missing', async () => {
+            await expect(
+                useCase.execute({
+                    userId: 'user-456',
+                    stage: 'production',
+                })
+            ).rejects.toThrow('dbType is required');
+        });
+
+        it('should throw ValidationError if dbType is invalid', async () => {
+            await expect(
+                useCase.execute({
+                    userId: 'user-456',
+                    dbType: 'mysql',
+                    stage: 'production',
+                })
+            ).rejects.toThrow('Invalid dbType: "mysql"');
+        });
+
+        it('should throw ValidationError if stage is missing', async () => {
+            await expect(
+                useCase.execute({
+                    userId: 'user-456',
+                    dbType: 'postgresql',
+                })
+            ).rejects.toThrow('stage is required');
+        });
+
+        it('should throw ValidationError if stage is not a string', async () => {
+            await expect(
+                useCase.execute({
+                    userId: 'user-456',
+                    dbType: 'postgresql',
+                    stage: 123,
+                })
+            ).rejects.toThrow('stage must be a string');
+        });
+
+        it('should throw error if DB_MIGRATION_QUEUE_URL not set', async () => {
+            delete process.env.DB_MIGRATION_QUEUE_URL;
+
+            await expect(
+                useCase.execute({
+                    userId: 'user-456',
+                    dbType: 'postgresql',
+                    stage: 'production',
+                })
+            ).rejects.toThrow('DB_MIGRATION_QUEUE_URL environment variable is not set');
+        });
+
+        it('should update process to FAILED if queue send fails', async () => {
+            mockQueuerUtil.send.mockRejectedValue(new Error('SQS unavailable'));
+
+            await expect(
+                useCase.execute({
+                    userId: 'user-456',
+                    dbType: 'postgresql',
+                    stage: 'production',
+                })
+            ).rejects.toThrow('Failed to queue migration: SQS unavailable');
+
+            // Verify process was marked as failed
+            expect(mockProcessRepository.updateState).toHaveBeenCalledWith(
+                'process-123',
+                'FAILED',
+                {
+                    error: 'Failed to queue migration job',
+                    errorDetails: 'SQS unavailable',
+                }
+            );
+        });
+
+        it('should handle process creation failure', async () => {
+            mockProcessRepository.create.mockRejectedValue(new Error('Database error'));
+
+            await expect(
+                useCase.execute({
+                    userId: 'user-456',
+                    dbType: 'postgresql',
+                    stage: 'production',
+                })
+            ).rejects.toThrow('Database error');
+
+            // Should not attempt to send to queue if process creation fails
+            expect(mockQueuerUtil.send).not.toHaveBeenCalled();
+        });
+    });
+
+    describe('ValidationError', () => {
+        it('should have correct name', () => {
+            const error = new ValidationError('test message');
+            expect(error.name).toBe('ValidationError');
+            expect(error.message).toBe('test message');
+            expect(error instanceof Error).toBe(true);
+        });
+    });
+});
+
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@friggframework/core",
   "prettier": "@friggframework/prettier-config",
-  "version": "2.0.0--canary.461.
+  "version": "2.0.0--canary.461.3a5f072.0",
   "dependencies": {
     "@aws-sdk/client-apigatewaymanagementapi": "^3.588.0",
     "@aws-sdk/client-kms": "^3.588.0",
@@ -37,9 +37,9 @@
     }
   },
   "devDependencies": {
-    "@friggframework/eslint-config": "2.0.0--canary.461.
-    "@friggframework/prettier-config": "2.0.0--canary.461.
-    "@friggframework/test": "2.0.0--canary.461.
+    "@friggframework/eslint-config": "2.0.0--canary.461.3a5f072.0",
+    "@friggframework/prettier-config": "2.0.0--canary.461.3a5f072.0",
+    "@friggframework/test": "2.0.0--canary.461.3a5f072.0",
     "@prisma/client": "^6.17.0",
     "@types/lodash": "4.17.15",
     "@typescript-eslint/eslint-plugin": "^8.0.0",
@@ -79,5 +79,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "3a5f072fb4c5152089a3f2a6a389c00d9cfd5445"
 }
package/handlers/workers/db-migration.test.js
DELETED

@@ -1,437 +0,0 @@
-/**
- * Unit tests for Database Migration Lambda Handler
- *
- * Tests the db-migration Lambda handler which runs Prisma migrations
- * from within the VPC for CI/CD pipelines.
- */
-
-// Mock the use case module before requiring the handler
-jest.mock('../../database/use-cases/run-database-migration-use-case', () => {
-    const mockExecute = jest.fn();
-    return {
-        RunDatabaseMigrationUseCase: jest.fn().mockImplementation(() => ({
-            execute: mockExecute,
-        })),
-        MigrationError: class MigrationError extends Error {
-            constructor(message, context) {
-                super(message);
-                this.name = 'MigrationError';
-                this.context = context;
-            }
-        },
-        ValidationError: class ValidationError extends Error {
-            constructor(message) {
-                super(message);
-                this.name = 'ValidationError';
-            }
-        },
-        __mockExecute: mockExecute, // Expose for test access
-    };
-});
-
-const { handler } = require('./db-migration');
-const {
-    RunDatabaseMigrationUseCase,
-    MigrationError,
-    ValidationError,
-    __mockExecute: mockExecute,
-} = require('../../database/use-cases/run-database-migration-use-case');
-
-describe('db-migration Lambda Handler', () => {
-    let originalEnv;
-    let mockContext;
-
-    beforeEach(() => {
-        // Save original environment
-        originalEnv = { ...process.env };
-
-        // Setup mock context
-        mockContext = {
-            requestId: 'test-request-id',
-            functionName: 'test-function',
-            getRemainingTimeInMillis: jest.fn(() => 300000), // 5 minutes
-        };
-
-        // Reset all mocks
-        jest.clearAllMocks();
-
-        // Set default environment variables
-        process.env.DATABASE_URL = 'postgresql://user:pass@localhost:5432/test_db';
-        process.env.DB_TYPE = 'postgresql';
-        process.env.STAGE = 'production';
-
-        // Setup default mock implementation - successful migration
-        mockExecute.mockResolvedValue({
-            success: true,
-            dbType: 'postgresql',
-            stage: 'production',
-            command: 'deploy',
-            message: 'Database migration completed successfully',
-        });
-    });
-
-    afterEach(() => {
-        // Restore original environment
-        process.env = originalEnv;
-    });
-
-    describe('Successful Migrations', () => {
-        test('should successfully run PostgreSQL migration in production', async () => {
-            process.env.STAGE = 'production';
-            mockExecute.mockResolvedValue({
-                success: true,
-                dbType: 'postgresql',
-                stage: 'production',
-                command: 'deploy',
-                message: 'Database migration completed successfully',
-            });
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(200);
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(true);
-            expect(body.message).toBe('Database migration completed successfully');
-            expect(body.dbType).toBe('postgresql');
-            expect(body.stage).toBe('production');
-            expect(body.migrationCommand).toBe('deploy');
-
-            // Verify use case was called with correct parameters
-            expect(mockExecute).toHaveBeenCalledWith({
-                dbType: 'postgresql',
-                stage: 'production',
-                verbose: true,
-            });
-        });
-
-        test('should successfully run PostgreSQL migration in development', async () => {
-            process.env.STAGE = 'dev';
-            mockExecute.mockResolvedValue({
-                success: true,
-                dbType: 'postgresql',
-                stage: 'dev',
-                command: 'dev',
-                message: 'Database migration completed successfully',
-            });
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(200);
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(true);
-            expect(body.migrationCommand).toBe('dev');
-
-            expect(mockExecute).toHaveBeenCalledWith({
-                dbType: 'postgresql',
-                stage: 'dev',
-                verbose: true,
-            });
-        });
-
-        test('should successfully run MongoDB migration', async () => {
-            process.env.DB_TYPE = 'mongodb';
-            process.env.DATABASE_URL = 'mongodb://localhost:27017/test_db';
-            mockExecute.mockResolvedValue({
-                success: true,
-                dbType: 'mongodb',
-                stage: 'production',
-                command: 'db push',
-                message: 'Database migration completed successfully',
-            });
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(200);
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(true);
-            expect(body.dbType).toBe('mongodb');
-            expect(body.migrationCommand).toBe('db push');
-
-            expect(mockExecute).toHaveBeenCalledWith({
-                dbType: 'mongodb',
-                stage: 'production',
-                verbose: true,
-            });
-        });
-
-        test('should use default values when environment variables are missing', async () => {
-            delete process.env.DB_TYPE;
-            delete process.env.STAGE;
-            mockExecute.mockResolvedValue({
-                success: true,
-                dbType: 'postgresql',
-                stage: 'production',
-                command: 'deploy',
-                message: 'Database migration completed successfully',
-            });
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(200);
-            const body = JSON.parse(result.body);
-            expect(body.dbType).toBe('postgresql'); // Default
-            expect(body.stage).toBe('production'); // Default
-
-            expect(mockExecute).toHaveBeenCalledWith({
-                dbType: 'postgresql', // Default
-                stage: 'production', // Default
-                verbose: true,
-            });
-        });
-    });
-
-    describe('Error Handling', () => {
-        test('should fail when DATABASE_URL is not set', async () => {
-            delete process.env.DATABASE_URL;
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(500);
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(false);
-            expect(body.error).toContain('DATABASE_URL environment variable is not set');
-
-            // Verify use case was not called
-            expect(mockExecute).not.toHaveBeenCalled();
-        });
-
-        test('should fail when Prisma generate fails (MigrationError)', async () => {
-            mockExecute.mockRejectedValue(
-                new MigrationError('Failed to generate Prisma client: Schema file not found', {
-                    dbType: 'postgresql',
-                    stage: 'production',
-                    step: 'generate',
-                })
-            );
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(500);
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(false);
-            expect(body.error).toContain('Failed to generate Prisma client');
-            expect(body.error).toContain('Schema file not found');
-            expect(body.errorType).toBe('MigrationError');
-        });
-
-        test('should fail when PostgreSQL migration fails', async () => {
-            mockExecute.mockRejectedValue(
-                new MigrationError('PostgreSQL migration failed: Migration conflict detected', {
-                    dbType: 'postgresql',
-                    stage: 'production',
-                    command: 'deploy',
-                    step: 'migrate',
-                })
-            );
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(500);
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(false);
-            expect(body.error).toContain('PostgreSQL migration failed');
-            expect(body.error).toContain('Migration conflict detected');
-            expect(body.errorType).toBe('MigrationError');
-        });
-
-        test('should fail when MongoDB push fails', async () => {
-            process.env.DB_TYPE = 'mongodb';
-            mockExecute.mockRejectedValue(
-                new MigrationError('MongoDB push failed: Connection timeout', {
-                    dbType: 'mongodb',
-                    stage: 'production',
-                    command: 'db push',
-                    step: 'push',
-                })
-            );
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(500);
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(false);
-            expect(body.error).toContain('MongoDB push failed');
-            expect(body.error).toContain('Connection timeout');
-            expect(body.errorType).toBe('MigrationError');
-        });
-
-        test('should fail for unsupported database type (ValidationError)', async () => {
-            process.env.DB_TYPE = 'mysql';
-            mockExecute.mockRejectedValue(
-                new ValidationError("Unsupported database type: mysql. Must be 'postgresql' or 'mongodb'.")
-            );
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(400); // Validation errors return 400
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(false);
-            expect(body.error).toContain('Unsupported database type: mysql');
-            expect(body.errorType).toBe('ValidationError');
-        });
-
-        test('should handle unexpected errors gracefully', async () => {
-            mockExecute.mockRejectedValue(new Error('Unexpected error'));
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(500);
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(false);
-            expect(body.error).toContain('Unexpected error');
-            expect(body.errorType).toBe('Error');
-            expect(body.stack).toBeUndefined(); // Stack NOT included in production (default stage)
-        });
-
-        test('should sanitize error messages containing credentials', async () => {
-            mockExecute.mockRejectedValue(
-                new MigrationError(
-                    'Connection failed to postgresql://user:password@host:5432/db',
-                    { dbType: 'postgresql', stage: 'production' }
-                )
-            );
-
-            const result = await handler({}, mockContext);
-
-            expect(result.statusCode).toBe(500);
-            const body = JSON.parse(result.body);
-            expect(body.success).toBe(false);
-            // Credentials should be sanitized
-            expect(body.error).toContain('postgresql://***:***@***');
-            expect(body.error).not.toContain('user:password');
-        });
-
-        test('should only include stack traces in development stages', async () => {
-            process.env.STAGE = 'dev';
-            mockExecute.mockRejectedValue(new Error('Test error'));
-
-            const result = await handler({}, mockContext);
-
-            const body = JSON.parse(result.body);
-            expect(body.stack).toBeDefined();
-        });
-
-        test('should not include stack traces in production stage', async () => {
-            process.env.STAGE = 'production';
-            mockExecute.mockRejectedValue(new Error('Test error'));
-
-            const result = await handler({}, mockContext);
-
-            const body = JSON.parse(result.body);
-            expect(body.stack).toBeUndefined();
-        });
-    });
-
-    describe('Environment Variable Handling', () => {
-        test('should sanitize DATABASE_URL in logs', async () => {
-            const consoleSpy = jest.spyOn(console, 'log').mockImplementation();
-            process.env.DATABASE_URL = 'postgresql://user:password@very-long-host.amazonaws.com:5432/database';
-
-            await handler({}, mockContext);
-
-            // Verify URL credentials are masked in logs
-            expect(consoleSpy).toHaveBeenCalledWith(
-                expect.stringContaining('postgresql://***:***@')
-            );
-            expect(consoleSpy).not.toHaveBeenCalledWith(
-                expect.stringContaining('user:password')
-            );
-
-            consoleSpy.mockRestore();
-        });
-
-        test('should handle different stage values correctly', async () => {
-            const stages = ['dev', 'test', 'local', 'staging', 'production'];
-
-            for (const stage of stages) {
-                process.env.STAGE = stage;
-                const command = ['dev', 'test', 'local'].includes(stage) ? 'dev' : 'deploy';
-
-                jest.clearAllMocks();
-                mockExecute.mockResolvedValue({
-                    success: true,
-                    dbType: 'postgresql',
-                    stage,
-                    command,
-                    message: 'Database migration completed successfully',
-                });
-
-                const result = await handler({}, mockContext);
-
-                expect(result.statusCode).toBe(200);
-                const body = JSON.parse(result.body);
-                expect(body.stage).toBe(stage);
-                expect(mockExecute).toHaveBeenCalledWith({
-                    dbType: 'postgresql',
-                    stage,
-                    verbose: true,
-                });
-            }
-        });
-    });
-
-    describe('Response Format', () => {
-        test('should return properly formatted success response', async () => {
-            const result = await handler({}, mockContext);
-
-            expect(result).toHaveProperty('statusCode', 200);
-            expect(result).toHaveProperty('body');
-
-            const body = JSON.parse(result.body);
-            expect(body).toHaveProperty('success', true);
-            expect(body).toHaveProperty('message');
-            expect(body).toHaveProperty('dbType');
-            expect(body).toHaveProperty('stage');
-            expect(body).toHaveProperty('migrationCommand');
-            expect(body).toHaveProperty('timestamp');
-
-            // Verify timestamp is valid ISO string
-            expect(() => new Date(body.timestamp)).not.toThrow();
-        });
-
-        test('should return properly formatted error response', async () => {
-            delete process.env.DATABASE_URL;
-
-            const result = await handler({}, mockContext);
-
-            expect(result).toHaveProperty('statusCode', 500);
-            expect(result).toHaveProperty('body');
-
-            const body = JSON.parse(result.body);
-            expect(body).toHaveProperty('success', false);
-            expect(body).toHaveProperty('error');
-            expect(typeof body.error).toBe('string');
-        });
-    });
-
-    describe('Logging', () => {
-        test('should log migration progress', async () => {
-            const consoleSpy = jest.spyOn(console, 'log').mockImplementation();
-
-            await handler({}, mockContext);
-
-            // Verify key log messages
-            expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('Database Migration Lambda Started'));
-            expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('Executing Database Migration'));
-            expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('Migration Summary'));
-
-            consoleSpy.mockRestore();
-        });
-
-        test('should log errors with details', async () => {
-            const consoleSpy = jest.spyOn(console, 'error').mockImplementation();
-            const testError = new MigrationError('Test migration error', {
-                dbType: 'postgresql',
-                stage: 'production',
-            });
-            mockExecute.mockRejectedValue(testError);
-
-            await handler({}, mockContext);
-
-            expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('Migration Failed'));
-            expect(consoleSpy).toHaveBeenCalledWith('Error:', 'MigrationError', 'Test migration error');
-
-            consoleSpy.mockRestore();
-        });
-    });
-});