@friggframework/core 2.0.0--canary.464.f9d3fc0.0 → 2.0.0--canary.454.25d396a.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +28 -0
- package/database/prisma.js +2 -2
- package/database/use-cases/run-database-migration-use-case.js +137 -0
- package/database/use-cases/run-database-migration-use-case.test.js +310 -0
- package/database/utils/prisma-runner.js +313 -0
- package/database/utils/prisma-runner.test.js +486 -0
- package/handlers/routers/integration-webhook-routers.js +2 -2
- package/handlers/workers/db-migration.js +208 -0
- package/handlers/workers/db-migration.test.js +437 -0
- package/package.json +79 -66
- package/prisma-mongodb/schema.prisma +22 -20
- package/prisma-postgresql/schema.prisma +16 -14
package/handlers/workers/db-migration.js

@@ -0,0 +1,208 @@
+/**
+ * Database Migration Lambda Handler
+ *
+ * Lambda function that runs Prisma database migrations from within the VPC,
+ * enabling CI/CD pipelines to migrate databases without requiring public access.
+ *
+ * This handler uses the prisma-runner utilities from @friggframework/core,
+ * ensuring consistency with the `frigg db:setup` command.
+ *
+ * Environment Variables Required:
+ * - DATABASE_URL: PostgreSQL connection string (automatically set from Secrets Manager)
+ * - DB_TYPE: Database type ('postgresql' or 'mongodb')
+ * - STAGE: Deployment stage (determines migration command: 'dev' or 'deploy')
+ *
+ * Invocation:
+ *   aws lambda invoke \
+ *     --function-name my-app-production-dbMigrate \
+ *     --region us-east-1 \
+ *     response.json
+ *
+ * Success Response:
+ *   {
+ *     "statusCode": 200,
+ *     "body": {
+ *       "success": true,
+ *       "message": "Database migration completed successfully",
+ *       "dbType": "postgresql",
+ *       "stage": "production",
+ *       "migrationCommand": "deploy"
+ *     }
+ *   }
+ *
+ * Error Response:
+ *   {
+ *     "statusCode": 500,
+ *     "body": {
+ *       "success": false,
+ *       "error": "Migration failed: ...",
+ *       "stack": "Error: ..."
+ *     }
+ *   }
+ */
+
+const {
+    RunDatabaseMigrationUseCase,
+    MigrationError,
+    ValidationError,
+} = require('../../database/use-cases/run-database-migration-use-case');
+
+// Inject prisma-runner as dependency
+const prismaRunner = require('../../database/utils/prisma-runner');
+
+/**
+ * Sanitizes error messages to prevent credential leaks
+ * @param {string} errorMessage - Error message that might contain credentials
+ * @returns {string} Sanitized error message
+ */
+function sanitizeError(errorMessage) {
+    if (!errorMessage) return 'Unknown error';
+
+    return String(errorMessage)
+        // Remove PostgreSQL connection strings
+        .replace(/postgresql:\/\/[^@\s]+@[^\s/]+/gi, 'postgresql://***:***@***')
+        // Remove MongoDB connection strings
+        .replace(/mongodb(\+srv)?:\/\/[^@\s]+@[^\s/]+/gi, 'mongodb$1://***:***@***')
+        // Remove password parameters
+        .replace(/password[=:]\s*[^\s,;)]+/gi, 'password=***')
+        // Remove API keys
+        .replace(/apikey[=:]\s*[^\s,;)]+/gi, 'apikey=***')
+        .replace(/api[_-]?key[=:]\s*[^\s,;)]+/gi, 'api_key=***')
+        // Remove tokens
+        .replace(/token[=:]\s*[^\s,;)]+/gi, 'token=***')
+        .replace(/bearer\s+[^\s,;)]+/gi, 'bearer ***');
+}
+
+/**
+ * Sanitizes DATABASE_URL for safe logging
+ * @param {string} url - Database URL
+ * @returns {string} Sanitized URL
+ */
+function sanitizeDatabaseUrl(url) {
+    if (!url) return '';
+
+    // Replace credentials in connection string
+    return url.replace(/(:\/\/)([^:]+):([^@]+)@/, '$1***:***@');
+}
+
+/**
+ * Lambda handler entry point
+ * @param {Object} event - Lambda event (not used, migrations don't need input)
+ * @param {Object} context - Lambda context (contains AWS request ID, timeout info)
+ * @returns {Promise<Object>} Response with statusCode and body
+ */
+exports.handler = async (event, context) => {
+    console.log('========================================');
+    console.log('Database Migration Lambda Started');
+    console.log('========================================');
+    console.log('Event:', JSON.stringify(event, null, 2));
+    console.log('Context:', JSON.stringify({
+        requestId: context.requestId,
+        functionName: context.functionName,
+        remainingTimeInMillis: context.getRemainingTimeInMillis(),
+    }, null, 2));
+
+    // Get environment variables
+    const databaseUrl = process.env.DATABASE_URL;
+    const dbType = process.env.DB_TYPE || 'postgresql';
+    const stage = process.env.STAGE || 'production';
+
+    try {
+        // Validate DATABASE_URL is set
+        if (!databaseUrl) {
+            const error = 'DATABASE_URL environment variable is not set';
+            console.error('❌ Validation failed:', error);
+            return {
+                statusCode: 500,
+                body: JSON.stringify({
+                    success: false,
+                    error,
+                }),
+            };
+        }
+
+        console.log('✓ Environment validated');
+        console.log(`  Database Type: ${dbType}`);
+        console.log(`  Stage: ${stage}`);
+        console.log(`  Database URL: ${sanitizeDatabaseUrl(databaseUrl)}`);
+
+        // Create use case with dependencies (Dependency Injection)
+        const runDatabaseMigrationUseCase = new RunDatabaseMigrationUseCase({
+            prismaRunner,
+        });
+
+        console.log('\n========================================');
+        console.log('Executing Database Migration');
+        console.log('========================================');
+
+        // Execute use case (business logic layer)
+        const result = await runDatabaseMigrationUseCase.execute({
+            dbType,
+            stage,
+            verbose: true, // Enable verbose output for Lambda CloudWatch logs
+        });
+
+        console.log('✓ Database migration completed successfully');
+        console.log('\n========================================');
+        console.log('Migration Summary');
+        console.log('========================================');
+        console.log(`  Status: Success`);
+        console.log(`  Database: ${result.dbType}`);
+        console.log(`  Stage: ${result.stage}`);
+        console.log(`  Command: ${result.command}`);
+        console.log('========================================');
+
+        // Return success response (adapter layer - HTTP mapping)
+        return {
+            statusCode: 200,
+            body: JSON.stringify({
+                success: true,
+                message: result.message,
+                dbType: result.dbType,
+                stage: result.stage,
+                migrationCommand: result.command,
+                timestamp: new Date().toISOString(),
+            }),
+        };
+
+    } catch (error) {
+        console.error('\n========================================');
+        console.error('Migration Failed');
+        console.error('========================================');
+        console.error('Error:', error.name, error.message);
+
+        // Log full stack trace to CloudWatch (only visible to developers)
+        if (error.stack) {
+            console.error('Stack:', error.stack);
+        }
+
+        // Log context if available (from MigrationError)
+        if (error.context) {
+            console.error('Context:', JSON.stringify(error.context, null, 2));
+        }
+
+        // Map domain errors to HTTP status codes (adapter layer)
+        let statusCode = 500;
+        let errorMessage = error.message || 'Unknown error occurred';
+
+        if (error instanceof ValidationError) {
+            statusCode = 400; // Bad Request for validation errors
+        } else if (error instanceof MigrationError) {
+            statusCode = 500; // Internal Server Error for migration failures
+        }
+
+        // Sanitize error message before returning
+        const sanitizedError = sanitizeError(errorMessage);
+
+        return {
+            statusCode,
+            body: JSON.stringify({
+                success: false,
+                error: sanitizedError,
+                errorType: error.name || 'Error',
+                // Only include stack traces in development environments
+                ...(stage === 'dev' || stage === 'local' || stage === 'test' ? { stack: error.stack } : {}),
+            }),
+        };
+    }
+};
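The handler always resolves with a `{ statusCode, body }` envelope rather than throwing, so a caller has to unwrap the payload and parse `body` itself. As a rough sketch of what that looks like from a CI script (not part of this package), assuming the AWS SDK v3 Lambda client is available and reusing the placeholder function name and region from the docblock above:

```js
// Illustrative sketch only (not shipped in this package): invoke the dbMigrate
// Lambda from CI and unwrap the { statusCode, body } envelope it returns.
// Function name and region below are the placeholders from the handler's docblock.
const { LambdaClient, InvokeCommand } = require('@aws-sdk/client-lambda');

async function runRemoteMigration() {
    const lambda = new LambdaClient({ region: 'us-east-1' });
    const response = await lambda.send(
        new InvokeCommand({ FunctionName: 'my-app-production-dbMigrate' })
    );

    // The handler resolves even on failure, so errors arrive as a 400/500 envelope.
    const envelope = JSON.parse(Buffer.from(response.Payload).toString('utf8'));
    const body = JSON.parse(envelope.body);

    if (envelope.statusCode !== 200 || !body.success) {
        throw new Error(`Migration failed (${body.errorType || 'Error'}): ${body.error}`);
    }

    console.log(`Migration OK: ${body.dbType} / ${body.stage} (command: ${body.migrationCommand})`);
}

runRemoteMigration().catch((err) => {
    console.error(err);
    process.exit(1);
});
```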
package/handlers/workers/db-migration.test.js

@@ -0,0 +1,437 @@
+/**
+ * Unit tests for Database Migration Lambda Handler
+ *
+ * Tests the db-migration Lambda handler which runs Prisma migrations
+ * from within the VPC for CI/CD pipelines.
+ */
+
+// Mock the use case module before requiring the handler
+jest.mock('../../database/use-cases/run-database-migration-use-case', () => {
+    const mockExecute = jest.fn();
+    return {
+        RunDatabaseMigrationUseCase: jest.fn().mockImplementation(() => ({
+            execute: mockExecute,
+        })),
+        MigrationError: class MigrationError extends Error {
+            constructor(message, context) {
+                super(message);
+                this.name = 'MigrationError';
+                this.context = context;
+            }
+        },
+        ValidationError: class ValidationError extends Error {
+            constructor(message) {
+                super(message);
+                this.name = 'ValidationError';
+            }
+        },
+        __mockExecute: mockExecute, // Expose for test access
+    };
+});
+
+const { handler } = require('./db-migration');
+const {
+    RunDatabaseMigrationUseCase,
+    MigrationError,
+    ValidationError,
+    __mockExecute: mockExecute,
+} = require('../../database/use-cases/run-database-migration-use-case');
+
+describe('db-migration Lambda Handler', () => {
+    let originalEnv;
+    let mockContext;
+
+    beforeEach(() => {
+        // Save original environment
+        originalEnv = { ...process.env };
+
+        // Setup mock context
+        mockContext = {
+            requestId: 'test-request-id',
+            functionName: 'test-function',
+            getRemainingTimeInMillis: jest.fn(() => 300000), // 5 minutes
+        };
+
+        // Reset all mocks
+        jest.clearAllMocks();
+
+        // Set default environment variables
+        process.env.DATABASE_URL = 'postgresql://user:pass@localhost:5432/test_db';
+        process.env.DB_TYPE = 'postgresql';
+        process.env.STAGE = 'production';
+
+        // Setup default mock implementation - successful migration
+        mockExecute.mockResolvedValue({
+            success: true,
+            dbType: 'postgresql',
+            stage: 'production',
+            command: 'deploy',
+            message: 'Database migration completed successfully',
+        });
+    });
+
+    afterEach(() => {
+        // Restore original environment
+        process.env = originalEnv;
+    });
+
+    describe('Successful Migrations', () => {
+        test('should successfully run PostgreSQL migration in production', async () => {
+            process.env.STAGE = 'production';
+            mockExecute.mockResolvedValue({
+                success: true,
+                dbType: 'postgresql',
+                stage: 'production',
+                command: 'deploy',
+                message: 'Database migration completed successfully',
+            });
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(200);
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(true);
+            expect(body.message).toBe('Database migration completed successfully');
+            expect(body.dbType).toBe('postgresql');
+            expect(body.stage).toBe('production');
+            expect(body.migrationCommand).toBe('deploy');
+
+            // Verify use case was called with correct parameters
+            expect(mockExecute).toHaveBeenCalledWith({
+                dbType: 'postgresql',
+                stage: 'production',
+                verbose: true,
+            });
+        });
+
+        test('should successfully run PostgreSQL migration in development', async () => {
+            process.env.STAGE = 'dev';
+            mockExecute.mockResolvedValue({
+                success: true,
+                dbType: 'postgresql',
+                stage: 'dev',
+                command: 'dev',
+                message: 'Database migration completed successfully',
+            });
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(200);
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(true);
+            expect(body.migrationCommand).toBe('dev');
+
+            expect(mockExecute).toHaveBeenCalledWith({
+                dbType: 'postgresql',
+                stage: 'dev',
+                verbose: true,
+            });
+        });
+
+        test('should successfully run MongoDB migration', async () => {
+            process.env.DB_TYPE = 'mongodb';
+            process.env.DATABASE_URL = 'mongodb://localhost:27017/test_db';
+            mockExecute.mockResolvedValue({
+                success: true,
+                dbType: 'mongodb',
+                stage: 'production',
+                command: 'db push',
+                message: 'Database migration completed successfully',
+            });
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(200);
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(true);
+            expect(body.dbType).toBe('mongodb');
+            expect(body.migrationCommand).toBe('db push');
+
+            expect(mockExecute).toHaveBeenCalledWith({
+                dbType: 'mongodb',
+                stage: 'production',
+                verbose: true,
+            });
+        });
+
+        test('should use default values when environment variables are missing', async () => {
+            delete process.env.DB_TYPE;
+            delete process.env.STAGE;
+            mockExecute.mockResolvedValue({
+                success: true,
+                dbType: 'postgresql',
+                stage: 'production',
+                command: 'deploy',
+                message: 'Database migration completed successfully',
+            });
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(200);
+            const body = JSON.parse(result.body);
+            expect(body.dbType).toBe('postgresql'); // Default
+            expect(body.stage).toBe('production'); // Default
+
+            expect(mockExecute).toHaveBeenCalledWith({
+                dbType: 'postgresql', // Default
+                stage: 'production', // Default
+                verbose: true,
+            });
+        });
+    });
+
+    describe('Error Handling', () => {
+        test('should fail when DATABASE_URL is not set', async () => {
+            delete process.env.DATABASE_URL;
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(500);
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(false);
+            expect(body.error).toContain('DATABASE_URL environment variable is not set');
+
+            // Verify use case was not called
+            expect(mockExecute).not.toHaveBeenCalled();
+        });
+
+        test('should fail when Prisma generate fails (MigrationError)', async () => {
+            mockExecute.mockRejectedValue(
+                new MigrationError('Failed to generate Prisma client: Schema file not found', {
+                    dbType: 'postgresql',
+                    stage: 'production',
+                    step: 'generate',
+                })
+            );
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(500);
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(false);
+            expect(body.error).toContain('Failed to generate Prisma client');
+            expect(body.error).toContain('Schema file not found');
+            expect(body.errorType).toBe('MigrationError');
+        });
+
+        test('should fail when PostgreSQL migration fails', async () => {
+            mockExecute.mockRejectedValue(
+                new MigrationError('PostgreSQL migration failed: Migration conflict detected', {
+                    dbType: 'postgresql',
+                    stage: 'production',
+                    command: 'deploy',
+                    step: 'migrate',
+                })
+            );
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(500);
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(false);
+            expect(body.error).toContain('PostgreSQL migration failed');
+            expect(body.error).toContain('Migration conflict detected');
+            expect(body.errorType).toBe('MigrationError');
+        });
+
+        test('should fail when MongoDB push fails', async () => {
+            process.env.DB_TYPE = 'mongodb';
+            mockExecute.mockRejectedValue(
+                new MigrationError('MongoDB push failed: Connection timeout', {
+                    dbType: 'mongodb',
+                    stage: 'production',
+                    command: 'db push',
+                    step: 'push',
+                })
+            );
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(500);
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(false);
+            expect(body.error).toContain('MongoDB push failed');
+            expect(body.error).toContain('Connection timeout');
+            expect(body.errorType).toBe('MigrationError');
+        });
+
+        test('should fail for unsupported database type (ValidationError)', async () => {
+            process.env.DB_TYPE = 'mysql';
+            mockExecute.mockRejectedValue(
+                new ValidationError("Unsupported database type: mysql. Must be 'postgresql' or 'mongodb'.")
+            );
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(400); // Validation errors return 400
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(false);
+            expect(body.error).toContain('Unsupported database type: mysql');
+            expect(body.errorType).toBe('ValidationError');
+        });
+
+        test('should handle unexpected errors gracefully', async () => {
+            mockExecute.mockRejectedValue(new Error('Unexpected error'));
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(500);
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(false);
+            expect(body.error).toContain('Unexpected error');
+            expect(body.errorType).toBe('Error');
+            expect(body.stack).toBeUndefined(); // Stack NOT included in production (default stage)
+        });
+
+        test('should sanitize error messages containing credentials', async () => {
+            mockExecute.mockRejectedValue(
+                new MigrationError(
+                    'Connection failed to postgresql://user:password@host:5432/db',
+                    { dbType: 'postgresql', stage: 'production' }
+                )
+            );
+
+            const result = await handler({}, mockContext);
+
+            expect(result.statusCode).toBe(500);
+            const body = JSON.parse(result.body);
+            expect(body.success).toBe(false);
+            // Credentials should be sanitized
+            expect(body.error).toContain('postgresql://***:***@***');
+            expect(body.error).not.toContain('user:password');
+        });
+
+        test('should only include stack traces in development stages', async () => {
+            process.env.STAGE = 'dev';
+            mockExecute.mockRejectedValue(new Error('Test error'));
+
+            const result = await handler({}, mockContext);
+
+            const body = JSON.parse(result.body);
+            expect(body.stack).toBeDefined();
+        });
+
+        test('should not include stack traces in production stage', async () => {
+            process.env.STAGE = 'production';
+            mockExecute.mockRejectedValue(new Error('Test error'));
+
+            const result = await handler({}, mockContext);
+
+            const body = JSON.parse(result.body);
+            expect(body.stack).toBeUndefined();
+        });
+    });
+
+    describe('Environment Variable Handling', () => {
+        test('should sanitize DATABASE_URL in logs', async () => {
+            const consoleSpy = jest.spyOn(console, 'log').mockImplementation();
+            process.env.DATABASE_URL = 'postgresql://user:password@very-long-host.amazonaws.com:5432/database';
+
+            await handler({}, mockContext);
+
+            // Verify URL credentials are masked in logs
+            expect(consoleSpy).toHaveBeenCalledWith(
+                expect.stringContaining('postgresql://***:***@')
+            );
+            expect(consoleSpy).not.toHaveBeenCalledWith(
+                expect.stringContaining('user:password')
+            );
+
+            consoleSpy.mockRestore();
+        });
+
+        test('should handle different stage values correctly', async () => {
+            const stages = ['dev', 'test', 'local', 'staging', 'production'];
+
+            for (const stage of stages) {
+                process.env.STAGE = stage;
+                const command = ['dev', 'test', 'local'].includes(stage) ? 'dev' : 'deploy';
+
+                jest.clearAllMocks();
+                mockExecute.mockResolvedValue({
+                    success: true,
+                    dbType: 'postgresql',
+                    stage,
+                    command,
+                    message: 'Database migration completed successfully',
+                });
+
+                const result = await handler({}, mockContext);
+
+                expect(result.statusCode).toBe(200);
+                const body = JSON.parse(result.body);
+                expect(body.stage).toBe(stage);
+                expect(mockExecute).toHaveBeenCalledWith({
+                    dbType: 'postgresql',
+                    stage,
+                    verbose: true,
+                });
+            }
+        });
+    });
+
+    describe('Response Format', () => {
+        test('should return properly formatted success response', async () => {
+            const result = await handler({}, mockContext);
+
+            expect(result).toHaveProperty('statusCode', 200);
+            expect(result).toHaveProperty('body');
+
+            const body = JSON.parse(result.body);
+            expect(body).toHaveProperty('success', true);
+            expect(body).toHaveProperty('message');
+            expect(body).toHaveProperty('dbType');
+            expect(body).toHaveProperty('stage');
+            expect(body).toHaveProperty('migrationCommand');
+            expect(body).toHaveProperty('timestamp');
+
+            // Verify timestamp is valid ISO string
+            expect(() => new Date(body.timestamp)).not.toThrow();
+        });
+
+        test('should return properly formatted error response', async () => {
+            delete process.env.DATABASE_URL;
+
+            const result = await handler({}, mockContext);
+
+            expect(result).toHaveProperty('statusCode', 500);
+            expect(result).toHaveProperty('body');
+
+            const body = JSON.parse(result.body);
+            expect(body).toHaveProperty('success', false);
+            expect(body).toHaveProperty('error');
+            expect(typeof body.error).toBe('string');
+        });
+    });
+
+    describe('Logging', () => {
+        test('should log migration progress', async () => {
+            const consoleSpy = jest.spyOn(console, 'log').mockImplementation();
+
+            await handler({}, mockContext);
+
+            // Verify key log messages
+            expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('Database Migration Lambda Started'));
+            expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('Executing Database Migration'));
+            expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('Migration Summary'));
+
+            consoleSpy.mockRestore();
+        });
+
+        test('should log errors with details', async () => {
+            const consoleSpy = jest.spyOn(console, 'error').mockImplementation();
+            const testError = new MigrationError('Test migration error', {
+                dbType: 'postgresql',
+                stage: 'production',
+            });
+            mockExecute.mockRejectedValue(testError);
+
+            await handler({}, mockContext);
+
+            expect(consoleSpy).toHaveBeenCalledWith(expect.stringContaining('Migration Failed'));
+            expect(consoleSpy).toHaveBeenCalledWith('Error:', 'MigrationError', 'Test migration error');
+
+            consoleSpy.mockRestore();
+        });
+    });
+});
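The suite drives the handler purely through its exported `handler(event, context)` signature with a minimal mock context. The same pattern can double as a quick local smoke test against a real database, assuming `DATABASE_URL` (and optionally `DB_TYPE` and `STAGE`) are already set in the environment; this snippet is illustrative only and is not shipped with the package:

```js
// Local smoke-test sketch (illustrative, not part of the package): call the
// exported handler directly, mirroring the minimal context object used in the
// Jest suite above. Requires DATABASE_URL in the environment.
const { handler } = require('./db-migration'); // same directory as the handler

const context = {
    requestId: 'local-smoke-test',
    functionName: 'local-dbMigrate',
    getRemainingTimeInMillis: () => 300000,
};

handler({}, context).then((result) => {
    console.log('statusCode:', result.statusCode);
    console.log('body:', JSON.parse(result.body));
    process.exitCode = result.statusCode === 200 ? 0 : 1;
});
```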