@friggframework/core 2.0.0--canary.461.e58db0a.0 → 2.0.0--canary.461.4f3c330.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,265 @@
+ /**
+  * Tests for TriggerDatabaseMigrationUseCase
+  */
+
+ const {
+     TriggerDatabaseMigrationUseCase,
+     ValidationError,
+ } = require('./trigger-database-migration-use-case');
+
+ describe('TriggerDatabaseMigrationUseCase', () => {
+     let useCase;
+     let mockProcessRepository;
+     let mockQueuerUtil;
+     let originalEnv;
+
+     beforeEach(() => {
+         // Save original environment value
+         originalEnv = process.env.DB_MIGRATION_QUEUE_URL;
+
+         // Set test environment
+         process.env.DB_MIGRATION_QUEUE_URL = 'https://sqs.us-east-1.amazonaws.com/123456789/test-queue';
+
+         // Create mock repository
+         mockProcessRepository = {
+             create: jest.fn().mockResolvedValue({
+                 id: 'process-123',
+                 userId: 'user-456',
+                 integrationId: null,
+                 name: 'database-migration',
+                 type: 'DATABASE_MIGRATION',
+                 state: 'INITIALIZING',
+                 context: {
+                     dbType: 'postgresql',
+                     stage: 'production',
+                 },
+                 results: {},
+                 createdAt: new Date(),
+                 updatedAt: new Date(),
+             }),
+             updateState: jest.fn().mockResolvedValue(true),
+         };
+
+         // Create mock queuer util
+         mockQueuerUtil = {
+             send: jest.fn().mockResolvedValue({ MessageId: 'msg-123' }),
+         };
+
+         // Create use case with mocks
+         useCase = new TriggerDatabaseMigrationUseCase({
+             processRepository: mockProcessRepository,
+             queuerUtil: mockQueuerUtil,
+         });
+     });
+
+     afterEach(() => {
+         // Restore original environment
+         if (originalEnv !== undefined) {
+             process.env.DB_MIGRATION_QUEUE_URL = originalEnv;
+         } else {
+             delete process.env.DB_MIGRATION_QUEUE_URL;
+         }
+         jest.clearAllMocks();
+     });
+
+     describe('constructor', () => {
+         it('should throw error if processRepository not provided', () => {
+             expect(() => {
+                 new TriggerDatabaseMigrationUseCase({});
+             }).toThrow('processRepository dependency is required');
+         });
+
+         it('should accept custom queuerUtil', () => {
+             const customQueuer = { send: jest.fn() };
+             const instance = new TriggerDatabaseMigrationUseCase({
+                 processRepository: mockProcessRepository,
+                 queuerUtil: customQueuer,
+             });
+
+             expect(instance.queuerUtil).toBe(customQueuer);
+         });
+     });
+
+     describe('execute', () => {
+         it('should create process and queue migration job', async () => {
+             const result = await useCase.execute({
+                 userId: 'user-456',
+                 dbType: 'postgresql',
+                 stage: 'production',
+             });
+
+             // Verify process creation
+             expect(mockProcessRepository.create).toHaveBeenCalledWith({
+                 userId: 'user-456',
+                 integrationId: null,
+                 name: 'database-migration',
+                 type: 'DATABASE_MIGRATION',
+                 state: 'INITIALIZING',
+                 context: expect.objectContaining({
+                     dbType: 'postgresql',
+                     stage: 'production',
+                 }),
+                 results: {},
+             });
+
+             // Verify SQS message sent
+             expect(mockQueuerUtil.send).toHaveBeenCalledWith(
+                 {
+                     processId: 'process-123',
+                     dbType: 'postgresql',
+                     stage: 'production',
+                 },
+                 'https://sqs.us-east-1.amazonaws.com/123456789/test-queue'
+             );
+
+             // Verify response
+             expect(result).toEqual({
+                 success: true,
+                 processId: 'process-123',
+                 state: 'INITIALIZING',
+                 statusUrl: '/db-migrate/process-123',
+                 message: 'Database migration queued successfully',
+             });
+         });
+
+         it('should handle MongoDB dbType', async () => {
+             await useCase.execute({
+                 userId: 'user-456',
+                 dbType: 'mongodb',
+                 stage: 'dev',
+             });
+
+             expect(mockProcessRepository.create).toHaveBeenCalledWith(
+                 expect.objectContaining({
+                     context: expect.objectContaining({
+                         dbType: 'mongodb',
+                         stage: 'dev',
+                     }),
+                 })
+             );
+         });
+
+         it('should throw ValidationError if userId is missing', async () => {
+             await expect(
+                 useCase.execute({
+                     dbType: 'postgresql',
+                     stage: 'production',
+                 })
+             ).rejects.toThrow(ValidationError);
+
+             await expect(
+                 useCase.execute({
+                     dbType: 'postgresql',
+                     stage: 'production',
+                 })
+             ).rejects.toThrow('userId is required');
+         });
+
+         it('should throw ValidationError if userId is not a string', async () => {
+             await expect(
+                 useCase.execute({
+                     userId: 123,
+                     dbType: 'postgresql',
+                     stage: 'production',
+                 })
+             ).rejects.toThrow('userId must be a string');
+         });
+
+         it('should throw ValidationError if dbType is missing', async () => {
+             await expect(
+                 useCase.execute({
+                     userId: 'user-456',
+                     stage: 'production',
+                 })
+             ).rejects.toThrow('dbType is required');
+         });
+
+         it('should throw ValidationError if dbType is invalid', async () => {
+             await expect(
+                 useCase.execute({
+                     userId: 'user-456',
+                     dbType: 'mysql',
+                     stage: 'production',
+                 })
+             ).rejects.toThrow('Invalid dbType: "mysql"');
+         });
+
+         it('should throw ValidationError if stage is missing', async () => {
+             await expect(
+                 useCase.execute({
+                     userId: 'user-456',
+                     dbType: 'postgresql',
+                 })
+             ).rejects.toThrow('stage is required');
+         });
+
+         it('should throw ValidationError if stage is not a string', async () => {
+             await expect(
+                 useCase.execute({
+                     userId: 'user-456',
+                     dbType: 'postgresql',
+                     stage: 123,
+                 })
+             ).rejects.toThrow('stage must be a string');
+         });
+
+         it('should throw error if DB_MIGRATION_QUEUE_URL not set', async () => {
+             delete process.env.DB_MIGRATION_QUEUE_URL;
+
+             await expect(
+                 useCase.execute({
+                     userId: 'user-456',
+                     dbType: 'postgresql',
+                     stage: 'production',
+                 })
+             ).rejects.toThrow('DB_MIGRATION_QUEUE_URL environment variable is not set');
+         });
+
+         it('should update process to FAILED if queue send fails', async () => {
+             mockQueuerUtil.send.mockRejectedValue(new Error('SQS unavailable'));
+
+             await expect(
+                 useCase.execute({
+                     userId: 'user-456',
+                     dbType: 'postgresql',
+                     stage: 'production',
+                 })
+             ).rejects.toThrow('Failed to queue migration: SQS unavailable');
+
+             // Verify process was marked as failed
+             expect(mockProcessRepository.updateState).toHaveBeenCalledWith(
+                 'process-123',
+                 'FAILED',
+                 {
+                     error: 'Failed to queue migration job',
+                     errorDetails: 'SQS unavailable',
+                 }
+             );
+         });
+
+         it('should handle process creation failure', async () => {
+             mockProcessRepository.create.mockRejectedValue(new Error('Database error'));
+
+             await expect(
+                 useCase.execute({
+                     userId: 'user-456',
+                     dbType: 'postgresql',
+                     stage: 'production',
+                 })
+             ).rejects.toThrow('Database error');
+
+             // Should not attempt to send to queue if process creation fails
+             expect(mockQueuerUtil.send).not.toHaveBeenCalled();
+         });
+     });
+
+     describe('ValidationError', () => {
+         it('should have correct name', () => {
+             const error = new ValidationError('test message');
+             expect(error.name).toBe('ValidationError');
+             expect(error.message).toBe('test message');
+             expect(error instanceof Error).toBe(true);
+         });
+     });
+ });
+
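The use case module under test is not included in this diff; inferred from the assertions above, its interface looks roughly like the sketch below (constructor dependency check, input validation, process creation, SQS enqueue, and a FAILED-state rollback when the enqueue fails). This is an illustrative reconstruction, not the published implementation; the default queuerUtil behavior and internal wiring are assumptions.

// Illustrative sketch inferred from the tests above — not the published
// trigger-database-migration-use-case module.
class ValidationError extends Error {
    constructor(message) {
        super(message);
        this.name = 'ValidationError';
    }
}

class TriggerDatabaseMigrationUseCase {
    constructor({ processRepository, queuerUtil } = {}) {
        if (!processRepository) {
            throw new Error('processRepository dependency is required');
        }
        this.processRepository = processRepository;
        // Assumption: the real module falls back to a shared QueuerUtil when none is injected.
        this.queuerUtil = queuerUtil;
    }

    async execute({ userId, dbType, stage }) {
        if (!userId) throw new ValidationError('userId is required');
        if (typeof userId !== 'string') throw new ValidationError('userId must be a string');
        if (!dbType) throw new ValidationError('dbType is required');
        if (!['postgresql', 'mongodb'].includes(dbType)) {
            throw new ValidationError(`Invalid dbType: "${dbType}"`);
        }
        if (!stage) throw new ValidationError('stage is required');
        if (typeof stage !== 'string') throw new ValidationError('stage must be a string');

        const queueUrl = process.env.DB_MIGRATION_QUEUE_URL;
        if (!queueUrl) {
            throw new Error('DB_MIGRATION_QUEUE_URL environment variable is not set');
        }

        // Create the tracking process first; enqueueing is skipped if this fails.
        const migrationProcess = await this.processRepository.create({
            userId,
            integrationId: null,
            name: 'database-migration',
            type: 'DATABASE_MIGRATION',
            state: 'INITIALIZING',
            context: { dbType, stage },
            results: {},
        });

        try {
            await this.queuerUtil.send(
                { processId: migrationProcess.id, dbType, stage },
                queueUrl
            );
        } catch (error) {
            // Mark the process FAILED so the status endpoint reflects the enqueue error.
            await this.processRepository.updateState(migrationProcess.id, 'FAILED', {
                error: 'Failed to queue migration job',
                errorDetails: error.message,
            });
            throw new Error(`Failed to queue migration: ${error.message}`);
        }

        return {
            success: true,
            processId: migrationProcess.id,
            state: 'INITIALIZING',
            statusUrl: `/db-migrate/${migrationProcess.id}`,
            message: 'Database migration queued successfully',
        };
    }
}

module.exports = { TriggerDatabaseMigrationUseCase, ValidationError };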
@@ -0,0 +1,20 @@
+ /**
+  * Database Migration Router Lambda Handler
+  *
+  * Wraps the Express router with Lambda infrastructure:
+  * - Express app with middleware (CORS, body-parser, error handling)
+  * - serverless-http for Lambda compatibility
+  * - createHandler for DB pooling + secrets management
+  *
+  * This matches the pattern used by health.handler.js and user.handler.js
+  */
+
+ const { createAppHandler } = require('../app-handler-helpers');
+ const dbMigrationRouter = require('./db-migration');
+
+ module.exports.handler = createAppHandler(
+     'db-migration-router',
+     dbMigrationRouter,
+     true // shouldUseDatabase - need DB for Process repository
+ );
+
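The app-handler-helpers module referenced here is not part of this diff. Conceptually, the wrapping pattern the header comment describes looks roughly like the sketch below: build an Express app around the router and hand it to serverless-http. This is an illustration of the pattern only, not the actual createAppHandler; the specific middleware and the database/secrets setup step are assumptions.

// Rough sketch of the wrapping pattern described above (illustrative only;
// not the real createAppHandler from app-handler-helpers).
const express = require('express');
const cors = require('cors');
const serverless = require('serverless-http');

function createAppHandlerSketch(name, router) {
    const app = express();
    app.use(cors());
    app.use(express.json());
    app.use(router);
    // Catch-all error handler so thrown errors become JSON responses.
    app.use((err, req, res, next) => {
        console.error(`[${name}]`, err);
        res.status(500).json({ success: false, error: 'Internal server error' });
    });
    // Per the header comment, the real helper also handles DB connection
    // pooling and secrets loading before delegating to the wrapped app.
    return serverless(app);
}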
@@ -0,0 +1,175 @@
+ /**
+  * Database Migration Router
+  *
+  * HTTP API for triggering and monitoring database migrations.
+  *
+  * Endpoints:
+  * - POST /db-migrate - Trigger async migration (queues job)
+  * - GET /db-migrate/:processId - Check migration status
+  *
+  * Security:
+  * - Requires ADMIN_API_KEY header for all requests
+  *
+  * Architecture:
+  * - Router (Adapter Layer) → Use Cases (Domain) → Repositories (Infrastructure)
+  * - Follows DDD/Hexagonal architecture
+  */
+
+ const { Router } = require('express');
+ const catchAsyncError = require('express-async-handler');
+ const { createProcessRepository } = require('../../integrations/repositories/process-repository-factory');
+ const {
+     TriggerDatabaseMigrationUseCase,
+     ValidationError: TriggerValidationError,
+ } = require('../../database/use-cases/trigger-database-migration-use-case');
+ const {
+     GetMigrationStatusUseCase,
+     ValidationError: GetValidationError,
+     NotFoundError,
+ } = require('../../database/use-cases/get-migration-status-use-case');
+
+ const router = Router();
+
+ // Dependency injection (like health.js:34-70)
+ const processRepository = createProcessRepository();
+ const triggerMigrationUseCase = new TriggerDatabaseMigrationUseCase({
+     processRepository,
+     // Note: QueuerUtil is used directly in the use case (static utility)
+ });
+ const getStatusUseCase = new GetMigrationStatusUseCase({ processRepository });
+
+ /**
+  * Admin API key validation middleware
+  * Matches pattern from health.js:72-88
+  */
+ const validateApiKey = (req, res, next) => {
+     const apiKey = req.headers['x-api-key'];
+
+     if (!apiKey || apiKey !== process.env.ADMIN_API_KEY) {
+         console.error('Unauthorized access attempt to db-migrate endpoint');
+         return res.status(401).json({
+             status: 'error',
+             message: 'Unauthorized',
+         });
+     }
+
+     next();
+ };
+
+ // Apply API key validation to all routes
+ router.use(validateApiKey);
+
+ /**
+  * POST /db-migrate
+  *
+  * Trigger database migration (async via SQS queue)
+  *
+  * Request body:
+  * {
+  *   userId: string (optional, defaults to 'admin'),
+  *   dbType: 'postgresql' | 'mongodb',
+  *   stage: string (e.g., 'production', 'dev')
+  * }
+  *
+  * Response (202 Accepted):
+  * {
+  *   success: true,
+  *   processId: string,
+  *   state: 'INITIALIZING',
+  *   statusUrl: string,
+  *   message: string
+  * }
+  */
+ router.post(
+     '/db-migrate',
+     catchAsyncError(async (req, res) => {
+         const { dbType, stage } = req.body;
+         // TODO: Extract userId from JWT token when auth is implemented
+         const userId = req.body.userId || 'admin';
+
+         console.log(`Migration trigger request: dbType=${dbType}, stage=${stage}, userId=${userId}`);
+
+         try {
+             const result = await triggerMigrationUseCase.execute({
+                 userId,
+                 dbType,
+                 stage,
+             });
+
+             // 202 Accepted - request accepted but not completed
+             res.status(202).json(result);
+         } catch (error) {
+             // Handle validation errors (400 Bad Request)
+             if (error instanceof TriggerValidationError) {
+                 return res.status(400).json({
+                     success: false,
+                     error: error.message,
+                 });
+             }
+
+             // Re-throw other errors for global error handler
+             throw error;
+         }
+     })
+ );
+
+ /**
+  * GET /db-migrate/:processId
+  *
+  * Get migration status by process ID
+  *
+  * Response (200 OK):
+  * {
+  *   processId: string,
+  *   type: 'DATABASE_MIGRATION',
+  *   state: 'INITIALIZING' | 'RUNNING' | 'COMPLETED' | 'FAILED',
+  *   context: {
+  *     dbType: string,
+  *     stage: string,
+  *     migrationCommand: string (if started)
+  *   },
+  *   results: {
+  *     success: boolean (if completed),
+  *     duration: string (if completed),
+  *     error: string (if failed)
+  *   },
+  *   createdAt: string,
+  *   updatedAt: string
+  * }
+  */
+ router.get(
+     '/db-migrate/:processId',
+     catchAsyncError(async (req, res) => {
+         const { processId } = req.params;
+
+         console.log(`Migration status request: processId=${processId}`);
+
+         try {
+             const status = await getStatusUseCase.execute({ processId });
+
+             res.status(200).json(status);
+         } catch (error) {
+             // Handle not found errors (404 Not Found)
+             if (error instanceof NotFoundError) {
+                 return res.status(404).json({
+                     success: false,
+                     error: error.message,
+                 });
+             }
+
+             // Handle validation errors (400 Bad Request)
+             if (error instanceof GetValidationError) {
+                 return res.status(400).json({
+                     success: false,
+                     error: error.message,
+                 });
+             }
+
+             // Re-throw other errors for global error handler
+             throw error;
+         }
+     })
+ );
+
+ module.exports = router;
+
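Once deployed behind the handler above, the two routes can be exercised with a plain HTTP client. The snippet below is a hypothetical example (the base URL is a placeholder, and a Node 18+ runtime with global fetch is assumed); it shows the x-api-key header the validateApiKey middleware checks and the trigger-then-poll flow.

// Hypothetical client for the routes above (Node 18+ global fetch assumed;
// BASE_URL is a placeholder for the deployed API's origin).
const BASE_URL = 'https://api.example.com';
const API_KEY = process.env.ADMIN_API_KEY;

async function runMigration() {
    // POST /db-migrate -> 202 Accepted with { processId, statusUrl, ... }
    const triggerRes = await fetch(`${BASE_URL}/db-migrate`, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'x-api-key': API_KEY,
        },
        body: JSON.stringify({ dbType: 'postgresql', stage: 'production' }),
    });
    const { processId, statusUrl } = await triggerRes.json();
    console.log(`Queued migration ${processId}; polling ${statusUrl}`);

    // GET /db-migrate/:processId -> 200 OK with the current process state
    const statusRes = await fetch(`${BASE_URL}${statusUrl}`, {
        headers: { 'x-api-key': API_KEY },
    });
    console.log(await statusRes.json());
}

runMigration().catch(console.error);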
@@ -46,10 +46,20 @@ const {
      MigrationError,
      ValidationError,
  } = require('../../database/use-cases/run-database-migration-use-case');
+ const {
+     UpdateProcessState,
+ } = require('../../integrations/use-cases/update-process-state');
+ const {
+     createProcessRepository,
+ } = require('../../integrations/repositories/process-repository-factory');

  // Inject prisma-runner as dependency
  const prismaRunner = require('../../database/utils/prisma-runner');

+ // Create process repository and use case for tracking migration progress
+ const processRepository = createProcessRepository();
+ const updateProcessState = new UpdateProcessState({ processRepository });
+
  /**
   * Sanitizes error messages to prevent credential leaks
   * @param {string} errorMessage - Error message that might contain credentials
@@ -85,9 +95,48 @@ function sanitizeDatabaseUrl(url) {
      return url.replace(/(:\/\/)([^:]+):([^@]+)@/, '$1***:***@');
  }

+ /**
+  * Extract migration parameters from SQS event or direct invocation
+  * @param {Object} event - Lambda event (SQS or direct)
+  * @returns {Object} Extracted parameters { processId, dbType, stage }
+  */
+ function extractMigrationParams(event) {
+     let processId = null;
+     let dbType = null;
+     let stage = null;
+
+     // Check if this is an SQS event
+     if (event.Records && event.Records.length > 0) {
+         // SQS event - extract from message body
+         const message = JSON.parse(event.Records[0].body);
+         processId = message.processId;
+         dbType = message.dbType;
+         stage = message.stage;
+
+         console.log('SQS event detected');
+         console.log(` Process ID: ${processId}`);
+         console.log(` DB Type: ${dbType}`);
+         console.log(` Stage: ${stage}`);
+     } else {
+         // Direct invocation - use event properties or environment variables
+         processId = event.processId || null;
+         dbType = event.dbType || process.env.DB_TYPE || 'postgresql';
+         stage = event.stage || process.env.STAGE || 'production';
+
+         console.log('Direct invocation detected');
+         if (processId) {
+             console.log(` Process ID: ${processId}`);
+         }
+         console.log(` DB Type: ${dbType}`);
+         console.log(` Stage: ${stage}`);
+     }
+
+     return { processId, dbType, stage };
+ }
+
  /**
   * Lambda handler entry point
-  * @param {Object} event - Lambda event (not used, migrations don't need input)
+  * @param {Object} event - Lambda event (SQS message or direct invocation)
   * @param {Object} context - Lambda context (contains AWS request ID, timeout info)
   * @returns {Promise<Object>} Response with statusCode and body
   */
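For reference, the two event shapes extractMigrationParams distinguishes look roughly like the examples below: an SQS record whose body carries the JSON message sent by TriggerDatabaseMigrationUseCase, and a direct invocation payload that falls back to the DB_TYPE / STAGE environment variables. Field values are illustrative only.

// Illustrative inputs for extractMigrationParams (values are examples only).

// 1. SQS event: parameters arrive JSON-encoded in Records[0].body.
const sqsEvent = {
    Records: [
        {
            body: JSON.stringify({
                processId: 'process-123',
                dbType: 'postgresql',
                stage: 'production',
            }),
        },
    ],
};

// 2. Direct invocation: parameters live on the event itself; dbType and stage
//    fall back to process.env.DB_TYPE / process.env.STAGE when omitted.
const directEvent = {
    dbType: 'mongodb',
    stage: 'dev',
};

console.log(extractMigrationParams(sqsEvent));    // { processId: 'process-123', dbType: 'postgresql', stage: 'production' }
console.log(extractMigrationParams(directEvent)); // { processId: null, dbType: 'mongodb', stage: 'dev' }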
@@ -102,10 +151,11 @@ exports.handler = async (event, context) => {
          remainingTimeInMillis: context.getRemainingTimeInMillis(),
      }, null, 2));

+     // Extract migration parameters from event
+     const { processId, dbType, stage } = extractMigrationParams(event);
+
      // Get environment variables
      const databaseUrl = process.env.DATABASE_URL;
-     const dbType = process.env.DB_TYPE || 'postgresql';
-     const stage = process.env.STAGE || 'production';

      try {
          // Validate DATABASE_URL is set
@@ -126,6 +176,14 @@ exports.handler = async (event, context) => {
          console.log(` Stage: ${stage}`);
          console.log(` Database URL: ${sanitizeDatabaseUrl(databaseUrl)}`);

+         // Update process state to RUNNING (if processId provided)
+         if (processId) {
+             console.log(`\n✓ Updating process state to RUNNING: ${processId}`);
+             await updateProcessState.execute(processId, 'RUNNING', {
+                 startedAt: new Date().toISOString(),
+             });
+         }
+
          // Create use case with dependencies (Dependency Injection)
          const runDatabaseMigrationUseCase = new RunDatabaseMigrationUseCase({
              prismaRunner,
@@ -152,17 +210,32 @@ exports.handler = async (event, context) => {
          console.log(` Command: ${result.command}`);
          console.log('========================================');

+         // Update process state to COMPLETED (if processId provided)
+         if (processId) {
+             console.log(`\n✓ Updating process state to COMPLETED: ${processId}`);
+             await updateProcessState.execute(processId, 'COMPLETED', {
+                 completedAt: new Date().toISOString(),
+                 migrationCommand: result.command,
+             });
+         }
+
          // Return success response (adapter layer - HTTP mapping)
+         const responseBody = {
+             success: true,
+             message: result.message,
+             dbType: result.dbType,
+             stage: result.stage,
+             migrationCommand: result.command,
+             timestamp: new Date().toISOString(),
+         };
+
+         if (processId) {
+             responseBody.processId = processId;
+         }
+
          return {
              statusCode: 200,
-             body: JSON.stringify({
-                 success: true,
-                 message: result.message,
-                 dbType: result.dbType,
-                 stage: result.stage,
-                 migrationCommand: result.command,
-                 timestamp: new Date().toISOString(),
-             }),
+             body: JSON.stringify(responseBody),
          };

      } catch (error) {
@@ -194,15 +267,36 @@ exports.handler = async (event, context) => {
          // Sanitize error message before returning
          const sanitizedError = sanitizeError(errorMessage);

+         // Update process state to FAILED (if processId provided)
+         if (processId) {
+             try {
+                 console.log(`\n✓ Updating process state to FAILED: ${processId}`);
+                 await updateProcessState.execute(processId, 'FAILED', {
+                     failedAt: new Date().toISOString(),
+                     error: sanitizedError,
+                     errorType: error.name || 'Error',
+                 });
+             } catch (updateError) {
+                 console.error('Failed to update process state:', updateError);
+                 // Don't fail the entire handler if state update fails
+             }
+         }
+
+         const errorBody = {
+             success: false,
+             error: sanitizedError,
+             errorType: error.name || 'Error',
+             // Only include stack traces in development environments
+             ...(stage === 'dev' || stage === 'local' || stage === 'test' ? { stack: error.stack } : {}),
+         };
+
+         if (processId) {
+             errorBody.processId = processId;
+         }
+
          return {
              statusCode,
-             body: JSON.stringify({
-                 success: false,
-                 error: sanitizedError,
-                 errorType: error.name || 'Error',
-                 // Only include stack traces in development environments
-                 ...(stage === 'dev' || stage === 'local' || stage === 'test' ? { stack: error.stack } : {}),
-             }),
+             body: JSON.stringify(errorBody),
          };
      }
  };
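Taken together, the success and error branches above now produce Lambda response bodies shaped roughly like the examples below when the job was triggered through the queue. All values here are illustrative placeholders, not actual output.

// Illustrative response bodies after this change (placeholder values).

// Success (statusCode 200):
const exampleSuccessBody = {
    success: true,
    message: '...',                    // result.message from the use case
    dbType: 'postgresql',
    stage: 'production',
    migrationCommand: '...',           // result.command
    timestamp: '2025-01-01T00:00:00.000Z',
    processId: 'process-123',          // present only when a processId was supplied
};

// Failure (statusCode taken from the error mapping):
const exampleErrorBody = {
    success: false,
    error: '...',                      // sanitized error message
    errorType: 'MigrationError',
    processId: 'process-123',          // present only when a processId was supplied
    // stack is included only for the dev/local/test stages
};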
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
      "name": "@friggframework/core",
      "prettier": "@friggframework/prettier-config",
-     "version": "2.0.0--canary.461.e58db0a.0",
+     "version": "2.0.0--canary.461.4f3c330.0",
      "dependencies": {
          "@aws-sdk/client-apigatewaymanagementapi": "^3.588.0",
          "@aws-sdk/client-kms": "^3.588.0",
@@ -37,9 +37,9 @@
          }
      },
      "devDependencies": {
-         "@friggframework/eslint-config": "2.0.0--canary.461.e58db0a.0",
-         "@friggframework/prettier-config": "2.0.0--canary.461.e58db0a.0",
-         "@friggframework/test": "2.0.0--canary.461.e58db0a.0",
+         "@friggframework/eslint-config": "2.0.0--canary.461.4f3c330.0",
+         "@friggframework/prettier-config": "2.0.0--canary.461.4f3c330.0",
+         "@friggframework/test": "2.0.0--canary.461.4f3c330.0",
          "@prisma/client": "^6.17.0",
          "@types/lodash": "4.17.15",
          "@typescript-eslint/eslint-plugin": "^8.0.0",
@@ -79,5 +79,5 @@
      "publishConfig": {
          "access": "public"
      },
-     "gitHead": "e58db0af84dd0c8fe3abdbbc472e433815c0f96d"
+     "gitHead": "4f3c33010ab48c11f66a630ea72f75afc2e21762"
  }