@friggframework/core 2.0.0--canary.461.bdbd057.0 → 2.0.0--canary.461.2ca8c89.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/database/use-cases/get-migration-status-use-case.js +97 -0
- package/database/use-cases/trigger-database-migration-use-case.js +169 -0
- package/handlers/routers/db-migration.handler.js +20 -0
- package/handlers/routers/db-migration.js +175 -0
- package/handlers/workers/db-migration.js +112 -18
- package/package.json +5 -5
package/database/use-cases/get-migration-status-use-case.js
ADDED
@@ -0,0 +1,97 @@
+/**
+ * Get Migration Status Use Case
+ *
+ * Retrieves the status of a database migration by process ID.
+ * Formats the Process record for migration-specific response.
+ *
+ * This use case follows the Frigg hexagonal architecture pattern where:
+ * - Routers (adapters) call use cases
+ * - Use cases contain business logic and formatting
+ * - Use cases call repositories for data access
+ */
+
+class GetMigrationStatusUseCase {
+    /**
+     * @param {Object} dependencies
+     * @param {Object} dependencies.processRepository - Repository for process data access
+     */
+    constructor({ processRepository }) {
+        if (!processRepository) {
+            throw new Error('processRepository dependency is required');
+        }
+        this.processRepository = processRepository;
+    }
+
+    /**
+     * Execute get migration status
+     *
+     * @param {Object} params
+     * @param {string} params.processId - Process ID to retrieve
+     * @returns {Promise<Object>} Migration status with process details
+     * @throws {NotFoundError} If process not found
+     * @throws {Error} If process is not a migration process
+     */
+    async execute({ processId }) {
+        // Validation
+        if (!processId) {
+            throw new ValidationError('processId is required');
+        }
+
+        if (typeof processId !== 'string') {
+            throw new ValidationError('processId must be a string');
+        }
+
+        // Get process from repository
+        const process = await this.processRepository.findById(processId);
+
+        if (!process) {
+            throw new NotFoundError(`Migration process not found: ${processId}`);
+        }
+
+        // Verify this is a migration process
+        if (process.type !== 'DATABASE_MIGRATION') {
+            throw new Error(
+                `Process ${processId} is not a migration process (type: ${process.type})`
+            );
+        }
+
+        // Format response
+        return {
+            processId: process.id,
+            type: process.type,
+            state: process.state,
+            context: process.context || {},
+            results: process.results || {},
+            createdAt: process.createdAt,
+            updatedAt: process.updatedAt,
+        };
+    }
+}
+
+/**
+ * Custom error for validation failures
+ */
+class ValidationError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'ValidationError';
+    }
+}
+
+/**
+ * Custom error for not found resources
+ */
+class NotFoundError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'NotFoundError';
+        this.statusCode = 404;
+    }
+}
+
+module.exports = {
+    GetMigrationStatusUseCase,
+    ValidationError,
+    NotFoundError,
+};
+
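For orientation (not part of the published diff): a minimal sketch of how this use case can be exercised against a stubbed repository. The deep require path and the record shape returned by findById are assumptions; only the findById contract is taken from the code above.

// Hypothetical wiring sketch; the require path assumes deep requires into
// @friggframework/core resolve to the file added above.
const {
    GetMigrationStatusUseCase,
} = require('@friggframework/core/database/use-cases/get-migration-status-use-case');

// Stub repository: only findById is needed by this use case.
const processRepository = {
    async findById(id) {
        return {
            id,
            type: 'DATABASE_MIGRATION',
            state: 'COMPLETED',
            context: { dbType: 'postgresql', stage: 'dev' },
            results: { success: true },
            createdAt: '2025-01-01T00:00:00.000Z',
            updatedAt: '2025-01-01T00:05:00.000Z',
        };
    },
};

const getStatus = new GetMigrationStatusUseCase({ processRepository });
getStatus
    .execute({ processId: 'example-process-id' })
    .then((status) => console.log(status.state)); // 'COMPLETED'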
package/database/use-cases/trigger-database-migration-use-case.js
ADDED
@@ -0,0 +1,169 @@
+/**
+ * Trigger Database Migration Use Case
+ *
+ * Business logic for triggering async database migrations via SQS queue.
+ * Creates a Process record for tracking and sends migration job to queue.
+ *
+ * This use case follows the Frigg hexagonal architecture pattern where:
+ * - Routers (adapters) call use cases
+ * - Use cases contain business logic and orchestration
+ * - Use cases call repositories for data access
+ * - Use cases delegate infrastructure concerns (SQS) to utilities
+ *
+ * Flow:
+ * 1. Validate migration parameters
+ * 2. Create Process record (state: INITIALIZING)
+ * 3. Send message to SQS queue (fire-and-forget)
+ * 4. Return process info immediately (async pattern)
+ */
+
+const { QueuerUtil } = require('../../queues/queuer-util');
+
+class TriggerDatabaseMigrationUseCase {
+    /**
+     * @param {Object} dependencies
+     * @param {Object} dependencies.processRepository - Repository for process data access
+     * @param {Object} [dependencies.queuerUtil] - SQS utility (injectable for testing)
+     */
+    constructor({ processRepository, queuerUtil = QueuerUtil }) {
+        if (!processRepository) {
+            throw new Error('processRepository dependency is required');
+        }
+        this.processRepository = processRepository;
+        this.queuerUtil = queuerUtil;
+    }
+
+    /**
+     * Execute database migration trigger
+     *
+     * @param {Object} params
+     * @param {string} params.userId - User ID triggering the migration
+     * @param {string} params.dbType - Database type ('postgresql' or 'mongodb')
+     * @param {string} params.stage - Deployment stage (determines migration command)
+     * @returns {Promise<Object>} Process info { success, processId, state, statusUrl, message }
+     * @throws {ValidationError} If parameters are invalid
+     * @throws {Error} If process creation or queue send fails
+     */
+    async execute({ userId, dbType, stage }) {
+        // Validation
+        this._validateParams({ userId, dbType, stage });
+
+        // Create Process record for tracking
+        const process = await this.processRepository.create({
+            userId,
+            integrationId: null, // System operation, not tied to integration
+            name: 'database-migration',
+            type: 'DATABASE_MIGRATION',
+            state: 'INITIALIZING',
+            context: {
+                dbType,
+                stage,
+                triggeredAt: new Date().toISOString(),
+            },
+            results: {},
+        });
+
+        console.log(`Created migration process: ${process.id}`);
+
+        // Get queue URL from environment
+        const queueUrl = process.env.DB_MIGRATION_QUEUE_URL;
+        if (!queueUrl) {
+            throw new Error(
+                'DB_MIGRATION_QUEUE_URL environment variable is not set. ' +
+                    'Cannot send migration to queue.'
+            );
+        }
+
+        // Send message to SQS queue (async fire-and-forget)
+        try {
+            await this.queuerUtil.send(
+                {
+                    processId: process.id,
+                    dbType,
+                    stage,
+                },
+                queueUrl
+            );
+
+            console.log(`Sent migration job to queue for process: ${process.id}`);
+        } catch (error) {
+            console.error(`Failed to send migration to queue:`, error);
+
+            // Update process state to FAILED
+            await this.processRepository.updateState(
+                process.id,
+                'FAILED',
+                {
+                    error: 'Failed to queue migration job',
+                    errorDetails: error.message,
+                }
+            );
+
+            throw new Error(
+                `Failed to queue migration: ${error.message}`
+            );
+        }
+
+        // Return process info immediately (don't wait for migration completion)
+        return {
+            success: true,
+            processId: process.id,
+            state: process.state,
+            statusUrl: `/db-migrate/${process.id}`,
+            message: 'Database migration queued successfully',
+        };
+    }
+
+    /**
+     * Validate execution parameters
+     * @private
+     */
+    _validateParams({ userId, dbType, stage }) {
+        if (!userId) {
+            throw new ValidationError('userId is required');
+        }
+
+        if (typeof userId !== 'string') {
+            throw new ValidationError('userId must be a string');
+        }
+
+        if (!dbType) {
+            throw new ValidationError('dbType is required');
+        }
+
+        if (typeof dbType !== 'string') {
+            throw new ValidationError('dbType must be a string');
+        }
+
+        const validDbTypes = ['postgresql', 'mongodb'];
+        if (!validDbTypes.includes(dbType)) {
+            throw new ValidationError(
+                `Invalid dbType: "${dbType}". Must be one of: ${validDbTypes.join(', ')}`
+            );
+        }
+
+        if (!stage) {
+            throw new ValidationError('stage is required');
+        }
+
+        if (typeof stage !== 'string') {
+            throw new ValidationError('stage must be a string');
+        }
+    }
+}
+
+/**
+ * Custom error for validation failures
+ */
+class ValidationError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'ValidationError';
+    }
+}
+
+module.exports = {
+    TriggerDatabaseMigrationUseCase,
+    ValidationError,
+};
+
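Between them, the two new use cases and the worker changes further down rely on three repository methods: create, findById, and updateState. A hypothetical in-memory implementation of that contract (useful for unit tests; not part of the package, and how the third updateState argument is persisted is an assumption here) could look like:

const crypto = require('crypto');

// Hypothetical in-memory stand-in for the process repository.
// Method names and arguments mirror the calls made by the new code.
class InMemoryProcessRepository {
    constructor() {
        this.records = new Map();
    }

    async create(data) {
        const record = { id: crypto.randomUUID(), ...data };
        this.records.set(record.id, record);
        return record;
    }

    async findById(id) {
        return this.records.get(id) || null;
    }

    async updateState(id, state, results = {}) {
        const record = this.records.get(id);
        if (!record) return null;
        record.state = state;
        // Assumption: extra data is merged into the results field.
        record.results = { ...record.results, ...results };
        record.updatedAt = new Date().toISOString();
        return record;
    }
}

module.exports = { InMemoryProcessRepository };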
package/handlers/routers/db-migration.handler.js
ADDED
@@ -0,0 +1,20 @@
+/**
+ * Database Migration Router Lambda Handler
+ *
+ * Wraps the Express router with Lambda infrastructure:
+ * - Express app with middleware (CORS, body-parser, error handling)
+ * - serverless-http for Lambda compatibility
+ * - createHandler for DB pooling + secrets management
+ *
+ * This matches the pattern used by health.handler.js and user.handler.js
+ */
+
+const { createAppHandler } = require('../app-handler-helpers');
+const dbMigrationRouter = require('./db-migration');
+
+module.exports.handler = createAppHandler(
+    'db-migration-router',
+    dbMigrationRouter,
+    true // shouldUseDatabase - need DB for Process repository
+);
+
package/handlers/routers/db-migration.js
ADDED
@@ -0,0 +1,175 @@
+/**
+ * Database Migration Router
+ *
+ * HTTP API for triggering and monitoring database migrations.
+ *
+ * Endpoints:
+ * - POST /db-migrate - Trigger async migration (queues job)
+ * - GET /db-migrate/:processId - Check migration status
+ *
+ * Security:
+ * - Requires ADMIN_API_KEY header for all requests
+ *
+ * Architecture:
+ * - Router (Adapter Layer) → Use Cases (Domain) → Repositories (Infrastructure)
+ * - Follows DDD/Hexagonal architecture
+ */
+
+const { Router } = require('express');
+const catchAsyncError = require('express-async-handler');
+const { createProcessRepository } = require('../../integrations/repositories/process-repository-factory');
+const {
+    TriggerDatabaseMigrationUseCase,
+    ValidationError: TriggerValidationError,
+} = require('../../database/use-cases/trigger-database-migration-use-case');
+const {
+    GetMigrationStatusUseCase,
+    ValidationError: GetValidationError,
+    NotFoundError,
+} = require('../../database/use-cases/get-migration-status-use-case');
+
+const router = Router();
+
+// Dependency injection (like health.js:34-70)
+const processRepository = createProcessRepository();
+const triggerMigrationUseCase = new TriggerDatabaseMigrationUseCase({
+    processRepository,
+    // Note: QueuerUtil is used directly in the use case (static utility)
+});
+const getStatusUseCase = new GetMigrationStatusUseCase({ processRepository });
+
+/**
+ * Admin API key validation middleware
+ * Matches pattern from health.js:72-88
+ */
+const validateApiKey = (req, res, next) => {
+    const apiKey = req.headers['x-api-key'];
+
+    if (!apiKey || apiKey !== process.env.ADMIN_API_KEY) {
+        console.error('Unauthorized access attempt to db-migrate endpoint');
+        return res.status(401).json({
+            status: 'error',
+            message: 'Unauthorized',
+        });
+    }
+
+    next();
+};
+
+// Apply API key validation to all routes
+router.use(validateApiKey);
+
+/**
+ * POST /db-migrate
+ *
+ * Trigger database migration (async via SQS queue)
+ *
+ * Request body:
+ * {
+ *   userId: string (optional, defaults to 'admin'),
+ *   dbType: 'postgresql' | 'mongodb',
+ *   stage: string (e.g., 'production', 'dev')
+ * }
+ *
+ * Response (202 Accepted):
+ * {
+ *   success: true,
+ *   processId: string,
+ *   state: 'INITIALIZING',
+ *   statusUrl: string,
+ *   message: string
+ * }
+ */
+router.post(
+    '/db-migrate',
+    catchAsyncError(async (req, res) => {
+        const { dbType, stage } = req.body;
+        // TODO: Extract userId from JWT token when auth is implemented
+        const userId = req.body.userId || 'admin';
+
+        console.log(`Migration trigger request: dbType=${dbType}, stage=${stage}, userId=${userId}`);
+
+        try {
+            const result = await triggerMigrationUseCase.execute({
+                userId,
+                dbType,
+                stage,
+            });
+
+            // 202 Accepted - request accepted but not completed
+            res.status(202).json(result);
+        } catch (error) {
+            // Handle validation errors (400 Bad Request)
+            if (error instanceof TriggerValidationError) {
+                return res.status(400).json({
+                    success: false,
+                    error: error.message,
+                });
+            }
+
+            // Re-throw other errors for global error handler
+            throw error;
+        }
+    })
+);
+
+/**
+ * GET /db-migrate/:processId
+ *
+ * Get migration status by process ID
+ *
+ * Response (200 OK):
+ * {
+ *   processId: string,
+ *   type: 'DATABASE_MIGRATION',
+ *   state: 'INITIALIZING' | 'RUNNING' | 'COMPLETED' | 'FAILED',
+ *   context: {
+ *     dbType: string,
+ *     stage: string,
+ *     migrationCommand: string (if started)
+ *   },
+ *   results: {
+ *     success: boolean (if completed),
+ *     duration: string (if completed),
+ *     error: string (if failed)
+ *   },
+ *   createdAt: string,
+ *   updatedAt: string
+ * }
+ */
+router.get(
+    '/db-migrate/:processId',
+    catchAsyncError(async (req, res) => {
+        const { processId } = req.params;
+
+        console.log(`Migration status request: processId=${processId}`);
+
+        try {
+            const status = await getStatusUseCase.execute({ processId });
+
+            res.status(200).json(status);
+        } catch (error) {
+            // Handle not found errors (404 Not Found)
+            if (error instanceof NotFoundError) {
+                return res.status(404).json({
+                    success: false,
+                    error: error.message,
+                });
+            }
+
+            // Handle validation errors (400 Bad Request)
+            if (error instanceof GetValidationError) {
+                return res.status(400).json({
+                    success: false,
+                    error: error.message,
+                });
+            }
+
+            // Re-throw other errors for global error handler
+            throw error;
+        }
+    })
+);
+
+module.exports = router;
+
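A usage sketch for the two endpoints (not part of the published diff), written with Node 18+ built-in fetch. The base URL, the API key value, and the assumption that the router is mounted at the API root are placeholders.

// Hypothetical client-side sketch; BASE_URL is a placeholder deployment URL.
const BASE_URL = 'https://example.execute-api.us-east-1.amazonaws.com/dev';
const API_KEY = process.env.ADMIN_API_KEY;

async function runMigration() {
    // 1. Queue the migration (the router responds 202 with a processId)
    const triggerRes = await fetch(`${BASE_URL}/db-migrate`, {
        method: 'POST',
        headers: { 'content-type': 'application/json', 'x-api-key': API_KEY },
        body: JSON.stringify({ dbType: 'postgresql', stage: 'dev' }),
    });
    const { processId } = await triggerRes.json();

    // 2. Poll the status endpoint until the worker reports a terminal state
    let state = 'INITIALIZING';
    while (state === 'INITIALIZING' || state === 'RUNNING') {
        await new Promise((resolve) => setTimeout(resolve, 5000));
        const statusRes = await fetch(`${BASE_URL}/db-migrate/${processId}`, {
            headers: { 'x-api-key': API_KEY },
        });
        ({ state } = await statusRes.json());
        console.log(`Migration ${processId}: ${state}`);
    }
}

runMigration().catch(console.error);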
package/handlers/workers/db-migration.js
CHANGED
@@ -46,10 +46,20 @@ const {
     MigrationError,
     ValidationError,
 } = require('../../database/use-cases/run-database-migration-use-case');
+const {
+    UpdateProcessState,
+} = require('../../integrations/use-cases/update-process-state');
+const {
+    createProcessRepository,
+} = require('../../integrations/repositories/process-repository-factory');
 
 // Inject prisma-runner as dependency
 const prismaRunner = require('../../database/utils/prisma-runner');
 
+// Create process repository and use case for tracking migration progress
+const processRepository = createProcessRepository();
+const updateProcessState = new UpdateProcessState({ processRepository });
+
 /**
  * Sanitizes error messages to prevent credential leaks
  * @param {string} errorMessage - Error message that might contain credentials
@@ -85,9 +95,48 @@ function sanitizeDatabaseUrl(url) {
     return url.replace(/(:\/\/)([^:]+):([^@]+)@/, '$1***:***@');
 }
 
+/**
+ * Extract migration parameters from SQS event or direct invocation
+ * @param {Object} event - Lambda event (SQS or direct)
+ * @returns {Object} Extracted parameters { processId, dbType, stage }
+ */
+function extractMigrationParams(event) {
+    let processId = null;
+    let dbType = null;
+    let stage = null;
+
+    // Check if this is an SQS event
+    if (event.Records && event.Records.length > 0) {
+        // SQS event - extract from message body
+        const message = JSON.parse(event.Records[0].body);
+        processId = message.processId;
+        dbType = message.dbType;
+        stage = message.stage;
+
+        console.log('SQS event detected');
+        console.log(`  Process ID: ${processId}`);
+        console.log(`  DB Type: ${dbType}`);
+        console.log(`  Stage: ${stage}`);
+    } else {
+        // Direct invocation - use event properties or environment variables
+        processId = event.processId || null;
+        dbType = event.dbType || process.env.DB_TYPE || 'postgresql';
+        stage = event.stage || process.env.STAGE || 'production';
+
+        console.log('Direct invocation detected');
+        if (processId) {
+            console.log(`  Process ID: ${processId}`);
+        }
+        console.log(`  DB Type: ${dbType}`);
+        console.log(`  Stage: ${stage}`);
+    }
+
+    return { processId, dbType, stage };
+}
+
 /**
  * Lambda handler entry point
- * @param {Object} event - Lambda event (
+ * @param {Object} event - Lambda event (SQS message or direct invocation)
  * @param {Object} context - Lambda context (contains AWS request ID, timeout info)
  * @returns {Promise<Object>} Response with statusCode and body
  */
@@ -102,10 +151,11 @@ exports.handler = async (event, context) => {
         remainingTimeInMillis: context.getRemainingTimeInMillis(),
     }, null, 2));
 
+    // Extract migration parameters from event
+    const { processId, dbType, stage } = extractMigrationParams(event);
+
     // Get environment variables
     const databaseUrl = process.env.DATABASE_URL;
-    const dbType = process.env.DB_TYPE || 'postgresql';
-    const stage = process.env.STAGE || 'production';
 
     try {
         // Validate DATABASE_URL is set
@@ -126,6 +176,14 @@ exports.handler = async (event, context) => {
         console.log(`  Stage: ${stage}`);
         console.log(`  Database URL: ${sanitizeDatabaseUrl(databaseUrl)}`);
 
+        // Update process state to RUNNING (if processId provided)
+        if (processId) {
+            console.log(`\n✓ Updating process state to RUNNING: ${processId}`);
+            await updateProcessState.execute(processId, 'RUNNING', {
+                startedAt: new Date().toISOString(),
+            });
+        }
+
         // Create use case with dependencies (Dependency Injection)
         const runDatabaseMigrationUseCase = new RunDatabaseMigrationUseCase({
             prismaRunner,
@@ -152,17 +210,32 @@ exports.handler = async (event, context) => {
         console.log(`  Command: ${result.command}`);
         console.log('========================================');
 
+        // Update process state to COMPLETED (if processId provided)
+        if (processId) {
+            console.log(`\n✓ Updating process state to COMPLETED: ${processId}`);
+            await updateProcessState.execute(processId, 'COMPLETED', {
+                completedAt: new Date().toISOString(),
+                migrationCommand: result.command,
+            });
+        }
+
         // Return success response (adapter layer - HTTP mapping)
+        const responseBody = {
+            success: true,
+            message: result.message,
+            dbType: result.dbType,
+            stage: result.stage,
+            migrationCommand: result.command,
+            timestamp: new Date().toISOString(),
+        };
+
+        if (processId) {
+            responseBody.processId = processId;
+        }
+
         return {
             statusCode: 200,
-            body: JSON.stringify({
-                success: true,
-                message: result.message,
-                dbType: result.dbType,
-                stage: result.stage,
-                migrationCommand: result.command,
-                timestamp: new Date().toISOString(),
-            }),
+            body: JSON.stringify(responseBody),
         };
 
     } catch (error) {
@@ -194,15 +267,36 @@ exports.handler = async (event, context) => {
         // Sanitize error message before returning
         const sanitizedError = sanitizeError(errorMessage);
 
+        // Update process state to FAILED (if processId provided)
+        if (processId) {
+            try {
+                console.log(`\n✓ Updating process state to FAILED: ${processId}`);
+                await updateProcessState.execute(processId, 'FAILED', {
+                    failedAt: new Date().toISOString(),
+                    error: sanitizedError,
+                    errorType: error.name || 'Error',
+                });
+            } catch (updateError) {
+                console.error('Failed to update process state:', updateError);
+                // Don't fail the entire handler if state update fails
+            }
+        }
+
+        const errorBody = {
+            success: false,
+            error: sanitizedError,
+            errorType: error.name || 'Error',
+            // Only include stack traces in development environments
+            ...(stage === 'dev' || stage === 'local' || stage === 'test' ? { stack: error.stack } : {}),
+        };
+
+        if (processId) {
+            errorBody.processId = processId;
+        }
+
         return {
             statusCode,
-            body: JSON.stringify({
-                success: false,
-                error: sanitizedError,
-                errorType: error.name || 'Error',
-                // Only include stack traces in development environments
-                ...(stage === 'dev' || stage === 'local' || stage === 'test' ? { stack: error.stack } : {}),
-            }),
+            body: JSON.stringify(errorBody),
         };
     }
 };
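The new extractMigrationParams function accepts either delivery path. Illustrative event payloads (all values are placeholders) that match the parsing logic above:

// SQS delivery: parameters arrive JSON-encoded in Records[0].body.
const sqsEvent = {
    Records: [
        {
            body: JSON.stringify({
                processId: 'example-process-id', // hypothetical Process id
                dbType: 'postgresql',
                stage: 'production',
            }),
        },
    ],
};

// Direct invocation: parameters sit on the event itself; dbType and stage
// fall back to the DB_TYPE / STAGE environment variables when omitted.
const directEvent = {
    dbType: 'mongodb',
    stage: 'dev',
};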
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@friggframework/core",
   "prettier": "@friggframework/prettier-config",
-  "version": "2.0.0--canary.461.bdbd057.0",
+  "version": "2.0.0--canary.461.2ca8c89.0",
   "dependencies": {
     "@aws-sdk/client-apigatewaymanagementapi": "^3.588.0",
     "@aws-sdk/client-kms": "^3.588.0",
@@ -37,9 +37,9 @@
     }
   },
   "devDependencies": {
-    "@friggframework/eslint-config": "2.0.0--canary.461.
-    "@friggframework/prettier-config": "2.0.0--canary.461.
-    "@friggframework/test": "2.0.0--canary.461.
+    "@friggframework/eslint-config": "2.0.0--canary.461.2ca8c89.0",
+    "@friggframework/prettier-config": "2.0.0--canary.461.2ca8c89.0",
+    "@friggframework/test": "2.0.0--canary.461.2ca8c89.0",
     "@prisma/client": "^6.17.0",
     "@types/lodash": "4.17.15",
     "@typescript-eslint/eslint-plugin": "^8.0.0",
@@ -79,5 +79,5 @@
   "publishConfig": {
     "access": "public"
   },
-  "gitHead": "
+  "gitHead": "2ca8c892d55c02f9040a052d8d077f6f2459e802"
 }