@friggframework/core 2.0.0--canary.458.c150d9a.0 → 2.0.0--canary.459.51231dd.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/database/config.js +29 -1
- package/database/use-cases/test-encryption-use-case.js +6 -5
- package/docs/PROCESS_MANAGEMENT_QUEUE_SPEC.md +517 -0
- package/integrations/repositories/process-repository-factory.js +46 -0
- package/integrations/repositories/process-repository-interface.js +90 -0
- package/integrations/repositories/process-repository-mongo.js +190 -0
- package/integrations/repositories/process-repository-postgres.js +190 -0
- package/integrations/use-cases/create-process.js +128 -0
- package/integrations/use-cases/create-process.test.js +178 -0
- package/integrations/use-cases/get-process.js +87 -0
- package/integrations/use-cases/get-process.test.js +190 -0
- package/integrations/use-cases/index.js +8 -0
- package/integrations/use-cases/update-process-metrics.js +165 -0
- package/integrations/use-cases/update-process-metrics.test.js +308 -0
- package/integrations/use-cases/update-process-state.js +119 -0
- package/integrations/use-cases/update-process-state.test.js +256 -0
- package/package.json +5 -5
- package/prisma-mongodb/schema.prisma +44 -0
package/database/config.js
CHANGED
|
@@ -37,7 +37,35 @@ function getDatabaseType() {
|
|
|
37
37
|
);
|
|
38
38
|
}
|
|
39
39
|
|
|
40
|
-
|
|
40
|
+
let backendModule;
|
|
41
|
+
try {
|
|
42
|
+
backendModule = require(backendIndexPath);
|
|
43
|
+
} catch (requireError) {
|
|
44
|
+
// Extract the actual file with the error from the stack trace
|
|
45
|
+
// Skip internal Node.js files (node:internal/*) and find first user file
|
|
46
|
+
let errorFile = 'unknown file';
|
|
47
|
+
const stackLines = requireError.stack?.split('\n') || [];
|
|
48
|
+
|
|
49
|
+
for (const line of stackLines) {
|
|
50
|
+
// Match file paths in stack trace, excluding node:internal
|
|
51
|
+
const match = line.match(/\(([^)]+\.js):\d+:\d+\)/) || line.match(/at ([^(]+\.js):\d+:\d+/);
|
|
52
|
+
if (match && match[1] && !match[1].includes('node:internal')) {
|
|
53
|
+
errorFile = match[1];
|
|
54
|
+
break;
|
|
55
|
+
}
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
// Provide better error context for syntax/runtime errors
|
|
59
|
+
throw new Error(
|
|
60
|
+
`[Frigg] Failed to load app definition from ${backendIndexPath}\n` +
|
|
61
|
+
`Error: ${requireError.message}\n` +
|
|
62
|
+
`File with error: ${errorFile}\n` +
|
|
63
|
+
`\nFull stack trace:\n${requireError.stack}\n\n` +
|
|
64
|
+
'This error occurred while loading your app definition or its dependencies. ' +
|
|
65
|
+
'Check the file listed above for syntax errors (trailing commas, missing brackets, etc.)'
|
|
66
|
+
);
|
|
67
|
+
}
|
|
68
|
+
|
|
41
69
|
database = backendModule?.Definition?.database;
|
|
42
70
|
|
|
43
71
|
if (!database) {
|
|
@@ -83,13 +83,14 @@ class TestEncryptionUseCase {
|
|
|
83
83
|
* @private
|
|
84
84
|
*/
|
|
85
85
|
_mapTestDataToCredential(testData) {
|
|
86
|
+
// Note: Using camelCase for Prisma compatibility (both MongoDB and PostgreSQL)
|
|
87
|
+
// Changed from snake_case (user_id, entity_id) to camelCase (userId, externalId)
|
|
86
88
|
return {
|
|
87
|
-
|
|
88
|
-
entity_id: 'test-encryption-entity',
|
|
89
|
+
externalId: 'test-encryption-entity',
|
|
89
90
|
data: {
|
|
90
|
-
access_token: testData.testSecret,
|
|
91
|
-
refresh_token: testData.nestedSecret?.value,
|
|
92
|
-
domain: testData.normalField,
|
|
91
|
+
access_token: testData.testSecret, // Encrypted field
|
|
92
|
+
refresh_token: testData.nestedSecret?.value, // Encrypted field
|
|
93
|
+
domain: testData.normalField, // Not encrypted
|
|
93
94
|
},
|
|
94
95
|
};
|
|
95
96
|
}
|
|
@@ -0,0 +1,517 @@
|
|
|
1
|
+
# Process Management FIFO Queue Specification
|
|
2
|
+
|
|
3
|
+
## Problem Statement
|
|
4
|
+
|
|
5
|
+
The current BaseCRMIntegration implementation has a **race condition** in process record updates:
|
|
6
|
+
|
|
7
|
+
1. Multiple queue workers process batches concurrently
|
|
8
|
+
2. Each worker calls `processManager.updateMetrics()`
|
|
9
|
+
3. Multiple workers read-modify-write the same process record simultaneously
|
|
10
|
+
4. **Result**: Lost updates, inconsistent metrics, potential data corruption
|
|
11
|
+
|
|
12
|
+
## Current Race Condition Example
|
|
13
|
+
|
|
14
|
+
```
|
|
15
|
+
Time 1: Worker A reads process.results.aggregateData.totalSynced = 100
|
|
16
|
+
Time 2: Worker B reads process.results.aggregateData.totalSynced = 100
|
|
17
|
+
Time 3: Worker A adds 50 → writes totalSynced = 150
|
|
18
|
+
Time 4: Worker B adds 30 → writes totalSynced = 130 (overwrites Worker A's update!)
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
## Solution: FIFO Queue for Process Updates
|
|
22
|
+
|
|
23
|
+
### Design Overview
|
|
24
|
+
|
|
25
|
+
Create a dedicated FIFO SQS queue in **Frigg Core** for all process management operations:
|
|
26
|
+
|
|
27
|
+
- **Queue Type**: FIFO (First-In-First-Out)
|
|
28
|
+
- **Message Group ID**: `process-{processId}` (ensures ordered processing per process)
|
|
29
|
+
- **Message Deduplication**: Enabled (prevents duplicate updates)
|
|
30
|
+
- **Dead Letter Queue**: Enabled (captures failed updates)
|
|
31
|
+
|
|
32
|
+
### Architecture
|
|
33
|
+
|
|
34
|
+
```
|
|
35
|
+
┌─────────────────────────────────────────────────────────────┐
|
|
36
|
+
│ Current Flow (Race Condition) │
|
|
37
|
+
├─────────────────────────────────────────────────────────────┤
|
|
38
|
+
│ Worker A ──┐ │
|
|
39
|
+
│ Worker B ──┼──→ ProcessManager.updateMetrics() │
|
|
40
|
+
│ Worker C ──┘ │
|
|
41
|
+
│ └──→ ProcessRepository.update() │
|
|
42
|
+
│ (Race condition!) │
|
|
43
|
+
└─────────────────────────────────────────────────────────────┘
|
|
44
|
+
|
|
45
|
+
┌─────────────────────────────────────────────────────────────┐
|
|
46
|
+
│ Proposed Flow (FIFO Queue) │
|
|
47
|
+
├─────────────────────────────────────────────────────────────┤
|
|
48
|
+
│ Worker A ──┐ │
|
|
49
|
+
│ Worker B ──┼──→ QueueManager.queueProcessUpdate() │
|
|
50
|
+
│ Worker C ──┘ │
|
|
51
|
+
│ └──→ ProcessManagementFIFOQueue │
|
|
52
|
+
│ └──→ ProcessUpdateHandler │
|
|
53
|
+
│ └──→ ProcessRepository.update()│
|
|
54
|
+
│ (Ordered, no races!) │
|
|
55
|
+
└─────────────────────────────────────────────────────────────┘
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
## Frigg Core Implementation
|
|
59
|
+
|
|
60
|
+
### 1. Process Management Queue Factory
|
|
61
|
+
|
|
62
|
+
**File**: `/packages/core/integrations/queues/process-management-queue-factory.js`
|
|
63
|
+
|
|
64
|
+
```javascript
|
|
65
|
+
const { SQS } = require('aws-sdk');
|
|
66
|
+
|
|
67
|
+
/**
 * Creates the FIFO queue used for process management operations and sends
 * update messages to it. One MessageGroupId per process guarantees strictly
 * ordered processing for that process while distinct processes stay parallel.
 */
class ProcessManagementQueueFactory {
    constructor({ region = 'us-east-1' } = {}) {
        this.sqs = new SQS({ region });
    }

    /**
     * Create the FIFO queue (and its FIFO dead-letter queue) for process updates.
     * @param {string} integrationName - Integration name (for queue naming)
     * @returns {Promise<string>} Queue URL of the main process-management queue
     */
    async createProcessManagementQueue(integrationName) {
        const queueName = `${integrationName}-process-management.fifo`;
        const dlqName = `${integrationName}-process-management-dlq.fifo`;

        // A FIFO source queue requires a FIFO DLQ, and RedrivePolicy needs the
        // DLQ's ARN — so the DLQ must exist first. createQueue is idempotent
        // for identical attributes, so repeated calls are safe.
        const dlqResult = await this.sqs
            .createQueue({
                QueueName: dlqName,
                Attributes: {
                    FifoQueue: 'true',
                    MessageRetentionPeriod: '1209600', // 14 days
                },
            })
            .promise();

        const dlqAttrs = await this.sqs
            .getQueueAttributes({
                QueueUrl: dlqResult.QueueUrl,
                AttributeNames: ['QueueArn'],
            })
            .promise();

        const params = {
            QueueName: queueName,
            Attributes: {
                FifoQueue: 'true',
                ContentBasedDeduplication: 'true',
                MessageRetentionPeriod: '1209600', // 14 days
                // SQS attribute name is "VisibilityTimeout" —
                // "VisibilityTimeoutSeconds" is rejected as invalid.
                VisibilityTimeout: '30',
                DelaySeconds: '0',
                ReceiveMessageWaitTimeSeconds: '20', // Long polling
                // Dead-letter config is a single JSON-encoded RedrivePolicy
                // attribute; DeadLetterTargetArn / MaxReceiveCount are not
                // valid top-level queue attributes.
                RedrivePolicy: JSON.stringify({
                    deadLetterTargetArn: dlqAttrs.Attributes.QueueArn,
                    maxReceiveCount: '3', // Retry failed messages 3 times
                }),
            },
        };

        const result = await this.sqs.createQueue(params).promise();
        return result.QueueUrl;
    }

    /**
     * Send process update message to FIFO queue
     * @param {string} queueUrl - FIFO queue URL
     * @param {string} processId - Process ID (used as MessageGroupId)
     * @param {string} operation - Operation type (UPDATE_STATE, UPDATE_METRICS, COMPLETE)
     * @param {Object} data - Operation data
     * @returns {Promise<void>}
     */
    async sendProcessUpdate(queueUrl, processId, operation, data) {
        const { randomUUID } = require('node:crypto');
        const params = {
            QueueUrl: queueUrl,
            MessageBody: JSON.stringify({
                processId,
                operation,
                data,
                timestamp: new Date().toISOString(),
            }),
            MessageGroupId: `process-${processId}`,
            // Date.now() has only millisecond granularity: two updates for the
            // same process+operation within the same ms would share a
            // deduplication ID and SQS would silently drop one — exactly the
            // lost-update bug this queue exists to prevent. A UUID makes every
            // legitimate update unique.
            MessageDeduplicationId: `${processId}-${operation}-${randomUUID()}`,
        };

        await this.sqs.sendMessage(params).promise();
    }
}
|
|
126
|
+
|
|
127
|
+
module.exports = { ProcessManagementQueueFactory };
|
|
128
|
+
```
|
|
129
|
+
|
|
130
|
+
### 2. Process Update Handler
|
|
131
|
+
|
|
132
|
+
**File**: `/packages/core/integrations/handlers/process-update-handler.js`
|
|
133
|
+
|
|
134
|
+
```javascript
|
|
135
|
+
const {
|
|
136
|
+
UpdateProcessState,
|
|
137
|
+
UpdateProcessMetrics,
|
|
138
|
+
GetProcess,
|
|
139
|
+
} = require('../use-cases');
|
|
140
|
+
const { createProcessRepository } = require('../repositories/process-repository-factory');
|
|
141
|
+
|
|
142
|
+
/**
 * Handler for process management FIFO queue messages.
 * Processes updates in order per process ID (ordering is provided by the
 * queue's MessageGroupId; this handler just applies one message at a time).
 */
class ProcessUpdateHandler {
    constructor() {
        const processRepository = createProcessRepository();
        this.updateProcessStateUseCase = new UpdateProcessState({ processRepository });
        this.updateProcessMetricsUseCase = new UpdateProcessMetrics({ processRepository });
        this.getProcessUseCase = new GetProcess({ processRepository });
    }

    /**
     * Handle process update message from FIFO queue.
     * @param {Object} message - SQS message
     * @param {string} message.body - Message body (JSON string with
     *   processId, operation, data)
     * @returns {Promise<void>}
     * @throws {Error} On unknown operations or use-case failures — rethrown
     *   so SQS retries the message and eventually routes it to the DLQ.
     */
    async handle(message) {
        try {
            // Default data to {} so a malformed producer payload fails in the
            // use case with a clear error instead of a TypeError here.
            const { processId, operation, data = {} } = JSON.parse(message.body);

            switch (operation) {
                case 'UPDATE_STATE':
                    await this.updateProcessStateUseCase.execute(
                        processId,
                        data.state,
                        data.contextUpdates
                    );
                    break;

                case 'UPDATE_METRICS':
                    await this.updateProcessMetricsUseCase.execute(
                        processId,
                        data.metricsUpdate
                    );
                    break;

                case 'COMPLETE_PROCESS':
                    await this.updateProcessStateUseCase.execute(
                        processId,
                        'COMPLETED',
                        { endTime: new Date().toISOString() }
                    );
                    break;

                case 'HANDLE_ERROR':
                    // Guard with ?. — the producer may omit the error payload;
                    // the process must still transition to ERROR.
                    await this.updateProcessStateUseCase.execute(
                        processId,
                        'ERROR',
                        {
                            error: data.error?.message ?? 'Unknown error',
                            errorStack: data.error?.stack,
                            errorTimestamp: new Date().toISOString()
                        }
                    );
                    break;

                default:
                    throw new Error(`Unknown process operation: ${operation}`);
            }

            console.log(`Process update completed: ${operation} for process ${processId}`);
        } catch (error) {
            console.error('Process update failed:', error);
            throw error; // Will trigger SQS retry/DLQ
        }
    }
}
|
|
211
|
+
|
|
212
|
+
module.exports = { ProcessUpdateHandler };
|
|
213
|
+
```
|
|
214
|
+
|
|
215
|
+
### 3. QueueManager Enhancement
|
|
216
|
+
|
|
217
|
+
**File**: `/packages/core/integrations/queues/process-queue-manager.js`
|
|
218
|
+
|
|
219
|
+
```javascript
|
|
220
|
+
const { ProcessManagementQueueFactory } = require('./process-management-queue-factory');
|
|
221
|
+
|
|
222
|
+
/**
 * Manages process update operations via FIFO queue.
 * Prevents race conditions in concurrent process updates.
 */
class ProcessQueueManager {
    constructor({ region = 'us-east-1' } = {}) {
        this.factory = new ProcessManagementQueueFactory({ region });
        // Cache the *promise* of each queue URL rather than the resolved
        // value: concurrent first calls for the same integration then share a
        // single createQueue request instead of racing to create the queue
        // several times.
        this.queueUrls = new Map(); // integrationName -> Promise<string>
    }

    /**
     * Get or create FIFO queue for integration.
     * @param {string} integrationName - Integration name
     * @returns {Promise<string>} Queue URL
     */
    async getProcessQueueUrl(integrationName) {
        if (!this.queueUrls.has(integrationName)) {
            const urlPromise = this.factory
                .createProcessManagementQueue(integrationName)
                .catch((error) => {
                    // Evict a failed attempt so a later call retries instead
                    // of returning the same rejected promise forever.
                    this.queueUrls.delete(integrationName);
                    throw error;
                });
            this.queueUrls.set(integrationName, urlPromise);
        }
        return this.queueUrls.get(integrationName);
    }

    /**
     * Queue process state update.
     * @param {string} integrationName - Integration name
     * @param {string} processId - Process ID
     * @param {string} state - New state
     * @param {Object} contextUpdates - Context updates
     * @returns {Promise<void>}
     */
    async queueStateUpdate(integrationName, processId, state, contextUpdates = {}) {
        const queueUrl = await this.getProcessQueueUrl(integrationName);
        await this.factory.sendProcessUpdate(queueUrl, processId, 'UPDATE_STATE', {
            state,
            contextUpdates
        });
    }

    /**
     * Queue process metrics update.
     * @param {string} integrationName - Integration name
     * @param {string} processId - Process ID
     * @param {Object} metricsUpdate - Metrics to add
     * @returns {Promise<void>}
     */
    async queueMetricsUpdate(integrationName, processId, metricsUpdate) {
        const queueUrl = await this.getProcessQueueUrl(integrationName);
        await this.factory.sendProcessUpdate(queueUrl, processId, 'UPDATE_METRICS', {
            metricsUpdate
        });
    }

    /**
     * Queue process completion.
     * @param {string} integrationName - Integration name
     * @param {string} processId - Process ID
     * @returns {Promise<void>}
     */
    async queueProcessCompletion(integrationName, processId) {
        const queueUrl = await this.getProcessQueueUrl(integrationName);
        await this.factory.sendProcessUpdate(queueUrl, processId, 'COMPLETE_PROCESS', {});
    }

    /**
     * Queue process error handling.
     * @param {string} integrationName - Integration name
     * @param {string} processId - Process ID
     * @param {Error} error - Error object
     * @returns {Promise<void>}
     */
    async queueErrorHandling(integrationName, processId, error) {
        const queueUrl = await this.getProcessQueueUrl(integrationName);
        await this.factory.sendProcessUpdate(queueUrl, processId, 'HANDLE_ERROR', {
            // Serialize only message/stack — Error objects do not survive
            // JSON.stringify intact. Guard with ?. in case a non-Error is passed.
            error: {
                message: error?.message,
                stack: error?.stack
            }
        });
    }
}
|
|
303
|
+
|
|
304
|
+
module.exports = { ProcessQueueManager };
|
|
305
|
+
```
|
|
306
|
+
|
|
307
|
+
## Integration with BaseCRMIntegration
|
|
308
|
+
|
|
309
|
+
### Updated ProcessManager
|
|
310
|
+
|
|
311
|
+
**File**: `backend/src/base/services/ProcessManager.js` (in the consuming app)
|
|
312
|
+
|
|
313
|
+
```javascript
|
|
314
|
+
const { ProcessQueueManager } = require('@friggframework/core/integrations/queues/process-queue-manager');
|
|
315
|
+
|
|
316
|
+
/**
 * ProcessManager facade: routes every process mutation through the
 * process-management FIFO queue instead of writing to the repository
 * directly, eliminating concurrent read-modify-write races.
 *
 * NOTE(review): this makes updates eventually consistent — each method's
 * promise resolves when the message is *queued*, not when the database
 * record is updated. Callers must not read process state back immediately
 * after calling these methods and expect to see the change.
 */
class ProcessManager {
    constructor({
        createProcessUseCase,
        updateProcessStateUseCase,
        updateProcessMetricsUseCase,
        getProcessUseCase,
        integrationName, // NEW: For FIFO queue
    }) {
        // ... existing constructor ...
        // Uses the ProcessQueueManager default region ('us-east-1') —
        // presumably region should come from deploy config; TODO confirm.
        this.processQueueManager = new ProcessQueueManager();
        this.integrationName = integrationName;
    }

    /**
     * Update process state via FIFO queue (prevents race conditions)
     * @param {string} processId - Process ID to update
     * @param {string} state - New state
     * @param {Object} contextUpdates - Context updates
     * @returns {Promise<void>} resolves once the update message is enqueued
     */
    async updateState(processId, state, contextUpdates = {}) {
        await this.processQueueManager.queueStateUpdate(
            this.integrationName,
            processId,
            state,
            contextUpdates
        );
    }

    /**
     * Update process metrics via FIFO queue (prevents race conditions)
     * @param {string} processId - Process ID to update
     * @param {Object} metricsUpdate - Metrics to add
     * @returns {Promise<void>} resolves once the update message is enqueued
     */
    async updateMetrics(processId, metricsUpdate) {
        await this.processQueueManager.queueMetricsUpdate(
            this.integrationName,
            processId,
            metricsUpdate
        );
    }

    /**
     * Complete process via FIFO queue
     * @param {string} processId - Process ID to complete
     * @returns {Promise<void>} resolves once the completion message is enqueued
     */
    async completeProcess(processId) {
        await this.processQueueManager.queueProcessCompletion(
            this.integrationName,
            processId
        );
    }

    /**
     * Handle process error via FIFO queue
     * @param {string} processId - Process ID to update
     * @param {Error} error - Error object
     * @returns {Promise<void>} resolves once the error message is enqueued
     */
    async handleError(processId, error) {
        await this.processQueueManager.queueErrorHandling(
            this.integrationName,
            processId,
            error
        );
    }
}
|
|
385
|
+
```
|
|
386
|
+
|
|
387
|
+
## Serverless Infrastructure
|
|
388
|
+
|
|
389
|
+
### FIFO Queue Creation
|
|
390
|
+
|
|
391
|
+
**File**: `/packages/devtools/infrastructure/serverless-template.js`
|
|
392
|
+
|
|
393
|
+
```javascript
|
|
394
|
+
/**
 * Attach a process-management FIFO queue, its dead-letter queue, and an
 * SQS-triggered handler function to the serverless definition for each
 * integration in the app definition.
 *
 * @param {Object} definition - Serverless template (mutated in place);
 *   must provide `resources.Resources` and `functions` objects.
 * @param {Object} AppDefinition - App definition whose `integrations` each
 *   expose `Definition.name`.
 */
const attachProcessManagementQueues = (definition, AppDefinition) => {
    for (const integration of AppDefinition.integrations) {
        const integrationName = integration.Definition.name;

        // Create FIFO queue for process management
        const processQueueName = `${integrationName}ProcessManagementQueue`;
        const processDLQName = `${integrationName}ProcessManagementDLQ`;

        // FIFO Queue — per-process ordering comes from the producer's
        // MessageGroupId; different processes remain parallel.
        definition.resources.Resources[processQueueName] = {
            Type: 'AWS::SQS::Queue',
            Properties: {
                QueueName: `${integrationName}-process-management.fifo`,
                FifoQueue: true,
                ContentBasedDeduplication: true,
                MessageRetentionPeriod: 1209600, // 14 days
                // CloudFormation property is "VisibilityTimeout" —
                // "VisibilityTimeoutSeconds" is not a valid AWS::SQS::Queue
                // property and fails template validation.
                VisibilityTimeout: 30,
                DelaySeconds: 0,
                ReceiveMessageWaitTimeSeconds: 20, // Long polling
                RedrivePolicy: {
                    deadLetterTargetArn: { 'Fn::GetAtt': [processDLQName, 'Arn'] },
                    maxReceiveCount: 3,
                },
            },
        };

        // Dead Letter Queue — must itself be FIFO to pair with a FIFO source.
        definition.resources.Resources[processDLQName] = {
            Type: 'AWS::SQS::Queue',
            Properties: {
                QueueName: `${integrationName}-process-management-dlq.fifo`,
                FifoQueue: true,
                MessageRetentionPeriod: 1209600,
            },
        };

        // Process Update Handler Function — reservedConcurrency: 1 serializes
        // all process updates for the integration.
        const processHandlerName = `${integrationName}ProcessUpdateHandler`;
        definition.functions[processHandlerName] = {
            handler: 'node_modules/@friggframework/core/handlers/process-update-handler.handler',
            reservedConcurrency: 1, // Process updates sequentially per integration
            events: [{
                sqs: {
                    arn: { 'Fn::GetAtt': [processQueueName, 'Arn'] },
                    batchSize: 1, // Process one update at a time
                    // NOTE: maximumBatchingWindowInSeconds removed — Lambda
                    // does not support a batching window for FIFO queue event
                    // sources; including it fails deployment.
                },
            }],
            timeout: 30,
            environment: {
                INTEGRATION_NAME: integrationName,
            },
        };
    }
};
|
|
449
|
+
```
|
|
450
|
+
|
|
451
|
+
## Benefits
|
|
452
|
+
|
|
453
|
+
### ✅ Race Condition Prevention
|
|
454
|
+
- FIFO queue ensures ordered processing per process ID
|
|
455
|
+
- MessageGroupId = `process-{processId}` guarantees sequential updates
|
|
456
|
+
- No more lost updates or inconsistent metrics
|
|
457
|
+
|
|
458
|
+
### ✅ Cost Optimization
|
|
459
|
+
- Only one FIFO queue per integration (not per process)
|
|
460
|
+
- MessageGroupId provides ordering without expensive per-process queues
|
|
461
|
+
- Long polling reduces API calls
|
|
462
|
+
|
|
463
|
+
### ✅ Reliability
|
|
464
|
+
- Dead Letter Queue captures failed updates
|
|
465
|
+
- Automatic redelivery: each message is retried up to `maxReceiveCount` times before moving to the DLQ
|
|
466
|
+
- Content-based deduplication prevents duplicate processing
|
|
467
|
+
|
|
468
|
+
### ✅ Scalability
|
|
469
|
+
- Each integration has its own process management queue
|
|
470
|
+
- Process updates don't block data processing
|
|
471
|
+
- Can scale process update handlers independently
|
|
472
|
+
|
|
473
|
+
## Migration Strategy
|
|
474
|
+
|
|
475
|
+
### Phase 1: Current Implementation (Native Queue)
|
|
476
|
+
- Use existing integration queue for process updates
|
|
477
|
+
- Accept potential race conditions for now
|
|
478
|
+
- Focus on core functionality
|
|
479
|
+
|
|
480
|
+
### Phase 2: FIFO Queue Implementation
|
|
481
|
+
- Implement FIFO queue infrastructure in Frigg Core
|
|
482
|
+
- Update ProcessManager to use FIFO queue
|
|
483
|
+
- Deploy with feature flag
|
|
484
|
+
|
|
485
|
+
### Phase 3: Full Migration
|
|
486
|
+
- Switch all integrations to FIFO queue
|
|
487
|
+
- Remove native queue process update code
|
|
488
|
+
- Monitor for race condition elimination
|
|
489
|
+
|
|
490
|
+
## Cost Analysis
|
|
491
|
+
|
|
492
|
+
### FIFO Queue Costs (per integration)
|
|
493
|
+
- **Queue Creation**: Free
|
|
494
|
+
- **Message Requests**: ~$0.50 per million FIFO requests (SQS bills per request; message storage itself is not charged)
|
|
495
|
+
- **Message Processing**: $0.40 per million requests
|
|
496
|
+
- **Example**: 10 integrations, 1000 process updates/day = ~$2.40/month
|
|
497
|
+
|
|
498
|
+
### Benefits vs Costs
|
|
499
|
+
- **Cost**: ~$2.40/month for 10 integrations
|
|
500
|
+
- **Benefit**: Eliminates race conditions, ensures data consistency
|
|
501
|
+
- **ROI**: High - prevents data corruption and debugging time
|
|
502
|
+
|
|
503
|
+
## Implementation Priority
|
|
504
|
+
|
|
505
|
+
**High Priority** - Race conditions in process updates can cause:
|
|
506
|
+
- Lost sync progress
|
|
507
|
+
- Inconsistent metrics
|
|
508
|
+
- Difficult debugging
|
|
509
|
+
- Data integrity issues
|
|
510
|
+
|
|
511
|
+
**Recommended Timeline**:
|
|
512
|
+
1. **Week 1**: Implement FIFO queue infrastructure in Frigg Core
|
|
513
|
+
2. **Week 2**: Update ProcessManager to use FIFO queue
|
|
514
|
+
3. **Week 3**: Deploy and test with one integration
|
|
515
|
+
4. **Week 4**: Roll out to all integrations
|
|
516
|
+
|
|
517
|
+
This solution provides a robust, scalable approach to process management while maintaining the performance benefits of concurrent data processing.
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
const { ProcessRepositoryMongo } = require('./process-repository-mongo');
|
|
2
|
+
const { ProcessRepositoryPostgres } = require('./process-repository-postgres');
|
|
3
|
+
const config = require('../../database/config');
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Process Repository Factory
|
|
7
|
+
* Creates the appropriate repository adapter based on database type
|
|
8
|
+
*
|
|
9
|
+
* This implements the Factory pattern for Hexagonal Architecture:
|
|
10
|
+
* - Reads database type from app definition (backend/index.js)
|
|
11
|
+
* - Returns correct adapter (MongoDB or PostgreSQL)
|
|
12
|
+
* - Provides clear error for unsupported databases
|
|
13
|
+
*
|
|
14
|
+
* Usage:
|
|
15
|
+
* ```javascript
|
|
16
|
+
* const repository = createProcessRepository();
|
|
17
|
+
* await repository.create({ userId, integrationId, name, type, state });
|
|
18
|
+
* ```
|
|
19
|
+
*
|
|
20
|
+
* @returns {ProcessRepositoryInterface} Configured repository adapter
|
|
21
|
+
* @throws {Error} If database type is not supported
|
|
22
|
+
*/
|
|
23
|
+
/**
 * Build the process repository adapter matching the configured database type.
 *
 * Factory for the Hexagonal Architecture port: reads the database type from
 * the app definition (via database config) and returns the matching adapter.
 *
 * @returns {ProcessRepositoryInterface} Configured repository adapter
 * @throws {Error} If the configured database type is not supported
 */
function createProcessRepository() {
    // Map of supported database types to adapter constructors. Map.get uses
    // the same strict-equality matching as the switch it replaces.
    const adapterFor = new Map([
        ['mongodb', () => new ProcessRepositoryMongo()],
        ['postgresql', () => new ProcessRepositoryPostgres()],
    ]);

    const dbType = config.DB_TYPE;
    const buildAdapter = adapterFor.get(dbType);

    if (buildAdapter === undefined) {
        throw new Error(
            `Unsupported database type: ${dbType}. Supported values: 'mongodb', 'postgresql'`
        );
    }

    return buildAdapter();
}
|
|
39
|
+
|
|
40
|
+
module.exports = {
|
|
41
|
+
createProcessRepository,
|
|
42
|
+
// Export adapters for direct testing
|
|
43
|
+
ProcessRepositoryMongo,
|
|
44
|
+
ProcessRepositoryPostgres,
|
|
45
|
+
};
|
|
46
|
+
|