s3db.js 10.0.15 → 10.0.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/s3db.cjs.js +1758 -1574
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.es.js +1758 -1574
- package/dist/s3db.es.js.map +1 -1
- package/package.json +1 -1
- package/src/plugins/eventual-consistency/analytics.js +668 -0
- package/src/plugins/eventual-consistency/config.js +120 -0
- package/src/plugins/eventual-consistency/consolidation.js +770 -0
- package/src/plugins/eventual-consistency/garbage-collection.js +126 -0
- package/src/plugins/eventual-consistency/helpers.js +179 -0
- package/src/plugins/eventual-consistency/index.js +455 -0
- package/src/plugins/eventual-consistency/locks.js +77 -0
- package/src/plugins/eventual-consistency/partitions.js +45 -0
- package/src/plugins/eventual-consistency/setup.js +298 -0
- package/src/plugins/eventual-consistency/transactions.js +119 -0
- package/src/plugins/eventual-consistency/utils.js +182 -0
- package/src/plugins/eventual-consistency.plugin.js +216 -52
- package/src/plugins/index.js +1 -1
package/src/plugins/eventual-consistency/consolidation.js (new file)
@@ -0,0 +1,770 @@
+/**
+ * Consolidation logic for EventualConsistencyPlugin
+ * @module eventual-consistency/consolidation
+ */
+
+import tryFn from "../../concerns/try-fn.js";
+import { PromisePool } from "@supercharge/promise-pool";
+import { idGenerator } from "../../concerns/id.js";
+import { getCohortInfo, createSyntheticSetTransaction } from "./utils.js";
+import { cleanupStaleLocks } from "./locks.js";
+
+/**
+ * Start consolidation timer for a handler
+ *
+ * @param {Object} handler - Field handler
+ * @param {string} resourceName - Resource name
+ * @param {string} fieldName - Field name
+ * @param {Function} runConsolidationCallback - Callback to run consolidation
+ * @param {Object} config - Plugin configuration
+ * @returns {NodeJS.Timeout} Consolidation timer
+ */
+export function startConsolidationTimer(handler, resourceName, fieldName, runConsolidationCallback, config) {
+  const intervalMs = config.consolidationInterval * 1000; // Convert seconds to ms
+
+  if (config.verbose) {
+    const nextRun = new Date(Date.now() + intervalMs);
+    console.log(
+      `[EventualConsistency] ${resourceName}.${fieldName} - ` +
+      `Consolidation timer started. Next run at ${nextRun.toISOString()} ` +
+      `(every ${config.consolidationInterval}s)`
+    );
+  }
+
+  handler.consolidationTimer = setInterval(async () => {
+    await runConsolidationCallback(handler, resourceName, fieldName);
+  }, intervalMs);
+
+  return handler.consolidationTimer;
+}
+
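A minimal sketch of how this helper might be driven; the handler object, resource/field names, and interval are illustrative, not taken from the package:

    import { startConsolidationTimer } from "./consolidation.js";

    // Hypothetical handler and config; only the fields the helper reads are filled in.
    const handler = {};
    const config = { consolidationInterval: 300, verbose: true };

    const timer = startConsolidationTimer(
      handler,
      "wallets",          // resourceName (illustrative)
      "balance",          // fieldName (illustrative)
      async () => { /* e.g. call runConsolidation(...) here */ },
      config
    );

    // Later, e.g. on plugin teardown, the interval can be cleared:
    clearInterval(timer);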
+/**
+ * Run consolidation for all pending transactions
+ *
+ * @param {Object} transactionResource - Transaction resource
+ * @param {Function} consolidateRecordFn - Function to consolidate individual records
+ * @param {Function} emitFn - Function to emit events
+ * @param {Object} config - Plugin configuration
+ * @returns {Promise<void>}
+ */
+export async function runConsolidation(transactionResource, consolidateRecordFn, emitFn, config) {
+  const startTime = Date.now();
+
+  if (config.verbose) {
+    console.log(
+      `[EventualConsistency] ${config.resource}.${config.field} - ` +
+      `Starting consolidation run at ${new Date().toISOString()}`
+    );
+  }
+
+  try {
+    // Query unapplied transactions from recent cohorts (last 24 hours by default)
+    // This uses hourly partition for O(1) performance instead of full scan
+    const now = new Date();
+    const hoursToCheck = config.consolidationWindow || 24; // Configurable lookback window (in hours)
+    const cohortHours = [];
+
+    for (let i = 0; i < hoursToCheck; i++) {
+      const date = new Date(now.getTime() - (i * 60 * 60 * 1000)); // Subtract hours
+      const cohortInfo = getCohortInfo(date, config.cohort.timezone, config.verbose);
+      cohortHours.push(cohortInfo.hour);
+    }
+
+    if (config.verbose) {
+      console.log(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Querying ${hoursToCheck} hour partitions for pending transactions...`
+      );
+    }
+
+    // Query transactions by partition for each hour (parallel for speed)
+    const transactionsByHour = await Promise.all(
+      cohortHours.map(async (cohortHour) => {
+        const [ok, err, txns] = await tryFn(() =>
+          transactionResource.query({
+            cohortHour,
+            applied: false
+          })
+        );
+        return ok ? txns : [];
+      })
+    );
+
+    // Flatten all transactions
+    const transactions = transactionsByHour.flat();
+
+    if (transactions.length === 0) {
+      if (config.verbose) {
+        console.log(
+          `[EventualConsistency] ${config.resource}.${config.field} - ` +
+          `No pending transactions found. Next run in ${config.consolidationInterval}s`
+        );
+      }
+      return;
+    }
+
+    // Get unique originalIds
+    const uniqueIds = [...new Set(transactions.map(t => t.originalId))];
+
+    if (config.verbose) {
+      console.log(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Found ${transactions.length} pending transactions for ${uniqueIds.length} records. ` +
+        `Consolidating with concurrency=${config.consolidationConcurrency}...`
+      );
+    }
+
+    // Consolidate each record in parallel with concurrency limit
+    const { results, errors } = await PromisePool
+      .for(uniqueIds)
+      .withConcurrency(config.consolidationConcurrency)
+      .process(async (id) => {
+        return await consolidateRecordFn(id);
+      });
+
+    const duration = Date.now() - startTime;
+
+    if (errors && errors.length > 0) {
+      console.error(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Consolidation completed with ${errors.length} errors in ${duration}ms:`,
+        errors
+      );
+    }
+
+    if (config.verbose) {
+      console.log(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Consolidation complete: ${results.length} records consolidated in ${duration}ms ` +
+        `(${errors.length} errors). Next run in ${config.consolidationInterval}s`
+      );
+    }
+
+    if (emitFn) {
+      emitFn('eventual-consistency.consolidated', {
+        resource: config.resource,
+        field: config.field,
+        recordCount: uniqueIds.length,
+        successCount: results.length,
+        errorCount: errors.length,
+        duration
+      });
+    }
+  } catch (error) {
+    const duration = Date.now() - startTime;
+    console.error(
+      `[EventualConsistency] ${config.resource}.${config.field} - ` +
+      `Consolidation error after ${duration}ms:`,
+      error
+    );
+    if (emitFn) {
+      emitFn('eventual-consistency.consolidation-error', error);
+    }
+  }
+}
+
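runConsolidation is driven entirely by the plugin configuration; the fields it reads are sketched below (option names come from the code above, values are illustrative):

    // Illustrative values; only options referenced by runConsolidation are shown.
    const config = {
      resource: "wallets",            // used in log prefixes
      field: "balance",               // used in log prefixes
      consolidationWindow: 24,        // hours of cohortHour partitions to scan
      consolidationInterval: 300,     // seconds between runs (reported in logs)
      consolidationConcurrency: 5,    // PromisePool concurrency across records
      cohort: { timezone: "UTC" },    // forwarded to getCohortInfo
      verbose: false
    };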
+/**
+ * Consolidate a single record
+ *
+ * @param {string} originalId - ID of the record to consolidate
+ * @param {Object} transactionResource - Transaction resource
+ * @param {Object} targetResource - Target resource
+ * @param {Object} lockResource - Lock resource
+ * @param {Object} analyticsResource - Analytics resource (optional)
+ * @param {Function} updateAnalyticsFn - Function to update analytics (optional)
+ * @param {Object} config - Plugin configuration
+ * @returns {Promise<number>} Consolidated value
+ */
+export async function consolidateRecord(
+  originalId,
+  transactionResource,
+  targetResource,
+  lockResource,
+  analyticsResource,
+  updateAnalyticsFn,
+  config
+) {
+  // Clean up stale locks before attempting to acquire
+  await cleanupStaleLocks(lockResource, config);
+
+  // Acquire distributed lock to prevent concurrent consolidation
+  const lockId = `lock-${originalId}`;
+  const [lockAcquired, lockErr, lock] = await tryFn(() =>
+    lockResource.insert({
+      id: lockId,
+      lockedAt: Date.now(),
+      workerId: process.pid ? String(process.pid) : 'unknown'
+    })
+  );
+
+  // If lock couldn't be acquired, another worker is consolidating
+  if (!lockAcquired) {
+    if (config.verbose) {
+      console.log(`[EventualConsistency] Lock for ${originalId} already held, skipping`);
+    }
+    // Get current value and return (another worker will consolidate)
+    const [recordOk, recordErr, record] = await tryFn(() =>
+      targetResource.get(originalId)
+    );
+    return (recordOk && record) ? (record[config.field] || 0) : 0;
+  }
+
+  try {
+    // Get all unapplied transactions for this record
+    const [ok, err, transactions] = await tryFn(() =>
+      transactionResource.query({
+        originalId,
+        applied: false
+      })
+    );
+
+    if (!ok || !transactions || transactions.length === 0) {
+      // No pending transactions - try to get current value from record
+      const [recordOk, recordErr, record] = await tryFn(() =>
+        targetResource.get(originalId)
+      );
+      const currentValue = (recordOk && record) ? (record[config.field] || 0) : 0;
+
+      if (config.verbose) {
+        console.log(
+          `[EventualConsistency] ${config.resource}.${config.field} - ` +
+          `No pending transactions for ${originalId}, skipping`
+        );
+      }
+      return currentValue;
+    }
+
+    // Get the LAST APPLIED VALUE from transactions (not from record - avoids S3 eventual consistency issues)
+    // This is the source of truth for the current value
+    const [appliedOk, appliedErr, appliedTransactions] = await tryFn(() =>
+      transactionResource.query({
+        originalId,
+        applied: true
+      })
+    );
+
+    let currentValue = 0;
+
+    if (appliedOk && appliedTransactions && appliedTransactions.length > 0) {
+      // Check if record exists - if deleted, ignore old applied transactions
+      const [recordExistsOk, recordExistsErr, recordExists] = await tryFn(() =>
+        targetResource.get(originalId)
+      );
+
+      if (!recordExistsOk || !recordExists) {
+        // Record was deleted - ignore applied transactions and start fresh
+        // This prevents old values from being carried over after deletion
+        if (config.verbose) {
+          console.log(
+            `[EventualConsistency] ${config.resource}.${config.field} - ` +
+            `Record ${originalId} doesn't exist, deleting ${appliedTransactions.length} old applied transactions`
+          );
+        }
+
+        // Delete old applied transactions to prevent them from being used when record is recreated
+        const { results, errors } = await PromisePool
+          .for(appliedTransactions)
+          .withConcurrency(10)
+          .process(async (txn) => {
+            const [deleted] = await tryFn(() => transactionResource.delete(txn.id));
+            return deleted;
+          });
+
+        if (config.verbose && errors && errors.length > 0) {
+          console.warn(
+            `[EventualConsistency] ${config.resource}.${config.field} - ` +
+            `Failed to delete ${errors.length} old applied transactions`
+          );
+        }
+
+        currentValue = 0;
+      } else {
+        // Record exists - use applied transactions to calculate current value
+        // Sort by timestamp to get chronological order
+        appliedTransactions.sort((a, b) =>
+          new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
+        );
+
+        // Check if there's a 'set' operation in applied transactions
+        const hasSetInApplied = appliedTransactions.some(t => t.operation === 'set');
+
+        if (!hasSetInApplied) {
+          // No 'set' operation in applied transactions means we're missing the base value
+          // This can only happen if:
+          // 1. Record had an initial value before first transaction
+          // 2. First consolidation didn't create an anchor transaction (legacy behavior)
+          // Solution: Get the current record value and create an anchor transaction now
+          const recordValue = recordExists[config.field] || 0;
+
+          // Calculate what the base value was by subtracting all applied deltas
+          let appliedDelta = 0;
+          for (const t of appliedTransactions) {
+            if (t.operation === 'add') appliedDelta += t.value;
+            else if (t.operation === 'sub') appliedDelta -= t.value;
+          }
+
+          const baseValue = recordValue - appliedDelta;
+
+          // Create and save anchor transaction with the base value
+          // Only create if baseValue is non-zero AND we don't already have an anchor transaction
+          const hasExistingAnchor = appliedTransactions.some(t => t.source === 'anchor');
+          if (baseValue !== 0 && !hasExistingAnchor) {
+            // Use the timestamp of the first applied transaction for cohort info
+            const firstTransactionDate = new Date(appliedTransactions[0].timestamp);
+            const cohortInfo = getCohortInfo(firstTransactionDate, config.cohort.timezone, config.verbose);
+            const anchorTransaction = {
+              id: idGenerator(),
+              originalId: originalId,
+              field: config.field,
+              value: baseValue,
+              operation: 'set',
+              timestamp: new Date(firstTransactionDate.getTime() - 1).toISOString(), // 1ms before first txn to ensure it's first
+              cohortDate: cohortInfo.date,
+              cohortHour: cohortInfo.hour,
+              cohortMonth: cohortInfo.month,
+              source: 'anchor',
+              applied: true
+            };
+
+            await transactionResource.insert(anchorTransaction);
+
+            // Prepend to applied transactions for this consolidation
+            appliedTransactions.unshift(anchorTransaction);
+          }
+        }
+
+        // Apply reducer to get the last consolidated value
+        currentValue = config.reducer(appliedTransactions);
+      }
+    } else {
+      // No applied transactions - this is the FIRST consolidation
+      // Try to get initial value from record
+      const [recordOk, recordErr, record] = await tryFn(() =>
+        targetResource.get(originalId)
+      );
+      currentValue = (recordOk && record) ? (record[config.field] || 0) : 0;
+
+      // If there's an initial value, create and save an anchor transaction
+      // This ensures all future consolidations have a reliable base value
+      if (currentValue !== 0) {
+        // Use timestamp of the first pending transaction (or current time if none)
+        let anchorTimestamp;
+        if (transactions && transactions.length > 0) {
+          const firstPendingDate = new Date(transactions[0].timestamp);
+          anchorTimestamp = new Date(firstPendingDate.getTime() - 1).toISOString();
+        } else {
+          anchorTimestamp = new Date().toISOString();
+        }
+
+        const cohortInfo = getCohortInfo(new Date(anchorTimestamp), config.cohort.timezone, config.verbose);
+        const anchorTransaction = {
+          id: idGenerator(),
+          originalId: originalId,
+          field: config.field,
+          value: currentValue,
+          operation: 'set',
+          timestamp: anchorTimestamp,
+          cohortDate: cohortInfo.date,
+          cohortHour: cohortInfo.hour,
+          cohortMonth: cohortInfo.month,
+          source: 'anchor',
+          applied: true
+        };
+
+        await transactionResource.insert(anchorTransaction);
+
+        if (config.verbose) {
+          console.log(
+            `[EventualConsistency] ${config.resource}.${config.field} - ` +
+            `Created anchor transaction for ${originalId} with base value ${currentValue}`
+          );
+        }
+      }
+    }
+
+    if (config.verbose) {
+      console.log(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Consolidating ${originalId}: ${transactions.length} pending transactions ` +
+        `(current: ${currentValue} from ${appliedOk && appliedTransactions?.length > 0 ? 'applied transactions' : 'record'})`
+      );
+    }
+
+    // Sort pending transactions by timestamp
+    transactions.sort((a, b) =>
+      new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
+    );
+
+    // If there's a current value and no 'set' operations in pending transactions,
+    // prepend a synthetic set transaction to preserve the current value
+    const hasSetOperation = transactions.some(t => t.operation === 'set');
+    if (currentValue !== 0 && !hasSetOperation) {
+      transactions.unshift(createSyntheticSetTransaction(currentValue));
+    }
+
+    // Apply reducer to get consolidated value
+    const consolidatedValue = config.reducer(transactions);
+
+    if (config.verbose) {
+      console.log(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `${originalId}: ${currentValue} → ${consolidatedValue} ` +
+        `(${consolidatedValue > currentValue ? '+' : ''}${consolidatedValue - currentValue})`
+      );
+    }
+
+    // Update the original record
+    // NOTE: We do NOT attempt to insert non-existent records because:
+    // 1. Target resources typically have required fields we don't know about
+    // 2. Record creation should be the application's responsibility
+    // 3. Transactions will remain pending until the record is created
+    const [updateOk, updateErr] = await tryFn(() =>
+      targetResource.update(originalId, {
+        [config.field]: consolidatedValue
+      })
+    );
+
+    if (!updateOk) {
+      // Check if record doesn't exist
+      if (updateErr?.message?.includes('does not exist')) {
+        // Record doesn't exist - skip consolidation and keep transactions pending
+        if (config.verbose) {
+          console.warn(
+            `[EventualConsistency] ${config.resource}.${config.field} - ` +
+            `Record ${originalId} doesn't exist. Skipping consolidation. ` +
+            `${transactions.length} transactions will remain pending until record is created.`
+          );
+        }
+
+        // Return the consolidated value (for informational purposes)
+        // Transactions remain pending and will be picked up when record exists
+        return consolidatedValue;
+      }
+
+      // Update failed for another reason - this is a real error
+      console.error(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `FAILED to update ${originalId}: ${updateErr?.message || updateErr}`,
+        { error: updateErr, consolidatedValue, currentValue }
+      );
+      throw updateErr;
+    }
+
+    if (updateOk) {
+      // Mark transactions as applied (skip synthetic ones) - use PromisePool for controlled concurrency
+      const transactionsToUpdate = transactions.filter(txn => txn.id !== '__synthetic__');
+
+      const { results, errors } = await PromisePool
+        .for(transactionsToUpdate)
+        .withConcurrency(10) // Limit parallel updates
+        .process(async (txn) => {
+          const [ok, err] = await tryFn(() =>
+            transactionResource.update(txn.id, { applied: true })
+          );
+
+          if (!ok && config.verbose) {
+            console.warn(`[EventualConsistency] Failed to mark transaction ${txn.id} as applied:`, err?.message);
+          }
+
+          return ok;
+        });
+
+      if (errors && errors.length > 0 && config.verbose) {
+        console.warn(`[EventualConsistency] ${errors.length} transactions failed to mark as applied`);
+      }
+
+      // Update analytics if enabled (only for real transactions, not synthetic)
+      if (config.enableAnalytics && transactionsToUpdate.length > 0 && updateAnalyticsFn) {
+        await updateAnalyticsFn(transactionsToUpdate);
+      }
+
+      // Invalidate cache for this record after consolidation
+      if (targetResource && targetResource.cache && typeof targetResource.cache.delete === 'function') {
+        try {
+          const cacheKey = await targetResource.cacheKeyFor({ id: originalId });
+          await targetResource.cache.delete(cacheKey);
+
+          if (config.verbose) {
+            console.log(
+              `[EventualConsistency] ${config.resource}.${config.field} - ` +
+              `Cache invalidated for ${originalId}`
+            );
+          }
+        } catch (cacheErr) {
+          // Log but don't fail consolidation if cache invalidation fails
+          if (config.verbose) {
+            console.warn(
+              `[EventualConsistency] ${config.resource}.${config.field} - ` +
+              `Failed to invalidate cache for ${originalId}: ${cacheErr?.message}`
+            );
+          }
+        }
+      }
+    }
+
+    return consolidatedValue;
+  } finally {
+    // Always release the lock
+    const [lockReleased, lockReleaseErr] = await tryFn(() => lockResource.delete(lockId));
+
+    if (!lockReleased && config.verbose) {
+      console.warn(`[EventualConsistency] Failed to release lock ${lockId}:`, lockReleaseErr?.message);
+    }
+  }
+}
+
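Both branches above hand a chronologically sorted transaction list to config.reducer. A minimal reducer consistent with the set/add/sub operations used in this module (an illustration, not necessarily the plugin's built-in default):

    // Assumed transaction shape: { operation: 'set' | 'add' | 'sub', value: number, ... }
    function sumReducer(transactions) {
      let total = 0;
      for (const t of transactions) {
        if (t.operation === 'set') total = t.value;        // anchor/synthetic set replaces the running total
        else if (t.operation === 'add') total += t.value;  // increment
        else if (t.operation === 'sub') total -= t.value;  // decrement
      }
      return total;
    }

    // e.g. sumReducer([{ operation: 'set', value: 10 }, { operation: 'add', value: 5 },
    //                  { operation: 'sub', value: 2 }]) === 13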
+/**
+ * Get consolidated value without applying
+ *
+ * @param {string} originalId - ID of the record
+ * @param {Object} options - Query options
+ * @param {Object} transactionResource - Transaction resource
+ * @param {Object} targetResource - Target resource
+ * @param {Object} config - Plugin configuration
+ * @returns {Promise<number>} Consolidated value
+ */
+export async function getConsolidatedValue(originalId, options, transactionResource, targetResource, config) {
+  const includeApplied = options.includeApplied || false;
+  const startDate = options.startDate;
+  const endDate = options.endDate;
+
+  // Build query
+  const query = { originalId };
+  if (!includeApplied) {
+    query.applied = false;
+  }
+
+  // Get transactions
+  const [ok, err, transactions] = await tryFn(() =>
+    transactionResource.query(query)
+  );
+
+  if (!ok || !transactions || transactions.length === 0) {
+    // If no transactions, check if record exists and return its current value
+    const [recordOk, recordErr, record] = await tryFn(() =>
+      targetResource.get(originalId)
+    );
+
+    if (recordOk && record) {
+      return record[config.field] || 0;
+    }
+
+    return 0;
+  }
+
+  // Filter by date range if specified
+  let filtered = transactions;
+  if (startDate || endDate) {
+    filtered = transactions.filter(t => {
+      const timestamp = new Date(t.timestamp);
+      if (startDate && timestamp < new Date(startDate)) return false;
+      if (endDate && timestamp > new Date(endDate)) return false;
+      return true;
+    });
+  }
+
+  // Get current value from record
+  const [recordOk, recordErr, record] = await tryFn(() =>
+    targetResource.get(originalId)
+  );
+  const currentValue = (recordOk && record) ? (record[config.field] || 0) : 0;
+
+  // Check if there's a 'set' operation in filtered transactions
+  const hasSetOperation = filtered.some(t => t.operation === 'set');
+
+  // If current value exists and no 'set', prepend synthetic set transaction
+  if (currentValue !== 0 && !hasSetOperation) {
+    filtered.unshift(createSyntheticSetTransaction(currentValue));
+  }
+
+  // Sort by timestamp
+  filtered.sort((a, b) =>
+    new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
+  );
+
+  // Apply reducer
+  return config.reducer(filtered);
+}
+
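A read-only preview might be obtained like this; the resource handles and config come from the plugin at runtime, and the id, dates, and option values are placeholders:

    import { getConsolidatedValue } from "./consolidation.js";

    // transactionResource, targetResource, config: provided by the plugin's setup (placeholders here).
    // Pending transactions only (default):
    const preview = await getConsolidatedValue("wallet-123", {}, transactionResource, targetResource, config);

    // Full history within a window:
    const windowed = await getConsolidatedValue(
      "wallet-123",
      { includeApplied: true, startDate: "2025-01-01", endDate: "2025-01-31" },
      transactionResource,
      targetResource,
      config
    );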
+/**
+ * Get cohort statistics
+ *
+ * @param {string} cohortDate - Cohort date to get stats for
+ * @param {Object} transactionResource - Transaction resource
+ * @returns {Promise<Object|null>} Cohort statistics
+ */
+export async function getCohortStats(cohortDate, transactionResource) {
+  const [ok, err, transactions] = await tryFn(() =>
+    transactionResource.query({
+      cohortDate
+    })
+  );
+
+  if (!ok) return null;
+
+  const stats = {
+    date: cohortDate,
+    transactionCount: transactions.length,
+    totalValue: 0,
+    byOperation: { set: 0, add: 0, sub: 0 },
+    byOriginalId: {}
+  };
+
+  for (const txn of transactions) {
+    stats.totalValue += txn.value || 0;
+    stats.byOperation[txn.operation] = (stats.byOperation[txn.operation] || 0) + 1;
+
+    if (!stats.byOriginalId[txn.originalId]) {
+      stats.byOriginalId[txn.originalId] = {
+        count: 0,
+        value: 0
+      };
+    }
+    stats.byOriginalId[txn.originalId].count++;
+    stats.byOriginalId[txn.originalId].value += txn.value || 0;
+  }
+
+  return stats;
+}
+
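getCohortStats aggregates a single cohort; a sketch of the call and the shape it returns, assuming an ISO-style cohortDate and with placeholder data:

    import { getCohortStats } from "./consolidation.js";

    // transactionResource is provided by the plugin's setup (placeholder here).
    const stats = await getCohortStats("2025-01-15", transactionResource);
    // stats → {
    //   date, transactionCount, totalValue,
    //   byOperation: { set, add, sub },
    //   byOriginalId: { "<originalId>": { count, value }, ... }
    // }   or null if the query fails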
+/**
+ * Recalculate from scratch by resetting all transactions to pending
+ * This is useful for debugging, recovery, or when you want to recompute everything
+ *
+ * @param {string} originalId - ID of the record to recalculate
+ * @param {Object} transactionResource - Transaction resource
+ * @param {Object} targetResource - Target resource
+ * @param {Object} lockResource - Lock resource
+ * @param {Function} consolidateRecordFn - Function to consolidate the record
+ * @param {Object} config - Plugin configuration
+ * @returns {Promise<number>} Recalculated value
+ */
+export async function recalculateRecord(
+  originalId,
+  transactionResource,
+  targetResource,
+  lockResource,
+  consolidateRecordFn,
+  config
+) {
+  // Clean up stale locks before attempting to acquire
+  await cleanupStaleLocks(lockResource, config);
+
+  // Acquire distributed lock to prevent concurrent operations
+  const lockId = `lock-recalculate-${originalId}`;
+  const [lockAcquired, lockErr, lock] = await tryFn(() =>
+    lockResource.insert({
+      id: lockId,
+      lockedAt: Date.now(),
+      workerId: process.pid ? String(process.pid) : 'unknown'
+    })
+  );
+
+  // If lock couldn't be acquired, another worker is operating on this record
+  if (!lockAcquired) {
+    if (config.verbose) {
+      console.log(`[EventualConsistency] Recalculate lock for ${originalId} already held, skipping`);
+    }
+    throw new Error(`Cannot recalculate ${originalId}: lock already held by another worker`);
+  }
+
+  try {
+    if (config.verbose) {
+      console.log(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Starting recalculation for ${originalId} (resetting all transactions to pending)`
+      );
+    }
+
+    // Get ALL transactions for this record (both applied and pending)
+    const [allOk, allErr, allTransactions] = await tryFn(() =>
+      transactionResource.query({
+        originalId
+      })
+    );
+
+    if (!allOk || !allTransactions || allTransactions.length === 0) {
+      if (config.verbose) {
+        console.log(
+          `[EventualConsistency] ${config.resource}.${config.field} - ` +
+          `No transactions found for ${originalId}, nothing to recalculate`
+        );
+      }
+      return 0;
+    }
+
+    if (config.verbose) {
+      console.log(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Found ${allTransactions.length} total transactions for ${originalId}, marking all as pending...`
+      );
+    }
+
+    // Mark ALL transactions as pending (applied: false)
+    // Exclude anchor transactions (they should always be applied)
+    const transactionsToReset = allTransactions.filter(txn => txn.source !== 'anchor');
+
+    const { results, errors } = await PromisePool
+      .for(transactionsToReset)
+      .withConcurrency(10)
+      .process(async (txn) => {
+        const [ok, err] = await tryFn(() =>
+          transactionResource.update(txn.id, { applied: false })
+        );
+
+        if (!ok && config.verbose) {
+          console.warn(`[EventualConsistency] Failed to reset transaction ${txn.id}:`, err?.message);
+        }
+
+        return ok;
+      });
+
+    if (errors && errors.length > 0) {
+      console.warn(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Failed to reset ${errors.length} transactions during recalculation`
+      );
+    }
+
+    if (config.verbose) {
+      console.log(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Reset ${results.length} transactions to pending, now resetting record value and running consolidation...`
+      );
+    }
+
+    // Reset the record's field value to 0 to prevent double-counting
+    // This ensures consolidation starts fresh without using the old value as an anchor
+    const [resetOk, resetErr] = await tryFn(() =>
+      targetResource.update(originalId, {
+        [config.field]: 0
+      })
+    );
+
+    if (!resetOk && config.verbose) {
+      console.warn(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Failed to reset record value for ${originalId}: ${resetErr?.message}`
+      );
+    }
+
+    // Now run normal consolidation which will process all pending transactions
+    const consolidatedValue = await consolidateRecordFn(originalId);
+
+    if (config.verbose) {
+      console.log(
+        `[EventualConsistency] ${config.resource}.${config.field} - ` +
+        `Recalculation complete for ${originalId}: final value = ${consolidatedValue}`
+      );
+    }
+
+    return consolidatedValue;
+  } finally {
+    // Always release the lock
+    const [lockReleased, lockReleaseErr] = await tryFn(() => lockResource.delete(lockId));
+
+    if (!lockReleased && config.verbose) {
+      console.warn(`[EventualConsistency] Failed to release recalculate lock ${lockId}:`, lockReleaseErr?.message);
+    }
+  }
+}
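A recovery flow built on recalculateRecord might look like the following; the resource handles are whatever the plugin wires up, shown only to illustrate the call shape and the lock-contention error path:

    import { recalculateRecord, consolidateRecord } from "./consolidation.js";

    // transactionResource, targetResource, lockResource, config: provided by the plugin (placeholders here).
    try {
      const value = await recalculateRecord(
        "wallet-123",
        transactionResource,
        targetResource,
        lockResource,
        (id) => consolidateRecord(id, transactionResource, targetResource, lockResource, null, null, config),
        config
      );
      console.log("Recalculated value:", value);
    } catch (err) {
      // Thrown when another worker already holds lock-recalculate-<id>
      console.warn(err.message);
    }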