s3db.js 11.0.2 → 11.0.3
This diff shows the content changes between publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/dist/s3db.cjs.js +612 -308
- package/dist/s3db.cjs.js.map +1 -1
- package/dist/s3db.es.js +612 -308
- package/dist/s3db.es.js.map +1 -1
- package/package.json +1 -1
- package/src/concerns/plugin-storage.js +274 -9
- package/src/plugins/audit.plugin.js +94 -18
- package/src/plugins/eventual-consistency/analytics.js +131 -15
- package/src/plugins/eventual-consistency/config.js +3 -0
- package/src/plugins/eventual-consistency/consolidation.js +32 -36
- package/src/plugins/eventual-consistency/garbage-collection.js +11 -13
- package/src/plugins/eventual-consistency/index.js +28 -19
- package/src/plugins/eventual-consistency/install.js +9 -26
- package/src/plugins/eventual-consistency/partitions.js +5 -0
- package/src/plugins/eventual-consistency/transactions.js +1 -0
- package/src/plugins/eventual-consistency/utils.js +36 -1
- package/src/plugins/fulltext.plugin.js +76 -22
- package/src/plugins/metrics.plugin.js +70 -20
- package/src/plugins/s3-queue.plugin.js +21 -120
- package/src/plugins/scheduler.plugin.js +11 -37
Key source changes in this release, grouped by file:

package/src/plugins/eventual-consistency/utils.js:

```diff
@@ -64,12 +64,42 @@ export function getTimezoneOffset(timezone, verbose = false) {
   }
 }
 
+/**
+ * Calculate ISO 8601 week number for a date
+ * @param {Date} date - Date to get week number for
+ * @returns {Object} Year and week number { year, week }
+ */
+function getISOWeek(date) {
+  // Copy date to avoid mutating original
+  const target = new Date(date.valueOf());
+
+  // ISO week starts on Monday (day 1)
+  // Find Thursday of this week (ISO week contains Jan 4th)
+  const dayNr = (date.getUTCDay() + 6) % 7; // Make Monday = 0 (use UTC)
+  target.setUTCDate(target.getUTCDate() - dayNr + 3); // Thursday of this week
+
+  // Get first Thursday of the year (use UTC)
+  const yearStart = new Date(Date.UTC(target.getUTCFullYear(), 0, 1));
+  const firstThursday = new Date(yearStart.valueOf());
+  if (yearStart.getUTCDay() !== 4) {
+    firstThursday.setUTCDate(yearStart.getUTCDate() + ((4 - yearStart.getUTCDay()) + 7) % 7);
+  }
+
+  // Calculate week number
+  const weekNumber = 1 + Math.round((target - firstThursday) / 604800000);
+
+  return {
+    year: target.getUTCFullYear(),
+    week: weekNumber
+  };
+}
+
 /**
  * Get cohort information for a date
  * @param {Date} date - Date to get cohort info for
  * @param {string} timezone - IANA timezone name
  * @param {boolean} verbose - Whether to log warnings
- * @returns {Object} Cohort information (date, hour, month)
+ * @returns {Object} Cohort information (date, hour, week, month)
  */
 export function getCohortInfo(date, timezone, verbose = false) {
   // Simple timezone offset calculation
@@ -81,9 +111,14 @@ export function getCohortInfo(date, timezone, verbose = false) {
   const day = String(localDate.getDate()).padStart(2, '0');
   const hour = String(localDate.getHours()).padStart(2, '0');
 
+  // Calculate ISO week
+  const { year: weekYear, week: weekNumber } = getISOWeek(localDate);
+  const week = `${weekYear}-W${String(weekNumber).padStart(2, '0')}`;
+
   return {
     date: `${year}-${month}-${day}`,
     hour: `${year}-${month}-${day}T${hour}`, // ISO-like format for hour partition
+    week: week, // ISO 8601 week format (e.g., '2025-W42')
     month: `${year}-${month}`
   };
 }
```
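The new `week` cohort key lets eventual-consistency analytics be grouped by ISO 8601 week alongside day, hour, and month. Below is a quick sanity check of the labels this produces, using a copy of the `getISOWeek` logic shown above; the dates and expected values are illustrative and not taken from the package's test suite.

```js
// Mirrors the getISOWeek() helper added in utils.js, inlined here so the
// snippet is self-contained; labels are formatted the way getCohortInfo()
// formats them: `${year}-W${week padded to 2 digits}`.
function getISOWeek(date) {
  const target = new Date(date.valueOf());
  const dayNr = (date.getUTCDay() + 6) % 7;           // Monday = 0
  target.setUTCDate(target.getUTCDate() - dayNr + 3); // Thursday of this week
  const yearStart = new Date(Date.UTC(target.getUTCFullYear(), 0, 1));
  const firstThursday = new Date(yearStart.valueOf());
  if (yearStart.getUTCDay() !== 4) {
    firstThursday.setUTCDate(yearStart.getUTCDate() + ((4 - yearStart.getUTCDay()) + 7) % 7);
  }
  return {
    year: target.getUTCFullYear(),
    week: 1 + Math.round((target - firstThursday) / 604800000)
  };
}

const label = (d) => {
  const { year, week } = getISOWeek(d);
  return `${year}-W${String(week).padStart(2, '0')}`;
};

console.log(label(new Date(Date.UTC(2025, 9, 15))));  // Oct 15 2025 -> '2025-W42'
console.log(label(new Date(Date.UTC(2024, 11, 30)))); // Dec 30 2024 -> '2025-W01' (week belongs to ISO year 2025)
console.log(label(new Date(Date.UTC(2026, 0, 1))));   // Jan  1 2026 -> '2026-W01'
```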
package/src/plugins/fulltext.plugin.js:

```diff
@@ -11,6 +11,8 @@ export class FullTextPlugin extends Plugin {
       ...options
     };
     this.indexes = new Map(); // In-memory index for simplicity
+    this.dirtyIndexes = new Set(); // Track changed index keys for incremental saves
+    this.deletedIndexes = new Set(); // Track deleted index keys
   }
 
   async onInstall() {
@@ -26,7 +28,11 @@ export class FullTextPlugin extends Plugin {
         recordIds: 'json|required', // Array of record IDs containing this word
         count: 'number|required',
         lastUpdated: 'string|required'
-      }
+      },
+      partitions: {
+        byResource: { fields: { resourceName: 'string' } }
+      },
+      behavior: 'body-overflow'
     }));
     this.indexResource = ok ? indexResource : this.database.resources.fulltext_indexes;
 
@@ -69,26 +75,71 @@ export class FullTextPlugin extends Plugin {
 
   async saveIndexes() {
     if (!this.indexResource) return;
-
+
     const [ok, err] = await tryFn(async () => {
-      //
-      const
-
-
+      // Delete indexes that were removed
+      for (const key of this.deletedIndexes) {
+        // Find and delete the index record using partition-aware query
+        const [resourceName] = key.split(':');
+        const [queryOk, queryErr, results] = await tryFn(() =>
+          this.indexResource.query({ resourceName })
+        );
+
+        if (queryOk && results) {
+          for (const index of results) {
+            const indexKey = `${index.resourceName}:${index.fieldName}:${index.word}`;
+            if (indexKey === key) {
+              await this.indexResource.delete(index.id);
+            }
+          }
+        }
       }
-
-
+
+      // Save or update dirty indexes
+      for (const key of this.dirtyIndexes) {
        const [resourceName, fieldName, word] = key.split(':');
-
-
-
-
-
-
-
-
-
+        const data = this.indexes.get(key);
+
+        if (!data) continue; // Skip if index was deleted
+
+        // Try to find existing index record
+        const [queryOk, queryErr, results] = await tryFn(() =>
+          this.indexResource.query({ resourceName })
+        );
+
+        let existingRecord = null;
+        if (queryOk && results) {
+          existingRecord = results.find(
+            (index) => index.resourceName === resourceName &&
+              index.fieldName === fieldName &&
+              index.word === word
+          );
+        }
+
+        if (existingRecord) {
+          // Update existing record
+          await this.indexResource.update(existingRecord.id, {
+            recordIds: data.recordIds,
+            count: data.count,
+            lastUpdated: new Date().toISOString()
+          });
+        } else {
+          // Insert new record
+          await this.indexResource.insert({
+            id: `index-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`,
+            resourceName,
+            fieldName,
+            word,
+            recordIds: data.recordIds,
+            count: data.count,
+            lastUpdated: new Date().toISOString()
+          });
+        }
       }
+
+      // Clear tracking sets after successful save
+      this.dirtyIndexes.clear();
+      this.deletedIndexes.clear();
     });
   }
 
@@ -195,21 +246,22 @@ export class FullTextPlugin extends Plugin {
       }
 
       const words = this.tokenize(fieldValue);
-
+
       for (const word of words) {
         if (word.length < this.config.minWordLength) {
           continue;
         }
-
+
         const key = `${resourceName}:${fieldName}:${word.toLowerCase()}`;
         const existing = this.indexes.get(key) || { recordIds: [], count: 0 };
-
+
         if (!existing.recordIds.includes(recordId)) {
           existing.recordIds.push(recordId);
           existing.count = existing.recordIds.length;
         }
-
+
         this.indexes.set(key, existing);
+        this.dirtyIndexes.add(key); // Mark as dirty for incremental save
       }
     }
   }
@@ -221,11 +273,13 @@ export class FullTextPlugin extends Plugin {
     if (index > -1) {
       data.recordIds.splice(index, 1);
       data.count = data.recordIds.length;
-
+
       if (data.recordIds.length === 0) {
         this.indexes.delete(key);
+        this.deletedIndexes.add(key); // Track deletion for incremental save
       } else {
         this.indexes.set(key, data);
+        this.dirtyIndexes.add(key); // Mark as dirty for incremental save
      }
    }
  }
```
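The FullTextPlugin change replaces "rewrite everything on save" with incremental persistence: index mutations mark keys in `dirtyIndexes` or `deletedIndexes`, and `saveIndexes()` only touches those keys before clearing both sets. The same change-tracking pattern in isolation, as a generic sketch independent of the plugin's API:

```js
// Generic dirty-tracking sketch (not s3db.js code): only keys touched since
// the last flush are written out; removals are tracked separately.
class TrackedIndex {
  constructor() {
    this.entries = new Map(); // key -> value (in-memory state)
    this.dirty = new Set();   // keys changed since last flush
    this.deleted = new Set(); // keys removed since last flush
  }

  set(key, value) {
    this.entries.set(key, value);
    this.dirty.add(key);
    this.deleted.delete(key); // a re-added key is no longer a pending deletion
  }

  remove(key) {
    if (this.entries.delete(key)) {
      this.deleted.add(key);
      this.dirty.delete(key);
    }
  }

  // writeFn(key, value) and deleteFn(key) are async callbacks supplied by the caller
  async flush(writeFn, deleteFn) {
    for (const key of this.deleted) await deleteFn(key);
    for (const key of this.dirty) await writeFn(key, this.entries.get(key));
    this.dirty.clear();
    this.deleted.clear();
  }
}
```

In the sketch, re-adding a removed key clears its pending deletion, so a flush performs only the write for that key.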
package/src/plugins/metrics.plugin.js:

```diff
@@ -47,8 +47,13 @@ export class MetricsPlugin extends Plugin {
         errors: 'number|required',
         avgTime: 'number|required',
         timestamp: 'string|required',
-        metadata: 'json'
-
+        metadata: 'json',
+        createdAt: 'string|required' // YYYY-MM-DD for partitioning
+      },
+      partitions: {
+        byDate: { fields: { createdAt: 'string|maxlength:10' } }
+      },
+      behavior: 'body-overflow'
     }));
     this.metricsResource = ok1 ? metricsResource : this.database.resources.plg_metrics;
 
@@ -60,8 +65,13 @@ export class MetricsPlugin extends Plugin {
         operation: 'string|required',
         error: 'string|required',
         timestamp: 'string|required',
-        metadata: 'json'
-
+        metadata: 'json',
+        createdAt: 'string|required' // YYYY-MM-DD for partitioning
+      },
+      partitions: {
+        byDate: { fields: { createdAt: 'string|maxlength:10' } }
+      },
+      behavior: 'body-overflow'
     }));
     this.errorsResource = ok2 ? errorsResource : this.database.resources.plg_error_logs;
 
@@ -73,8 +83,13 @@ export class MetricsPlugin extends Plugin {
         operation: 'string|required',
         duration: 'number|required',
         timestamp: 'string|required',
-        metadata: 'json'
-
+        metadata: 'json',
+        createdAt: 'string|required' // YYYY-MM-DD for partitioning
+      },
+      partitions: {
+        byDate: { fields: { createdAt: 'string|maxlength:10' } }
+      },
+      behavior: 'body-overflow'
     }));
     this.performanceResource = ok3 ? performanceResource : this.database.resources.plg_performance_logs;
   });
@@ -359,6 +374,9 @@ export class MetricsPlugin extends Plugin {
     }
 
     // Flush operation metrics
+    const now = new Date();
+    const createdAt = now.toISOString().slice(0, 10); // YYYY-MM-DD
+
     for (const [operation, data] of Object.entries(this.metrics.operations)) {
       if (data.count > 0) {
         await this.metricsResource.insert({
@@ -370,7 +388,8 @@ export class MetricsPlugin extends Plugin {
           totalTime: data.totalTime,
           errors: data.errors,
           avgTime: data.count > 0 ? data.totalTime / data.count : 0,
-          timestamp:
+          timestamp: now.toISOString(),
+          createdAt,
           metadata
         });
       }
@@ -389,7 +408,8 @@ export class MetricsPlugin extends Plugin {
           totalTime: data.totalTime,
           errors: data.errors,
           avgTime: data.count > 0 ? data.totalTime / data.count : 0,
-          timestamp:
+          timestamp: now.toISOString(),
+          createdAt,
           metadata: resourceMetadata
         });
       }
@@ -405,6 +425,7 @@ export class MetricsPlugin extends Plugin {
         operation: perf.operation,
         duration: perf.duration,
         timestamp: perf.timestamp,
+        createdAt: perf.timestamp.slice(0, 10), // YYYY-MM-DD from timestamp
         metadata: perfMetadata
       });
     }
@@ -420,6 +441,7 @@ export class MetricsPlugin extends Plugin {
         error: error.error,
         stack: error.stack,
         timestamp: error.timestamp,
+        createdAt: error.timestamp.slice(0, 10), // YYYY-MM-DD from timestamp
         metadata: errorMetadata
       });
     }
@@ -597,28 +619,56 @@ export class MetricsPlugin extends Plugin {
   async cleanupOldData() {
     const cutoffDate = new Date();
     cutoffDate.setDate(cutoffDate.getDate() - this.config.retentionDays);
+    const cutoffDateStr = cutoffDate.toISOString().slice(0, 10); // YYYY-MM-DD
+
+    // Generate list of dates to delete (all dates before cutoff)
+    const datesToDelete = [];
+    const startDate = new Date(cutoffDate);
+    startDate.setDate(startDate.getDate() - 365); // Go back up to 1 year to catch old data
+
+    for (let d = new Date(startDate); d < cutoffDate; d.setDate(d.getDate() + 1)) {
+      datesToDelete.push(d.toISOString().slice(0, 10));
+    }
 
-    // Clean up old metrics
+    // Clean up old metrics using partition-aware deletion
     if (this.metricsResource) {
-      const
-
-
+      for (const dateStr of datesToDelete) {
+        const [ok, err, oldMetrics] = await tryFn(() =>
+          this.metricsResource.query({ createdAt: dateStr })
+        );
+        if (ok && oldMetrics) {
+          for (const metric of oldMetrics) {
+            await tryFn(() => this.metricsResource.delete(metric.id));
+          }
+        }
       }
     }
 
-    // Clean up old error logs
+    // Clean up old error logs using partition-aware deletion
     if (this.errorsResource) {
-      const
-
-
+      for (const dateStr of datesToDelete) {
+        const [ok, err, oldErrors] = await tryFn(() =>
+          this.errorsResource.query({ createdAt: dateStr })
+        );
+        if (ok && oldErrors) {
+          for (const error of oldErrors) {
+            await tryFn(() => this.errorsResource.delete(error.id));
+          }
+        }
      }
    }
 
-    // Clean up old performance logs
+    // Clean up old performance logs using partition-aware deletion
     if (this.performanceResource) {
-      const
-
-
+      for (const dateStr of datesToDelete) {
+        const [ok, err, oldPerformance] = await tryFn(() =>
+          this.performanceResource.query({ createdAt: dateStr })
+        );
+        if (ok && oldPerformance) {
+          for (const perf of oldPerformance) {
+            await tryFn(() => this.performanceResource.delete(perf.id));
+          }
+        }
      }
    }
  }
```
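With `createdAt` stored as a `YYYY-MM-DD` string and exposed through the `byDate` partition, retention cleanup becomes a bounded loop over day keys instead of a full listing of every record ever written. A condensed sketch of that sweep, built only on the calls visible in the hunks above (`resource.query({ createdAt })` and `resource.delete(id)`); the function name and parameters are illustrative, not part of the plugin's API:

```js
// Sketch of a partition-aware retention sweep; `metricsResource` stands in
// for one of the plugin's resources, `retentionDays` for its config value.
async function sweepOldMetrics(metricsResource, retentionDays, lookbackDays = 365) {
  const cutoff = new Date();
  cutoff.setDate(cutoff.getDate() - retentionDays);

  const start = new Date(cutoff);
  start.setDate(start.getDate() - lookbackDays);

  // Enumerate one YYYY-MM-DD key per day; each key maps to a byDate partition,
  // so the query only lists that day's objects instead of scanning everything.
  for (let d = new Date(start); d < cutoff; d.setDate(d.getDate() + 1)) {
    const day = d.toISOString().slice(0, 10);
    const oldMetrics = await metricsResource.query({ createdAt: day });
    for (const metric of oldMetrics ?? []) {
      await metricsResource.delete(metric.id);
    }
  }
}
```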
package/src/plugins/s3-queue.plugin.js:

```diff
@@ -139,31 +139,8 @@ export class S3QueuePlugin extends Plugin {
 
     this.queueResource = this.database.resources[queueName];
 
-    //
-
-    const [okLock, errLock] = await tryFn(() =>
-      this.database.createResource({
-        name: lockName,
-        attributes: {
-          id: 'string|required',
-          workerId: 'string|required',
-          timestamp: 'number|required',
-          ttl: 'number|default:5000'
-        },
-        behavior: 'body-overflow',
-        timestamps: false
-      })
-    );
-
-    if (okLock || this.database.resources[lockName]) {
-      this.lockResource = this.database.resources[lockName];
-    } else {
-      // Locks disabled if creation fails
-      this.lockResource = null;
-      if (this.config.verbose) {
-        console.log(`[S3QueuePlugin] Lock resource creation failed, locking disabled: ${errLock?.message}`);
-      }
-    }
+    // Locks are now managed by PluginStorage with TTL - no Resource needed
+    // Lock acquisition is handled via storage.acquireLock() with automatic expiration
 
     // Add helper methods to target resource
     this.addHelperMethods();
@@ -273,14 +250,7 @@ export class S3QueuePlugin extends Plugin {
       }
     }, 5000);
 
-    //
-    this.lockCleanupInterval = setInterval(() => {
-      this.cleanupStaleLocks().catch(err => {
-        if (this.config.verbose) {
-          console.log(`[lockCleanup] Error: ${err.message}`);
-        }
-      });
-    }, 10000);
+    // Lock cleanup no longer needed - TTL handles expiration automatically
 
     // Start N workers
     for (let i = 0; i < concurrency; i++) {
@@ -306,11 +276,7 @@ export class S3QueuePlugin extends Plugin {
       this.cacheCleanupInterval = null;
     }
 
-    //
-    if (this.lockCleanupInterval) {
-      clearInterval(this.lockCleanupInterval);
-      this.lockCleanupInterval = null;
-    }
+    // Lock cleanup interval no longer exists (TTL handles it)
 
     // Wait for workers to finish current tasks
     await Promise.all(this.workers);
@@ -383,59 +349,21 @@ export class S3QueuePlugin extends Plugin {
   }
 
   /**
-   * Acquire a distributed lock using
+   * Acquire a distributed lock using PluginStorage TTL
    * This ensures only one worker can claim a message at a time
-   *
-   * Uses a two-step process:
-   * 1. Create lock resource (similar to queue resource) if not exists
-   * 2. Try to claim lock using ETag-based conditional update
    */
   async acquireLock(messageId) {
-
-
-    }
-
-    const lockId = `lock-${messageId}`;
-    const now = Date.now();
+    const storage = this.getStorage();
+    const lockKey = `msg-${messageId}`;
 
     try {
-
-
-
-
-
-      if (existingLock) {
-        // Lock exists - check if expired
-        const lockAge = now - existingLock.timestamp;
-        if (lockAge < existingLock.ttl) {
-          // Lock still valid, owned by another worker
-          return false;
-        }
-        // Lock expired - try to claim it with ETag
-        const [ok, err, result] = await tryFn(() =>
-          this.lockResource.updateConditional(lockId, {
-            workerId: this.workerId,
-            timestamp: now,
-            ttl: 5000
-          }, {
-            ifMatch: existingLock._etag
-          })
-        );
-
-        return ok && result.success;
-      }
+      const lock = await storage.acquireLock(lockKey, {
+        ttl: 5, // 5 seconds
+        timeout: 0, // Don't wait if locked
+        workerId: this.workerId
+      });
 
-
-      const [okCreate, errCreate] = await tryFn(() =>
-        this.lockResource.insert({
-          id: lockId,
-          workerId: this.workerId,
-          timestamp: now,
-          ttl: 5000
-        })
-      );
-
-      return okCreate;
+      return lock !== null;
     } catch (error) {
       // On any error, skip this message
       if (this.config.verbose) {
@@ -446,17 +374,14 @@ export class S3QueuePlugin extends Plugin {
   }
 
   /**
-   * Release a distributed lock
+   * Release a distributed lock via PluginStorage
    */
   async releaseLock(messageId) {
-
-
-    }
-
-    const lockId = `lock-${messageId}`;
+    const storage = this.getStorage();
+    const lockKey = `msg-${messageId}`;
 
     try {
-      await
+      await storage.releaseLock(lockKey);
     } catch (error) {
       // Ignore errors on release (lock may have expired or been cleaned up)
       if (this.config.verbose) {
@@ -466,36 +391,12 @@ export class S3QueuePlugin extends Plugin {
   }
 
   /**
-   * Clean up stale locks
-   *
+   * Clean up stale locks - NO LONGER NEEDED
+   * TTL handles automatic expiration, no manual cleanup required
   */
   async cleanupStaleLocks() {
-
-
-    }
-
-    const now = Date.now();
-
-    try {
-      // List all locks
-      const locks = await this.lockResource.list();
-
-      // Delete expired locks
-      for (const lock of locks) {
-        const lockAge = now - lock.timestamp;
-        if (lockAge > lock.ttl) {
-          await this.lockResource.delete(lock.id);
-          if (this.config.verbose) {
-            console.log(`[cleanupStaleLocks] Removed expired lock: ${lock.id}`);
-          }
-        }
-      }
-    } catch (error) {
-      // Ignore errors in cleanup (non-critical)
-      if (this.config.verbose) {
-        console.log(`[cleanupStaleLocks] Error during cleanup: ${error.message}`);
-      }
-    }
+    // TTL automatically expires locks - no manual cleanup needed! ✨
+    return;
   }
 
   async attemptClaim(msg) {
```
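The queue (and, below, the scheduler) now delegates distributed locking to `PluginStorage` (see `package/src/concerns/plugin-storage.js`, which grew by 274 lines in this release), whose TTL-backed `acquireLock`/`releaseLock` replace the hand-rolled lock resources. The calls follow a try-lock pattern: `timeout: 0` fails fast instead of waiting, and the TTL caps how long a crashed holder can block a message. A usage sketch built only on the two calls shown in the diff; the wrapper function itself is hypothetical:

```js
// Try-lock pattern as used by a queue worker; `storage` is a PluginStorage
// instance obtained via this.getStorage() inside a plugin. The
// acquireLock(key, { ttl, timeout, workerId }) / releaseLock(key) signatures
// are assumed from the calls shown in the diff above.
async function withMessageLock(storage, messageId, workerId, fn) {
  const lockKey = `msg-${messageId}`;

  const lock = await storage.acquireLock(lockKey, {
    ttl: 5,     // seconds: a crashed worker frees the message after 5s at most
    timeout: 0, // do not wait: another worker already owns this message
    workerId
  });
  if (!lock) return false; // someone else holds it; move on to the next message

  try {
    await fn();
    return true;
  } finally {
    // Best-effort release; if it fails, the TTL expires the lock anyway
    await storage.releaseLock(lockKey).catch(() => {});
  }
}
```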
package/src/plugins/scheduler.plugin.js:

```diff
@@ -163,7 +163,6 @@ export class SchedulerPlugin extends Plugin {
     };
 
     this.database = null;
-    this.lockResource = null;
     this.jobs = new Map();
     this.activeJobs = new Map();
     this.timers = new Map();
@@ -218,9 +217,7 @@ export class SchedulerPlugin extends Plugin {
   }
 
   async onInstall() {
-
-    // Create lock resource for distributed locking
-    await this._createLockResource();
+    // Locks are now managed by PluginStorage with TTL - no Resource needed
 
     // Create job execution history resource
     if (this.config.persistJobs) {
@@ -258,27 +255,6 @@ export class SchedulerPlugin extends Plugin {
     this.emit('initialized', { jobs: this.jobs.size });
   }
 
-  async _createLockResource() {
-    const [ok, err, lockResource] = await tryFn(() =>
-      this.database.createResource({
-        name: 'plg_scheduler_job_locks',
-        attributes: {
-          id: 'string|required',
-          jobName: 'string|required',
-          lockedAt: 'number|required',
-          instanceId: 'string|optional'
-        },
-        behavior: 'body-only',
-        timestamps: false
-      })
-    );
-
-    if (!ok && !this.database.resources.plg_scheduler_job_locks) {
-      throw new Error(`Failed to create lock resource: ${err?.message}`);
-    }
-
-    this.lockResource = ok ? lockResource : this.database.resources.plg_scheduler_job_locks;
-  }
 
   async _createJobHistoryResource() {
     const [ok] = await tryFn(() => this.database.createResource({
@@ -416,19 +392,17 @@ export class SchedulerPlugin extends Plugin {
     // Mark as active immediately (will be updated with executionId later)
     this.activeJobs.set(jobName, 'acquiring-lock');
 
-    // Acquire distributed lock to prevent concurrent execution across instances
-    const
-    const
-
-
-
-
-
-      })
-    );
+    // Acquire distributed lock with TTL to prevent concurrent execution across instances
+    const storage = this.getStorage();
+    const lockKey = `job-${jobName}`;
+    const lock = await storage.acquireLock(lockKey, {
+      ttl: Math.ceil(job.timeout / 1000) + 60, // Job timeout + 60 seconds buffer
+      timeout: 0, // Don't wait if locked
+      workerId: process.pid ? String(process.pid) : 'unknown'
+    });
 
     // If lock couldn't be acquired, another instance is executing this job
-    if (!
+    if (!lock) {
       if (this.config.verbose) {
         console.log(`[SchedulerPlugin] Job '${jobName}' already running on another instance`);
       }
@@ -577,7 +551,7 @@ export class SchedulerPlugin extends Plugin {
       }
     } finally {
       // Always release the distributed lock
-      await tryFn(() =>
+      await tryFn(() => storage.releaseLock(lockKey));
     }
   }
 
```
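The scheduler sizes its lock TTL from the job's own timeout, `Math.ceil(job.timeout / 1000) + 60`, so the lock always outlives the longest permitted run by a 60-second buffer, and a crashed instance can block a job for at most that long. Worked values (the timeouts are hypothetical):

```js
// Lock TTL (seconds) produced by the formula in the hunk above,
// for a few hypothetical job timeouts given in milliseconds.
const timeouts = [30_000, 120_000, 3_600_000]; // 30s, 2min, 1h
for (const timeout of timeouts) {
  console.log(`${timeout} ms -> ttl ${Math.ceil(timeout / 1000) + 60} s`);
}
// 30000 ms -> ttl 90 s
// 120000 ms -> ttl 180 s
// 3600000 ms -> ttl 3660 s
```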