bulltrackers-module 1.0.581 → 1.0.583

This diff shows the changes between publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/computation-system/WorkflowOrchestrator.js CHANGED
@@ -1,6 +1,7 @@
 /**
  * FILENAME: computation-system/WorkflowOrchestrator.js
  * UPDATED: Fixed 'Version Mismatch' deadlock for historical chains.
+ * UPDATED: Added Force-Run logic for 'isTest' computations on current day.
  */
 
 const { normalizeName, DEFINITIVE_EARLIEST_DATES } = require('./utils/utils');
@@ -46,12 +47,17 @@ function isDependencyReady(depName, isHistoricalSelf, currentStatusMap, prevStat
 function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus, manifestMap, prevDailyStatus = null) {
   const report = { runnable: [], blocked: [], impossible: [], failedDependency: [], reRuns: [], skipped: [] };
   const simulationStatus = { ...dailyStatus };
+  const isToday = dateStr === new Date().toISOString().slice(0, 10);
 
   for (const calc of calcsInPass) {
     const cName = normalizeName(calc.name);
     const stored = simulationStatus[cName];
     const currentHash = calc.hash;
 
+    // [NEW] Rule: 'isTest' computations always re-run on the current day.
+    // This ensures debug/test probes fire immediately to test system changes.
+    const shouldForceRun = isToday && (calc.isTest === true);
+
     // 1. Root Data Check
     const rootCheck = checkRootDependencies(calc, rootDataStatus);
     if (!rootCheck.canRun) {
@@ -65,7 +71,8 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
     }
 
     // --- OPTIMIZATION: Early skip if code matches AND data is stable ---
-    if (stored?.hash === currentHash) {
+    // [UPDATED] We bypass this optimization if shouldForceRun is true
+    if (stored?.hash === currentHash && !shouldForceRun) {
       let hasDataDrift = false;
       let isBlocked = false;
       let missingDeps = [];
@@ -161,6 +168,9 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
       report.runnable.push({ ...taskPayload, reason: "New Calculation" });
     } else if (stored.hash !== currentHash) {
       report.reRuns.push({ ...taskPayload, oldHash: stored.hash, newHash: currentHash, reason: "Hash Mismatch" });
+    } else if (shouldForceRun) {
+      // [NEW] Logic to handle the forced run for Test probes
+      report.reRuns.push({ ...taskPayload, oldHash: stored.hash, newHash: currentHash, reason: "Test Computation (Always Run Today)" });
     } else if (hasDataDrift) {
       report.runnable.push({ ...taskPayload, reason: "Input Data Changed" });
     }
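
Taken together, the hunks above mean a computation flagged with `isTest` skips the hash-match short-circuit and is re-queued once per day, but only for the current date. A minimal sketch of the behavior, using a hypothetical manifest entry (names and hash values are illustrative, not from the package):

```js
// Hypothetical manifest entry for a debug probe.
const probeEntry = {
  name: 'debugprobe',
  isTest: true,   // marks this as a test computation
  hash: 'abc123'  // assume this matches the stored hash from the last run
};

// On today's date: shouldForceRun === true, the early-skip branch is bypassed,
// and the entry lands in report.reRuns with reason "Test Computation (Always Run Today)".
// On any historical date: isToday === false, the hashes match, and the entry
// is skipped exactly as before this release.
```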

package/computation-system/context/ManifestBuilder.js CHANGED
@@ -1,13 +1,9 @@
-/**
- * {
- * type: uploaded file
- * fileName: computation-system/context/ManifestBuilder.js
- * }
- */
 /**
  * @fileoverview Dynamic Manifest Builder - Handles Topological Sort and Auto-Discovery.
  * UPDATED: Removed Automatic Infra Hashing. Now relies strictly on SYSTEM_EPOCH.
- * UPDATED: Whitelisted 'rootDataSeries', 'dependencySeries', and 'mandatoryRoots' metadata fields.
+ * UPDATED: Whitelisted 'rootDataSeries', 'dependencySeries', 'mandatoryRoots', and 'isTest'.
+ * UPDATED: Whitelisted 'schedule' to allow calculations to define their own execution cadence (Daily/Weekly/Monthly).
+ * UPDATED: Whitelisted 'ttlDays' to allow calculations to define custom retention policies.
  */
 const { generateCodeHash, LEGACY_MAPPING } = require('../topology/HashManager.js');
 const { normalizeName } = require('../utils/utils');
@@ -192,7 +188,7 @@ function buildManifest(productLinesToRun = [], calculations) {
   if (typeof Class.getDependencies !== 'function') { log.fatal(`Calculation "${normalizedName}" missing static getDependencies().`); hasFatalError = true; return; }
 
   const metadata = Class.getMetadata();
-  const weight = estimateComplexity(Class, metadata)
+  const weight = estimateComplexity(Class, metadata)
   const dependencies = Class.getDependencies().map(normalizeName);
   const codeStr = Class.toString();
   const selfCodeHash = generateCodeHash(codeStr);
@@ -233,22 +229,24 @@ function buildManifest(productLinesToRun = [], calculations) {
   const intrinsicHash = generateCodeHash(compositeHashString);
 
   const manifestEntry = {
-    name: normalizedName,
-    class: Class,
-    category: folderName === 'core' && metadata.category ? metadata.category : folderName,
+    name: normalizedName,
+    class: Class,
+    category: folderName === 'core' && metadata.category ? metadata.category : folderName,
     sourcePackage: folderName,
-    type: metadata.type,
-    isPage: metadata.isPage === true,
-    isHistorical: metadata.isHistorical !== undefined ? metadata.isHistorical : false,
+    type: metadata.type,
+    isPage: metadata.isPage === true,
+    isHistorical: metadata.isHistorical !== undefined ? metadata.isHistorical : false,
+    isTest: metadata.isTest === true,
     rootDataDependencies: metadata.rootDataDependencies || [],
-    // [NEW] Pass Series & Mandatory Config
-    rootDataSeries: metadata.rootDataSeries || null,
-    dependencySeries: metadata.dependencySeries || null,
-    mandatoryRoots: metadata.mandatoryRoots || [], // [NEW]
-
-    canHaveMissingRoots: metadata.canHaveMissingRoots || false,
-    userType: metadata.userType,
+    rootDataSeries: metadata.rootDataSeries || null,
+    dependencySeries: metadata.dependencySeries || null,
+    mandatoryRoots: metadata.mandatoryRoots || [],
+    canHaveMissingRoots: metadata.canHaveMissingRoots || false,
+    userType: metadata.userType,
     dependencies: dependencies,
+    schedule: metadata.schedule || null,
+    // [NEW] Added TTL Policy to Manifest
+    ttlDays: metadata.ttlDays,
     pass: 0,
     hash: intrinsicHash,
     weight: weight,
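
The whitelisted fields above are read straight off each calculation class's static metadata. A sketch of a class that exercises the new fields (the class and values are illustrative; only the field names come from this diff):

```js
class WeeklyPortfolioSummary {
  static getMetadata() {
    return {
      type: 'aggregate',
      isTest: false,                           // new: force-run flag for test probes
      schedule: { type: 'WEEKLY', days: [1] }, // new: run only on Mondays (0=Sun..6=Sat)
      ttlDays: 30                              // new: custom retention instead of the 90-day default
    };
  }
  static getDependencies() { return []; }
}
```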

@@ -4,6 +4,7 @@
  * UPDATED: Implemented "Fast-Forward" Scanning Loop to skip empty dates efficiently.
  * UPDATED: Enforces Strict One-Shot Policy (Standard -> HighMem -> Dead Letter).
  * UPDATED: Generates Google Cloud Trace Context (traceId/spanId) for end-to-end monitoring.
+ * UPDATED: Added Schedule Awareness (Daily, Weekly, Monthly) to filter tasks by date.
  */
 
 const { getExpectedDateStrings, getEarliestDataDates, normalizeName, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils.js');
@@ -19,6 +20,42 @@ const BASE_SECONDS_PER_WEIGHT_UNIT = 3;
 const SESSION_CACHE_DURATION_MS = 1000 * 60 * 30; // 30 Minutes
 const STALE_LOCK_THRESHOLD_MS = 1000 * 60 * 15;
 
+// =============================================================================
+// HELPER: Schedule Logic
+// =============================================================================
+/**
+ * Checks if a computation is scheduled to run on a specific date.
+ * Defaults to DAILY if no schedule is present.
+ * * @param {string} dateStr - YYYY-MM-DD string
+ * @param {Object} scheduleConfig - { type: 'DAILY'|'WEEKLY'|'MONTHLY', days: [] }
+ * @returns {boolean} True if the computation should run
+ */
+function isComputationScheduled(dateStr, scheduleConfig) {
+  // Default: Run every day if no schedule is provided or explicitly DAILY
+  if (!scheduleConfig || !scheduleConfig.type || scheduleConfig.type === 'DAILY') {
+    return true;
+  }
+
+  const date = new Date(dateStr + 'T00:00:00Z'); // Ensure UTC parsing
+
+  // Weekly Schedule: Check Day of Week (0=Sun, 1=Mon, ..., 6=Sat)
+  if (scheduleConfig.type === 'WEEKLY') {
+    const dayOfWeek = date.getUTCDay();
+    const validDays = Array.isArray(scheduleConfig.days) ? scheduleConfig.days : [scheduleConfig.day];
+    return validDays.includes(dayOfWeek);
+  }
+
+  // Monthly Schedule: Check Day of Month (1-31)
+  if (scheduleConfig.type === 'MONTHLY') {
+    const dayOfMonth = date.getUTCDate();
+    const validDates = Array.isArray(scheduleConfig.days) ? scheduleConfig.days : [scheduleConfig.day];
+    return validDates.includes(dayOfMonth);
+  }
+
+  // Fallback default
+  return true;
+}
+
 // =============================================================================
 // HELPER: Ledger Awareness
 // =============================================================================
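
A few worked calls against `isComputationScheduled` as defined above (dates chosen for illustration; 2024-01-15 was a Monday):

```js
isComputationScheduled('2024-01-15', null);                               // true  (no schedule => DAILY)
isComputationScheduled('2024-01-15', { type: 'WEEKLY', days: [1] });      // true  (getUTCDay() === 1)
isComputationScheduled('2024-01-16', { type: 'WEEKLY', days: [1] });      // false (Tuesday)
isComputationScheduled('2024-02-01', { type: 'MONTHLY', days: [1, 15] }); // true  (1st of the month)
isComputationScheduled('2024-02-02', { type: 'MONTHLY', day: 15 });       // false (singular 'day' form also accepted)
```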
@@ -281,19 +318,33 @@ async function handlePassVerification(config, dependencies, computationManifest,
   const missingTasks = [];
 
   for (const date of sessionDates) {
+    // [SCHEDULE CHECK] Filter tasks that are not scheduled for this date
+    const scheduledComputations = calcsInPass.filter(c =>
+      isComputationScheduled(date, c.schedule)
+    );
+
+    if (scheduledComputations.length === 0) continue;
+
     const [dailyStatus, availability] = await Promise.all([
       fetchComputationStatus(date, config, dependencies),
       checkRootDataAvailability(date, config, dependencies, DEFINITIVE_EARLIEST_DATES)
     ]);
 
     let prevDailyStatus = null;
-    if (calcsInPass.some(c => c.isHistorical)) {
+    if (scheduledComputations.some(c => c.isHistorical)) {
       const prevD = new Date(date + 'T00:00:00Z');
       prevD.setUTCDate(prevD.getUTCDate() - 1);
       prevDailyStatus = await fetchComputationStatus(prevD.toISOString().slice(0, 10), config, dependencies);
     }
 
-    const report = analyzeDateExecution(date, calcsInPass, availability ? availability.status : {}, dailyStatus, manifestMap, prevDailyStatus);
+    const report = analyzeDateExecution(
+      date,
+      scheduledComputations, // Use filtered list
+      availability ? availability.status : {},
+      dailyStatus,
+      manifestMap,
+      prevDailyStatus
+    );
 
     const pending = [...report.runnable, ...report.reRuns];
 
@@ -327,6 +378,16 @@ async function handleSweepDispatch(config, dependencies, computationManifest, re
   const passes = groupByPass(computationManifest);
   const calcsInPass = passes[passToRun] || [];
 
+  // [SCHEDULE CHECK] Filter tasks that are not scheduled for this date
+  const scheduledComputations = calcsInPass.filter(c =>
+    isComputationScheduled(date, c.schedule)
+  );
+
+  if (scheduledComputations.length === 0) {
+    logger.log('INFO', `[Sweep] ${date} has no scheduled tasks for Pass ${passToRun}. Ignoring.`);
+    return { dispatched: 0 };
+  }
+
   // 1. Analyze specific date
   const [dailyStatus, availability] = await Promise.all([
     fetchComputationStatus(date, config, dependencies),
@@ -334,14 +395,21 @@ async function handleSweepDispatch(config, dependencies, computationManifest, re
   ]);
 
   let prevDailyStatus = null;
-  if (calcsInPass.some(c => c.isHistorical)) {
+  if (scheduledComputations.some(c => c.isHistorical)) {
    const prevD = new Date(date + 'T00:00:00Z');
    prevD.setUTCDate(prevD.getUTCDate() - 1);
    prevDailyStatus = await fetchComputationStatus(prevD.toISOString().slice(0, 10), config, dependencies);
   }
 
   const manifestMap = new Map(computationManifest.map(c => [normalizeName(c.name), c]));
-  const report = analyzeDateExecution(date, calcsInPass, availability ? availability.status : {}, dailyStatus, manifestMap, prevDailyStatus);
+  const report = analyzeDateExecution(
+    date,
+    scheduledComputations, // Use filtered list
+    availability ? availability.status : {},
+    dailyStatus,
+    manifestMap,
+    prevDailyStatus
+  );
   const pending = [...report.runnable, ...report.reRuns];
 
   if (pending.length === 0) {
@@ -491,10 +559,21 @@ async function handleStandardDispatch(config, dependencies, computationManifest,
       break;
     }
 
-    // 2. Analyze Date
+    // 2. [SCHEDULE CHECK] Filter computations scheduled for this specific date
+    const scheduledComputations = calcsInThisPass.filter(c =>
+      isComputationScheduled(selectedDate, c.schedule)
+    );
+
+    // Optimization: If nothing is scheduled for today, skip expensive DB checks
+    if (scheduledComputations.length === 0) {
+      currentCursor++;
+      continue;
+    }
+
+    // 3. Analyze Date
     const earliestDates = await getEarliestDataDates(config, dependencies);
     let prevDailyStatusPromise = Promise.resolve(null);
-    if (calcsInThisPass.some(c => c.isHistorical)) {
+    if (scheduledComputations.some(c => c.isHistorical)) {
      const prevD = new Date(selectedDate + 'T00:00:00Z');
      prevD.setUTCDate(prevD.getUTCDate() - 1);
      if (prevD >= earliestDates.absoluteEarliest) {
@@ -509,7 +588,14 @@ async function handleStandardDispatch(config, dependencies, computationManifest,
     ]);
 
     if (availability && availability.status) {
-      const report = analyzeDateExecution(selectedDate, calcsInThisPass, availability.status, dailyStatus, manifestMap, prevDailyStatus);
+      const report = analyzeDateExecution(
+        selectedDate,
+        scheduledComputations, // Use filtered list
+        availability.status,
+        dailyStatus,
+        manifestMap,
+        prevDailyStatus
+      );
       let rawTasks = [...report.runnable, ...report.reRuns];
 
       if (rawTasks.length > 0) {
@@ -2,6 +2,7 @@
  * @fileoverview Handles saving computation results with observability and Smart Cleanup.
  * UPDATED: Fixed bug where Alert Computations failed to trigger Pub/Sub on empty FINAL flush.
  * UPDATED: Added support for 'isPage' mode to store per-user data in subcollections.
+ * UPDATED: Implemented TTL retention policy. Defaults to 90 days from the computation date.
  */
 const { commitBatchInChunks, generateDataHash } = require('../utils/utils');
 const { updateComputationStatus } = require('./StatusRepository');
@@ -17,6 +18,7 @@ const zlib = require('zlib');
 const NON_RETRYABLE_ERRORS = [ 'PERMISSION_DENIED', 'DATA_LOSS', 'FAILED_PRECONDITION' ];
 const SIMHASH_REGISTRY_COLLECTION = 'system_simhash_registry';
 const CONTRACTS_COLLECTION = 'system_contracts';
+const DEFAULT_TTL_DAYS = 90;
 
 async function commitResults(stateObj, dStr, passName, config, deps, skipStatusWrite = false, options = {}) {
   const successUpdates = {};
@@ -56,8 +58,11 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
 
   // Check metadata for alert flag (defaults to false)
   const isAlertComputation = calc.manifest.isAlertComputation === true;
-  // [NEW] Check metadata for page flag (defaults to false)
+  // Check metadata for page flag (defaults to false)
   const isPageComputation = calc.manifest.isPage === true;
+
+  // [NEW] Determine TTL Policy
+  const ttlDays = calc.manifest.ttlDays !== undefined ? calc.manifest.ttlDays : DEFAULT_TTL_DAYS;
 
   try {
     const result = await calc.getResult();
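
Note the `!== undefined` guard above: an explicit `ttlDays: 0` is honored rather than falling back to the default. The resolution logic in isolation:

```js
const DEFAULT_TTL_DAYS = 90;
const resolveTtl = (manifest) =>
  manifest.ttlDays !== undefined ? manifest.ttlDays : DEFAULT_TTL_DAYS;

resolveTtl({});             // 90 (default)
resolveTtl({ ttlDays: 7 }); // 7  (explicit override)
resolveTtl({ ttlDays: 0 }); // 0  (explicit zero is respected, not defaulted)
```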
@@ -129,24 +134,31 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     continue;
   }
 
-  // [NEW] Page Computation Logic (Fan-Out)
+  // [NEW] Page Computation Logic (Fan-Out) with TTL
   // Bypasses standard compression/sharding to write per-user documents
   if (isPageComputation && !isEmpty) {
     const mainDocRef = db.collection(config.resultsCollection).doc(dStr)
       .collection(config.resultsSubcollection).doc(calc.manifest.category)
       .collection(config.computationsSubcollection).doc(name);
 
+    // Calculate expiration based on computation date
+    const expireAt = calculateExpirationDate(dStr, ttlDays);
+
     // 1. Fan-out writes for each user
     const pageWrites = [];
     // We assume result is { [cid]: { ...data... }, [cid2]: { ... } }
     for (const [cid, userData] of Object.entries(result)) {
-      // STRATEGY: Use a fixed collection 'pages' so we can clean it up later
       // Path: .../{ComputationName}/pages/{cid}
       const userDocRef = mainDocRef.collection('pages').doc(cid);
 
+      // Inject _expireAt into the user data payload for free deletion
+      const payload = (typeof userData === 'object' && userData !== null)
+        ? { ...userData, _expireAt: expireAt }
+        : { value: userData, _expireAt: expireAt };
+
       pageWrites.push({
         ref: userDocRef,
-        data: userData, // Write the raw data directly
+        data: payload,
         options: { merge: false } // Overwrite specifically for this run
       });
     }
@@ -156,17 +168,17 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
       await commitBatchInChunks(config, deps, pageWrites, `${name}::PageFanOut`);
       runMetrics.io.writes += pageWrites.length;
       runMetrics.storage.keys = pageWrites.length;
-      logger.log('INFO', `[PageMode] ${name}: Wrote ${pageWrites.length} user pages.`);
+      logger.log('INFO', `[PageMode] ${name}: Wrote ${pageWrites.length} user pages. TTL: ${ttlDays}d.`);
     }
 
-    // 3. Write the "Header" document (Important for Status/Metrics)
-    // We store NO data here, just metadata saying "Go look in /pages"
+    // 3. Write the "Header" document (Important for Status/Metrics/TTL)
     if (flushMode !== 'INTERMEDIATE') {
       const headerData = {
        _completed: true,
        _isPageMode: true, // Flag for readers to know where to look
        _pageCount: pageWrites.length,
-       _lastUpdated: new Date().toISOString()
+       _lastUpdated: new Date().toISOString(),
+       _expireAt: expireAt // Ensure the header also gets deleted
      };
 
      await mainDocRef.set(headerData, { merge: true });
@@ -187,7 +199,7 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
     continue; // Skip the standard writeSingleResult logic
   }
 
-  // Standard Computation Logic (Compression or Sharding)
+  // Standard Computation Logic (Compression or Sharding) with TTL
   if (typeof result === 'object') runMetrics.storage.keys = Object.keys(result).length;
   const resultKeys = Object.keys(result || {});
   const isMultiDate = resultKeys.length > 0 && resultKeys.every(k => /^\d{4}-\d{2}-\d{2}$/.test(k));
@@ -196,8 +208,12 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
   const datePromises = resultKeys.map((historicalDate) => fanOutLimit(async () => {
     const dailyData = result[historicalDate];
     if (!dailyData || Object.keys(dailyData).length === 0) return;
+
+    // Calculate specific TTL for this historical date
+    const dailyExpireAt = calculateExpirationDate(historicalDate, ttlDays);
+
     const historicalDocRef = db.collection(config.resultsCollection).doc(historicalDate).collection(config.resultsSubcollection).doc(calc.manifest.category).collection(config.computationsSubcollection).doc(name);
-    const stats = await writeSingleResult(dailyData, historicalDocRef, name, historicalDate, logger, config, deps, 0, 'STANDARD', false);
+    const stats = await writeSingleResult(dailyData, historicalDocRef, name, historicalDate, logger, config, deps, 0, 'STANDARD', false, dailyExpireAt);
     runMetrics.io.writes += stats.opCounts.writes;
     runMetrics.io.deletes += stats.opCounts.deletes;
@@ -213,8 +229,11 @@ async function commitResults(stateObj, dStr, passName, config, deps, skipStatusW
 
   if (calc.manifest.hash) { successUpdates[name] = { hash: calc.manifest.hash, simHash, resultHash, dependencyResultHashes: calc.manifest.dependencyResultHashes || {}, category: calc.manifest.category, composition: calc.manifest.composition, metrics: runMetrics }; }
 } else {
+  // Calculate TTL for the main run date
+  const runExpireAt = calculateExpirationDate(dStr, ttlDays);
+
   const mainDocRef = db.collection(config.resultsCollection).doc(dStr).collection(config.resultsSubcollection).doc(calc.manifest.category).collection(config.computationsSubcollection).doc(name);
-  const writeStats = await writeSingleResult(result, mainDocRef, name, dStr, logger, config, deps, currentShardIndex, flushMode, isInitialWrite);
+  const writeStats = await writeSingleResult(result, mainDocRef, name, dStr, logger, config, deps, currentShardIndex, flushMode, isInitialWrite, runExpireAt);
 
   runMetrics.storage.sizeBytes = writeStats.totalSize;
   runMetrics.storage.isSharded = writeStats.isSharded;
@@ -291,7 +310,7 @@ async function fetchContracts(db, calcNames) {
   return map;
 }
 
-async function writeSingleResult(result, docRef, name, dateContext, logger, config, deps, startShardIndex = 0, flushMode = 'STANDARD', isInitialWrite = false) {
+async function writeSingleResult(result, docRef, name, dateContext, logger, config, deps, startShardIndex = 0, flushMode = 'STANDARD', isInitialWrite = false, expireAt = null) {
   const opCounts = { writes: 0, deletes: 0 };
 
   // Always check for shards if we might compress
@@ -312,7 +331,7 @@ async function writeSingleResult(result, docRef, name, dateContext, logger, conf
   if (rawBuffer.length > 50 * 1024) {
     const compressedBuffer = zlib.gzipSync(rawBuffer);
     if (compressedBuffer.length < 900 * 1024) {
-      logger.log('INFO', `[Compression] ${name}: Compressed ${(rawBuffer.length/1024).toFixed(0)}KB -> ${(compressedBuffer.length/1024).toFixed(0)}KB.`);
+      logger.log('INFO', `[Compression] ${name}: Compressed ${(rawBuffer.length/1024).toFixed(0)}KB -> ${(compressedBuffer.length/1024).toFixed(0)}KB. TTL: ${expireAt ? expireAt.toISOString().split('T')[0] : 'None'}`);
 
       const payloadBuffer = Buffer.from(compressedBuffer);
@@ -323,6 +342,11 @@ async function writeSingleResult(result, docRef, name, dateContext, logger, conf
         payload: payloadBuffer
       };
 
+      // Inject TTL if present
+      if (expireAt) {
+        compressedPayload._expireAt = expireAt;
+      }
+
      // Self-Healing: If we are writing compressed, we MUST ensure shards are gone.
      if (wasSharded) {
        const updates = [];
@@ -362,7 +386,7 @@ async function writeSingleResult(result, docRef, name, dateContext, logger, conf
     if (committed) break;
     const constraints = strategies[attempt];
     try {
-      const updates = await prepareAutoShardedWrites(result, docRef, logger, constraints.bytes, constraints.keys, startShardIndex, flushMode);
+      const updates = await prepareAutoShardedWrites(result, docRef, logger, constraints.bytes, constraints.keys, startShardIndex, flushMode, expireAt);
 
       if (shouldWipeShards) {
        const shardCol = docRef.collection('_shards');
@@ -439,15 +463,26 @@ async function writeSingleResult(result, docRef, name, dateContext, logger, conf
   return finalStats;
 }
 
-async function prepareAutoShardedWrites(result, docRef, logger, maxBytes = 900 * 1024, maxKeys = null, startShardIndex = 0, flushMode = 'STANDARD') {
+async function prepareAutoShardedWrites(result, docRef, logger, maxBytes = 900 * 1024, maxKeys = null, startShardIndex = 0, flushMode = 'STANDARD', expireAt = null) {
   const OVERHEAD_ALLOWANCE = 20 * 1024; const CHUNK_LIMIT = maxBytes - OVERHEAD_ALLOWANCE;
   const totalSize = calculateFirestoreBytes(result); const docPathSize = Buffer.byteLength(docRef.path, 'utf8') + 16;
   const writes = []; const shardCollection = docRef.collection('_shards');
   let currentChunk = {}; let currentChunkSize = 0; let currentKeyCount = 0;
   let shardIndex = startShardIndex;
 
+  // Helper to inject TTL into chunk/payload
+  const injectTTL = (data) => {
+    if (expireAt) {
+      return { ...data, _expireAt: expireAt };
+    }
+    return data;
+  };
+
   if (!maxKeys && (totalSize + docPathSize) < CHUNK_LIMIT && flushMode === 'STANDARD' && startShardIndex === 0) {
-    const data = { ...result, _completed: true, _sharded: false, _lastUpdated: new Date().toISOString() };
+    const data = { ...result, _completed: true, _sharded: false, _lastUpdated: new Date().toISOString() };
+    // If single doc write (no shards), just inject expireAt into the main doc
+    if (expireAt) data._expireAt = expireAt;
+
     return [{ ref: docRef, data, options: { merge: true } }];
   }
 
@@ -457,14 +492,18 @@ async function prepareAutoShardedWrites(result, docRef, logger, maxBytes = 900 *
     const byteLimitReached = (currentChunkSize + itemSize > CHUNK_LIMIT); const keyLimitReached = (maxKeys && currentKeyCount + 1 >= maxKeys);
 
     if (byteLimitReached || keyLimitReached) {
-      writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } });
+      // Write chunk with TTL
+      const chunkData = injectTTL(currentChunk);
+      writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: chunkData, options: { merge: false } });
       shardIndex++; currentChunk = {}; currentChunkSize = 0; currentKeyCount = 0;
     }
     currentChunk[key] = value; currentChunkSize += itemSize; currentKeyCount++;
   }
 
   if (Object.keys(currentChunk).length > 0) {
-    writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: currentChunk, options: { merge: false } });
+    // Write remaining chunk with TTL
+    const chunkData = injectTTL(currentChunk);
+    writes.push({ ref: shardCollection.doc(`shard_${shardIndex}`), data: chunkData, options: { merge: false } });
     shardIndex++;
   }
 
@@ -475,6 +514,9 @@ async function prepareAutoShardedWrites(result, docRef, logger, maxBytes = 900 *
     _shardCount: shardIndex,
     _lastUpdated: new Date().toISOString()
   };
+  // Ensure the pointer/metadata document also has the TTL
+  if (expireAt) pointerData._expireAt = expireAt;
+
   writes.push({ ref: docRef, data: pointerData, options: { merge: true } });
 }
 
@@ -489,7 +531,7 @@ async function deleteOldCalculationData(dateStr, oldCategory, calcName, config,
 
   const batch = db.batch(); let ops = 0;
 
-  // [NEW] Clean up 'pages' subcollection if it exists (for Page Mode)
+  // Clean up 'pages' subcollection if it exists (for Page Mode)
   const pagesCol = oldDocRef.collection('pages');
   // Note: listDocuments works nicely for small-ish collections.
   // If 'pages' has 10k+ docs, we rely on the implementation of listDocuments
@@ -520,4 +562,17 @@ function calculateFirestoreBytes(value) {
   if (typeof value === 'object') { let sum = 0; for (const k in value) { if (Object.prototype.hasOwnProperty.call(value, k)) { sum += (Buffer.byteLength(k, 'utf8') + 1) + calculateFirestoreBytes(value[k]); } } return sum; } return 0;
 }
 
+/**
+ * Calculates the expiration date based on the computation date context (not execution time).
+ * @param {string} dateStr - The YYYY-MM-DD string of the computation context.
+ * @param {number} ttlDays - Days to retain data.
+ * @returns {Date} The expiration Date object.
+ */
+function calculateExpirationDate(dateStr, ttlDays) {
+  const base = new Date(dateStr);
+  // Add days to the base computation date
+  base.setDate(base.getDate() + ttlDays);
+  return base;
+}
+
 module.exports = { commitResults };
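
Because `calculateExpirationDate` anchors the TTL to the computation date rather than the write time, backfilled historical results can expire immediately. Two worked calls (dates illustrative):

```js
calculateExpirationDate('2024-01-01', 90); // ~2024-03-31: 90 days after the data date
calculateExpirationDate('2023-01-01', 90); // already in the past, so a backfill is deletable at once
```

Two caveats: 'YYYY-MM-DD' strings parse as UTC midnight while `setDate()` operates in the server's local time zone, so the computed instant can shift by up to a day depending on the host's offset. And the `_expireAt` field only causes deletion if the backing store enforces it; with Firestore this presumably requires a TTL policy configured on `_expireAt` for each affected collection group, which this diff does not show.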
@@ -11,9 +11,22 @@ const { FieldValue } = require('@google-cloud/firestore');
  * Generates an OTP and returns it to the user to place in their bio.
  */
 async function initiateVerification(req, res, dependencies, config) {
-  const { db } = dependencies;
+  const { db, logger } = dependencies;
   const { username } = req.body;
+
+  // Safety check: ensure config exists
+  if (!config) {
+    logger?.log('ERROR', '[Verification] Config is undefined or null in initiateVerification');
+    return res.status(500).json({ error: "Configuration not initialized." });
+  }
+
   const { verificationsCollection } = config;
+
+  // Safety check: ensure required config values exist
+  if (!verificationsCollection) {
+    logger?.log('ERROR', '[Verification] verificationsCollection is not defined in config');
+    return res.status(500).json({ error: "Configuration error: verificationsCollection not initialized." });
+  }
 
   if (!username || typeof username !== 'string') {
     return res.status(400).json({ error: "Invalid username." });
@@ -51,15 +64,40 @@ async function initiateVerification(req, res, dependencies, config) {
 async function finalizeVerification(req, res, dependencies, config) {
   const { db, logger } = dependencies;
   const { username } = req.body;
+
+  // Safety check: ensure config exists and is an object
+  if (!config || typeof config !== 'object') {
+    logger.log('ERROR', '[Verification] Config is undefined, null, or not an object', { configType: typeof config, configValue: config });
+    return res.status(500).json({ error: "Configuration not initialized." });
+  }
+
+  // Safe destructuring with defaults to prevent ReferenceErrors
   const {
-    verificationsCollection,
-    signedInUsersCollection,
-    proxyConfig,
-    headerConfig,
-    pubsubTopicUserFetch,
-    pubsubTopicUserFetchOnDemand,
-    pubsubTopicSocialFetch
-  } = config;
+    verificationsCollection = null,
+    signedInUsersCollection = null,
+    proxyConfig = null,
+    headerConfig = null,
+    pubsubTopicUserFetch = null,
+    pubsubTopicUserFetchOnDemand = null,
+    pubsubTopicSocialFetch = null
+  } = config || {};
+
+  // Safety check: ensure required config values exist
+  if (!signedInUsersCollection) {
+    logger.log('ERROR', '[Verification] signedInUsersCollection is not defined in config', {
+      configKeys: Object.keys(config),
+      signedInUsersCollection: config.signedInUsersCollection
+    });
+    return res.status(500).json({ error: "Configuration error: signedInUsersCollection not initialized." });
+  }
+
+  if (!verificationsCollection) {
+    logger.log('ERROR', '[Verification] verificationsCollection is not defined in config', {
+      configKeys: Object.keys(config),
+      verificationsCollection: config.verificationsCollection
+    });
+    return res.status(500).json({ error: "Configuration error: verificationsCollection not initialized." });
+  }
 
   // Use on-demand topic for user signup (API-triggered)
   const taskEngineTopic = pubsubTopicUserFetchOnDemand || pubsubTopicUserFetch || 'etoro-user-fetch-topic-ondemand';
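
The switch from plain destructuring to `= config || {}` with per-field defaults guards two distinct failure modes, sketched below (standalone illustration, not package code):

```js
// 1) Destructuring undefined throws immediately:
//    const { a } = undefined; // TypeError: Cannot destructure property 'a' of 'undefined'
// 2) With a fallback object and defaults, missing fields degrade to null
//    and are caught by the explicit guards instead of crashing the handler:
const config = undefined;
const { verificationsCollection = null } = config || {};
console.log(verificationsCollection); // null -> handler returns a controlled HTTP 500
```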
@@ -173,7 +211,6 @@ async function finalizeVerification(req, res, dependencies, config) {
   // Generate a requestId for tracking and to ensure finalizeOnDemandRequest runs
   // This is critical - without requestId, the root data indexer and computations won't be triggered
   const requestId = `signup-${realCID}-${Date.now()}`;
-  const { signedInUsersCollection } = config;
 
   // Create request tracking document (similar to user sync requests)
   try {
@@ -262,8 +299,24 @@ async function finalizeVerification(req, res, dependencies, config) {
     });
 
   } catch (error) {
-    logger.log('ERROR', `[Verification] System error for ${username}`, error);
-    return res.status(500).json({ error: error.message });
+    // Enhanced error logging to capture full error details
+    const errorDetails = {
+      message: error?.message || 'Unknown error',
+      stack: error?.stack || 'No stack trace',
+      name: error?.name || 'Error',
+      configExists: !!config,
+      configKeys: config ? Object.keys(config) : [],
+      signedInUsersCollection: config?.signedInUsersCollection || 'NOT SET',
+      verificationsCollection: config?.verificationsCollection || 'NOT SET'
+    };
+
+    logger.log('ERROR', `[Verification] System error for ${username}`, errorDetails);
+    logger.log('ERROR', `[Verification] Full error object:`, error);
+
+    return res.status(500).json({
+      error: error?.message || 'An unexpected error occurred during verification.',
+      details: process.env.NODE_ENV === 'development' ? errorDetails : undefined
+    });
   }
 }
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "bulltrackers-module",
-  "version": "1.0.581",
+  "version": "1.0.583",
   "description": "Helper Functions for Bulltrackers.",
   "main": "index.js",
   "files": [