bulltrackers-module 1.0.240 → 1.0.242
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as published in that registry.
@@ -1,8 +1,9 @@
 /**
  * @fileoverview Main Orchestrator. Coordinates the topological execution.
  * UPDATED: Exports analyzeDateExecution for Build Reporting tools.
+ * UPDATED: Uses centralized DEFINITIVE_EARLIEST_DATES.
  */
-const { normalizeName } = require('./utils/utils');
+const { normalizeName, DEFINITIVE_EARLIEST_DATES } = require('./utils/utils');
 const { checkRootDataAvailability } = require('./data/AvailabilityChecker');
 const { fetchExistingResults } = require('./data/DependencyFetcher');
 const { fetchComputationStatus, updateComputationStatus } = require('./persistence/StatusRepository');
@@ -123,6 +124,7 @@ function analyzeDateExecution(dateStr, calcsInPass, rootDataStatus, dailyStatus,
 report.runnable.push(calc);
 } else if (storedHash !== currentHash) {
 // Hash Mismatch (Code Changed).
+// Pass migration info here too, in case category ALSO changed.
 report.reRuns.push({
   name: cName,
   oldHash: storedHash,
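
For reference, the analysis object built here is consumed by the BuildReporter later in this diff. Based solely on the fields read there (runnable, reRuns, blocked, impossible, failedDependency), it appears to take roughly the shape sketched below; the calculation names are hypothetical and any structure beyond these fields is not visible in the diff.

    // Hypothetical sketch of the analysis report shape, inferred only from the
    // fields the BuildReporter reads later in this diff -- not the full definition.
    const exampleAnalysis = {
      runnable:   [{ name: 'price-momentum' }],                                   // new, never computed
      reRuns:     [{ name: 'social-velocity', oldHash: 'a1b2c3', newHash: 'd4e5f6',
                     previousCategory: 'daily', newCategory: 'meta' }],            // hash change and/or migration
      blocked:    [{ name: 'insight-trend', reason: 'Root data not yet available' }],
      impossible: [{ name: 'history-depth', reason: 'Date precedes earliest data' }],
      failedDependency: [{ name: 'composite-score', missing: ['price-momentum'] }]
    };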
@@ -155,15 +157,8 @@ async function runDateComputation(dateStr, passToRun, calcsInThisPass, config, d
 const dailyStatus = await fetchComputationStatus(dateStr, config, dependencies);
 
 // 2. Check Data Availability
-
-
-  history: new Date('2025-11-05T00:00:00Z'),
-  social: new Date('2025-10-30T00:00:00Z'),
-  insights: new Date('2025-08-26T00:00:00Z'),
-  price: new Date('2025-08-01T00:00:00Z')
-};
-
-const rootData = await checkRootDataAvailability(dateStr, config, dependencies, earliestDates);
+// [UPDATE] Using centralized dates to ensure consistency with BuildReporter
+const rootData = await checkRootDataAvailability(dateStr, config, dependencies, DEFINITIVE_EARLIEST_DATES);
 const rootStatus = rootData ? rootData.status : { hasPortfolio: false, hasPrices: false, hasInsights: false, hasSocial: false, hasHistory: false };
 
 // 3. ANALYZE EXECUTION
@@ -1,10 +1,10 @@
 /**
  * FILENAME: bulltrackers-module/functions/computation-system/helpers/computation_dispatcher.js
  * PURPOSE: Dispatches granular computation tasks (1 task = 1 calculation/date).
- * UPDATED: Implements "Atomic Task Dispatch"
+ * UPDATED: Implements "Atomic Task Dispatch" and uses DEFINITIVE dates to prevent waste.
  */
 
-const { getExpectedDateStrings, normalizeName } = require('../utils/utils.js');
+const { getExpectedDateStrings, normalizeName, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils.js');
 const { groupByPass } = require('../WorkflowOrchestrator.js');
 const { PubSubUtils } = require('../../core/utils/pubsub_utils');
 
@@ -32,16 +32,9 @@ async function dispatchComputationPass(config, dependencies, computationManifest
 logger.log('INFO', `[Dispatcher] Target Calculations: [${calcNames.join(', ')}]`);
 
 // 2. Determine Date Range
-//
-
-
-  history: new Date('2025-11-05T00:00:00Z'),
-  social: new Date('2025-10-30T00:00:00Z'),
-  insights: new Date('2025-08-26T00:00:00Z'),
-  price: new Date('2025-08-01T00:00:00Z')
-};
-
-const passEarliestDate = Object.values(earliestDates).reduce((a, b) => a < b ? a : b);
+// [UPDATE] Using DEFINITIVE_EARLIEST_DATES ensures we don't dispatch tasks
+// for years before data existed (e.g. 2023), saving massive Pub/Sub costs.
+const passEarliestDate = Object.values(DEFINITIVE_EARLIEST_DATES).reduce((a, b) => a < b ? a : b);
 const endDateUTC = new Date(Date.UTC(new Date().getUTCFullYear(), new Date().getUTCMonth(), new Date().getUTCDate() - 1));
 const allExpectedDates = getExpectedDateStrings(passEarliestDate, endDateUTC);
 
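The reduce above simply picks the chronologically earliest entry in the map; with the values defined in utils (final hunks below), that is the price date, so the dispatcher never creates tasks before 2025-08-01. A standalone sketch of the same selection:

    // Standalone sketch of the earliest-date selection used by the dispatcher.
    // Values mirror DEFINITIVE_EARLIEST_DATES as defined in utils later in this diff.
    const dates = {
      portfolio: new Date('2025-09-25T00:00:00Z'),
      history:   new Date('2025-11-05T00:00:00Z'),
      social:    new Date('2025-10-30T00:00:00Z'),
      insights:  new Date('2025-08-26T00:00:00Z'),
      price:     new Date('2025-08-01T00:00:00Z')
    };
    const earliest = Object.values(dates).reduce((a, b) => (a < b ? a : b));
    console.log(earliest.toISOString()); // 2025-08-01T00:00:00.000Z -> no tasks dispatched before this day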
@@ -1,16 +1,18 @@
 /**
  * @fileoverview Build Reporter & Auto-Runner.
  * Generates a "Pre-Flight" report of what the computation system WILL do.
- * Simulates execution logic (Hash Mismatches)
+ * Simulates execution logic (Hash Mismatches) respecting DEFINITIVE start dates.
+ * UPDATED: Implements Parallel Execution to prevent DEADLINE_EXCEEDED on 90-day scans.
  */
 
 const { analyzeDateExecution } = require('../WorkflowOrchestrator');
 const { fetchComputationStatus } = require('../persistence/StatusRepository');
-const { normalizeName, getExpectedDateStrings } = require('../utils/utils');
+const { normalizeName, getExpectedDateStrings, DEFINITIVE_EARLIEST_DATES } = require('../utils/utils');
 const { FieldValue } = require('@google-cloud/firestore');
+const pLimit = require('p-limit');
 
 // Attempt to load package.json to get version. Path depends on where this is invoked.
-let packageVersion = '1.0.
+let packageVersion = '1.0.300';
 
 
 /**
@@ -33,6 +35,7 @@ async function ensureBuildReport(config, dependencies, manifest) {
 logger.log('INFO', `[BuildReporter] 🚀 New Version Detected (${buildId}). Auto-running Pre-flight Report...`);
 // We do NOT await this in the main thread if called during init,
 // but here we are in an async function so we proceed.
+// Defaulting to 90 days for the auto-run to capture full historical impact
 await generateBuildReport(config, dependencies, manifest, 90, buildId);
 
 } catch (e) {
@@ -45,7 +48,7 @@ async function ensureBuildReport(config, dependencies, manifest) {
  * @param {object} config
  * @param {object} dependencies
  * @param {Array} manifest
- * @param {number} daysBack - Days to simulate (
+ * @param {number} daysBack - Days to simulate (Default 90)
  * @param {string} customBuildId - Optional ID override
  */
 async function generateBuildReport(config, dependencies, manifest, daysBack = 90, customBuildId = null) {
@@ -59,8 +62,6 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
 const startDate = new Date();
 startDate.setDate(today.getDate() - daysBack);
 
-// We check UP TO yesterday usually, as today might be partial.
-// But let's check today too to see immediate effects.
 const datesToCheck = getExpectedDateStrings(startDate, today);
 const manifestMap = new Map(manifest.map(c => [normalizeName(c.name), c]));
 
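getExpectedDateStrings itself is not part of this diff; from its usage here and in the smart mock below (dateStr + 'T00:00:00Z'), it appears to return an inclusive list of UTC day strings in YYYY-MM-DD form. A rough, assumption-labelled stand-in:

    // Rough stand-in for getExpectedDateStrings (the real implementation lives in
    // utils/utils.js and is not shown in this diff) -- assumes inclusive YYYY-MM-DD UTC days.
    function expectedDateStrings(start, end) {
      const out = [];
      const cursor = new Date(Date.UTC(start.getUTCFullYear(), start.getUTCMonth(), start.getUTCDate()));
      while (cursor <= end) {
        out.push(cursor.toISOString().slice(0, 10)); // e.g. "2025-08-01"
        cursor.setUTCDate(cursor.getUTCDate() + 1);
      }
      return out;
    }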
@@ -75,75 +76,106 @@ async function generateBuildReport(config, dependencies, manifest, daysBack = 90
 let totalReRuns = 0;
 let totalNew = 0;
 
-// 2.
[…36 removed lines not rendered in the source diff view…]
+// 2. PARALLEL PROCESSING (Fix for DEADLINE_EXCEEDED)
+// Run 20 reads in parallel.
+// 90 days / 20 concurrent = ~5 batches = ~2-3 seconds total vs 45+ seconds sequentially.
+const limit = pLimit(20);
+
+const processingPromises = datesToCheck.map(dateStr => limit(async () => {
+  try {
+    // A. Fetch REAL status from DB (What ran previously?)
+    const dailyStatus = await fetchComputationStatus(dateStr, config, dependencies);
+
+    // B. SMART MOCK Root Data
+    const dateObj = new Date(dateStr + 'T00:00:00Z');
+    const mockRootDataStatus = {
+      hasPortfolio: dateObj >= DEFINITIVE_EARLIEST_DATES.portfolio,
+      hasHistory: dateObj >= DEFINITIVE_EARLIEST_DATES.history,
+      hasSocial: dateObj >= DEFINITIVE_EARLIEST_DATES.social,
+      hasInsights: dateObj >= DEFINITIVE_EARLIEST_DATES.insights,
+      hasPrices: dateObj >= DEFINITIVE_EARLIEST_DATES.price
+    };
+
+    // C. Run Logic Analysis
+    const analysis = analyzeDateExecution(dateStr, manifest, mockRootDataStatus, dailyStatus, manifestMap);
+
+    // D. Format Findings
+    const dateSummary = {
+      willRun: [],
+      willReRun: [],
+      blocked: [],
+      impossible: []
+    };
+
+    // -- Runnable (New) --
+    analysis.runnable.forEach(item => {
+      dateSummary.willRun.push({ name: item.name, reason: "New / No Previous Record" });
+    });
+
+    // -- Re-Runs (Hash Mismatch / Migration) --
+    analysis.reRuns.forEach(item => {
+      let reason = "Hash Mismatch";
+      let details = `Old: ${item.oldHash?.substring(0,6)}... New: ${item.newHash?.substring(0,6)}...`;
+
+      if (item.previousCategory) {
+        reason = "Migration";
+        details = `Moving ${item.previousCategory} -> ${item.newCategory}`;
+      }
+
+      dateSummary.willReRun.push({ name: item.name, reason, details });
+    });
+
+    // -- Impossible (Permanent) --
+    analysis.impossible.forEach(item => {
+      dateSummary.impossible.push({ name: item.name, reason: item.reason });
+    });
+
+    // -- Blocked (Retriable) --
+    analysis.blocked.forEach(item => {
+      dateSummary.blocked.push({ name: item.name, reason: item.reason });
+    });
+    analysis.failedDependency.forEach(item => {
+      dateSummary.blocked.push({ name: item.name, reason: `Dependency Missing: ${item.missing.join(', ')}` });
+    });
+
+    // Return result for aggregation
+    const hasUpdates = dateSummary.willRun.length || dateSummary.willReRun.length || dateSummary.blocked.length || dateSummary.impossible.length;
 
[…13 removed lines not rendered in the source diff view…]
-      dateSummary.blocked.push({ name: item.name, reason: `Dependency Missing: ${item.missing.join(', ')}` });
-    });
-
-    // Only add date to report if something interesting is happening
-    if (dateSummary.willRun.length || dateSummary.willReRun.length || dateSummary.blocked.length) {
-      reportData.dates[dateStr] = dateSummary;
-      totalNew += dateSummary.willRun.length;
-      totalReRuns += dateSummary.willReRun.length;
+    return {
+      dateStr,
+      dateSummary,
+      hasUpdates,
+      stats: {
+        new: dateSummary.willRun.length,
+        rerun: dateSummary.willReRun.length
+      }
+    };
+
+  } catch (err) {
+    logger.log('ERROR', `[BuildReporter] Error analyzing date ${dateStr}: ${err.message}`);
+    return null;
 }
-    }
+}));
+
+// Wait for all dates to process
+const results = await Promise.all(processingPromises);
+
+// 3. Aggregate Results
+results.forEach(res => {
+  if (res && res.hasUpdates) {
+    reportData.dates[res.dateStr] = res.dateSummary;
+    totalNew += res.stats.new;
+    totalReRuns += res.stats.rerun;
+  }
+});
 
 reportData.summary = { totalReRuns, totalNew, scanRange: `${datesToCheck[0]} to ${datesToCheck[datesToCheck.length-1]}` };
 
-//
+// 4. Store Report
 const reportRef = db.collection('computation_build_records').doc(buildId);
 await reportRef.set(reportData);
 
-//
+// 5. Update 'latest' pointer
 await db.collection('computation_build_records').doc('latest').set({
   ...reportData,
   note: "Latest build report pointer."
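The p-limit pattern introduced above caps the number of Firestore reads in flight at 20 while Promise.all still waits for every date. A self-contained sketch of the same bounded-concurrency pattern (the item count and delay are placeholders, not values from the package):

    // Minimal sketch of the bounded-concurrency pattern used by generateBuildReport.
    // Requires p-limit v3 (CommonJS); newer majors are ESM-only.
    const pLimit = require('p-limit');

    async function demo() {
      const limit = pLimit(20);                                       // at most 20 tasks running at once
      const days = Array.from({ length: 90 }, (_, i) => `day-${i}`);  // placeholder work items

      const tasks = days.map(d => limit(async () => {
        await new Promise(r => setTimeout(r, 50));                    // stands in for one Firestore read + analysis
        return { day: d, ok: true };
      }));

      const results = await Promise.all(tasks);                       // resolves when all 90 finish, ~20 at a time
      console.log(results.length);                                    // 90
    }

    demo();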
@@ -5,6 +5,15 @@
 const { FieldValue, FieldPath } = require('@google-cloud/firestore');
 const crypto = require('crypto');
 
+// [NEW] Single Source of Truth for Data Availability
+const DEFINITIVE_EARLIEST_DATES = {
+  portfolio: new Date('2025-09-25T00:00:00Z'),
+  history: new Date('2025-11-05T00:00:00Z'),
+  social: new Date('2025-10-30T00:00:00Z'),
+  insights: new Date('2025-08-26T00:00:00Z'),
+  price: new Date('2025-08-01T00:00:00Z')
+};
+
 /** Stage 1: Normalize a calculation name to kebab-case */
 function normalizeName(name) { return name.replace(/_/g, '-'); }
 
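With the map centralized in utils, any consumer can derive per-source availability for a given day using the same comparison the BuildReporter's smart mock performs above. A small usage sketch (the sample date is arbitrary):

    // Usage sketch: per-source availability flags for one day, derived from the
    // centralized map (same comparison as the BuildReporter mock above).
    const { DEFINITIVE_EARLIEST_DATES } = require('./utils/utils'); // path as required by the orchestrator

    const day = new Date('2025-09-01T00:00:00Z'); // arbitrary sample date
    const availability = {
      hasPortfolio: day >= DEFINITIVE_EARLIEST_DATES.portfolio, // false (portfolio starts 2025-09-25)
      hasHistory:   day >= DEFINITIVE_EARLIEST_DATES.history,   // false (2025-11-05)
      hasSocial:    day >= DEFINITIVE_EARLIEST_DATES.social,    // false (2025-10-30)
      hasInsights:  day >= DEFINITIVE_EARLIEST_DATES.insights,  // true  (2025-08-26)
      hasPrices:    day >= DEFINITIVE_EARLIEST_DATES.price      // true  (2025-08-01)
    };
    console.log(availability);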
@@ -211,5 +220,6 @@ module.exports = {
   getExpectedDateStrings,
   getEarliestDataDates,
   generateCodeHash,
-  withRetry
+  withRetry,
+  DEFINITIVE_EARLIEST_DATES // [NEW EXPORT]
 };