bulltrackers-module 1.0.555 → 1.0.557

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -132,7 +132,6 @@ async function getStableDateSession(config, dependencies, passToRun, dateLimitSt
  return allDates;
  }

- // =============================================================================
  // MAIN ENTRY POINT
  // =============================================================================
  async function dispatchComputationPass(config, dependencies, computationManifest, reqBody = {}) {
@@ -144,14 +143,100 @@ async function dispatchComputationPass(config, dependencies, computationManifest
  else if (action === 'SWEEP') {
  return handleSweepDispatch(config, dependencies, computationManifest, reqBody);
  }
- // [NEW] Handler for Final Forensics Reporting
  else if (action === 'REPORT') {
  return handleFinalSweepReporting(config, dependencies, computationManifest, reqBody);
  }
+ // [NEW] FORCE RUN HANDLER
+ else if (action === 'FORCE_RUN') {
+ return handleForceRun(config, dependencies, computationManifest, reqBody);
+ }

  return handleStandardDispatch(config, dependencies, computationManifest, reqBody);
  }

+ // =============================================================================
+ // NEW: Force Run Handler (Bypasses Checks)
+ // =============================================================================
+ async function handleForceRun(config, dependencies, computationManifest, reqBody) {
+ const { logger } = dependencies;
+ const pubsubUtils = new PubSubUtils(dependencies);
+ const computationName = reqBody.computation; // Required
+ const dateInput = reqBody.date; // Optional (YYYY-MM-DD)
+
+ if (!computationName) {
+ throw new Error('Force Run requires "computation" name.');
+ }
+
+ // 1. Verify Computation Exists
+ const manifestItem = computationManifest.find(c => normalizeName(c.name) === normalizeName(computationName));
+ if (!manifestItem) {
+ throw new Error(`Computation '${computationName}' not found in manifest.`);
+ }
+
+ // 2. Determine Target Dates
+ let targetDates = [];
+ if (dateInput) {
+ // Single Date Mode
+ targetDates = [dateInput];
+ } else {
+ // All Dates Mode (Backfill)
+ logger.log('INFO', `[ForceRun] No date provided. Calculating date range for ${computationName}...`);
+ const earliestDates = await getEarliestDataDates(config, dependencies);
+ // Calculate from system start until today
+ targetDates = getExpectedDateStrings(earliestDates.absoluteEarliest, new Date());
+ }
+
+ logger.log('WARN', `[ForceRun] 🚨 MANUALLY Triggering ${computationName} for ${targetDates.length} dates. Pass: ${manifestItem.pass}`);
+
+ // 3. Construct Tasks
+ const dispatchId = crypto.randomUUID();
+ const tasks = targetDates.map(date => {
+ const traceId = crypto.randomBytes(16).toString('hex');
+ const spanId = crypto.randomBytes(8).toString('hex');
+ return {
+ action: 'RUN_COMPUTATION_DATE',
+ computation: manifestItem.name,
+ date: date,
+ pass: manifestItem.pass || "1",
+ dispatchId: dispatchId,
+ triggerReason: 'MANUAL_FORCE_API',
+ resources: reqBody.resources || 'standard',
+ // Trace context allows you to find these specific runs in Cloud Trace
+ traceContext: { traceId, spanId, sampled: true }
+ };
+ });
+
+ // 4. Batch Publish (Chunked to stay under Pub/Sub limits)
+ const CHUNK_SIZE = 250; // Safe batch size
+ const topic = (reqBody.resources === 'high-mem')
+ ? (config.computationTopicHighMem || 'computation-tasks-highmem')
+ : (config.computationTopicStandard || 'computation-tasks');
+
+ let dispatchedCount = 0;
+ const chunks = [];
+ for (let i = 0; i < tasks.length; i += CHUNK_SIZE) {
+ chunks.push(tasks.slice(i, i + CHUNK_SIZE));
+ }
+
+ // Publish chunks sequentially to avoid memory spikes
+ for (const chunk of chunks) {
+ await pubsubUtils.batchPublishTasks(dependencies, {
+ topicName: topic,
+ tasks: chunk,
+ taskType: 'manual-force-run'
+ });
+ dispatchedCount += chunk.length;
+ }
+
+ return {
+ status: 'FORCED',
+ computation: computationName,
+ mode: dateInput ? 'SINGLE_DATE' : 'ALL_DATES',
+ datesTriggered: dispatchedCount,
+ targetTopic: topic
+ };
+ }
+
  // =============================================================================
  // NEW: Final Sweep Reporting Handler
  // =============================================================================
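For reference, a minimal sketch of the request body the new FORCE_RUN branch consumes, assembled only from the handler above; the computation name, date, and target URL are illustrative placeholders, not values from the package:

// Illustrative only -- not part of the package diff.
// Omitting "date" selects the ALL_DATES backfill path shown above.
const forceRunBody = {
  action: 'FORCE_RUN',
  computation: 'ExampleComputation', // must match a manifest entry
  date: '2024-06-01',                // optional, YYYY-MM-DD
  resources: 'high-mem'              // optional; routes tasks to the high-mem topic
};
// POST forceRunBody to the computation-dispatcher function
// (URL shape as used by the workflow below).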
@@ -1,4 +1,4 @@
- # Cloud Workflows: Precision Cursor-Based Orchestrator
+ # Cloud Workflows: Precision Cursor-Based Orchestrator with Manual Override
  main:
  params: [input]
  steps:
@@ -9,6 +9,12 @@ main:
  - current_date: '${text.split(time.format(sys.now()), "T")[0]}'
  - date_to_run: '${default(map.get(input, "date"), current_date)}'

+ # --- 🔀 NEW: CHECK FOR MANUAL OVERRIDE ---
+ - check_manual_override:
+ switch:
+ - condition: '${map.get(input, "action") == "FORCE_RUN"}'
+ next: execute_manual_force_run
+
  # --- PHASE 1: EXECUTION (Standard + High Mem Retry) ---
  - run_sequential_passes:
  for:
@@ -80,7 +86,7 @@ main:
  - next_loop_retry:
  next: sequential_date_loop

- # --- VERIFICATION & SWEEP ---
+ # --- VERIFICATION & SWEEP (CRITICAL LOGIC PRESERVED) ---
  - verify_pass_completion:
  call: http.post
  args:
@@ -116,8 +122,6 @@ main:
  seconds: '${int(sweep_task.eta)}'

  # --- PHASE 2: FINAL FORENSIC REPORTING ---
- # Triggered after ALL execution attempts for this pass (Standard -> Verify -> HighMem Sweep)
- # We ask the dispatcher to run the FinalSweepReporter for the target date.
  - run_final_forensics:
  for:
  value: pass_id
@@ -139,5 +143,21 @@ main:
  args:
  text: '${"📝 FINAL REPORT: Pass " + pass_id + " -> " + report_res.body.issuesFound + " detailed forensic documents created."}'

- - finish:
- return: "Pipeline Complete with Forensic Analysis"
+ - finish_standard:
+ return: "Pipeline Complete with Forensic Analysis"
+
+ # --- 🚨 MANUAL OVERRIDE EXECUTION PATH ---
+ - execute_manual_force_run:
+ call: http.post
+ args:
+ url: '${"https://europe-west1-" + project + ".cloudfunctions.net/computation-dispatcher"}'
+ body:
+ action: 'FORCE_RUN'
+ computation: '${input.computation}'
+ date: '${map.get(input, "date")}' # Can be null for ALL_DATES
+ resources: '${map.get(input, "resources")}'
+ auth: { type: OIDC }
+ result: force_res
+
+ - finish_manual:
+ return: '${force_res.body}'
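A hedged example of the workflow input that routes execution down the new execute_manual_force_run path; the keys mirror the switch condition and request body above, and the values are placeholders:

// Illustrative execution argument for the orchestrator workflow.
const workflowArgument = {
  action: 'FORCE_RUN',               // matches the check_manual_override switch
  computation: 'ExampleComputation', // placeholder name
  date: '2024-06-01',                // omit to force-run across all dates
  resources: 'standard'
};
// Passed to the workflow execution as a JSON string, e.g. JSON.stringify(workflowArgument).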
@@ -1,5 +1,6 @@
- # Data Feeder Pipeline (V3.3 - Optimized Waits & Isolated Testing)
- # Starts at 22:00 UTC via Cloud Scheduler.
+ # Data Feeder Pipeline (V4.0 - Lean Market Close)
+ # Schedule: 22:00 UTC
+ # Objective: Capture Market Close data & perform Global Indexing at midnight.

  main:
  params: [input]
@@ -10,7 +11,7 @@ main:
  - location: "europe-west1"

  # ==========================================
- # TEST MODE ROUTING
+ # TEST MODE ROUTING (Preserved for Debugging)
  # ==========================================
  - check_test_mode:
  switch:
@@ -18,9 +19,6 @@ main:
  steps:
  - route_test:
  switch:
- # --- ISOLATED FUNCTION TESTS (Run & Stop) ---
- # Use these to test a single function without triggering waits or next steps.
-
  - condition: '${input.target_step == "test_price"}'
  steps:
  - call_price_iso:
@@ -42,29 +40,7 @@ main:
  timeout: 300
  - return_insights:
  return: "Test Complete: Insights Fetcher executed."
-
- - condition: '${input.target_step == "test_rankings"}'
- steps:
- - call_rankings_iso:
- call: http.post
- args:
- url: '${"https://" + location + "-" + project + ".cloudfunctions.net/fetch-popular-investors"}'
- auth: { type: OIDC }
- timeout: 300
- - return_rankings:
- return: "Test Complete: Popular Investors executed."
-
- - condition: '${input.target_step == "test_social"}'
- steps:
- - call_social_iso:
- call: http.post
- args:
- url: '${"https://" + location + "-" + project + ".cloudfunctions.net/social-orchestrator"}'
- auth: { type: OIDC }
- timeout: 300
- - return_social:
- return: "Test Complete: Social Orchestrator executed."
-
+
  - condition: '${input.target_step == "test_indexer"}'
  steps:
  - call_indexer_iso:
@@ -76,18 +52,11 @@ main:
  - return_indexer:
  return: "Test Complete: Root Data Indexer executed."

- # --- PHASE JUMPS (Resumes flow from that point) ---
- - condition: '${input.target_step == "market"}'
- next: phase_2200_price
- - condition: '${input.target_step == "midnight"}'
- next: phase_0000_rankings
- - condition: '${input.target_step == "social"}'
- next: social_loop_start
-
  # ==========================================
  # PHASE 1: MARKET CLOSE (Starts 22:00 UTC)
  # ==========================================

+ # 1. Price Fetcher
  - phase_2200_price:
  try:
  call: http.post
@@ -101,12 +70,8 @@ main:
  - log_price_error:
  call: sys.log
  args: { severity: "WARNING", text: "Price fetch timed out/failed. Proceeding." }
-
- # FIXED: Only one 10-minute wait here now.
- - wait_10_after_price:
- call: sys.sleep
- args: { seconds: 600 }

+ # 2. Insights Fetcher (Can run immediately after or parallel)
  - phase_2200_insights:
  try:
  call: http.post
@@ -121,108 +86,30 @@ main:
  call: sys.log
  args: { severity: "WARNING", text: "Insights fetch timed out/failed. Proceeding." }

- - wait_10_after_insights:
- call: sys.sleep
- args: { seconds: 600 }
-
  # ==========================================
- # PHASE 2: WAIT FOR MIDNIGHT
+ # PHASE 2: WAIT FOR MIDNIGHT (Indexing)
  # ==========================================

  - align_to_midnight:
  assign:
  - now_sec: '${int(sys.now())}'
  - day_sec: 86400
+ # Calculates seconds remaining until the next 00:00 UTC
  - sleep_midnight: '${day_sec - (now_sec % day_sec)}'
+
  - wait_for_midnight:
  call: sys.sleep
  args: { seconds: '${sleep_midnight}' }

  # ==========================================
- # PHASE 3: MIDNIGHT TASKS (00:00 UTC)
+ # PHASE 3: GLOBAL INDEXING (00:00 UTC)
  # ==========================================

- - phase_0000_rankings:
- try:
- call: http.post
- args:
- url: '${"https://" + location + "-" + project + ".cloudfunctions.net/fetch-popular-investors"}'
- auth: { type: OIDC }
- timeout: 300
- except:
- as: e
- steps:
- - log_ranking_error:
- call: sys.log
- args: { severity: "WARNING", text: "Rankings failed. Proceeding to Social (risky)." }
-
- - wait_10_after_rankings:
- call: sys.sleep
- args: { seconds: 600 }
-
- - phase_0000_social:
- try:
- call: http.post
- args:
- url: '${"https://" + location + "-" + project + ".cloudfunctions.net/social-orchestrator"}'
- auth: { type: OIDC }
- timeout: 300
- except:
- as: e
- steps:
- - log_social_error:
- call: sys.log
- args: { severity: "WARNING", text: "Social failed. Proceeding." }
-
- - wait_10_after_social:
- call: sys.sleep
- args: { seconds: 600 }
-
  - global_index_midnight:
  call: http.post
  args:
  url: '${"https://" + location + "-" + project + ".cloudfunctions.net/root-data-indexer"}'
  auth: { type: OIDC }

- # ==========================================
- # PHASE 4: SOCIAL LOOP (Every 3 Hours)
- # ==========================================
-
- - init_social_loop:
- assign:
- - i: 0
-
- - social_loop_start:
- switch:
- - condition: ${i < 7} # Covers the remainder of the 24h cycle
- steps:
- - wait_3_hours:
- call: sys.sleep
- args: { seconds: 10800 }
-
- - run_social_recurring:
- try:
- call: http.post
- args:
- url: '${"https://" + location + "-" + project + ".cloudfunctions.net/social-orchestrator"}'
- auth: { type: OIDC }
- timeout: 300
- except:
- as: e
- steps:
- - log_loop_social_warn:
- call: sys.log
- args: { severity: "WARNING", text: "Loop Social timed out. Proceeding." }
-
- - wait_10_in_loop:
- call: sys.sleep
- args: { seconds: 600 }
-
- - increment_loop:
- assign:
- - i: '${i + 1}'
- - next_iteration:
- next: social_loop_start
-
  - finish:
- return: "Complete 24h Cycle Finished"
+ return: "Daily Close Cycle Finished (Price, Insights, Indexing)."
@@ -0,0 +1,55 @@
+ # Morning Prep Pipeline
+ # Schedule: 04:00 UTC
+ # Objective: Refresh Rankings -> Trigger Daily Update (Task Engine)
+
+ main:
+ params: [input]
+ steps:
+ - init:
+ assign:
+ - project: '${sys.get_env("GOOGLE_CLOUD_PROJECT_ID")}'
+ - location: "europe-west1"
+ # Define the Task Engine workflow ID to trigger later
+ - task_engine_workflow_id: "daily-update-pipeline"
+
+ # ==========================================
+ # STEP 1: REFRESH RANKINGS (The Master List)
+ # ==========================================
+ - fetch_rankings:
+ try:
+ call: http.post
+ args:
+ url: '${"https://" + location + "-" + project + ".cloudfunctions.net/fetch-popular-investors"}'
+ auth: { type: OIDC }
+ timeout: 540
+ except:
+ as: e
+ steps:
+ - log_rankings_fail:
+ call: sys.log
+ args: { severity: "ERROR", text: "Rankings Fetch Failed. Stopping pipeline to prevent stale Task Engine run." }
+ - raise_error:
+ raise: ${e}
+
+ # ==========================================
+ # STEP 2: TRIGGER TASK ENGINE
+ # ==========================================
+ # We trigger the workflow execution directly.
+ # This replaces the need for a separate 05:00 UTC Scheduler.
+
+ - trigger_daily_update:
+ call: googleapis.workflowexecutions.v1.projects.locations.workflows.executions.create
+ args:
+ parent: '${"projects/" + project + "/locations/" + location + "/workflows/" + task_engine_workflow_id}'
+ body:
+ # Pass explicit arguments to the Task Engine
+ argument: '{"userTypes": ["normal", "speculator"], "source": "morning_prep_trigger"}'
+ result: execution_result
+
+ - log_trigger:
+ call: sys.log
+ args:
+ text: '${"✅ Rankings Complete. Triggered Task Engine: " + execution_result.name}'
+
+ - finish:
+ return: "Morning Prep Complete. Task Engine launched."
@@ -2,10 +2,183 @@
  * @fileoverview Main orchestration logic.
  * REFACTORED: This file now contains the main pipe functions
  * that are called by the Cloud Function entry points.
+ * It includes the new HTTP handlers for Workflow-driven "Slow-Trickle" updates.
  * They receive all dependencies.
  */
  const { checkDiscoveryNeed, getDiscoveryCandidates, dispatchDiscovery } = require('./helpers/discovery_helpers');
  const { getUpdateTargets, dispatchUpdates } = require('./helpers/update_helpers');
+ const { FieldValue } = require('@google-cloud/firestore');
+
+ /**
+ * ENTRY POINT: HTTP Handler for Workflow Interaction
+ * Map this function to your HTTP Trigger in your index.js/exports.
+ * This handles the "PLAN" and "EXECUTE_WINDOW" phases of the slow-trickle update.
+ *
+ * @param {object} req - Express request object.
+ * @param {object} res - Express response object.
+ * @param {object} dependencies - Contains logger, db, firestoreUtils, pubsubUtils.
+ * @param {object} config - Global configuration.
+ */
+ async function handleOrchestratorHttp(req, res, dependencies, config) {
+ const { logger } = dependencies;
+ const body = req.body || {};
+ const { action, userType, date, windows, planId, windowId } = body;
+
+ logger.log('INFO', `[Orchestrator HTTP] Received request: ${action}`, body);
+
+ try {
+ if (action === 'PLAN') {
+ // PHASE 1: Find users and split them into Firestore documents
+ if (!userType || !date) {
+ throw new Error("Missing userType or date for PLAN action");
+ }
+ const result = await planDailyUpdates(userType, date, windows || 10, config, dependencies);
+ res.status(200).send(result);
+
+ } else if (action === 'EXECUTE_WINDOW') {
+ // PHASE 2: Load specific window and dispatch
+ if (!planId || !windowId) {
+ throw new Error("Missing planId or windowId for EXECUTE_WINDOW action");
+ }
+ const result = await executeUpdateWindow(planId, windowId, userType, config, dependencies);
+ res.status(200).send(result);
+
+ } else if (action === 'LEGACY_RUN') {
+ // Support for triggering the old brute-force method via HTTP if needed
+ await runUpdateOrchestrator(config, dependencies);
+ res.status(200).send({ status: 'Completed legacy run' });
+
+ } else {
+ res.status(400).send({ error: `Unknown action: ${action}` });
+ }
+ } catch (error) {
+ logger.log('ERROR', `[Orchestrator HTTP] Fatal error in ${action}`, { errorMessage: error.message, stack: error.stack });
+ res.status(500).send({ error: error.message, stack: error.stack });
+ }
+ }
+
+ /**
+ * LOGIC: Plan the updates (Split into windows)
+ * 1. Fetches all users needing updates.
+ * 2. Shuffles them.
+ * 3. Splits them into 'n' windows.
+ * 4. Saves the windows to Firestore.
+ */
+ async function planDailyUpdates(userType, date, numberOfWindows, config, deps) {
+ const { logger, db } = deps;
+
+ // 1. Get ALL targets
+ // We construct thresholds to capture everyone due for today
+ const now = new Date();
+ const startOfTodayUTC = new Date(Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate()));
+ const DaysAgoUTC = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
+
+ const thresholds = {
+ dateThreshold: startOfTodayUTC,
+ gracePeriodThreshold: DaysAgoUTC
+ };
+
+ logger.log('INFO', `[Orchestrator Plan] Calculating targets for ${userType}...`);
+
+ // Reusing existing helper to get the raw list of users
+ const targets = await getUpdateTargets(userType, thresholds, config.updateConfig, deps);
+ logger.log('INFO', `[Orchestrator Plan] Found ${targets.length} candidates for ${userType}.`);
+
+ if (targets.length === 0) {
+ return { planId: null, totalUsers: 0, windowCount: 0, windowIds: [] };
+ }
+
+ // 2. Shuffle to randomize load (Fisher-Yates shuffle)
+ // This ensures that we don't always update the same users at the same time of day
+ for (let i = targets.length - 1; i > 0; i--) {
+ const j = Math.floor(Math.random() * (i + 1));
+ [targets[i], targets[j]] = [targets[j], targets[i]];
+ }
+
+ // 3. Split and Save
+ const chunkSize = Math.ceil(targets.length / numberOfWindows);
+ const planId = `plan_${userType}_${date}`;
+ const windowIds = [];
+
+ const batchWriter = db.batch();
+ let writeCount = 0;
+
+ for (let i = 0; i < numberOfWindows; i++) {
+ const start = i * chunkSize;
+ const end = start + chunkSize;
+ const chunk = targets.slice(start, end);
+
+ if (chunk.length > 0) {
+ // Store ONLY the necessary IDs/Data in Firestore
+ // Path: system_update_plans/{planId}/windows/{windowId}
+ const windowDocRef = db.collection('system_update_plans').doc(planId).collection('windows').doc(String(i + 1));
+
+ batchWriter.set(windowDocRef, {
+ userType: userType,
+ users: chunk, // This array contains the user objects/IDs
+ status: 'pending',
+ windowId: i + 1,
+ userCount: chunk.length,
+ createdAt: FieldValue.serverTimestamp(),
+ scheduledForDate: date
+ });
+
+ windowIds.push(i + 1);
+ writeCount++;
+ }
+ }
+
+ await batchWriter.commit();
+ logger.log('SUCCESS', `[Orchestrator Plan] Plan Saved: ${planId} with ${writeCount} windows containing ${targets.length} users.`);
+
+ return {
+ planId: planId,
+ totalUsers: targets.length,
+ windowCount: windowIds.length,
+ windowIds: windowIds
+ };
+ }
+
+ /**
+ * LOGIC: Execute a specific window
+ * 1. Reads the user list from Firestore.
+ * 2. Calls dispatchUpdates to send them to the Task Engine.
+ */
+ async function executeUpdateWindow(planId, windowId, userType, config, deps) {
+ const { logger, db } = deps;
+
+ // 1. Fetch the window from Firestore
+ const windowRef = db.collection('system_update_plans').doc(planId).collection('windows').doc(String(windowId));
+ const windowDoc = await windowRef.get();
+
+ if (!windowDoc.exists) {
+ throw new Error(`Window ${windowId} not found in plan ${planId}`);
+ }
+
+ const data = windowDoc.data();
+
+ // Idempotency check: prevent re-running a completed window
+ if (data.status === 'completed') {
+ logger.log('WARN', `[Orchestrator Execute] Window ${windowId} already completed. Skipping.`);
+ return { dispatchedCount: 0, status: 'already_completed' };
+ }
+
+ const targets = data.users;
+ logger.log('INFO', `[Orchestrator Execute] Window ${windowId}: Dispatching ${targets.length} users.`);
+
+ // 2. Dispatch using existing helper
+ // The helper handles batching for Pub/Sub and logging.
+ if (targets && targets.length > 0) {
+ await dispatchUpdates(targets, userType, config.updateConfig, deps);
+ }
+
+ // 3. Mark window as complete
+ await windowRef.update({
+ status: 'completed',
+ executedAt: FieldValue.serverTimestamp()
+ });
+
+ return { dispatchedCount: targets.length, status: 'success' };
+ }

  /** Stage 1: Main discovery orchestrator pipe */
  async function runDiscoveryOrchestrator(config, deps) {
@@ -135,4 +308,11 @@ function isUserTypeEnabled(userType, enabledTypes) {
  return enabledTypes.includes(userType);
  }

- module.exports = { runDiscoveryOrchestrator, runUpdateOrchestrator, runDiscovery, runUpdates, isUserTypeEnabled };
+ module.exports = {
+ handleOrchestratorHttp,
+ runDiscoveryOrchestrator,
+ runUpdateOrchestrator,
+ runDiscovery,
+ runUpdates,
+ isUserTypeEnabled
+ };
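The JSDoc above says to map handleOrchestratorHttp to an HTTP trigger in index.js; a minimal sketch of that wiring, assuming the dependencies and config objects are built by the package's existing index.js bootstrap (the export name here is illustrative):

// Hypothetical HTTP entry point -- the real wiring lives in the package's index.js.
const { handleOrchestratorHttp } = require('./functions/orchestrator/index');

exports.orchestratorHttp = async (req, res) => {
  // `dependencies` ({ logger, db, firestoreUtils, pubsubUtils }) and `config`
  // are assumed to be assembled elsewhere, per the JSDoc above.
  await handleOrchestratorHttp(req, res, dependencies, config);
};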
@@ -0,0 +1,83 @@
+ # bulltrackers-module/workflows/daily_update_pipeline.yaml
+ # Cloud Workflows: Slow-Trickle Daily Update Orchestrator
+ # Triggers the Orchestrator to PLAN updates, then EXECUTES them in timed windows.
+
+ main:
+ params: [input]
+ steps:
+ - init:
+ assign:
+ - project: '${sys.get_env("GOOGLE_CLOUD_PROJECT_ID")}'
+ - location: "europe-west1"
+ # Replace with your actual Orchestrator HTTP trigger URL
+ - orchestrator_url: '${"https://" + location + "-" + project + ".cloudfunctions.net/orchestrator-http"}'
+ - today: '${text.split(time.format(sys.now()), "T")[0]}'
+ # User types to process (can be passed in input or defaulted)
+ - user_types: '${default(map.get(input, "userTypes"), ["normal", "speculator"])}'
+ - default_windows: 10
+
+ - process_user_types_loop:
+ for:
+ value: user_type
+ in: ${user_types}
+ steps:
+ - log_start:
+ call: sys.log
+ args:
+ text: '${"Starting update cycle for: " + user_type}'
+
+ # --- PHASE 1: PLAN ---
+ - plan_updates:
+ call: http.post
+ args:
+ url: '${orchestrator_url}'
+ body:
+ action: 'PLAN'
+ userType: '${user_type}'
+ date: '${today}'
+ windows: '${default_windows}'
+ auth: { type: OIDC }
+ timeout: 300 # 5 minutes to query and split users
+ result: plan_res
+
+ - log_plan:
+ call: sys.log
+ args:
+ text: '${"📅 PLAN CREATED: " + user_type + " | PlanID: " + plan_res.body.planId + " | Users: " + plan_res.body.totalUsers + " | Windows: " + plan_res.body.windowCount}'
+
+ # --- PHASE 2: EXECUTE WINDOWS ---
+ - run_windows_loop:
+ for:
+ value: window_id
+ in: '${plan_res.body.windowIds}'
+ steps:
+ - execute_window:
+ call: http.post
+ args:
+ url: '${orchestrator_url}'
+ body:
+ action: 'EXECUTE_WINDOW'
+ planId: '${plan_res.body.planId}'
+ windowId: '${window_id}'
+ userType: '${user_type}'
+ auth: { type: OIDC }
+ result: exec_res
+
+ - log_execution:
+ call: sys.log
+ args:
+ text: '${"🚀 WINDOW EXECUTED: " + user_type + " Window " + window_id + "/" + plan_res.body.windowCount + ". Dispatched: " + exec_res.body.dispatchedCount}'
+
+ # --- PACING: Sleep between windows ---
+ # We skip the sleep after the very last window of the loop
+ - check_pacing_needed:
+ switch:
+ - condition: '${window_id < plan_res.body.windowCount}'
+ steps:
+ - wait_pacing:
+ call: sys.sleep
+ args:
+ seconds: 3600 # 1 Hour wait between blocks
+
+ - finish:
+ return: "Daily Slow-Trickle Update Completed."
package/index.js CHANGED
@@ -11,7 +11,13 @@ const { FirestoreBatchManager } = require('./functions/task-engine/utils/fire
  const firestoreUtils = require('./functions/core/utils/firestore_utils');

  // Orchestrator
- const { runDiscoveryOrchestrator, runUpdateOrchestrator } = require('./functions/orchestrator/index');
+ // [UPDATED] Imported handleOrchestratorHttp
+ const {
+ runDiscoveryOrchestrator,
+ runUpdateOrchestrator,
+ handleOrchestratorHttp
+ } = require('./functions/orchestrator/index');
+
  const { checkDiscoveryNeed, getDiscoveryCandidates, dispatchDiscovery } = require('./functions/orchestrator/helpers/discovery_helpers');
  const { getUpdateTargets, dispatchUpdates } = require('./functions/orchestrator/helpers/update_helpers');

@@ -72,6 +78,8 @@ const core = {
  };

  const orchestrator = {
+ // [UPDATED] Exported handleOrchestratorHttp so it can be mapped in Cloud Functions
+ handleOrchestratorHttp,
  runDiscoveryOrchestrator,
  runUpdateOrchestrator,
  checkDiscoveryNeed,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "bulltrackers-module",
- "version": "1.0.555",
+ "version": "1.0.557",
  "description": "Helper Functions for Bulltrackers.",
  "main": "index.js",
  "files": [