bulltrackers-module 1.0.152 → 1.0.153

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/functions/appscript-api/index.js +8 -38
  2. package/functions/computation-system/helpers/computation_pass_runner.js +38 -183
  3. package/functions/computation-system/helpers/orchestration_helpers.js +105 -326
  4. package/functions/computation-system/utils/data_loader.js +38 -133
  5. package/functions/computation-system/utils/schema_capture.js +7 -41
  6. package/functions/computation-system/utils/utils.js +37 -124
  7. package/functions/core/utils/firestore_utils.js +8 -46
  8. package/functions/core/utils/intelligent_header_manager.js +26 -128
  9. package/functions/core/utils/intelligent_proxy_manager.js +33 -171
  10. package/functions/core/utils/pubsub_utils.js +7 -24
  11. package/functions/dispatcher/helpers/dispatch_helpers.js +9 -30
  12. package/functions/dispatcher/index.js +7 -30
  13. package/functions/etoro-price-fetcher/helpers/handler_helpers.js +12 -80
  14. package/functions/fetch-insights/helpers/handler_helpers.js +18 -70
  15. package/functions/generic-api/helpers/api_helpers.js +28 -167
  16. package/functions/generic-api/index.js +49 -188
  17. package/functions/invalid-speculator-handler/helpers/handler_helpers.js +10 -47
  18. package/functions/orchestrator/helpers/discovery_helpers.js +1 -5
  19. package/functions/orchestrator/index.js +1 -6
  20. package/functions/price-backfill/helpers/handler_helpers.js +13 -69
  21. package/functions/social-orchestrator/helpers/orchestrator_helpers.js +5 -37
  22. package/functions/social-task-handler/helpers/handler_helpers.js +29 -186
  23. package/functions/speculator-cleanup-orchestrator/helpers/cleanup_helpers.js +19 -78
  24. package/functions/task-engine/handler_creator.js +2 -8
  25. package/functions/task-engine/helpers/update_helpers.js +17 -83
  26. package/functions/task-engine/helpers/verify_helpers.js +11 -56
  27. package/functions/task-engine/utils/firestore_batch_manager.js +16 -67
  28. package/functions/task-engine/utils/task_engine_utils.js +6 -35
  29. package/index.js +45 -43
  30. package/package.json +1 -1
package/functions/core/utils/intelligent_header_manager.js
@@ -19,41 +19,24 @@ class IntelligentHeaderManager {
   * @param {string} config.fallbackUserAgent - A fallback User-Agent if loading fails.
   */
  constructor(firestore, logger, config) {
- if (!firestore || !logger || !config) {
- throw new Error("IntelligentHeaderManager requires firestore, logger, and config objects.");
- }
- if (!config.headersCollectionName || !config.cacheDurationMs || !config.fallbackUserAgent) {
- throw new Error("IntelligentHeaderManager config is missing required keys (headersCollectionName, cacheDurationMs, fallbackUserAgent).");
- }
-
+ if (!firestore || !logger || !config) { throw new Error("IntelligentHeaderManager requires firestore, logger, and config objects."); }
+ if (!config.headersCollectionName || !config.cacheDurationMs || !config.fallbackUserAgent) { throw new Error("IntelligentHeaderManager config is missing required keys (headersCollectionName, cacheDurationMs, fallbackUserAgent)."); }
  this.firestore = firestore;
  this.logger = logger;
-
- // Load from config
  this.collectionName = config.headersCollectionName;
  this.cacheDuration = config.cacheDurationMs;
  this.fallbackUserAgent = config.fallbackUserAgent;
-
- // Internal state
  this.headers = [];
  this.lastFetched = null;
  this.performanceUpdates = {};
-
- // Initialize with fallback to ensure we always have at least one header
- this._setFallbackHeader();
- }
+ this._setFallbackHeader(); }

  /**
  * Sets the fallback header as the only available header.
  * @private
  */
  _setFallbackHeader() {
- this.headers = [{
- id: 'fallback',
- data: { 'User-Agent': this.fallbackUserAgent },
- performance: { total: 1, success: 1 }
- }];
- }
+ this.headers = [{ id: 'fallback', data: { 'User-Agent': this.fallbackUserAgent }, performance: { total: 1, success: 1 } }]; }

  /**
  * Fetches and caches header documents from Firestore. If the cache is fresh, it does nothing.
@@ -62,58 +45,22 @@ class IntelligentHeaderManager {
   */
  async _loadHeaders() {
  const now = new Date();
- if (this.lastFetched && (now - this.lastFetched < this.cacheDuration) && this.headers.length > 0) {
- return; // Cache is fresh
- }
-
+ if (this.lastFetched && (now - this.lastFetched < this.cacheDuration) && this.headers.length > 0) { return; }
  try {
  this.logger.log('INFO', '[HeaderManager] Refreshing header performance data from Firestore...');
  const snapshot = await this.firestore.collection(this.collectionName).get();
-
- if (snapshot.empty) {
- this.logger.log('WARN', `[HeaderManager] No documents found in headers collection: ${this.collectionName}. Using fallback.`);
- this._setFallbackHeader();
- this.lastFetched = new Date();
- return;
- }
-
+ if (snapshot.empty) { this.logger.log('WARN', `[HeaderManager] No documents found in headers collection: ${this.collectionName}. Using fallback.`); this._setFallbackHeader(); this.lastFetched = new Date(); return; }
  const loadedHeaders = snapshot.docs.map(doc => {
  const docData = doc.data();
-
- // Validate that the document has the required 'headers' field
- if (!docData.headers) {
- this.logger.log('WARN', `[HeaderManager] Document ${doc.id} is missing 'headers' field. Skipping.`);
- return null;
- }
-
- return {
- id: doc.id,
- data: docData.headers, // Changed from 'header' to 'headers'
- performance: {
- total: docData.totalRequests || 0,
- success: docData.successfulRequests || 0
- }
- };
- }).filter(Boolean); // Remove null entries
-
- if (loadedHeaders.length === 0) {
- this.logger.log('WARN', "[HeaderManager] No valid headers found in collection. Using fallback.");
- this._setFallbackHeader();
- } else {
- this.headers = loadedHeaders;
- this.logger.log('INFO', `[HeaderManager] Successfully loaded ${this.headers.length} headers.`);
- }
-
+ if (!docData.headers) { this.logger.log('WARN', `[HeaderManager] Document ${doc.id} is missing 'headers' field. Skipping.`); return null; }
+ return { id: doc.id, data: docData.headers, performance: { total: docData.totalRequests || 0, success: docData.successfulRequests || 0 } };
+ }).filter(Boolean);
+ if (loadedHeaders.length === 0) { this.logger.log('WARN', "[HeaderManager] No valid headers found in collection. Using fallback."); this._setFallbackHeader();
+ } else { this.headers = loadedHeaders; this.logger.log('INFO', `[HeaderManager] Successfully loaded ${this.headers.length} headers.`); }
  this.lastFetched = new Date();
-
  } catch (error) {
- this.logger.log('ERROR', '[HeaderManager] Failed to load headers from Firestore. Using fallback.', {
- errorMessage: error.message,
- errorStack: error.stack,
- collection: this.collectionName
- });
- this._setFallbackHeader();
- }
+ this.logger.log('ERROR', '[HeaderManager] Failed to load headers from Firestore. Using fallback.', { errorMessage: error.message, errorStack: error.stack, collection: this.collectionName });
+ this._setFallbackHeader(); }
  }

  /**
@@ -123,34 +70,11 @@ class IntelligentHeaderManager {
   */
  async selectHeader() {
  await this._loadHeaders();
-
- // This should never happen now because we always have fallback, but keep as safety check
- if (!this.headers || this.headers.length === 0) {
- this.logger.log('ERROR', '[HeaderManager] No headers available after load attempt. Returning fallback.');
- return { id: 'fallback', header: { 'User-Agent': this.fallbackUserAgent } };
- }
-
- // Calculate total score for weighted random selection
+ if (!this.headers || this.headers.length === 0) { this.logger.log('ERROR', '[HeaderManager] No headers available after load attempt. Returning fallback.'); return { id: 'fallback', header: { 'User-Agent': this.fallbackUserAgent } }; }
  let totalScore = 0;
- const weightedHeaders = this.headers.map(h => {
- const successRate = (h.performance.total === 0) ? 0.5 : (h.performance.success / h.performance.total);
- // Add-1 smoothing to avoid zero probability for new headers
- const score = (successRate + 1) * 10;
- totalScore += score;
- return { ...h, score };
- });
-
- // Select a random value
+ const weightedHeaders = this.headers.map(h => { const successRate = (h.performance.total === 0) ? 0.5 : (h.performance.success / h.performance.total); const score = (successRate + 1) * 10; totalScore += score; return { ...h, score }; });
  let random = Math.random() * totalScore;
-
- for (const h of weightedHeaders) {
- if (random < h.score) {
- return { id: h.id, header: h.data };
- }
- random -= h.score;
- }
-
- // Fallback in case of rounding errors
+ for (const h of weightedHeaders) { if (random < h.score) { return { id: h.id, header: h.data }; } random -= h.score; }
  const fallbackHeader = this.headers[0];
  return { id: fallbackHeader.id, header: fallbackHeader.data };
  }
@@ -161,16 +85,10 @@ class IntelligentHeaderManager {
   * @param {boolean} success - Whether the request was successful.
   */
  updatePerformance(headerId, success) {
- if (headerId === 'fallback') return; // Do not track performance of the fallback
-
- if (!this.performanceUpdates[headerId]) {
- this.performanceUpdates[headerId] = { successes: 0, failures: 0 };
- }
- if (success) {
- this.performanceUpdates[headerId].successes++;
- } else {
- this.performanceUpdates[headerId].failures++;
- }
+ if (headerId === 'fallback') return;
+ if (!this.performanceUpdates[headerId]) { this.performanceUpdates[headerId] = { successes: 0, failures: 0 }; }
+ if (success) { this.performanceUpdates[headerId].successes++;
+ } else { this.performanceUpdates[headerId].failures++; }
  }

  /**
@@ -179,34 +97,14 @@ class IntelligentHeaderManager {
   */
  async flushPerformanceUpdates() {
  const updatesToFlush = this.performanceUpdates;
- this.performanceUpdates = {}; // Reset the local cache
-
- if (Object.keys(updatesToFlush).length === 0) {
- return;
- }
-
+ this.performanceUpdates = {};
+ if (Object.keys(updatesToFlush).length === 0) { return; }
  this.logger.log('INFO', `[HeaderManager] Flushing performance updates for ${Object.keys(updatesToFlush).length} headers.`);
  const batch = this.firestore.batch();
-
- for (const headerId in updatesToFlush) {
- const updates = updatesToFlush[headerId];
- const docRef = this.firestore.collection(this.collectionName).doc(headerId);
- batch.update(docRef, {
- totalRequests: FieldValue.increment(updates.successes + updates.failures),
- successfulRequests: FieldValue.increment(updates.successes),
- lastUsed: FieldValue.serverTimestamp()
- });
- }
-
- try {
- await batch.commit();
- this.logger.log('SUCCESS', '[HeaderManager] Successfully flushed header performance updates to Firestore.');
- } catch (error) {
- this.logger.log('ERROR', '[HeaderManager] Failed to commit header performance batch.', { error: error.message });
- // Put updates back in memory to try again next time
- this.performanceUpdates = updatesToFlush;
- }
- }
+ for (const headerId in updatesToFlush) { const updates = updatesToFlush[headerId]; const docRef = this.firestore.collection(this.collectionName).doc(headerId);
+ batch.update(docRef, { totalRequests: FieldValue.increment(updates.successes + updates.failures), successfulRequests: FieldValue.increment(updates.successes), lastUsed: FieldValue.serverTimestamp() }); }
+ try { await batch.commit(); this.logger.log('SUCCESS', '[HeaderManager] Successfully flushed header performance updates to Firestore.');
+ } catch (error) { this.logger.log('ERROR', '[HeaderManager] Failed to commit header performance batch.', { error: error.message }); this.performanceUpdates = updatesToFlush; } }
  }

  module.exports = { IntelligentHeaderManager };
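
In the hunks above only the formatting changes; the constructor contract, the weighted success-rate selection, and the batched performance flush are unchanged. A minimal usage sketch follows, assuming a console-backed logger, placeholder config values, and that the internal file path is importable from the installed package (none of these values ship with the module):

```js
// Hypothetical wiring of IntelligentHeaderManager; collection name, cache duration,
// and User-Agent string are placeholders, not values from the package.
const { Firestore } = require('@google-cloud/firestore');
const { IntelligentHeaderManager } = require('bulltrackers-module/functions/core/utils/intelligent_header_manager');

const logger = { log: (level, msg, meta) => console.log(level, msg, meta || '') };
const headerManager = new IntelligentHeaderManager(new Firestore(), logger, {
  headersCollectionName: 'request_headers',   // Firestore collection holding header docs
  cacheDurationMs: 5 * 60 * 1000,             // refresh the in-memory header cache every 5 minutes
  fallbackUserAgent: 'Mozilla/5.0 (fallback)' // used when the collection is empty or unreachable
});

async function trackedFetch(url) {
  const { id, header } = await headerManager.selectHeader();  // weighted random pick by success rate
  const res = await fetch(url, { headers: header });
  headerManager.updatePerformance(id, res.ok);                // buffered in memory
  await headerManager.flushPerformanceUpdates();              // one Firestore batch of increments
  return res;
}
```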
package/functions/core/utils/intelligent_proxy_manager.js
@@ -18,35 +18,19 @@ class IntelligentProxyManager {
   * @param {boolean} [config.proxyLockingEnabled=true] - Whether to enable the proxy locking mechanism.
   */
  constructor(firestore, logger, config) {
- if (!firestore || !logger || !config) {
- throw new Error("IntelligentProxyManager requires firestore, logger, and config objects.");
- }
- if (!config.proxyUrls || !config.cacheDurationMs || !config.proxiesCollectionName || !config.proxyPerformanceDocPath) {
- throw new Error("IntelligentProxyManager config is missing required keys (proxyUrls, cacheDurationMs, proxiesCollectionName, proxyPerformanceDocPath).");
- }
-
+ if (!firestore || !logger || !config) { throw new Error("IntelligentProxyManager requires firestore, logger, and config objects."); }
+ if (!config.proxyUrls || !config.cacheDurationMs || !config.proxiesCollectionName || !config.proxyPerformanceDocPath) { throw new Error("IntelligentProxyManager config is missing required keys (proxyUrls, cacheDurationMs, proxiesCollectionName, proxyPerformanceDocPath)."); }
  this.firestore = firestore;
  this.logger = logger;
-
- // Load from config
- this.proxyUrls = config.proxyUrls.filter(Boolean); // Filter out any empty/null URLs
+ this.proxyUrls = config.proxyUrls.filter(Boolean);
  this.CONFIG_CACHE_DURATION_MS = config.cacheDurationMs;
  this.PROXIES_COLLECTION = config.proxiesCollectionName;
  this.PERFORMANCE_DOC_PATH = config.proxyPerformanceDocPath;
- // NEW: Check for the locking enabled flag, default to true
  this.proxyLockingEnabled = config.proxyLockingEnabled !== false;
-
- // Internal state
- this.proxies = {}; // Stores { owner, url, status ('unlocked', 'locked') }
+ this.proxies = {};
  this.configLastLoaded = 0;
-
- if (this.proxyUrls.length === 0) {
- this.logger.log('WARN', '[ProxyManager] No proxy URLs provided in config.');
- } else {
- // UPDATED: Log locking status
- const lockingStatus = this.proxyLockingEnabled ? "Locking Mechanism Enabled" : "Locking Mechanism DISABLED";
- this.logger.log('INFO', `[ProxyManager] Initialized with ${this.proxyUrls.length} proxies and ${lockingStatus}.`);
- }
+ if (this.proxyUrls.length === 0) { this.logger.log('WARN', '[ProxyManager] No proxy URLs provided in config.');
+ } else { const lockingStatus = this.proxyLockingEnabled ? "Locking Mechanism Enabled" : "Locking Mechanism DISABLED"; this.logger.log('INFO', `[ProxyManager] Initialized with ${this.proxyUrls.length} proxies and ${lockingStatus}.`); }
  }

  /**
@@ -54,50 +38,19 @@ class IntelligentProxyManager {
   * Caches the configuration to reduce frequent database reads.
   */
  async _loadConfig() {
- if (Date.now() - this.configLastLoaded < this.CONFIG_CACHE_DURATION_MS) {
- return; // Cache is fresh
- }
- if (this.proxyUrls.length === 0) {
- return; // No proxies to load
- }
-
+ if (Date.now() - this.configLastLoaded < this.CONFIG_CACHE_DURATION_MS) { return; }
+ if (this.proxyUrls.length === 0) { return; }
  this.logger.log('INFO', "[ProxyManager] Refreshing proxy configuration and lock status...");
- try {
- const tempProxyStatus = {};
-
- // 1. Initialize all known proxies from config
- for (const url of this.proxyUrls) {
- const owner = new URL(url).hostname;
- tempProxyStatus[owner] = { owner, url, status: 'unlocked' }; // Default to unlocked
- }
-
- // 2. Load performance doc to get lock statuses - ONLY if locking is enabled
- if (this.proxyLockingEnabled) {
- const doc = await this.firestore.doc(this.PERFORMANCE_DOC_PATH).get();
- if (doc.exists) {
- const data = doc.data();
- if (data.locks) {
- for (const owner in data.locks) {
- if (tempProxyStatus[owner] && data.locks[owner].locked === true) {
- tempProxyStatus[owner].status = 'locked';
- }
- }
- }
- }
- } else {
- this.logger.log('TRACE', '[ProxyManager] Proxy locking is disabled, skipping lock status check.');
- }
-
+ try { const tempProxyStatus = {};
+ for (const url of this.proxyUrls) { const owner = new URL(url).hostname; tempProxyStatus[owner] = { owner, url, status: 'unlocked' }; }
+ if (this.proxyLockingEnabled) { const doc = await this.firestore.doc(this.PERFORMANCE_DOC_PATH).get();
+ if (doc.exists) { const data = doc.data(); if (data.locks) { for (const owner in data.locks) { if (tempProxyStatus[owner] && data.locks[owner].locked === true) { tempProxyStatus[owner].status = 'locked'; } } } }
+ } else { this.logger.log('TRACE', '[ProxyManager] Proxy locking is disabled, skipping lock status check.'); }
  this.proxies = tempProxyStatus;
  this.configLastLoaded = Date.now();
  this.logger.log('SUCCESS', `[ProxyManager] Refreshed ${Object.keys(this.proxies).length} proxy statuses.`);
-
  } catch (error) {
- this.logger.log('ERROR', '[ProxyManager] Failed to load proxy config from Firestore.', {
- errorMessage: error.message,
- path: this.PERFORMANCE_DOC_PATH
- });
- }
+ this.logger.log('ERROR', '[ProxyManager] Failed to load proxy config from Firestore.', { errorMessage: error.message, path: this.PERFORMANCE_DOC_PATH }); }
  }

  /**
@@ -107,20 +60,9 @@
  async _selectProxy() {
  await this._loadConfig();

- // UPDATED: If locking is disabled, all proxies are available. If enabled, filter.
- const availableProxies = this.proxyLockingEnabled
- ? Object.values(this.proxies).filter(p => p.status === 'unlocked')
- : Object.values(this.proxies);
-
- if (availableProxies.length === 0) {
- const errorMsg = this.proxyLockingEnabled
- ? "All proxies are locked. No proxy available."
- : "No proxies are loaded. Cannot make request.";
- this.logger.log('ERROR', `[ProxyManager] ${errorMsg}`);
- throw new Error(errorMsg);
- }
-
- // Return a random available proxy
+ const availableProxies = this.proxyLockingEnabled ? Object.values(this.proxies).filter(p => p.status === 'unlocked') : Object.values(this.proxies);
+ if (availableProxies.length === 0) { const errorMsg = this.proxyLockingEnabled ? "All proxies are locked. No proxy available." : "No proxies are loaded. Cannot make request.";
+ this.logger.log('ERROR', `[ProxyManager] ${errorMsg}`); throw new Error(errorMsg); }
  const selected = availableProxies[Math.floor(Math.random() * availableProxies.length)];
  return { owner: selected.owner, url: selected.url };
  }
@@ -130,34 +72,12 @@
  * @param {string} owner - The owner/ID of the proxy to lock.
  */
  async lockProxy(owner) {
- // NEW: Check if locking is enabled.
- if (!this.proxyLockingEnabled) {
- this.logger.log('TRACE', `[ProxyManager] Locking skipped for ${owner} (locking is disabled).`);
- return;
- }
-
- // 1. Update in-memory cache immediately
- if (this.proxies[owner]) {
- this.proxies[owner].status = 'locked';
- }
-
+ if (!this.proxyLockingEnabled) { this.logger.log('TRACE', `[ProxyManager] Locking skipped for ${owner} (locking is disabled).`); return; }
+ if (this.proxies[owner]) { this.proxies[owner].status = 'locked'; }
  this.logger.log('WARN', `[ProxyManager] Locking proxy: ${owner}`);
-
- // 2. Update Firestore
- try {
- const docRef = this.firestore.doc(this.PERFORMANCE_DOC_PATH);
- // Use dot notation to update a specific field in the 'locks' map
- await docRef.set({
- locks: {
- [owner]: {
- locked: true,
- lastLocked: FieldValue.serverTimestamp()
- }
- }
- }, { merge: true });
- } catch (error) {
- this.logger.log('ERROR', `[ProxyManager] Failed to write lock for ${owner} to Firestore.`, { errorMessage: error.message });
- }
+ try { const docRef = this.firestore.doc(this.PERFORMANCE_DOC_PATH);
+ await docRef.set({ locks: { [owner]: { locked: true, lastLocked: FieldValue.serverTimestamp() } } }, { merge: true });
+ } catch (error) { this.logger.log('ERROR', `[ProxyManager] Failed to write lock for ${owner} to Firestore.`, { errorMessage: error.message }); }
  }

  /**
@@ -168,24 +88,10 @@
   */
  async fetch(targetUrl, options = {}) {
  let proxy = null;
- try {
- // 1. Select Proxy
- proxy = await this._selectProxy();
- } catch (error) {
- // No proxies available
- return { ok: false, status: 503, error: { message: error.message }, headers: new Headers() };
- }
-
- // 2. Make Request
+ try { proxy = await this._selectProxy();
+ } catch (error) { return { ok: false, status: 503, error: { message: error.message }, headers: new Headers() }; }
  const response = await this._fetchViaAppsScript(proxy.url, targetUrl, options);
-
- // 3. Handle Proxy Failure (e.g., quota error, network error)
- if (!response.ok && response.isUrlFetchError) {
- // isUrlFetchError is a custom flag from _fetchViaAppsScript
- // This call is now gated by the flag inside lockProxy()
- await this.lockProxy(proxy.owner);
- }
-
+ if (!response.ok && response.isUrlFetchError) { await this.lockProxy(proxy.owner); }
  return response;
  }

@@ -194,68 +100,24 @@
  * @private
  */
  async _fetchViaAppsScript(proxyUrl, targetUrl, options) {
- // Spread the 'options' object (which contains headers, method, body, etc.)
- // into the top-level payload.
- const payload = {
- url: targetUrl,
- ...options
- };
-
+ const payload = { url: targetUrl, ...options };
  try {
- const response = await fetch(proxyUrl, {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify(payload)
- });
-
+ const response = await fetch(proxyUrl, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify(payload) });
  if (!response.ok) {
  const errorText = await response.text();
- this.logger.log('WARN', `[ProxyManager] Proxy infrastructure itself failed.`, {
- status: response.status,
- proxy: proxyUrl,
- error: errorText
- });
- return {
- ok: false,
- status: response.status,
- isUrlFetchError: true, // Flag this as a proxy infrastructure error
- error: { message: `Proxy infrastructure failed with status ${response.status}` },
- headers: response.headers,
- text: () => Promise.resolve(errorText)
- };
- }
-
+ this.logger.log('WARN', `[ProxyManager] Proxy infrastructure itself failed.`, { status: response.status, proxy: proxyUrl, error: errorText });
+ return { ok: false, status: response.status, isUrlFetchError: true, error: { message: `Proxy infrastructure failed with status ${response.status}` }, headers: response.headers, text: () => Promise.resolve(errorText) }; }
  const proxyResponse = await response.json();
-
  if (proxyResponse.error) {
  const errorMsg = proxyResponse.error.message || '';
- // Check for Google-side quota errors
  if (errorMsg.toLowerCase().includes('service invoked too many times')) {
  this.logger.log('WARN', `[ProxyManager] Proxy quota error: ${proxyUrl}`, { error: proxyResponse.error });
- return { ok: false, status: 500, error: proxyResponse.error, isUrlFetchError: true, headers: new Headers() };
- }
- // Other errors returned by the AppScript
- return { ok: false, status: 500, error: proxyResponse.error, headers: new Headers(), text: () => Promise.resolve(errorMsg) };
- }
-
- // Success
- return {
- ok: proxyResponse.statusCode >= 200 && proxyResponse.statusCode < 300,
- status: proxyResponse.statusCode,
- headers: new Headers(proxyResponse.headers || {}),
- json: () => Promise.resolve(JSON.parse(proxyResponse.body)),
- text: () => Promise.resolve(proxyResponse.body),
- };
+ return { ok: false, status: 500, error: proxyResponse.error, isUrlFetchError: true, headers: new Headers() }; }
+ return { ok: false, status: 500, error: proxyResponse.error, headers: new Headers(), text: () => Promise.resolve(errorMsg) }; }
+ return { ok: proxyResponse.statusCode >= 200 && proxyResponse.statusCode < 300, status: proxyResponse.statusCode, headers: new Headers(proxyResponse.headers || {}), json: () => Promise.resolve(JSON.parse(proxyResponse.body)), text: () => Promise.resolve(proxyResponse.body), };
  } catch (networkError) {
  this.logger.log('ERROR', `[ProxyManager] Network error calling proxy: ${proxyUrl}`, { errorMessage: networkError.message });
- return {
- ok: false,
- status: 0, // Network errors don't have a status
- isUrlFetchError: true, // Flag this as a proxy infrastructure error
- error: { message: `Network error: ${networkError.message}` },
- headers: new Headers()
- };
- }
+ return { ok: false, status: 0, isUrlFetchError: true, error: { message: `Network error: ${networkError.message}` }, headers: new Headers() }; }
  }
  }

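The IntelligentProxyManager hunks are likewise formatting-only: it still picks a random unlocked proxy, relays the request through an Apps Script endpoint, and locks a proxy when the proxy infrastructure itself fails. A minimal sketch of how the class is driven, with every config value a placeholder and the internal require path assumed to be importable:

```js
// Hypothetical wiring of IntelligentProxyManager; the deployment URL, collection name,
// and performance doc path are placeholders, not values from the package.
const { Firestore } = require('@google-cloud/firestore');
const { IntelligentProxyManager } = require('bulltrackers-module/functions/core/utils/intelligent_proxy_manager');

const logger = { log: (level, msg, meta) => console.log(level, msg, meta || '') };
const proxyManager = new IntelligentProxyManager(new Firestore(), logger, {
  proxyUrls: ['https://script.google.com/macros/s/EXAMPLE_DEPLOYMENT_ID/exec'],
  cacheDurationMs: 10 * 60 * 1000,
  proxiesCollectionName: 'proxies',
  proxyPerformanceDocPath: 'proxies/performance', // doc whose `locks` map marks locked proxies
  proxyLockingEnabled: true
});

async function proxiedGet(targetUrl) {
  // fetch() picks a random unlocked proxy; if the proxy itself fails (isUrlFetchError),
  // it is locked in Firestore so subsequent calls skip it.
  const response = await proxyManager.fetch(targetUrl, { method: 'GET' });
  return response.ok ? response.json() : null;
}
```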
package/functions/core/utils/pubsub_utils.js
@@ -18,36 +18,19 @@
  async function batchPublishTasks(dependencies, config) {
  const { pubsub, logger } = dependencies;
  const { topicName, tasks, taskType, maxPubsubBatchSize = 500 } = config;
-
- if (!tasks || tasks.length === 0) {
- logger.log('INFO',`[Core Utils] No ${taskType} tasks to publish to ${topicName}.`);
- return;
- }
+ if (!tasks || tasks.length === 0) { logger.log('INFO',`[Core Utils] No ${taskType} tasks to publish to ${topicName}.`); return; }
  logger.log('INFO',`[Core Utils] Publishing ${tasks.length} ${taskType} tasks to ${topicName}...`);
  const topic = pubsub.topic(topicName);
  let messagesPublished = 0;
-
  try {
- for (let i = 0; i < tasks.length; i += maxPubsubBatchSize) {
- const batchTasks = tasks.slice(i, i + maxPubsubBatchSize);
- const batchPromises = batchTasks.map(task => {
- const dataBuffer = Buffer.from(JSON.stringify(task));
- return topic.publishMessage({ data: dataBuffer })
- .catch(err => logger.log('ERROR', `[Core Utils] Failed to publish single message for ${taskType}`, { error: err.message, task: task }));
- });
+ for (let i = 0; i < tasks.length; i += maxPubsubBatchSize) { const batchTasks = tasks.slice(i, i + maxPubsubBatchSize);
+ const batchPromises = batchTasks.map(task => { const dataBuffer = Buffer.from(JSON.stringify(task));
+ return topic.publishMessage({ data: dataBuffer }) .catch(err => logger.log('ERROR', `[Core Utils] Failed to publish single message for ${taskType}`, { error: err.message, task: task })); });
  await Promise.all(batchPromises);
  messagesPublished += batchTasks.length;
- logger.log('TRACE', `[Core Utils] Published batch ${Math.ceil((i + 1) / maxPubsubBatchSize)} for ${taskType} (${batchTasks.length} messages)`);
- }
-
+ logger.log('TRACE', `[Core Utils] Published batch ${Math.ceil((i + 1) / maxPubsubBatchSize)} for ${taskType} (${batchTasks.length} messages)`); }
  logger.log('SUCCESS', `[Core Utils] Finished publishing ${messagesPublished} ${taskType} tasks to ${topicName}.`);
-
- } catch (error) {
- logger.log('ERROR', `[Core Utils] Error during batch publishing of ${taskType} tasks to ${topicName}`, { errorMessage: error.message });
- throw error;
- }
+ } catch (error) { logger.log('ERROR', `[Core Utils] Error during batch publishing of ${taskType} tasks to ${topicName}`, { errorMessage: error.message }); throw error; }
  }

- module.exports = {
- batchPublishTasks,
- };
+ module.exports = { batchPublishTasks };
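
batchPublishTasks keeps its behaviour in this hunk: each task is still published as its own Pub/Sub message, awaited in chunks of up to maxPubsubBatchSize concurrent publishes. A minimal call sketch with placeholder topic and task values:

```js
// Hypothetical caller of batchPublishTasks; topic name and task payloads are placeholders.
const { PubSub } = require('@google-cloud/pubsub');
const { batchPublishTasks } = require('bulltrackers-module/functions/core/utils/pubsub_utils');

async function publishExample() {
  const dependencies = {
    pubsub: new PubSub(),
    logger: { log: (level, msg, meta) => console.log(level, msg, meta || '') }
  };
  await batchPublishTasks(dependencies, {
    topicName: 'price-fetch-tasks',
    taskType: 'price-fetch',
    tasks: [{ symbol: 'AAPL' }, { symbol: 'TSLA' }],
    maxPubsubBatchSize: 500 // publishes are awaited in chunks of this size
  });
}
```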
package/functions/dispatcher/helpers/dispatch_helpers.js
@@ -1,5 +1,5 @@
  /**
- * @fileoverview Sub-pipe for dispatching tasks.
+ * @fileoverview Sub-pipe for dispatching tasks. Handles the speed that tasks are submitted to the task engine for scaling optimisation.
  * REFACTORED: Now stateless and receives dependencies.
  *
  * --- MODIFICATION ---
@@ -24,38 +24,17 @@ async function dispatchTasksInBatches(tasks, dependencies, config) {
  const { topicName, batchSize, batchDelayMs } = config;
  const topic = pubsub.topic(topicName);
  let totalTasksQueued = 0;
-
  logger.log('INFO', `[Module Dispatcher] Received ${tasks.length} tasks. Creating batches...`);
-
  for (let i = 0; i < tasks.length; i += batchSize) {
- const batch = tasks.slice(i, i + batchSize); // e.g., batch is [task1, task2, ... task100]
-
- try {
- // --- START MODIFICATION ---
- // Wrap the entire batch of tasks into a single object payload.
- // This is the payload the Task Engine will now receive.
- const messagePayload = { tasks: batch };
-
- // Publish the entire batch as a single message
- await topic.publishMessage({ json: messagePayload });
-
- // --- END MODIFICATION ---
-
- totalTasksQueued += batch.length;
- logger.log('INFO', `[Module Dispatcher] Dispatched batch ${Math.ceil((i + 1) / batchSize)} with ${batch.length} tasks as a single message.`);
-
- if (i + batchSize < tasks.length) {
- await sleep(batchDelayMs);
- }
- } catch (publishError) {
- logger.log('ERROR', `[Module Dispatcher] Failed to publish batch ${Math.ceil((i + 1) / batchSize)}. Error: ${publishError.message}`, { errorStack: publishError.stack });
- }
- }
-
+ const batch = tasks.slice(i, i + batchSize);
+ try { const messagePayload = { tasks: batch };
+ await topic.publishMessage({ json: messagePayload });
+ totalTasksQueued += batch.length;
+ logger.log('INFO', `[Module Dispatcher] Dispatched batch ${Math.ceil((i + 1) / batchSize)} with ${batch.length} tasks as a single message.`);
+ if (i + batchSize < tasks.length) { await sleep(batchDelayMs); }
+ } catch (publishError) { logger.log('ERROR', `[Module Dispatcher] Failed to publish batch ${Math.ceil((i + 1) / batchSize)}. Error: ${publishError.message}`, { errorStack: publishError.stack }); } }
  logger.log('SUCCESS', `[Module Dispatcher] Successfully dispatched ${totalTasksQueued} tasks in ${Math.ceil(tasks.length / batchSize)} batches.`);
  return totalTasksQueued;
  }

- module.exports = {
- dispatchTasksInBatches
- };
+ module.exports = { dispatchTasksInBatches };
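
The dispatcher still wraps each slice of tasks into a single Pub/Sub message with a `{ tasks: [...] }` payload and sleeps between publishes to throttle the load on the task engine. A minimal call sketch, with topic name, batch size, and delay as placeholders:

```js
// Hypothetical caller of dispatchTasksInBatches; topic name, batch size, and delay are placeholders.
const { PubSub } = require('@google-cloud/pubsub');
const { dispatchTasksInBatches } = require('bulltrackers-module/functions/dispatcher/helpers/dispatch_helpers');

async function dispatchExample(tasks) {
  const dependencies = {
    pubsub: new PubSub(),
    logger: { log: (level, msg, meta) => console.log(level, msg, meta || '') }
  };
  // With batchSize 100, 1,000 tasks become 10 messages, each carrying { tasks: [...] },
  // and the loop waits batchDelayMs between publishes.
  return dispatchTasksInBatches(tasks, dependencies, {
    topicName: 'task-engine-topic',
    batchSize: 100,
    batchDelayMs: 1000
  });
}
```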
package/functions/dispatcher/index.js
@@ -1,12 +1,10 @@
  /**
  * @fileoverview Main entry point for the Dispatcher function.
  * REFACTORED: This file now contains the main pipe function 'handleRequest'.
+ * This is built to handle the large load of tasks produced by the orchestrator, to prevent a flood of requests which would cause problematic rate limiting within GCP scaling. This is effectively a cost efficiency script.
  */

- // --- 1. REMOVE the circular require ---
- // const { pipe } = require('../../index'); // <<< REMOVE THIS LINE

- // --- 2. ADD direct require for the specific sub-pipe needed ---
  const { dispatchTasksInBatches } = require('./helpers/dispatch_helpers');


@@ -20,34 +18,13 @@ const { dispatchTasksInBatches } = require('./helpers/dispatch_helpers');
  */
  async function handleRequest(message, context, config, dependencies) {
  const { logger } = dependencies;
- try {
- if (!message.data) {
- logger.log('WARN', '[Module Dispatcher] Received message without data.');
- return;
- }
+ try { if (!message.data) { logger.log('WARN', '[Module Dispatcher] Received message without data.'); return; }
  const decodedMessage = JSON.parse(Buffer.from(message.data, 'base64').toString());
  const { tasks } = decodedMessage;
-
- if (!tasks || !Array.isArray(tasks) || tasks.length === 0) {
- logger.log('WARN', '[Module Dispatcher] Received message with no valid tasks. Nothing to do.');
- return;
- }
-
- if (!config || !config.topicName || !config.batchSize || !config.batchDelayMs) {
- logger.log('ERROR', '[Module Dispatcher] Invalid configuration provided.', { config });
- throw new Error("Dispatcher module received invalid configuration.");
- }
-
- // --- 3. Use the directly required function ---
- // Call the sub-pipe -> becomes dispatchTasksInBatches
- await dispatchTasksInBatches(tasks, dependencies, config); // <<< USE DIRECTLY
-
- } catch (error) {
- logger.log('ERROR', '[Module Dispatcher] FATAL error processing message', { errorMessage: error.message, errorStack: error.stack });
- throw error;
- }
+ if (!tasks || !Array.isArray(tasks) || tasks.length === 0) { logger.log('WARN', '[Module Dispatcher] Received message with no valid tasks. Nothing to do.'); return; }
+ if (!config || !config.topicName || !config.batchSize || !config.batchDelayMs) { logger.log('ERROR', '[Module Dispatcher] Invalid configuration provided.', { config }); throw new Error("Dispatcher module received invalid configuration.");}
+ await dispatchTasksInBatches(tasks, dependencies, config);
+ } catch (error) { logger.log('ERROR', '[Module Dispatcher] FATAL error processing message', { errorMessage: error.message, errorStack: error.stack }); throw error; }
  }

- module.exports = {
- handleRequest,
- };
+ module.exports = { handleRequest };
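
handleRequest still expects a Pub/Sub message whose base64 data decodes to `{ tasks: [...] }`, plus a config carrying topicName, batchSize, and batchDelayMs. A sketch of a direct invocation, with entirely hypothetical task and config values:

```js
// Hypothetical direct invocation of handleRequest; task shape and config values are placeholders.
const { PubSub } = require('@google-cloud/pubsub');
const { handleRequest } = require('bulltrackers-module/functions/dispatcher');

async function invokeExample() {
  const payload = { tasks: [{ taskId: 'abc-123', type: 'price-fetch' }] };
  const message = { data: Buffer.from(JSON.stringify(payload)).toString('base64') };
  const config = { topicName: 'task-engine-topic', batchSize: 100, batchDelayMs: 1000 };
  const dependencies = {
    pubsub: new PubSub(),
    logger: { log: (level, msg, meta) => console.log(level, msg, meta || '') }
  };
  // Decodes the message, validates tasks and config, then re-batches via dispatchTasksInBatches.
  await handleRequest(message, {}, config, dependencies);
}
```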