@zintrust/workers 0.1.28 → 0.1.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -1
- package/dist/AnomalyDetection.d.ts +4 -0
- package/dist/AnomalyDetection.js +8 -0
- package/dist/BroadcastWorker.d.ts +2 -0
- package/dist/CanaryController.js +49 -5
- package/dist/ChaosEngineering.js +13 -0
- package/dist/ClusterLock.js +21 -10
- package/dist/DeadLetterQueue.js +12 -8
- package/dist/MultiQueueWorker.d.ts +1 -1
- package/dist/MultiQueueWorker.js +12 -7
- package/dist/NotificationWorker.d.ts +2 -0
- package/dist/PriorityQueue.d.ts +2 -2
- package/dist/PriorityQueue.js +20 -21
- package/dist/ResourceMonitor.js +65 -38
- package/dist/WorkerFactory.d.ts +23 -3
- package/dist/WorkerFactory.js +420 -40
- package/dist/WorkerInit.js +8 -3
- package/dist/WorkerMetrics.d.ts +2 -1
- package/dist/WorkerMetrics.js +152 -93
- package/dist/WorkerRegistry.d.ts +6 -0
- package/dist/WorkerRegistry.js +70 -1
- package/dist/WorkerShutdown.d.ts +21 -0
- package/dist/WorkerShutdown.js +82 -9
- package/dist/WorkerShutdownDurableObject.d.ts +12 -0
- package/dist/WorkerShutdownDurableObject.js +41 -0
- package/dist/build-manifest.json +171 -99
- package/dist/createQueueWorker.d.ts +2 -0
- package/dist/createQueueWorker.js +42 -27
- package/dist/dashboard/types.d.ts +5 -0
- package/dist/dashboard/workers-api.js +136 -43
- package/dist/http/WorkerApiController.js +1 -0
- package/dist/http/WorkerController.js +133 -85
- package/dist/http/WorkerMonitoringService.d.ts +11 -0
- package/dist/http/WorkerMonitoringService.js +62 -0
- package/dist/http/middleware/CustomValidation.js +1 -1
- package/dist/http/middleware/EditWorkerValidation.d.ts +1 -1
- package/dist/http/middleware/EditWorkerValidation.js +7 -6
- package/dist/http/middleware/ProcessorPathSanitizer.js +101 -35
- package/dist/http/middleware/WorkerValidationChain.js +1 -0
- package/dist/index.d.ts +2 -1
- package/dist/index.js +1 -0
- package/dist/routes/workers.js +48 -6
- package/dist/storage/WorkerStore.d.ts +4 -1
- package/dist/storage/WorkerStore.js +55 -7
- package/dist/telemetry/api/TelemetryAPI.d.ts +46 -0
- package/dist/telemetry/api/TelemetryAPI.js +219 -0
- package/dist/telemetry/api/TelemetryMonitoringService.d.ts +17 -0
- package/dist/telemetry/api/TelemetryMonitoringService.js +113 -0
- package/dist/telemetry/components/AlertPanel.d.ts +1 -0
- package/dist/telemetry/components/AlertPanel.js +13 -0
- package/dist/telemetry/components/CostTracking.d.ts +1 -0
- package/dist/telemetry/components/CostTracking.js +14 -0
- package/dist/telemetry/components/ResourceUsageChart.d.ts +1 -0
- package/dist/telemetry/components/ResourceUsageChart.js +11 -0
- package/dist/telemetry/components/WorkerHealthChart.d.ts +1 -0
- package/dist/telemetry/components/WorkerHealthChart.js +11 -0
- package/dist/telemetry/index.d.ts +15 -0
- package/dist/telemetry/index.js +60 -0
- package/dist/telemetry/routes/dashboard.d.ts +6 -0
- package/dist/telemetry/routes/dashboard.js +608 -0
- package/dist/ui/router/EmbeddedAssets.d.ts +4 -0
- package/dist/ui/router/EmbeddedAssets.js +13 -0
- package/dist/ui/router/ui.js +100 -4
- package/package.json +9 -5
- package/src/AnomalyDetection.ts +9 -0
- package/src/CanaryController.ts +41 -5
- package/src/ChaosEngineering.ts +14 -0
- package/src/ClusterLock.ts +22 -9
- package/src/DeadLetterQueue.ts +13 -8
- package/src/MultiQueueWorker.ts +15 -8
- package/src/PriorityQueue.ts +21 -22
- package/src/ResourceMonitor.ts +72 -40
- package/src/WorkerFactory.ts +545 -49
- package/src/WorkerInit.ts +8 -3
- package/src/WorkerMetrics.ts +183 -105
- package/src/WorkerRegistry.ts +80 -1
- package/src/WorkerShutdown.ts +115 -9
- package/src/WorkerShutdownDurableObject.ts +64 -0
- package/src/createQueueWorker.ts +73 -30
- package/src/dashboard/types.ts +5 -0
- package/src/dashboard/workers-api.ts +165 -52
- package/src/http/WorkerApiController.ts +1 -0
- package/src/http/WorkerController.ts +167 -90
- package/src/http/WorkerMonitoringService.ts +77 -0
- package/src/http/middleware/CustomValidation.ts +1 -1
- package/src/http/middleware/EditWorkerValidation.ts +7 -6
- package/src/http/middleware/ProcessorPathSanitizer.ts +123 -36
- package/src/http/middleware/WorkerValidationChain.ts +1 -0
- package/src/index.ts +6 -1
- package/src/routes/workers.ts +66 -9
- package/src/storage/WorkerStore.ts +59 -9
- package/src/telemetry/api/TelemetryAPI.ts +292 -0
- package/src/telemetry/api/TelemetryMonitoringService.ts +149 -0
- package/src/telemetry/components/AlertPanel.ts +13 -0
- package/src/telemetry/components/CostTracking.ts +14 -0
- package/src/telemetry/components/ResourceUsageChart.ts +11 -0
- package/src/telemetry/components/WorkerHealthChart.ts +11 -0
- package/src/telemetry/index.ts +121 -0
- package/src/telemetry/public/assets/zintrust-logo.svg +15 -0
- package/src/telemetry/routes/dashboard.ts +638 -0
- package/src/telemetry/styles/tailwind.css +1 -0
- package/src/telemetry/styles/zintrust-theme.css +8 -0
- package/src/ui/router/EmbeddedAssets.ts +13 -0
- package/src/ui/router/ui.ts +112 -5
- package/src/ui/workers/index.html +2 -2
- package/src/ui/workers/main.js +232 -61
- package/src/ui/workers/zintrust.svg +30 -0
- package/dist/dashboard/workers-dashboard-ui.d.ts +0 -3
- package/dist/dashboard/workers-dashboard-ui.js +0 -1026
- package/dist/dashboard/workers-dashboard.d.ts +0 -4
- package/dist/dashboard/workers-dashboard.js +0 -904
package/dist/WorkerFactory.js
CHANGED
@@ -3,7 +3,7 @@
  * Central factory for creating workers with all advanced features
  * Sealed namespace for immutability
  */
-import { appConfig, createRedisConnection, databaseConfig, Env, ErrorFactory, getBullMQSafeQueueName, Logger, NodeSingletons, queueConfig, registerDatabasesFromRuntimeConfig, useEnsureDbConnected, workersConfig, } from '@zintrust/core';
+import { appConfig, createRedisConnection, databaseConfig, Env, ErrorFactory, getBullMQSafeQueueName, Logger, NodeSingletons, queueConfig, registerDatabasesFromRuntimeConfig, useEnsureDbConnected, workersConfig, ZintrustLang, } from '@zintrust/core';
 import { Worker } from 'bullmq';
 import { AutoScaler } from './AutoScaler';
 import { CanaryController } from './CanaryController';
@@ -71,6 +71,9 @@ let workerStoreConfig = null;
 const processorRegistry = new Map();
 const processorPathRegistry = new Map();
 const processorResolvers = [];
+const processorSpecRegistry = new Map();
+const processorCache = new Map();
+let processorCacheSize = 0;
 const buildPersistenceBootstrapConfig = () => {
     const driver = Env.get('WORKER_PERSISTENCE_DRIVER', 'memory');
     const config = {
@@ -112,10 +115,98 @@ const registerProcessorPaths = (paths) => {
 const registerProcessorResolver = (resolver) => {
     processorResolvers.push(resolver);
 };
+const registerProcessorSpec = (spec, processor) => {
+    if (!spec || typeof processor !== 'function')
+        return;
+    processorSpecRegistry.set(normalizeProcessorSpec(spec), processor);
+};
 const decodeProcessorPathEntities = (value) => value
     .replaceAll(///gi, '/')
     .replaceAll('/', '/')
     .replaceAll(///gi, '/');
+const isUrlSpec = (spec) => {
+    if (spec.startsWith('url:'))
+        return true;
+    return spec.includes('://');
+};
+const normalizeProcessorSpec = (spec) => spec.startsWith('url:') ? spec.slice(4) : spec;
+const parseCacheControl = (value) => {
+    if (!value)
+        return {};
+    const parts = value.split(',').map((part) => part.trim().toLowerCase());
+    const maxAge = parts.find((part) => part.startsWith('max-age='));
+    if (!maxAge)
+        return {};
+    const raw = maxAge.split('=')[1];
+    const parsed = Number.parseInt(raw ?? '', 10);
+    return Number.isFinite(parsed) ? { maxAge: parsed } : {};
+};
+const getProcessorSpecConfig = () => workersConfig.processorSpec;
+const computeSha256 = async (value) => {
+    if (typeof globalThis !== 'undefined' && globalThis.crypto?.subtle) {
+        const data = new TextEncoder().encode(value);
+        const digest = await globalThis.crypto.subtle.digest('SHA-256', data);
+        return Array.from(new Uint8Array(digest))
+            .map((b) => b.toString(16).padStart(2, '0'))
+            .join('');
+    }
+    if (typeof NodeSingletons.createHash === 'function') {
+        return NodeSingletons.createHash('sha256').update(value).digest('hex');
+    }
+    return String(Math.random()).slice(2);
+};
+const toBase64 = (value) => {
+    if (typeof Buffer !== 'undefined') {
+        return Buffer.from(value, 'utf-8').toString('base64');
+    }
+    if (typeof globalThis !== 'undefined' && typeof globalThis.btoa === 'function') {
+        const bytes = new TextEncoder().encode(value);
+        let binary = '';
+        bytes.forEach((byte) => {
+            binary += String.fromCodePoint(byte);
+        });
+        return globalThis.btoa(binary);
+    }
+    return value;
+};
+const getCachedProcessor = (key) => {
+    const entry = processorCache.get(key);
+    if (!entry)
+        return null;
+    const now = Date.now();
+    if (entry.expiresAt <= now) {
+        processorCache.delete(key);
+        processorCacheSize -= entry.size;
+        return null;
+    }
+    entry.lastAccess = now;
+    return entry;
+};
+const evictCacheIfNeeded = (maxSize) => {
+    if (processorCacheSize <= maxSize)
+        return;
+    const entries = Array.from(processorCache.entries());
+    entries.sort((a, b) => a[1].lastAccess - b[1].lastAccess);
+    for (const [key, entry] of entries) {
+        if (processorCacheSize <= maxSize)
+            break;
+        processorCache.delete(key);
+        processorCacheSize -= entry.size;
+    }
+};
+const setCachedProcessor = (key, entry, maxSize) => {
+    const existing = processorCache.get(key);
+    if (existing) {
+        processorCacheSize -= existing.size;
+    }
+    processorCache.set(key, entry);
+    processorCacheSize += entry.size;
+    evictCacheIfNeeded(maxSize);
+};
+const isAllowedRemoteHost = (host) => {
+    const allowlist = getProcessorSpecConfig().remoteAllowlist.map((value) => value.toLowerCase());
+    return allowlist.includes(host.toLowerCase());
+};
 const waitForWorkerConnection = async (worker, name, _queueName, timeoutMs) => {
     const startTime = Date.now();
     const checkInterval = 100; // 100ms between checks
@@ -174,6 +265,185 @@ const sanitizeProcessorPath = (value) => {
     const relativePath = base.startsWith('.') ? base : `./${base}`;
     return isAbsolutePath ? base : path.resolve(process.cwd(), relativePath);
 };
+const stripProcessorExtension = (value) => value.replace(/\.(ts|js)$/i, '');
+const normalizeModulePath = (value) => value.replaceAll('\\', '/');
+const buildProcessorModuleCandidates = (modulePath, resolvedPath) => {
+    const candidates = [];
+    const normalized = normalizeModulePath(modulePath.trim());
+    const normalizedResolved = normalizeModulePath(resolvedPath);
+    if (normalized.startsWith('/app/')) {
+        candidates.push(`@app/${stripProcessorExtension(normalized.slice(5))}`);
+    }
+    else if (normalized.startsWith('app/')) {
+        candidates.push(`@app/${stripProcessorExtension(normalized.slice(4))}`);
+    }
+    const appIndex = normalizedResolved.lastIndexOf('/app/');
+    if (appIndex !== -1) {
+        const relative = normalizedResolved.slice(appIndex + 5);
+        if (relative) {
+            candidates.push(`@app/${stripProcessorExtension(relative)}`);
+        }
+    }
+    return Array.from(new Set(candidates));
+};
+const pickProcessorFromModule = (mod, source) => {
+    const candidate = mod?.['default'] ?? mod?.['processor'] ?? mod?.['handler'] ?? mod?.['handle'];
+    if (typeof candidate !== 'function') {
+        const keys = mod ? Object.keys(mod) : [];
+        Logger.warn(`Module imported from ${source} but no valid processor function found (exported: ${keys.join(', ')})`);
+        return undefined;
+    }
+    return candidate;
+};
+const extractZinTrustProcessor = (mod, source) => {
+    const candidate = mod?.['ZinTrustProcessor'];
+    if (typeof candidate !== 'function') {
+        const keys = mod ? Object.keys(mod) : [];
+        Logger.warn(`Module imported from ${source} but missing ZinTrustProcessor export (exported: ${keys.join(', ')})`);
+        return undefined;
+    }
+    return candidate;
+};
+const readResponseBody = async (response, maxSize) => {
+    const contentLength = response.headers.get('content-length');
+    if (contentLength) {
+        const size = Number.parseInt(contentLength, 10);
+        if (Number.isFinite(size) && size > maxSize) {
+            throw ErrorFactory.createConfigError('PROCESSOR_FETCH_SIZE_EXCEEDED');
+        }
+    }
+    const buffer = await response.arrayBuffer();
+    if (buffer.byteLength > maxSize) {
+        throw ErrorFactory.createConfigError('PROCESSOR_FETCH_SIZE_EXCEEDED');
+    }
+    return new TextDecoder().decode(buffer);
+};
+const computeCacheTtlSeconds = (config, cacheControl) => Math.min(config.cacheMaxTtlSeconds, cacheControl.maxAge ?? config.cacheDefaultTtlSeconds);
+const refreshCachedProcessor = (existing, config, cacheControl) => {
+    const ttl = computeCacheTtlSeconds(config, cacheControl);
+    const now = Date.now();
+    existing.expiresAt = now + ttl * 1000;
+    existing.lastAccess = now;
+    return existing.processor;
+};
+const cacheProcessorFromResponse = async (params) => {
+    const { response, normalized, config, cacheKey } = params;
+    const code = await readResponseBody(response, config.fetchMaxSizeBytes);
+    const dataUrl = `data:text/javascript;base64,${toBase64(code)}`;
+    const mod = await import(dataUrl);
+    const processor = extractZinTrustProcessor(mod, normalized);
+    if (!processor) {
+        throw ErrorFactory.createConfigError('INVALID_PROCESSOR_URL_EXPORT');
+    }
+    const cacheControl = parseCacheControl(response.headers.get('cache-control'));
+    const ttl = computeCacheTtlSeconds(config, cacheControl);
+    const size = new TextEncoder().encode(code).byteLength;
+    const now = Date.now();
+    setCachedProcessor(cacheKey, {
+        code,
+        processor,
+        etag: response.headers.get('etag') ?? undefined,
+        cachedAt: now,
+        expiresAt: now + ttl * 1000,
+        size,
+        lastAccess: now,
+    }, config.cacheMaxSizeBytes);
+    return processor;
+};
+const delay = (ms) => new Promise((resolve) => {
+    globalThis.setTimeout(resolve, ms);
+});
+const fetchProcessorAttempt = async (params) => {
+    const { normalized, config, cacheKey, existing, attempt, maxAttempts } = params;
+    const controller = new AbortController();
+    const timeoutId = globalThis.setTimeout(() => controller.abort(), config.fetchTimeoutMs);
+    try {
+        const headers = {};
+        if (existing?.etag)
+            headers['If-None-Match'] = existing.etag;
+        const response = await fetch(normalized, {
+            method: 'GET',
+            headers,
+            signal: controller.signal,
+        });
+        if (response.status === 304 && existing) {
+            const cacheControl = parseCacheControl(response.headers.get('cache-control'));
+            return refreshCachedProcessor(existing, config, cacheControl);
+        }
+        if (!response.ok) {
+            throw ErrorFactory.createConfigError(`PROCESSOR_FETCH_FAILED:${response.status}`);
+        }
+        return await cacheProcessorFromResponse({ response, normalized, config, cacheKey });
+    }
+    catch (error) {
+        if (controller.signal.aborted) {
+            Logger.error('Processor URL fetch timeout', error);
+        }
+        else {
+            Logger.error('Processor URL fetch failed', error);
+        }
+        if (attempt >= maxAttempts) {
+            return undefined;
+        }
+        await delay(config.retryBackoffMs * attempt);
+        return fetchProcessorAttempt({
+            normalized,
+            config,
+            cacheKey,
+            existing,
+            attempt: attempt + 1,
+            maxAttempts,
+        });
+    }
+    finally {
+        clearTimeout(timeoutId);
+    }
+};
+const resolveProcessorFromUrl = async (spec) => {
+    const normalized = normalizeProcessorSpec(spec);
+    let parsed;
+    try {
+        parsed = new URL(normalized);
+    }
+    catch (error) {
+        Logger.error('Invalid processor URL spec', error);
+        return undefined;
+    }
+    if (parsed.protocol === 'file:') {
+        const filePath = decodeURIComponent(parsed.pathname);
+        return resolveProcessorFromPath(filePath);
+    }
+    if (parsed.protocol !== 'https:' && parsed.protocol !== 'file:') {
+        Logger.warn(`Invalid processor URL protocol: ${parsed.protocol}. Only https:// and file:// are supported.`);
+    }
+    if (!isAllowedRemoteHost(parsed.host) && parsed.protocol !== 'file:') {
+        Logger.warn(`Invalid processor URL host: ${parsed.host}. Host is not in the allowlist.`);
+    }
+    const config = getProcessorSpecConfig();
+    const cacheKey = await computeSha256(normalized);
+    const cached = getCachedProcessor(cacheKey);
+    if (cached)
+        return cached.processor;
+    return fetchProcessorAttempt({
+        normalized,
+        config,
+        cacheKey,
+        existing: processorCache.get(cacheKey),
+        attempt: 1,
+        maxAttempts: Math.max(1, config.retryAttempts),
+    });
+};
+const resolveProcessorSpec = async (spec) => {
+    if (!spec)
+        return undefined;
+    const normalized = normalizeProcessorSpec(spec);
+    const prebuilt = processorSpecRegistry.get(normalized) ?? processorSpecRegistry.get(spec);
+    if (prebuilt)
+        return prebuilt;
+    if (isUrlSpec(spec))
+        return resolveProcessorFromUrl(spec);
+    return resolveProcessorFromPath(spec);
+};
 const resolveProcessorFromPath = async (modulePath) => {
     const trimmed = modulePath.trim();
     if (!trimmed)
@@ -181,20 +451,35 @@ const resolveProcessorFromPath = async (modulePath) => {
     const resolved = sanitizeProcessorPath(trimmed);
     if (!resolved)
         return undefined;
+    const importProcessorFromCandidates = async (candidates) => {
+        if (candidates.length === 0)
+            return undefined;
+        const [candidatePath, ...rest] = candidates;
+        try {
+            const mod = await import(candidatePath);
+            const candidate = pickProcessorFromModule(mod, candidatePath);
+            if (candidate)
+                return candidate;
+        }
+        catch (candidateError) {
+            Logger.debug(`Processor module candidate import failed: ${candidatePath}`, candidateError);
+        }
+        return importProcessorFromCandidates(rest);
+    };
     try {
         const mod = await import(resolved);
-        const candidate = mod
-        if (
-
-        }
-        return typeof candidate === 'function'
-            ? candidate
-            : undefined;
+        const candidate = pickProcessorFromModule(mod, resolved);
+        if (candidate)
+            return candidate;
     }
     catch (err) {
+        const candidates = buildProcessorModuleCandidates(trimmed, resolved);
+        const resolvedCandidate = await importProcessorFromCandidates(candidates);
+        if (resolvedCandidate)
+            return resolvedCandidate;
         Logger.error(`Failed to import processor from path: ${resolved}`, err);
-        return undefined;
     }
+    return undefined;
 };
 const resolveProcessor = async (name) => {
     const direct = processorRegistry.get(name);
@@ -203,7 +488,7 @@ const resolveProcessor = async (name) => {
     const pathHint = processorPathRegistry.get(name);
     if (pathHint) {
         try {
-            const resolved = await
+            const resolved = await resolveProcessorSpec(pathHint);
             if (resolved)
                 return resolved;
         }
@@ -494,8 +779,12 @@ const resolveRedisFallbacks = () => {
     const queueRedis = queueConfig.drivers.redis;
     return {
         host: queueRedis?.driver === 'redis' ? queueRedis.host : Env.get('REDIS_HOST', '127.0.0.1'),
-        port: queueRedis?.driver === 'redis'
-
+        port: queueRedis?.driver === 'redis'
+            ? queueRedis.port
+            : Env.getInt('REDIS_PORT', ZintrustLang.REDIS_DEFAULT_PORT),
+        db: queueRedis?.driver === 'redis'
+            ? queueRedis.database
+            : Env.getInt('REDIS_QUEUE_DB', ZintrustLang.REDIS_DEFAULT_DB),
         password: queueRedis?.driver === 'redis' ? (queueRedis.password ?? '') : Env.get('REDIS_PASSWORD', ''),
     };
 };
@@ -503,7 +792,7 @@ const resolveRedisConfigFromEnv = (config, context) => {
     const fallback = resolveRedisFallbacks();
     const host = requireRedisHost(resolveEnvString(config.host ?? 'REDIS_HOST', fallback.host), context);
     const port = resolveEnvInt(String(config.port ?? 'REDIS_PORT'), fallback.port);
-    const db =
+    const db = resolveEnvInt(config.db ?? 'REDIS_QUEUE_DB', fallback.db);
     const password = resolveEnvString(config.password ?? 'REDIS_PASSWORD', fallback.password);
     return {
         host,
@@ -716,8 +1005,8 @@ const ensureWorkerStoreConfigured = async () => {
 };
 const buildWorkerRecord = (config, status) => {
     const now = new Date();
-    const
-        ?
+    const normalizedProcessorSpec = config.processorSpec
+        ? normalizeProcessorSpec(config.processorSpec)
         : null;
     return {
         name: config.name,
@@ -727,7 +1016,8 @@ const buildWorkerRecord = (config, status) => {
         autoStart: resolveAutoStart(config),
         concurrency: config.options?.concurrency ?? 1,
         region: config.datacenter?.primaryRegion ?? null,
-
+        processorSpec: normalizedProcessorSpec ?? null,
+        activeStatus: config.activeStatus ?? true,
         features: config.features ? { ...config.features } : null,
         infrastructure: config.infrastructure ? { ...config.infrastructure } : null,
         datacenter: config.datacenter ? { ...config.datacenter } : null,
@@ -994,6 +1284,7 @@ const registerWorkerInstance = (params) => {
     WorkerRegistry.register({
         name: config.name,
         config: {},
+        activeStatus: config.activeStatus ?? true,
        version: workerVersion,
         region: config.datacenter?.primaryRegion,
         queues: [queueName],
@@ -1007,6 +1298,7 @@ const registerWorkerInstance = (params) => {
         region: config.datacenter?.primaryRegion ?? 'unknown',
         queueName,
         concurrency: options?.concurrency ?? 1,
+        activeStatus: config.activeStatus ?? true,
         startedAt: new Date(),
         stoppedAt: null,
         lastProcessedAt: null,
@@ -1064,7 +1356,9 @@ export const WorkerFactory = Object.freeze({
     registerProcessors,
     registerProcessorPaths,
     registerProcessorResolver,
+    registerProcessorSpec,
     resolveProcessorPath,
+    resolveProcessorSpec,
     /**
      * Create new worker with full setup
      */
@@ -1134,10 +1428,6 @@ export const WorkerFactory = Object.freeze({
             }
             // Start health monitoring for the worker
             startHealthMonitoring(name, worker, queueName);
-            Logger.info(`Worker created: ${name}@${workerVersion}`, {
-                queueName,
-                features: Object.keys(features ?? {}).filter((k) => features?.[k] === true),
-            });
             return worker;
         }
         catch (error) {
@@ -1190,12 +1480,15 @@ export const WorkerFactory = Object.freeze({
     /**
      * Stop worker
      */
-    async stop(name, persistenceOverride) {
+    async stop(name, persistenceOverride, options) {
+        const skipPersistedUpdate = options?.skipPersistedUpdate === true;
         const instance = workers.get(name);
         const store = await validateAndGetStore(name, instance?.config, persistenceOverride);
         if (!instance) {
-
-
+            if (!skipPersistedUpdate) {
+                await store.update(name, { status: 'stopped', updatedAt: new Date() });
+                Logger.info(`Worker marked stopped (not running): ${name}`);
+            }
             return;
         }
         // Execute beforeStop hooks
@@ -1230,15 +1523,17 @@ export const WorkerFactory = Object.freeze({
         instance.status = WorkerCreationStatus.STOPPED;
         // Stop health monitoring for this worker
         HealthMonitor.unregister(name);
-
-
-
-
-
-
-
-
-
+        if (!skipPersistedUpdate) {
+            try {
+                await store.update(name, {
+                    status: WorkerCreationStatus.STOPPED,
+                    updatedAt: new Date(),
+                });
+                Logger.info(`Worker "${name}" status updated to stopped`);
+            }
+            catch (error) {
+                Logger.error(`Failed to update worker "${name}" status`, error);
+            }
         }
         await WorkerRegistry.stop(name);
         // Execute afterStop hooks
@@ -1327,6 +1622,35 @@ export const WorkerFactory = Object.freeze({
         }
         await WorkerFactory.startFromPersisted(name, persistenceOverride);
     },
+    /**
+     * Update active status for a worker
+     */
+    async setWorkerActiveStatus(name, activeStatus, persistenceOverride) {
+        const instance = workers.get(name);
+        const store = await validateAndGetStore(name, instance?.config, persistenceOverride);
+        if (instance) {
+            instance.config.activeStatus = activeStatus;
+        }
+        await store.update(name, { activeStatus, updatedAt: new Date() });
+        WorkerRegistry.setActiveStatus(name, activeStatus);
+        if (activeStatus === false && instance) {
+            await WorkerFactory.stop(name, persistenceOverride);
+        }
+    },
+    /**
+     * Get active status for a worker
+     */
+    async getWorkerActiveStatus(name, persistenceOverride) {
+        const instance = workers.get(name);
+        if (instance?.config.activeStatus !== undefined) {
+            return instance.config.activeStatus;
+        }
+        const store = await getStoreForWorker(instance?.config, persistenceOverride);
+        const record = await store.get(name);
+        if (!record)
+            return null;
+        return record.activeStatus ?? true;
+    },
     /**
      * Update persisted worker record and in-memory config if running.
      */
@@ -1355,6 +1679,8 @@ export const WorkerFactory = Object.freeze({
             ...cfg.options,
             concurrency: merged.concurrency ?? cfg.options?.concurrency,
         },
+        processorSpec: merged.processorSpec ?? cfg.processorSpec,
+        activeStatus: merged.activeStatus ?? cfg.activeStatus,
         infrastructure: merged.infrastructure ?? cfg.infrastructure,
         features: merged.features ?? cfg.features,
         datacenter: merged.datacenter ?? cfg.datacenter,
@@ -1371,6 +1697,13 @@ export const WorkerFactory = Object.freeze({
         if (!instance) {
             throw ErrorFactory.createNotFoundError(`Worker "${name}" not found`);
         }
+        if (instance.config.activeStatus === false) {
+            throw ErrorFactory.createConfigError(`Worker "${name}" is inactive`);
+        }
+        const persisted = await store.get(name);
+        if (persisted?.activeStatus === false) {
+            throw ErrorFactory.createConfigError(`Worker "${name}" is inactive`);
+        }
         const version = instance.config.version ?? '1.0.0';
         await WorkerRegistry.start(name, version);
         instance.status = WorkerCreationStatus.RUNNING;
@@ -1392,12 +1725,15 @@ export const WorkerFactory = Object.freeze({
         return records.map((record) => record.name);
     },
     async listPersistedRecords(persistenceOverride, options) {
+        const includeInactive = options?.includeInactive === true;
         if (!persistenceOverride) {
             await ensureWorkerStoreConfigured();
-
+            const records = await workerStore.list(options);
+            return includeInactive ? records : records.filter((record) => record.activeStatus !== false);
         }
         const store = await resolveWorkerStoreForPersistence(persistenceOverride);
-
+        const records = await store.list(options);
+        return includeInactive ? records : records.filter((record) => record.activeStatus !== false);
     },
     /**
      * Start a worker from persisted storage when it is not registered.
@@ -1407,24 +1743,29 @@
         if (!record) {
             throw ErrorFactory.createNotFoundError(`Worker "${name}" not found in persistence store`);
         }
+        if (record.activeStatus === false) {
+            throw ErrorFactory.createConfigError(`Worker "${name}" is inactive`);
+        }
         let processor = await resolveProcessor(name);
-
+        const spec = record.processorSpec ?? undefined;
+        if (!processor && spec) {
             try {
-                processor = await
+                processor = await resolveProcessorSpec(spec);
             }
             catch (error) {
                 Logger.error(`Failed to resolve processor module for "${name}"`, error);
             }
         }
         if (!processor) {
-            throw ErrorFactory.createConfigError(`Worker "${name}" processor is not registered or resolvable. Register the processor at startup or persist a
+            throw ErrorFactory.createConfigError(`Worker "${name}" processor is not registered or resolvable. Register the processor at startup or persist a processorSpec.`);
         }
         await WorkerFactory.create({
             name: record.name,
             queueName: record.queueName,
             version: record.version ?? undefined,
             processor,
-
+            processorSpec: record.processorSpec ?? undefined,
+            activeStatus: record.activeStatus ?? true,
             autoStart: true, // Override to true when manually starting
             options: { concurrency: record.concurrency },
             infrastructure: record.infrastructure,
@@ -1506,8 +1847,37 @@
      */
     async shutdown() {
         Logger.info('WorkerFactory shutting down...');
-        const
-
+        const workerEntries = Array.from(workers.entries());
+        const workerNames = workerEntries.map(([name]) => name);
+        // Bulk-update persisted statuses before stopping workers to avoid per-worker DB updates
+        // during shutdown (which can fail if DB connections are closing).
+        const storeGroups = new Map();
+        // Parallel get stores for all workers
+        const storePromises = workerEntries.map(async ([name, instance]) => {
+            const store = await getStoreForWorker(instance.config);
+            return { name, store };
+        });
+        const storeMappings = await Promise.all(storePromises);
+        for (const { name, store } of storeMappings) {
+            const existing = storeGroups.get(store);
+            if (existing) {
+                existing.push(name);
+            }
+            else {
+                storeGroups.set(store, [name]);
+            }
+        }
+        // Parallel bulk updates for all store groups
+        const updatePromises = Array.from(storeGroups.entries()).map(async ([store, names]) => {
+            if (typeof store.updateMany === 'function') {
+                await store.updateMany(names, {
+                    status: WorkerCreationStatus.STOPPED,
+                    updatedAt: new Date(),
+                });
+            }
+        });
+        await Promise.all(updatePromises);
+        await Promise.all(workerNames.map(async (name) => WorkerFactory.stop(name, undefined, { skipPersistedUpdate: true })));
         // Shutdown all modules
         ResourceMonitor.stop();
         await WorkerMetrics.shutdown();
@@ -1527,5 +1897,15 @@
         workers.clear();
         Logger.info('WorkerFactory shutdown complete');
     },
+    /**
+     * Reset persistence connection state.
+     * Useful when connections become stale in long-running processes or serverless environments.
+     */
+    async resetPersistence() {
+        workerStoreConfigured = false;
+        workerStore = InMemoryWorkerStore.create();
+        storeInstanceCache.clear();
+        Logger.info('Worker persistence configuration reset');
+    },
 });
 // Graceful shutdown handled by WorkerShutdown
package/dist/WorkerInit.js
CHANGED
@@ -115,7 +115,7 @@ async function initialize(options = {}) {
 async function autoStartPersistedWorkers() {
     // Check if auto-start is enabled globally via environment variable
     Logger.debug('Auto-start check', {
-        envAutoStart:
+        envAutoStart: Env.getBool('WORKER_AUTO_START', false),
         configAutoStart: workersConfig.defaultWorker?.autoStart,
     });
     if (workersConfig.defaultWorker?.autoStart !== true) {
@@ -129,6 +129,9 @@ async function autoStartPersistedWorkers() {
         records: records.map((r) => ({ name: r.name, autoStart: r.autoStart })),
     });
     const candidates = records.filter((record) => {
+        if (record.activeStatus === false) {
+            return false;
+        }
         // If autoStart is explicitly true, always include
         if (record.autoStart === true) {
             return true;
@@ -153,7 +156,8 @@ async function autoStartPersistedWorkers() {
             return { name: record.name, started: true, skipped: false };
         }
         catch (error) {
-
+            const message = error instanceof Error ? error.message : String(error);
+            Logger.warn(`Auto-start failed for worker ${record.name}: ${message}`);
             return { name: record.name, started: false, skipped: false };
         }
     }));
@@ -166,7 +170,8 @@ async function autoStartPersistedWorkers() {
         });
     }
     catch (error) {
-
+        const message = error instanceof Error ? error.message : String(error);
+        Logger.warn(`Auto-start persisted workers failed: ${message}`);
     }
 }
 /**
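WorkerInit's auto-start path now logs the WORKER_AUTO_START flag via Env.getBool and filters out persisted records whose activeStatus is false before the existing autoStart rules run. A small sketch of that selection step, with the record shape assumed from the fields this hunk reads and the rules that fall outside the hunk omitted:

```ts
// Illustrative only: mirrors the candidate filter in the hunk above.
// The record shape is assumed from the fields the diff reads, not from the package's types.
interface PersistedWorkerRecord {
  name: string;
  autoStart?: boolean;
  activeStatus?: boolean;
}

const selectAutoStartCandidates = (records: PersistedWorkerRecord[]): PersistedWorkerRecord[] =>
  records.filter((record) => {
    // New in 0.1.30: workers toggled inactive are never auto-started.
    if (record.activeStatus === false) {
      return false;
    }
    // If autoStart is explicitly true, always include.
    if (record.autoStart === true) {
      return true;
    }
    // The full implementation applies further rules here; they are outside this hunk.
    return false;
  });
```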