@goatlab/node-backend 0.2.5 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +146 -14
- package/dist/container/Container.d.ts +113 -25
- package/dist/container/Container.js +391 -168
- package/dist/container/Container.js.map +1 -1
- package/dist/container/examples/batch-operations.example.d.ts +1 -0
- package/dist/container/examples/batch-operations.example.js +165 -0
- package/dist/container/examples/batch-operations.example.js.map +1 -0
- package/dist/container/helpers.d.ts +8 -0
- package/dist/container/helpers.js +22 -0
- package/dist/container/helpers.js.map +1 -1
- package/dist/container/types.d.ts +60 -0
- package/dist/index.d.ts +1 -1
- package/dist/index.js.map +1 -1
- package/dist/server/bootstraps/getExpressTrpcApp.d.ts +5 -1
- package/dist/server/bootstraps/getExpressTrpcApp.js +216 -12
- package/dist/server/bootstraps/getExpressTrpcApp.js.map +1 -1
- package/dist/server/middleware/memoryMonitor.example.d.ts +1 -0
- package/dist/server/middleware/memoryMonitor.example.js +109 -0
- package/dist/server/middleware/memoryMonitor.example.js.map +1 -0
- package/dist/server/middleware/memoryMonitor.middleware.d.ts +42 -0
- package/dist/server/middleware/memoryMonitor.middleware.js +134 -0
- package/dist/server/middleware/memoryMonitor.middleware.js.map +1 -0
- package/dist/server/middleware/productionError.middleware.d.ts +16 -0
- package/dist/server/middleware/productionError.middleware.js +94 -0
- package/dist/server/middleware/productionError.middleware.js.map +1 -0
- package/dist/server/middleware/security.middleware.d.ts +28 -0
- package/dist/server/middleware/security.middleware.js +151 -0
- package/dist/server/middleware/security.middleware.js.map +1 -0
- package/dist/server/services/secrets/examples/container-preload.example.d.ts +1 -0
- package/dist/server/services/secrets/examples/container-preload.example.js +148 -0
- package/dist/server/services/secrets/examples/container-preload.example.js.map +1 -0
- package/dist/server/services/secrets/index.d.ts +1 -0
- package/dist/server/services/secrets/index.js +6 -0
- package/dist/server/services/secrets/index.js.map +1 -0
- package/dist/server/services/secrets/secret.service.d.ts +48 -6
- package/dist/server/services/secrets/secret.service.js +280 -28
- package/dist/server/services/secrets/secret.service.js.map +1 -1
- package/dist/server/services/translations/translation.model.js +2 -1
- package/dist/server/services/translations/translation.model.js.map +1 -1
- package/dist/server/services/translations/translation.service.d.ts +8 -1
- package/dist/server/services/translations/translation.service.js +123 -13
- package/dist/server/services/translations/translation.service.js.map +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +13 -1
@@ -2,7 +2,6 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Container = void 0;
 const async_hooks_1 = require("async_hooks");
-const crypto_1 = require("crypto");
 const LruCache_1 = require("./LruCache");
 const helpers_1 = require("./helpers");
 // Instantiation helper moved to helpers.ts for better performance
@@ -96,8 +95,9 @@ class Container {
     /**
      * Service instance cache managers - one per service type
      * Each manager handles LRU caching for that specific service
+     * Lazy-allocated to save memory for unused services
      */
-    managers;
+    managers = {};
     /**
      * AsyncLocalStorage provides automatic tenant context isolation
      * Each async call tree gets its own isolated service instances
@@ -125,11 +125,7 @@ class Container {
     // ═══════════════════════════════════════════════════════════════════════════
     // ⚡ PERFORMANCE OPTIMIZATION CACHES
     // ═══════════════════════════════════════════════════════════════════════════
-    /**
-     * Path string cache: converts ['user', 'repo'] -> "user.repo"
-     * Optimized to avoid repeated string joins and array operations
-     */
-    pathCache = new Map();
+    // Path cache removed - direct string concatenation is faster
     /**
      * Proxy object cache: reuses proxy objects for the same paths
      * Reduces memory allocation and improves performance
@@ -151,22 +147,23 @@ class Container {
     // ═══════════════════════════════════════════════════════════════════════════
     /**
      * High-performance metrics using Uint32Array for better JIT optimization
-     * Indices: [hits, misses, creates, ctx,
+     * Indices: [hits, misses, creates, ctx, proxy, initHits, resets, batchOps, batchErrors]
      * Auto-wraps at 2^32 without overflow checks for maximum performance
      */
-    metrics = new Uint32Array(
+    metrics = new Uint32Array(9);
     /**
      * Metric indices for Uint32Array
      */
-    static
+    static METRIC = {
         HITS: 0,
         MISSES: 1,
         CREATES: 2,
         CONTEXTS: 3,
-
-
-
-
+        PROXIES: 4,
+        INIT_HITS: 5,
+        RESETS: 6,
+        BATCH_OPS: 7,
+        BATCH_ERRORS: 8
     };
     /**
      * Legacy overflow threshold for test compatibility
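The hunk above widens the metrics buffer from the original four counters to nine, indexed through the renamed `Container.METRIC` map. A minimal sketch of this typed-array counter pattern; the `Counters` class and metric names here are illustrative only, not part of the package's API:

```ts
// Minimal sketch of the Uint32Array counter pattern used above (illustrative names).
const METRIC = { HITS: 0, MISSES: 1, CREATES: 2 } as const;

class Counters {
  // One slot per metric; Uint32Array arithmetic wraps at 2^32 without extra checks.
  private readonly slots = new Uint32Array(Object.keys(METRIC).length);

  inc(idx: number): void {
    this.slots[idx]++;
  }

  snapshot(): Record<string, number> {
    return {
      hits: this.slots[METRIC.HITS],
      misses: this.slots[METRIC.MISSES],
      creates: this.slots[METRIC.CREATES],
    };
  }
}

const c = new Counters();
c.inc(METRIC.HITS);
console.log(c.snapshot()); // { hits: 1, misses: 0, creates: 0 }
```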
@@ -181,11 +178,20 @@ class Container {
         if (!this.options.enableMetrics)
             return;
         // Check for test mock of MAX_METRIC_VALUE (legacy compatibility)
-        if (this.MAX_METRIC_VALUE < 1000 &&
+        if (this.MAX_METRIC_VALUE < 1000 &&
+            this.metrics[idx] >= this.MAX_METRIC_VALUE) {
             // Legacy test behavior - reset metrics when mock threshold reached
             this.resetMetrics();
             if (this.options.enableDiagnostics) {
-                const metricNames = [
+                const metricNames = [
+                    'cacheHits',
+                    'cacheMisses',
+                    'instanceCreations',
+                    'contextAccesses',
+                    'proxyCacheHits',
+                    'initializerCacheHits',
+                    'resets'
+                ];
                 console.warn(`Container metrics reset due to overflow protection. Metric '${metricNames[idx] || 'unknown'}' reached ${this.metrics[idx]}`);
             }
         }
@@ -218,14 +224,10 @@ class Container {
             enableDiagnostics: false,
             enableDistributedInvalidation: false,
             distributedInvalidator: undefined,
-            ...options
+            ...options
         };
-        // Initialize cache managers for each service
-        this.managers = this.createManagers(this.factories, this.options.cacheSize);
         // Pre-cache factory lookups for better performance
         this.preloadFactoryCache();
-        // Pre-warm proxy cache with common paths to reduce runtime allocation
-        this.prewarmProxyCache();
         // Setup distributed cache invalidation if enabled
         this.setupDistributedInvalidation();
     }
@@ -233,47 +235,18 @@ class Container {
     // 🏭 CACHE MANAGER SETUP
     // ═══════════════════════════════════════════════════════════════════════════
     /**
-     *
-     *
+     * Get or create a cache manager for a service - lazy allocation
+     * Saves memory by only creating caches for services that are actually used
+     * Note: Type safety is enforced at compile time through generics, not runtime
      */
-
-
-        for (const [key, value] of Object.entries(defs)) {
-            const newPath = path.length === 0 ? [key] : [...path, key];
-            if (typeof value === 'function') {
-                // This is a factory function/constructor - create a cache for it
-                const flatKey = this.getOrCachePath(newPath);
-                managers[flatKey] = (0, LruCache_1.createServiceCache)(cacheSize);
-            }
-            else if (typeof value === 'object' && value !== null) {
-                // This is a nested object - recurse into it
-                const subManagers = this.createManagers(value, cacheSize, newPath);
-                Object.assign(managers, subManagers);
-            }
-        }
-        return managers;
+    getManager(key) {
+        return (this.managers[key] ??= (0, LruCache_1.createServiceCache)(this.options.cacheSize));
     }
+    // createManagers() removed - managers are created lazily via getManager()
     // ═══════════════════════════════════════════════════════════════════════════
     // ⚡ PERFORMANCE OPTIMIZATION HELPERS
     // ═══════════════════════════════════════════════════════════════════════════
-    /**
-     * Optimized path caching that maintains flat key strings to avoid repeated joins
-     * Uses closure to keep pre-computed cache and final keys for maximum performance
-     */
-    getOrCachePath(path) {
-        // Create cache key once and reuse the flat key computation
-        const stringPath = path.map(p => typeof p === 'symbol' ? p.toString() : p);
-        const cacheKey = stringPath.join('|'); // Cache key uses pipe separator
-        let cached = this.pathCache.get(cacheKey);
-        if (!cached) {
-            cached = stringPath.join('.'); // Final path uses dot separator
-            this.pathCache.set(cacheKey, cached);
-        }
-        else {
-            this.inc(Container.METRIC_INDICES.PATHS);
-        }
-        return cached;
-    }
+    // getOrCachePath() removed - direct string concatenation in createPreloadProxy()
     /**
      * Pre-populate the factory cache by walking the entire factory tree
      * This eliminates the need for recursive object traversal during runtime
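The eager `createManagers()` walk is replaced by `getManager()`, which relies on the logical nullish assignment operator (`??=`, ES2021) so a per-service LRU cache is only allocated the first time that service is requested. A minimal sketch of the pattern, with a stand-in `createServiceCache` (the package's real helper lives in `./LruCache`):

```ts
// Minimal sketch of lazy per-key allocation via `??=`.
// `createServiceCache` is a stand-in; the package uses its own LruCache helper.
type Cache = Map<string, unknown>;

const createServiceCache = (_maxSize: number): Cache => new Map();

const managers: Record<string, Cache> = {};

function getManager(key: string, cacheSize = 100): Cache {
  // Allocates the cache on first request for `key`, reuses it afterwards.
  return (managers[key] ??= createServiceCache(cacheSize));
}

getManager('api.users');            // allocates
getManager('api.users');            // reuses the same Map
console.log(Object.keys(managers)); // ['api.users'] — nothing else allocated
```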
@@ -281,25 +254,7 @@ class Container {
     preloadFactoryCache() {
         this.walkFactories(this.factories, []);
     }
-    /**
-     * Pre-warm proxy cache with static builders for common paths
-     * This reduces proxy creation overhead during runtime access patterns
-     */
-    prewarmProxyCache() {
-        // Pre-create proxies for all known factory paths to avoid runtime creation
-        for (const [path] of this.factoryCache.entries()) {
-            const pathParts = path.split('.');
-            // Pre-warm all parent paths (e.g., for "api.users", pre-warm "api")
-            for (let i = 1; i <= pathParts.length; i++) {
-                const subPath = pathParts.slice(0, i);
-                const pathKey = subPath.join('|');
-                if (!this.proxyCache.has(pathKey)) {
-                    // Create and cache the proxy for this path
-                    this.createPreloadProxy(subPath);
-                }
-            }
-        }
-    }
+    // prewarmProxyCache() removed - proxies are created lazily
     /**
      * Recursive factory tree walker that builds the flat factory cache
      * Converts nested object structure to flat dot-notation keys
@@ -309,7 +264,7 @@ class Container {
             const newPath = path.length === 0 ? [key] : [...path, key];
             if (typeof value === 'function') {
                 // Found a factory - cache it with its full path
-                const flatKey =
+                const flatKey = newPath.join('.');
                 this.factoryCache.set(flatKey, value);
             }
             else if (typeof value === 'object' && value !== null) {
@@ -350,46 +305,38 @@ class Container {
      *
      * This enables natural dot-notation access while maintaining lazy loading
      */
-    createPreloadProxy(path =
-
-
-
-        this.inc(Container.METRIC_INDICES.PROXIES);
-        return this.proxyCache.get(pathKey);
+    createPreloadProxy(path = '') {
+        if (this.proxyCache.has(path)) {
+            this.inc(Container.METRIC.PROXIES);
+            return this.proxyCache.get(path);
         }
         const proxy = new Proxy({}, // Empty target object - all access is intercepted
         {
             get: (_, prop) => {
-                const newPath = path
-                const
-                const factory = this.factoryCache.get(flatKey);
+                const newPath = path ? `${path}.${String(prop)}` : String(prop);
+                const factory = this.factoryCache.get(newPath);
                 if (factory) {
                     // Found a factory - return a function that creates/caches instances
-                    return (id, ...
-                    const mgr = this.
+                    return (id, ...args) => {
+                        const mgr = this.getManager(newPath);
                         let inst = mgr.get(id);
                         if (!inst) {
-
-                            this.inc(Container.
-
-                            inst = (0, helpers_1.instantiate)(factory, params);
+                            this.inc(Container.METRIC.MISSES);
+                            this.inc(Container.METRIC.CREATES);
+                            inst = (0, helpers_1.instantiate)(factory, args);
                             mgr.set(id, inst);
                         }
                         else {
-
-                            this.inc(Container.METRIC_INDICES.HITS);
+                            this.inc(Container.METRIC.HITS);
                         }
                         return inst;
                     };
                 }
-
-
-
-                }
-            },
+                // No factory found - must be a nested path, return another proxy
+                return this.createPreloadProxy(newPath);
+            }
         });
-
-        this.proxyCache.set(pathKey, proxy);
+        this.proxyCache.set(path, proxy);
         return proxy;
     }
     // ═══════════════════════════════════════════════════════════════════════════
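The rewritten `createPreloadProxy()` builds flat keys by direct string concatenation (`parent.child`) and, when no factory matches, recurses into another proxy for the next path segment. A condensed sketch of that lookup strategy, leaving out the container's proxy caching, metrics, and instance cache; the `factories` map and its entries are purely illustrative:

```ts
// Condensed sketch of dot-notation lookup over a flat factory map (illustrative entries).
type Factory = (id: string) => unknown;

const factories = new Map<string, Factory>([
  ['api.users', (id) => ({ kind: 'users', id })],
]);

function pathProxy(path = ''): any {
  return new Proxy({}, {
    get(_target, prop) {
      const newPath = path ? `${path}.${String(prop)}` : String(prop);
      const factory = factories.get(newPath);
      // Leaf: return the factory; otherwise keep descending with a new proxy.
      return factory ?? pathProxy(newPath);
    },
  });
}

const services = pathProxy();
console.log(services.api.users('tenant-1')); // { kind: 'users', id: 'tenant-1' }
```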
@@ -409,14 +356,21 @@ class Container {
      * More efficient for pure synchronous code paths
      */
     runWithContextSync(instances, tenantMetadata, fn) {
-        const
-        this.als.enterWith(
+        const prev = this.als.getStore();
+        this.als.enterWith({ instances, tenantMetadata });
         try {
             return fn();
         }
         finally {
-
-
+            if (prev) {
+                this.als.enterWith(prev);
+            }
+            else if ('disable' in this.als) {
+                // Node 20+ - fully clear context when no previous context
+                // The disable() method was added in Node.js 20.5.0 to properly clear ALS context
+                // In earlier versions, this check safely falls through without error
+                this.als.disable();
+            }
         }
     }
     /**
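`runWithContextSync()` now snapshots the previous store before `enterWith()` and restores it in the `finally` block, calling `disable()` when there was no outer context (as the comments in the hunk note). Without the restore step, the inner store would leak into the caller's synchronous execution. A standalone sketch of that save/restore pattern:

```ts
// Standalone sketch of the enterWith/restore pattern shown above.
import { AsyncLocalStorage } from 'node:async_hooks';

type Store = { tenant: string };
const als = new AsyncLocalStorage<Store>();

function runSync<T>(store: Store, fn: () => T): T {
  const prev = als.getStore();
  als.enterWith(store);
  try {
    return fn();
  } finally {
    if (prev) {
      als.enterWith(prev); // restore the outer context
    } else if ('disable' in als) {
      als.disable();       // clear the context entirely when there was no outer store
    }
  }
}

runSync({ tenant: 'a' }, () => {
  console.log(als.getStore()); // { tenant: 'a' }
});
console.log(als.getStore());   // undefined — context was cleared
```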
@@ -435,7 +389,7 @@ class Container {
         if (!store) {
             throw new Error("No tenant context available. Make sure you're running within a container context.");
         }
-        this.inc(Container.
+        this.inc(Container.METRIC.CONTEXTS);
         return this.createContextProxy(store.instances);
     }
     /**
@@ -458,21 +412,25 @@ class Container {
         const proxy = new Proxy(obj, {
             get: (target, prop) => {
                 const newPath = path.length === 0 ? [prop] : [...path, prop];
-                const value = target
+                const value = Reflect.get(target, prop);
                 if (value === undefined) {
                     // Check if property exists but is undefined (vs completely missing)
                     if (prop in target) {
                         // Property exists but is undefined - this is valid (e.g., optional services)
                         return undefined;
                     }
-                    // For symbols, especially well-known symbols like Symbol.iterator,
+                    // For symbols, especially well-known symbols like Symbol.iterator,
                     // just return undefined instead of throwing an error
                     if (typeof prop === 'symbol') {
                         return undefined;
                     }
+                    // Special case for 'then' to avoid Promise detection issues
+                    if (prop === 'then') {
+                        return undefined;
+                    }
                     // Property doesn't exist - provide helpful error message
-                    const servicePath =
-                    const available =
+                    const servicePath = newPath.join('.');
+                    const available = Reflect.ownKeys(target).map(String).join(', ');
                     throw new Error(`Service '${servicePath}' not initialized. ` +
                         `Available services: ${available}`);
                 }
@@ -498,7 +456,7 @@ class Container {
                 }
                 // Return value as-is (primitives, functions, Promises, arrays)
                 return value;
-            }
+            }
         });
         // Cache using WeakMap for automatic garbage collection
         this.contextProxyCache.set(obj, proxy);
@@ -508,26 +466,37 @@ class Container {
     // 🚀 BOOTSTRAP & LIFECYCLE
     // ═══════════════════════════════════════════════════════════════════════════
     /**
-     *
-     * Uses
+     * Simple string hash function for fallback tenant keys
+     * Uses djb2 algorithm - fast and good enough for cache keys
+     * Note: For very large metadata objects, consider upgrading to FNV-1a or crypto.createHash
+     * if collision resistance is critical. Current implementation is optimized for speed.
+     */
+    simpleHash(str) {
+        let hash = 5381;
+        for (let i = 0; i < str.length; i++) {
+            hash = (hash << 5) + hash + str.charCodeAt(i);
+        }
+        return (hash >>> 0).toString(36);
+    }
+    /**
+     * Create a stable cache key from tenant metadata
+     * Uses common tenant properties or hashed JSON as fallback
      */
     createTenantCacheKey(meta) {
-
-
-
-
+        const m = meta;
+        if (m.id || m.tenantId || m.name) {
+            return `tenant:${m.id ?? m.tenantId ?? m.name}`;
+        }
+        // Fallback to hashed JSON for complex metadata
         try {
-            const
-
-            const hash = (0, crypto_1.createHash)('sha1').update(sortedMeta).digest('base64url');
-            return `tenant:${tenantId}:${hash}`;
+            const json = JSON.stringify(meta);
+            return `tenant:hash:${this.simpleHash(json)}`;
         }
         catch {
-            //
-            return `tenant:${
+            // Last resort for circular refs
+            return `tenant:ts:${Date.now()}`;
         }
     }
-    // Simple hash function removed in favor of crypto.createHash for better collision resistance
     /**
      * Get or create initialized instances for a tenant with race condition protection
      * Uses both result caching and inflight promise deduplication
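`simpleHash()` replaces the removed `crypto.createHash('sha1')` fallback with the classic djb2 string hash (seed 5381, then `hash * 33 + charCode`), coerced to unsigned and rendered in base 36. A self-contained version of the hash together with the fallback key shape used by `createTenantCacheKey()`; the metadata type here is only illustrative:

```ts
// djb2 string hash, as used for fallback tenant cache keys above.
function simpleHash(str: string): string {
  let hash = 5381;
  for (let i = 0; i < str.length; i++) {
    hash = (hash << 5) + hash + str.charCodeAt(i); // hash * 33 + charCode (int32 wraparound)
  }
  return (hash >>> 0).toString(36); // force unsigned, compact base-36 form
}

// Illustrative metadata shape — the container accepts arbitrary tenant metadata.
type TenantMeta = { id?: string; tenantId?: string; name?: string; [k: string]: unknown };

function tenantCacheKey(meta: TenantMeta): string {
  if (meta.id || meta.tenantId || meta.name) {
    return `tenant:${meta.id ?? meta.tenantId ?? meta.name}`;
  }
  return `tenant:hash:${simpleHash(JSON.stringify(meta))}`;
}

console.log(tenantCacheKey({ id: 'acme' }));            // tenant:acme
console.log(tenantCacheKey({ region: 'eu', tier: 2 })); // tenant:hash:<base-36 digest>
```

Unlike the removed crypto-based version, which hashed a sorted copy of the metadata, this fallback hashes `JSON.stringify(meta)` directly, so property order in the metadata object affects the resulting key.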
@@ -537,7 +506,7 @@ class Container {
         // Check if we already have initialized instances for this tenant
         const cachedInstances = this.initializerCache.get(cacheKey);
         if (cachedInstances) {
-            this.inc(Container.
+            this.inc(Container.METRIC.INIT_HITS);
             return cachedInstances;
         }
         // Check if initialization is already in progress for this tenant
@@ -603,6 +572,240 @@ class Container {
         }
     }
     // ═══════════════════════════════════════════════════════════════════════════
+    // BATCH OPERATIONS
+    // ═══════════════════════════════════════════════════════════════════════════
+    /**
+     * Bootstrap multiple tenants in parallel with controlled concurrency
+     *
+     * This method enables efficient initialization of multiple tenants while:
+     * - Controlling concurrency to avoid overwhelming the system
+     * - Isolating errors so one failure doesn't affect others
+     * - Providing progress tracking for long-running operations
+     * - Collecting performance metrics for each operation
+     *
+     * @param tenantBatch - Array of tenant metadata and optional functions to execute
+     * @param options - Options for controlling the batch operation
+     * @returns Array of results for each tenant, including successes and failures
+     *
+     * ```typescript
+     * const results = await container.bootstrapBatch([
+     *   { metadata: tenant1Meta, fn: async () => processTenant1() },
+     *   { metadata: tenant2Meta, fn: async () => processTenant2() },
+     *   { metadata: tenant3Meta } // No function, just bootstrap
+     * ], {
+     *   concurrency: 5,
+     *   continueOnError: true,
+     *   onProgress: (completed, total) => console.log(`${completed}/${total}`)
+     * })
+     *
+     * // Process results
+     * for (const result of results) {
+     *   if (result.status === 'success') {
+     *     console.log(`Tenant ${result.metadata.id} initialized in ${result.metrics.duration}ms`)
+     *   } else {
+     *     console.error(`Tenant ${result.metadata.id} failed:`, result.error)
+     *   }
+     * }
+     * ```
+     */
+    async bootstrapBatch(tenantBatch, options = {}) {
+        const { concurrency = 10, continueOnError = true, timeout, onProgress } = options;
+        const results = [];
+        const total = tenantBatch.length;
+        let completed = 0;
+        let shouldAbort = false;
+        // Process tenants in chunks based on concurrency limit
+        for (let i = 0; i < total; i += concurrency) {
+            // Check if we should abort due to previous error in fail-fast mode
+            if (shouldAbort) {
+                break;
+            }
+            const chunk = tenantBatch.slice(i, i + concurrency);
+            const chunkPromises = chunk.map(async ({ metadata, fn }) => {
+                const startTime = Date.now();
+                try {
+                    // Apply timeout if specified
+                    let bootstrapPromise = this.bootstrap(metadata, fn);
+                    if (timeout) {
+                        bootstrapPromise = Promise.race([
+                            bootstrapPromise,
+                            new Promise((_, reject) => setTimeout(() => reject(new Error(`Bootstrap timeout after ${timeout}ms`)), timeout))
+                        ]);
+                    }
+                    const { instances, result } = await bootstrapPromise;
+                    const endTime = Date.now();
+                    this.inc(Container.METRIC.BATCH_OPS);
+                    return {
+                        metadata,
+                        status: 'success',
+                        instances,
+                        result,
+                        metrics: {
+                            startTime,
+                            endTime,
+                            duration: endTime - startTime
+                        }
+                    };
+                }
+                catch (error) {
+                    const endTime = Date.now();
+                    this.inc(Container.METRIC.BATCH_ERRORS);
+                    if (this.options.enableDiagnostics) {
+                        console.error(`Batch bootstrap failed for tenant:`, metadata, error);
+                    }
+                    const result = {
+                        metadata,
+                        status: 'error',
+                        error: error instanceof Error ? error : new Error(String(error)),
+                        metrics: {
+                            startTime,
+                            endTime,
+                            duration: endTime - startTime
+                        }
+                    };
+                    if (!continueOnError) {
+                        // Mark that we should abort processing
+                        shouldAbort = true;
+                    }
+                    return result;
+                }
+                finally {
+                    completed++;
+                    onProgress?.(completed, total, metadata);
+                }
+            });
+            // Wait for current chunk to complete before starting next
+            const chunkResults = await Promise.allSettled(chunkPromises);
+            // Extract results from Promise.allSettled
+            for (const settledResult of chunkResults) {
+                if (settledResult.status === 'fulfilled') {
+                    results.push(settledResult.value);
+                }
+                else if (continueOnError) {
+                    // This shouldn't happen as we handle errors above, but just in case
+                    results.push({
+                        metadata: tenantBatch[results.length].metadata,
+                        status: 'error',
+                        error: settledResult.reason,
+                        metrics: {
+                            startTime: Date.now(),
+                            endTime: Date.now(),
+                            duration: 0
+                        }
+                    });
+                }
+            }
+            // Check if we had any errors and should fail fast
+            if (!continueOnError && results.some(r => r.status === 'error')) {
+                const errorResult = results.find(r => r.status === 'error');
+                throw errorResult?.error || new Error('Batch operation failed');
+            }
+        }
+        return results;
+    }
+    /**
+     * Invalidate multiple tenant caches in batch
+     *
+     * Efficiently invalidates caches for multiple tenants with proper disposal
+     * and error handling. Useful for bulk updates or maintenance operations.
+     *
+     * @param tenantIds - Array of tenant IDs to invalidate
+     * @param reason - Optional reason for invalidation (for logging)
+     * @param distributed - Whether to propagate invalidation to other instances
+     * @returns Summary of the batch invalidation operation
+     *
+     * ```typescript
+     * const result = await container.invalidateTenantBatch(
+     *   ['tenant1', 'tenant2', 'tenant3'],
+     *   'Bulk configuration update',
+     *   true // Distribute to other instances
+     * )
+     *
+     * console.log(`Invalidated ${result.succeeded}/${result.total} tenants`)
+     * if (result.failed > 0) {
+     *   console.error('Failed invalidations:', result.errors)
+     * }
+     * ```
+     */
+    async invalidateTenantBatch(tenantIds, reason, distributed = false) {
+        const result = {
+            total: tenantIds.length,
+            succeeded: 0,
+            failed: 0,
+            errors: []
+        };
+        // Process invalidations in parallel with error isolation
+        const invalidationPromises = tenantIds.map(async (tenantId) => {
+            try {
+                if (distributed) {
+                    await this.invalidateTenantDistributed(tenantId, reason);
+                }
+                else {
+                    this.invalidateTenantLocally(tenantId, reason);
+                }
+                result.succeeded++;
+            }
+            catch (error) {
+                result.failed++;
+                result.errors.push({
+                    key: tenantId,
+                    error: error instanceof Error ? error : new Error(String(error))
+                });
+                if (this.options.enableDiagnostics) {
+                    console.error(`Failed to invalidate tenant ${tenantId}:`, error);
+                }
+            }
+        });
+        await Promise.allSettled(invalidationPromises);
+        return result;
+    }
+    /**
+     * Invalidate multiple service caches in batch
+     *
+     * @param serviceTypes - Array of service types to invalidate
+     * @param reason - Optional reason for invalidation
+     * @param distributed - Whether to propagate invalidation
+     * @returns Summary of the batch invalidation operation
+     *
+     * ```typescript
+     * const result = await container.invalidateServiceBatch(
+     *   ['database', 'api.users', 'api.auth'],
+     *   'Service configuration update'
+     * )
+     * ```
+     */
+    async invalidateServiceBatch(serviceTypes, reason, distributed = false) {
+        const result = {
+            total: serviceTypes.length,
+            succeeded: 0,
+            failed: 0,
+            errors: []
+        };
+        const invalidationPromises = serviceTypes.map(async (serviceType) => {
+            try {
+                if (distributed) {
+                    await this.invalidateServiceDistributed(serviceType, reason);
+                }
+                else {
+                    this.invalidateServiceLocally(serviceType, reason);
+                }
+                result.succeeded++;
+            }
+            catch (error) {
+                result.failed++;
+                result.errors.push({
+                    key: serviceType,
+                    error: error instanceof Error ? error : new Error(String(error))
+                });
+                if (this.options.enableDiagnostics) {
+                    console.error(`Failed to invalidate service ${serviceType}:`, error);
+                }
+            }
+        });
+        await Promise.allSettled(invalidationPromises);
+        return result;
+    }
+    // ═══════════════════════════════════════════════════════════════════════════
     // 📊 OBSERVABILITY & DEBUGGING
     // ═══════════════════════════════════════════════════════════════════════════
     /**
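`bootstrapBatch()` above walks the batch in slices of `concurrency`, gives every item its own try/catch/finally so a single failure cannot reject the whole chunk, and optionally races each bootstrap against a timeout. The chunking skeleton in isolation; `work` and the item type are placeholders, not the package's API:

```ts
// Chunked-concurrency pattern used by bootstrapBatch(), in isolation.
// `work` and the item type are placeholders for illustration.
type Settled<T> = { status: 'success'; value: T } | { status: 'error'; error: Error };

async function runChunked<I, T>(
  items: I[],
  work: (item: I) => Promise<T>,
  concurrency = 10,
): Promise<Settled<T>[]> {
  const results: Settled<T>[] = [];
  for (let i = 0; i < items.length; i += concurrency) {
    const chunk = items.slice(i, i + concurrency);
    // Each item catches its own error, so Promise.all never rejects here.
    const settled = await Promise.all(
      chunk.map(async (item): Promise<Settled<T>> => {
        try {
          return { status: 'success', value: await work(item) };
        } catch (err) {
          return { status: 'error', error: err instanceof Error ? err : new Error(String(err)) };
        }
      }),
    );
    results.push(...settled);
  }
  return results;
}

// Usage: three fake tenants, processed two at a time.
runChunked(['a', 'b', 'c'], async (t) => `bootstrapped ${t}`, 2).then(console.log);
```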
@@ -611,13 +814,14 @@ class Container {
      */
     getMetrics() {
         return {
-            cacheHits: this.metrics[Container.
-            cacheMisses: this.metrics[Container.
-            instanceCreations: this.metrics[Container.
-            contextAccesses: this.metrics[Container.
-
-
-
+            cacheHits: this.metrics[Container.METRIC.HITS],
+            cacheMisses: this.metrics[Container.METRIC.MISSES],
+            instanceCreations: this.metrics[Container.METRIC.CREATES],
+            contextAccesses: this.metrics[Container.METRIC.CONTEXTS],
+            proxyCacheHits: this.metrics[Container.METRIC.PROXIES],
+            initializerCacheHits: this.metrics[Container.METRIC.INIT_HITS],
+            batchOperations: this.metrics[Container.METRIC.BATCH_OPS],
+            batchErrors: this.metrics[Container.METRIC.BATCH_ERRORS]
         };
     }
     /**
@@ -626,7 +830,7 @@ class Container {
      */
     resetMetrics() {
         this.metrics.fill(0);
-        this.inc(Container.
+        this.inc(Container.METRIC.RESETS);
     }
     /**
      * Clear all service instance caches with proper disposal support
@@ -636,25 +840,37 @@ class Container {
         // Dispose instances before clearing to prevent memory leaks
         for (const manager of Object.values(this.managers)) {
             // Call dispose hooks if manager supports iteration
-            if (typeof manager.
-
-
-
-
-
-            }
-            catch (err) {
-                if (this.options.enableDiagnostics) {
-                    console.warn('Error disposing instance:', err);
-                }
+            if (typeof manager.values === 'function') {
+                const vals = manager.values?.() ?? [];
+                for (const inst of vals) {
+                    (0, helpers_1.safeDispose)(inst).catch(err => {
+                        if (this.options.enableDiagnostics) {
+                            console.warn('Error disposing service instance:', err);
                         }
-            }
+                    });
                 }
             }
-            manager.clear
+            manager.clear();
+        }
+        // Clear optimization caches as well
+        this.proxyCache.clear();
+        this.initializerCache.clear();
+        this.initializerPromises.clear();
+        // Note: contextProxyCache is a WeakMap and will be garbage collected automatically
+    }
+    /**
+     * Async version of clearCaches that properly awaits all disposal operations
+     * Use this method when you need to ensure all resources are fully disposed
+     * before continuing (e.g., during graceful shutdown)
+     */
+    async clearCachesAsync() {
+        // Dispose instances before clearing to prevent memory leaks
+        await Promise.all(Object.values(this.managers).flatMap(m => [...(m.values?.() ?? [])].map(helpers_1.safeDispose)));
+        // Clear all managers
+        for (const manager of Object.values(this.managers)) {
+            manager.clear();
         }
         // Clear optimization caches as well
-        this.pathCache.clear();
         this.proxyCache.clear();
         this.initializerCache.clear();
         this.initializerPromises.clear();
@@ -738,15 +954,12 @@ class Container {
         // Clear service instance caches for this tenant with disposal
         for (const manager of Object.values(this.managers)) {
             const instance = manager.get(tenantId);
-            if (instance
-
-                instance.dispose();
-            }
-            catch (err) {
+            if (instance) {
+                (0, helpers_1.safeDispose)(instance).catch(err => {
                     if (this.options.enableDiagnostics) {
                         console.warn('Error disposing tenant instance:', err);
                     }
-            }
+                });
             }
             manager.delete(tenantId);
         }
@@ -768,18 +981,14 @@ class Container {
         const manager = this.managers[serviceType];
         if (manager) {
             // Dispose instances before clearing
-            if (typeof manager.
-
-
-
-
+            if (typeof manager.values === 'function') {
+                const vals = manager.values?.() ?? [];
+                for (const inst of vals) {
+                    (0, helpers_1.safeDispose)(inst).catch(err => {
+                        if (this.options.enableDiagnostics) {
+                            console.warn('Error disposing service instance:', err);
                         }
-
-                if (this.options.enableDiagnostics) {
-                    console.warn('Error disposing service instance:', err);
-                }
-            }
-            }
+                    });
                 }
             }
             manager.clear();
@@ -794,6 +1003,17 @@ class Container {
         }
         this.clearCaches();
     }
+    /**
+     * Dispose all service instances across all tenants and clear caches
+     * Useful for graceful shutdown and testing cleanup
+     * Note: This also clears all caches to prevent resurrection of disposed services
+     */
+    async disposeAll() {
+        // First dispose all instances
+        await Promise.all(Object.values(this.managers).flatMap(manager => [...(manager.values?.() ?? [])].map(helpers_1.safeDispose)));
+        // Then clear all caches to prevent resurrection
+        await this.clearCachesAsync();
+    }
     /**
      * Get detailed cache statistics for each service
      * Shows how many instances are cached and the cache limits
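The new `disposeAll()` awaits disposal of every cached instance and then calls `clearCachesAsync()` so disposed services cannot be resurrected from a cache. A hedged shutdown sketch; the structural `DisposableContainer` interface and the SIGTERM wiring are assumptions for illustration, not something the package prescribes:

```ts
// Hypothetical graceful-shutdown wiring around the new disposeAll() API.
// The container is typed structurally here; in real code it would be the
// package's Container instance.
interface DisposableContainer {
  disposeAll(): Promise<void>;
}

function registerShutdown(container: DisposableContainer): void {
  process.once('SIGTERM', async () => {
    // Awaits disposal of all cached service instances, then clears the caches,
    // so nothing is resurrected while the process drains.
    await container.disposeAll();
    process.exit(0);
  });
}
```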
@@ -807,7 +1027,7 @@ class Container {
                 : managerAny.size || 0;
             stats[key] = {
                 size,
-                maxSize: this.options.cacheSize
+                maxSize: this.options.cacheSize
             };
         }
         return stats;
@@ -819,18 +1039,21 @@ class Container {
     getPerformanceStats() {
         const cacheStats = this.getCacheStats();
         const totalCacheSize = Object.values(cacheStats).reduce((sum, stat) => sum + stat.size, 0);
-        const hits = this.metrics[Container.
-        const misses = this.metrics[Container.
+        const hits = this.metrics[Container.METRIC.HITS];
+        const misses = this.metrics[Container.METRIC.MISSES];
+        const batchOps = this.metrics[Container.METRIC.BATCH_OPS];
+        const batchErrors = this.metrics[Container.METRIC.BATCH_ERRORS];
         return {
             ...this.getMetrics(),
             cacheStats,
             totalCacheSize,
-            pathCacheSize:
+            pathCacheSize: 0, // removed - no longer tracked
             proxyCacheSize: this.proxyCache.size,
             factoryCacheSize: this.factoryCache.size,
             initializerCacheSize: this.initializerCache.size,
             initializerPromisesSize: this.initializerPromises.size,
             cacheHitRatio: hits + misses > 0 ? hits / (hits + misses) : 0,
+            batchSuccessRatio: batchOps > 0 ? (batchOps - batchErrors) / batchOps : 0
         };
     }
     // ═══════════════════════════════════════════════════════════════════════════