@goatlab/node-backend 0.2.5 → 0.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/container/Container.d.ts +28 -24
- package/dist/container/Container.js +135 -162
- package/dist/container/Container.js.map +1 -1
- package/dist/container/helpers.d.ts +8 -0
- package/dist/container/helpers.js +22 -0
- package/dist/container/helpers.js.map +1 -1
- package/dist/container/types.d.ts +10 -0
- package/dist/server/bootstraps/getExpressTrpcApp.d.ts +5 -1
- package/dist/server/bootstraps/getExpressTrpcApp.js +154 -12
- package/dist/server/bootstraps/getExpressTrpcApp.js.map +1 -1
- package/dist/server/middleware/productionError.middleware.d.ts +16 -0
- package/dist/server/middleware/productionError.middleware.js +94 -0
- package/dist/server/middleware/productionError.middleware.js.map +1 -0
- package/dist/server/middleware/security.middleware.d.ts +28 -0
- package/dist/server/middleware/security.middleware.js +151 -0
- package/dist/server/middleware/security.middleware.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +13 -1
package/dist/container/Container.d.ts

@@ -83,6 +83,7 @@ export declare class Container<Defs extends Record<string, unknown>, TenantMetad
     /**
      * Service instance cache managers - one per service type
      * Each manager handles LRU caching for that specific service
+     * Lazy-allocated to save memory for unused services
      */
     private readonly managers;
     /**
@@ -109,11 +110,6 @@ export declare class Container<Defs extends Record<string, unknown>, TenantMetad
      * Prevents concurrent bootstrap for same tenant from running initializer twice
      */
     private readonly initializerPromises;
-    /**
-     * Path string cache: converts ['user', 'repo'] -> "user.repo"
-     * Optimized to avoid repeated string joins and array operations
-     */
-    private readonly pathCache;
     /**
      * Proxy object cache: reuses proxy objects for the same paths
      * Reduces memory allocation and improves performance
@@ -132,14 +128,14 @@ export declare class Container<Defs extends Record<string, unknown>, TenantMetad
     private readonly initializerCache;
     /**
      * High-performance metrics using Uint32Array for better JIT optimization
-     * Indices: [hits, misses, creates, ctx,
+     * Indices: [hits, misses, creates, ctx, proxy, initHits, resets]
      * Auto-wraps at 2^32 without overflow checks for maximum performance
      */
     private readonly metrics;
     /**
      * Metric indices for Uint32Array
      */
-    private static readonly
+    private static readonly METRIC;
     /**
      * Legacy overflow threshold for test compatibility
      * Note: With Uint32Array, overflow is handled automatically, but tests may mock this
@@ -166,25 +162,16 @@ export declare class Container<Defs extends Record<string, unknown>, TenantMetad
      */
     constructor(factories: Defs, initializer: (preload: PreloadStructure<Defs>, meta: TenantMetadata) => Promise<Partial<InstancesStructure<Defs>>>, options?: ContainerOptions);
     /**
-     *
-     *
-
-    private createManagers;
-    /**
-     * Optimized path caching that maintains flat key strings to avoid repeated joins
-     * Uses closure to keep pre-computed cache and final keys for maximum performance
+     * Get or create a cache manager for a service - lazy allocation
+     * Saves memory by only creating caches for services that are actually used
+     * Note: Type safety is enforced at compile time through generics, not runtime
      */
-    private
+    private getManager;
     /**
      * Pre-populate the factory cache by walking the entire factory tree
      * This eliminates the need for recursive object traversal during runtime
      */
     private preloadFactoryCache;
-    /**
-     * Pre-warm proxy cache with static builders for common paths
-     * This reduces proxy creation overhead during runtime access patterns
-     */
-    private prewarmProxyCache;
     /**
      * Recursive factory tree walker that builds the flat factory cache
      * Converts nested object structure to flat dot-notation keys
@@ -252,8 +239,15 @@ export declare class Container<Defs extends Record<string, unknown>, TenantMetad
      */
     private createContextProxy;
     /**
-     *
-     * Uses
+     * Simple string hash function for fallback tenant keys
+     * Uses djb2 algorithm - fast and good enough for cache keys
+     * Note: For very large metadata objects, consider upgrading to FNV-1a or crypto.createHash
+     * if collision resistance is critical. Current implementation is optimized for speed.
+     */
+    private simpleHash;
+    /**
+     * Create a stable cache key from tenant metadata
+     * Uses common tenant properties or hashed JSON as fallback
      */
     private createTenantCacheKey;
     /**
@@ -300,7 +294,6 @@ export declare class Container<Defs extends Record<string, unknown>, TenantMetad
         cacheMisses: number;
         instanceCreations: number;
         contextAccesses: number;
-        pathCacheHits: number;
         proxyCacheHits: number;
         initializerCacheHits: number;
     };
@@ -314,6 +307,12 @@ export declare class Container<Defs extends Record<string, unknown>, TenantMetad
     * Calls optional dispose() hooks to prevent memory leaks (sockets, db handles, etc.)
     */
    clearCaches(): void;
+    /**
+     * Async version of clearCaches that properly awaits all disposal operations
+     * Use this method when you need to ensure all resources are fully disposed
+     * before continuing (e.g., during graceful shutdown)
+     */
+    clearCachesAsync(): Promise<void>;
    /**
     * Setup distributed cache invalidation system
     * Connects to Redis pub/sub for coordinating cache invalidation across instances
@@ -345,6 +344,12 @@ export declare class Container<Defs extends Record<string, unknown>, TenantMetad
     * Invalidate all cached data (local only)
     */
    private invalidateAllLocally;
+    /**
+     * Dispose all service instances across all tenants and clear caches
+     * Useful for graceful shutdown and testing cleanup
+     * Note: This also clears all caches to prevent resurrection of disposed services
+     */
+    disposeAll(): Promise<void>;
    /**
     * Get detailed cache statistics for each service
     * Shows how many instances are cached and the cache limits
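The new `clearCachesAsync()` and `disposeAll()` declarations give the container an awaitable teardown path. A minimal sketch of how a consumer might wire them into a graceful shutdown handler follows; the `container` and `server` placeholders and the signal wiring are illustrative assumptions, not part of this package.

```ts
// Sketch only: assumes a Container instance built elsewhere with @goatlab/node-backend.
import http from 'node:http';

declare const container: { disposeAll(): Promise<void>; clearCachesAsync(): Promise<void> };
declare const server: http.Server;

async function shutdown(signal: string): Promise<void> {
  console.log(`Received ${signal}, shutting down...`);
  // Stop accepting new connections first.
  await new Promise<void>((resolve, reject) =>
    server.close(err => (err ? reject(err) : resolve())),
  );
  // disposeAll() awaits every dispose() hook and then clears all caches,
  // so sockets and db handles are released before the process exits.
  await container.disposeAll();
  process.exit(0);
}

process.once('SIGTERM', () => void shutdown('SIGTERM'));
process.once('SIGINT', () => void shutdown('SIGINT'));
```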
@@ -373,7 +378,6 @@ export declare class Container<Defs extends Record<string, unknown>, TenantMetad
         cacheMisses: number;
         instanceCreations: number;
         contextAccesses: number;
-        pathCacheHits: number;
         proxyCacheHits: number;
         initializerCacheHits: number;
     };
package/dist/container/Container.js

@@ -2,7 +2,6 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Container = void 0;
 const async_hooks_1 = require("async_hooks");
-const crypto_1 = require("crypto");
 const LruCache_1 = require("./LruCache");
 const helpers_1 = require("./helpers");
 // Instantiation helper moved to helpers.ts for better performance
@@ -96,8 +95,9 @@ class Container {
     /**
      * Service instance cache managers - one per service type
      * Each manager handles LRU caching for that specific service
+     * Lazy-allocated to save memory for unused services
      */
-    managers;
+    managers = {};
     /**
      * AsyncLocalStorage provides automatic tenant context isolation
      * Each async call tree gets its own isolated service instances
@@ -125,11 +125,7 @@ class Container {
     // ═══════════════════════════════════════════════════════════════════════════
     // ⚡ PERFORMANCE OPTIMIZATION CACHES
     // ═══════════════════════════════════════════════════════════════════════════
-    /**
-     * Path string cache: converts ['user', 'repo'] -> "user.repo"
-     * Optimized to avoid repeated string joins and array operations
-     */
-    pathCache = new Map();
+    // Path cache removed - direct string concatenation is faster
     /**
      * Proxy object cache: reuses proxy objects for the same paths
      * Reduces memory allocation and improves performance
@@ -151,22 +147,21 @@ class Container {
     // ═══════════════════════════════════════════════════════════════════════════
     /**
      * High-performance metrics using Uint32Array for better JIT optimization
-     * Indices: [hits, misses, creates, ctx,
+     * Indices: [hits, misses, creates, ctx, proxy, initHits, resets]
      * Auto-wraps at 2^32 without overflow checks for maximum performance
      */
-    metrics = new Uint32Array(
+    metrics = new Uint32Array(7);
     /**
      * Metric indices for Uint32Array
      */
-    static
+    static METRIC = {
         HITS: 0,
         MISSES: 1,
         CREATES: 2,
         CONTEXTS: 3,
-
-
-
-        RESETS: 7,
+        PROXIES: 4,
+        INIT_HITS: 5,
+        RESETS: 6,
     };
     /**
      * Legacy overflow threshold for test compatibility
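Because the counters live in a `Uint32Array`, each slot is an unsigned 32-bit integer: incrementing past 2^32 - 1 silently wraps to 0, which is why the hot path needs no overflow checks. A small stand-alone sketch of that behaviour (metric names are reused only for illustration):

```ts
// Uint32Array slots store values modulo 2^32, so increments never overflow.
const METRIC = { HITS: 0, MISSES: 1 } as const;
const metrics = new Uint32Array(2);

metrics[METRIC.HITS] = 0xffffffff;   // 2^32 - 1, the maximum slot value
metrics[METRIC.HITS] += 1;           // wraps around instead of overflowing
console.log(metrics[METRIC.HITS]);   // 0
metrics[METRIC.MISSES] += 5;
console.log(metrics[METRIC.MISSES]); // 5
```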
@@ -185,7 +180,7 @@ class Container {
         // Legacy test behavior - reset metrics when mock threshold reached
         this.resetMetrics();
         if (this.options.enableDiagnostics) {
-            const metricNames = ['cacheHits', 'cacheMisses', 'instanceCreations', 'contextAccesses', '
+            const metricNames = ['cacheHits', 'cacheMisses', 'instanceCreations', 'contextAccesses', 'proxyCacheHits', 'initializerCacheHits', 'resets'];
             console.warn(`Container metrics reset due to overflow protection. Metric '${metricNames[idx] || 'unknown'}' reached ${this.metrics[idx]}`);
         }
     }
@@ -220,12 +215,8 @@ class Container {
             distributedInvalidator: undefined,
             ...options,
         };
-        // Initialize cache managers for each service
-        this.managers = this.createManagers(this.factories, this.options.cacheSize);
         // Pre-cache factory lookups for better performance
         this.preloadFactoryCache();
-        // Pre-warm proxy cache with common paths to reduce runtime allocation
-        this.prewarmProxyCache();
         // Setup distributed cache invalidation if enabled
         this.setupDistributedInvalidation();
     }
@@ -233,47 +224,18 @@ class Container {
     // 🏭 CACHE MANAGER SETUP
     // ═══════════════════════════════════════════════════════════════════════════
     /**
-     *
-     *
+     * Get or create a cache manager for a service - lazy allocation
+     * Saves memory by only creating caches for services that are actually used
+     * Note: Type safety is enforced at compile time through generics, not runtime
      */
-
-
-        for (const [key, value] of Object.entries(defs)) {
-            const newPath = path.length === 0 ? [key] : [...path, key];
-            if (typeof value === 'function') {
-                // This is a factory function/constructor - create a cache for it
-                const flatKey = this.getOrCachePath(newPath);
-                managers[flatKey] = (0, LruCache_1.createServiceCache)(cacheSize);
-            }
-            else if (typeof value === 'object' && value !== null) {
-                // This is a nested object - recurse into it
-                const subManagers = this.createManagers(value, cacheSize, newPath);
-                Object.assign(managers, subManagers);
-            }
-        }
-        return managers;
+    getManager(key) {
+        return (this.managers[key] ??= (0, LruCache_1.createServiceCache)(this.options.cacheSize));
     }
+    // createManagers() removed - managers are created lazily via getManager()
     // ═══════════════════════════════════════════════════════════════════════════
     // ⚡ PERFORMANCE OPTIMIZATION HELPERS
     // ═══════════════════════════════════════════════════════════════════════════
-    /**
-     * Optimized path caching that maintains flat key strings to avoid repeated joins
-     * Uses closure to keep pre-computed cache and final keys for maximum performance
-     */
-    getOrCachePath(path) {
-        // Create cache key once and reuse the flat key computation
-        const stringPath = path.map(p => typeof p === 'symbol' ? p.toString() : p);
-        const cacheKey = stringPath.join('|'); // Cache key uses pipe separator
-        let cached = this.pathCache.get(cacheKey);
-        if (!cached) {
-            cached = stringPath.join('.'); // Final path uses dot separator
-            this.pathCache.set(cacheKey, cached);
-        }
-        else {
-            this.inc(Container.METRIC_INDICES.PATHS);
-        }
-        return cached;
-    }
+    // getOrCachePath() removed - direct string concatenation in createPreloadProxy()
     /**
      * Pre-populate the factory cache by walking the entire factory tree
      * This eliminates the need for recursive object traversal during runtime
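`getManager()` replaces the eager `createManagers()` tree walk with logical nullish assignment (`??=`): the LRU cache for a service key is allocated on first access and reused afterwards. A generic sketch of the same pattern, independent of this container's types (names are illustrative):

```ts
// Generic lazy-allocation sketch using ??= (ES2021+ / recent Node).
type Cache<V> = Map<string, V>;

class LazyCaches<V> {
  private readonly caches: Record<string, Cache<V>> = {};

  // The cache for `key` is only allocated the first time it is requested.
  get(key: string): Cache<V> {
    return (this.caches[key] ??= new Map<string, V>());
  }
}

const caches = new LazyCaches<number>();
caches.get('db.users').set('tenant-a', 1); // allocates the 'db.users' cache
caches.get('db.users').set('tenant-b', 2); // reuses it; unused keys never allocate anything
```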
@@ -281,25 +243,7 @@ class Container {
     preloadFactoryCache() {
         this.walkFactories(this.factories, []);
     }
-    /**
-     * Pre-warm proxy cache with static builders for common paths
-     * This reduces proxy creation overhead during runtime access patterns
-     */
-    prewarmProxyCache() {
-        // Pre-create proxies for all known factory paths to avoid runtime creation
-        for (const [path] of this.factoryCache.entries()) {
-            const pathParts = path.split('.');
-            // Pre-warm all parent paths (e.g., for "api.users", pre-warm "api")
-            for (let i = 1; i <= pathParts.length; i++) {
-                const subPath = pathParts.slice(0, i);
-                const pathKey = subPath.join('|');
-                if (!this.proxyCache.has(pathKey)) {
-                    // Create and cache the proxy for this path
-                    this.createPreloadProxy(subPath);
-                }
-            }
-        }
-    }
+    // prewarmProxyCache() removed - proxies are created lazily
     /**
      * Recursive factory tree walker that builds the flat factory cache
      * Converts nested object structure to flat dot-notation keys
@@ -309,7 +253,7 @@ class Container {
             const newPath = path.length === 0 ? [key] : [...path, key];
             if (typeof value === 'function') {
                 // Found a factory - cache it with its full path
-                const flatKey =
+                const flatKey = newPath.join('.');
                 this.factoryCache.set(flatKey, value);
             }
             else if (typeof value === 'object' && value !== null) {
@@ -350,46 +294,38 @@ class Container {
      *
      * This enables natural dot-notation access while maintaining lazy loading
      */
-    createPreloadProxy(path =
-
-
-
-        this.inc(Container.METRIC_INDICES.PROXIES);
-        return this.proxyCache.get(pathKey);
+    createPreloadProxy(path = '') {
+        if (this.proxyCache.has(path)) {
+            this.inc(Container.METRIC.PROXIES);
+            return this.proxyCache.get(path);
         }
         const proxy = new Proxy({}, // Empty target object - all access is intercepted
         {
             get: (_, prop) => {
-                const newPath = path
-                const
-                const factory = this.factoryCache.get(flatKey);
+                const newPath = path ? `${path}.${String(prop)}` : String(prop);
+                const factory = this.factoryCache.get(newPath);
                 if (factory) {
                     // Found a factory - return a function that creates/caches instances
-                    return (id, ...
-                        const mgr = this.
+                    return (id, ...args) => {
+                        const mgr = this.getManager(newPath);
                         let inst = mgr.get(id);
                         if (!inst) {
-
-                            this.inc(Container.
-
-                            inst = (0, helpers_1.instantiate)(factory, params);
+                            this.inc(Container.METRIC.MISSES);
+                            this.inc(Container.METRIC.CREATES);
+                            inst = (0, helpers_1.instantiate)(factory, args);
                             mgr.set(id, inst);
                         }
                         else {
-
-                            this.inc(Container.METRIC_INDICES.HITS);
+                            this.inc(Container.METRIC.HITS);
                         }
                         return inst;
                     };
                 }
-
-
-                return this.createPreloadProxy(newPath);
-            }
+                // No factory found - must be a nested path, return another proxy
+                return this.createPreloadProxy(newPath);
             },
         });
-
-        this.proxyCache.set(pathKey, proxy);
+        this.proxyCache.set(path, proxy);
         return proxy;
     }
     // ═══════════════════════════════════════════════════════════════════════════
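The rewritten `createPreloadProxy()` builds the flat key by plain string concatenation instead of going through the removed path cache, and the resulting proxy still gives dot-notation access to factories: the first argument is the per-tenant cache id, remaining arguments go to the factory. A hedged sketch of how an initializer might consume the preload object; the `db.users` service name and `UsersRepo` class are made up for illustration and not part of the package.

```ts
// Illustrative only: service shape and names are assumptions.
class UsersRepo {
  findAll(): string[] { return []; }
}

// Conceptually, `preload.db.users(tenantId)` resolves to:
//   1. flat key "db.users" built by string concatenation,
//   2. factory lookup in the flat factoryCache,
//   3. LRU get-or-create keyed by `tenantId`.
interface PreloadShape {
  db: {
    users: (tenantId: string) => UsersRepo;
  };
}

async function initializer(preload: PreloadShape, meta: { tenantId: string }) {
  const users = preload.db.users(meta.tenantId); // cached per tenant after the first call
  return { db: { users } };
}
```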
@@ -409,14 +345,21 @@ class Container {
      * More efficient for pure synchronous code paths
      */
     runWithContextSync(instances, tenantMetadata, fn) {
-        const
-        this.als.enterWith(
+        const prev = this.als.getStore();
+        this.als.enterWith({ instances, tenantMetadata });
         try {
             return fn();
         }
         finally {
-
-
+            if (prev) {
+                this.als.enterWith(prev);
+            }
+            else if ('disable' in this.als) {
+                // Node 20+ - fully clear context when no previous context
+                // The disable() method was added in Node.js 20.5.0 to properly clear ALS context
+                // In earlier versions, this check safely falls through without error
+                this.als.disable();
+            }
         }
     }
     /**
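`runWithContextSync()` now snapshots the previous AsyncLocalStorage store, calls `enterWith()` for the duration of the callback, and afterwards either restores the snapshot or clears the store so no stale tenant context leaks out. A minimal stand-alone sketch of that restore pattern, outside the container:

```ts
import { AsyncLocalStorage } from 'node:async_hooks';

interface Ctx { tenant: string }
const als = new AsyncLocalStorage<Ctx>();

// enterWith() has no automatic exit, so the previous store must be put back by hand.
function runSync<T>(ctx: Ctx, fn: () => T): T {
  const prev = als.getStore();
  als.enterWith(ctx);
  try {
    return fn();
  } finally {
    if (prev) {
      als.enterWith(prev); // restore the outer tenant context
    } else {
      als.disable();       // nothing to restore: clear the store entirely
    }
  }
}

runSync({ tenant: 'a' }, () => console.log(als.getStore()?.tenant)); // "a"
console.log(als.getStore()); // undefined - no context leaked out of runSync
```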
@@ -435,7 +378,7 @@ class Container {
         if (!store) {
             throw new Error("No tenant context available. Make sure you're running within a container context.");
         }
-        this.inc(Container.
+        this.inc(Container.METRIC.CONTEXTS);
         return this.createContextProxy(store.instances);
     }
     /**
@@ -458,7 +401,7 @@ class Container {
         const proxy = new Proxy(obj, {
             get: (target, prop) => {
                 const newPath = path.length === 0 ? [prop] : [...path, prop];
-                const value = target
+                const value = Reflect.get(target, prop);
                 if (value === undefined) {
                     // Check if property exists but is undefined (vs completely missing)
                     if (prop in target) {
@@ -470,9 +413,13 @@ class Container {
                 if (typeof prop === 'symbol') {
                     return undefined;
                 }
+                // Special case for 'then' to avoid Promise detection issues
+                if (prop === 'then') {
+                    return undefined;
+                }
                 // Property doesn't exist - provide helpful error message
-                const servicePath =
-                const available =
+                const servicePath = newPath.join('.');
+                const available = Reflect.ownKeys(target).map(String).join(', ');
                 throw new Error(`Service '${servicePath}' not initialized. ` +
                     `Available services: ${available}`);
             }
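The new `prop === 'then'` guard exists because `await` and `Promise.resolve()` probe objects for a callable `then` property; a proxy whose get trap answers every key would be treated as a thenable and awaiting it would never settle. Returning `undefined` for `'then'` keeps the proxy awaitable as a plain value. A small sketch of the difference, independent of this container:

```ts
// A proxy whose get trap answers every key looks like a "thenable" to await.
const greedy = new Proxy({}, { get: () => () => {} });
console.log(typeof (greedy as { then?: unknown }).then); // "function" - `await greedy` would hang

// Guarding 'then' (and symbols) keeps the proxy usable as a plain value.
const guarded = new Proxy({}, {
  get: (_t, prop) => {
    if (typeof prop === 'symbol' || prop === 'then') return undefined;
    return () => {};
  },
});

async function demo() {
  const value = await guarded;    // no then() probe succeeds, resolves immediately
  console.log(value === guarded); // true
}
void demo();
```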
@@ -508,26 +455,37 @@ class Container {
     // 🚀 BOOTSTRAP & LIFECYCLE
     // ═══════════════════════════════════════════════════════════════════════════
     /**
-     *
-     * Uses
+     * Simple string hash function for fallback tenant keys
+     * Uses djb2 algorithm - fast and good enough for cache keys
+     * Note: For very large metadata objects, consider upgrading to FNV-1a or crypto.createHash
+     * if collision resistance is critical. Current implementation is optimized for speed.
+     */
+    simpleHash(str) {
+        let hash = 5381;
+        for (let i = 0; i < str.length; i++) {
+            hash = ((hash << 5) + hash) + str.charCodeAt(i);
+        }
+        return (hash >>> 0).toString(36);
+    }
+    /**
+     * Create a stable cache key from tenant metadata
+     * Uses common tenant properties or hashed JSON as fallback
      */
     createTenantCacheKey(meta) {
-
-
-
-
+        const m = meta;
+        if (m.id || m.tenantId || m.name) {
+            return `tenant:${m.id ?? m.tenantId ?? m.name}`;
+        }
+        // Fallback to hashed JSON for complex metadata
         try {
-            const
-
-            const hash = (0, crypto_1.createHash)('sha1').update(sortedMeta).digest('base64url');
-            return `tenant:${tenantId}:${hash}`;
+            const json = JSON.stringify(meta);
+            return `tenant:hash:${this.simpleHash(json)}`;
         }
         catch {
-            //
-            return `tenant:${
+            // Last resort for circular refs
+            return `tenant:ts:${Date.now()}`;
         }
     }
-    // Simple hash function removed in favor of crypto.createHash for better collision resistance
     /**
      * Get or create initialized instances for a tenant with race condition protection
      * Uses both result caching and inflight promise deduplication
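`createTenantCacheKey()` now prefers a stable identifier (`id`, `tenantId`, or `name`) and only falls back to hashing the JSON-serialized metadata with the in-file djb2 `simpleHash()`; the `crypto.createHash('sha1')` path and its `crypto` import are gone. A stand-alone restatement of the same key derivation, with hypothetical metadata values:

```ts
// Re-statement of the new key logic in TypeScript; metadata values are made up.
function simpleHash(str: string): string {
  let hash = 5381;                                   // djb2 seed
  for (let i = 0; i < str.length; i++) {
    hash = ((hash << 5) + hash) + str.charCodeAt(i); // hash * 33 + charCode
  }
  return (hash >>> 0).toString(36);                  // force unsigned, compact base-36
}

function tenantCacheKey(meta: Record<string, unknown>): string {
  const m = meta as { id?: string; tenantId?: string; name?: string };
  if (m.id || m.tenantId || m.name) {
    return `tenant:${m.id ?? m.tenantId ?? m.name}`;
  }
  try {
    return `tenant:hash:${simpleHash(JSON.stringify(meta))}`;
  } catch {
    return `tenant:ts:${Date.now()}`;                // circular metadata: non-stable last resort
  }
}

console.log(tenantCacheKey({ id: 'acme' }));            // "tenant:acme"
console.log(tenantCacheKey({ region: 'eu', tier: 2 })); // "tenant:hash:<djb2 of the JSON>"
```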
@@ -537,7 +495,7 @@ class Container {
         // Check if we already have initialized instances for this tenant
         const cachedInstances = this.initializerCache.get(cacheKey);
         if (cachedInstances) {
-            this.inc(Container.
+            this.inc(Container.METRIC.INIT_HITS);
             return cachedInstances;
         }
         // Check if initialization is already in progress for this tenant
@@ -611,13 +569,12 @@ class Container {
      */
     getMetrics() {
         return {
-            cacheHits: this.metrics[Container.
-            cacheMisses: this.metrics[Container.
-            instanceCreations: this.metrics[Container.
-            contextAccesses: this.metrics[Container.
-
-
-            initializerCacheHits: this.metrics[Container.METRIC_INDICES.INIT_HITS],
+            cacheHits: this.metrics[Container.METRIC.HITS],
+            cacheMisses: this.metrics[Container.METRIC.MISSES],
+            instanceCreations: this.metrics[Container.METRIC.CREATES],
+            contextAccesses: this.metrics[Container.METRIC.CONTEXTS],
+            proxyCacheHits: this.metrics[Container.METRIC.PROXIES],
+            initializerCacheHits: this.metrics[Container.METRIC.INIT_HITS],
         };
     }
     /**
@@ -626,7 +583,7 @@ class Container {
      */
     resetMetrics() {
         this.metrics.fill(0);
-        this.inc(Container.
+        this.inc(Container.METRIC.RESETS);
     }
     /**
      * Clear all service instance caches with proper disposal support
@@ -636,25 +593,37 @@ class Container {
         // Dispose instances before clearing to prevent memory leaks
         for (const manager of Object.values(this.managers)) {
             // Call dispose hooks if manager supports iteration
-            if (typeof manager.
-
-
-
-
-
-            }
-            catch (err) {
-                if (this.options.enableDiagnostics) {
-                    console.warn('Error disposing instance:', err);
-                }
+            if (typeof manager.values === 'function') {
+                const vals = manager.values?.() ?? [];
+                for (const inst of vals) {
+                    (0, helpers_1.safeDispose)(inst).catch(err => {
+                        if (this.options.enableDiagnostics) {
+                            console.warn('Error disposing service instance:', err);
                         }
-            }
+                    });
                 }
             }
-            manager.clear
+            manager.clear();
+        }
+        // Clear optimization caches as well
+        this.proxyCache.clear();
+        this.initializerCache.clear();
+        this.initializerPromises.clear();
+        // Note: contextProxyCache is a WeakMap and will be garbage collected automatically
+    }
+    /**
+     * Async version of clearCaches that properly awaits all disposal operations
+     * Use this method when you need to ensure all resources are fully disposed
+     * before continuing (e.g., during graceful shutdown)
+     */
+    async clearCachesAsync() {
+        // Dispose instances before clearing to prevent memory leaks
+        await Promise.all(Object.values(this.managers).flatMap(m => [...(m.values?.() ?? [])].map(helpers_1.safeDispose)));
+        // Clear all managers
+        for (const manager of Object.values(this.managers)) {
+            manager.clear();
         }
         // Clear optimization caches as well
-        this.pathCache.clear();
         this.proxyCache.clear();
         this.initializerCache.clear();
         this.initializerPromises.clear();
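Note the difference between the two teardown paths in this hunk: the synchronous `clearCaches()` fires `safeDispose()` and only attaches a `.catch()` handler, so disposal may still be in flight when it returns, while `clearCachesAsync()` collects every disposal promise and awaits them with `Promise.all` before clearing. A hedged usage sketch; the `container` variable and the `afterEach` test hook are placeholders, not part of the package.

```ts
// Sketch: choosing between the sync and async cache-clearing paths.
declare const container: {
  clearCaches(): void;
  clearCachesAsync(): Promise<void>;
};
declare function afterEach(fn: () => void | Promise<void>): void;

// Best-effort reset: dispose() calls run in the background and failures are only logged,
// so some resources may still be closing when this returns.
container.clearCaches();

// Deterministic teardown (tests, graceful shutdown): every dispose() is awaited
// before the caches are cleared, so handles and pools are really gone.
afterEach(async () => {
  await container.clearCachesAsync();
});
```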
@@ -738,15 +707,12 @@ class Container {
         // Clear service instance caches for this tenant with disposal
         for (const manager of Object.values(this.managers)) {
             const instance = manager.get(tenantId);
-            if (instance
-
-                instance.dispose();
-            }
-            catch (err) {
+            if (instance) {
+                (0, helpers_1.safeDispose)(instance).catch(err => {
                     if (this.options.enableDiagnostics) {
                         console.warn('Error disposing tenant instance:', err);
                     }
-            }
+                });
             }
             manager.delete(tenantId);
         }
@@ -768,18 +734,14 @@ class Container {
         const manager = this.managers[serviceType];
         if (manager) {
             // Dispose instances before clearing
-            if (typeof manager.
-
-
-
-
-
-            catch (err) {
-                if (this.options.enableDiagnostics) {
-                    console.warn('Error disposing service instance:', err);
-                }
+            if (typeof manager.values === 'function') {
+                const vals = manager.values?.() ?? [];
+                for (const inst of vals) {
+                    (0, helpers_1.safeDispose)(inst).catch(err => {
+                        if (this.options.enableDiagnostics) {
+                            console.warn('Error disposing service instance:', err);
                         }
-            }
+                    });
                 }
             }
             manager.clear();
@@ -794,6 +756,17 @@ class Container {
         }
         this.clearCaches();
     }
+    /**
+     * Dispose all service instances across all tenants and clear caches
+     * Useful for graceful shutdown and testing cleanup
+     * Note: This also clears all caches to prevent resurrection of disposed services
+     */
+    async disposeAll() {
+        // First dispose all instances
+        await Promise.all(Object.values(this.managers).flatMap(manager => [...(manager.values?.() ?? [])].map(helpers_1.safeDispose)));
+        // Then clear all caches to prevent resurrection
+        await this.clearCachesAsync();
+    }
     /**
      * Get detailed cache statistics for each service
      * Shows how many instances are cached and the cache limits
@@ -819,13 +792,13 @@ class Container {
     getPerformanceStats() {
         const cacheStats = this.getCacheStats();
         const totalCacheSize = Object.values(cacheStats).reduce((sum, stat) => sum + stat.size, 0);
-        const hits = this.metrics[Container.
-        const misses = this.metrics[Container.
+        const hits = this.metrics[Container.METRIC.HITS];
+        const misses = this.metrics[Container.METRIC.MISSES];
         return {
             ...this.getMetrics(),
             cacheStats,
             totalCacheSize,
-            pathCacheSize:
+            pathCacheSize: 0, // removed - no longer tracked
             proxyCacheSize: this.proxyCache.size,
             factoryCacheSize: this.factoryCache.size,
             initializerCacheSize: this.initializerCache.size,