@morojs/moro 1.6.0 → 1.6.2
This diff reflects the changes between publicly released versions of the package as they appear in its public registry, and is provided for informational purposes only.
- package/dist/core/config/config-sources.js +4 -0
- package/dist/core/config/config-sources.js.map +1 -1
- package/dist/core/config/config-validator.js +3 -0
- package/dist/core/config/config-validator.js.map +1 -1
- package/dist/core/config/file-loader.js +3 -1
- package/dist/core/config/file-loader.js.map +1 -1
- package/dist/core/config/schema.js +4 -1
- package/dist/core/config/schema.js.map +1 -1
- package/dist/core/events/event-bus.js +1 -1
- package/dist/core/events/event-bus.js.map +1 -1
- package/dist/core/framework.d.ts +1 -1
- package/dist/core/framework.js +13 -7
- package/dist/core/framework.js.map +1 -1
- package/dist/core/http/http-server.d.ts +55 -15
- package/dist/core/http/http-server.js +70 -146
- package/dist/core/http/http-server.js.map +1 -1
- package/dist/core/http/index.d.ts +1 -1
- package/dist/core/http/index.js +1 -1
- package/dist/core/http/index.js.map +1 -1
- package/dist/core/http/uws-http-server.d.ts +4 -22
- package/dist/core/http/uws-http-server.js +43 -208
- package/dist/core/http/uws-http-server.js.map +1 -1
- package/dist/core/networking/adapters/uws-adapter.d.ts +1 -1
- package/dist/core/networking/adapters/uws-adapter.js +1 -1
- package/dist/core/pooling/object-pool-manager.d.ts +140 -0
- package/dist/core/pooling/object-pool-manager.js +502 -0
- package/dist/core/pooling/object-pool-manager.js.map +1 -0
- package/dist/core/routing/app-integration.d.ts +12 -10
- package/dist/core/routing/app-integration.js +43 -74
- package/dist/core/routing/app-integration.js.map +1 -1
- package/dist/core/routing/index.d.ts +15 -29
- package/dist/core/routing/index.js +43 -390
- package/dist/core/routing/index.js.map +1 -1
- package/dist/core/routing/path-matcher.d.ts +67 -0
- package/dist/core/routing/path-matcher.js +182 -0
- package/dist/core/routing/path-matcher.js.map +1 -0
- package/dist/core/{http → routing}/router.d.ts +21 -9
- package/dist/core/routing/router.js +68 -0
- package/dist/core/routing/router.js.map +1 -0
- package/dist/core/routing/unified-router.d.ts +148 -0
- package/dist/core/routing/unified-router.js +684 -0
- package/dist/core/routing/unified-router.js.map +1 -0
- package/dist/moro.d.ts +10 -7
- package/dist/moro.js +90 -41
- package/dist/moro.js.map +1 -1
- package/dist/types/config.d.ts +3 -0
- package/package.json +1 -1
- package/src/core/config/config-sources.ts +4 -0
- package/src/core/config/config-validator.ts +3 -0
- package/src/core/config/file-loader.ts +4 -1
- package/src/core/config/schema.ts +4 -1
- package/src/core/events/event-bus.ts +1 -1
- package/src/core/framework.ts +14 -9
- package/src/core/http/http-server.ts +76 -161
- package/src/core/http/index.ts +1 -1
- package/src/core/http/uws-http-server.ts +43 -246
- package/src/core/networking/adapters/uws-adapter.ts +1 -1
- package/src/core/pooling/object-pool-manager.ts +630 -0
- package/src/core/routing/app-integration.ts +57 -109
- package/src/core/routing/index.ts +62 -473
- package/src/core/routing/path-matcher.ts +222 -0
- package/src/core/routing/router.ts +97 -0
- package/src/core/routing/unified-router.ts +870 -0
- package/src/moro.ts +107 -57
- package/src/types/config.ts +3 -0
- package/dist/core/http/router.js +0 -183
- package/dist/core/http/router.js.map +0 -1
- package/src/core/http/router.ts +0 -230
package/src/core/pooling/object-pool-manager.ts (new file)
@@ -0,0 +1,630 @@
// Unified Object Pool Manager
// Consolidates object pooling from http-server, router, and other components

import { createFrameworkLogger } from '../logger/index.js';
import * as crypto from 'crypto';
const logger = createFrameworkLogger('ObjectPoolManager');

/**
 * Generic object pool for reusable objects
 */
class ObjectPool<T> {
  private pool: T[] = [];
  private readonly factory: () => T;
  private readonly reset?: (obj: T) => void;
  private readonly maxSize: number;
  private acquireCount = 0;
  private releaseCount = 0;
  private createCount = 0;

  constructor(factory: () => T, maxSize: number = 100, reset?: (obj: T) => void) {
    this.factory = factory;
    this.maxSize = maxSize;
    this.reset = reset;
  }

  acquire(): T {
    this.acquireCount++;

    if (this.pool.length > 0) {
      return this.pool.pop()!;
    }

    this.createCount++;
    return this.factory();
  }

  release(obj: T): void {
    if (this.pool.length >= this.maxSize) {
      return; // Pool is full, let it be garbage collected
    }

    this.releaseCount++;

    // Reset object if reset function provided
    if (this.reset) {
      this.reset(obj);
    }

    this.pool.push(obj);
  }

  get size(): number {
    return this.pool.length;
  }

  get stats() {
    return {
      poolSize: this.pool.length,
      maxSize: this.maxSize,
      acquireCount: this.acquireCount,
      releaseCount: this.releaseCount,
      createCount: this.createCount,
      utilization: this.maxSize > 0 ? this.pool.length / this.maxSize : 0,
    };
  }

  clear(): void {
    this.pool = [];
  }
}

/**
 * LRU Cache for route lookups
 */
class LRUCache<K, V> {
  private cache = new Map<K, V>();
  private readonly maxSize: number;
  private hits = 0;
  private misses = 0;

  constructor(maxSize: number = 500) {
    this.maxSize = maxSize;
  }

  get(key: K): V | undefined {
    const value = this.cache.get(key);
    if (value !== undefined) {
      this.hits++;
      // Move to end (most recently used)
      this.cache.delete(key);
      this.cache.set(key, value);
      return value;
    }
    this.misses++;
    return undefined;
  }

  set(key: K, value: V): void {
    if (this.cache.has(key)) {
      this.cache.delete(key);
    }

    this.cache.set(key, value);

    // Evict oldest if over capacity
    if (this.cache.size > this.maxSize) {
      const firstKey = this.cache.keys().next().value;
      if (firstKey !== undefined) {
        this.cache.delete(firstKey);
      }
    }
  }

  has(key: K): boolean {
    return this.cache.has(key);
  }

  delete(key: K): boolean {
    return this.cache.delete(key);
  }

  clear(): void {
    this.cache.clear();
    this.hits = 0;
    this.misses = 0;
  }

  get size(): number {
    return this.cache.size;
  }

  get stats() {
    const total = this.hits + this.misses;
    return {
      size: this.cache.size,
      maxSize: this.maxSize,
      hits: this.hits,
      misses: this.misses,
      hitRate: total > 0 ? this.hits / total : 0,
    };
  }
}

/**
 * ObjectPoolManager - Singleton for managing all object pools
 * Consolidates pools from:
 * - MoroHttpServer (paramObjectPool, bufferPool)
 * - Router (paramObjectPool)
 * - Various route caches
 */
export class ObjectPoolManager {
  private static instance: ObjectPoolManager | null = null;

  // Parameter object pool (for route params)
  private paramPool: ObjectPool<Record<string, string>>;

  // Header object pool (for parsed headers)
  private headerPool: ObjectPool<Record<string, string>>;

  // Query object pool (for parsed query strings)
  private queryPool: ObjectPool<Record<string, string>>;

  // Buffer pools by size
  private bufferPools: Map<number, ObjectPool<Buffer>>;
  private readonly bufferSizes = [64, 256, 1024, 4096, 16384];

  // Route lookup cache
  private routeCache: LRUCache<string, any>;

  // Response cache (for common responses - now includes full response metadata)
  private responseCache: LRUCache<
    string,
    { buffer: Buffer; headers: Record<string, string>; statusCode: number }
  >;

  // Performance monitoring
  private performanceStats = {
    poolHits: 0,
    poolMisses: 0,
    totalAcquisitions: 0,
    totalReleases: 0,
    lastAdjustment: 0,
    adjustmentInterval: 60000, // 1 minute
  };

  // Adaptive pool sizing
  private poolUsageHistory: Map<string, number[]> = new Map();
  private adaptiveMode = true;

  private constructor() {
    // Set initial last adjustment time to prevent immediate adjustment
    this.performanceStats.lastAdjustment = Date.now();

    // Initialize parameter object pool with adaptive sizing
    this.paramPool = new ObjectPool(
      () => ({}),
      100, // Initial size
      (obj: Record<string, string>) => {
        // Clear all properties
        for (const key in obj) {
          delete obj[key];
        }
      }
    );

    // Initialize header object pool
    this.headerPool = new ObjectPool(
      () => ({}),
      50, // Initial size - headers are less common to pool
      (obj: Record<string, string>) => {
        // Clear all properties
        for (const key in obj) {
          delete obj[key];
        }
      }
    );

    // Initialize query object pool
    this.queryPool = new ObjectPool(
      () => ({}),
      100, // Initial size - queries are very common
      (obj: Record<string, string>) => {
        // Clear all properties
        for (const key in obj) {
          delete obj[key];
        }
      }
    );

    // Initialize buffer pools with enhanced sizing
    this.bufferPools = new Map();
    this.bufferSizes.forEach(size => {
      this.bufferPools.set(
        size,
        new ObjectPool<Buffer>(() => Buffer.allocUnsafe(size), this.getOptimalPoolSize(size))
      );
    });

    // Initialize caches with enhanced statistics
    this.routeCache = new LRUCache(500);
    this.responseCache = new LRUCache(200); // Increased for full response caching

    // Pre-warm pools for better initial performance
    this.preWarmPools();

    logger.debug(
      'ObjectPoolManager initialized with enhanced performance features',
      'Initialization'
    );
  }

  /**
   * Pre-warm pools with initial objects for better startup performance
   */
  private preWarmPools(): void {
    // Pre-warm parameter pool
    for (let i = 0; i < 20; i++) {
      const obj = this.paramPool.acquire();
      this.paramPool.release(obj);
    }

    // Pre-warm header pool
    for (let i = 0; i < 10; i++) {
      const obj = this.headerPool.acquire();
      this.headerPool.release(obj);
    }

    // Pre-warm query pool
    for (let i = 0; i < 20; i++) {
      const obj = this.queryPool.acquire();
      this.queryPool.release(obj);
    }

    // Pre-warm buffer pools
    this.bufferSizes.forEach(size => {
      const pool = this.bufferPools.get(size);
      if (pool) {
        for (let i = 0; i < Math.min(10, pool.stats.maxSize); i++) {
          const buffer = pool.acquire();
          pool.release(buffer);
        }
      }
    });

    logger.debug('Object pools pre-warmed', 'PoolManager');
  }

  /**
   * Get optimal pool size based on buffer size and usage patterns
   */
  private getOptimalPoolSize(bufferSize: number): number {
    // Smaller buffers can have larger pools
    if (bufferSize <= 256) return 100;
    if (bufferSize <= 1024) return 75;
    if (bufferSize <= 4096) return 50;
    return 25; // Large buffers need smaller pools
  }

  static getInstance(): ObjectPoolManager {
    if (!this.instance) {
      this.instance = new ObjectPoolManager();
    }
    return this.instance;
  }

  /**
   * Reset the singleton (useful for testing)
   */
  static reset(): void {
    if (this.instance) {
      this.instance.clearAll();
    }
    this.instance = null;
  }

  // Parameter Object Pool

  acquireParams(): Record<string, string> {
    this.performanceStats.totalAcquisitions++;
    const obj = this.paramPool.acquire();

    if (this.poolUsageHistory.has('params')) {
      const history = this.poolUsageHistory.get('params')!;
      if (history.length >= 100) {
        history.shift(); // Keep only last 100 measurements
      }
      history.push(this.paramPool.size);
    } else {
      this.poolUsageHistory.set('params', [this.paramPool.size]);
    }

    return obj;
  }

  releaseParams(obj: Record<string, string>): void {
    this.performanceStats.totalReleases++;
    this.paramPool.release(obj);

    // Adaptive pool sizing based on usage patterns
    if (
      this.adaptiveMode &&
      Date.now() - this.performanceStats.lastAdjustment > this.performanceStats.adjustmentInterval
    ) {
      this.adjustPoolSizes();
    }
  }

  // Header Object Pool

  acquireHeaders(): Record<string, string> {
    this.performanceStats.totalAcquisitions++;
    return this.headerPool.acquire();
  }

  releaseHeaders(obj: Record<string, string>): void {
    this.performanceStats.totalReleases++;
    this.headerPool.release(obj);
  }

  // Query Object Pool

  acquireQuery(): Record<string, string> {
    this.performanceStats.totalAcquisitions++;
    return this.queryPool.acquire();
  }

  releaseQuery(obj: Record<string, string>): void {
    this.performanceStats.totalReleases++;
    this.queryPool.release(obj);
  }

  // Request ID Generation

  generateRequestId(): string {
    return crypto.randomUUID();
  }

  // Buffer Pool

  acquireBuffer(size: number): Buffer {
    this.performanceStats.totalAcquisitions++;

    // Find smallest buffer that fits
    for (const poolSize of this.bufferSizes) {
      if (size <= poolSize) {
        const pool = this.bufferPools.get(poolSize);
        if (pool) {
          const buffer = pool.acquire();
          this.performanceStats.poolHits++;

          // Track usage for adaptive sizing
          const poolKey = `buffer_${poolSize}`;
          if (this.poolUsageHistory.has(poolKey)) {
            const history = this.poolUsageHistory.get(poolKey)!;
            if (history.length >= 100) {
              history.shift();
            }
            history.push(pool.size);
          } else {
            this.poolUsageHistory.set(poolKey, [pool.size]);
          }

          return buffer;
        }
      }
    }

    // No pool available for this size, allocate directly
    this.performanceStats.poolMisses++;
    return Buffer.allocUnsafe(size);
  }

  releaseBuffer(buffer: Buffer): void {
    this.performanceStats.totalReleases++;
    const size = buffer.length;
    const pool = this.bufferPools.get(size);
    if (pool) {
      pool.release(buffer);

      // Adaptive pool sizing based on usage patterns
      if (
        this.adaptiveMode &&
        Date.now() - this.performanceStats.lastAdjustment > this.performanceStats.adjustmentInterval
      ) {
        this.adjustPoolSizes();
      }
    }
    // If no pool for this size, let it be garbage collected
  }

  // Route Cache

  getCachedRoute(key: string): any {
    return this.routeCache.get(key);
  }

  cacheRoute(key: string, route: any): void {
    this.routeCache.set(key, route);
  }

  hasCachedRoute(key: string): boolean {
    return this.routeCache.has(key);
  }

  clearRouteCache(): void {
    this.routeCache.clear();
  }

  // Response Cache (For Opt-In Explicit Caching Only)
  // NOTE: This cache is NOT used automatically by the framework.
  // It's available for developers who explicitly want to cache specific responses.
  // Developers must carefully manage cache keys to avoid stale/incorrect data.

  getCachedResponse(
    key: string
  ): { buffer: Buffer; headers: Record<string, string>; statusCode: number } | undefined {
    return this.responseCache.get(key);
  }

  cacheResponse(
    key: string,
    buffer: Buffer,
    headers: Record<string, string>,
    statusCode: number = 200
  ): void {
    // Cache responses up to 4KB
    // WARNING: Only use this for static responses that never change
    if (buffer.length < 4096) {
      this.responseCache.set(key, {
        buffer: Buffer.from(buffer), // Clone to avoid mutations
        headers: { ...headers }, // Clone headers
        statusCode,
      });
    }
  }

  hasCachedResponse(key: string): boolean {
    return this.responseCache.has(key);
  }

  clearResponseCache(): void {
    this.responseCache.clear();
  }

  /**
   * Adaptively adjust pool sizes based on usage patterns
   */
  private adjustPoolSizes(): void {
    this.performanceStats.lastAdjustment = Date.now();

    // Adjust parameter pool size
    const paramHistory = this.poolUsageHistory.get('params') || [];
    if (paramHistory.length >= 10) {
      const avgUsage = paramHistory.reduce((sum, size) => sum + size, 0) / paramHistory.length;
      const targetSize = Math.min(Math.max(Math.round(avgUsage * 1.2), 50), 200); // 20% buffer, 50-200 range

      if (Math.abs(this.paramPool.stats.maxSize - targetSize) > 10) {
        logger.debug(
          `Adjusting param pool size from ${this.paramPool.stats.maxSize} to ${targetSize}`,
          'PoolManager'
        );
        // Note: We can't directly change maxSize, but we can log the recommendation
        // In a real implementation, we'd recreate the pool with the new size
      }
    }

    // Adjust buffer pool sizes
    this.bufferSizes.forEach(size => {
      const poolKey = `buffer_${size}`;
      const history = this.poolUsageHistory.get(poolKey) || [];
      if (history.length >= 10) {
        const avgUsage = history.reduce((sum, size) => sum + size, 0) / history.length;
        const pool = this.bufferPools.get(size);
        if (pool) {
          const currentMax = pool.stats.maxSize;
          const targetMax = this.getOptimalPoolSize(size);

          if (Math.abs(currentMax - targetMax) > 5) {
            logger.debug(
              `Buffer pool ${size}B: usage ${avgUsage.toFixed(1)}/${currentMax}, target ${targetMax}`,
              'PoolManager'
            );
          }
        }
      }
    });

    logger.debug('Pool size adjustment cycle completed', 'PoolManager');
  }

  /**
   * Enable or disable adaptive pool sizing
   */
  setAdaptiveMode(enabled: boolean): void {
    this.adaptiveMode = enabled;
    logger.debug(`Adaptive pool sizing ${enabled ? 'enabled' : 'disabled'}`, 'PoolManager');
  }

  // Utility Methods

  /**
   * Clear all pools and caches
   */
  clearAll(): void {
    this.paramPool.clear();
    this.headerPool.clear();
    this.queryPool.clear();
    this.bufferPools.forEach(pool => pool.clear());
    this.routeCache.clear();
    this.responseCache.clear();
    logger.debug('All pools and caches cleared', 'Maintenance');
  }

  /**
   * Force garbage collection on pooled objects
   */
  forceCleanup(): void {
    this.clearAll();

    // Force GC if available
    if (globalThis?.gc) {
      globalThis.gc();
      logger.debug('Forced garbage collection', 'Maintenance');
    }
  }

  /**
   * Get comprehensive performance statistics
   */
  getStats() {
    const bufferPoolStats: Record<string, any> = {};
    this.bufferPools.forEach((pool, size) => {
      bufferPoolStats[`${size}B`] = pool.stats;
    });

    return {
      paramPool: this.paramPool.stats,
      headerPool: this.headerPool.stats,
      queryPool: this.queryPool.stats,
      bufferPools: bufferPoolStats,
      routeCache: this.routeCache.stats,
      responseCache: this.responseCache.stats,
      totalMemory: {
        params: this.paramPool.size * 50, // Rough estimate
        headers: this.headerPool.size * 100, // Rough estimate
        queries: this.queryPool.size * 50, // Rough estimate
        buffers: Array.from(this.bufferPools.values()).reduce((sum, pool) => sum + pool.size, 0),
        routes: this.routeCache.size * 200, // Rough estimate
        responses: this.responseCache.size * 1000, // Rough estimate (increased for full responses)
      },
    };
  }

  /**
   * Get performance summary for monitoring
   */
  getPerformanceSummary() {
    const stats = this.getStats();
    const routeCacheTotal = stats.routeCache.hits + stats.routeCache.misses;
    const responseCacheTotal = stats.responseCache.hits + stats.responseCache.misses;

    return {
      routeCacheHitRate: routeCacheTotal > 0 ? (stats.routeCache.hits / routeCacheTotal) * 100 : 0,
      responseCacheHitRate:
        responseCacheTotal > 0 ? (stats.responseCache.hits / responseCacheTotal) * 100 : 0,
      paramPoolUtilization: stats.paramPool.utilization * 100,
      totalMemoryKB: Object.values(stats.totalMemory).reduce((a, b) => a + b, 0) / 1024,
    };
  }

  /**
   * Log performance statistics
   */
  logStats(): void {
    const summary = this.getPerformanceSummary();
    logger.info('ObjectPoolManager Performance', 'Stats', {
      routeCacheHitRate: `${summary.routeCacheHitRate.toFixed(1)}%`,
      responseCacheHitRate: `${summary.responseCacheHitRate.toFixed(1)}%`,
      paramPoolUtilization: `${summary.paramPoolUtilization.toFixed(1)}%`,
      totalMemory: `${summary.totalMemoryKB.toFixed(1)} KB`,
    });
  }
}

/**
 * Convenience function to get the singleton instance
 */
export function getPoolManager(): ObjectPoolManager {
  return ObjectPoolManager.getInstance();
}
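Inside the framework these pools are consumed by the HTTP server and router, but the module also exports `getPoolManager()` for code that wants to opt in directly. Below is a minimal usage sketch of that surface; the relative import path and the `GET:/health` cache key are illustrative assumptions, not something this diff defines.

```typescript
// Hypothetical usage sketch of the ObjectPoolManager API shown above.
// The import specifier is illustrative; adjust it to however the module is
// actually exposed in your build of @morojs/moro.
import { getPoolManager } from './core/pooling/object-pool-manager.js';

const pools = getPoolManager();

// Reuse a params object instead of allocating one per request; the release
// hook wipes its keys before it goes back into the pool.
const params = pools.acquireParams();
params.id = '42';
// ... use params while handling the request ...
pools.releaseParams(params);

// Borrow a buffer; requests are served from the smallest pooled size that
// fits (64, 256, 1024, 4096 or 16384 bytes), so 512 comes from the 1024B pool.
const buf = pools.acquireBuffer(512);
// ... fill and use buf ...
pools.releaseBuffer(buf);

// Opt-in response caching: only for small (<4KB) responses that never change.
const key = 'GET:/health';
if (!pools.hasCachedResponse(key)) {
  pools.cacheResponse(
    key,
    Buffer.from('{"ok":true}'),
    { 'content-type': 'application/json' },
    200
  );
}
const cached = pools.getCachedResponse(key);
if (cached) {
  console.log(cached.statusCode, cached.buffer.length, cached.headers);
}

// Periodic visibility into hit rates, utilization and rough memory estimates.
pools.logStats();
```

Note that `releaseBuffer` looks pools up by `buffer.length`, so only buffers that came from a pooled size are re-pooled; oversized direct allocations are simply left to the garbage collector, as the code comments state.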