musubi-sdd 5.6.2 → 5.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,509 @@
+ /**
+  * MUSUBI Performance Module
+  *
+  * Phase 6 P0: Performance Optimization
+  *
+  * Exports:
+  * - LazyLoader: On-demand module loading
+  * - CacheManager: In-memory caching with LRU eviction
+  * - RequestCoalescer: Duplicate request deduplication
+  * - BatchProcessor: Bulk operation support
+  */
+
+ const {
+   LazyLoader,
+   ModuleCategory,
+   MODULE_REGISTRY,
+   createLazyProxy,
+   defaultLoader,
+ } = require('./lazy-loader');
+
+ const {
+   CacheEntry,
+   LRUCache,
+   CacheManager,
+   RequestCoalescer,
+   CacheNamespace,
+   defaultCacheManager,
+ } = require('./cache-manager');
+
+ const {
+   MemoryPressure,
+   ObjectPool,
+   WeakCache,
+   MemoryMonitor,
+   StreamingBuffer,
+   MemoryOptimizer,
+   defaultOptimizer,
+ } = require('./memory-optimizer');
+
+ const {
+   InitStage,
+   InitState,
+   InitModule,
+   StartupOptimizer,
+   WarmupCache,
+   InitProfiler,
+   defaultStartupOptimizer,
+   defaultWarmupCache,
+   defaultInitProfiler,
+ } = require('./startup-optimizer');
+
+ /**
+  * BatchProcessor for bulk operations
+  */
+ class BatchProcessor {
+   constructor(options = {}) {
+     this.options = {
+       batchSize: options.batchSize || 100,
+       maxWaitTime: options.maxWaitTime || 50, // ms
+       concurrency: options.concurrency || 4,
+       ...options,
+     };
+
+     this.queue = [];
+     this.timer = null;
+     this.processing = false;
+   }
+
+   /**
+    * Add an item to the batch
+    * @param {*} item - Item to process
+    * @param {Function} processor - Function to process item
+    * @returns {Promise<*>} - Processing result
+    */
+   add(item, processor) {
+     return new Promise((resolve, reject) => {
+       this.queue.push({ item, processor, resolve, reject });
+
+       // Start timer if not already running
+       if (!this.timer && !this.processing) {
+         this.timer = setTimeout(() => {
+           this.flush();
+         }, this.options.maxWaitTime);
+       }
+
+       // Flush if batch is full
+       if (this.queue.length >= this.options.batchSize) {
+         this.flush();
+       }
+     });
+   }
+
+   /**
+    * Process all queued items
+    */
+   async flush() {
+     if (this.timer) {
+       clearTimeout(this.timer);
+       this.timer = null;
+     }
+
+     if (this.queue.length === 0 || this.processing) {
+       return;
+     }
+
+     this.processing = true;
+     const batch = this.queue.splice(0, this.options.batchSize);
+
+     try {
+       // Process in parallel with concurrency limit
+       const chunks = [];
+       for (let i = 0; i < batch.length; i += this.options.concurrency) {
+         chunks.push(batch.slice(i, i + this.options.concurrency));
+       }
+
+       for (const chunk of chunks) {
+         await Promise.all(
+           chunk.map(async ({ item, processor, resolve, reject }) => {
+             try {
+               const result = await processor(item);
+               resolve(result);
+             } catch (error) {
+               reject(error);
+             }
+           })
+         );
+       }
+     } finally {
+       this.processing = false;
+
+       // Process remaining items
+       if (this.queue.length > 0) {
+         this.flush();
+       }
+     }
+   }
+
+   /**
+    * Get queue length
+    */
+   get queueLength() {
+     return this.queue.length;
+   }
+
+   /**
+    * Clear the queue
+    */
+   clear() {
+     if (this.timer) {
+       clearTimeout(this.timer);
+       this.timer = null;
+     }
+
+     // Reject all pending items
+     for (const { reject } of this.queue) {
+       reject(new Error('Batch processor cleared'));
+     }
+     this.queue = [];
+   }
+ }
+
+ /**
+  * Connection pool for reusing connections
+  */
+ class ConnectionPool {
+   constructor(factory, options = {}) {
+     this.factory = factory;
+     this.options = {
+       minSize: options.minSize || 1,
+       maxSize: options.maxSize || 10,
+       idleTimeout: options.idleTimeout || 30000, // 30 seconds
+       acquireTimeout: options.acquireTimeout || 5000, // 5 seconds
+       ...options,
+     };
+
+     this.pool = [];
+     this.waiting = [];
+     this.activeCount = 0;
+     this.destroyed = false;
+   }
+
+   /**
+    * Acquire a connection from the pool
+    * @returns {Promise<*>} - Connection
+    */
+   async acquire() {
+     if (this.destroyed) {
+       throw new Error('Pool has been destroyed');
+     }
+
+     // Try to get an idle connection
+     const connection = this.pool.shift();
+     if (connection) {
+       this.activeCount++;
+       return connection.resource;
+     }
+
+     // Create a new connection if under max
+     if (this.activeCount < this.options.maxSize) {
+       this.activeCount++;
+       try {
+         return await this.factory();
+       } catch (error) {
+         this.activeCount--;
+         throw error;
+       }
+     }
+
+     // Wait for a connection to be released
+     return new Promise((resolve, reject) => {
+       const timeout = setTimeout(() => {
+         const index = this.waiting.indexOf(waiter);
+         if (index !== -1) {
+           this.waiting.splice(index, 1);
+         }
+         reject(new Error('Connection acquire timeout'));
+       }, this.options.acquireTimeout);
+
+       const waiter = { resolve, reject, timeout };
+       this.waiting.push(waiter);
+     });
+   }
+
+   /**
+    * Release a connection back to the pool
+    * @param {*} resource - Connection to release
+    */
+   release(resource) {
+     // Check if someone is waiting
+     if (this.waiting.length > 0) {
+       const waiter = this.waiting.shift();
+       clearTimeout(waiter.timeout);
+       waiter.resolve(resource);
+       return;
+     }
+
+     // Return to pool if under max
+     if (this.pool.length < this.options.maxSize) {
+       this.pool.push({
+         resource,
+         idleSince: Date.now(),
+       });
+     }
+
+     this.activeCount--;
+
+     // Clean up idle connections
+     this._cleanupIdle();
+   }
+
+   /**
+    * Clean up idle connections
+    * @private
+    */
+   _cleanupIdle() {
+     const now = Date.now();
+     while (this.pool.length > this.options.minSize) {
+       const oldest = this.pool[0];
+       if (now - oldest.idleSince > this.options.idleTimeout) {
+         this.pool.shift();
+       } else {
+         break;
+       }
+     }
+   }
+
+   /**
+    * Execute a function with a pooled connection
+    * @param {Function} fn - Function to execute
+    * @returns {Promise<*>} - Result
+    */
+   async withConnection(fn) {
+     const connection = await this.acquire();
+     try {
+       return await fn(connection);
+     } finally {
+       this.release(connection);
+     }
+   }
+
+   /**
+    * Get pool statistics
+    */
+   getStats() {
+     return {
+       poolSize: this.pool.length,
+       activeCount: this.activeCount,
+       waitingCount: this.waiting.length,
+       maxSize: this.options.maxSize,
+     };
+   }
+
+   /**
+    * Destroy the pool
+    */
+   destroy() {
+     this.destroyed = true;
+
+     // Reject all waiting
+     for (const { reject, timeout } of this.waiting) {
+       clearTimeout(timeout);
+       reject(new Error('Pool destroyed'));
+     }
+     this.waiting = [];
+
+     // Clear pool
+     this.pool = [];
+     this.activeCount = 0;
+   }
+ }
+
+ /**
+  * Performance monitor for tracking metrics
+  */
+ class PerformanceMonitor {
+   constructor() {
+     this.metrics = new Map();
+     this.timers = new Map();
+   }
+
+   /**
+    * Start a timer
+    * @param {string} name - Timer name
+    */
+   startTimer(name) {
+     this.timers.set(name, process.hrtime.bigint());
+   }
+
+   /**
+    * End a timer and record the duration
+    * @param {string} name - Timer name
+    * @returns {number} - Duration in ms
+    */
+   endTimer(name) {
+     const start = this.timers.get(name);
+     if (!start) return 0;
+
+     const end = process.hrtime.bigint();
+     const durationNs = Number(end - start);
+     const durationMs = durationNs / 1_000_000;
+
+     this.timers.delete(name);
+     this.record(name, durationMs);
+
+     return durationMs;
+   }
+
+   /**
+    * Record a metric value
+    * @param {string} name - Metric name
+    * @param {number} value - Metric value
+    */
+   record(name, value) {
+     if (!this.metrics.has(name)) {
+       this.metrics.set(name, {
+         count: 0,
+         sum: 0,
+         min: Infinity,
+         max: -Infinity,
+         values: [],
+       });
+     }
+
+     const metric = this.metrics.get(name);
+     metric.count++;
+     metric.sum += value;
+     metric.min = Math.min(metric.min, value);
+     metric.max = Math.max(metric.max, value);
+
+     // Keep last 100 values for percentile calculation
+     metric.values.push(value);
+     if (metric.values.length > 100) {
+       metric.values.shift();
+     }
+   }
+
+   /**
+    * Get metric statistics
+    * @param {string} name - Metric name
+    * @returns {Object} - Statistics
+    */
+   getMetric(name) {
+     const metric = this.metrics.get(name);
+     if (!metric) return null;
+
+     const sorted = [...metric.values].sort((a, b) => a - b);
+     const p50Index = Math.floor(sorted.length * 0.5);
+     const p95Index = Math.floor(sorted.length * 0.95);
+     const p99Index = Math.floor(sorted.length * 0.99);
+
+     return {
+       count: metric.count,
+       avg: metric.sum / metric.count,
+       min: metric.min,
+       max: metric.max,
+       p50: sorted[p50Index] || 0,
+       p95: sorted[p95Index] || sorted[sorted.length - 1] || 0,
+       p99: sorted[p99Index] || sorted[sorted.length - 1] || 0,
+     };
+   }
+
+   /**
+    * Get all metrics
+    * @returns {Object} - All metrics
+    */
+   getAllMetrics() {
+     const result = {};
+     for (const name of this.metrics.keys()) {
+       result[name] = this.getMetric(name);
+     }
+     return result;
+   }
+
+   /**
+    * Clear all metrics
+    */
+   clear() {
+     this.metrics.clear();
+     this.timers.clear();
+   }
+
+   /**
+    * Create a wrapped function that tracks timing
+    * @param {string} name - Metric name
+    * @param {Function} fn - Function to wrap
+    * @returns {Function} - Wrapped function
+    */
+   wrap(name, fn) {
+     const monitor = this;
+     return async function (...args) {
+       monitor.startTimer(name);
+       try {
+         return await fn.apply(this, args);
+       } finally {
+         monitor.endTimer(name);
+       }
+     };
+   }
+
+   /**
+    * Create a wrapped sync function that tracks timing
+    * @param {string} name - Metric name
+    * @param {Function} fn - Function to wrap
+    * @returns {Function} - Wrapped function
+    */
+   wrapSync(name, fn) {
+     const monitor = this;
+     return function (...args) {
+       monitor.startTimer(name);
+       try {
+         return fn.apply(this, args);
+       } finally {
+         monitor.endTimer(name);
+       }
+     };
+   }
+ }
+
+ // Singleton instances
+ const defaultBatchProcessor = new BatchProcessor();
+ const defaultPerformanceMonitor = new PerformanceMonitor();
+
+ module.exports = {
+   // Lazy Loading
+   LazyLoader,
+   ModuleCategory,
+   MODULE_REGISTRY,
+   createLazyProxy,
+   defaultLoader,
+
+   // Caching
+   CacheEntry,
+   LRUCache,
+   CacheManager,
+   RequestCoalescer,
+   CacheNamespace,
+   defaultCacheManager,
+
+   // Batch Processing
+   BatchProcessor,
+   defaultBatchProcessor,
+
+   // Connection Pooling
+   ConnectionPool,
+
+   // Performance Monitoring
+   PerformanceMonitor,
+   defaultPerformanceMonitor,
+
+   // Memory Optimization (Phase 6 P1)
+   MemoryPressure,
+   ObjectPool,
+   WeakCache,
+   MemoryMonitor,
+   StreamingBuffer,
+   MemoryOptimizer,
+   defaultOptimizer,
+
+   // Startup Optimization (Phase 6 P2)
+   InitStage,
+   InitState,
+   InitModule,
+   StartupOptimizer,
+   WarmupCache,
+   InitProfiler,
+   defaultStartupOptimizer,
+   defaultWarmupCache,
+   defaultInitProfiler,
+ };
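
The new BatchProcessor collects items until batchSize is reached or maxWaitTime elapses, then settles each queued promise by running the caller-supplied processor over the batch in small concurrent groups. A minimal usage sketch follows; the require path and the squaring processor are assumptions for illustration, since the diff does not name the file.

// Sketch only: './performance' is a placeholder path, not taken from the diff.
const { BatchProcessor } = require('./performance');

const batcher = new BatchProcessor({ batchSize: 10, maxWaitTime: 25, concurrency: 2 });

async function runBatchDemo() {
  // Each add() returns a promise for that single item's result.
  const results = await Promise.all(
    [1, 2, 3, 4, 5].map((n) => batcher.add(n, async (x) => x * x))
  );
  console.log(results); // [1, 4, 9, 16, 25]
}

runBatchDemo();

Because each queue entry carries its own resolve/reject pair, a failing item rejects only its own promise and does not affect the rest of the batch.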
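
ConnectionPool wraps a caller-supplied factory: acquire() reuses an idle pooled connection, creates a new one while activeCount is below maxSize, and otherwise queues the caller until a release() arrives or acquireTimeout fires. Below is a sketch with a stand-in factory; the plain { id } objects are hypothetical and the path is again a placeholder.

// Sketch only: the factory returns a dummy object instead of a real client.
const { ConnectionPool } = require('./performance');

let nextId = 0;
const pool = new ConnectionPool(async () => ({ id: ++nextId }), {
  maxSize: 2,
  acquireTimeout: 1000,
});

async function runPoolDemo() {
  // withConnection() acquires, runs the callback, and releases in a finally block.
  const result = await pool.withConnection(async (conn) => `used connection ${conn.id}`);
  console.log(result);          // used connection 1
  console.log(pool.getStats()); // { poolSize: 1, activeCount: 0, waitingCount: 0, maxSize: 2 }
  pool.destroy();
}

runPoolDemo();

withConnection() is the safer entry point for most callers, since it releases the connection even when the callback throws.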
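
PerformanceMonitor accumulates count, sum, min, and max per metric name and keeps the last 100 samples for rough p50/p95/p99 estimates; wrap() returns an async wrapper that times every call under the given name. A small sketch with a placeholder workload follows (doWork and the metric name are illustrative).

// Sketch only: defaultPerformanceMonitor is the exported singleton; the path is assumed.
const { defaultPerformanceMonitor: monitor } = require('./performance');

async function doWork(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

const timedWork = monitor.wrap('work.duration', doWork);

async function runMonitorDemo() {
  await timedWork(5);
  await timedWork(20);
  // { count, avg, min, max, p50, p95, p99 }, durations in milliseconds.
  console.log(monitor.getMetric('work.duration'));
}

runMonitorDemo();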