navis.js 3.0.2 → 3.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +51 -0
- package/examples/lambda-optimized.js +103 -0
- package/examples/lambda.js +30 -29
- package/package.json +1 -1
- package/src/core/lambda-handler.js +130 -0
- package/src/index.js +14 -0
- package/src/middleware/cold-start-tracker.js +56 -0
- package/src/utils/lazy-init.js +100 -0
- package/src/utils/service-client-pool.js +131 -0
package/README.md
CHANGED

@@ -112,6 +112,14 @@ navis metrics
 - ✅ **Distributed tracing** - Trace and span management
 - ✅ **Enhanced CLI** - Test and metrics commands

+### v3.1 (Current)
+
+- ✅ **Lambda cold start optimization** - Connection pooling, lazy initialization
+- ✅ **ServiceClientPool** - Reuse HTTP connections across invocations
+- ✅ **LazyInit utility** - Defer heavy operations until needed
+- ✅ **LambdaHandler** - Optimized handler with warm-up support
+- ✅ **Cold start tracking** - Monitor and log cold start metrics
+
 ## API Reference

 ### NavisApp
@@ -248,12 +256,54 @@ await nats.connect();
 await nats.publish('user.created', { userId: 123 });
 ```

+### Lambda Optimization (v3.1)
+
+```javascript
+const {
+  NavisApp,
+  getPool,
+  LambdaHandler,
+  coldStartTracker,
+  LazyInit,
+} = require('navis.js');
+
+// Initialize app OUTSIDE handler (reused across invocations)
+const app = new NavisApp();
+app.use(coldStartTracker);
+
+// Connection pooling - reuse HTTP connections
+const client = getPool().get('http://api.example.com', {
+  timeout: 3000,
+  maxRetries: 2,
+});
+
+// Lazy initialization - defer heavy operations
+const dbConnection = new LazyInit();
+app.get('/users', async (req, res) => {
+  const db = await dbConnection.init(async () => {
+    return await connectToDatabase(); // Only runs once
+  });
+  res.body = await db.query('SELECT * FROM users');
+});
+
+// Optimized Lambda handler
+const handler = new LambdaHandler(app, {
+  enableMetrics: true,
+  warmupPath: '/warmup',
+});
+
+exports.handler = async (event, context) => {
+  return await handler.handle(event, context);
+};
+```
+
 ## Examples

 See the `examples/` directory:

 - `server.js` - Node.js HTTP server example
 - `lambda.js` - AWS Lambda handler example
+- `lambda-optimized.js` - Optimized Lambda handler with cold start optimizations (v3.1)
 - `service-client-demo.js` - ServiceClient usage example
 - `v2-features-demo.js` - v2 features demonstration (retry, circuit breaker, etc.)
 - `v3-features-demo.js` - v3 features demonstration (messaging, observability, etc.)
@@ -273,6 +323,7 @@ Advanced features: async messaging (SQS/Kafka/NATS), observability, enhanced CLI

 - [V2 Features Guide](./V2_FEATURES.md) - Complete v2 features documentation
 - [V3 Features Guide](./V3_FEATURES.md) - Complete v3 features documentation
+- [Lambda Optimization Guide](./LAMBDA_OPTIMIZATION.md) - Lambda cold start optimization guide (v3.1)
 - [Verification Guide v2](./VERIFY_V2.md) - How to verify v2 features
 - [Verification Guide v3](./VERIFY_V3.md) - How to verify v3 features

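The `warmupPath` option in the README snippet above pairs with the warm-up detection added in `src/core/lambda-handler.js` later in this diff. As a rough sketch of how a scheduled keep-warm invocation could exercise it, assuming the package is installed as `navis.js` (the bare `{ warmup: true }` payload is one of the shapes `isWarmupEvent()` accepts):

```javascript
const { NavisApp, LambdaHandler } = require('navis.js');

const app = new NavisApp();
const handler = new LambdaHandler(app, { warmupPath: '/warmup' });

// A scheduled keep-warm invocation can send a bare payload; isWarmupEvent()
// recognizes it and returns early without touching any route.
handler.handle({ warmup: true }, {}).then((res) => {
  // 200 with { status: 'warmed', invocationCount, coldStartCount }
  console.log(res.statusCode, res.body);
});
```
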
package/examples/lambda-optimized.js
ADDED

@@ -0,0 +1,103 @@
+/**
+ * Optimized Lambda Handler Example
+ * v3.1: Best practices for reducing cold start time
+ */
+
+const { NavisApp, response, getPool } = require('../src/index');
+const LambdaHandler = require('../src/core/lambda-handler');
+const { coldStartTracker } = require('../src/middleware/cold-start-tracker');
+
+// ============================================
+// CRITICAL: Initialize app OUTSIDE handler
+// This ensures the app is reused across invocations
+// ============================================
+const app = new NavisApp();
+
+// Add cold start tracking middleware
+app.use(coldStartTracker);
+
+// ============================================
+// Register routes at MODULE LEVEL (not in handler)
+// This runs once per container, not per invocation
+// ============================================
+app.get('/', (req, res) => {
+  res.statusCode = 200;
+  res.body = {
+    message: 'Welcome to Navis.js Lambda (Optimized)!',
+    optimized: true,
+  };
+});
+
+app.get('/health', (req, res) => {
+  res.statusCode = 200;
+  res.body = { status: 'ok' };
+});
+
+app.get('/warmup', (req, res) => {
+  res.statusCode = 200;
+  res.body = { status: 'warmed' };
+});
+
+// Example: Using ServiceClient with connection pooling
+app.get('/external', async (req, res) => {
+  try {
+    // Get client from pool (reuses connections)
+    const client = getPool().get('http://api.example.com', {
+      timeout: 3000,
+      maxRetries: 2,
+    });
+
+    const result = await client.get('/data');
+    res.statusCode = 200;
+    res.body = { data: result.data };
+  } catch (error) {
+    res.statusCode = 500;
+    res.body = { error: error.message };
+  }
+});
+
+// ============================================
+// OPTIMIZED HANDLER
+// ============================================
+
+// Create handler instance (reused across invocations)
+const handler = new LambdaHandler(app, {
+  enableMetrics: true,
+  warmupPath: '/warmup',
+});
+
+// Cache the handler function (V8 optimization)
+let cachedHandler = null;
+
+/**
+ * Lambda handler - optimized for cold starts
+ * @param {Object} event - Lambda event
+ * @param {Object} context - Lambda context
+ */
+exports.handler = async (event, context) => {
+  // Reuse handler function (V8 JIT optimization)
+  if (!cachedHandler) {
+    cachedHandler = async (event, context) => {
+      return await handler.handle(event, context);
+    };
+  }
+
+  return await cachedHandler(event, context);
+};
+
+/**
+ * Optional: Pre-warm function
+ * Can be called during container initialization
+ */
+exports.preWarm = async () => {
+  // Pre-initialize any heavy operations
+  // This runs once per container
+  console.log('Pre-warming Lambda container...');
+
+  // Example: Pre-initialize service clients
+  const pool = getPool();
+  pool.get('http://api.example.com', { timeout: 3000 });
+
+  console.log('Pre-warming complete');
+};

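One way to make use of the exported `preWarm` hook is a thin wrapper module that starts it during the init phase, so the pool setup overlaps with container startup rather than the first request. A sketch; the wrapper file, its path, and the error handling are illustrative, not part of the package:

```javascript
// wrapper.js (hypothetical) - sits next to lambda-optimized.js in a deployment bundle
const optimized = require('./lambda-optimized');

// Kick off pre-warming at module load; don't block module evaluation on it.
const warmupPromise = optimized.preWarm().catch((err) => {
  console.warn('Pre-warm failed, continuing cold:', err.message);
});

exports.handler = async (event, context) => {
  // Ensure pre-warming has settled before the first real request is served.
  await warmupPromise;
  return optimized.handler(event, context);
};
```
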
package/examples/lambda.js
CHANGED

@@ -1,30 +1,31 @@
-const { NavisApp } = require('../src/index');
-
-
-
-
-
-
-
-
-
-
-
-
-res.
-}
-
-
-
-res.
-}
-
-
-
-res.
-}
-
-
-
-
+const { NavisApp } = require('../src/index');
+
+// Initialize app at module level (reused across invocations)
+const app = new NavisApp();
+
+// Middleware example
+app.use((req, res, next) => {
+  console.log(`Lambda: ${req.method} ${req.path}`);
+  next();
+});
+
+// Routes registered at module level (not in handler)
+app.get('/', (req, res) => {
+  res.statusCode = 200;
+  res.body = { message: 'Welcome to Navis.js Lambda!' };
+});
+
+app.get('/health', (req, res) => {
+  res.statusCode = 200;
+  res.body = { status: 'ok' };
+});
+
+app.post('/echo', (req, res) => {
+  res.statusCode = 200;
+  res.body = { echo: req.body };
+});
+
+// Lambda handler - optimized for reuse
+exports.handler = async (event) => {
+  return await app.handleLambda(event);
 };

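For quick local testing, the rewritten example can be driven with a hand-built event. A sketch, assuming an API Gateway v1-style payload; the exact fields `app.handleLambda()` reads are not shown in this diff, so treat the event shape as an assumption:

```javascript
const { handler } = require('./lambda');

// Hypothetical API Gateway v1-style event; adjust to whatever handleLambda() expects.
const event = {
  httpMethod: 'POST',
  path: '/echo',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ hello: 'world' }),
};

handler(event).then((res) => {
  console.log(res.statusCode, res.body);
});
```
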
package/package.json
CHANGED

-  "version": "3.0.2",
+  "version": "3.1.0",

|
|
|
1
|
+
/**
|
|
2
|
+
* Optimized Lambda Handler
|
|
3
|
+
* v3.1: Enhanced Lambda handler with cold start optimizations
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
class LambdaHandler {
|
|
7
|
+
constructor(app, options = {}) {
|
|
8
|
+
this.app = app;
|
|
9
|
+
this.isWarm = false;
|
|
10
|
+
this.initTime = Date.now();
|
|
11
|
+
this.invocationCount = 0;
|
|
12
|
+
this.coldStartCount = 0;
|
|
13
|
+
this.enableMetrics = options.enableMetrics !== false;
|
|
14
|
+
this.warmupPath = options.warmupPath || '/warmup';
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Handle Lambda invocation
|
|
19
|
+
* @param {Object} event - Lambda event
|
|
20
|
+
* @param {Object} context - Lambda context
|
|
21
|
+
* @returns {Promise<Object>} - Lambda response
|
|
22
|
+
*/
|
|
23
|
+
async handle(event, context) {
|
|
24
|
+
this.invocationCount++;
|
|
25
|
+
|
|
26
|
+
// Detect warm-up events
|
|
27
|
+
if (this.isWarmupEvent(event)) {
|
|
28
|
+
return this.handleWarmup();
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
// Track cold start
|
|
32
|
+
const isColdStart = !this.isWarm;
|
|
33
|
+
if (isColdStart) {
|
|
34
|
+
this.isWarm = true;
|
|
35
|
+
this.coldStartCount++;
|
|
36
|
+
|
|
37
|
+
if (this.enableMetrics) {
|
|
38
|
+
const coldStartDuration = Date.now() - this.initTime;
|
|
39
|
+
console.log(JSON.stringify({
|
|
40
|
+
type: 'cold-start',
|
|
41
|
+
duration: coldStartDuration,
|
|
42
|
+
memoryLimit: context.memoryLimitInMB,
|
|
43
|
+
requestId: context.requestId,
|
|
44
|
+
}));
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
// Add cold start headers to response
|
|
49
|
+
const response = await this.app.handleLambda(event);
|
|
50
|
+
|
|
51
|
+
if (isColdStart && response.headers) {
|
|
52
|
+
response.headers['X-Cold-Start'] = 'true';
|
|
53
|
+
response.headers['X-Init-Time'] = (Date.now() - this.initTime).toString();
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
return response;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Check if event is a warm-up event
|
|
61
|
+
* @param {Object} event - Lambda event
|
|
62
|
+
* @returns {boolean} - True if warm-up event
|
|
63
|
+
*/
|
|
64
|
+
isWarmupEvent(event) {
|
|
65
|
+
// Check various warm-up event formats
|
|
66
|
+
if (event.source === 'serverless-plugin-warmup') {
|
|
67
|
+
return true;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
if (event.warmup === true || event['serverless-plugin-warmup']) {
|
|
71
|
+
return true;
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
// Check if it's a warm-up HTTP request
|
|
75
|
+
if (event.httpMethod === 'GET' &&
|
|
76
|
+
(event.path === this.warmupPath || event.rawPath === this.warmupPath)) {
|
|
77
|
+
return true;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
return false;
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
/**
|
|
84
|
+
* Handle warm-up event
|
|
85
|
+
* @returns {Object} - Warm-up response
|
|
86
|
+
*/
|
|
87
|
+
handleWarmup() {
|
|
88
|
+
// Mark as warm
|
|
89
|
+
this.isWarm = true;
|
|
90
|
+
|
|
91
|
+
return {
|
|
92
|
+
statusCode: 200,
|
|
93
|
+
headers: {
|
|
94
|
+
'Content-Type': 'application/json',
|
|
95
|
+
},
|
|
96
|
+
body: JSON.stringify({
|
|
97
|
+
status: 'warmed',
|
|
98
|
+
invocationCount: this.invocationCount,
|
|
99
|
+
coldStartCount: this.coldStartCount,
|
|
100
|
+
}),
|
|
101
|
+
};
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
/**
|
|
105
|
+
* Get handler statistics
|
|
106
|
+
* @returns {Object} - Handler statistics
|
|
107
|
+
*/
|
|
108
|
+
getStats() {
|
|
109
|
+
return {
|
|
110
|
+
isWarm: this.isWarm,
|
|
111
|
+
invocationCount: this.invocationCount,
|
|
112
|
+
coldStartCount: this.coldStartCount,
|
|
113
|
+
initTime: this.initTime,
|
|
114
|
+
uptime: Date.now() - this.initTime,
|
|
115
|
+
};
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
/**
|
|
119
|
+
* Reset handler state (useful for testing)
|
|
120
|
+
*/
|
|
121
|
+
reset() {
|
|
122
|
+
this.isWarm = false;
|
|
123
|
+
this.invocationCount = 0;
|
|
124
|
+
this.coldStartCount = 0;
|
|
125
|
+
this.initTime = Date.now();
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
module.exports = LambdaHandler;
|
|
130
|
+
|
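Because the `LambdaHandler` instance lives at module scope, its counters persist across warm invocations. A sketch of surfacing `getStats()` through a diagnostics route; the `/stats` route itself is illustrative:

```javascript
const { NavisApp, LambdaHandler } = require('navis.js');

const app = new NavisApp();
const handler = new LambdaHandler(app, { enableMetrics: true });

// Illustrative diagnostics route exposing the handler's own counters.
app.get('/stats', (req, res) => {
  res.statusCode = 200;
  res.body = handler.getStats(); // { isWarm, invocationCount, coldStartCount, initTime, uptime }
});

exports.handler = (event, context) => handler.handle(event, context);
```
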
package/src/index.js
CHANGED

@@ -16,6 +16,12 @@ const Logger = require('./observability/logger');
 const Metrics = require('./observability/metrics');
 const Tracer = require('./observability/tracer');

+// v3.1: Lambda Optimizations
+const { getPool, ServiceClientPool } = require('./utils/service-client-pool');
+const { LazyInit, createLazyInit } = require('./utils/lazy-init');
+const LambdaHandler = require('./core/lambda-handler');
+const { coldStartTracker } = require('./middleware/cold-start-tracker');
+
 module.exports = {
   // Core
   NavisApp,
@@ -38,6 +44,14 @@ module.exports = {
   Metrics,
   Tracer,

+  // v3.1: Lambda Optimizations
+  ServiceClientPool,
+  getPool,
+  LazyInit,
+  createLazyInit,
+  LambdaHandler,
+  coldStartTracker,
+
   // Utilities
   response: {
     success,

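With these additions, the v3.1 helpers are all reachable from the package root alongside the existing exports; a minimal sketch of pulling them in together:

```javascript
// Everything added in v3.1 comes straight from the main entry point,
// alongside the existing NavisApp / response / observability exports.
const {
  ServiceClientPool,
  getPool,
  LazyInit,
  createLazyInit,
  LambdaHandler,
  coldStartTracker,
} = require('navis.js');

console.log([ServiceClientPool, getPool, LazyInit, createLazyInit, LambdaHandler, coldStartTracker]
  .every((x) => typeof x === 'function')); // true
```
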
package/src/middleware/cold-start-tracker.js
ADDED

@@ -0,0 +1,56 @@
+/**
+ * Cold Start Tracker Middleware
+ * v3.1: Track and log cold start metrics
+ */
+
+let isFirstInvocation = true;
+let initTime = Date.now();
+
+/**
+ * Cold start tracking middleware
+ * Tracks cold starts and adds headers to response
+ * @param {Object} req - Request object
+ * @param {Object} res - Response object
+ * @param {Function} next - Next middleware
+ */
+function coldStartTracker(req, res, next) {
+  if (isFirstInvocation) {
+    const coldStartDuration = Date.now() - initTime;
+
+    // Add cold start info to response headers
+    if (res.headers) {
+      res.headers['X-Cold-Start'] = 'true';
+      res.headers['X-Cold-Start-Duration'] = coldStartDuration.toString();
+    }
+
+    // Log cold start (structured logging)
+    console.log(JSON.stringify({
+      type: 'cold-start',
+      duration: coldStartDuration,
+      path: req.path || req.url,
+      method: req.method,
+    }));
+
+    isFirstInvocation = false;
+  } else {
+    if (res.headers) {
+      res.headers['X-Cold-Start'] = 'false';
+    }
+  }
+
+  next();
+}
+
+/**
+ * Reset cold start tracker (useful for testing)
+ */
+function resetColdStartTracker() {
+  isFirstInvocation = true;
+  initTime = Date.now();
+}
+
+module.exports = {
+  coldStartTracker,
+  resetColdStartTracker,
+};

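Since the tracker keeps its state in module-level variables, test suites need `resetColdStartTracker` between cases. A sketch of what that might look like; the mock `req`/`res` objects are illustrative, and because `resetColdStartTracker` is not re-exported from the package root, the require path assumes the published `src/` layout shown in this diff:

```javascript
const {
  coldStartTracker,
  resetColdStartTracker,
} = require('navis.js/src/middleware/cold-start-tracker');

function invoke() {
  const req = { method: 'GET', path: '/health' };
  const res = { headers: {} };
  coldStartTracker(req, res, () => {});
  return res.headers['X-Cold-Start'];
}

resetColdStartTracker();
console.log(invoke()); // 'true'  - first invocation after a reset counts as a cold start
console.log(invoke()); // 'false' - later invocations in the same module instance
```
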
package/src/utils/lazy-init.js
ADDED

@@ -0,0 +1,100 @@
+/**
+ * Lazy Initialization Utility
+ * v3.1: Defer heavy initialization until needed (reduces cold start time)
+ */
+
+class LazyInit {
+  constructor(options = {}) {
+    this.initialized = false;
+    this.initPromise = null;
+    this.initFn = null;
+    this.autoInit = options.autoInit !== false; // Auto-init on first access
+    this.cacheResult = options.cacheResult !== false; // Cache initialization result
+    this.cachedResult = null;
+  }
+
+  /**
+   * Initialize with a function
+   * @param {Function} initFn - Initialization function (can be async)
+   * @returns {Promise} - Initialization promise
+   */
+  async init(initFn) {
+    if (this.initialized && this.cacheResult) {
+      return this.cachedResult;
+    }
+
+    if (!this.initPromise) {
+      this.initFn = initFn;
+      this.initPromise = Promise.resolve(initFn()).then(result => {
+        this.initialized = true;
+        if (this.cacheResult) {
+          this.cachedResult = result;
+        }
+        return result;
+      }).catch(error => {
+        // Reset on error so it can be retried
+        this.initPromise = null;
+        throw error;
+      });
+    }
+
+    return this.initPromise;
+  }
+
+  /**
+   * Check if initialized
+   * @returns {boolean} - True if initialized
+   */
+  isInitialized() {
+    return this.initialized;
+  }
+
+  /**
+   * Get cached result (if available)
+   * @returns {*} - Cached initialization result
+   */
+  getCached() {
+    return this.cachedResult;
+  }
+
+  /**
+   * Reset initialization state
+   */
+  reset() {
+    this.initialized = false;
+    this.initPromise = null;
+    this.cachedResult = null;
+  }
+
+  /**
+   * Execute function with lazy initialization
+   * @param {Function} fn - Function to execute after initialization
+   * @returns {Promise} - Result of function execution
+   */
+  async withInit(fn) {
+    if (!this.initialized && this.initFn) {
+      await this.init(this.initFn);
+    }
+    return await fn(this.cachedResult);
+  }
+}
+
+/**
+ * Create a lazy initializer
+ * @param {Function} initFn - Initialization function
+ * @param {Object} options - Options
+ * @returns {LazyInit} - LazyInit instance
+ */
+function createLazyInit(initFn, options = {}) {
+  const lazy = new LazyInit(options);
+  if (initFn) {
+    lazy.initFn = initFn;
+  }
+  return lazy;
+}
+
+module.exports = {
+  LazyInit,
+  createLazyInit,
+};

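A sketch of the `createLazyInit` + `withInit` flow, which is the variant the README example does not cover; the `openDbConnection` helper is hypothetical:

```javascript
const { createLazyInit } = require('navis.js');

// Hypothetical heavy initializer, e.g. opening a database connection.
async function openDbConnection() {
  console.log('connecting...'); // runs only once per container
  return { query: async (sql) => [{ sql }] };
}

// createLazyInit only stores the initializer; nothing runs yet.
const lazyDb = createLazyInit(openDbConnection);

async function listUsers() {
  // withInit runs the stored initializer on first use, then passes the cached result in.
  return lazyDb.withInit((db) => db.query('SELECT * FROM users'));
}

listUsers()
  .then(() => listUsers())
  .then((rows) => console.log(lazyDb.isInitialized(), rows)); // true [ { sql: 'SELECT * FROM users' } ]
```

Note that a failed initializer clears `initPromise` in the `catch` branch, so a later call can retry instead of caching the rejection.
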
package/src/utils/service-client-pool.js
ADDED

@@ -0,0 +1,131 @@
+/**
+ * ServiceClient Pool - Connection reuse for Lambda
+ * v3.1: Connection pooling to reduce cold start overhead
+ */
+
+const ServiceClient = require('./service-client');
+
+class ServiceClientPool {
+  constructor() {
+    this.clients = new Map();
+    this.maxSize = 10; // Maximum cached clients
+  }
+
+  /**
+   * Get or create a ServiceClient instance
+   * Reuses existing clients to avoid re-initialization
+   * @param {string} baseUrl - Service base URL
+   * @param {Object} options - ServiceClient options
+   * @returns {ServiceClient} - Cached or new ServiceClient
+   */
+  get(baseUrl, options = {}) {
+    // Create a unique key for this client configuration
+    const key = this._createKey(baseUrl, options);
+
+    if (!this.clients.has(key)) {
+      // Create new client if not in pool
+      const client = new ServiceClient(baseUrl, options);
+      this.clients.set(key, client);
+
+      // Limit pool size (remove oldest if needed)
+      if (this.clients.size > this.maxSize) {
+        const firstKey = this.clients.keys().next().value;
+        this.clients.delete(firstKey);
+      }
+    }
+
+    return this.clients.get(key);
+  }
+
+  /**
+   * Check if a client exists in pool
+   * @param {string} baseUrl - Service base URL
+   * @param {Object} options - ServiceClient options
+   * @returns {boolean} - True if client exists in pool
+   */
+  has(baseUrl, options = {}) {
+    const key = this._createKey(baseUrl, options);
+    return this.clients.has(key);
+  }
+
+  /**
+   * Remove a client from pool
+   * @param {string} baseUrl - Service base URL
+   * @param {Object} options - ServiceClient options
+   */
+  delete(baseUrl, options = {}) {
+    const key = this._createKey(baseUrl, options);
+    this.clients.delete(key);
+  }
+
+  /**
+   * Clear all clients from pool
+   */
+  clear() {
+    this.clients.clear();
+  }
+
+  /**
+   * Get pool size
+   * @returns {number} - Number of cached clients
+   */
+  size() {
+    return this.clients.size;
+  }
+
+  /**
+   * Get all cached client URLs
+   * @returns {Array} - Array of base URLs
+   */
+  getCachedUrls() {
+    return Array.from(this.clients.keys()).map(key => {
+      const [url] = key.split('::');
+      return url;
+    });
+  }
+
+  /**
+   * Create unique key for client
+   * @private
+   */
+  _createKey(baseUrl, options) {
+    // Normalize options to create consistent key
+    const normalizedOptions = {
+      timeout: options.timeout || 5000,
+      maxRetries: options.maxRetries,
+      retryBaseDelay: options.retryBaseDelay,
+      circuitBreaker: options.circuitBreaker ? JSON.stringify(options.circuitBreaker) : undefined,
+    };
+
+    return `${baseUrl}::${JSON.stringify(normalizedOptions)}`;
+  }
+}
+
+// Singleton instance for Lambda (reused across invocations)
+let poolInstance = null;
+
+/**
+ * Get singleton ServiceClientPool instance
+ * In Lambda, this instance persists across invocations
+ * @returns {ServiceClientPool} - Singleton pool instance
+ */
+function getPool() {
+  if (!poolInstance) {
+    poolInstance = new ServiceClientPool();
+  }
+  return poolInstance;
+}
+
+/**
+ * Reset pool (useful for testing)
+ */
+function resetPool() {
+  poolInstance = null;
+}
+
+module.exports = {
+  ServiceClientPool,
+  getPool,
+  resetPool,
+};

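A sketch of the pooling behaviour: an identical `baseUrl` plus options resolves to the same cached `ServiceClient`, while a different configuration gets its own entry (the URLs are placeholders):

```javascript
const { getPool } = require('navis.js');

const pool = getPool(); // module-level singleton, survives warm invocations

const a = pool.get('http://api.example.com', { timeout: 3000, maxRetries: 2 });
const b = pool.get('http://api.example.com', { timeout: 3000, maxRetries: 2 });
const c = pool.get('http://api.example.com', { timeout: 1000 });

console.log(a === b);                           // true  - same normalized key, same client
console.log(a === c);                           // false - different timeout, separate entry
console.log(pool.size(), pool.getCachedUrls()); // 2 [ 'http://api.example.com', 'http://api.example.com' ]
```

When the pool grows past its `maxSize` of 10 entries, the oldest key is evicted, so long-lived containers do not accumulate unbounded clients.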