@deotio/mcp-sigv4-proxy 0.4.0 → 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/proxy.js +117 -25
  2. package/package.json +4 -4
package/dist/proxy.js CHANGED
@@ -3,6 +3,8 @@ import { SignatureV4 } from '@smithy/signature-v4';
3
3
  import { HttpRequest } from '@smithy/protocol-http';
4
4
  import { Sha256 } from '@aws-crypto/sha256-js';
5
5
  import readline from 'readline';
6
+ import { createRequire } from 'module';
7
+ const { version: PROXY_VERSION } = createRequire(import.meta.url)('../package.json');
6
8
  export const MAX_SSE_BUFFER_BYTES = 1_048_576; // 1 MB
7
9
  const DEFAULT_TIMEOUT_MS = 180_000; // 180s, matches AWS proxy
8
10
  const DEFAULT_RETRIES = 2;
@@ -19,7 +21,7 @@ const LOG_LEVEL_ORDER = {
19
21
  ERROR: 3,
20
22
  SILENT: 4,
21
23
  };
22
- let currentLogLevel = 'ERROR';
24
+ let currentLogLevel = 'INFO';
23
25
  export function setLogLevel(level) {
24
26
  currentLogLevel = level;
25
27
  }
@@ -65,11 +67,17 @@ export function validateEnv() {
65
67
  const inferred = parseEndpointUrl(url.hostname);
66
68
  const region = process.env.AWS_REGION || inferred?.region || 'us-east-1';
67
69
  const service = process.env.AWS_SERVICE || inferred?.service || 'bedrock-agentcore';
68
- // Parse log level
69
- const envLogLevel = (process.env.MCP_LOG_LEVEL ?? 'ERROR').toUpperCase();
70
- if (envLogLevel in LOG_LEVEL_ORDER) {
71
- setLogLevel(envLogLevel);
70
+ // Parse log level — only override the default if explicitly set
71
+ if (process.env.MCP_LOG_LEVEL) {
72
+ const envLogLevel = process.env.MCP_LOG_LEVEL.toUpperCase();
73
+ if (envLogLevel in LOG_LEVEL_ORDER) {
74
+ setLogLevel(envLogLevel);
75
+ }
76
+ else {
77
+ log('WARNING', `unknown MCP_LOG_LEVEL value "${process.env.MCP_LOG_LEVEL}", using default`);
78
+ }
72
79
  }
80
+ log('INFO', `v${PROXY_VERSION} starting`);
73
81
  // Parse timeout
74
82
  const timeoutMs = process.env.MCP_TIMEOUT
75
83
  ? Number(process.env.MCP_TIMEOUT) * 1000
@@ -270,8 +278,20 @@ export async function processLine(line, config, signer) {
270
278
  return;
271
279
  const { body, requestId } = input;
272
280
  const request = buildHttpRequest(config.url, body);
281
+ let signed;
282
+ try {
283
+ signed = await signer.sign(request);
284
+ }
285
+ catch (err) {
286
+ log('ERROR', `signing failed (check AWS credentials for profile/env): ${err}`);
287
+ process.stdout.write(JSON.stringify({
288
+ jsonrpc: '2.0',
289
+ id: requestId,
290
+ error: { code: -32000, message: 'Request signing failed — check AWS credentials' },
291
+ }) + '\n');
292
+ return;
293
+ }
273
294
  try {
274
- const signed = await signer.sign(request);
275
295
  log('DEBUG', `-> POST ${config.url.pathname}`);
276
296
  const response = await fetchWithRetry(config.url.toString(), {
277
297
  method: 'POST',
@@ -294,6 +314,8 @@ export async function processLine(line, config, signer) {
294
314
  const WARM_CACHEABLE = new Set([
295
315
  'initialize', 'tools/list', 'resources/list', 'prompts/list',
296
316
  ]);
317
+ // Exported for testing
318
+ export { syntheticInitializeResult };
297
319
  /**
298
320
  * Synthetic MCP initialize response returned when the backend hasn't responded yet.
299
321
  * Advertises tools/resources/prompts capabilities so Claude Code proceeds to list calls.
@@ -302,7 +324,7 @@ function syntheticInitializeResult() {
302
324
  return {
303
325
  protocolVersion: '2025-03-26',
304
326
  capabilities: { tools: {}, resources: {}, prompts: {} },
305
- serverInfo: { name: 'mcp-sigv4-proxy-warm', version: '0.4.0' },
327
+ serverInfo: { name: 'mcp-sigv4-proxy-warm', version: PROXY_VERSION },
306
328
  };
307
329
  }
308
330
  export async function warmBackend(config, signer) {
@@ -337,13 +359,24 @@ export async function warmBackend(config, signer) {
337
359
  initResponse = null;
338
360
  continue;
339
361
  }
340
- log('WARNING', `warm: initialize failed with HTTP ${initResponse.status}`);
362
+ const hint = initResponse.status === 403
363
+ ? ' (check IAM permissions for the calling identity)'
364
+ : initResponse.status === 404
365
+ ? ' (check MCP_SERVER_URL — endpoint not found)'
366
+ : initResponse.status === 406
367
+ ? ' (server rejected request — possibly missing Accept header)'
368
+ : '';
369
+ log('ERROR', `warm: initialize failed with HTTP ${initResponse.status}${hint}`);
341
370
  return false;
342
371
  }
343
372
  catch (err) {
373
+ const isCredentialError = err instanceof Error &&
374
+ (err.message.includes('credential') ||
375
+ err.message.includes('Could not load credentials') ||
376
+ err.message.includes('profile'));
344
377
  if (attempt < config.warmRetries) {
345
378
  const delay = config.warmRetryDelayMs * Math.pow(2, attempt);
346
- log('WARNING', `warm: initialize error (${err}), retrying in ${delay}ms`);
379
+ log(isCredentialError ? 'ERROR' : 'WARNING', `warm: initialize ${isCredentialError ? 'credential error' : 'error'} (${err}), retrying in ${delay}ms (${attempt + 1}/${config.warmRetries})`);
347
380
  await sleep(Math.min(delay, deadline - Date.now()));
348
381
  continue;
349
382
  }
@@ -389,11 +422,41 @@ export async function warmBackend(config, signer) {
389
422
  const response = await fetchWithTimeout(config.url.toString(), { method: 'POST', headers: signed.headers, body: listBody }, Math.min(config.timeoutMs, Math.max(1000, deadline - Date.now())));
390
423
  if (response.ok) {
391
424
  const body = await response.text();
392
- const parsed = JSON.parse(body);
393
- if (parsed.result) {
425
+ const ct = response.headers.get('content-type') ?? '';
426
+ let parsed = null;
427
+ if (ct.includes('text/event-stream')) {
428
+ // Extract the first data: line from the SSE stream
429
+ const dataLine = body.split('\n').find((l) => l.startsWith('data: '));
430
+ if (dataLine) {
431
+ try {
432
+ parsed = JSON.parse(dataLine.slice(6).trim());
433
+ }
434
+ catch {
435
+ log('WARNING', `warm: ${method} SSE data line is not valid JSON`);
436
+ }
437
+ }
438
+ else {
439
+ log('WARNING', `warm: ${method} returned SSE but contained no data: lines`);
440
+ }
441
+ }
442
+ else {
443
+ try {
444
+ parsed = JSON.parse(body);
445
+ }
446
+ catch {
447
+ log('WARNING', `warm: ${method} response is not valid JSON (content-type: ${ct})`);
448
+ }
449
+ }
450
+ if (parsed?.result) {
394
451
  state.cache[method] = parsed.result;
395
452
  log('DEBUG', `warm: cached ${method} (${JSON.stringify(parsed.result).length} bytes)`);
396
453
  }
454
+ else if (parsed) {
455
+ log('WARNING', `warm: ${method} response had no .result field`);
456
+ }
457
+ }
458
+ else {
459
+ log('WARNING', `warm: prefetch ${method} failed with HTTP ${response.status}`);
397
460
  }
398
461
  }
399
462
  catch (err) {
@@ -437,33 +500,62 @@ export function tryWarmResponse(body, requestId, warmState) {
437
500
  // Claude Code sends this too, but in warm mode we intercepted initialize so we handle it.
438
501
  return true;
439
502
  }
503
+ /**
504
+ * Handle a single parsed JSON-RPC message in warm mode.
505
+ * Returns true if the message was served locally (no need to forward to backend).
506
+ */
507
+ export async function handleWarmLine(input, ws) {
508
+ const method = JSON.parse(input.body).method;
509
+ if (method === 'initialize') {
510
+ // Respond IMMEDIATELY — never block on ws.ready here.
511
+ // This is the critical path: Claude Code's 30s timeout applies to this response.
512
+ const result = ws.cache.initialize ?? syntheticInitializeResult();
513
+ process.stdout.write(JSON.stringify({ jsonrpc: '2.0', id: input.requestId, result }) + '\n');
514
+ log('DEBUG', `warm: served initialize ${ws.cache.initialize ? 'from cache' : 'synthetic'}`);
515
+ return true;
516
+ }
517
+ if (WARM_CACHEABLE.has(method)) {
518
+ // List methods: serve from cache immediately if available…
519
+ if (tryWarmResponse(input.body, input.requestId, ws))
520
+ return true;
521
+ // …otherwise wait for warm-up to complete, then try cache again before forwarding
522
+ log('INFO', `warm: cache miss for ${method}, waiting for warm-up to complete`);
523
+ const warmed = await ws.ready;
524
+ log('INFO', `warm: warm-up ${warmed ? 'succeeded' : 'failed'} — ${method} ${warmed ? 'served from cache' : 'forwarding to backend'}`);
525
+ if (tryWarmResponse(input.body, input.requestId, ws))
526
+ return true;
527
+ log('INFO', `warm: cache still empty for ${method}, forwarding to backend`);
528
+ }
529
+ // Non-cacheable methods (tools/call etc): return false to forward normally.
530
+ // fetchWithRetry handles any residual 424s if the backend isn't warm yet.
531
+ return false;
532
+ }
440
533
  // --- Main entry ---
441
534
  export function startProxy() {
442
535
  const config = validateEnv();
443
536
  const signer = createSigner(config);
444
537
  const rl = readline.createInterface({ input: process.stdin, terminal: false });
445
- // Start warm-up in background if enabled (non-blocking)
446
- let warmState = null;
447
- if (config.warm) {
448
- warmBackend(config, signer).then((state) => { warmState = state; });
449
- }
538
+ // Start warm-up immediately. warmBackend() has no awaits before returning the WarmState
539
+ // object, so this promise resolves in the next microtask — effectively instant. The
540
+ // warmState.ready promise inside it is what takes minutes to resolve.
541
+ const warmStateP = config.warm
542
+ ? warmBackend(config, signer)
543
+ : null;
450
544
  let pending = Promise.resolve();
451
545
  rl.on('line', (line) => {
452
546
  pending = pending.then(async () => {
453
- // In warm mode, try to serve from cache first
454
- if (warmState) {
547
+ if (warmStateP) {
455
548
  const input = parseInputLine(line);
456
549
  if (input) {
457
- // Wait for warm-up to finish before deciding
458
- const warmActive = await warmState.ready;
459
- if (warmActive && tryWarmResponse(input.body, input.requestId, warmState)) {
460
- return; // served from cache
461
- }
462
- // Fall through: warm mode inactive or method not cacheable — forward normally
550
+ const ws = await warmStateP; // resolves almost instantly
551
+ if (await handleWarmLine(input, ws))
552
+ return;
463
553
  }
464
554
  }
465
555
  await processLine(line, config, signer);
466
- }).catch(() => { });
556
+ }).catch((err) => {
557
+ log('ERROR', `unhandled error in line processing: ${err}`);
558
+ });
467
559
  });
468
560
  rl.on('close', async () => {
469
561
  log('INFO', 'stdin closed, draining in-flight requests');
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@deotio/mcp-sigv4-proxy",
3
- "version": "0.4.0",
3
+ "version": "0.4.2",
4
4
  "description": "stdio MCP proxy with AWS SigV4 signing — connect Claude Code to any IAM-authenticated MCP server using a named AWS profile",
5
5
  "type": "module",
6
6
  "bin": {
@@ -19,16 +19,16 @@
19
19
  "dependencies": {
20
20
  "@aws-sdk/credential-providers": "^3.0.0",
21
21
  "@aws-crypto/sha256-js": "^5.0.0",
22
- "@smithy/protocol-http": "^4.0.0",
22
+ "@smithy/protocol-http": "^5.3.12",
23
23
  "@smithy/signature-v4": "^4.0.0"
24
24
  },
25
25
  "devDependencies": {
26
- "@types/jest": "^29.0.0",
26
+ "@types/jest": "^30.0.0",
27
27
  "@types/node": "^22.0.0",
28
28
  "@typescript-eslint/eslint-plugin": "^7.0.0",
29
29
  "@typescript-eslint/parser": "^7.0.0",
30
30
  "eslint": "^8.0.0",
31
- "jest": "^29.0.0",
31
+ "jest": "^30.3.0",
32
32
  "prettier": "^3.0.0",
33
33
  "ts-jest": "^29.0.0",
34
34
  "typescript": "^5.0.0"