@nerviq/cli 1.0.1 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/bin/cli.js +170 -73
  2. package/package.json +1 -1
  3. package/src/activity.js +20 -0
  4. package/src/aider/domain-packs.js +27 -2
  5. package/src/aider/mcp-packs.js +231 -0
  6. package/src/aider/techniques.js +3210 -1397
  7. package/src/audit.js +257 -2
  8. package/src/catalog.js +18 -2
  9. package/src/codex/domain-packs.js +23 -1
  10. package/src/codex/mcp-packs.js +254 -0
  11. package/src/codex/techniques.js +4738 -3257
  12. package/src/copilot/domain-packs.js +23 -1
  13. package/src/copilot/mcp-packs.js +254 -0
  14. package/src/copilot/techniques.js +3433 -1936
  15. package/src/cursor/domain-packs.js +23 -1
  16. package/src/cursor/mcp-packs.js +257 -0
  17. package/src/cursor/techniques.js +3697 -1869
  18. package/src/deprecation.js +98 -0
  19. package/src/domain-pack-expansion.js +571 -0
  20. package/src/domain-packs.js +25 -2
  21. package/src/formatters/otel.js +151 -0
  22. package/src/gemini/domain-packs.js +23 -1
  23. package/src/gemini/mcp-packs.js +257 -0
  24. package/src/gemini/techniques.js +3734 -2238
  25. package/src/integrations.js +194 -0
  26. package/src/mcp-packs.js +233 -0
  27. package/src/opencode/domain-packs.js +23 -1
  28. package/src/opencode/mcp-packs.js +231 -0
  29. package/src/opencode/techniques.js +3500 -1687
  30. package/src/org.js +68 -0
  31. package/src/source-urls.js +410 -260
  32. package/src/stack-checks.js +565 -0
  33. package/src/supplemental-checks.js +767 -0
  34. package/src/techniques.js +2929 -1449
  35. package/src/telemetry.js +160 -0
  36. package/src/windsurf/domain-packs.js +23 -1
  37. package/src/windsurf/mcp-packs.js +257 -0
  38. package/src/windsurf/techniques.js +3647 -1834
  39. package/src/workspace.js +233 -0
@@ -1,4 +1,6 @@
1
- const DOMAIN_PACKS = [
1
+ const { buildAdditionalDomainPacks, detectAdditionalDomainPacks } = require('./domain-pack-expansion');
2
+
3
+ const BASE_DOMAIN_PACKS = [
2
4
  {
3
5
  key: 'baseline-general',
4
6
  label: 'Baseline General',
@@ -129,6 +131,13 @@ const DOMAIN_PACKS = [
129
131
  },
130
132
  ];
131
133
 
134
+ const DOMAIN_PACKS = [
135
+ ...BASE_DOMAIN_PACKS,
136
+ ...buildAdditionalDomainPacks('claude', {
137
+ existingKeys: new Set(BASE_DOMAIN_PACKS.map((pack) => pack.key)),
138
+ }),
139
+ ];
140
+
132
141
  function uniqueByKey(items) {
133
142
  const seen = new Set();
134
143
  const result = [];
@@ -169,8 +178,9 @@ function detectDomainPacks(ctx, stacks, assets = null) {
169
178
  ctx.files.includes('wrangler.toml') || ctx.files.includes('serverless.yml') || ctx.files.includes('serverless.yaml') ||
170
179
  ctx.files.includes('cdk.json') || ctx.hasDir('infra') || ctx.hasDir('deploy') || ctx.hasDir('helm');
171
180
  const isOss = !!ctx.fileContent('LICENSE') && pkg.private !== true;
181
+ const hasCi = ctx.hasDir('.github/workflows');
172
182
  const isEnterpriseGoverned = !!(assets && assets.permissions && assets.permissions.hasDenyRules) &&
173
- !!(assets && assets.files && assets.files.settings) && ctx.hasDir('.github/workflows');
183
+ !!(assets && assets.files && assets.files.settings) && hasCi;
174
184
 
175
185
  if (hasBackend) {
176
186
  addMatch('backend-api', [
@@ -348,6 +358,19 @@ function detectDomainPacks(ctx, stacks, assets = null) {
348
358
  ]);
349
359
  }
350
360
 
361
+ detectAdditionalDomainPacks({
362
+ ctx,
363
+ pkg,
364
+ deps,
365
+ stackKeys,
366
+ addMatch,
367
+ hasBackend,
368
+ hasFrontend,
369
+ hasInfra,
370
+ hasCi,
371
+ isEnterpriseGoverned,
372
+ });
373
+
351
374
  const deduped = uniqueByKey(matches);
352
375
  if (deduped.length === 0) {
353
376
  return [{
@@ -0,0 +1,151 @@
1
+ /**
2
+ * OpenTelemetry Metrics Formatter
3
+ *
4
+ * Converts a nerviq audit result into an OpenTelemetry-compatible
5
+ * metrics export format (OTLP JSON, metrics signal).
6
+ *
7
+ * Metrics emitted:
8
+ * nerviq.audit.score — gauge 0-100
9
+ * nerviq.audit.checks.passed — gauge count of passing checks
10
+ * nerviq.audit.checks.failed — gauge count of failing checks
11
+ * nerviq.audit.checks.total — gauge total checks evaluated
12
+ * nerviq.audit.duration_ms — gauge audit wall-clock time (if provided)
13
+ *
14
+ * Each metric is tagged with:
15
+ * platform — e.g. "claude", "codex", "cursor"
16
+ * version — nerviq package version
17
+ *
18
+ * Output conforms to OTLP ExportMetricsServiceRequest JSON structure
19
+ * (opentelemetry-proto/collector/metrics/v1).
20
+ */
21
+
22
+ 'use strict';
23
+
24
+ const { version: nerviqVersion } = require('../../package.json');
25
+
26
+ // ─── Helpers ─────────────────────────────────────────────────────────────────
27
+
28
+ function unixNanoNow() {
29
+ // Returns current time as a bigint nanoseconds string for OTLP
30
+ return String(BigInt(Date.now()) * 1_000_000n);
31
+ }
32
+
33
+ function makeResource(platform) {
34
+ return {
35
+ attributes: [
36
+ { key: 'service.name', value: { stringValue: 'nerviq' } },
37
+ { key: 'service.version', value: { stringValue: nerviqVersion } },
38
+ { key: 'nerviq.platform', value: { stringValue: platform } },
39
+ ],
40
+ droppedAttributesCount: 0,
41
+ };
42
+ }
43
+
44
+ function makeGauge(name, description, unit, value, attributes = [], timeUnixNano) {
45
+ if (value === null || value === undefined) return null;
46
+ return {
47
+ name,
48
+ description,
49
+ unit,
50
+ gauge: {
51
+ dataPoints: [
52
+ {
53
+ attributes,
54
+ startTimeUnixNano: timeUnixNano,
55
+ timeUnixNano,
56
+ asDouble: Number(value),
57
+ },
58
+ ],
59
+ },
60
+ };
61
+ }
62
+
63
+ // ─── Main formatter ───────────────────────────────────────────────────────────
64
+
65
+ /**
66
+ * Convert a nerviq audit result to an OTLP-compatible metrics payload.
67
+ *
68
+ * @param {object} auditResult — result from audit()
69
+ * @param {number} [auditResult.score]
70
+ * @param {number} [auditResult.passed]
71
+ * @param {number} [auditResult.failed]
72
+ * @param {number} [auditResult.total]
73
+ * @param {string} [auditResult.platform]
74
+ * @param {number} [auditResult.durationMs] — optional, set by caller
75
+ * @returns {object} OTLP ExportMetricsServiceRequest JSON
76
+ */
77
+ function formatOtelMetrics(auditResult) {
78
+ const platform = auditResult.platform || 'claude';
79
+ const now = unixNanoNow();
80
+
81
+ const sharedAttributes = [
82
+ { key: 'nerviq.platform', value: { stringValue: platform } },
83
+ { key: 'nerviq.version', value: { stringValue: nerviqVersion } },
84
+ ];
85
+
86
+ const metrics = [
87
+ makeGauge(
88
+ 'nerviq.audit.score',
89
+ 'Nerviq audit score (0-100)',
90
+ '1',
91
+ auditResult.score,
92
+ sharedAttributes,
93
+ now,
94
+ ),
95
+ makeGauge(
96
+ 'nerviq.audit.checks.passed',
97
+ 'Number of checks that passed',
98
+ '{checks}',
99
+ auditResult.passed,
100
+ sharedAttributes,
101
+ now,
102
+ ),
103
+ makeGauge(
104
+ 'nerviq.audit.checks.failed',
105
+ 'Number of checks that failed',
106
+ '{checks}',
107
+ auditResult.failed,
108
+ sharedAttributes,
109
+ now,
110
+ ),
111
+ makeGauge(
112
+ 'nerviq.audit.checks.total',
113
+ 'Total number of checks evaluated',
114
+ '{checks}',
115
+ auditResult.total ?? ((auditResult.passed || 0) + (auditResult.failed || 0)),
116
+ sharedAttributes,
117
+ now,
118
+ ),
119
+ ].filter(Boolean);
120
+
121
+ if (auditResult.durationMs != null) {
122
+ const dm = makeGauge(
123
+ 'nerviq.audit.duration_ms',
124
+ 'Audit wall-clock duration in milliseconds',
125
+ 'ms',
126
+ auditResult.durationMs,
127
+ sharedAttributes,
128
+ now,
129
+ );
130
+ if (dm) metrics.push(dm);
131
+ }
132
+
133
+ return {
134
+ resourceMetrics: [
135
+ {
136
+ resource: makeResource(platform),
137
+ scopeMetrics: [
138
+ {
139
+ scope: {
140
+ name: 'nerviq',
141
+ version: nerviqVersion,
142
+ },
143
+ metrics,
144
+ },
145
+ ],
146
+ },
147
+ ],
148
+ };
149
+ }
150
+
151
+ module.exports = { formatOtelMetrics };
@@ -1,4 +1,6 @@
1
- const GEMINI_DOMAIN_PACKS = [
1
+ const { buildAdditionalDomainPacks, detectAdditionalDomainPacks } = require('../domain-pack-expansion');
2
+
3
+ const BASE_GEMINI_DOMAIN_PACKS = [
2
4
  {
3
5
  key: 'baseline-general',
4
6
  label: 'Baseline General',
@@ -161,6 +163,13 @@ const GEMINI_DOMAIN_PACKS = [
161
163
  },
162
164
  ];
163
165
 
166
+ const GEMINI_DOMAIN_PACKS = [
167
+ ...BASE_GEMINI_DOMAIN_PACKS,
168
+ ...buildAdditionalDomainPacks('gemini', {
169
+ existingKeys: new Set(BASE_GEMINI_DOMAIN_PACKS.map((pack) => pack.key)),
170
+ }),
171
+ ];
172
+
164
173
  function uniqueByKey(items) {
165
174
  const seen = new Set();
166
175
  const result = [];
@@ -353,6 +362,19 @@ function detectGeminiDomainPacks(ctx, stacks = [], assets = {}) {
353
362
  ]);
354
363
  }
355
364
 
365
+ detectAdditionalDomainPacks({
366
+ ctx,
367
+ pkg,
368
+ deps,
369
+ stackKeys,
370
+ addMatch,
371
+ hasBackend,
372
+ hasFrontend,
373
+ hasInfra,
374
+ hasCi,
375
+ isEnterpriseGoverned,
376
+ });
377
+
356
378
  if (matches.length === 0) {
357
379
  addMatch('baseline-general', [
358
380
  'No stronger platform-specific domain dominated, so a safe general Gemini CLI baseline is the best starting point.',
@@ -379,6 +379,237 @@ const GEMINI_MCP_PACKS = [
379
379
  jsonProjection: { command: 'npx', args: ['-y', 'huggingface-mcp-server'], env: { HF_TOKEN: '${HF_TOKEN}' } },
380
380
  excludeTools: [],
381
381
  },
382
+ // ── 23 new packs ─────────────────────────────────────────────────────────
383
+ {
384
+ key: 'supabase-mcp', label: 'Supabase',
385
+ description: 'Database, auth, and storage for Supabase.',
386
+ useWhen: 'Repos using Supabase.',
387
+ adoption: 'Requires: SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY.',
388
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['SUPABASE_URL', 'SUPABASE_SERVICE_ROLE_KEY'],
389
+ serverName: 'supabase',
390
+ jsonProjection: { command: 'npx', args: ['-y', '@supabase/mcp-server-supabase@latest'], env: { SUPABASE_URL: '${SUPABASE_URL}', SUPABASE_SERVICE_ROLE_KEY: '${SUPABASE_SERVICE_ROLE_KEY}' } },
391
+ excludeTools: ['delete_project', 'drop_table'],
392
+ },
393
+ {
394
+ key: 'prisma-mcp', label: 'Prisma ORM',
395
+ description: 'Schema inspection and migrations via Prisma.',
396
+ useWhen: 'Repos with a Prisma schema.',
397
+ adoption: 'Requires: DATABASE_URL.',
398
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['DATABASE_URL'],
399
+ serverName: 'prisma',
400
+ jsonProjection: { command: 'npx', args: ['-y', 'prisma-mcp-server@latest'], env: { DATABASE_URL: '${DATABASE_URL}' } },
401
+ excludeTools: ['drop_database'],
402
+ },
403
+ {
404
+ key: 'vercel-mcp', label: 'Vercel',
405
+ description: 'Deployment management via Vercel.',
406
+ useWhen: 'Repos deployed on Vercel.',
407
+ adoption: 'Requires: VERCEL_TOKEN.',
408
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['VERCEL_TOKEN'],
409
+ serverName: 'vercel',
410
+ jsonProjection: { command: 'npx', args: ['-y', '@vercel/mcp-server@latest'], env: { VERCEL_TOKEN: '${VERCEL_TOKEN}' } },
411
+ excludeTools: ['delete_project', 'delete_deployment'],
412
+ },
413
+ {
414
+ key: 'cloudflare-mcp', label: 'Cloudflare',
415
+ description: 'Workers, KV, R2, and D1 management.',
416
+ useWhen: 'Repos using Cloudflare edge.',
417
+ adoption: 'Requires: CLOUDFLARE_API_TOKEN.',
418
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['CLOUDFLARE_API_TOKEN'],
419
+ serverName: 'cloudflare',
420
+ jsonProjection: { command: 'npx', args: ['-y', '@cloudflare/mcp-server-cloudflare@latest'], env: { CLOUDFLARE_API_TOKEN: '${CLOUDFLARE_API_TOKEN}' } },
421
+ excludeTools: ['delete_worker', 'purge_cache'],
422
+ },
423
+ {
424
+ key: 'aws-mcp', label: 'AWS',
425
+ description: 'S3, Lambda, DynamoDB access.',
426
+ useWhen: 'Repos using AWS.',
427
+ adoption: 'Requires: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION.',
428
+ trustLevel: 'low', transport: 'stdio', requiredAuth: ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_REGION'],
429
+ serverName: 'aws',
430
+ jsonProjection: { command: 'npx', args: ['-y', '@aws-samples/mcp-server-aws@latest'], env: { AWS_ACCESS_KEY_ID: '${AWS_ACCESS_KEY_ID}', AWS_SECRET_ACCESS_KEY: '${AWS_SECRET_ACCESS_KEY}', AWS_REGION: '${AWS_REGION}' } },
431
+ excludeTools: ['delete_stack', 'terminate_instances', 'delete_bucket'],
432
+ },
433
+ {
434
+ key: 'redis-mcp', label: 'Redis',
435
+ description: 'Cache and session management.',
436
+ useWhen: 'Repos using Redis.',
437
+ adoption: 'Requires: REDIS_URL.',
438
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['REDIS_URL'],
439
+ serverName: 'redis',
440
+ jsonProjection: { command: 'npx', args: ['-y', 'redis-mcp-server@latest'], env: { REDIS_URL: '${REDIS_URL}' } },
441
+ excludeTools: ['flushall', 'flushdb'],
442
+ },
443
+ {
444
+ key: 'mongodb-mcp', label: 'MongoDB',
445
+ description: 'Document database access.',
446
+ useWhen: 'Repos using MongoDB.',
447
+ adoption: 'Requires: MONGODB_URI.',
448
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['MONGODB_URI'],
449
+ serverName: 'mongodb',
450
+ jsonProjection: { command: 'npx', args: ['-y', '@mongodb-js/mongodb-mcp-server@latest'], env: { MONGODB_URI: '${MONGODB_URI}' } },
451
+ excludeTools: ['drop_collection', 'drop_database'],
452
+ },
453
+ {
454
+ key: 'twilio-mcp', label: 'Twilio',
455
+ description: 'SMS, voice, and messaging.',
456
+ useWhen: 'Repos using Twilio.',
457
+ adoption: 'Requires: TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN.',
458
+ trustLevel: 'low', transport: 'stdio', requiredAuth: ['TWILIO_ACCOUNT_SID', 'TWILIO_AUTH_TOKEN'],
459
+ serverName: 'twilio',
460
+ jsonProjection: { command: 'npx', args: ['-y', 'twilio-mcp-server@latest'], env: { TWILIO_ACCOUNT_SID: '${TWILIO_ACCOUNT_SID}', TWILIO_AUTH_TOKEN: '${TWILIO_AUTH_TOKEN}' } },
461
+ excludeTools: ['delete_message'],
462
+ },
463
+ {
464
+ key: 'sendgrid-mcp', label: 'SendGrid',
465
+ description: 'Transactional email delivery.',
466
+ useWhen: 'Repos using SendGrid.',
467
+ adoption: 'Requires: SENDGRID_API_KEY.',
468
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['SENDGRID_API_KEY'],
469
+ serverName: 'sendgrid',
470
+ jsonProjection: { command: 'npx', args: ['-y', 'sendgrid-mcp-server@latest'], env: { SENDGRID_API_KEY: '${SENDGRID_API_KEY}' } },
471
+ excludeTools: [],
472
+ },
473
+ {
474
+ key: 'algolia-mcp', label: 'Algolia Search',
475
+ description: 'Search indexing via Algolia.',
476
+ useWhen: 'Repos using Algolia.',
477
+ adoption: 'Requires: ALGOLIA_APP_ID, ALGOLIA_API_KEY.',
478
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['ALGOLIA_APP_ID', 'ALGOLIA_API_KEY'],
479
+ serverName: 'algolia',
480
+ jsonProjection: { command: 'npx', args: ['-y', 'algolia-mcp-server@latest'], env: { ALGOLIA_APP_ID: '${ALGOLIA_APP_ID}', ALGOLIA_API_KEY: '${ALGOLIA_API_KEY}' } },
481
+ excludeTools: ['delete_index'],
482
+ },
483
+ {
484
+ key: 'planetscale-mcp', label: 'PlanetScale',
485
+ description: 'Serverless MySQL via PlanetScale.',
486
+ useWhen: 'Repos on PlanetScale.',
487
+ adoption: 'Requires: PLANETSCALE_TOKEN.',
488
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['PLANETSCALE_TOKEN'],
489
+ serverName: 'planetscale',
490
+ jsonProjection: { command: 'npx', args: ['-y', 'planetscale-mcp-server@latest'], env: { PLANETSCALE_TOKEN: '${PLANETSCALE_TOKEN}' } },
491
+ excludeTools: ['delete_database'],
492
+ },
493
+ {
494
+ key: 'neon-mcp', label: 'Neon Serverless Postgres',
495
+ description: 'Serverless Postgres via Neon.',
496
+ useWhen: 'Repos using Neon.',
497
+ adoption: 'Requires: NEON_API_KEY.',
498
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['NEON_API_KEY'],
499
+ serverName: 'neon',
500
+ jsonProjection: { command: 'npx', args: ['-y', '@neondatabase/mcp-server-neon@latest'], env: { NEON_API_KEY: '${NEON_API_KEY}' } },
501
+ excludeTools: ['delete_project'],
502
+ },
503
+ {
504
+ key: 'turso-mcp', label: 'Turso Edge SQLite',
505
+ description: 'Edge SQLite via Turso.',
506
+ useWhen: 'Repos using Turso.',
507
+ adoption: 'Requires: TURSO_DATABASE_URL, TURSO_AUTH_TOKEN.',
508
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['TURSO_DATABASE_URL', 'TURSO_AUTH_TOKEN'],
509
+ serverName: 'turso',
510
+ jsonProjection: { command: 'npx', args: ['-y', 'turso-mcp-server@latest'], env: { TURSO_DATABASE_URL: '${TURSO_DATABASE_URL}', TURSO_AUTH_TOKEN: '${TURSO_AUTH_TOKEN}' } },
511
+ excludeTools: ['destroy_database'],
512
+ },
513
+ {
514
+ key: 'upstash-mcp', label: 'Upstash Redis+Kafka',
515
+ description: 'Serverless Redis and Kafka.',
516
+ useWhen: 'Repos using Upstash.',
517
+ adoption: 'Requires: UPSTASH_REDIS_REST_URL, UPSTASH_REDIS_REST_TOKEN.',
518
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['UPSTASH_REDIS_REST_URL', 'UPSTASH_REDIS_REST_TOKEN'],
519
+ serverName: 'upstash',
520
+ jsonProjection: { command: 'npx', args: ['-y', '@upstash/mcp-server@latest'], env: { UPSTASH_REDIS_REST_URL: '${UPSTASH_REDIS_REST_URL}', UPSTASH_REDIS_REST_TOKEN: '${UPSTASH_REDIS_REST_TOKEN}' } },
521
+ excludeTools: [],
522
+ },
523
+ {
524
+ key: 'convex-mcp', label: 'Convex',
525
+ description: 'Reactive backend via Convex.',
526
+ useWhen: 'Repos using Convex.',
527
+ adoption: 'Requires: CONVEX_DEPLOYMENT.',
528
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['CONVEX_DEPLOYMENT'],
529
+ serverName: 'convex',
530
+ jsonProjection: { command: 'npx', args: ['-y', '@convex-dev/mcp-server@latest'], env: { CONVEX_DEPLOYMENT: '${CONVEX_DEPLOYMENT}' } },
531
+ excludeTools: ['delete_deployment'],
532
+ },
533
+ {
534
+ key: 'clerk-mcp', label: 'Clerk Authentication',
535
+ description: 'User auth via Clerk.',
536
+ useWhen: 'Repos using Clerk.',
537
+ adoption: 'Requires: CLERK_SECRET_KEY.',
538
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['CLERK_SECRET_KEY'],
539
+ serverName: 'clerk',
540
+ jsonProjection: { command: 'npx', args: ['-y', '@clerk/mcp-server@latest'], env: { CLERK_SECRET_KEY: '${CLERK_SECRET_KEY}' } },
541
+ excludeTools: ['delete_user'],
542
+ },
543
+ {
544
+ key: 'resend-mcp', label: 'Resend Email',
545
+ description: 'Transactional email via Resend.',
546
+ useWhen: 'Repos using Resend.',
547
+ adoption: 'Requires: RESEND_API_KEY.',
548
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['RESEND_API_KEY'],
549
+ serverName: 'resend',
550
+ jsonProjection: { command: 'npx', args: ['-y', 'resend-mcp-server@latest'], env: { RESEND_API_KEY: '${RESEND_API_KEY}' } },
551
+ excludeTools: [],
552
+ },
553
+ {
554
+ key: 'temporal-mcp', label: 'Temporal Workflow',
555
+ description: 'Workflow orchestration via Temporal.',
556
+ useWhen: 'Repos using Temporal.',
557
+ adoption: 'Requires: TEMPORAL_ADDRESS.',
558
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['TEMPORAL_ADDRESS'],
559
+ serverName: 'temporal',
560
+ jsonProjection: { command: 'npx', args: ['-y', 'temporal-mcp-server@latest'], env: { TEMPORAL_ADDRESS: '${TEMPORAL_ADDRESS}' } },
561
+ excludeTools: ['terminate_workflow'],
562
+ },
563
+ {
564
+ key: 'launchdarkly-mcp', label: 'LaunchDarkly',
565
+ description: 'Feature flags via LaunchDarkly.',
566
+ useWhen: 'Repos using LaunchDarkly.',
567
+ adoption: 'Requires: LAUNCHDARKLY_ACCESS_TOKEN.',
568
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['LAUNCHDARKLY_ACCESS_TOKEN'],
569
+ serverName: 'launchdarkly',
570
+ jsonProjection: { command: 'npx', args: ['-y', 'launchdarkly-mcp-server@latest'], env: { LAUNCHDARKLY_ACCESS_TOKEN: '${LAUNCHDARKLY_ACCESS_TOKEN}' } },
571
+ excludeTools: ['delete_flag'],
572
+ },
573
+ {
574
+ key: 'datadog-mcp', label: 'Datadog',
575
+ description: 'Monitoring and APM via Datadog.',
576
+ useWhen: 'Repos using Datadog.',
577
+ adoption: 'Requires: DATADOG_API_KEY, DATADOG_APP_KEY.',
578
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['DATADOG_API_KEY', 'DATADOG_APP_KEY'],
579
+ serverName: 'datadog',
580
+ jsonProjection: { command: 'npx', args: ['-y', '@datadog/mcp-server@latest'], env: { DATADOG_API_KEY: '${DATADOG_API_KEY}', DATADOG_APP_KEY: '${DATADOG_APP_KEY}' } },
581
+ excludeTools: ['delete_monitor'],
582
+ },
583
+ {
584
+ key: 'grafana-mcp', label: 'Grafana',
585
+ description: 'Dashboards via Grafana.',
586
+ useWhen: 'Repos using Grafana.',
587
+ adoption: 'Requires: GRAFANA_URL, GRAFANA_API_KEY.',
588
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['GRAFANA_URL', 'GRAFANA_API_KEY'],
589
+ serverName: 'grafana',
590
+ jsonProjection: { command: 'npx', args: ['-y', 'grafana-mcp-server@latest'], env: { GRAFANA_URL: '${GRAFANA_URL}', GRAFANA_API_KEY: '${GRAFANA_API_KEY}' } },
591
+ excludeTools: ['delete_dashboard'],
592
+ },
593
+ {
594
+ key: 'circleci-mcp', label: 'CircleCI',
595
+ description: 'CI/CD via CircleCI.',
596
+ useWhen: 'Repos using CircleCI.',
597
+ adoption: 'Requires: CIRCLECI_TOKEN.',
598
+ trustLevel: 'medium', transport: 'stdio', requiredAuth: ['CIRCLECI_TOKEN'],
599
+ serverName: 'circleci',
600
+ jsonProjection: { command: 'npx', args: ['-y', 'circleci-mcp-server@latest'], env: { CIRCLECI_TOKEN: '${CIRCLECI_TOKEN}' } },
601
+ excludeTools: ['cancel_pipeline'],
602
+ },
603
+ {
604
+ key: 'anthropic-mcp', label: 'Anthropic Claude API',
605
+ description: 'Claude API for AI-powered apps.',
606
+ useWhen: 'Repos building on Claude API.',
607
+ adoption: 'Requires: ANTHROPIC_API_KEY.',
608
+ trustLevel: 'high', transport: 'stdio', requiredAuth: ['ANTHROPIC_API_KEY'],
609
+ serverName: 'anthropic',
610
+ jsonProjection: { command: 'npx', args: ['-y', '@anthropic-ai/mcp-server@latest'], env: { ANTHROPIC_API_KEY: '${ANTHROPIC_API_KEY}' } },
611
+ excludeTools: [],
612
+ },
382
613
  ];
383
614
 
384
615
  // --- Helpers ---
@@ -573,6 +804,32 @@ function recommendGeminiMcpPacks(stacks = [], domainPacks = [], options = {}) {
573
804
  }
574
805
 
575
806
  // Note: Gemini CLI has built-in web search, so no separate search MCP pack is needed.
807
+ // ── 23 new packs recommendation logic ────────────────────────────────────
808
+ if (ctx) {
809
+ if (hasDependency(deps, '@supabase/supabase-js') || hasDependency(deps, '@supabase/auth-helpers-nextjs') || hasFileContentMatch(ctx, '.env', /SUPABASE/i) || hasFileContentMatch(ctx, '.env.example', /SUPABASE/i)) recommended.add('supabase-mcp');
810
+ if (hasFileContentMatch(ctx, 'schema.prisma', /\S/) || hasDependency(deps, '@prisma/client') || hasDependency(deps, 'prisma')) recommended.add('prisma-mcp');
811
+ if (ctx.files.includes('vercel.json') || hasFileContentMatch(ctx, 'package.json', /"deploy":\s*"vercel/i) || hasFileContentMatch(ctx, '.env', /VERCEL_TOKEN/i)) recommended.add('vercel-mcp');
812
+ if (hasFileContentMatch(ctx, 'wrangler.toml', /\S/) || hasDependency(deps, 'wrangler') || hasFileContentMatch(ctx, '.env', /CLOUDFLARE/i)) recommended.add('cloudflare-mcp');
813
+ if (hasFileContentMatch(ctx, '.env', /AWS_ACCESS_KEY/i) || ctx.files.some(f => /serverless\.yml|template\.ya?ml|cdk\.json/.test(f))) recommended.add('aws-mcp');
814
+ if (hasDependency(deps, 'redis') || hasDependency(deps, 'ioredis') || hasDependency(deps, '@redis/client') || hasFileContentMatch(ctx, '.env', /REDIS_URL/i)) recommended.add('redis-mcp');
815
+ if (hasDependency(deps, 'mongoose') || hasDependency(deps, 'mongodb') || hasFileContentMatch(ctx, '.env', /MONGODB_URI/i)) recommended.add('mongodb-mcp');
816
+ if (hasDependency(deps, 'twilio') || hasFileContentMatch(ctx, '.env', /TWILIO_/i)) recommended.add('twilio-mcp');
817
+ if (hasDependency(deps, '@sendgrid/mail') || hasFileContentMatch(ctx, '.env', /SENDGRID_API_KEY/i)) recommended.add('sendgrid-mcp');
818
+ if (hasDependency(deps, 'algoliasearch') || hasDependency(deps, '@algolia/client-search') || hasFileContentMatch(ctx, '.env', /ALGOLIA_/i)) recommended.add('algolia-mcp');
819
+ if (hasFileContentMatch(ctx, '.env', /PLANETSCALE_TOKEN/i)) recommended.add('planetscale-mcp');
820
+ if (hasDependency(deps, '@neondatabase/serverless') || hasFileContentMatch(ctx, '.env', /NEON_/i)) recommended.add('neon-mcp');
821
+ if (hasDependency(deps, '@libsql/client') || hasFileContentMatch(ctx, '.env', /TURSO_/i)) recommended.add('turso-mcp');
822
+ if (hasDependency(deps, '@upstash/redis') || hasDependency(deps, '@upstash/kafka') || hasFileContentMatch(ctx, '.env', /UPSTASH_/i)) recommended.add('upstash-mcp');
823
+ if (hasDependency(deps, 'convex') || hasFileContentMatch(ctx, 'convex.json', /\S/) || hasFileContentMatch(ctx, '.env', /CONVEX_/i)) recommended.add('convex-mcp');
824
+ if (hasDependency(deps, '@clerk/nextjs') || hasDependency(deps, '@clerk/backend') || hasFileContentMatch(ctx, '.env', /CLERK_/i)) recommended.add('clerk-mcp');
825
+ if (hasDependency(deps, 'resend') || hasFileContentMatch(ctx, '.env', /RESEND_API_KEY/i)) recommended.add('resend-mcp');
826
+ if (hasDependency(deps, '@temporalio/client') || hasFileContentMatch(ctx, '.env', /TEMPORAL_/i)) recommended.add('temporal-mcp');
827
+ if (hasDependency(deps, '@launchdarkly/node-server-sdk') || hasFileContentMatch(ctx, '.env', /LAUNCHDARKLY_/i)) recommended.add('launchdarkly-mcp');
828
+ if (hasDependency(deps, 'dd-trace') || hasFileContentMatch(ctx, '.env', /DATADOG_/i)) recommended.add('datadog-mcp');
829
+ if (hasFileContentMatch(ctx, 'docker-compose.yml', /grafana/i) || hasFileContentMatch(ctx, '.env', /GRAFANA_/i)) recommended.add('grafana-mcp');
830
+ if (ctx.files.some(f => /\.circleci\/config/.test(f)) || hasFileContentMatch(ctx, '.env', /CIRCLECI_/i)) recommended.add('circleci-mcp');
831
+ if (hasDependency(deps, '@anthropic-ai/sdk') || hasDependency(deps, 'anthropic') || hasFileContentMatch(ctx, '.env', /ANTHROPIC_API_KEY/i)) recommended.add('anthropic-mcp');
832
+ }
576
833
 
577
834
  return GEMINI_MCP_PACKS
578
835
  .filter(pack => recommended.has(pack.key))