@schemashift/core 0.8.0 → 0.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -20,24 +20,36 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // src/index.ts
  var index_exports = {};
  __export(index_exports, {
+ BehavioralWarningAnalyzer: () => BehavioralWarningAnalyzer,
+ BundleEstimator: () => BundleEstimator,
  CompatibilityAnalyzer: () => CompatibilityAnalyzer,
  ComplexityEstimator: () => ComplexityEstimator,
  DetailedAnalyzer: () => DetailedAnalyzer,
+ DriftDetector: () => DriftDetector,
  EcosystemAnalyzer: () => EcosystemAnalyzer,
  FormResolverMigrator: () => FormResolverMigrator,
+ GOVERNANCE_TEMPLATES: () => GOVERNANCE_TEMPLATES,
  GovernanceEngine: () => GovernanceEngine,
  IncrementalTracker: () => IncrementalTracker,
+ MigrationAuditLog: () => MigrationAuditLog,
  MigrationChain: () => MigrationChain,
  MonorepoResolver: () => MonorepoResolver,
  PackageUpdater: () => PackageUpdater,
+ PerformanceAnalyzer: () => PerformanceAnalyzer,
  PluginLoader: () => PluginLoader,
  SchemaAnalyzer: () => SchemaAnalyzer,
  SchemaDependencyResolver: () => SchemaDependencyResolver,
+ TestScaffolder: () => TestScaffolder,
  TransformEngine: () => TransformEngine,
+ TypeDedupDetector: () => TypeDedupDetector,
  buildCallChain: () => buildCallChain,
+ computeParallelBatches: () => computeParallelBatches,
  detectFormLibraries: () => detectFormLibraries,
  detectSchemaLibrary: () => detectSchemaLibrary,
  detectStandardSchema: () => detectStandardSchema,
+ getGovernanceTemplate: () => getGovernanceTemplate,
+ getGovernanceTemplateNames: () => getGovernanceTemplateNames,
+ getGovernanceTemplatesByCategory: () => getGovernanceTemplatesByCategory,
  isInsideComment: () => isInsideComment,
  isInsideStringLiteral: () => isInsideStringLiteral,
  loadConfig: () => loadConfig,
@@ -61,6 +73,9 @@ var LIBRARY_PATTERNS = {
  joi: [/^joi$/, /^@hapi\/joi$/],
  "io-ts": [/^io-ts$/, /^io-ts\//],
  valibot: [/^valibot$/],
+ arktype: [/^arktype$/],
+ superstruct: [/^superstruct$/],
+ effect: [/^@effect\/schema$/],
  v4: [],
  // Target version, not detectable from imports
  unknown: []
@@ -315,6 +330,521 @@ function transformMethodChain(chain, newBase, factoryMapper, methodMapper) {
315
330
  return buildCallChain(newBase, factory.name, factory.args, mappedMethods);
316
331
  }
317
332
 
333
+ // src/audit-log.ts
334
+ var import_node_crypto = require("crypto");
335
+ var import_node_fs = require("fs");
336
+ var import_node_path = require("path");
337
+ var AUDIT_DIR = ".schemashift";
338
+ var AUDIT_FILE = "audit-log.json";
339
+ var AUDIT_VERSION = 1;
340
+ var MigrationAuditLog = class {
341
+ logDir;
342
+ logPath;
343
+ constructor(projectPath) {
344
+ this.logDir = (0, import_node_path.join)(projectPath, AUDIT_DIR);
345
+ this.logPath = (0, import_node_path.join)(this.logDir, AUDIT_FILE);
346
+ }
347
+ /**
348
+ * Append a new entry to the audit log.
349
+ */
350
+ append(entry) {
351
+ const log = this.read();
352
+ log.entries.push(entry);
353
+ this.write(log);
354
+ }
355
+ /**
356
+ * Create an audit entry for a file transformation.
357
+ */
358
+ createEntry(params) {
359
+ return {
360
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
361
+ migrationId: params.migrationId,
362
+ filePath: params.filePath,
363
+ action: "transform",
364
+ from: params.from,
365
+ to: params.to,
366
+ success: params.success,
367
+ beforeHash: this.hashContent(params.originalCode),
368
+ afterHash: params.transformedCode ? this.hashContent(params.transformedCode) : void 0,
369
+ warningCount: params.warningCount,
370
+ errorCount: params.errorCount,
371
+ riskScore: params.riskScore,
372
+ duration: params.duration,
373
+ user: this.getCurrentUser(),
374
+ metadata: params.metadata || this.collectMetadata()
375
+ };
376
+ }
377
+ /**
378
+ * Read the current audit log.
379
+ */
380
+ read() {
381
+ if (!(0, import_node_fs.existsSync)(this.logPath)) {
382
+ return { version: AUDIT_VERSION, entries: [] };
383
+ }
384
+ try {
385
+ const content = (0, import_node_fs.readFileSync)(this.logPath, "utf-8");
386
+ if (!content.trim()) {
387
+ return { version: AUDIT_VERSION, entries: [] };
388
+ }
389
+ return JSON.parse(content);
390
+ } catch {
391
+ return { version: AUDIT_VERSION, entries: [] };
392
+ }
393
+ }
394
+ /**
395
+ * Get entries for a specific migration.
396
+ */
397
+ getByMigration(migrationId) {
398
+ const log = this.read();
399
+ return log.entries.filter((e) => e.migrationId === migrationId);
400
+ }
401
+ /**
402
+ * Get summary statistics for the audit log.
403
+ */
404
+ getSummary() {
405
+ const log = this.read();
406
+ const migrationIds = new Set(log.entries.map((e) => e.migrationId));
407
+ const migrationPaths = [...new Set(log.entries.map((e) => `${e.from}->${e.to}`))];
408
+ return {
409
+ totalMigrations: migrationIds.size,
410
+ totalFiles: log.entries.length,
411
+ successCount: log.entries.filter((e) => e.success).length,
412
+ failureCount: log.entries.filter((e) => !e.success).length,
413
+ migrationPaths
414
+ };
415
+ }
416
+ /**
417
+ * Export audit log as JSON string.
418
+ */
419
+ exportJson() {
420
+ const log = this.read();
421
+ return JSON.stringify(log, null, 2);
422
+ }
423
+ /**
424
+ * Export audit log as CSV string.
425
+ */
426
+ exportCsv() {
427
+ const log = this.read();
428
+ const headers = [
429
+ "timestamp",
430
+ "migrationId",
431
+ "filePath",
432
+ "action",
433
+ "from",
434
+ "to",
435
+ "success",
436
+ "warningCount",
437
+ "errorCount",
438
+ "riskScore",
439
+ "user",
440
+ "duration"
441
+ ];
442
+ const rows = log.entries.map(
443
+ (e) => headers.map((h) => {
444
+ const val = e[h];
445
+ if (val === void 0 || val === null) return "";
446
+ return String(val).includes(",") ? `"${String(val)}"` : String(val);
447
+ }).join(",")
448
+ );
449
+ return [headers.join(","), ...rows].join("\n");
450
+ }
451
+ /**
452
+ * Get entries filtered by date range.
453
+ */
454
+ getByDateRange(start, end) {
455
+ const log = this.read();
456
+ return log.entries.filter((e) => {
457
+ const ts = new Date(e.timestamp);
458
+ return ts >= start && ts <= end;
459
+ });
460
+ }
461
+ /**
462
+ * Clear the audit log.
463
+ */
464
+ clear() {
465
+ this.write({ version: AUDIT_VERSION, entries: [] });
466
+ }
467
+ collectMetadata() {
468
+ return {
469
+ hostname: process.env.HOSTNAME || void 0,
470
+ nodeVersion: process.version,
471
+ ciJobId: process.env.CI_JOB_ID || process.env.GITHUB_RUN_ID || void 0,
472
+ ciProvider: process.env.GITHUB_ACTIONS ? "github" : process.env.GITLAB_CI ? "gitlab" : process.env.CIRCLECI ? "circleci" : process.env.JENKINS_URL ? "jenkins" : void 0,
473
+ gitBranch: process.env.GITHUB_REF_NAME || process.env.CI_COMMIT_BRANCH || void 0,
474
+ gitCommit: process.env.GITHUB_SHA || process.env.CI_COMMIT_SHA || void 0
475
+ };
476
+ }
477
+ write(log) {
478
+ if (!(0, import_node_fs.existsSync)(this.logDir)) {
479
+ (0, import_node_fs.mkdirSync)(this.logDir, { recursive: true });
480
+ }
481
+ (0, import_node_fs.writeFileSync)(this.logPath, JSON.stringify(log, null, 2));
482
+ }
483
+ hashContent(content) {
484
+ return (0, import_node_crypto.createHash)("sha256").update(content).digest("hex").substring(0, 16);
485
+ }
486
+ getCurrentUser() {
487
+ return process.env.USER || process.env.USERNAME || void 0;
488
+ }
489
+ };
490
+
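The MigrationAuditLog above persists entries to `.schemashift/audit-log.json` and hashes file contents before and after a transform. A minimal usage sketch, assuming the entry fields read by createEntry() in this diff; the migration id, file path, and code strings are illustrative:

```ts
import { MigrationAuditLog } from "@schemashift/core";

const audit = new MigrationAuditLog(process.cwd());

// Field names mirror the params consumed by createEntry(); the values are made up.
const entry = audit.createEntry({
  migrationId: "yup-to-zod-2024",
  filePath: "src/schemas/user.ts",
  from: "yup",
  to: "zod",
  success: true,
  originalCode: "const user = yup.object({ age: yup.number() });",
  transformedCode: "const user = z.object({ age: z.coerce.number() });",
  warningCount: 1,
  errorCount: 0,
  riskScore: 12,
  duration: 85,
});
audit.append(entry);

console.log(audit.getSummary()); // { totalMigrations, totalFiles, successCount, failureCount, migrationPaths }
console.log(audit.exportCsv());  // header row plus one line per entry
```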
491
+ // src/behavioral-warnings.ts
492
+ var BEHAVIORAL_RULES = [
493
+ // Yup -> Zod: Type coercion differences
494
+ {
495
+ category: "type-coercion",
496
+ migrations: ["yup->zod"],
497
+ detect: (text, filePath) => {
498
+ const warnings = [];
499
+ if (/yup\.(number|date)\s*\(\)/.test(text)) {
500
+ warnings.push({
501
+ category: "type-coercion",
502
+ message: "Yup silently coerces types; Zod rejects mismatches.",
503
+ detail: `Yup's number() accepts strings like "42" and coerces them. Zod's number() rejects strings. Use z.coerce.number() for equivalent behavior, especially for HTML form inputs which always return strings.`,
504
+ filePath,
505
+ severity: "warning",
506
+ migration: "yup->zod"
507
+ });
508
+ }
509
+ return warnings;
510
+ }
511
+ },
512
+ // Yup -> Zod: Form input string values
513
+ {
514
+ category: "form-input",
515
+ migrations: ["yup->zod"],
516
+ detect: (text, filePath) => {
517
+ const warnings = [];
518
+ const hasFormImport = /yupResolver|useFormik|from\s+['"]formik['"]|from\s+['"]@hookform/.test(
519
+ text
520
+ );
521
+ const hasNumberOrDate = /yup\.(number|date)\s*\(\)/.test(text);
522
+ if (hasFormImport && hasNumberOrDate) {
523
+ warnings.push({
524
+ category: "form-input",
525
+ message: "HTML inputs return strings \u2014 Zod will reject unless using z.coerce.*",
526
+ detail: 'HTML <input type="number"> returns strings. Yup coerces automatically, but Zod requires explicit coercion. Use z.coerce.number() or register({ valueAsNumber: true }) in React Hook Form.',
527
+ filePath,
528
+ severity: "error",
529
+ migration: "yup->zod"
530
+ });
531
+ }
532
+ return warnings;
533
+ }
534
+ },
535
+ // Joi -> Zod: Error handling paradigm shift
536
+ {
537
+ category: "error-handling",
538
+ migrations: ["joi->zod"],
539
+ detect: (text, filePath) => {
540
+ const warnings = [];
541
+ if (/\.validate\s*\(/.test(text) && /[Jj]oi/.test(text)) {
542
+ warnings.push({
543
+ category: "error-handling",
544
+ message: "Joi .validate() returns { value, error }; Zod .parse() throws.",
545
+ detail: "Joi uses an inspection pattern: .validate() returns an object with value and error. Zod .parse() throws a ZodError on failure. Use .safeParse() for a non-throwing equivalent that returns { success, data, error }.",
546
+ filePath,
547
+ severity: "warning",
548
+ migration: "joi->zod"
549
+ });
550
+ }
551
+ return warnings;
552
+ }
553
+ },
554
+ // Joi -> Zod: Null handling differences
555
+ {
556
+ category: "null-handling",
557
+ migrations: ["joi->zod"],
558
+ detect: (text, filePath) => {
559
+ const warnings = [];
560
+ if (/\.allow\s*\(\s*null\s*\)/.test(text)) {
561
+ warnings.push({
562
+ category: "null-handling",
563
+ message: "Joi .allow(null) vs Zod .nullable() have subtle differences.",
564
+ detail: 'Joi .allow(null) permits null alongside the base type. Zod .nullable() wraps the type in a union with null. Joi .allow("", null) has no single Zod equivalent \u2014 use z.union() or .transform().',
565
+ filePath,
566
+ severity: "info",
567
+ migration: "joi->zod"
568
+ });
569
+ }
570
+ return warnings;
571
+ }
572
+ },
573
+ // Zod v3 -> v4: Default value behavior change
574
+ {
575
+ category: "default-values",
576
+ migrations: ["zod-v3->v4"],
577
+ detect: (text, filePath) => {
578
+ const warnings = [];
579
+ if (/\.default\s*\(/.test(text) && /\.optional\s*\(\)/.test(text)) {
580
+ warnings.push({
581
+ category: "default-values",
582
+ message: ".default() + .optional() behavior changed silently in Zod v4.",
583
+ detail: "In Zod v3, .default(val).optional() returned undefined when property was missing. In Zod v4, it always returns the default value. This can cause unexpected behavior in API responses and form handling.",
584
+ filePath,
585
+ severity: "error",
586
+ migration: "zod-v3->v4"
587
+ });
588
+ }
589
+ if (/\.catch\s*\(/.test(text) && /\.optional\s*\(\)/.test(text)) {
590
+ warnings.push({
591
+ category: "default-values",
592
+ message: ".catch() + .optional() behavior changed in Zod v4.",
593
+ detail: "In Zod v4, object properties with .catch() that are .optional() now always return the caught value, even when the property is missing from input.",
594
+ filePath,
595
+ severity: "warning",
596
+ migration: "zod-v3->v4"
597
+ });
598
+ }
599
+ return warnings;
600
+ }
601
+ },
602
+ // Zod v3 -> v4: Error format differences
603
+ {
604
+ category: "error-format",
605
+ migrations: ["zod-v3->v4"],
606
+ detect: (text, filePath) => {
607
+ const warnings = [];
608
+ if (/ZodError/.test(text) && /instanceof\s+Error/.test(text)) {
609
+ warnings.push({
610
+ category: "error-format",
611
+ message: "ZodError no longer extends Error in Zod v4.",
612
+ detail: 'In Zod v4, ZodError no longer extends Error. Code using "instanceof Error" to catch ZodErrors will silently miss them. Use "instanceof ZodError" or z.isZodError() instead.',
613
+ filePath,
614
+ severity: "error",
615
+ migration: "zod-v3->v4"
616
+ });
617
+ }
618
+ return warnings;
619
+ }
620
+ },
621
+ // Zod v3 -> v4: Validation behavior differences
622
+ {
623
+ category: "validation-behavior",
624
+ migrations: ["zod-v3->v4"],
625
+ detect: (text, filePath) => {
626
+ const warnings = [];
627
+ if (/\.transform\s*\(/.test(text) && /\.refine\s*\(/.test(text)) {
628
+ warnings.push({
629
+ category: "validation-behavior",
630
+ message: ".transform() after .refine() behavior changed in Zod v4.",
631
+ detail: "In Zod v4, .transform() after .refine() may execute even if the refinement fails. Previously, transform was skipped on refinement failure.",
632
+ filePath,
633
+ severity: "warning",
634
+ migration: "zod-v3->v4"
635
+ });
636
+ }
637
+ return warnings;
638
+ }
639
+ },
640
+ // Zod -> Valibot: Error handling differences
641
+ {
642
+ category: "error-handling",
643
+ migrations: ["zod->valibot"],
644
+ detect: (text, filePath) => {
645
+ const warnings = [];
646
+ if (/\.parse\s*\(/.test(text) && /z\./.test(text)) {
647
+ warnings.push({
648
+ category: "error-handling",
649
+ message: "Zod .parse() throws ZodError; Valibot v.parse() throws ValiError.",
650
+ detail: "Error class and structure differ between Zod and Valibot. ZodError has .issues array; ValiError has .issues with different structure. Update all error handling code that inspects validation errors.",
651
+ filePath,
652
+ severity: "warning",
653
+ migration: "zod->valibot"
654
+ });
655
+ }
656
+ return warnings;
657
+ }
658
+ },
659
+ // io-ts -> Zod: Either monad vs throw/safeParse
660
+ {
661
+ category: "error-handling",
662
+ migrations: ["io-ts->zod"],
663
+ detect: (text, filePath) => {
664
+ const warnings = [];
665
+ if (/\bEither\b/.test(text) || /\b(fold|chain|map)\s*\(/.test(text)) {
666
+ warnings.push({
667
+ category: "error-handling",
668
+ message: "io-ts uses Either monad for errors; Zod uses throw/safeParse.",
669
+ detail: "io-ts returns Either<Errors, T> (Right for success, Left for failure). Zod .parse() throws, .safeParse() returns { success, data, error }. All fold/chain/map patterns over Either must be rewritten.",
670
+ filePath,
671
+ severity: "error",
672
+ migration: "io-ts->zod"
673
+ });
674
+ }
675
+ return warnings;
676
+ }
677
+ }
678
+ ];
679
+ var BehavioralWarningAnalyzer = class {
680
+ analyze(sourceFiles, from, to) {
681
+ const migration = `${from}->${to}`;
682
+ const warnings = [];
683
+ const applicableRules = BEHAVIORAL_RULES.filter((r) => r.migrations.includes(migration));
684
+ for (const sourceFile of sourceFiles) {
685
+ const filePath = sourceFile.getFilePath();
686
+ const text = sourceFile.getFullText();
687
+ const hasSourceLib = this.fileUsesLibrary(sourceFile, from);
688
+ if (!hasSourceLib) continue;
689
+ for (const rule of applicableRules) {
690
+ const ruleWarnings = rule.detect(text, filePath);
691
+ warnings.push(...ruleWarnings);
692
+ }
693
+ }
694
+ const summary = this.generateSummary(warnings, migration);
695
+ return { warnings, migrationPath: migration, summary };
696
+ }
697
+ fileUsesLibrary(sourceFile, library) {
698
+ for (const imp of sourceFile.getImportDeclarations()) {
699
+ const detected = detectSchemaLibrary(imp.getModuleSpecifierValue());
700
+ if (detected === library) return true;
701
+ if (library === "zod-v3" && detected === "zod") return true;
702
+ if (library === "zod" && detected === "zod") return true;
703
+ }
704
+ return false;
705
+ }
706
+ generateSummary(warnings, migration) {
707
+ if (warnings.length === 0) {
708
+ return `No behavioral differences detected for ${migration} migration.`;
709
+ }
710
+ const errorCount = warnings.filter((w) => w.severity === "error").length;
711
+ const warningCount = warnings.filter((w) => w.severity === "warning").length;
712
+ const infoCount = warnings.filter((w) => w.severity === "info").length;
713
+ const parts = [];
714
+ if (errorCount > 0) parts.push(`${errorCount} critical`);
715
+ if (warningCount > 0) parts.push(`${warningCount} warnings`);
716
+ if (infoCount > 0) parts.push(`${infoCount} info`);
717
+ return `Found ${warnings.length} behavioral difference(s) for ${migration}: ${parts.join(", ")}. Review before migrating.`;
718
+ }
719
+ };
720
+
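BEHAVIORAL_RULES are keyed by migration-path strings such as "yup->zod" and "zod-v3->v4"; BehavioralWarningAnalyzer.analyze() builds that key from its from/to arguments and runs only the matching rules over files that actually import the source library. A hedged sketch, assuming the source files come from a ts-morph Project as elsewhere in this package:

```ts
import { Project } from "ts-morph";
import { BehavioralWarningAnalyzer } from "@schemashift/core";

const project = new Project({ tsConfigFilePath: "tsconfig.json" });
const analyzer = new BehavioralWarningAnalyzer();

// "yup" + "zod" selects the rules tagged "yup->zod" above.
const report = analyzer.analyze(project.getSourceFiles("src/**/*.ts"), "yup", "zod");

console.log(report.summary);
for (const warning of report.warnings) {
  console.log(`[${warning.severity}] ${warning.filePath}: ${warning.message}`);
}
```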
721
+ // src/bundle-estimator.ts
722
+ var LIBRARY_SIZES = {
723
+ zod: { fullKb: 14, baseKb: 14, treeShakable: false },
724
+ "zod-v3": { fullKb: 14, baseKb: 14, treeShakable: false },
725
+ v4: { fullKb: 17.7, baseKb: 17.7, treeShakable: false },
726
+ "zod-v4": { fullKb: 17.7, baseKb: 17.7, treeShakable: false },
727
+ "zod-mini": { fullKb: 7.5, baseKb: 3.5, treeShakable: true },
728
+ yup: { fullKb: 13.6, baseKb: 13.6, treeShakable: false },
729
+ joi: { fullKb: 29.7, baseKb: 29.7, treeShakable: false },
730
+ "io-ts": { fullKb: 6.5, baseKb: 6.5, treeShakable: true },
731
+ valibot: { fullKb: 5.8, baseKb: 1.4, treeShakable: true }
732
+ };
733
+ var VALIDATOR_OVERHEAD = {
734
+ valibot: 0.05
735
+ };
736
+ var COMMON_VALIDATORS = /* @__PURE__ */ new Set([
737
+ "string",
738
+ "number",
739
+ "boolean",
740
+ "object",
741
+ "array",
742
+ "optional",
743
+ "nullable",
744
+ "enum",
745
+ "union",
746
+ "literal",
747
+ "date",
748
+ "email",
749
+ "url",
750
+ "uuid",
751
+ "min",
752
+ "max",
753
+ "regex",
754
+ "transform",
755
+ "refine",
756
+ "default",
757
+ "record",
758
+ "tuple",
759
+ "lazy",
760
+ "discriminatedUnion",
761
+ "intersection",
762
+ "partial",
763
+ "pick",
764
+ "omit",
765
+ "brand",
766
+ "pipe"
767
+ ]);
768
+ var BundleEstimator = class {
769
+ estimate(sourceFiles, from, to) {
770
+ const usedValidators = this.countUsedValidators(sourceFiles);
771
+ const fromInfo = this.getLibraryInfo(from, usedValidators);
772
+ const toInfo = this.getLibraryInfo(to, usedValidators);
773
+ const estimatedDelta = toInfo.estimatedUsedKb - fromInfo.estimatedUsedKb;
774
+ const deltaPercent = fromInfo.estimatedUsedKb > 0 ? Math.round(estimatedDelta / fromInfo.estimatedUsedKb * 100) : 0;
775
+ const caveats = this.generateCaveats(from, to, usedValidators);
776
+ const summary = this.generateSummary(fromInfo, toInfo, estimatedDelta, deltaPercent);
777
+ return {
778
+ from: fromInfo,
779
+ to: toInfo,
780
+ estimatedDelta,
781
+ deltaPercent,
782
+ summary,
783
+ caveats
784
+ };
785
+ }
786
+ countUsedValidators(sourceFiles) {
787
+ const usedSet = /* @__PURE__ */ new Set();
788
+ for (const file of sourceFiles) {
789
+ const text = file.getFullText();
790
+ for (const validator of COMMON_VALIDATORS) {
791
+ const pattern = new RegExp(`\\.${validator}\\s*[(<]`, "g");
792
+ if (pattern.test(text)) {
793
+ usedSet.add(validator);
794
+ }
795
+ }
796
+ }
797
+ return usedSet.size;
798
+ }
799
+ getLibraryInfo(library, usedValidators) {
800
+ const sizeKey = library === "zod-v3" ? "zod" : library;
801
+ const sizes = LIBRARY_SIZES[sizeKey] ?? { fullKb: 10, baseKb: 10, treeShakable: false };
802
+ let estimatedUsedKb;
803
+ if (sizes.treeShakable) {
804
+ const overhead = VALIDATOR_OVERHEAD[sizeKey] ?? 0.05;
805
+ estimatedUsedKb = Math.min(sizes.baseKb + usedValidators * overhead, sizes.fullKb);
806
+ } else {
807
+ estimatedUsedKb = sizes.fullKb;
808
+ }
809
+ return {
810
+ library: sizeKey,
811
+ minifiedGzipKb: sizes.fullKb,
812
+ treeShakable: sizes.treeShakable,
813
+ estimatedUsedKb: Math.round(estimatedUsedKb * 10) / 10
814
+ };
815
+ }
816
+ generateCaveats(from, to, _usedValidators) {
817
+ const caveats = [
818
+ "Sizes are estimates based on minified+gzipped bundle analysis.",
819
+ "Actual impact depends on bundler configuration, tree-shaking, and code splitting."
820
+ ];
821
+ if (to === "valibot") {
822
+ caveats.push(
823
+ "Valibot is fully tree-shakable \u2014 actual size depends on which validators you use."
824
+ );
825
+ caveats.push(
826
+ "Some developers report smaller-than-expected savings (6kB or less) in real projects."
827
+ );
828
+ }
829
+ if (from === "zod-v3" && to === "v4") {
830
+ caveats.push(
831
+ "Zod v4 is ~26% larger than v3 due to JIT compilation engine. Consider zod/mini for size-sensitive apps."
832
+ );
833
+ }
834
+ if (from === "joi") {
835
+ caveats.push(
836
+ "Joi is the largest schema library. Any migration will likely reduce bundle size."
837
+ );
838
+ }
839
+ return caveats;
840
+ }
841
+ generateSummary(from, to, delta, deltaPercent) {
842
+ const direction = delta > 0 ? "increase" : delta < 0 ? "decrease" : "no change";
843
+ const absDelta = Math.abs(Math.round(delta * 10) / 10);
844
+ return `Estimated bundle ${direction}: ${from.library} (${from.estimatedUsedKb}kB) \u2192 ${to.library} (${to.estimatedUsedKb}kB) = ${delta > 0 ? "+" : delta < 0 ? "-" : ""}${absDelta}kB (${deltaPercent > 0 ? "+" : ""}${deltaPercent}%)`;
845
+ }
846
+ };
847
+
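BundleEstimator counts which of the COMMON_VALIDATORS appear in the project, then prices the source and target libraries from the LIBRARY_SIZES table (tree-shakable libraries get baseKb plus a small per-validator overhead, capped at fullKb). A sketch of the call shape; the tsconfig path and the quoted output line are illustrative:

```ts
import { Project } from "ts-morph";
import { BundleEstimator } from "@schemashift/core";

const project = new Project({ tsConfigFilePath: "tsconfig.json" });
const estimate = new BundleEstimator().estimate(project.getSourceFiles(), "zod", "valibot");

// summary reads roughly like:
// "Estimated bundle decrease: zod (14kB) -> valibot (2.8kB) = -11.2kB (-80%)"
console.log(estimate.summary);
for (const caveat of estimate.caveats) console.log(`- ${caveat}`);
```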
318
848
  // src/chain.ts
319
849
  var import_ts_morph3 = require("ts-morph");
320
850
  var MigrationChain = class {
@@ -381,12 +911,12 @@ var MigrationChain = class {
  };
 
  // src/compatibility.ts
- var import_node_fs2 = require("fs");
- var import_node_path2 = require("path");
+ var import_node_fs3 = require("fs");
+ var import_node_path3 = require("path");
 
  // src/ecosystem.ts
- var import_node_fs = require("fs");
- var import_node_path = require("path");
+ var import_node_fs2 = require("fs");
+ var import_node_path2 = require("path");
  var ECOSYSTEM_RULES = [
  // ORM integrations
  {
@@ -539,6 +1069,177 @@ var ECOSYSTEM_RULES = [
539
1069
  severity: "warning",
540
1070
  upgradeCommand: "npm install @asteasolutions/zod-to-openapi@latest"
541
1071
  })
1072
+ },
1073
+ // AI/MCP integrations
1074
+ {
1075
+ package: "@modelcontextprotocol/sdk",
1076
+ category: "api",
1077
+ migrations: ["zod-v3->v4"],
1078
+ check: () => ({
1079
+ issue: "MCP SDK may have Zod v4 compatibility issues. MCP servers typically expect Zod v3 schemas.",
1080
+ suggestion: "Check MCP SDK release notes for Zod v4 support before upgrading. Consider staying on Zod v3 for MCP servers.",
1081
+ severity: "warning",
1082
+ upgradeCommand: "npm install @modelcontextprotocol/sdk@latest"
1083
+ })
1084
+ },
1085
+ {
1086
+ package: "@openai/agents",
1087
+ category: "api",
1088
+ migrations: ["zod-v3->v4"],
1089
+ check: () => ({
1090
+ issue: "OpenAI Agents SDK recommends pinning to zod@3.25.67 due to TS2589 errors with newer versions.",
1091
+ suggestion: "Pin zod to 3.25.67 for OpenAI Agents SDK compatibility, or wait for an SDK update with Zod v4 support.",
1092
+ severity: "error"
1093
+ })
1094
+ },
1095
+ // Zod-based HTTP/API clients
1096
+ {
1097
+ package: "zodios",
1098
+ category: "api",
1099
+ migrations: ["zod-v3->v4"],
1100
+ check: () => ({
1101
+ issue: "Zodios uses Zod schemas for API contract definitions. Zod v4 type changes may break contracts.",
1102
+ suggestion: "Upgrade Zodios to a Zod v4-compatible version and verify all API contracts.",
1103
+ severity: "warning",
1104
+ upgradeCommand: "npm install @zodios/core@latest"
1105
+ })
1106
+ },
1107
+ {
1108
+ package: "@zodios/core",
1109
+ category: "api",
1110
+ migrations: ["zod-v3->v4"],
1111
+ check: () => ({
1112
+ issue: "@zodios/core uses Zod schemas for API contract definitions. Zod v4 type changes may break contracts.",
1113
+ suggestion: "Upgrade @zodios/core to a Zod v4-compatible version and verify all API contracts.",
1114
+ severity: "warning",
1115
+ upgradeCommand: "npm install @zodios/core@latest"
1116
+ })
1117
+ },
1118
+ {
1119
+ package: "@ts-rest/core",
1120
+ category: "api",
1121
+ migrations: ["zod-v3->v4"],
1122
+ check: () => ({
1123
+ issue: "@ts-rest/core uses Zod for contract definitions. Zod v4 type incompatibilities may break runtime validation.",
1124
+ suggestion: "Upgrade @ts-rest/core to a version with Zod v4 support.",
1125
+ severity: "warning",
1126
+ upgradeCommand: "npm install @ts-rest/core@latest"
1127
+ })
1128
+ },
1129
+ {
1130
+ package: "trpc-openapi",
1131
+ category: "openapi",
1132
+ migrations: ["zod-v3->v4"],
1133
+ check: () => ({
1134
+ issue: "trpc-openapi needs a v4-compatible version for Zod v4.",
1135
+ suggestion: "Check for a Zod v4-compatible version of trpc-openapi before upgrading.",
1136
+ severity: "warning",
1137
+ upgradeCommand: "npm install trpc-openapi@latest"
1138
+ })
1139
+ },
1140
+ // Form data and URL state libraries
1141
+ {
1142
+ package: "zod-form-data",
1143
+ category: "form",
1144
+ migrations: ["zod-v3->v4"],
1145
+ check: () => ({
1146
+ issue: "zod-form-data relies on Zod v3 internals (_def) which moved to _zod.def in v4.",
1147
+ suggestion: "Upgrade zod-form-data to a Zod v4-compatible version.",
1148
+ severity: "error",
1149
+ upgradeCommand: "npm install zod-form-data@latest"
1150
+ })
1151
+ },
1152
+ {
1153
+ package: "@conform-to/zod",
1154
+ category: "form",
1155
+ migrations: ["zod-v3->v4"],
1156
+ check: () => ({
1157
+ issue: "@conform-to/zod may have Zod v4 compatibility issues.",
1158
+ suggestion: "Upgrade @conform-to/zod to the latest version with Zod v4 support.",
1159
+ severity: "warning",
1160
+ upgradeCommand: "npm install @conform-to/zod@latest"
1161
+ })
1162
+ },
1163
+ {
1164
+ package: "nuqs",
1165
+ category: "validation-util",
1166
+ migrations: ["zod-v3->v4"],
1167
+ check: () => ({
1168
+ issue: "nuqs uses Zod for URL state parsing. Zod v4 changes may affect URL parameter validation.",
1169
+ suggestion: "Upgrade nuqs to a version with Zod v4 support.",
1170
+ severity: "warning",
1171
+ upgradeCommand: "npm install nuqs@latest"
1172
+ })
1173
+ },
1174
+ // Schema library detection for cross-library migrations
1175
+ {
1176
+ package: "@effect/schema",
1177
+ category: "validation-util",
1178
+ migrations: ["io-ts->zod"],
1179
+ check: () => ({
1180
+ issue: "@effect/schema detected \u2014 this is the successor to io-ts/fp-ts. Consider migrating to Effect Schema instead of Zod if you prefer FP patterns.",
1181
+ suggestion: "If using fp-ts patterns heavily, consider Effect Schema as the migration target instead of Zod.",
1182
+ severity: "info"
1183
+ })
1184
+ },
1185
+ {
1186
+ package: "arktype",
1187
+ category: "validation-util",
1188
+ migrations: ["zod->valibot", "zod-v3->v4"],
1189
+ check: (_version, migration) => {
1190
+ if (migration === "zod->valibot") {
1191
+ return {
1192
+ issue: "ArkType detected alongside Zod. Consider ArkType as a migration target \u2014 it offers 100x faster validation and Standard Schema support.",
1193
+ suggestion: "Consider migrating to ArkType for performance-critical paths, or keep Zod for ecosystem compatibility.",
1194
+ severity: "info"
1195
+ };
1196
+ }
1197
+ return {
1198
+ issue: "ArkType detected alongside Zod. ArkType supports Standard Schema, making it interoperable with Zod v4.",
1199
+ suggestion: "No action needed \u2014 ArkType and Zod v4 can coexist via Standard Schema.",
1200
+ severity: "info"
1201
+ };
1202
+ }
1203
+ },
1204
+ {
1205
+ package: "superstruct",
1206
+ category: "validation-util",
1207
+ migrations: ["yup->zod", "joi->zod"],
1208
+ check: () => ({
1209
+ issue: "Superstruct detected in the project. Consider migrating Superstruct schemas to Zod as well for a unified validation approach.",
1210
+ suggestion: "Use SchemaShift to migrate Superstruct schemas alongside Yup/Joi schemas.",
1211
+ severity: "info"
1212
+ })
1213
+ },
1214
+ // Additional validation utilities
1215
+ {
1216
+ package: "zod-to-json-schema",
1217
+ category: "validation-util",
1218
+ migrations: ["zod-v3->v4"],
1219
+ check: (version) => {
1220
+ const majorMatch = version.match(/(\d+)/);
1221
+ const major = majorMatch?.[1] ? Number.parseInt(majorMatch[1], 10) : 0;
1222
+ if (major < 4) {
1223
+ return {
1224
+ issue: "zod-to-json-schema v3 may not fully support Zod v4 schemas.",
1225
+ suggestion: "Upgrade to zod-to-json-schema v4+ for full Zod v4 support.",
1226
+ severity: "warning",
1227
+ upgradeCommand: "npm install zod-to-json-schema@latest"
1228
+ };
1229
+ }
1230
+ return null;
1231
+ }
1232
+ },
1233
+ {
1234
+ package: "react-hook-form",
1235
+ category: "form",
1236
+ migrations: ["zod-v3->v4"],
1237
+ check: () => ({
1238
+ issue: "React Hook Form with zodResolver may throw uncaught ZodError instead of populating formState.errors with Zod v4.",
1239
+ suggestion: "Upgrade @hookform/resolvers to the latest version and test form validation thoroughly.",
1240
+ severity: "warning",
1241
+ upgradeCommand: "npm install @hookform/resolvers@latest react-hook-form@latest"
1242
+ })
542
1243
  }
543
1244
  ];
544
1245
  var EcosystemAnalyzer = class {
@@ -547,13 +1248,13 @@ var EcosystemAnalyzer = class {
547
1248
  const dependencies = [];
548
1249
  const warnings = [];
549
1250
  const blockers = [];
550
- const pkgPath = (0, import_node_path.join)(projectPath, "package.json");
551
- if (!(0, import_node_fs.existsSync)(pkgPath)) {
1251
+ const pkgPath = (0, import_node_path2.join)(projectPath, "package.json");
1252
+ if (!(0, import_node_fs2.existsSync)(pkgPath)) {
552
1253
  return { dependencies, warnings, blockers };
553
1254
  }
554
1255
  let allDeps = {};
555
1256
  try {
556
- const pkg = JSON.parse((0, import_node_fs.readFileSync)(pkgPath, "utf-8"));
1257
+ const pkg = JSON.parse((0, import_node_fs2.readFileSync)(pkgPath, "utf-8"));
557
1258
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
558
1259
  } catch {
559
1260
  return { dependencies, warnings, blockers };
@@ -583,6 +1284,20 @@ var EcosystemAnalyzer = class {
583
1284
  }
584
1285
  return { dependencies, warnings, blockers };
585
1286
  }
1287
+ /**
1288
+ * Returns a list of npm install commands needed to resolve ecosystem issues.
1289
+ */
1290
+ getUpgradeCommands(report) {
1291
+ const commands = [];
1292
+ const seen = /* @__PURE__ */ new Set();
1293
+ for (const dep of report.dependencies) {
1294
+ if (dep.upgradeCommand && !seen.has(dep.upgradeCommand)) {
1295
+ seen.add(dep.upgradeCommand);
1296
+ commands.push(dep.upgradeCommand);
1297
+ }
1298
+ }
1299
+ return commands;
1300
+ }
586
1301
  };
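The new getUpgradeCommands() helper deduplicates the upgradeCommand strings attached to ecosystem findings so a caller can print or run them directly. A sketch; the analyze() signature is defined outside this hunk, so the report is left abstract and typed only with the slice the helper reads:

```ts
import { EcosystemAnalyzer } from "@schemashift/core";

const analyzer = new EcosystemAnalyzer();

// Produce the report however you already do (analyze() is not shown in this hunk).
declare const report: { dependencies: Array<{ upgradeCommand?: string }> };

for (const command of analyzer.getUpgradeCommands(report)) {
  console.log(command); // e.g. "npm install @hookform/resolvers@latest react-hook-form@latest"
}
```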
587
1302
 
588
1303
  // src/compatibility.ts
@@ -660,10 +1375,10 @@ var CompatibilityAnalyzer = class {
660
1375
  ecosystemAnalyzer = new EcosystemAnalyzer();
661
1376
  detectVersions(projectPath) {
662
1377
  const versions = [];
663
- const pkgPath = (0, import_node_path2.join)(projectPath, "package.json");
664
- if (!(0, import_node_fs2.existsSync)(pkgPath)) return versions;
1378
+ const pkgPath = (0, import_node_path3.join)(projectPath, "package.json");
1379
+ if (!(0, import_node_fs3.existsSync)(pkgPath)) return versions;
665
1380
  try {
666
- const pkg = JSON.parse((0, import_node_fs2.readFileSync)(pkgPath, "utf-8"));
1381
+ const pkg = JSON.parse((0, import_node_fs3.readFileSync)(pkgPath, "utf-8"));
667
1382
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
668
1383
  const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
669
1384
  for (const lib of knownLibs) {
@@ -885,8 +1600,8 @@ async function loadConfig(configPath) {
885
1600
  }
886
1601
 
887
1602
  // src/dependency-graph.ts
888
- var import_node_fs3 = require("fs");
889
- var import_node_path3 = require("path");
1603
+ var import_node_fs4 = require("fs");
1604
+ var import_node_path4 = require("path");
890
1605
  var SchemaDependencyResolver = class {
891
1606
  resolve(project, filePaths) {
892
1607
  const fileSet = new Set(filePaths);
@@ -973,39 +1688,96 @@ var SchemaDependencyResolver = class {
973
1688
  }
974
1689
  };
975
1690
  var SCHEMA_PACKAGES = /* @__PURE__ */ new Set(["zod", "yup", "joi", "io-ts", "valibot", "@effect/schema"]);
1691
+ function computeParallelBatches(packages, suggestedOrder) {
1692
+ const nameSet = new Set(packages.map((p) => p.name));
1693
+ const depMap = /* @__PURE__ */ new Map();
1694
+ for (const pkg of packages) {
1695
+ depMap.set(pkg.name, new Set(pkg.dependencies.filter((d) => nameSet.has(d))));
1696
+ }
1697
+ const depths = /* @__PURE__ */ new Map();
1698
+ const getDepth = (name, visited) => {
1699
+ const cached = depths.get(name);
1700
+ if (cached !== void 0) return cached;
1701
+ if (visited.has(name)) return 0;
1702
+ visited.add(name);
1703
+ const deps = depMap.get(name) ?? /* @__PURE__ */ new Set();
1704
+ let maxDepth = 0;
1705
+ for (const dep of deps) {
1706
+ maxDepth = Math.max(maxDepth, getDepth(dep, visited) + 1);
1707
+ }
1708
+ depths.set(name, maxDepth);
1709
+ return maxDepth;
1710
+ };
1711
+ for (const name of suggestedOrder) {
1712
+ getDepth(name, /* @__PURE__ */ new Set());
1713
+ }
1714
+ const batchMap = /* @__PURE__ */ new Map();
1715
+ for (const name of suggestedOrder) {
1716
+ const depth = depths.get(name) ?? 0;
1717
+ const batch = batchMap.get(depth) ?? [];
1718
+ batch.push(name);
1719
+ batchMap.set(depth, batch);
1720
+ }
1721
+ const batches = [];
1722
+ const sortedDepths = [...batchMap.keys()].sort((a, b) => a - b);
1723
+ for (const depth of sortedDepths) {
1724
+ const pkgs = batchMap.get(depth);
1725
+ if (pkgs) batches.push({ index: batches.length, packages: pkgs });
1726
+ }
1727
+ return batches;
1728
+ }
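computeParallelBatches groups packages by their longest dependency-chain depth, so every package in batch N depends only on packages from earlier batches and each batch can be migrated in parallel. A sketch with made-up workspace names; only `name` and `dependencies` are read by the function:

```ts
import { computeParallelBatches } from "@schemashift/core";

// Hypothetical workspace packages and ordering (e.g. MonorepoResolver's suggestedOrder).
const packages = [
  { name: "@acme/schemas", dependencies: [] },
  { name: "@acme/api", dependencies: ["@acme/schemas"] },
  { name: "@acme/web", dependencies: ["@acme/schemas", "@acme/api"] },
];
const order = ["@acme/schemas", "@acme/api", "@acme/web"];

const batches = computeParallelBatches(packages, order);
// [ { index: 0, packages: ["@acme/schemas"] },
//   { index: 1, packages: ["@acme/api"] },
//   { index: 2, packages: ["@acme/web"] } ]
```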
976
1729
  var MonorepoResolver = class {
977
1730
  detect(projectPath) {
978
- const pkgPath = (0, import_node_path3.join)(projectPath, "package.json");
979
- if (!(0, import_node_fs3.existsSync)(pkgPath)) return false;
980
- try {
981
- const pkg = JSON.parse((0, import_node_fs3.readFileSync)(pkgPath, "utf-8"));
982
- return !!pkg.workspaces;
983
- } catch {
984
- return false;
1731
+ const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1732
+ if ((0, import_node_fs4.existsSync)(pkgPath)) {
1733
+ try {
1734
+ const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
1735
+ if (pkg.workspaces) return true;
1736
+ } catch {
1737
+ }
1738
+ }
1739
+ if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "pnpm-workspace.yaml"))) return true;
1740
+ return false;
1741
+ }
1742
+ /**
1743
+ * Detect which workspace manager is being used.
1744
+ */
1745
+ detectManager(projectPath) {
1746
+ if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
1747
+ const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1748
+ if ((0, import_node_fs4.existsSync)(pkgPath)) {
1749
+ try {
1750
+ const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
1751
+ if (pkg.packageManager?.startsWith("yarn")) return "yarn";
1752
+ if (pkg.packageManager?.startsWith("pnpm")) return "pnpm";
1753
+ } catch {
1754
+ }
985
1755
  }
1756
+ if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "pnpm-lock.yaml"))) return "pnpm";
1757
+ if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "yarn.lock"))) return "yarn";
1758
+ return "npm";
986
1759
  }
987
1760
  analyze(projectPath) {
988
- const pkgPath = (0, import_node_path3.join)(projectPath, "package.json");
989
- if (!(0, import_node_fs3.existsSync)(pkgPath)) {
1761
+ const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1762
+ if (!(0, import_node_fs4.existsSync)(pkgPath)) {
990
1763
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
991
1764
  }
992
1765
  let workspaceGlobs;
993
1766
  try {
994
- const pkg = JSON.parse((0, import_node_fs3.readFileSync)(pkgPath, "utf-8"));
995
- if (!pkg.workspaces) {
1767
+ workspaceGlobs = this.resolveWorkspaceGlobs(projectPath);
1768
+ if (workspaceGlobs.length === 0) {
996
1769
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
997
1770
  }
998
- workspaceGlobs = Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
999
1771
  } catch {
1000
1772
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
1001
1773
  }
1002
1774
  const packages = [];
1003
1775
  const resolvedDirs = this.resolveWorkspaceDirs(projectPath, workspaceGlobs);
1004
1776
  for (const dir of resolvedDirs) {
1005
- const wsPkgPath = (0, import_node_path3.join)(dir, "package.json");
1006
- if (!(0, import_node_fs3.existsSync)(wsPkgPath)) continue;
1777
+ const wsPkgPath = (0, import_node_path4.join)(dir, "package.json");
1778
+ if (!(0, import_node_fs4.existsSync)(wsPkgPath)) continue;
1007
1779
  try {
1008
- const wsPkg = JSON.parse((0, import_node_fs3.readFileSync)(wsPkgPath, "utf-8"));
1780
+ const wsPkg = JSON.parse((0, import_node_fs4.readFileSync)(wsPkgPath, "utf-8"));
1009
1781
  if (!wsPkg.name) continue;
1010
1782
  const allDeps = { ...wsPkg.dependencies, ...wsPkg.devDependencies };
1011
1783
  const depNames = Object.keys(allDeps);
@@ -1044,18 +1816,70 @@ var MonorepoResolver = class {
1044
1816
  }
1045
1817
  return sorted;
1046
1818
  }
1819
+ /**
1820
+ * Resolve workspace glob patterns from any supported format.
1821
+ * Supports: npm/yarn workspaces (package.json), pnpm-workspace.yaml
1822
+ */
1823
+ resolveWorkspaceGlobs(projectPath) {
1824
+ const pnpmPath = (0, import_node_path4.join)(projectPath, "pnpm-workspace.yaml");
1825
+ if ((0, import_node_fs4.existsSync)(pnpmPath)) {
1826
+ return this.parsePnpmWorkspace(pnpmPath);
1827
+ }
1828
+ const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1829
+ if ((0, import_node_fs4.existsSync)(pkgPath)) {
1830
+ try {
1831
+ const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
1832
+ if (pkg.workspaces) {
1833
+ return Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
1834
+ }
1835
+ } catch {
1836
+ }
1837
+ }
1838
+ return [];
1839
+ }
1840
+ /**
1841
+ * Parse pnpm-workspace.yaml to extract workspace package globs.
1842
+ * Simple YAML parsing for the common format:
1843
+ * ```
1844
+ * packages:
1845
+ * - 'packages/*'
1846
+ * - 'apps/*'
1847
+ * ```
1848
+ */
1849
+ parsePnpmWorkspace(filePath) {
1850
+ const content = (0, import_node_fs4.readFileSync)(filePath, "utf-8");
1851
+ const globs = [];
1852
+ let inPackages = false;
1853
+ for (const line of content.split("\n")) {
1854
+ const trimmed = line.trim();
1855
+ if (trimmed === "packages:") {
1856
+ inPackages = true;
1857
+ continue;
1858
+ }
1859
+ if (inPackages && /^\w/.test(trimmed) && !trimmed.startsWith("-")) {
1860
+ break;
1861
+ }
1862
+ if (inPackages && trimmed.startsWith("-")) {
1863
+ const pattern = trimmed.replace(/^-\s*/, "").replace(/^['"]|['"]$/g, "");
1864
+ if (pattern) {
1865
+ globs.push(pattern);
1866
+ }
1867
+ }
1868
+ }
1869
+ return globs;
1870
+ }
1047
1871
  resolveWorkspaceDirs(projectPath, globs) {
1048
1872
  const dirs = [];
1049
1873
  for (const glob of globs) {
1050
1874
  const clean = glob.replace(/\/?\*$/, "");
1051
- const base = (0, import_node_path3.resolve)(projectPath, clean);
1052
- if (!(0, import_node_fs3.existsSync)(base)) continue;
1875
+ const base = (0, import_node_path4.resolve)(projectPath, clean);
1876
+ if (!(0, import_node_fs4.existsSync)(base)) continue;
1053
1877
  if (glob.endsWith("*")) {
1054
1878
  try {
1055
- const entries = (0, import_node_fs3.readdirSync)(base, { withFileTypes: true });
1879
+ const entries = (0, import_node_fs4.readdirSync)(base, { withFileTypes: true });
1056
1880
  for (const entry of entries) {
1057
1881
  if (entry.isDirectory()) {
1058
- dirs.push((0, import_node_path3.join)(base, entry.name));
1882
+ dirs.push((0, import_node_path4.join)(base, entry.name));
1059
1883
  }
1060
1884
  }
1061
1885
  } catch {
@@ -1069,8 +1893,8 @@ var MonorepoResolver = class {
1069
1893
  };
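MonorepoResolver now recognizes pnpm workspaces (pnpm-workspace.yaml) in addition to the package.json "workspaces" field, and detectManager() falls back to lockfiles when no packageManager field is present. A sketch; the repository path is illustrative:

```ts
import { MonorepoResolver } from "@schemashift/core";

const resolver = new MonorepoResolver();
const root = "/path/to/monorepo"; // illustrative

if (resolver.detect(root)) {
  console.log(resolver.detectManager(root)); // "pnpm" | "yarn" | "npm"
  const { packages, suggestedOrder } = resolver.analyze(root);
  console.log(suggestedOrder); // dependency-ordered workspace package names
}
```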
1070
1894
 
1071
1895
  // src/detailed-analyzer.ts
1072
- var import_node_fs4 = require("fs");
1073
- var import_node_path4 = require("path");
1896
+ var import_node_fs5 = require("fs");
1897
+ var import_node_path5 = require("path");
1074
1898
  var COMPLEXITY_CHAIN_WEIGHT = 2;
1075
1899
  var COMPLEXITY_DEPTH_WEIGHT = 3;
1076
1900
  var COMPLEXITY_VALIDATION_WEIGHT = 1;
@@ -1135,10 +1959,10 @@ var DetailedAnalyzer = class {
1135
1959
  }
1136
1960
  detectLibraryVersions(projectPath) {
1137
1961
  const versions = [];
1138
- const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1139
- if (!(0, import_node_fs4.existsSync)(pkgPath)) return versions;
1962
+ const pkgPath = (0, import_node_path5.join)(projectPath, "package.json");
1963
+ if (!(0, import_node_fs5.existsSync)(pkgPath)) return versions;
1140
1964
  try {
1141
- const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
1965
+ const pkg = JSON.parse((0, import_node_fs5.readFileSync)(pkgPath, "utf-8"));
1142
1966
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
1143
1967
  const allDeps = {
1144
1968
  ...pkg.dependencies,
@@ -1311,6 +2135,165 @@ var DetailedAnalyzer = class {
1311
2135
  }
1312
2136
  };
1313
2137
 
2138
+ // src/drift-detector.ts
2139
+ var import_node_crypto2 = require("crypto");
2140
+ var import_node_fs6 = require("fs");
2141
+ var import_node_path6 = require("path");
2142
+ var SNAPSHOT_DIR = ".schemashift";
2143
+ var SNAPSHOT_FILE = "schema-snapshot.json";
2144
+ var SNAPSHOT_VERSION = 1;
2145
+ var DriftDetector = class {
2146
+ snapshotDir;
2147
+ snapshotPath;
2148
+ constructor(projectPath) {
2149
+ this.snapshotDir = (0, import_node_path6.join)(projectPath, SNAPSHOT_DIR);
2150
+ this.snapshotPath = (0, import_node_path6.join)(this.snapshotDir, SNAPSHOT_FILE);
2151
+ }
2152
+ /**
2153
+ * Take a snapshot of the current schema state
2154
+ */
2155
+ snapshot(files, projectPath) {
2156
+ const schemas = [];
2157
+ for (const filePath of files) {
2158
+ if (!(0, import_node_fs6.existsSync)(filePath)) continue;
2159
+ const content = (0, import_node_fs6.readFileSync)(filePath, "utf-8");
2160
+ const library = this.detectLibraryFromContent(content);
2161
+ if (library === "unknown") continue;
2162
+ const schemaNames = this.extractSchemaNames(content);
2163
+ schemas.push({
2164
+ filePath: (0, import_node_path6.relative)(projectPath, filePath),
2165
+ library,
2166
+ contentHash: this.hashContent(content),
2167
+ schemaCount: schemaNames.length,
2168
+ schemaNames
2169
+ });
2170
+ }
2171
+ const snapshot = {
2172
+ version: SNAPSHOT_VERSION,
2173
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
2174
+ projectPath,
2175
+ schemas
2176
+ };
2177
+ return snapshot;
2178
+ }
2179
+ /**
2180
+ * Save a snapshot to disk
2181
+ */
2182
+ saveSnapshot(snapshot) {
2183
+ if (!(0, import_node_fs6.existsSync)(this.snapshotDir)) {
2184
+ (0, import_node_fs6.mkdirSync)(this.snapshotDir, { recursive: true });
2185
+ }
2186
+ (0, import_node_fs6.writeFileSync)(this.snapshotPath, JSON.stringify(snapshot, null, 2));
2187
+ }
2188
+ /**
2189
+ * Load saved snapshot from disk
2190
+ */
2191
+ loadSnapshot() {
2192
+ if (!(0, import_node_fs6.existsSync)(this.snapshotPath)) {
2193
+ return null;
2194
+ }
2195
+ try {
2196
+ const content = (0, import_node_fs6.readFileSync)(this.snapshotPath, "utf-8");
2197
+ return JSON.parse(content);
2198
+ } catch {
2199
+ return null;
2200
+ }
2201
+ }
2202
+ /**
2203
+ * Compare current state against saved snapshot
2204
+ */
2205
+ detect(currentFiles, projectPath) {
2206
+ const saved = this.loadSnapshot();
2207
+ if (!saved) {
2208
+ return {
2209
+ hasDrift: false,
2210
+ added: [],
2211
+ removed: [],
2212
+ modified: [],
2213
+ unchanged: 0,
2214
+ totalFiles: 0,
2215
+ snapshotTimestamp: ""
2216
+ };
2217
+ }
2218
+ const current = this.snapshot(currentFiles, projectPath);
2219
+ return this.compareSnapshots(saved, current);
2220
+ }
2221
+ /**
2222
+ * Compare two snapshots and return drift results
2223
+ */
2224
+ compareSnapshots(baseline, current) {
2225
+ const baselineMap = new Map(baseline.schemas.map((s) => [s.filePath, s]));
2226
+ const currentMap = new Map(current.schemas.map((s) => [s.filePath, s]));
2227
+ const added = [];
2228
+ const removed = [];
2229
+ const modified = [];
2230
+ let unchanged = 0;
2231
+ for (const [path, currentFile] of currentMap) {
2232
+ const baselineFile = baselineMap.get(path);
2233
+ if (!baselineFile) {
2234
+ added.push(currentFile);
2235
+ } else if (currentFile.contentHash !== baselineFile.contentHash) {
2236
+ const addedSchemas = currentFile.schemaNames.filter(
2237
+ (n) => !baselineFile.schemaNames.includes(n)
2238
+ );
2239
+ const removedSchemas = baselineFile.schemaNames.filter(
2240
+ (n) => !currentFile.schemaNames.includes(n)
2241
+ );
2242
+ modified.push({
2243
+ filePath: path,
2244
+ library: currentFile.library,
2245
+ previousHash: baselineFile.contentHash,
2246
+ currentHash: currentFile.contentHash,
2247
+ previousSchemaCount: baselineFile.schemaCount,
2248
+ currentSchemaCount: currentFile.schemaCount,
2249
+ addedSchemas,
2250
+ removedSchemas
2251
+ });
2252
+ } else {
2253
+ unchanged++;
2254
+ }
2255
+ }
2256
+ for (const [path, baselineFile] of baselineMap) {
2257
+ if (!currentMap.has(path)) {
2258
+ removed.push(baselineFile);
2259
+ }
2260
+ }
2261
+ return {
2262
+ hasDrift: added.length > 0 || removed.length > 0 || modified.length > 0,
2263
+ added,
2264
+ removed,
2265
+ modified,
2266
+ unchanged,
2267
+ totalFiles: currentMap.size,
2268
+ snapshotTimestamp: baseline.timestamp
2269
+ };
2270
+ }
2271
+ extractSchemaNames(content) {
2272
+ const names = [];
2273
+ const pattern = /(?:const|let|var)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|t\.|v\.|type\(|object\(|string\(|S\.)/g;
2274
+ for (const match of content.matchAll(pattern)) {
2275
+ if (match[1]) names.push(match[1]);
2276
+ }
2277
+ return names;
2278
+ }
2279
+ detectLibraryFromContent(content) {
2280
+ if (/from\s*['"]zod['"]/.test(content) || /\bz\./.test(content)) return "zod";
2281
+ if (/from\s*['"]yup['"]/.test(content) || /\byup\./.test(content)) return "yup";
2282
+ if (/from\s*['"]joi['"]/.test(content) || /\bJoi\./.test(content)) return "joi";
2283
+ if (/from\s*['"]io-ts['"]/.test(content) || /\bt\./.test(content) && /from\s*['"]io-ts/.test(content))
2284
+ return "io-ts";
2285
+ if (/from\s*['"]valibot['"]/.test(content) || /\bv\./.test(content) && /from\s*['"]valibot/.test(content))
2286
+ return "valibot";
2287
+ if (/from\s*['"]arktype['"]/.test(content)) return "arktype";
2288
+ if (/from\s*['"]superstruct['"]/.test(content)) return "superstruct";
2289
+ if (/from\s*['"]@effect\/schema['"]/.test(content)) return "effect";
2290
+ return "unknown";
2291
+ }
2292
+ hashContent(content) {
2293
+ return (0, import_node_crypto2.createHash)("sha256").update(content).digest("hex").substring(0, 16);
2294
+ }
2295
+ };
2296
+
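DriftDetector hashes each schema file, records the schema names it can extract, and writes the baseline to `.schemashift/schema-snapshot.json`; detect() later diffs the current files against that baseline. A baseline-then-check sketch with illustrative file paths:

```ts
import { DriftDetector } from "@schemashift/core";

const projectPath = process.cwd();
const detector = new DriftDetector(projectPath);
const schemaFiles = ["src/schemas/user.ts", "src/schemas/order.ts"]; // illustrative

// 1. Record a baseline (e.g. right after a migration).
detector.saveSnapshot(detector.snapshot(schemaFiles, projectPath));

// 2. Later (e.g. in CI), warn when schemas changed since the baseline.
const drift = detector.detect(schemaFiles, projectPath);
if (drift.hasDrift) {
  console.log(`Drift since ${drift.snapshotTimestamp}`);
  for (const change of drift.modified) {
    console.log(`${change.filePath}: +${change.addedSchemas.length} / -${change.removedSchemas.length} schemas`);
  }
}
```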
1314
2297
  // src/form-resolver-migrator.ts
1315
2298
  var RESOLVER_MAPPINGS = {
1316
2299
  "yup->zod": [
@@ -1398,6 +2381,7 @@ var FormResolverMigrator = class {
1398
2381
  // src/governance.ts
1399
2382
  var GovernanceEngine = class {
1400
2383
  rules = /* @__PURE__ */ new Map();
2384
+ customRuleFunctions = /* @__PURE__ */ new Map();
1401
2385
  configure(rules) {
1402
2386
  this.rules.clear();
1403
2387
  for (const [name, config] of Object.entries(rules)) {
@@ -1406,6 +2390,13 @@ var GovernanceEngine = class {
1406
2390
  }
1407
2391
  }
1408
2392
  }
2393
+ /**
2394
+ * Register a custom governance rule function.
2395
+ * Custom rules are executed per-file alongside built-in rules.
2396
+ */
2397
+ registerRule(name, fn) {
2398
+ this.customRuleFunctions.set(name, fn);
2399
+ }
1409
2400
  analyze(project) {
1410
2401
  const violations = [];
1411
2402
  let schemasChecked = 0;
@@ -1481,6 +2472,104 @@ var GovernanceEngine = class {
1481
2472
  });
1482
2473
  }
1483
2474
  }
2475
+ if (this.rules.has("require-safeParse")) {
2476
+ if (text.includes(".parse(") && !text.includes(".safeParse(")) {
2477
+ violations.push({
2478
+ rule: "require-safeParse",
2479
+ message: `Schema "${schemaName}" uses .parse() \u2014 prefer .safeParse() for safer error handling`,
2480
+ filePath,
2481
+ lineNumber,
2482
+ schemaName,
2483
+ severity: "warning",
2484
+ fixable: true
2485
+ });
2486
+ }
2487
+ }
2488
+ if (this.rules.has("require-description")) {
2489
+ if (!text.includes(".describe(")) {
2490
+ violations.push({
2491
+ rule: "require-description",
2492
+ message: `Schema "${schemaName}" missing .describe() \u2014 add a description for documentation`,
2493
+ filePath,
2494
+ lineNumber,
2495
+ schemaName,
2496
+ severity: "warning",
2497
+ fixable: true
2498
+ });
2499
+ }
2500
+ }
2501
+ if (this.rules.has("no-coerce-in-api")) {
2502
+ if (/\.coerce\./.test(text)) {
2503
+ violations.push({
2504
+ rule: "no-coerce-in-api",
2505
+ message: `Schema "${schemaName}" uses z.coerce.* \u2014 coercion in API validation is a security risk`,
2506
+ filePath,
2507
+ lineNumber,
2508
+ schemaName,
2509
+ severity: "error",
2510
+ fixable: false
2511
+ });
2512
+ }
2513
+ }
2514
+ if (this.rules.has("require-max-length")) {
2515
+ if (text.includes(".string()") && !text.includes(".max(") && !text.includes(".length(")) {
2516
+ violations.push({
2517
+ rule: "require-max-length",
2518
+ message: `Schema "${schemaName}" has string without max length \u2014 required for DoS prevention`,
2519
+ filePath,
2520
+ lineNumber,
2521
+ schemaName,
2522
+ severity: "error",
2523
+ fixable: true
2524
+ });
2525
+ }
2526
+ }
2527
+ if (this.rules.has("max-nesting-depth")) {
2528
+ const config = this.rules.get("max-nesting-depth") ?? {};
2529
+ const maxDepth = config.threshold ?? 5;
2530
+ const depth = this.measureNestingDepth(text);
2531
+ if (depth > maxDepth) {
2532
+ violations.push({
2533
+ rule: "max-nesting-depth",
2534
+ message: `Schema "${schemaName}" nesting depth (${depth}) exceeds limit (${maxDepth})`,
2535
+ filePath,
2536
+ lineNumber,
2537
+ schemaName,
2538
+ severity: "warning",
2539
+ fixable: false
2540
+ });
2541
+ }
2542
+ }
2543
+ }
2544
+ }
2545
+ for (const sourceFile of project.getSourceFiles()) {
2546
+ const library = this.detectFileLibrary(sourceFile);
2547
+ if (library === "unknown") continue;
2548
+ const filePath = sourceFile.getFilePath();
2549
+ const text = sourceFile.getFullText();
2550
+ if (this.rules.has("no-dynamic-schemas")) {
2551
+ const dynamicPatterns = this.detectDynamicSchemas(text, library);
2552
+ for (const lineNumber of dynamicPatterns) {
2553
+ violations.push({
2554
+ rule: "no-dynamic-schemas",
2555
+ message: "Schema created inside function body \u2014 move to module level for performance",
2556
+ filePath,
2557
+ lineNumber,
2558
+ schemaName: "(dynamic)",
2559
+ severity: "warning",
2560
+ fixable: false
2561
+ });
2562
+ }
2563
+ }
2564
+ }
2565
+ for (const [ruleName, ruleFn] of this.customRuleFunctions) {
2566
+ const config = this.rules.get(ruleName);
2567
+ if (!config) continue;
2568
+ for (const sourceFile of project.getSourceFiles()) {
2569
+ const library = this.detectFileLibrary(sourceFile);
2570
+ if (library === "unknown") continue;
2571
+ const ruleViolations = ruleFn(sourceFile, config);
2572
+ violations.push(...ruleViolations);
1484
2573
  }
1485
2574
  }
1486
2575
  return {
@@ -1497,6 +2586,57 @@ var GovernanceEngine = class {
1497
2586
  }
1498
2587
  return "unknown";
1499
2588
  }
2589
+ measureNestingDepth(text) {
2590
+ let maxDepth = 0;
2591
+ let current = 0;
2592
+ for (const char of text) {
2593
+ if (char === "(") {
2594
+ current++;
2595
+ if (current > maxDepth) maxDepth = current;
2596
+ } else if (char === ")") {
2597
+ current--;
2598
+ }
2599
+ }
2600
+ return maxDepth;
2601
+ }
2602
+ detectDynamicSchemas(text, library) {
2603
+ const lineNumbers = [];
2604
+ const prefix = this.getSchemaPrefix(library);
2605
+ if (!prefix) return lineNumbers;
2606
+ const lines = text.split("\n");
2607
+ let insideFunction = 0;
2608
+ for (let i = 0; i < lines.length; i++) {
2609
+ const line = lines[i] ?? "";
2610
+ const opens = (line.match(/\{/g) || []).length;
2611
+ const closes = (line.match(/\}/g) || []).length;
2612
+ if (/(?:function\s+\w+|=>)\s*\{/.test(line)) {
2613
+ insideFunction += opens;
2614
+ insideFunction -= closes;
2615
+ continue;
2616
+ }
2617
+ insideFunction += opens - closes;
2618
+ if (insideFunction > 0 && line.includes(prefix)) {
2619
+ lineNumbers.push(i + 1);
2620
+ }
2621
+ }
2622
+ return lineNumbers;
2623
+ }
2624
+ getSchemaPrefix(library) {
2625
+ switch (library) {
2626
+ case "zod":
2627
+ return "z.";
2628
+ case "yup":
2629
+ return "yup.";
2630
+ case "joi":
2631
+ return "Joi.";
2632
+ case "io-ts":
2633
+ return "t.";
2634
+ case "valibot":
2635
+ return "v.";
2636
+ default:
2637
+ return null;
2638
+ }
2639
+ }
1500
2640
  isSchemaExpression(text, library) {
1501
2641
  switch (library) {
1502
2642
  case "zod":
@@ -1515,17 +2655,265 @@ var GovernanceEngine = class {
1515
2655
  }
1516
2656
  };
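Custom rules registered with registerRule() only run when a config entry with the same name is also passed to configure() (the analyze loop above skips custom rules that have no config). A sketch; the config shape `{ enabled: true }` and the rule name are assumptions for illustration, while the violation fields mirror the built-in rules in this diff:

```ts
import { Project } from "ts-morph";
import { GovernanceEngine } from "@schemashift/core";

const engine = new GovernanceEngine();

engine.registerRule("no-schemas-in-tests", (sourceFile, _config) => {
  if (!sourceFile.getFilePath().includes(".test.")) return [];
  return [{
    rule: "no-schemas-in-tests",
    message: "Schema defined in a test file",
    filePath: sourceFile.getFilePath(),
    lineNumber: 1,
    schemaName: "",
    severity: "warning",
    fixable: false,
  }];
});

engine.configure({
  "require-safeParse": { enabled: true },   // built-in rule from the diff above
  "no-schemas-in-tests": { enabled: true }, // enables the custom rule (config shape assumed)
});

const project = new Project({ tsConfigFilePath: "tsconfig.json" });
console.log(engine.analyze(project));
```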
1517
2657
 
2658
+ // src/governance-templates.ts
2659
+ var GOVERNANCE_TEMPLATES = [
2660
+ {
2661
+ name: "no-any-schemas",
2662
+ description: "Disallow z.any(), yup.mixed() without constraints, and similar unrestricted types",
2663
+ category: "security",
2664
+ rule: (sourceFile, _config) => {
2665
+ const violations = [];
2666
+ const text = sourceFile.getFullText();
2667
+ const filePath = sourceFile.getFilePath();
2668
+ const lines = text.split("\n");
2669
+ const anyPatterns = [
2670
+ /\bz\.any\(\)/,
2671
+ /\byup\.mixed\(\)/,
2672
+ /\bt\.any\b/,
2673
+ /\bv\.any\(\)/,
2674
+ /\bunknown\(\)/
2675
+ ];
2676
+ for (let i = 0; i < lines.length; i++) {
2677
+ const line = lines[i] ?? "";
2678
+ for (const pattern of anyPatterns) {
2679
+ if (pattern.test(line)) {
2680
+ violations.push({
2681
+ rule: "no-any-schemas",
2682
+ message: "Unrestricted type (any/mixed/unknown) found. Use a specific type with constraints.",
2683
+ filePath,
2684
+ lineNumber: i + 1,
2685
+ schemaName: "",
2686
+ severity: "error",
2687
+ fixable: false
2688
+ });
2689
+ }
2690
+ }
2691
+ }
2692
+ return violations;
2693
+ }
2694
+ },
2695
+ {
2696
+ name: "require-descriptions",
2697
+ description: "All exported schemas must have .describe() for documentation",
2698
+ category: "quality",
2699
+ rule: (sourceFile, _config) => {
2700
+ const violations = [];
2701
+ const text = sourceFile.getFullText();
2702
+ const filePath = sourceFile.getFilePath();
2703
+ const lines = text.split("\n");
2704
+ for (let i = 0; i < lines.length; i++) {
2705
+ const line = lines[i] ?? "";
2706
+ if (/export\s+(const|let)\s+\w+.*=\s*(z\.|yup\.)/.test(line)) {
2707
+ let fullStatement = line;
2708
+ let j = i + 1;
2709
+ while (j < lines.length && !lines[j]?.includes(";") && j < i + 10) {
2710
+ fullStatement += lines[j] ?? "";
2711
+ j++;
2712
+ }
2713
+ if (j < lines.length) fullStatement += lines[j] ?? "";
2714
+ if (!fullStatement.includes(".describe(")) {
2715
+ const nameMatch = line.match(/(?:const|let)\s+(\w+)/);
2716
+ violations.push({
2717
+ rule: "require-descriptions",
2718
+ message: `Exported schema ${nameMatch?.[1] || "unknown"} should include .describe() for documentation.`,
2719
+ filePath,
2720
+ lineNumber: i + 1,
2721
+ schemaName: nameMatch?.[1] || "",
2722
+ severity: "warning",
2723
+ fixable: true
2724
+ });
2725
+ }
2726
+ }
2727
+ }
2728
+ return violations;
2729
+ }
2730
+ },
2731
+ {
2732
+ name: "max-nesting-depth",
2733
+ description: "Limit schema nesting depth to prevent TypeScript performance issues",
2734
+ category: "performance",
2735
+ rule: (sourceFile, config) => {
2736
+ const violations = [];
2737
+ const text = sourceFile.getFullText();
2738
+ const filePath = sourceFile.getFilePath();
2739
+ const maxDepth = config.threshold || 5;
2740
+ const lines = text.split("\n");
2741
+ let currentDepth = 0;
2742
+ let maxFoundDepth = 0;
2743
+ let deepestLine = 0;
2744
+ for (let i = 0; i < lines.length; i++) {
2745
+ const line = lines[i] ?? "";
2746
+ for (const char of line) {
2747
+ if (char === "(" || char === "{" || char === "[") {
2748
+ currentDepth++;
2749
+ if (currentDepth > maxFoundDepth) {
2750
+ maxFoundDepth = currentDepth;
2751
+ deepestLine = i + 1;
2752
+ }
2753
+ }
2754
+ if (char === ")" || char === "}" || char === "]") {
2755
+ currentDepth = Math.max(0, currentDepth - 1);
2756
+ }
2757
+ }
2758
+ }
2759
+ if (maxFoundDepth > maxDepth) {
2760
+ violations.push({
2761
+ rule: "max-nesting-depth",
2762
+ message: `Schema nesting depth ${maxFoundDepth} exceeds maximum of ${maxDepth}. Consider breaking into smaller schemas.`,
2763
+ filePath,
2764
+ lineNumber: deepestLine,
2765
+ schemaName: "",
2766
+ severity: "warning",
2767
+ fixable: false
2768
+ });
2769
+ }
2770
+ return violations;
2771
+ }
2772
+ },
2773
+ {
2774
+ name: "no-deprecated-methods",
2775
+ description: "Flag usage of deprecated schema methods",
2776
+ category: "quality",
2777
+ rule: (sourceFile, _config) => {
2778
+ const violations = [];
2779
+ const text = sourceFile.getFullText();
2780
+ const filePath = sourceFile.getFilePath();
2781
+ const lines = text.split("\n");
2782
+ const deprecatedPatterns = [
2783
+ {
2784
+ pattern: /\.deepPartial\(\)/,
2785
+ message: ".deepPartial() is removed in Zod v4. Use recursive .partial() instead."
2786
+ },
2787
+ {
2788
+ pattern: /\.strip\(\)/,
2789
+ message: ".strip() is deprecated. Use z.strictObject() or explicit stripping."
2790
+ },
2791
+ {
2792
+ pattern: /z\.promise\(/,
2793
+ message: "z.promise() is deprecated in Zod v4. Use native Promise types."
2794
+ },
2795
+ {
2796
+ pattern: /z\.ostring\(\)/,
2797
+ message: "z.ostring() is removed in Zod v4. Use z.string().optional()."
2798
+ },
2799
+ {
2800
+ pattern: /z\.onumber\(\)/,
2801
+ message: "z.onumber() is removed in Zod v4. Use z.number().optional()."
2802
+ },
2803
+ {
2804
+ pattern: /z\.oboolean\(\)/,
2805
+ message: "z.oboolean() is removed in Zod v4. Use z.boolean().optional()."
2806
+ },
2807
+ {
2808
+ pattern: /z\.preprocess\(/,
2809
+ message: "z.preprocess() is removed in Zod v4. Use z.coerce.* instead."
2810
+ }
2811
+ ];
2812
+ for (let i = 0; i < lines.length; i++) {
2813
+ const line = lines[i] ?? "";
2814
+ for (const { pattern, message } of deprecatedPatterns) {
2815
+ if (pattern.test(line)) {
2816
+ violations.push({
2817
+ rule: "no-deprecated-methods",
2818
+ message,
2819
+ filePath,
2820
+ lineNumber: i + 1,
2821
+ schemaName: "",
2822
+ severity: "warning",
2823
+ fixable: false
2824
+ });
2825
+ }
2826
+ }
2827
+ }
2828
+ return violations;
2829
+ }
2830
+ },
2831
+ {
2832
+ name: "naming-convention",
2833
+ description: "Enforce schema naming pattern (e.g., must end with Schema)",
2834
+ category: "quality",
2835
+ rule: (sourceFile, config) => {
2836
+ const violations = [];
2837
+ const text = sourceFile.getFullText();
2838
+ const filePath = sourceFile.getFilePath();
2839
+ const lines = text.split("\n");
2840
+ const pattern = new RegExp(config.pattern || ".*Schema$");
2841
+ for (let i = 0; i < lines.length; i++) {
2842
+ const line = lines[i] ?? "";
2843
+ const match = line.match(
2844
+ /(?:const|let)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|t\.|v\.|type\(|object\(|string\()/
2845
+ );
2846
+ if (match?.[1] && !pattern.test(match[1])) {
2847
+ violations.push({
2848
+ rule: "naming-convention",
2849
+ message: `Schema "${match[1]}" does not match naming pattern ${pattern.source}.`,
2850
+ filePath,
2851
+ lineNumber: i + 1,
2852
+ schemaName: match[1],
2853
+ severity: "warning",
2854
+ fixable: false
2855
+ });
2856
+ }
2857
+ }
2858
+ return violations;
2859
+ }
2860
+ },
2861
+ {
2862
+ name: "require-max-length",
2863
+ description: "String schemas must have .max() to prevent DoS via unbounded input",
2864
+ category: "security",
2865
+ rule: (sourceFile, _config) => {
2866
+ const violations = [];
2867
+ const text = sourceFile.getFullText();
2868
+ const filePath = sourceFile.getFilePath();
2869
+ const lines = text.split("\n");
2870
+ for (let i = 0; i < lines.length; i++) {
2871
+ const line = lines[i] ?? "";
2872
+ if (/z\.string\(\)/.test(line) && !line.includes(".max(") && !line.includes(".length(")) {
2873
+ let fullChain = line;
2874
+ let j = i + 1;
2875
+ while (j < lines.length && j < i + 5 && /^\s*\./.test(lines[j] ?? "")) {
2876
+ fullChain += lines[j] ?? "";
2877
+ j++;
2878
+ }
2879
+ if (!fullChain.includes(".max(") && !fullChain.includes(".length(")) {
2880
+ violations.push({
2881
+ rule: "require-max-length",
2882
+ message: "String schema should have .max() to prevent unbounded input (DoS protection).",
2883
+ filePath,
2884
+ lineNumber: i + 1,
2885
+ schemaName: "",
2886
+ severity: "warning",
2887
+ fixable: true
2888
+ });
2889
+ }
2890
+ }
2891
+ }
2892
+ return violations;
2893
+ }
2894
+ }
2895
+ ];
2896
+ function getGovernanceTemplate(name) {
2897
+ return GOVERNANCE_TEMPLATES.find((t) => t.name === name);
2898
+ }
2899
+ function getGovernanceTemplatesByCategory(category) {
2900
+ return GOVERNANCE_TEMPLATES.filter((t) => t.category === category);
2901
+ }
2902
+ function getGovernanceTemplateNames() {
2903
+ return GOVERNANCE_TEMPLATES.map((t) => t.name);
2904
+ }
2905
+
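
The six templates above each pair a rule name and category with a rule(sourceFile, config) callback that returns violation records ({ rule, message, filePath, lineNumber, schemaName, severity, fixable }). A minimal usage sketch, not part of the shipped bundle: it assumes the package is consumed as @schemashift/core and that ts-morph supplies the SourceFile objects the rules expect.

// Hypothetical sketch: run one built-in governance template across a project.
import { Project } from "ts-morph";
import { GOVERNANCE_TEMPLATES, getGovernanceTemplate } from "@schemashift/core";

const project = new Project();
project.addSourceFilesAtPaths("src/**/*.ts");

// "threshold" is the only config key the max-nesting-depth rule reads above.
const template = getGovernanceTemplate("max-nesting-depth");
if (template) {
  for (const file of project.getSourceFiles()) {
    const violations = template.rule(file, { threshold: 4 });
    for (const v of violations) {
      console.log(`${v.filePath}:${v.lineNumber} [${v.severity}] ${v.message}`);
    }
  }
}

console.log(`Built-in templates: ${GOVERNANCE_TEMPLATES.map((t) => t.name).join(", ")}`);
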
1518
2906
  // src/incremental.ts
1519
- var import_node_fs5 = require("fs");
1520
- var import_node_path5 = require("path");
2907
+ var import_node_fs7 = require("fs");
2908
+ var import_node_path7 = require("path");
1521
2909
  var STATE_DIR = ".schemashift";
1522
2910
  var STATE_FILE = "incremental.json";
1523
2911
  var IncrementalTracker = class {
1524
2912
  stateDir;
1525
2913
  statePath;
1526
2914
  constructor(projectPath) {
1527
- this.stateDir = (0, import_node_path5.join)(projectPath, STATE_DIR);
1528
- this.statePath = (0, import_node_path5.join)(this.stateDir, STATE_FILE);
2915
+ this.stateDir = (0, import_node_path7.join)(projectPath, STATE_DIR);
2916
+ this.statePath = (0, import_node_path7.join)(this.stateDir, STATE_FILE);
1529
2917
  }
1530
2918
  start(files, from, to) {
1531
2919
  const state = {
@@ -1560,9 +2948,9 @@ var IncrementalTracker = class {
1560
2948
  this.saveState(state);
1561
2949
  }
1562
2950
  getState() {
1563
- if (!(0, import_node_fs5.existsSync)(this.statePath)) return null;
2951
+ if (!(0, import_node_fs7.existsSync)(this.statePath)) return null;
1564
2952
  try {
1565
- return JSON.parse((0, import_node_fs5.readFileSync)(this.statePath, "utf-8"));
2953
+ return JSON.parse((0, import_node_fs7.readFileSync)(this.statePath, "utf-8"));
1566
2954
  } catch {
1567
2955
  return null;
1568
2956
  }
@@ -1589,21 +2977,21 @@ var IncrementalTracker = class {
1589
2977
  };
1590
2978
  }
1591
2979
  clear() {
1592
- if ((0, import_node_fs5.existsSync)(this.statePath)) {
1593
- (0, import_node_fs5.writeFileSync)(this.statePath, "");
2980
+ if ((0, import_node_fs7.existsSync)(this.statePath)) {
2981
+ (0, import_node_fs7.unlinkSync)(this.statePath);
1594
2982
  }
1595
2983
  }
1596
2984
  saveState(state) {
1597
- if (!(0, import_node_fs5.existsSync)(this.stateDir)) {
1598
- (0, import_node_fs5.mkdirSync)(this.stateDir, { recursive: true });
2985
+ if (!(0, import_node_fs7.existsSync)(this.stateDir)) {
2986
+ (0, import_node_fs7.mkdirSync)(this.stateDir, { recursive: true });
1599
2987
  }
1600
- (0, import_node_fs5.writeFileSync)(this.statePath, JSON.stringify(state, null, 2));
2988
+ (0, import_node_fs7.writeFileSync)(this.statePath, JSON.stringify(state, null, 2));
1601
2989
  }
1602
2990
  };
1603
2991
 
1604
2992
  // src/package-updater.ts
1605
- var import_node_fs6 = require("fs");
1606
- var import_node_path6 = require("path");
2993
+ var import_node_fs8 = require("fs");
2994
+ var import_node_path8 = require("path");
1607
2995
  var TARGET_VERSIONS = {
1608
2996
  "yup->zod": { zod: "^3.24.0" },
1609
2997
  "joi->zod": { zod: "^3.24.0" },
@@ -1624,14 +3012,14 @@ var PackageUpdater = class {
1624
3012
  const add = {};
1625
3013
  const remove = [];
1626
3014
  const warnings = [];
1627
- const pkgPath = (0, import_node_path6.join)(projectPath, "package.json");
1628
- if (!(0, import_node_fs6.existsSync)(pkgPath)) {
3015
+ const pkgPath = (0, import_node_path8.join)(projectPath, "package.json");
3016
+ if (!(0, import_node_fs8.existsSync)(pkgPath)) {
1629
3017
  warnings.push("No package.json found. Cannot plan dependency updates.");
1630
3018
  return { add, remove, warnings };
1631
3019
  }
1632
3020
  let pkg;
1633
3021
  try {
1634
- pkg = JSON.parse((0, import_node_fs6.readFileSync)(pkgPath, "utf-8"));
3022
+ pkg = JSON.parse((0, import_node_fs8.readFileSync)(pkgPath, "utf-8"));
1635
3023
  } catch {
1636
3024
  warnings.push("Could not parse package.json.");
1637
3025
  return { add, remove, warnings };
@@ -1661,9 +3049,9 @@ var PackageUpdater = class {
1661
3049
  return { add, remove, warnings };
1662
3050
  }
1663
3051
  apply(projectPath, plan) {
1664
- const pkgPath = (0, import_node_path6.join)(projectPath, "package.json");
1665
- if (!(0, import_node_fs6.existsSync)(pkgPath)) return;
1666
- const pkgText = (0, import_node_fs6.readFileSync)(pkgPath, "utf-8");
3052
+ const pkgPath = (0, import_node_path8.join)(projectPath, "package.json");
3053
+ if (!(0, import_node_fs8.existsSync)(pkgPath)) return;
3054
+ const pkgText = (0, import_node_fs8.readFileSync)(pkgPath, "utf-8");
1667
3055
  const pkg = JSON.parse(pkgText);
1668
3056
  if (!pkg.dependencies) pkg.dependencies = {};
1669
3057
  for (const [name, version] of Object.entries(plan.add)) {
@@ -1673,11 +3061,133 @@ var PackageUpdater = class {
1673
3061
  pkg.dependencies[name] = version;
1674
3062
  }
1675
3063
  }
1676
- (0, import_node_fs6.writeFileSync)(pkgPath, `${JSON.stringify(pkg, null, 2)}
3064
+ (0, import_node_fs8.writeFileSync)(pkgPath, `${JSON.stringify(pkg, null, 2)}
1677
3065
  `);
1678
3066
  }
1679
3067
  };
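
For PackageUpdater, only apply(projectPath, plan) and the { add, remove, warnings } plan shape are fully visible in this hunk; the method that builds the plan from TARGET_VERSIONS is cut off by the diff context, so the sketch below constructs a plan by hand and should be read as an assumption about the intended call pattern.

// Hypothetical sketch: apply a hand-built dependency plan to ./my-app/package.json.
import { PackageUpdater } from "@schemashift/core";

const updater = new PackageUpdater();
const plan = {
  add: { zod: "^3.24.0" },  // TARGET_VERSIONS above maps "yup->zod" to this range
  remove: ["yup"],          // removal handling is not shown in the visible lines
  warnings: [] as string[],
};

// The visible apply() body merges plan.add into pkg.dependencies and rewrites
// package.json with 2-space indentation and a trailing newline.
updater.apply("./my-app", plan);
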
1680
3068
 
3069
+ // src/performance-analyzer.ts
3070
+ var PerformanceAnalyzer = class {
3071
+ analyze(sourceFiles, from, to) {
3072
+ const warnings = [];
3073
+ let parseCallSites = 0;
3074
+ let dynamicSchemaCount = 0;
3075
+ for (const file of sourceFiles) {
3076
+ const text = file.getFullText();
3077
+ const filePath = file.getFilePath();
3078
+ const parseMatches = text.match(/\.(parse|safeParse)\s*\(/g);
3079
+ if (parseMatches) {
3080
+ parseCallSites += parseMatches.length;
3081
+ }
3082
+ const dynamicResult = this.detectDynamicSchemas(text, filePath);
3083
+ dynamicSchemaCount += dynamicResult.count;
3084
+ warnings.push(...dynamicResult.warnings);
3085
+ this.addMigrationWarnings(text, filePath, from, to, warnings);
3086
+ }
3087
+ const recommendation = this.getRecommendation(from, to, parseCallSites, dynamicSchemaCount);
3088
+ const summary = this.generateSummary(warnings, parseCallSites, dynamicSchemaCount);
3089
+ return {
3090
+ warnings,
3091
+ parseCallSites,
3092
+ dynamicSchemaCount,
3093
+ recommendation,
3094
+ summary
3095
+ };
3096
+ }
3097
+ detectDynamicSchemas(text, filePath) {
3098
+ const warnings = [];
3099
+ let count = 0;
3100
+ const functionBodyPattern = /(?:function\s+\w+\s*\([^)]*\)|const\s+\w+\s*=\s*(?:async\s+)?(?:\([^)]*\)|[a-zA-Z_]\w*)\s*=>)\s*\{[^}]*(?:z\.|yup\.|Joi\.|v\.)\w+\s*\(/g;
3101
+ for (const match of text.matchAll(functionBodyPattern)) {
3102
+ count++;
3103
+ const lineNumber = text.substring(0, match.index).split("\n").length;
3104
+ warnings.push({
3105
+ category: "dynamic-schemas",
3106
+ message: "Schema created inside function body \u2014 may cause performance issues with Zod v4.",
3107
+ detail: "Zod v4 uses JIT compilation, making schema creation ~17x slower than v3. Move schema definitions to module level to avoid re-creation on every call.",
3108
+ filePath,
3109
+ lineNumber,
3110
+ severity: "warning"
3111
+ });
3112
+ }
3113
+ const reactComponentPattern = /(?:function\s+[A-Z]\w*\s*\([^)]*\)|const\s+[A-Z]\w*\s*[:=])[^{]*\{[^}]*(?:z\.|yup\.|Joi\.)\w+\s*\(/g;
3114
+ for (const match of text.matchAll(reactComponentPattern)) {
3115
+ count++;
3116
+ const lineNumber = text.substring(0, match.index).split("\n").length;
3117
+ warnings.push({
3118
+ category: "schema-creation",
3119
+ message: "Schema appears to be created inside a React component.",
3120
+ detail: "Schemas created inside React components are re-created on every render. Move schema definitions outside the component or wrap in useMemo(). This is especially important for Zod v4 due to JIT compilation overhead.",
3121
+ filePath,
3122
+ lineNumber,
3123
+ severity: "warning"
3124
+ });
3125
+ }
3126
+ return { count, warnings };
3127
+ }
3128
+ addMigrationWarnings(text, filePath, from, to, warnings) {
3129
+ const migration = `${from}->${to}`;
3130
+ if (migration === "zod-v3->v4") {
3131
+ if (/edge-runtime|@vercel\/edge|cloudflare.*workers|deno\.serve|Deno\.serve/i.test(text) || /export\s+const\s+runtime\s*=\s*['"]edge['"]/i.test(text)) {
3132
+ warnings.push({
3133
+ category: "cold-start",
3134
+ message: "Edge/serverless environment detected \u2014 Zod v4 JIT compilation increases cold start time.",
3135
+ detail: "Zod v4 JIT trades slower schema creation for faster repeated parsing. In serverless/edge environments with short-lived instances, the JIT cost may not amortize. Consider Valibot or staying on Zod v3 for cold-start-sensitive code.",
3136
+ filePath,
3137
+ severity: "warning"
3138
+ });
3139
+ }
3140
+ const parseCount = (text.match(/\.parse\s*\(/g) || []).length;
3141
+ if (parseCount > 10) {
3142
+ warnings.push({
3143
+ category: "repeated-parsing",
3144
+ message: `High parse() usage (${parseCount} call sites) \u2014 Zod v4 JIT will benefit here.`,
3145
+ detail: "Zod v4 JIT compilation makes repeated parsing ~8x faster. This file has many parse() calls and will see performance improvement.",
3146
+ filePath,
3147
+ severity: "info"
3148
+ });
3149
+ }
3150
+ }
3151
+ if (migration === "zod->valibot" && /\.parse\s*\(/.test(text)) {
3152
+ warnings.push({
3153
+ category: "repeated-parsing",
3154
+ message: "Valibot parsing performance is comparable to Zod v4 for most schemas.",
3155
+ detail: "Valibot v1+ offers similar runtime performance to Zod v4 with significantly smaller bundle size. No JIT overhead means consistent performance across all environments.",
3156
+ filePath,
3157
+ severity: "info"
3158
+ });
3159
+ }
3160
+ }
3161
+ getRecommendation(from, to, parseCallSites, dynamicSchemaCount) {
3162
+ const migration = `${from}->${to}`;
3163
+ if (migration === "zod-v3->v4") {
3164
+ if (dynamicSchemaCount > 5) {
3165
+ return "Many dynamic schemas detected. Zod v4 JIT makes schema creation 17x slower. Move schemas to module level before migrating, or consider Valibot for size-sensitive apps.";
3166
+ }
3167
+ if (parseCallSites > 50) {
3168
+ return "High parse() volume detected. Zod v4 JIT will significantly benefit repeated parsing (up to 8x faster). Migration recommended for performance.";
3169
+ }
3170
+ return "Moderate usage detected. Zod v4 trades slower startup for faster runtime parsing.";
3171
+ }
3172
+ if (migration === "zod->valibot") {
3173
+ return "Valibot offers similar runtime performance with significantly smaller bundle size. Best suited for bundle-size-sensitive applications.";
3174
+ }
3175
+ if (from === "yup" || from === "joi") {
3176
+ return `Migrating from ${from} to ${to} should have neutral or positive performance impact.`;
3177
+ }
3178
+ return "Performance impact depends on usage patterns. Review warnings for details.";
3179
+ }
3180
+ generateSummary(warnings, parseCallSites, dynamicSchemaCount) {
3181
+ const parts = [];
3182
+ parts.push(`${parseCallSites} parse/safeParse call sites`);
3183
+ if (dynamicSchemaCount > 0) {
3184
+ parts.push(`${dynamicSchemaCount} dynamic schema creation sites`);
3185
+ }
3186
+ parts.push(`${warnings.length} performance warning(s)`);
3187
+ return parts.join(", ");
3188
+ }
3189
+ };
3190
+
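
PerformanceAnalyzer walks ts-morph source files, counts parse/safeParse call sites, flags schemas created inside function bodies or React components, and adds migration-specific warnings (edge runtimes, high parse volume). A usage sketch, assuming the analyze(sourceFiles, from, to) signature and result fields shown above:

// Hypothetical sketch: assess performance impact before a zod-v3 -> v4 migration.
import { Project } from "ts-morph";
import { PerformanceAnalyzer } from "@schemashift/core";

const project = new Project();
project.addSourceFilesAtPaths("src/**/*.{ts,tsx}");

const analyzer = new PerformanceAnalyzer();
const report = analyzer.analyze(project.getSourceFiles(), "zod-v3", "v4");

console.log(report.summary);         // "<n> parse/safeParse call sites, ..."
console.log(report.recommendation);
for (const warning of report.warnings) {
  console.log(`[${warning.severity}] ${warning.category}: ${warning.message}`);
}
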
1681
3191
  // src/plugin-loader.ts
1682
3192
  var PluginLoader = class {
1683
3193
  async loadPlugins(pluginPaths) {
@@ -1723,8 +3233,8 @@ var PluginLoader = class {
1723
3233
  };
1724
3234
 
1725
3235
  // src/standard-schema.ts
1726
- var import_node_fs7 = require("fs");
1727
- var import_node_path7 = require("path");
3236
+ var import_node_fs9 = require("fs");
3237
+ var import_node_path9 = require("path");
1728
3238
  var STANDARD_SCHEMA_LIBRARIES = {
1729
3239
  zod: { minMajor: 3, minMinor: 23 },
1730
3240
  // Zod v3.23+ and v4+
@@ -1753,16 +3263,16 @@ function isVersionCompatible(version, minMajor, minMinor) {
1753
3263
  return false;
1754
3264
  }
1755
3265
  function detectStandardSchema(projectPath) {
1756
- const pkgPath = (0, import_node_path7.join)(projectPath, "package.json");
1757
- if (!(0, import_node_fs7.existsSync)(pkgPath)) {
1758
- return { detected: false, compatibleLibraries: [], recommendation: "" };
3266
+ const pkgPath = (0, import_node_path9.join)(projectPath, "package.json");
3267
+ if (!(0, import_node_fs9.existsSync)(pkgPath)) {
3268
+ return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
1759
3269
  }
1760
3270
  let allDeps = {};
1761
3271
  try {
1762
- const pkg = JSON.parse((0, import_node_fs7.readFileSync)(pkgPath, "utf-8"));
3272
+ const pkg = JSON.parse((0, import_node_fs9.readFileSync)(pkgPath, "utf-8"));
1763
3273
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
1764
3274
  } catch {
1765
- return { detected: false, compatibleLibraries: [], recommendation: "" };
3275
+ return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
1766
3276
  }
1767
3277
  const hasExplicitStandardSchema = "@standard-schema/spec" in allDeps;
1768
3278
  const compatibleLibraries = [];
@@ -1781,9 +3291,155 @@ function detectStandardSchema(projectPath) {
1781
3291
  } else if (hasExplicitStandardSchema) {
1782
3292
  recommendation = "Standard Schema spec detected. Ensure your validation library supports Standard Schema for maximum interoperability.";
1783
3293
  }
1784
- return { detected, compatibleLibraries, recommendation };
3294
+ let adoptionPath;
3295
+ if (detected && !hasExplicitStandardSchema) {
3296
+ adoptionPath = "Install @standard-schema/spec for explicit Standard Schema support. This enables library-agnostic validation consumers to accept your schemas without depending on a specific library. Run: npm install @standard-schema/spec";
3297
+ } else if (!detected) {
3298
+ adoptionPath = "Consider migrating to a Standard Schema-compatible library (Zod v3.23+, Valibot v1+, ArkType v2+) to future-proof your validation layer and reduce library lock-in.";
3299
+ }
3300
+ const interopTools = detected ? [
3301
+ "tRPC v11+ (Standard Schema input validation)",
3302
+ "TanStack Form (schema-agnostic validation)",
3303
+ "TanStack Router (route parameter validation)",
3304
+ "Hono (request validation middleware)",
3305
+ "Conform (progressive form validation)",
3306
+ "Nuxt (runtime config validation)"
3307
+ ] : [];
3308
+ return { detected, compatibleLibraries, recommendation, adoptionPath, interopTools };
1785
3309
  }
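
In this diff, the detectStandardSchema() result gains adoptionPath and interopTools alongside the existing detected/compatibleLibraries/recommendation fields. A usage sketch, assuming the function is called with a project root containing package.json; "./my-app" is a placeholder path.

// Hypothetical sketch: check a project for Standard Schema compatibility.
import { detectStandardSchema } from "@schemashift/core";

const result = detectStandardSchema("./my-app");
if (result.detected) {
  console.log(`Compatible libraries: ${result.compatibleLibraries.join(", ")}`);
  console.log(`Interop targets: ${result.interopTools.join(", ")}`);
} else if (result.adoptionPath) {
  console.log(result.adoptionPath);
}
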
1786
3310
 
3311
+ // src/test-scaffolder.ts
3312
+ var TestScaffolder = class {
3313
+ scaffold(sourceFiles, from, to) {
3314
+ const tests = [];
3315
+ let totalSchemas = 0;
3316
+ for (const file of sourceFiles) {
3317
+ const schemas = this.extractSchemaNames(file, from);
3318
+ if (schemas.length === 0) continue;
3319
+ totalSchemas += schemas.length;
3320
+ const testCode = this.generateTestFile(file, schemas, from, to);
3321
+ const filePath = file.getFilePath().replace(/\.tsx?$/, ".migration-test.ts");
3322
+ tests.push({ filePath, testCode, schemaCount: schemas.length });
3323
+ }
3324
+ const summary = tests.length > 0 ? `Generated ${tests.length} test file(s) covering ${totalSchemas} schema(s) for ${from}->${to} migration.` : "No schemas found to generate tests for.";
3325
+ return { tests, totalSchemas, summary };
3326
+ }
3327
+ extractSchemaNames(file, library) {
3328
+ const names = [];
3329
+ const prefixes = this.getLibraryPrefixes(library);
3330
+ for (const varDecl of file.getVariableDeclarations()) {
3331
+ const initializer = varDecl.getInitializer();
3332
+ if (!initializer) continue;
3333
+ const text = initializer.getText();
3334
+ if (prefixes.some((p) => text.startsWith(p))) {
3335
+ names.push(varDecl.getName());
3336
+ }
3337
+ }
3338
+ return names;
3339
+ }
3340
+ getLibraryPrefixes(library) {
3341
+ switch (library) {
3342
+ case "zod":
3343
+ case "zod-v3":
3344
+ return ["z.", "zod."];
3345
+ case "yup":
3346
+ return ["yup.", "Yup."];
3347
+ case "joi":
3348
+ return ["Joi.", "joi."];
3349
+ case "io-ts":
3350
+ return ["t."];
3351
+ case "valibot":
3352
+ return ["v.", "valibot."];
3353
+ default:
3354
+ return ["z."];
3355
+ }
3356
+ }
3357
+ generateTestFile(file, schemaNames, from, to) {
3358
+ const relativePath = file.getFilePath();
3359
+ const schemaImports = schemaNames.join(", ");
3360
+ const parseMethod = this.getParseMethod(to);
3361
+ const errorClass = this.getErrorClass(to);
3362
+ const testCases = schemaNames.map((name) => this.generateSchemaTests(name, to, parseMethod, errorClass)).join("\n\n");
3363
+ return `/**
3364
+ * Migration validation tests for ${from} -> ${to}
3365
+ * Auto-generated by SchemaShift
3366
+ *
3367
+ * These tests verify that schema behavior is preserved after migration.
3368
+ * Run before and after migration to ensure equivalence.
3369
+ *
3370
+ * Source: ${relativePath}
3371
+ */
3372
+ import { describe, expect, it } from 'vitest';
3373
+ import { ${schemaImports} } from '${relativePath.replace(/\.ts$/, ".js")}';
3374
+
3375
+ describe('Migration validation: ${relativePath}', () => {
3376
+ ${testCases}
3377
+ });
3378
+ `;
3379
+ }
3380
+ getParseMethod(to) {
3381
+ switch (to) {
3382
+ case "valibot":
3383
+ return "v.safeParse";
3384
+ default:
3385
+ return ".safeParse";
3386
+ }
3387
+ }
3388
+ getErrorClass(to) {
3389
+ switch (to) {
3390
+ case "valibot":
3391
+ return "ValiError";
3392
+ case "zod":
3393
+ case "v4":
3394
+ return "ZodError";
3395
+ default:
3396
+ return "Error";
3397
+ }
3398
+ }
3399
+ generateSchemaTests(schemaName, to, _parseMethod, _errorClass) {
3400
+ if (to === "valibot") {
3401
+ return ` describe('${schemaName}', () => {
3402
+ it('should accept valid data', () => {
3403
+ // TODO(schemashift): Add valid test data for ${schemaName}
3404
+ // const result = v.safeParse(${schemaName}, validData);
3405
+ // expect(result.success).toBe(true);
3406
+ });
3407
+
3408
+ it('should reject invalid data', () => {
3409
+ // TODO(schemashift): Add invalid test data for ${schemaName}
3410
+ // const result = v.safeParse(${schemaName}, invalidData);
3411
+ // expect(result.success).toBe(false);
3412
+ });
3413
+
3414
+ it('should preserve error messages', () => {
3415
+ // TODO(schemashift): Verify custom error messages are preserved
3416
+ // const result = v.safeParse(${schemaName}, invalidData);
3417
+ // expect(result.issues?.[0]?.message).toContain('expected message');
3418
+ });
3419
+ });`;
3420
+ }
3421
+ return ` describe('${schemaName}', () => {
3422
+ it('should accept valid data', () => {
3423
+ // TODO(schemashift): Add valid test data for ${schemaName}
3424
+ // const result = ${schemaName}.safeParse(validData);
3425
+ // expect(result.success).toBe(true);
3426
+ });
3427
+
3428
+ it('should reject invalid data', () => {
3429
+ // TODO(schemashift): Add invalid test data for ${schemaName}
3430
+ // const result = ${schemaName}.safeParse(invalidData);
3431
+ // expect(result.success).toBe(false);
3432
+ });
3433
+
3434
+ it('should preserve error messages', () => {
3435
+ // TODO(schemashift): Verify custom error messages are preserved
3436
+ // const result = ${schemaName}.safeParse(invalidData);
3437
+ // expect(result.error?.issues[0]?.message).toContain('expected message');
3438
+ });
3439
+ });`;
3440
+ }
3441
+ };
3442
+
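
TestScaffolder emits vitest skeletons (one *.migration-test.ts per source file containing schemas) with TODO placeholders rather than generated test data. A usage sketch, assuming scaffold(sourceFiles, from, to) and the { tests, totalSchemas, summary } result shown above; writing the generated files to disk is an assumption about how the output is meant to be consumed.

// Hypothetical sketch: scaffold migration tests for a yup -> zod move.
import { writeFileSync } from "fs";
import { Project } from "ts-morph";
import { TestScaffolder } from "@schemashift/core";

const project = new Project();
project.addSourceFilesAtPaths("src/**/*.ts");

const scaffolder = new TestScaffolder();
const result = scaffolder.scaffold(project.getSourceFiles(), "yup", "zod");

console.log(result.summary);
for (const test of result.tests) {
  // Each entry pairs a *.migration-test.ts path with generated vitest source.
  writeFileSync(test.filePath, test.testCode);
}
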
1787
3443
  // src/transform.ts
1788
3444
  var TransformEngine = class {
1789
3445
  handlers = /* @__PURE__ */ new Map();
@@ -1798,9 +3454,10 @@ var TransformEngine = class {
1798
3454
  }
1799
3455
  getSupportedPaths() {
1800
3456
  return Array.from(this.handlers.keys()).map((key) => {
1801
- const [from, to] = key.split("->");
1802
- return { from, to };
1803
- });
3457
+ const parts = key.split("->");
3458
+ if (parts.length !== 2) return null;
3459
+ return { from: parts[0], to: parts[1] };
3460
+ }).filter((entry) => entry !== null);
1804
3461
  }
1805
3462
  transform(sourceFile, from, to, options) {
1806
3463
  const handler = this.getHandler(from, to);
@@ -1816,26 +3473,165 @@ var TransformEngine = class {
1816
3473
  return handler.transform(sourceFile, options);
1817
3474
  }
1818
3475
  };
3476
+
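
The getSupportedPaths() change above makes the handler-key split null-safe, so malformed keys no longer yield entries with undefined from/to. A small sketch of the visible surface; how handlers are registered is not shown in this hunk, so a fresh engine simply reports an empty list.

// Hypothetical sketch: list the migration paths an engine instance knows about.
import { TransformEngine } from "@schemashift/core";

const engine = new TransformEngine();
for (const path of engine.getSupportedPaths()) {
  console.log(`${path.from} -> ${path.to}`);  // empty until handlers are registered
}
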
3477
+ // src/type-dedup-detector.ts
3478
+ var import_ts_morph4 = require("ts-morph");
3479
+ var TypeDedupDetector = class {
3480
+ detect(sourceFiles) {
3481
+ const typeDefinitions = this.collectTypeDefinitions(sourceFiles);
3482
+ const schemaDefinitions = this.collectSchemaDefinitions(sourceFiles);
3483
+ const candidates = this.findMatches(typeDefinitions, schemaDefinitions);
3484
+ const summary = candidates.length > 0 ? `Found ${candidates.length} type definition(s) that may duplicate schema shapes. After migration, replace with z.infer<typeof schema>.` : "No duplicate type definitions detected.";
3485
+ return { candidates, summary };
3486
+ }
3487
+ collectTypeDefinitions(sourceFiles) {
3488
+ const types = [];
3489
+ for (const file of sourceFiles) {
3490
+ const filePath = file.getFilePath();
3491
+ for (const iface of file.getInterfaces()) {
3492
+ const fields = iface.getProperties().map((p) => p.getName());
3493
+ if (fields.length > 0) {
3494
+ types.push({
3495
+ name: iface.getName(),
3496
+ fields,
3497
+ filePath,
3498
+ lineNumber: iface.getStartLineNumber()
3499
+ });
3500
+ }
3501
+ }
3502
+ for (const typeAlias of file.getTypeAliases()) {
3503
+ const typeNode = typeAlias.getTypeNode();
3504
+ if (!typeNode) continue;
3505
+ if (import_ts_morph4.Node.isTypeLiteral(typeNode)) {
3506
+ const fields = typeNode.getProperties().map((p) => p.getName());
3507
+ if (fields.length > 0) {
3508
+ types.push({
3509
+ name: typeAlias.getName(),
3510
+ fields,
3511
+ filePath,
3512
+ lineNumber: typeAlias.getStartLineNumber()
3513
+ });
3514
+ }
3515
+ }
3516
+ }
3517
+ }
3518
+ return types;
3519
+ }
3520
+ collectSchemaDefinitions(sourceFiles) {
3521
+ const schemas = [];
3522
+ for (const file of sourceFiles) {
3523
+ const filePath = file.getFilePath();
3524
+ for (const varDecl of file.getVariableDeclarations()) {
3525
+ const initializer = varDecl.getInitializer();
3526
+ if (!initializer) continue;
3527
+ const text = initializer.getText();
3528
+ const isSchema = /(?:z|zod|yup|Yup|Joi|joi|t|v|valibot)\.object\s*\(/.test(text) || /Joi\.object\s*\(/.test(text);
3529
+ if (!isSchema) continue;
3530
+ const fields = this.extractSchemaFields(text);
3531
+ if (fields.length > 0) {
3532
+ schemas.push({
3533
+ name: varDecl.getName(),
3534
+ fields,
3535
+ filePath,
3536
+ lineNumber: varDecl.getStartLineNumber()
3537
+ });
3538
+ }
3539
+ }
3540
+ }
3541
+ return schemas;
3542
+ }
3543
+ extractSchemaFields(text) {
3544
+ const fields = [];
3545
+ const fieldPattern = /\b(\w+)\s*:\s*(?:z|zod|yup|Yup|Joi|joi|t|v|valibot)\./g;
3546
+ for (const match of text.matchAll(fieldPattern)) {
3547
+ if (match[1]) {
3548
+ fields.push(match[1]);
3549
+ }
3550
+ }
3551
+ return fields;
3552
+ }
3553
+ findMatches(types, schemas) {
3554
+ const candidates = [];
3555
+ for (const typeDef of types) {
3556
+ for (const schemaDef of schemas) {
3557
+ const matchedFields = this.getMatchedFields(typeDef.fields, schemaDef.fields);
3558
+ if (matchedFields.length < 2) continue;
3559
+ const typeFieldCount = typeDef.fields.length;
3560
+ const schemaFieldCount = schemaDef.fields.length;
3561
+ const matchRatio = matchedFields.length / Math.max(typeFieldCount, schemaFieldCount);
3562
+ let confidence;
3563
+ if (matchRatio >= 0.8) {
3564
+ confidence = "high";
3565
+ } else if (matchRatio >= 0.5) {
3566
+ confidence = "medium";
3567
+ } else {
3568
+ confidence = "low";
3569
+ }
3570
+ if (confidence === "low" && !this.namesRelated(typeDef.name, schemaDef.name)) {
3571
+ continue;
3572
+ }
3573
+ candidates.push({
3574
+ typeName: typeDef.name,
3575
+ typeFilePath: typeDef.filePath,
3576
+ typeLineNumber: typeDef.lineNumber,
3577
+ schemaName: schemaDef.name,
3578
+ schemaFilePath: schemaDef.filePath,
3579
+ schemaLineNumber: schemaDef.lineNumber,
3580
+ matchedFields,
3581
+ confidence,
3582
+ suggestion: `Replace "type/interface ${typeDef.name}" with "type ${typeDef.name} = z.infer<typeof ${schemaDef.name}>" (${matchedFields.length}/${typeFieldCount} fields match).`
3583
+ });
3584
+ }
3585
+ }
3586
+ candidates.sort((a, b) => {
3587
+ const confidenceOrder = { high: 0, medium: 1, low: 2 };
3588
+ const diff = confidenceOrder[a.confidence] - confidenceOrder[b.confidence];
3589
+ if (diff !== 0) return diff;
3590
+ return b.matchedFields.length - a.matchedFields.length;
3591
+ });
3592
+ return candidates;
3593
+ }
3594
+ getMatchedFields(typeFields, schemaFields) {
3595
+ const schemaSet = new Set(schemaFields);
3596
+ return typeFields.filter((f) => schemaSet.has(f));
3597
+ }
3598
+ namesRelated(typeName, schemaName) {
3599
+ const normalize = (name) => name.toLowerCase().replace(/schema|type|interface|i$/gi, "");
3600
+ return normalize(typeName) === normalize(schemaName);
3601
+ }
3602
+ };
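
TypeDedupDetector compares interface/type-alias property names against object-schema field names and ranks matches by overlap ratio, so hand-written types can be replaced with z.infer<typeof schema> after migration. A usage sketch, assuming detect(sourceFiles) and the candidate fields shown above:

// Hypothetical sketch: find type definitions that duplicate schema shapes.
import { Project } from "ts-morph";
import { TypeDedupDetector } from "@schemashift/core";

const project = new Project();
project.addSourceFilesAtPaths("src/**/*.ts");

const detector = new TypeDedupDetector();
const { candidates, summary } = detector.detect(project.getSourceFiles());

console.log(summary);
for (const c of candidates) {
  console.log(`[${c.confidence}] ${c.typeName} (${c.typeFilePath}:${c.typeLineNumber})`);
  console.log(`  ${c.suggestion}`);
}
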
1819
3603
  // Annotate the CommonJS export names for ESM import in node:
1820
3604
  0 && (module.exports = {
3605
+ BehavioralWarningAnalyzer,
3606
+ BundleEstimator,
1821
3607
  CompatibilityAnalyzer,
1822
3608
  ComplexityEstimator,
1823
3609
  DetailedAnalyzer,
3610
+ DriftDetector,
1824
3611
  EcosystemAnalyzer,
1825
3612
  FormResolverMigrator,
3613
+ GOVERNANCE_TEMPLATES,
1826
3614
  GovernanceEngine,
1827
3615
  IncrementalTracker,
3616
+ MigrationAuditLog,
1828
3617
  MigrationChain,
1829
3618
  MonorepoResolver,
1830
3619
  PackageUpdater,
3620
+ PerformanceAnalyzer,
1831
3621
  PluginLoader,
1832
3622
  SchemaAnalyzer,
1833
3623
  SchemaDependencyResolver,
3624
+ TestScaffolder,
1834
3625
  TransformEngine,
3626
+ TypeDedupDetector,
1835
3627
  buildCallChain,
3628
+ computeParallelBatches,
1836
3629
  detectFormLibraries,
1837
3630
  detectSchemaLibrary,
1838
3631
  detectStandardSchema,
3632
+ getGovernanceTemplate,
3633
+ getGovernanceTemplateNames,
3634
+ getGovernanceTemplatesByCategory,
1839
3635
  isInsideComment,
1840
3636
  isInsideStringLiteral,
1841
3637
  loadConfig,