@schemashift/core 0.8.0 → 0.9.0

This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
package/dist/index.cjs CHANGED
@@ -20,6 +20,8 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
20
20
  // src/index.ts
21
21
  var index_exports = {};
22
22
  __export(index_exports, {
23
+ BehavioralWarningAnalyzer: () => BehavioralWarningAnalyzer,
24
+ BundleEstimator: () => BundleEstimator,
23
25
  CompatibilityAnalyzer: () => CompatibilityAnalyzer,
24
26
  ComplexityEstimator: () => ComplexityEstimator,
25
27
  DetailedAnalyzer: () => DetailedAnalyzer,
@@ -27,14 +29,19 @@ __export(index_exports, {
27
29
  FormResolverMigrator: () => FormResolverMigrator,
28
30
  GovernanceEngine: () => GovernanceEngine,
29
31
  IncrementalTracker: () => IncrementalTracker,
32
+ MigrationAuditLog: () => MigrationAuditLog,
30
33
  MigrationChain: () => MigrationChain,
31
34
  MonorepoResolver: () => MonorepoResolver,
32
35
  PackageUpdater: () => PackageUpdater,
36
+ PerformanceAnalyzer: () => PerformanceAnalyzer,
33
37
  PluginLoader: () => PluginLoader,
34
38
  SchemaAnalyzer: () => SchemaAnalyzer,
35
39
  SchemaDependencyResolver: () => SchemaDependencyResolver,
40
+ TestScaffolder: () => TestScaffolder,
36
41
  TransformEngine: () => TransformEngine,
42
+ TypeDedupDetector: () => TypeDedupDetector,
37
43
  buildCallChain: () => buildCallChain,
44
+ computeParallelBatches: () => computeParallelBatches,
38
45
  detectFormLibraries: () => detectFormLibraries,
39
46
  detectSchemaLibrary: () => detectSchemaLibrary,
40
47
  detectStandardSchema: () => detectStandardSchema,
@@ -315,6 +322,465 @@ function transformMethodChain(chain, newBase, factoryMapper, methodMapper) {
315
322
  return buildCallChain(newBase, factory.name, factory.args, mappedMethods);
316
323
  }
317
324
 
325
+ // src/audit-log.ts
326
+ var import_node_crypto = require("crypto");
327
+ var import_node_fs = require("fs");
328
+ var import_node_path = require("path");
329
+ var AUDIT_DIR = ".schemashift";
330
+ var AUDIT_FILE = "audit-log.json";
331
+ var AUDIT_VERSION = 1;
332
+ var MigrationAuditLog = class {
333
+ logDir;
334
+ logPath;
335
+ constructor(projectPath) {
336
+ this.logDir = (0, import_node_path.join)(projectPath, AUDIT_DIR);
337
+ this.logPath = (0, import_node_path.join)(this.logDir, AUDIT_FILE);
338
+ }
339
+ /**
340
+ * Append a new entry to the audit log.
341
+ */
342
+ append(entry) {
343
+ const log = this.read();
344
+ log.entries.push(entry);
345
+ this.write(log);
346
+ }
347
+ /**
348
+ * Create an audit entry for a file transformation.
349
+ */
350
+ createEntry(params) {
351
+ return {
352
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
353
+ migrationId: params.migrationId,
354
+ filePath: params.filePath,
355
+ action: "transform",
356
+ from: params.from,
357
+ to: params.to,
358
+ success: params.success,
359
+ beforeHash: this.hashContent(params.originalCode),
360
+ afterHash: params.transformedCode ? this.hashContent(params.transformedCode) : void 0,
361
+ warningCount: params.warningCount,
362
+ errorCount: params.errorCount,
363
+ riskScore: params.riskScore,
364
+ duration: params.duration,
365
+ user: this.getCurrentUser()
366
+ };
367
+ }
368
+ /**
369
+ * Read the current audit log.
370
+ */
371
+ read() {
372
+ if (!(0, import_node_fs.existsSync)(this.logPath)) {
373
+ return { version: AUDIT_VERSION, entries: [] };
374
+ }
375
+ try {
376
+ const content = (0, import_node_fs.readFileSync)(this.logPath, "utf-8");
377
+ if (!content.trim()) {
378
+ return { version: AUDIT_VERSION, entries: [] };
379
+ }
380
+ return JSON.parse(content);
381
+ } catch {
382
+ return { version: AUDIT_VERSION, entries: [] };
383
+ }
384
+ }
385
+ /**
386
+ * Get entries for a specific migration.
387
+ */
388
+ getByMigration(migrationId) {
389
+ const log = this.read();
390
+ return log.entries.filter((e) => e.migrationId === migrationId);
391
+ }
392
+ /**
393
+ * Get summary statistics for the audit log.
394
+ */
395
+ getSummary() {
396
+ const log = this.read();
397
+ const migrationIds = new Set(log.entries.map((e) => e.migrationId));
398
+ const migrationPaths = [...new Set(log.entries.map((e) => `${e.from}->${e.to}`))];
399
+ return {
400
+ totalMigrations: migrationIds.size,
401
+ totalFiles: log.entries.length,
402
+ successCount: log.entries.filter((e) => e.success).length,
403
+ failureCount: log.entries.filter((e) => !e.success).length,
404
+ migrationPaths
405
+ };
406
+ }
407
+ /**
408
+ * Clear the audit log.
409
+ */
410
+ clear() {
411
+ this.write({ version: AUDIT_VERSION, entries: [] });
412
+ }
413
+ write(log) {
414
+ if (!(0, import_node_fs.existsSync)(this.logDir)) {
415
+ (0, import_node_fs.mkdirSync)(this.logDir, { recursive: true });
416
+ }
417
+ (0, import_node_fs.writeFileSync)(this.logPath, JSON.stringify(log, null, 2));
418
+ }
419
+ hashContent(content) {
420
+ return (0, import_node_crypto.createHash)("sha256").update(content).digest("hex").substring(0, 16);
421
+ }
422
+ getCurrentUser() {
423
+ return process.env.USER || process.env.USERNAME || void 0;
424
+ }
425
+ };
426
+
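
The audit log above persists to .schemashift/audit-log.json under the project root. A minimal usage sketch of the new MigrationAuditLog export, based on the compiled code above; the migration id, file path, counts, and code strings are illustrative placeholders:

    const { MigrationAuditLog } = require("@schemashift/core");

    const audit = new MigrationAuditLog(process.cwd());
    // createEntry() fills in the timestamp, content hashes, and current user.
    const entry = audit.createEntry({
      migrationId: "yup-to-zod-2025-01",          // illustrative id
      filePath: "src/schemas/user.ts",            // illustrative path
      from: "yup",
      to: "zod",
      success: true,
      originalCode: "/* file contents before transform */",
      transformedCode: "/* file contents after transform */",
      warningCount: 1,
      errorCount: 0,
      riskScore: 12,
      duration: 85
    });
    audit.append(entry);            // creates .schemashift/ if needed and appends to audit-log.json
    console.log(audit.getSummary());
    // -> { totalMigrations, totalFiles, successCount, failureCount, migrationPaths }
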
427
+ // src/behavioral-warnings.ts
428
+ var BEHAVIORAL_RULES = [
429
+ // Yup -> Zod: Type coercion differences
430
+ {
431
+ category: "type-coercion",
432
+ migrations: ["yup->zod"],
433
+ detect: (text, filePath) => {
434
+ const warnings = [];
435
+ if (/yup\.(number|date)\s*\(\)/.test(text)) {
436
+ warnings.push({
437
+ category: "type-coercion",
438
+ message: "Yup silently coerces types; Zod rejects mismatches.",
439
+ detail: `Yup's number() accepts strings like "42" and coerces them. Zod's number() rejects strings. Use z.coerce.number() for equivalent behavior, especially for HTML form inputs which always return strings.`,
440
+ filePath,
441
+ severity: "warning",
442
+ migration: "yup->zod"
443
+ });
444
+ }
445
+ return warnings;
446
+ }
447
+ },
448
+ // Yup -> Zod: Form input string values
449
+ {
450
+ category: "form-input",
451
+ migrations: ["yup->zod"],
452
+ detect: (text, filePath) => {
453
+ const warnings = [];
454
+ const hasFormImport = /yupResolver|useFormik|from\s+['"]formik['"]|from\s+['"]@hookform/.test(
455
+ text
456
+ );
457
+ const hasNumberOrDate = /yup\.(number|date)\s*\(\)/.test(text);
458
+ if (hasFormImport && hasNumberOrDate) {
459
+ warnings.push({
460
+ category: "form-input",
461
+ message: "HTML inputs return strings \u2014 Zod will reject unless using z.coerce.*",
462
+ detail: 'HTML <input type="number"> returns strings. Yup coerces automatically, but Zod requires explicit coercion. Use z.coerce.number() or register({ valueAsNumber: true }) in React Hook Form.',
463
+ filePath,
464
+ severity: "error",
465
+ migration: "yup->zod"
466
+ });
467
+ }
468
+ return warnings;
469
+ }
470
+ },
471
+ // Joi -> Zod: Error handling paradigm shift
472
+ {
473
+ category: "error-handling",
474
+ migrations: ["joi->zod"],
475
+ detect: (text, filePath) => {
476
+ const warnings = [];
477
+ if (/\.validate\s*\(/.test(text) && /[Jj]oi/.test(text)) {
478
+ warnings.push({
479
+ category: "error-handling",
480
+ message: "Joi .validate() returns { value, error }; Zod .parse() throws.",
481
+ detail: "Joi uses an inspection pattern: .validate() returns an object with value and error. Zod .parse() throws a ZodError on failure. Use .safeParse() for a non-throwing equivalent that returns { success, data, error }.",
482
+ filePath,
483
+ severity: "warning",
484
+ migration: "joi->zod"
485
+ });
486
+ }
487
+ return warnings;
488
+ }
489
+ },
490
+ // Joi -> Zod: Null handling differences
491
+ {
492
+ category: "null-handling",
493
+ migrations: ["joi->zod"],
494
+ detect: (text, filePath) => {
495
+ const warnings = [];
496
+ if (/\.allow\s*\(\s*null\s*\)/.test(text)) {
497
+ warnings.push({
498
+ category: "null-handling",
499
+ message: "Joi .allow(null) vs Zod .nullable() have subtle differences.",
500
+ detail: 'Joi .allow(null) permits null alongside the base type. Zod .nullable() wraps the type in a union with null. Joi .allow("", null) has no single Zod equivalent \u2014 use z.union() or .transform().',
501
+ filePath,
502
+ severity: "info",
503
+ migration: "joi->zod"
504
+ });
505
+ }
506
+ return warnings;
507
+ }
508
+ },
509
+ // Zod v3 -> v4: Default value behavior change
510
+ {
511
+ category: "default-values",
512
+ migrations: ["zod-v3->v4"],
513
+ detect: (text, filePath) => {
514
+ const warnings = [];
515
+ if (/\.default\s*\(/.test(text) && /\.optional\s*\(\)/.test(text)) {
516
+ warnings.push({
517
+ category: "default-values",
518
+ message: ".default() + .optional() behavior changed silently in Zod v4.",
519
+ detail: "In Zod v3, .default(val).optional() returned undefined when property was missing. In Zod v4, it always returns the default value. This can cause unexpected behavior in API responses and form handling.",
520
+ filePath,
521
+ severity: "error",
522
+ migration: "zod-v3->v4"
523
+ });
524
+ }
525
+ if (/\.catch\s*\(/.test(text) && /\.optional\s*\(\)/.test(text)) {
526
+ warnings.push({
527
+ category: "default-values",
528
+ message: ".catch() + .optional() behavior changed in Zod v4.",
529
+ detail: "In Zod v4, object properties with .catch() that are .optional() now always return the caught value, even when the property is missing from input.",
530
+ filePath,
531
+ severity: "warning",
532
+ migration: "zod-v3->v4"
533
+ });
534
+ }
535
+ return warnings;
536
+ }
537
+ },
538
+ // Zod v3 -> v4: Error format differences
539
+ {
540
+ category: "error-format",
541
+ migrations: ["zod-v3->v4"],
542
+ detect: (text, filePath) => {
543
+ const warnings = [];
544
+ if (/ZodError/.test(text) && /instanceof\s+Error/.test(text)) {
545
+ warnings.push({
546
+ category: "error-format",
547
+ message: "ZodError no longer extends Error in Zod v4.",
548
+ detail: 'In Zod v4, ZodError no longer extends Error. Code using "instanceof Error" to catch ZodErrors will silently miss them. Use "instanceof ZodError" or z.isZodError() instead.',
549
+ filePath,
550
+ severity: "error",
551
+ migration: "zod-v3->v4"
552
+ });
553
+ }
554
+ return warnings;
555
+ }
556
+ },
557
+ // Zod v3 -> v4: Validation behavior differences
558
+ {
559
+ category: "validation-behavior",
560
+ migrations: ["zod-v3->v4"],
561
+ detect: (text, filePath) => {
562
+ const warnings = [];
563
+ if (/\.transform\s*\(/.test(text) && /\.refine\s*\(/.test(text)) {
564
+ warnings.push({
565
+ category: "validation-behavior",
566
+ message: ".transform() after .refine() behavior changed in Zod v4.",
567
+ detail: "In Zod v4, .transform() after .refine() may execute even if the refinement fails. Previously, transform was skipped on refinement failure.",
568
+ filePath,
569
+ severity: "warning",
570
+ migration: "zod-v3->v4"
571
+ });
572
+ }
573
+ return warnings;
574
+ }
575
+ },
576
+ // Zod -> Valibot: Error handling differences
577
+ {
578
+ category: "error-handling",
579
+ migrations: ["zod->valibot"],
580
+ detect: (text, filePath) => {
581
+ const warnings = [];
582
+ if (/\.parse\s*\(/.test(text) && /z\./.test(text)) {
583
+ warnings.push({
584
+ category: "error-handling",
585
+ message: "Zod .parse() throws ZodError; Valibot v.parse() throws ValiError.",
586
+ detail: "Error class and structure differ between Zod and Valibot. ZodError has .issues array; ValiError has .issues with different structure. Update all error handling code that inspects validation errors.",
587
+ filePath,
588
+ severity: "warning",
589
+ migration: "zod->valibot"
590
+ });
591
+ }
592
+ return warnings;
593
+ }
594
+ },
595
+ // io-ts -> Zod: Either monad vs throw/safeParse
596
+ {
597
+ category: "error-handling",
598
+ migrations: ["io-ts->zod"],
599
+ detect: (text, filePath) => {
600
+ const warnings = [];
601
+ if (/\bEither\b/.test(text) || /\b(fold|chain|map)\s*\(/.test(text)) {
602
+ warnings.push({
603
+ category: "error-handling",
604
+ message: "io-ts uses Either monad for errors; Zod uses throw/safeParse.",
605
+ detail: "io-ts returns Either<Errors, T> (Right for success, Left for failure). Zod .parse() throws, .safeParse() returns { success, data, error }. All fold/chain/map patterns over Either must be rewritten.",
606
+ filePath,
607
+ severity: "error",
608
+ migration: "io-ts->zod"
609
+ });
610
+ }
611
+ return warnings;
612
+ }
613
+ }
614
+ ];
615
+ var BehavioralWarningAnalyzer = class {
616
+ analyze(sourceFiles, from, to) {
617
+ const migration = `${from}->${to}`;
618
+ const warnings = [];
619
+ const applicableRules = BEHAVIORAL_RULES.filter((r) => r.migrations.includes(migration));
620
+ for (const sourceFile of sourceFiles) {
621
+ const filePath = sourceFile.getFilePath();
622
+ const text = sourceFile.getFullText();
623
+ const hasSourceLib = this.fileUsesLibrary(sourceFile, from);
624
+ if (!hasSourceLib) continue;
625
+ for (const rule of applicableRules) {
626
+ const ruleWarnings = rule.detect(text, filePath);
627
+ warnings.push(...ruleWarnings);
628
+ }
629
+ }
630
+ const summary = this.generateSummary(warnings, migration);
631
+ return { warnings, migrationPath: migration, summary };
632
+ }
633
+ fileUsesLibrary(sourceFile, library) {
634
+ for (const imp of sourceFile.getImportDeclarations()) {
635
+ const detected = detectSchemaLibrary(imp.getModuleSpecifierValue());
636
+ if (detected === library) return true;
637
+ if (library === "zod-v3" && detected === "zod") return true;
638
+ if (library === "zod" && detected === "zod") return true;
639
+ }
640
+ return false;
641
+ }
642
+ generateSummary(warnings, migration) {
643
+ if (warnings.length === 0) {
644
+ return `No behavioral differences detected for ${migration} migration.`;
645
+ }
646
+ const errorCount = warnings.filter((w) => w.severity === "error").length;
647
+ const warningCount = warnings.filter((w) => w.severity === "warning").length;
648
+ const infoCount = warnings.filter((w) => w.severity === "info").length;
649
+ const parts = [];
650
+ if (errorCount > 0) parts.push(`${errorCount} critical`);
651
+ if (warningCount > 0) parts.push(`${warningCount} warnings`);
652
+ if (infoCount > 0) parts.push(`${infoCount} info`);
653
+ return `Found ${warnings.length} behavioral difference(s) for ${migration}: ${parts.join(", ")}. Review before migrating.`;
654
+ }
655
+ };
656
+
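
BehavioralWarningAnalyzer runs the BEHAVIORAL_RULES above against ts-morph source files and only reports rules whose migration key matches the requested from/to pair. A hedged sketch of driving it from a consumer project; the source glob is an assumption about where the schemas live:

    const { Project } = require("ts-morph");
    const { BehavioralWarningAnalyzer } = require("@schemashift/core");

    const project = new Project();
    project.addSourceFilesAtPaths("src/**/*.ts");   // illustrative glob

    const analyzer = new BehavioralWarningAnalyzer();
    const report = analyzer.analyze(project.getSourceFiles(), "yup", "zod");
    console.log(report.summary);
    for (const w of report.warnings) {
      console.log(`[${w.severity}] ${w.filePath}: ${w.message}`);
    }
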
657
+ // src/bundle-estimator.ts
658
+ var LIBRARY_SIZES = {
659
+ zod: { fullKb: 14, baseKb: 14, treeShakable: false },
660
+ "zod-v3": { fullKb: 14, baseKb: 14, treeShakable: false },
661
+ v4: { fullKb: 17.7, baseKb: 17.7, treeShakable: false },
662
+ "zod-v4": { fullKb: 17.7, baseKb: 17.7, treeShakable: false },
663
+ "zod-mini": { fullKb: 7.5, baseKb: 3.5, treeShakable: true },
664
+ yup: { fullKb: 13.6, baseKb: 13.6, treeShakable: false },
665
+ joi: { fullKb: 29.7, baseKb: 29.7, treeShakable: false },
666
+ "io-ts": { fullKb: 6.5, baseKb: 6.5, treeShakable: true },
667
+ valibot: { fullKb: 5.8, baseKb: 1.4, treeShakable: true }
668
+ };
669
+ var VALIDATOR_OVERHEAD = {
670
+ valibot: 0.05
671
+ };
672
+ var COMMON_VALIDATORS = /* @__PURE__ */ new Set([
673
+ "string",
674
+ "number",
675
+ "boolean",
676
+ "object",
677
+ "array",
678
+ "optional",
679
+ "nullable",
680
+ "enum",
681
+ "union",
682
+ "literal",
683
+ "date",
684
+ "email",
685
+ "url",
686
+ "uuid",
687
+ "min",
688
+ "max",
689
+ "regex",
690
+ "transform",
691
+ "refine",
692
+ "default",
693
+ "record",
694
+ "tuple",
695
+ "lazy",
696
+ "discriminatedUnion",
697
+ "intersection",
698
+ "partial",
699
+ "pick",
700
+ "omit",
701
+ "brand",
702
+ "pipe"
703
+ ]);
704
+ var BundleEstimator = class {
705
+ estimate(sourceFiles, from, to) {
706
+ const usedValidators = this.countUsedValidators(sourceFiles);
707
+ const fromInfo = this.getLibraryInfo(from, usedValidators);
708
+ const toInfo = this.getLibraryInfo(to, usedValidators);
709
+ const estimatedDelta = toInfo.estimatedUsedKb - fromInfo.estimatedUsedKb;
710
+ const deltaPercent = fromInfo.estimatedUsedKb > 0 ? Math.round(estimatedDelta / fromInfo.estimatedUsedKb * 100) : 0;
711
+ const caveats = this.generateCaveats(from, to, usedValidators);
712
+ const summary = this.generateSummary(fromInfo, toInfo, estimatedDelta, deltaPercent);
713
+ return {
714
+ from: fromInfo,
715
+ to: toInfo,
716
+ estimatedDelta,
717
+ deltaPercent,
718
+ summary,
719
+ caveats
720
+ };
721
+ }
722
+ countUsedValidators(sourceFiles) {
723
+ const usedSet = /* @__PURE__ */ new Set();
724
+ for (const file of sourceFiles) {
725
+ const text = file.getFullText();
726
+ for (const validator of COMMON_VALIDATORS) {
727
+ const pattern = new RegExp(`\\.${validator}\\s*[(<]`, "g");
728
+ if (pattern.test(text)) {
729
+ usedSet.add(validator);
730
+ }
731
+ }
732
+ }
733
+ return usedSet.size;
734
+ }
735
+ getLibraryInfo(library, usedValidators) {
736
+ const sizeKey = library === "zod-v3" ? "zod" : library;
737
+ const sizes = LIBRARY_SIZES[sizeKey] ?? { fullKb: 10, baseKb: 10, treeShakable: false };
738
+ let estimatedUsedKb;
739
+ if (sizes.treeShakable) {
740
+ const overhead = VALIDATOR_OVERHEAD[sizeKey] ?? 0.05;
741
+ estimatedUsedKb = Math.min(sizes.baseKb + usedValidators * overhead, sizes.fullKb);
742
+ } else {
743
+ estimatedUsedKb = sizes.fullKb;
744
+ }
745
+ return {
746
+ library: sizeKey,
747
+ minifiedGzipKb: sizes.fullKb,
748
+ treeShakable: sizes.treeShakable,
749
+ estimatedUsedKb: Math.round(estimatedUsedKb * 10) / 10
750
+ };
751
+ }
752
+ generateCaveats(from, to, _usedValidators) {
753
+ const caveats = [
754
+ "Sizes are estimates based on minified+gzipped bundle analysis.",
755
+ "Actual impact depends on bundler configuration, tree-shaking, and code splitting."
756
+ ];
757
+ if (to === "valibot") {
758
+ caveats.push(
759
+ "Valibot is fully tree-shakable \u2014 actual size depends on which validators you use."
760
+ );
761
+ caveats.push(
762
+ "Some developers report smaller-than-expected savings (6kB or less) in real projects."
763
+ );
764
+ }
765
+ if (from === "zod-v3" && to === "v4") {
766
+ caveats.push(
767
+ "Zod v4 is ~26% larger than v3 due to JIT compilation engine. Consider zod/mini for size-sensitive apps."
768
+ );
769
+ }
770
+ if (from === "joi") {
771
+ caveats.push(
772
+ "Joi is the largest schema library. Any migration will likely reduce bundle size."
773
+ );
774
+ }
775
+ return caveats;
776
+ }
777
+ generateSummary(from, to, delta, deltaPercent) {
778
+ const direction = delta > 0 ? "increase" : delta < 0 ? "decrease" : "no change";
779
+ const absDelta = Math.abs(Math.round(delta * 10) / 10);
780
+ return `Estimated bundle ${direction}: ${from.library} (${from.estimatedUsedKb}kB) \u2192 ${to.library} (${to.estimatedUsedKb}kB) = ${delta > 0 ? "+" : delta < 0 ? "-" : ""}${absDelta}kB (${deltaPercent > 0 ? "+" : ""}${deltaPercent}%)`;
781
+ }
782
+ };
783
+
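
BundleEstimator combines the hard-coded LIBRARY_SIZES table with a count of distinct validators found in the project, so tree-shakable targets (valibot, zod-mini, io-ts) scale with usage while the others report their full size. A sketch, with the source glob as an assumption:

    const { Project } = require("ts-morph");
    const { BundleEstimator } = require("@schemashift/core");

    const project = new Project();
    project.addSourceFilesAtPaths("src/**/*.ts");   // illustrative glob

    const estimator = new BundleEstimator();
    const result = estimator.estimate(project.getSourceFiles(), "zod-v3", "valibot");
    console.log(result.summary);                    // estimated kB delta and percentage
    for (const caveat of result.caveats) console.log(`- ${caveat}`);
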
318
784
  // src/chain.ts
319
785
  var import_ts_morph3 = require("ts-morph");
320
786
  var MigrationChain = class {
@@ -381,12 +847,12 @@ var MigrationChain = class {
381
847
  };
382
848
 
383
849
  // src/compatibility.ts
384
- var import_node_fs2 = require("fs");
385
- var import_node_path2 = require("path");
850
+ var import_node_fs3 = require("fs");
851
+ var import_node_path3 = require("path");
386
852
 
387
853
  // src/ecosystem.ts
388
- var import_node_fs = require("fs");
389
- var import_node_path = require("path");
854
+ var import_node_fs2 = require("fs");
855
+ var import_node_path2 = require("path");
390
856
  var ECOSYSTEM_RULES = [
391
857
  // ORM integrations
392
858
  {
@@ -539,6 +1005,58 @@ var ECOSYSTEM_RULES = [
539
1005
  severity: "warning",
540
1006
  upgradeCommand: "npm install @asteasolutions/zod-to-openapi@latest"
541
1007
  })
1008
+ },
1009
+ // AI/MCP integrations
1010
+ {
1011
+ package: "@modelcontextprotocol/sdk",
1012
+ category: "api",
1013
+ migrations: ["zod-v3->v4"],
1014
+ check: () => ({
1015
+ issue: "MCP SDK may have Zod v4 compatibility issues. MCP servers typically expect Zod v3 schemas.",
1016
+ suggestion: "Check MCP SDK release notes for Zod v4 support before upgrading. Consider staying on Zod v3 for MCP servers.",
1017
+ severity: "warning",
1018
+ upgradeCommand: "npm install @modelcontextprotocol/sdk@latest"
1019
+ })
1020
+ },
1021
+ {
1022
+ package: "@openai/agents",
1023
+ category: "api",
1024
+ migrations: ["zod-v3->v4"],
1025
+ check: () => ({
1026
+ issue: "OpenAI Agents SDK recommends pinning to zod@3.25.67 due to TS2589 errors with newer versions.",
1027
+ suggestion: "Pin zod to 3.25.67 for OpenAI Agents SDK compatibility, or wait for an SDK update with Zod v4 support.",
1028
+ severity: "error"
1029
+ })
1030
+ },
1031
+ // Additional validation utilities
1032
+ {
1033
+ package: "zod-to-json-schema",
1034
+ category: "validation-util",
1035
+ migrations: ["zod-v3->v4"],
1036
+ check: (version) => {
1037
+ const majorMatch = version.match(/(\d+)/);
1038
+ const major = majorMatch?.[1] ? Number.parseInt(majorMatch[1], 10) : 0;
1039
+ if (major < 4) {
1040
+ return {
1041
+ issue: "zod-to-json-schema v3 may not fully support Zod v4 schemas.",
1042
+ suggestion: "Upgrade to zod-to-json-schema v4+ for full Zod v4 support.",
1043
+ severity: "warning",
1044
+ upgradeCommand: "npm install zod-to-json-schema@latest"
1045
+ };
1046
+ }
1047
+ return null;
1048
+ }
1049
+ },
1050
+ {
1051
+ package: "react-hook-form",
1052
+ category: "form",
1053
+ migrations: ["zod-v3->v4"],
1054
+ check: () => ({
1055
+ issue: "React Hook Form with zodResolver may throw uncaught ZodError instead of populating formState.errors with Zod v4.",
1056
+ suggestion: "Upgrade @hookform/resolvers to the latest version and test form validation thoroughly.",
1057
+ severity: "warning",
1058
+ upgradeCommand: "npm install @hookform/resolvers@latest react-hook-form@latest"
1059
+ })
542
1060
  }
543
1061
  ];
544
1062
  var EcosystemAnalyzer = class {
@@ -547,13 +1065,13 @@ var EcosystemAnalyzer = class {
547
1065
  const dependencies = [];
548
1066
  const warnings = [];
549
1067
  const blockers = [];
550
- const pkgPath = (0, import_node_path.join)(projectPath, "package.json");
551
- if (!(0, import_node_fs.existsSync)(pkgPath)) {
1068
+ const pkgPath = (0, import_node_path2.join)(projectPath, "package.json");
1069
+ if (!(0, import_node_fs2.existsSync)(pkgPath)) {
552
1070
  return { dependencies, warnings, blockers };
553
1071
  }
554
1072
  let allDeps = {};
555
1073
  try {
556
- const pkg = JSON.parse((0, import_node_fs.readFileSync)(pkgPath, "utf-8"));
1074
+ const pkg = JSON.parse((0, import_node_fs2.readFileSync)(pkgPath, "utf-8"));
557
1075
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
558
1076
  } catch {
559
1077
  return { dependencies, warnings, blockers };
@@ -583,6 +1101,20 @@ var EcosystemAnalyzer = class {
583
1101
  }
584
1102
  return { dependencies, warnings, blockers };
585
1103
  }
1104
+ /**
1105
+ * Returns a list of npm install commands needed to resolve ecosystem issues.
1106
+ */
1107
+ getUpgradeCommands(report) {
1108
+ const commands = [];
1109
+ const seen = /* @__PURE__ */ new Set();
1110
+ for (const dep of report.dependencies) {
1111
+ if (dep.upgradeCommand && !seen.has(dep.upgradeCommand)) {
1112
+ seen.add(dep.upgradeCommand);
1113
+ commands.push(dep.upgradeCommand);
1114
+ }
1115
+ }
1116
+ return commands;
1117
+ }
586
1118
  };
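
The new getUpgradeCommands helper de-duplicates the upgradeCommand fields from an ecosystem report. A sketch with a hand-built report; in practice the report comes from the analyzer's package.json scan, and this assumes EcosystemAnalyzer is part of the package's public export map (the export-list hunk above is truncated):

    const { EcosystemAnalyzer } = require("@schemashift/core");

    const analyzer = new EcosystemAnalyzer();
    // Only the upgradeCommand field matters here; other per-dependency fields are omitted.
    const report = {
      dependencies: [
        { upgradeCommand: "npm install @hookform/resolvers@latest react-hook-form@latest" },
        { upgradeCommand: "npm install zod-to-json-schema@latest" },
        { upgradeCommand: "npm install zod-to-json-schema@latest" }  // duplicate is dropped
      ],
      warnings: [],
      blockers: []
    };
    console.log(analyzer.getUpgradeCommands(report));
    // -> two unique install commands, in first-seen order
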
587
1119
 
588
1120
  // src/compatibility.ts
@@ -660,10 +1192,10 @@ var CompatibilityAnalyzer = class {
660
1192
  ecosystemAnalyzer = new EcosystemAnalyzer();
661
1193
  detectVersions(projectPath) {
662
1194
  const versions = [];
663
- const pkgPath = (0, import_node_path2.join)(projectPath, "package.json");
664
- if (!(0, import_node_fs2.existsSync)(pkgPath)) return versions;
1195
+ const pkgPath = (0, import_node_path3.join)(projectPath, "package.json");
1196
+ if (!(0, import_node_fs3.existsSync)(pkgPath)) return versions;
665
1197
  try {
666
- const pkg = JSON.parse((0, import_node_fs2.readFileSync)(pkgPath, "utf-8"));
1198
+ const pkg = JSON.parse((0, import_node_fs3.readFileSync)(pkgPath, "utf-8"));
667
1199
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
668
1200
  const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
669
1201
  for (const lib of knownLibs) {
@@ -885,8 +1417,8 @@ async function loadConfig(configPath) {
885
1417
  }
886
1418
 
887
1419
  // src/dependency-graph.ts
888
- var import_node_fs3 = require("fs");
889
- var import_node_path3 = require("path");
1420
+ var import_node_fs4 = require("fs");
1421
+ var import_node_path4 = require("path");
890
1422
  var SchemaDependencyResolver = class {
891
1423
  resolve(project, filePaths) {
892
1424
  const fileSet = new Set(filePaths);
@@ -973,39 +1505,96 @@ var SchemaDependencyResolver = class {
973
1505
  }
974
1506
  };
975
1507
  var SCHEMA_PACKAGES = /* @__PURE__ */ new Set(["zod", "yup", "joi", "io-ts", "valibot", "@effect/schema"]);
1508
+ function computeParallelBatches(packages, suggestedOrder) {
1509
+ const nameSet = new Set(packages.map((p) => p.name));
1510
+ const depMap = /* @__PURE__ */ new Map();
1511
+ for (const pkg of packages) {
1512
+ depMap.set(pkg.name, new Set(pkg.dependencies.filter((d) => nameSet.has(d))));
1513
+ }
1514
+ const depths = /* @__PURE__ */ new Map();
1515
+ const getDepth = (name, visited) => {
1516
+ const cached = depths.get(name);
1517
+ if (cached !== void 0) return cached;
1518
+ if (visited.has(name)) return 0;
1519
+ visited.add(name);
1520
+ const deps = depMap.get(name) ?? /* @__PURE__ */ new Set();
1521
+ let maxDepth = 0;
1522
+ for (const dep of deps) {
1523
+ maxDepth = Math.max(maxDepth, getDepth(dep, visited) + 1);
1524
+ }
1525
+ depths.set(name, maxDepth);
1526
+ return maxDepth;
1527
+ };
1528
+ for (const name of suggestedOrder) {
1529
+ getDepth(name, /* @__PURE__ */ new Set());
1530
+ }
1531
+ const batchMap = /* @__PURE__ */ new Map();
1532
+ for (const name of suggestedOrder) {
1533
+ const depth = depths.get(name) ?? 0;
1534
+ const batch = batchMap.get(depth) ?? [];
1535
+ batch.push(name);
1536
+ batchMap.set(depth, batch);
1537
+ }
1538
+ const batches = [];
1539
+ const sortedDepths = [...batchMap.keys()].sort((a, b) => a - b);
1540
+ for (const depth of sortedDepths) {
1541
+ const pkgs = batchMap.get(depth);
1542
+ if (pkgs) batches.push({ index: batches.length, packages: pkgs });
1543
+ }
1544
+ return batches;
1545
+ }
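
computeParallelBatches groups workspace packages by their dependency depth, so every package in a batch can be migrated concurrently once the earlier batches are done. A self-contained sketch with hypothetical package names:

    const { computeParallelBatches } = require("@schemashift/core");

    const packages = [
      { name: "@acme/schemas", dependencies: [] },
      { name: "@acme/api", dependencies: ["@acme/schemas"] },
      { name: "@acme/web", dependencies: ["@acme/api", "@acme/schemas"] }
    ];
    const order = ["@acme/schemas", "@acme/api", "@acme/web"];  // e.g. MonorepoResolver.analyze().suggestedOrder

    console.log(computeParallelBatches(packages, order));
    // -> [ { index: 0, packages: ["@acme/schemas"] },
    //      { index: 1, packages: ["@acme/api"] },
    //      { index: 2, packages: ["@acme/web"] } ]
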
976
1546
  var MonorepoResolver = class {
977
1547
  detect(projectPath) {
978
- const pkgPath = (0, import_node_path3.join)(projectPath, "package.json");
979
- if (!(0, import_node_fs3.existsSync)(pkgPath)) return false;
980
- try {
981
- const pkg = JSON.parse((0, import_node_fs3.readFileSync)(pkgPath, "utf-8"));
982
- return !!pkg.workspaces;
983
- } catch {
984
- return false;
1548
+ const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1549
+ if ((0, import_node_fs4.existsSync)(pkgPath)) {
1550
+ try {
1551
+ const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
1552
+ if (pkg.workspaces) return true;
1553
+ } catch {
1554
+ }
1555
+ }
1556
+ if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "pnpm-workspace.yaml"))) return true;
1557
+ return false;
1558
+ }
1559
+ /**
1560
+ * Detect which workspace manager is being used.
1561
+ */
1562
+ detectManager(projectPath) {
1563
+ if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
1564
+ const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1565
+ if ((0, import_node_fs4.existsSync)(pkgPath)) {
1566
+ try {
1567
+ const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
1568
+ if (pkg.packageManager?.startsWith("yarn")) return "yarn";
1569
+ if (pkg.packageManager?.startsWith("pnpm")) return "pnpm";
1570
+ } catch {
1571
+ }
985
1572
  }
1573
+ if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "pnpm-lock.yaml"))) return "pnpm";
1574
+ if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "yarn.lock"))) return "yarn";
1575
+ return "npm";
986
1576
  }
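
In 0.9.0 the resolver also recognizes pnpm workspaces (pnpm-workspace.yaml) and reports the workspace manager in use. A usage sketch; the project root is whatever directory holds the workspace manifest:

    const { MonorepoResolver } = require("@schemashift/core");

    const resolver = new MonorepoResolver();
    const root = process.cwd();                    // illustrative monorepo root
    if (resolver.detect(root)) {
      console.log(resolver.detectManager(root));   // "pnpm" | "yarn" | "npm"
      const { packages, suggestedOrder } = resolver.analyze(root);
      console.log(suggestedOrder);                 // dependency-ordered package names
    }
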
987
1577
  analyze(projectPath) {
988
- const pkgPath = (0, import_node_path3.join)(projectPath, "package.json");
989
- if (!(0, import_node_fs3.existsSync)(pkgPath)) {
1578
+ const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1579
+ if (!(0, import_node_fs4.existsSync)(pkgPath)) {
990
1580
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
991
1581
  }
992
1582
  let workspaceGlobs;
993
1583
  try {
994
- const pkg = JSON.parse((0, import_node_fs3.readFileSync)(pkgPath, "utf-8"));
995
- if (!pkg.workspaces) {
1584
+ workspaceGlobs = this.resolveWorkspaceGlobs(projectPath);
1585
+ if (workspaceGlobs.length === 0) {
996
1586
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
997
1587
  }
998
- workspaceGlobs = Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
999
1588
  } catch {
1000
1589
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
1001
1590
  }
1002
1591
  const packages = [];
1003
1592
  const resolvedDirs = this.resolveWorkspaceDirs(projectPath, workspaceGlobs);
1004
1593
  for (const dir of resolvedDirs) {
1005
- const wsPkgPath = (0, import_node_path3.join)(dir, "package.json");
1006
- if (!(0, import_node_fs3.existsSync)(wsPkgPath)) continue;
1594
+ const wsPkgPath = (0, import_node_path4.join)(dir, "package.json");
1595
+ if (!(0, import_node_fs4.existsSync)(wsPkgPath)) continue;
1007
1596
  try {
1008
- const wsPkg = JSON.parse((0, import_node_fs3.readFileSync)(wsPkgPath, "utf-8"));
1597
+ const wsPkg = JSON.parse((0, import_node_fs4.readFileSync)(wsPkgPath, "utf-8"));
1009
1598
  if (!wsPkg.name) continue;
1010
1599
  const allDeps = { ...wsPkg.dependencies, ...wsPkg.devDependencies };
1011
1600
  const depNames = Object.keys(allDeps);
@@ -1044,18 +1633,70 @@ var MonorepoResolver = class {
1044
1633
  }
1045
1634
  return sorted;
1046
1635
  }
1636
+ /**
1637
+ * Resolve workspace glob patterns from any supported format.
1638
+ * Supports: npm/yarn workspaces (package.json), pnpm-workspace.yaml
1639
+ */
1640
+ resolveWorkspaceGlobs(projectPath) {
1641
+ const pnpmPath = (0, import_node_path4.join)(projectPath, "pnpm-workspace.yaml");
1642
+ if ((0, import_node_fs4.existsSync)(pnpmPath)) {
1643
+ return this.parsePnpmWorkspace(pnpmPath);
1644
+ }
1645
+ const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1646
+ if ((0, import_node_fs4.existsSync)(pkgPath)) {
1647
+ try {
1648
+ const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
1649
+ if (pkg.workspaces) {
1650
+ return Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
1651
+ }
1652
+ } catch {
1653
+ }
1654
+ }
1655
+ return [];
1656
+ }
1657
+ /**
1658
+ * Parse pnpm-workspace.yaml to extract workspace package globs.
1659
+ * Simple YAML parsing for the common format:
1660
+ * ```
1661
+ * packages:
1662
+ * - 'packages/*'
1663
+ * - 'apps/*'
1664
+ * ```
1665
+ */
1666
+ parsePnpmWorkspace(filePath) {
1667
+ const content = (0, import_node_fs4.readFileSync)(filePath, "utf-8");
1668
+ const globs = [];
1669
+ let inPackages = false;
1670
+ for (const line of content.split("\n")) {
1671
+ const trimmed = line.trim();
1672
+ if (trimmed === "packages:") {
1673
+ inPackages = true;
1674
+ continue;
1675
+ }
1676
+ if (inPackages && /^\w/.test(trimmed) && !trimmed.startsWith("-")) {
1677
+ break;
1678
+ }
1679
+ if (inPackages && trimmed.startsWith("-")) {
1680
+ const pattern = trimmed.replace(/^-\s*/, "").replace(/^['"]|['"]$/g, "");
1681
+ if (pattern) {
1682
+ globs.push(pattern);
1683
+ }
1684
+ }
1685
+ }
1686
+ return globs;
1687
+ }
1047
1688
  resolveWorkspaceDirs(projectPath, globs) {
1048
1689
  const dirs = [];
1049
1690
  for (const glob of globs) {
1050
1691
  const clean = glob.replace(/\/?\*$/, "");
1051
- const base = (0, import_node_path3.resolve)(projectPath, clean);
1052
- if (!(0, import_node_fs3.existsSync)(base)) continue;
1692
+ const base = (0, import_node_path4.resolve)(projectPath, clean);
1693
+ if (!(0, import_node_fs4.existsSync)(base)) continue;
1053
1694
  if (glob.endsWith("*")) {
1054
1695
  try {
1055
- const entries = (0, import_node_fs3.readdirSync)(base, { withFileTypes: true });
1696
+ const entries = (0, import_node_fs4.readdirSync)(base, { withFileTypes: true });
1056
1697
  for (const entry of entries) {
1057
1698
  if (entry.isDirectory()) {
1058
- dirs.push((0, import_node_path3.join)(base, entry.name));
1699
+ dirs.push((0, import_node_path4.join)(base, entry.name));
1059
1700
  }
1060
1701
  }
1061
1702
  } catch {
@@ -1069,8 +1710,8 @@ var MonorepoResolver = class {
1069
1710
  };
1070
1711
 
1071
1712
  // src/detailed-analyzer.ts
1072
- var import_node_fs4 = require("fs");
1073
- var import_node_path4 = require("path");
1713
+ var import_node_fs5 = require("fs");
1714
+ var import_node_path5 = require("path");
1074
1715
  var COMPLEXITY_CHAIN_WEIGHT = 2;
1075
1716
  var COMPLEXITY_DEPTH_WEIGHT = 3;
1076
1717
  var COMPLEXITY_VALIDATION_WEIGHT = 1;
@@ -1135,10 +1776,10 @@ var DetailedAnalyzer = class {
1135
1776
  }
1136
1777
  detectLibraryVersions(projectPath) {
1137
1778
  const versions = [];
1138
- const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1139
- if (!(0, import_node_fs4.existsSync)(pkgPath)) return versions;
1779
+ const pkgPath = (0, import_node_path5.join)(projectPath, "package.json");
1780
+ if (!(0, import_node_fs5.existsSync)(pkgPath)) return versions;
1140
1781
  try {
1141
- const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
1782
+ const pkg = JSON.parse((0, import_node_fs5.readFileSync)(pkgPath, "utf-8"));
1142
1783
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
1143
1784
  const allDeps = {
1144
1785
  ...pkg.dependencies,
@@ -1398,6 +2039,7 @@ var FormResolverMigrator = class {
1398
2039
  // src/governance.ts
1399
2040
  var GovernanceEngine = class {
1400
2041
  rules = /* @__PURE__ */ new Map();
2042
+ customRuleFunctions = /* @__PURE__ */ new Map();
1401
2043
  configure(rules) {
1402
2044
  this.rules.clear();
1403
2045
  for (const [name, config] of Object.entries(rules)) {
@@ -1406,6 +2048,13 @@ var GovernanceEngine = class {
1406
2048
  }
1407
2049
  }
1408
2050
  }
2051
+ /**
2052
+ * Register a custom governance rule function.
2053
+ * Custom rules are executed per-file alongside built-in rules.
2054
+ */
2055
+ registerRule(name, fn) {
2056
+ this.customRuleFunctions.set(name, fn);
2057
+ }
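
A custom rule registered here only runs when a rule with the same name is also enabled through configure(). A sketch that enables some of the new built-in rules and adds one custom rule; the rule name, its body, and the config objects are illustrative (configure() may accept options not visible in this hunk):

    const { Project } = require("ts-morph");
    const { GovernanceEngine } = require("@schemashift/core");

    const engine = new GovernanceEngine();
    engine.configure({
      "require-safeParse": {},
      "no-coerce-in-api": {},
      "max-nesting-depth": { threshold: 4 },
      "no-console-in-schema": {}                   // enables the custom rule registered below
    });
    engine.registerRule("no-console-in-schema", (sourceFile, _config) => {
      // Return violations in the same shape the built-in rules push.
      if (!sourceFile.getFullText().includes("console.log")) return [];
      return [{
        rule: "no-console-in-schema",
        message: "console.log found in a schema module",
        filePath: sourceFile.getFilePath(),
        lineNumber: 1,
        schemaName: "(file)",
        severity: "warning",
        fixable: false
      }];
    });

    const project = new Project();
    project.addSourceFilesAtPaths("src/**/*.ts");  // illustrative glob
    const report = engine.analyze(project);        // report carries the collected violations
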
1409
2058
  analyze(project) {
1410
2059
  const violations = [];
1411
2060
  let schemasChecked = 0;
@@ -1481,6 +2130,104 @@ var GovernanceEngine = class {
1481
2130
  });
1482
2131
  }
1483
2132
  }
2133
+ if (this.rules.has("require-safeParse")) {
2134
+ if (text.includes(".parse(") && !text.includes(".safeParse(")) {
2135
+ violations.push({
2136
+ rule: "require-safeParse",
2137
+ message: `Schema "${schemaName}" uses .parse() \u2014 prefer .safeParse() for safer error handling`,
2138
+ filePath,
2139
+ lineNumber,
2140
+ schemaName,
2141
+ severity: "warning",
2142
+ fixable: true
2143
+ });
2144
+ }
2145
+ }
2146
+ if (this.rules.has("require-description")) {
2147
+ if (!text.includes(".describe(")) {
2148
+ violations.push({
2149
+ rule: "require-description",
2150
+ message: `Schema "${schemaName}" missing .describe() \u2014 add a description for documentation`,
2151
+ filePath,
2152
+ lineNumber,
2153
+ schemaName,
2154
+ severity: "warning",
2155
+ fixable: true
2156
+ });
2157
+ }
2158
+ }
2159
+ if (this.rules.has("no-coerce-in-api")) {
2160
+ if (/\.coerce\./.test(text)) {
2161
+ violations.push({
2162
+ rule: "no-coerce-in-api",
2163
+ message: `Schema "${schemaName}" uses z.coerce.* \u2014 coercion in API validation is a security risk`,
2164
+ filePath,
2165
+ lineNumber,
2166
+ schemaName,
2167
+ severity: "error",
2168
+ fixable: false
2169
+ });
2170
+ }
2171
+ }
2172
+ if (this.rules.has("require-max-length")) {
2173
+ if (text.includes(".string()") && !text.includes(".max(") && !text.includes(".length(")) {
2174
+ violations.push({
2175
+ rule: "require-max-length",
2176
+ message: `Schema "${schemaName}" has string without max length \u2014 required for DoS prevention`,
2177
+ filePath,
2178
+ lineNumber,
2179
+ schemaName,
2180
+ severity: "error",
2181
+ fixable: true
2182
+ });
2183
+ }
2184
+ }
2185
+ if (this.rules.has("max-nesting-depth")) {
2186
+ const config = this.rules.get("max-nesting-depth") ?? {};
2187
+ const maxDepth = config.threshold ?? 5;
2188
+ const depth = this.measureNestingDepth(text);
2189
+ if (depth > maxDepth) {
2190
+ violations.push({
2191
+ rule: "max-nesting-depth",
2192
+ message: `Schema "${schemaName}" nesting depth (${depth}) exceeds limit (${maxDepth})`,
2193
+ filePath,
2194
+ lineNumber,
2195
+ schemaName,
2196
+ severity: "warning",
2197
+ fixable: false
2198
+ });
2199
+ }
2200
+ }
2201
+ }
2202
+ }
2203
+ for (const sourceFile of project.getSourceFiles()) {
2204
+ const library = this.detectFileLibrary(sourceFile);
2205
+ if (library === "unknown") continue;
2206
+ const filePath = sourceFile.getFilePath();
2207
+ const text = sourceFile.getFullText();
2208
+ if (this.rules.has("no-dynamic-schemas")) {
2209
+ const dynamicPatterns = this.detectDynamicSchemas(text, library);
2210
+ for (const lineNumber of dynamicPatterns) {
2211
+ violations.push({
2212
+ rule: "no-dynamic-schemas",
2213
+ message: "Schema created inside function body \u2014 move to module level for performance",
2214
+ filePath,
2215
+ lineNumber,
2216
+ schemaName: "(dynamic)",
2217
+ severity: "warning",
2218
+ fixable: false
2219
+ });
2220
+ }
2221
+ }
2222
+ }
2223
+ for (const [ruleName, ruleFn] of this.customRuleFunctions) {
2224
+ const config = this.rules.get(ruleName);
2225
+ if (!config) continue;
2226
+ for (const sourceFile of project.getSourceFiles()) {
2227
+ const library = this.detectFileLibrary(sourceFile);
2228
+ if (library === "unknown") continue;
2229
+ const ruleViolations = ruleFn(sourceFile, config);
2230
+ violations.push(...ruleViolations);
1484
2231
  }
1485
2232
  }
1486
2233
  return {
@@ -1497,6 +2244,57 @@ var GovernanceEngine = class {
1497
2244
  }
1498
2245
  return "unknown";
1499
2246
  }
2247
+ measureNestingDepth(text) {
2248
+ let maxDepth = 0;
2249
+ let current = 0;
2250
+ for (const char of text) {
2251
+ if (char === "(") {
2252
+ current++;
2253
+ if (current > maxDepth) maxDepth = current;
2254
+ } else if (char === ")") {
2255
+ current--;
2256
+ }
2257
+ }
2258
+ return maxDepth;
2259
+ }
2260
+ detectDynamicSchemas(text, library) {
2261
+ const lineNumbers = [];
2262
+ const prefix = this.getSchemaPrefix(library);
2263
+ if (!prefix) return lineNumbers;
2264
+ const lines = text.split("\n");
2265
+ let insideFunction = 0;
2266
+ for (let i = 0; i < lines.length; i++) {
2267
+ const line = lines[i] ?? "";
2268
+ const opens = (line.match(/\{/g) || []).length;
2269
+ const closes = (line.match(/\}/g) || []).length;
2270
+ if (/(?:function\s+\w+|=>)\s*\{/.test(line)) {
2271
+ insideFunction += opens;
2272
+ insideFunction -= closes;
2273
+ continue;
2274
+ }
2275
+ insideFunction += opens - closes;
2276
+ if (insideFunction > 0 && line.includes(prefix)) {
2277
+ lineNumbers.push(i + 1);
2278
+ }
2279
+ }
2280
+ return lineNumbers;
2281
+ }
2282
+ getSchemaPrefix(library) {
2283
+ switch (library) {
2284
+ case "zod":
2285
+ return "z.";
2286
+ case "yup":
2287
+ return "yup.";
2288
+ case "joi":
2289
+ return "Joi.";
2290
+ case "io-ts":
2291
+ return "t.";
2292
+ case "valibot":
2293
+ return "v.";
2294
+ default:
2295
+ return null;
2296
+ }
2297
+ }
1500
2298
  isSchemaExpression(text, library) {
1501
2299
  switch (library) {
1502
2300
  case "zod":
@@ -1516,16 +2314,16 @@ var GovernanceEngine = class {
1516
2314
  };
1517
2315
 
1518
2316
  // src/incremental.ts
1519
- var import_node_fs5 = require("fs");
1520
- var import_node_path5 = require("path");
2317
+ var import_node_fs6 = require("fs");
2318
+ var import_node_path6 = require("path");
1521
2319
  var STATE_DIR = ".schemashift";
1522
2320
  var STATE_FILE = "incremental.json";
1523
2321
  var IncrementalTracker = class {
1524
2322
  stateDir;
1525
2323
  statePath;
1526
2324
  constructor(projectPath) {
1527
- this.stateDir = (0, import_node_path5.join)(projectPath, STATE_DIR);
1528
- this.statePath = (0, import_node_path5.join)(this.stateDir, STATE_FILE);
2325
+ this.stateDir = (0, import_node_path6.join)(projectPath, STATE_DIR);
2326
+ this.statePath = (0, import_node_path6.join)(this.stateDir, STATE_FILE);
1529
2327
  }
1530
2328
  start(files, from, to) {
1531
2329
  const state = {
@@ -1560,9 +2358,9 @@ var IncrementalTracker = class {
1560
2358
  this.saveState(state);
1561
2359
  }
1562
2360
  getState() {
1563
- if (!(0, import_node_fs5.existsSync)(this.statePath)) return null;
2361
+ if (!(0, import_node_fs6.existsSync)(this.statePath)) return null;
1564
2362
  try {
1565
- return JSON.parse((0, import_node_fs5.readFileSync)(this.statePath, "utf-8"));
2363
+ return JSON.parse((0, import_node_fs6.readFileSync)(this.statePath, "utf-8"));
1566
2364
  } catch {
1567
2365
  return null;
1568
2366
  }
@@ -1589,21 +2387,21 @@ var IncrementalTracker = class {
1589
2387
  };
1590
2388
  }
1591
2389
  clear() {
1592
- if ((0, import_node_fs5.existsSync)(this.statePath)) {
1593
- (0, import_node_fs5.writeFileSync)(this.statePath, "");
2390
+ if ((0, import_node_fs6.existsSync)(this.statePath)) {
2391
+ (0, import_node_fs6.unlinkSync)(this.statePath);
1594
2392
  }
1595
2393
  }
1596
2394
  saveState(state) {
1597
- if (!(0, import_node_fs5.existsSync)(this.stateDir)) {
1598
- (0, import_node_fs5.mkdirSync)(this.stateDir, { recursive: true });
2395
+ if (!(0, import_node_fs6.existsSync)(this.stateDir)) {
2396
+ (0, import_node_fs6.mkdirSync)(this.stateDir, { recursive: true });
1599
2397
  }
1600
- (0, import_node_fs5.writeFileSync)(this.statePath, JSON.stringify(state, null, 2));
2398
+ (0, import_node_fs6.writeFileSync)(this.statePath, JSON.stringify(state, null, 2));
1601
2399
  }
1602
2400
  };
1603
2401
 
1604
2402
  // src/package-updater.ts
1605
- var import_node_fs6 = require("fs");
1606
- var import_node_path6 = require("path");
2403
+ var import_node_fs7 = require("fs");
2404
+ var import_node_path7 = require("path");
1607
2405
  var TARGET_VERSIONS = {
1608
2406
  "yup->zod": { zod: "^3.24.0" },
1609
2407
  "joi->zod": { zod: "^3.24.0" },
@@ -1624,14 +2422,14 @@ var PackageUpdater = class {
1624
2422
  const add = {};
1625
2423
  const remove = [];
1626
2424
  const warnings = [];
1627
- const pkgPath = (0, import_node_path6.join)(projectPath, "package.json");
1628
- if (!(0, import_node_fs6.existsSync)(pkgPath)) {
2425
+ const pkgPath = (0, import_node_path7.join)(projectPath, "package.json");
2426
+ if (!(0, import_node_fs7.existsSync)(pkgPath)) {
1629
2427
  warnings.push("No package.json found. Cannot plan dependency updates.");
1630
2428
  return { add, remove, warnings };
1631
2429
  }
1632
2430
  let pkg;
1633
2431
  try {
1634
- pkg = JSON.parse((0, import_node_fs6.readFileSync)(pkgPath, "utf-8"));
2432
+ pkg = JSON.parse((0, import_node_fs7.readFileSync)(pkgPath, "utf-8"));
1635
2433
  } catch {
1636
2434
  warnings.push("Could not parse package.json.");
1637
2435
  return { add, remove, warnings };
@@ -1661,9 +2459,9 @@ var PackageUpdater = class {
1661
2459
  return { add, remove, warnings };
1662
2460
  }
1663
2461
  apply(projectPath, plan) {
1664
- const pkgPath = (0, import_node_path6.join)(projectPath, "package.json");
1665
- if (!(0, import_node_fs6.existsSync)(pkgPath)) return;
1666
- const pkgText = (0, import_node_fs6.readFileSync)(pkgPath, "utf-8");
2462
+ const pkgPath = (0, import_node_path7.join)(projectPath, "package.json");
2463
+ if (!(0, import_node_fs7.existsSync)(pkgPath)) return;
2464
+ const pkgText = (0, import_node_fs7.readFileSync)(pkgPath, "utf-8");
1667
2465
  const pkg = JSON.parse(pkgText);
1668
2466
  if (!pkg.dependencies) pkg.dependencies = {};
1669
2467
  for (const [name, version] of Object.entries(plan.add)) {
@@ -1673,11 +2471,133 @@ var PackageUpdater = class {
1673
2471
  pkg.dependencies[name] = version;
1674
2472
  }
1675
2473
  }
1676
- (0, import_node_fs6.writeFileSync)(pkgPath, `${JSON.stringify(pkg, null, 2)}
2474
+ (0, import_node_fs7.writeFileSync)(pkgPath, `${JSON.stringify(pkg, null, 2)}
1677
2475
  `);
1678
2476
  }
1679
2477
  };
1680
2478
 
2479
+ // src/performance-analyzer.ts
2480
+ var PerformanceAnalyzer = class {
2481
+ analyze(sourceFiles, from, to) {
2482
+ const warnings = [];
2483
+ let parseCallSites = 0;
2484
+ let dynamicSchemaCount = 0;
2485
+ for (const file of sourceFiles) {
2486
+ const text = file.getFullText();
2487
+ const filePath = file.getFilePath();
2488
+ const parseMatches = text.match(/\.(parse|safeParse)\s*\(/g);
2489
+ if (parseMatches) {
2490
+ parseCallSites += parseMatches.length;
2491
+ }
2492
+ const dynamicResult = this.detectDynamicSchemas(text, filePath);
2493
+ dynamicSchemaCount += dynamicResult.count;
2494
+ warnings.push(...dynamicResult.warnings);
2495
+ this.addMigrationWarnings(text, filePath, from, to, warnings);
2496
+ }
2497
+ const recommendation = this.getRecommendation(from, to, parseCallSites, dynamicSchemaCount);
2498
+ const summary = this.generateSummary(warnings, parseCallSites, dynamicSchemaCount);
2499
+ return {
2500
+ warnings,
2501
+ parseCallSites,
2502
+ dynamicSchemaCount,
2503
+ recommendation,
2504
+ summary
2505
+ };
2506
+ }
2507
+ detectDynamicSchemas(text, filePath) {
2508
+ const warnings = [];
2509
+ let count = 0;
2510
+ const functionBodyPattern = /(?:function\s+\w+\s*\([^)]*\)|const\s+\w+\s*=\s*(?:async\s+)?(?:\([^)]*\)|[a-zA-Z_]\w*)\s*=>)\s*\{[^}]*(?:z\.|yup\.|Joi\.|v\.)\w+\s*\(/g;
2511
+ for (const match of text.matchAll(functionBodyPattern)) {
2512
+ count++;
2513
+ const lineNumber = text.substring(0, match.index).split("\n").length;
2514
+ warnings.push({
2515
+ category: "dynamic-schemas",
2516
+ message: "Schema created inside function body \u2014 may cause performance issues with Zod v4.",
2517
+ detail: "Zod v4 uses JIT compilation, making schema creation ~17x slower than v3. Move schema definitions to module level to avoid re-creation on every call.",
2518
+ filePath,
2519
+ lineNumber,
2520
+ severity: "warning"
2521
+ });
2522
+ }
2523
+ const reactComponentPattern = /(?:function\s+[A-Z]\w*\s*\([^)]*\)|const\s+[A-Z]\w*\s*[:=])[^{]*\{[^}]*(?:z\.|yup\.|Joi\.)\w+\s*\(/g;
2524
+ for (const match of text.matchAll(reactComponentPattern)) {
2525
+ count++;
2526
+ const lineNumber = text.substring(0, match.index).split("\n").length;
2527
+ warnings.push({
2528
+ category: "schema-creation",
2529
+ message: "Schema appears to be created inside a React component.",
2530
+ detail: "Schemas created inside React components are re-created on every render. Move schema definitions outside the component or wrap in useMemo(). This is especially important for Zod v4 due to JIT compilation overhead.",
2531
+ filePath,
2532
+ lineNumber,
2533
+ severity: "warning"
2534
+ });
2535
+ }
2536
+ return { count, warnings };
2537
+ }
2538
+ addMigrationWarnings(text, filePath, from, to, warnings) {
2539
+ const migration = `${from}->${to}`;
2540
+ if (migration === "zod-v3->v4") {
2541
+ if (/edge-runtime|@vercel\/edge|cloudflare.*workers|deno\.serve|Deno\.serve/i.test(text) || /export\s+const\s+runtime\s*=\s*['"]edge['"]/i.test(text)) {
2542
+ warnings.push({
2543
+ category: "cold-start",
2544
+ message: "Edge/serverless environment detected \u2014 Zod v4 JIT compilation increases cold start time.",
2545
+ detail: "Zod v4 JIT trades slower schema creation for faster repeated parsing. In serverless/edge environments with short-lived instances, the JIT cost may not amortize. Consider Valibot or staying on Zod v3 for cold-start-sensitive code.",
2546
+ filePath,
2547
+ severity: "warning"
2548
+ });
2549
+ }
2550
+ const parseCount = (text.match(/\.parse\s*\(/g) || []).length;
2551
+ if (parseCount > 10) {
2552
+ warnings.push({
2553
+ category: "repeated-parsing",
2554
+ message: `High parse() usage (${parseCount} call sites) \u2014 Zod v4 JIT will benefit here.`,
2555
+ detail: "Zod v4 JIT compilation makes repeated parsing ~8x faster. This file has many parse() calls and will see performance improvement.",
2556
+ filePath,
2557
+ severity: "info"
2558
+ });
2559
+ }
2560
+ }
2561
+ if (migration === "zod->valibot" && /\.parse\s*\(/.test(text)) {
2562
+ warnings.push({
2563
+ category: "repeated-parsing",
2564
+ message: "Valibot parsing performance is comparable to Zod v4 for most schemas.",
2565
+ detail: "Valibot v1+ offers similar runtime performance to Zod v4 with significantly smaller bundle size. No JIT overhead means consistent performance across all environments.",
2566
+ filePath,
2567
+ severity: "info"
2568
+ });
2569
+ }
2570
+ }
2571
+ getRecommendation(from, to, parseCallSites, dynamicSchemaCount) {
2572
+ const migration = `${from}->${to}`;
2573
+ if (migration === "zod-v3->v4") {
2574
+ if (dynamicSchemaCount > 5) {
2575
+ return "Many dynamic schemas detected. Zod v4 JIT makes schema creation 17x slower. Move schemas to module level before migrating, or consider Valibot for size-sensitive apps.";
2576
+ }
2577
+ if (parseCallSites > 50) {
2578
+ return "High parse() volume detected. Zod v4 JIT will significantly benefit repeated parsing (up to 8x faster). Migration recommended for performance.";
2579
+ }
2580
+ return "Moderate usage detected. Zod v4 trades slower startup for faster runtime parsing.";
2581
+ }
2582
+ if (migration === "zod->valibot") {
2583
+ return "Valibot offers similar runtime performance with significantly smaller bundle size. Best suited for bundle-size-sensitive applications.";
2584
+ }
2585
+ if (from === "yup" || from === "joi") {
2586
+ return `Migrating from ${from} to ${to} should have neutral or positive performance impact.`;
2587
+ }
2588
+ return "Performance impact depends on usage patterns. Review warnings for details.";
2589
+ }
2590
+ generateSummary(warnings, parseCallSites, dynamicSchemaCount) {
2591
+ const parts = [];
2592
+ parts.push(`${parseCallSites} parse/safeParse call sites`);
2593
+ if (dynamicSchemaCount > 0) {
2594
+ parts.push(`${dynamicSchemaCount} dynamic schema creation sites`);
2595
+ }
2596
+ parts.push(`${warnings.length} performance warning(s)`);
2597
+ return parts.join(", ");
2598
+ }
2599
+ };
2600
+
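
PerformanceAnalyzer counts parse/safeParse call sites and schemas created inside function bodies or React components, then tailors its recommendation to the migration path; it is most opinionated about zod-v3 to v4 because of the v4 JIT trade-off. A sketch, with the glob as an assumption:

    const { Project } = require("ts-morph");
    const { PerformanceAnalyzer } = require("@schemashift/core");

    const project = new Project();
    project.addSourceFilesAtPaths("src/**/*.{ts,tsx}");  // illustrative glob

    const perf = new PerformanceAnalyzer();
    const report = perf.analyze(project.getSourceFiles(), "zod-v3", "v4");
    console.log(report.summary);                         // call-site and warning counts
    console.log(report.recommendation);
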
1681
2601
  // src/plugin-loader.ts
1682
2602
  var PluginLoader = class {
1683
2603
  async loadPlugins(pluginPaths) {
@@ -1723,8 +2643,8 @@ var PluginLoader = class {
1723
2643
  };
1724
2644
 
1725
2645
  // src/standard-schema.ts
1726
- var import_node_fs7 = require("fs");
1727
- var import_node_path7 = require("path");
2646
+ var import_node_fs8 = require("fs");
2647
+ var import_node_path8 = require("path");
1728
2648
  var STANDARD_SCHEMA_LIBRARIES = {
1729
2649
  zod: { minMajor: 3, minMinor: 23 },
1730
2650
  // Zod v3.23+ and v4+
@@ -1753,16 +2673,16 @@ function isVersionCompatible(version, minMajor, minMinor) {
1753
2673
  return false;
1754
2674
  }
1755
2675
  function detectStandardSchema(projectPath) {
1756
- const pkgPath = (0, import_node_path7.join)(projectPath, "package.json");
1757
- if (!(0, import_node_fs7.existsSync)(pkgPath)) {
1758
- return { detected: false, compatibleLibraries: [], recommendation: "" };
2676
+ const pkgPath = (0, import_node_path8.join)(projectPath, "package.json");
2677
+ if (!(0, import_node_fs8.existsSync)(pkgPath)) {
2678
+ return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
1759
2679
  }
1760
2680
  let allDeps = {};
1761
2681
  try {
1762
- const pkg = JSON.parse((0, import_node_fs7.readFileSync)(pkgPath, "utf-8"));
2682
+ const pkg = JSON.parse((0, import_node_fs8.readFileSync)(pkgPath, "utf-8"));
1763
2683
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
1764
2684
  } catch {
1765
- return { detected: false, compatibleLibraries: [], recommendation: "" };
2685
+ return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
1766
2686
  }
1767
2687
  const hasExplicitStandardSchema = "@standard-schema/spec" in allDeps;
1768
2688
  const compatibleLibraries = [];
@@ -1781,9 +2701,155 @@ function detectStandardSchema(projectPath) {
1781
2701
  } else if (hasExplicitStandardSchema) {
1782
2702
  recommendation = "Standard Schema spec detected. Ensure your validation library supports Standard Schema for maximum interoperability.";
1783
2703
  }
1784
- return { detected, compatibleLibraries, recommendation };
2704
+ let adoptionPath;
2705
+ if (detected && !hasExplicitStandardSchema) {
2706
+ adoptionPath = "Install @standard-schema/spec for explicit Standard Schema support. This enables library-agnostic validation consumers to accept your schemas without depending on a specific library. Run: npm install @standard-schema/spec";
2707
+ } else if (!detected) {
2708
+ adoptionPath = "Consider migrating to a Standard Schema-compatible library (Zod v3.23+, Valibot v1+, ArkType v2+) to future-proof your validation layer and reduce library lock-in.";
2709
+ }
2710
+ const interopTools = detected ? [
2711
+ "tRPC v11+ (Standard Schema input validation)",
2712
+ "TanStack Form (schema-agnostic validation)",
2713
+ "TanStack Router (route parameter validation)",
2714
+ "Hono (request validation middleware)",
2715
+ "Conform (progressive form validation)",
2716
+ "Nuxt (runtime config validation)"
2717
+ ] : [];
2718
+ return { detected, compatibleLibraries, recommendation, adoptionPath, interopTools };
1785
2719
  }
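
detectStandardSchema now also returns an adoptionPath hint and the list of interop tools that accept Standard Schema inputs. A sketch run against the current working directory:

    const { detectStandardSchema } = require("@schemashift/core");

    const result = detectStandardSchema(process.cwd());
    if (result.detected) {
      console.log(result.compatibleLibraries);   // compatible libraries found in package.json
      console.log(result.interopTools);          // tRPC v11+, TanStack Form, Hono, ...
    }
    if (result.adoptionPath) console.log(result.adoptionPath);
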
1786
2720
 
2721
+ // src/test-scaffolder.ts
+ var TestScaffolder = class {
+ scaffold(sourceFiles, from, to) {
+ const tests = [];
+ let totalSchemas = 0;
+ for (const file of sourceFiles) {
+ const schemas = this.extractSchemaNames(file, from);
+ if (schemas.length === 0) continue;
+ totalSchemas += schemas.length;
+ const testCode = this.generateTestFile(file, schemas, from, to);
+ const filePath = file.getFilePath().replace(/\.tsx?$/, ".migration-test.ts");
+ tests.push({ filePath, testCode, schemaCount: schemas.length });
+ }
+ const summary = tests.length > 0 ? `Generated ${tests.length} test file(s) covering ${totalSchemas} schema(s) for ${from}->${to} migration.` : "No schemas found to generate tests for.";
+ return { tests, totalSchemas, summary };
+ }
+ extractSchemaNames(file, library) {
+ const names = [];
+ const prefixes = this.getLibraryPrefixes(library);
+ for (const varDecl of file.getVariableDeclarations()) {
+ const initializer = varDecl.getInitializer();
+ if (!initializer) continue;
+ const text = initializer.getText();
+ if (prefixes.some((p) => text.startsWith(p))) {
+ names.push(varDecl.getName());
+ }
+ }
+ return names;
+ }
+ getLibraryPrefixes(library) {
+ switch (library) {
+ case "zod":
+ case "zod-v3":
+ return ["z.", "zod."];
+ case "yup":
+ return ["yup.", "Yup."];
+ case "joi":
+ return ["Joi.", "joi."];
+ case "io-ts":
+ return ["t."];
+ case "valibot":
+ return ["v.", "valibot."];
+ default:
+ return ["z."];
+ }
+ }
+ generateTestFile(file, schemaNames, from, to) {
+ const relativePath = file.getFilePath();
+ const schemaImports = schemaNames.join(", ");
+ const parseMethod = this.getParseMethod(to);
+ const errorClass = this.getErrorClass(to);
+ const testCases = schemaNames.map((name) => this.generateSchemaTests(name, to, parseMethod, errorClass)).join("\n\n");
+ return `/**
+ * Migration validation tests for ${from} -> ${to}
+ * Auto-generated by SchemaShift
+ *
+ * These tests verify that schema behavior is preserved after migration.
+ * Run before and after migration to ensure equivalence.
+ *
+ * Source: ${relativePath}
+ */
+ import { describe, expect, it } from 'vitest';
+ import { ${schemaImports} } from '${relativePath.replace(/\.ts$/, ".js")}';
+
+ describe('Migration validation: ${relativePath}', () => {
+ ${testCases}
+ });
+ `;
+ }
+ getParseMethod(to) {
+ switch (to) {
+ case "valibot":
+ return "v.safeParse";
+ default:
+ return ".safeParse";
+ }
+ }
+ getErrorClass(to) {
+ switch (to) {
+ case "valibot":
+ return "ValiError";
+ case "zod":
+ case "v4":
+ return "ZodError";
+ default:
+ return "Error";
+ }
+ }
+ generateSchemaTests(schemaName, to, _parseMethod, _errorClass) {
+ if (to === "valibot") {
+ return ` describe('${schemaName}', () => {
+ it('should accept valid data', () => {
+ // TODO(schemashift): Add valid test data for ${schemaName}
+ // const result = v.safeParse(${schemaName}, validData);
+ // expect(result.success).toBe(true);
+ });
+
+ it('should reject invalid data', () => {
+ // TODO(schemashift): Add invalid test data for ${schemaName}
+ // const result = v.safeParse(${schemaName}, invalidData);
+ // expect(result.success).toBe(false);
+ });
+
+ it('should preserve error messages', () => {
+ // TODO(schemashift): Verify custom error messages are preserved
+ // const result = v.safeParse(${schemaName}, invalidData);
+ // expect(result.issues?.[0]?.message).toContain('expected message');
+ });
+ });`;
+ }
+ return ` describe('${schemaName}', () => {
+ it('should accept valid data', () => {
+ // TODO(schemashift): Add valid test data for ${schemaName}
+ // const result = ${schemaName}.safeParse(validData);
+ // expect(result.success).toBe(true);
+ });
+
+ it('should reject invalid data', () => {
+ // TODO(schemashift): Add invalid test data for ${schemaName}
+ // const result = ${schemaName}.safeParse(invalidData);
+ // expect(result.success).toBe(false);
+ });
+
+ it('should preserve error messages', () => {
+ // TODO(schemashift): Verify custom error messages are preserved
+ // const result = ${schemaName}.safeParse(invalidData);
+ // expect(result.error?.issues[0]?.message).toContain('expected message');
+ });
+ });`;
+ }
+ };
+
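An illustrative usage sketch (commentary, not part of the diff): scaffold(sourceFiles, from, to) and its { tests, totalSchemas, summary } return shape are visible above, while the ts-morph Project, the glob, and the "zod" -> "valibot" pair are assumptions chosen to match the getFilePath()/getVariableDeclarations() calls and the library cases in getLibraryPrefixes().

// Hypothetical consumer-side sketch, assuming ts-morph SourceFile inputs.
import { writeFileSync } from "node:fs";
import { Project } from "ts-morph";
import { TestScaffolder } from "@schemashift/core";

// Load the project whose schemas are being migrated (paths are examples).
const project = new Project({ tsConfigFilePath: "tsconfig.json" });
const scaffolder = new TestScaffolder();

const result = scaffolder.scaffold(project.getSourceFiles("src/**/*.ts"), "zod", "valibot");
console.log(result.summary);
for (const test of result.tests) {
  // Each entry is { filePath, testCode, schemaCount }; filePath ends in .migration-test.ts.
  writeFileSync(test.filePath, test.testCode);
}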
  // src/transform.ts
  var TransformEngine = class {
  handlers = /* @__PURE__ */ new Map();
@@ -1798,9 +2864,10 @@ var TransformEngine = class {
  }
  getSupportedPaths() {
  return Array.from(this.handlers.keys()).map((key) => {
- const [from, to] = key.split("->");
- return { from, to };
- });
+ const parts = key.split("->");
+ if (parts.length !== 2) return null;
+ return { from: parts[0], to: parts[1] };
+ }).filter((entry) => entry !== null);
  }
  transform(sourceFile, from, to, options) {
  const handler = this.getHandler(from, to);
@@ -1816,8 +2883,137 @@ var TransformEngine = class {
  return handler.transform(sourceFile, options);
  }
  };
+
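The getSupportedPaths() change above skips malformed handler keys instead of emitting entries with undefined fields. A minimal standalone sketch of the new parsing follows; handler registration is not shown in this hunk, so plain strings stand in for the map keys.

// Standalone illustration of the new key parsing (keys are examples).
const keys = ["zod->valibot", "yup->zod", "malformed-key"];
const supportedPaths = keys
  .map((key) => {
    const parts = key.split("->");
    if (parts.length !== 2) return null; // old code would have yielded { from: "malformed-key", to: undefined }
    return { from: parts[0], to: parts[1] };
  })
  .filter((entry): entry is { from: string; to: string } => entry !== null);
// supportedPaths: [{ from: "zod", to: "valibot" }, { from: "yup", to: "zod" }]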
+ // src/type-dedup-detector.ts
+ var import_ts_morph4 = require("ts-morph");
+ var TypeDedupDetector = class {
+ detect(sourceFiles) {
+ const typeDefinitions = this.collectTypeDefinitions(sourceFiles);
+ const schemaDefinitions = this.collectSchemaDefinitions(sourceFiles);
+ const candidates = this.findMatches(typeDefinitions, schemaDefinitions);
+ const summary = candidates.length > 0 ? `Found ${candidates.length} type definition(s) that may duplicate schema shapes. After migration, replace with z.infer<typeof schema>.` : "No duplicate type definitions detected.";
+ return { candidates, summary };
+ }
+ collectTypeDefinitions(sourceFiles) {
+ const types = [];
+ for (const file of sourceFiles) {
+ const filePath = file.getFilePath();
+ for (const iface of file.getInterfaces()) {
+ const fields = iface.getProperties().map((p) => p.getName());
+ if (fields.length > 0) {
+ types.push({
+ name: iface.getName(),
+ fields,
+ filePath,
+ lineNumber: iface.getStartLineNumber()
+ });
+ }
+ }
+ for (const typeAlias of file.getTypeAliases()) {
+ const typeNode = typeAlias.getTypeNode();
+ if (!typeNode) continue;
+ if (import_ts_morph4.Node.isTypeLiteral(typeNode)) {
+ const fields = typeNode.getProperties().map((p) => p.getName());
+ if (fields.length > 0) {
+ types.push({
+ name: typeAlias.getName(),
+ fields,
+ filePath,
+ lineNumber: typeAlias.getStartLineNumber()
+ });
+ }
+ }
+ }
+ }
+ return types;
+ }
+ collectSchemaDefinitions(sourceFiles) {
+ const schemas = [];
+ for (const file of sourceFiles) {
+ const filePath = file.getFilePath();
+ for (const varDecl of file.getVariableDeclarations()) {
+ const initializer = varDecl.getInitializer();
+ if (!initializer) continue;
+ const text = initializer.getText();
+ const isSchema = /(?:z|zod|yup|Yup|Joi|joi|t|v|valibot)\.object\s*\(/.test(text) || /Joi\.object\s*\(/.test(text);
+ if (!isSchema) continue;
+ const fields = this.extractSchemaFields(text);
+ if (fields.length > 0) {
+ schemas.push({
+ name: varDecl.getName(),
+ fields,
+ filePath,
+ lineNumber: varDecl.getStartLineNumber()
+ });
+ }
+ }
+ }
+ return schemas;
+ }
+ extractSchemaFields(text) {
+ const fields = [];
+ const fieldPattern = /\b(\w+)\s*:\s*(?:z|zod|yup|Yup|Joi|joi|t|v|valibot)\./g;
+ for (const match of text.matchAll(fieldPattern)) {
+ if (match[1]) {
+ fields.push(match[1]);
+ }
+ }
+ return fields;
+ }
+ findMatches(types, schemas) {
+ const candidates = [];
+ for (const typeDef of types) {
+ for (const schemaDef of schemas) {
+ const matchedFields = this.getMatchedFields(typeDef.fields, schemaDef.fields);
+ if (matchedFields.length < 2) continue;
+ const typeFieldCount = typeDef.fields.length;
+ const schemaFieldCount = schemaDef.fields.length;
+ const matchRatio = matchedFields.length / Math.max(typeFieldCount, schemaFieldCount);
+ let confidence;
+ if (matchRatio >= 0.8) {
+ confidence = "high";
+ } else if (matchRatio >= 0.5) {
+ confidence = "medium";
+ } else {
+ confidence = "low";
+ }
+ if (confidence === "low" && !this.namesRelated(typeDef.name, schemaDef.name)) {
+ continue;
+ }
+ candidates.push({
+ typeName: typeDef.name,
+ typeFilePath: typeDef.filePath,
+ typeLineNumber: typeDef.lineNumber,
+ schemaName: schemaDef.name,
+ schemaFilePath: schemaDef.filePath,
+ schemaLineNumber: schemaDef.lineNumber,
+ matchedFields,
+ confidence,
+ suggestion: `Replace "type/interface ${typeDef.name}" with "type ${typeDef.name} = z.infer<typeof ${schemaDef.name}>" (${matchedFields.length}/${typeFieldCount} fields match).`
+ });
+ }
+ }
+ candidates.sort((a, b) => {
+ const confidenceOrder = { high: 0, medium: 1, low: 2 };
+ const diff = confidenceOrder[a.confidence] - confidenceOrder[b.confidence];
+ if (diff !== 0) return diff;
+ return b.matchedFields.length - a.matchedFields.length;
+ });
+ return candidates;
+ }
+ getMatchedFields(typeFields, schemaFields) {
+ const schemaSet = new Set(schemaFields);
+ return typeFields.filter((f) => schemaSet.has(f));
+ }
+ namesRelated(typeName, schemaName) {
+ const normalize = (name) => name.toLowerCase().replace(/schema|type|interface|i$/gi, "");
+ return normalize(typeName) === normalize(schemaName);
+ }
+ };
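An illustrative usage sketch (commentary, not part of the diff): detect(sourceFiles) and the { candidates, summary } return shape are visible above; as with TestScaffolder, the ts-morph Project is an assumption that matches the SourceFile methods the detector calls, and the logged fields come from the candidate objects built in findMatches().

// Hypothetical consumer-side sketch, assuming ts-morph SourceFile inputs.
import { Project } from "ts-morph";
import { TypeDedupDetector } from "@schemashift/core";

const project = new Project({ tsConfigFilePath: "tsconfig.json" });
const { candidates, summary } = new TypeDedupDetector().detect(project.getSourceFiles());

console.log(summary);
for (const candidate of candidates) {
  // Candidates arrive sorted high -> medium -> low confidence, then by matched-field count.
  console.log(`[${candidate.confidence}] ${candidate.typeName} @ ${candidate.typeFilePath}:${candidate.typeLineNumber}`);
  console.log(`  ${candidate.suggestion}`);
}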
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ BehavioralWarningAnalyzer,
+ BundleEstimator,
  CompatibilityAnalyzer,
  ComplexityEstimator,
  DetailedAnalyzer,
@@ -1825,14 +3021,19 @@ var TransformEngine = class {
  FormResolverMigrator,
  GovernanceEngine,
  IncrementalTracker,
+ MigrationAuditLog,
  MigrationChain,
  MonorepoResolver,
  PackageUpdater,
+ PerformanceAnalyzer,
  PluginLoader,
  SchemaAnalyzer,
  SchemaDependencyResolver,
+ TestScaffolder,
  TransformEngine,
+ TypeDedupDetector,
  buildCallChain,
+ computeParallelBatches,
  detectFormLibraries,
  detectSchemaLibrary,
  detectStandardSchema,