@schemashift/core 0.8.0 → 0.9.0

This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
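The 0.9.0 bundle adds several new analyzer classes (MigrationAuditLog, BehavioralWarningAnalyzer, BundleEstimator, PerformanceAnalyzer, TestScaffolder, TypeDedupDetector) to the export list. Below is a minimal, illustrative sketch of how the new audit-log, behavioral-warning, and bundle-estimate APIs might be wired together, inferred only from the bundled source in this diff; it assumes ts-morph is available (the bundle itself imports it) and that the constructor and method signatures match what is shown below — confirm exact typings against the package's published types before relying on them.

```ts
// Sketch based on the 0.9.0 diff below; signatures are assumptions, not documented API.
import { Project } from "ts-morph";
import {
  BehavioralWarningAnalyzer,
  BundleEstimator,
  MigrationAuditLog,
} from "@schemashift/core";

const projectPath = process.cwd();
const project = new Project({ tsConfigFilePath: `${projectPath}/tsconfig.json` });
const sourceFiles = project.getSourceFiles("src/**/*.ts");

// Behavioral differences between the source and target schema libraries.
const analyzer = new BehavioralWarningAnalyzer();
const report = analyzer.analyze(sourceFiles, "yup", "zod");
console.log(report.summary);

// Rough bundle-size delta for the same migration.
const estimator = new BundleEstimator();
const estimate = estimator.estimate(sourceFiles, "yup", "zod");
console.log(estimate.summary);

// Append an entry to <project>/.schemashift/audit-log.json.
const audit = new MigrationAuditLog(projectPath);
audit.append(
  audit.createEntry({
    migrationId: "yup-to-zod-example", // hypothetical identifier
    filePath: "src/schemas/user.ts",   // hypothetical file
    from: "yup",
    to: "zod",
    success: true,
    originalCode: "/* before */",      // placeholder contents, hashed by createEntry
    transformedCode: "/* after */",
    warningCount: report.warnings.length,
    errorCount: 0,                     // placeholder metrics
    riskScore: 0,
    duration: 0,
  }),
);
console.log(audit.getSummary());
```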
package/dist/index.js CHANGED
@@ -264,6 +264,465 @@ function transformMethodChain(chain, newBase, factoryMapper, methodMapper) {
264
264
  return buildCallChain(newBase, factory.name, factory.args, mappedMethods);
265
265
  }
266
266
 
267
+ // src/audit-log.ts
268
+ import { createHash } from "crypto";
269
+ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
270
+ import { join } from "path";
271
+ var AUDIT_DIR = ".schemashift";
272
+ var AUDIT_FILE = "audit-log.json";
273
+ var AUDIT_VERSION = 1;
274
+ var MigrationAuditLog = class {
275
+ logDir;
276
+ logPath;
277
+ constructor(projectPath) {
278
+ this.logDir = join(projectPath, AUDIT_DIR);
279
+ this.logPath = join(this.logDir, AUDIT_FILE);
280
+ }
281
+ /**
282
+ * Append a new entry to the audit log.
283
+ */
284
+ append(entry) {
285
+ const log = this.read();
286
+ log.entries.push(entry);
287
+ this.write(log);
288
+ }
289
+ /**
290
+ * Create an audit entry for a file transformation.
291
+ */
292
+ createEntry(params) {
293
+ return {
294
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
295
+ migrationId: params.migrationId,
296
+ filePath: params.filePath,
297
+ action: "transform",
298
+ from: params.from,
299
+ to: params.to,
300
+ success: params.success,
301
+ beforeHash: this.hashContent(params.originalCode),
302
+ afterHash: params.transformedCode ? this.hashContent(params.transformedCode) : void 0,
303
+ warningCount: params.warningCount,
304
+ errorCount: params.errorCount,
305
+ riskScore: params.riskScore,
306
+ duration: params.duration,
307
+ user: this.getCurrentUser()
308
+ };
309
+ }
310
+ /**
311
+ * Read the current audit log.
312
+ */
313
+ read() {
314
+ if (!existsSync(this.logPath)) {
315
+ return { version: AUDIT_VERSION, entries: [] };
316
+ }
317
+ try {
318
+ const content = readFileSync(this.logPath, "utf-8");
319
+ if (!content.trim()) {
320
+ return { version: AUDIT_VERSION, entries: [] };
321
+ }
322
+ return JSON.parse(content);
323
+ } catch {
324
+ return { version: AUDIT_VERSION, entries: [] };
325
+ }
326
+ }
327
+ /**
328
+ * Get entries for a specific migration.
329
+ */
330
+ getByMigration(migrationId) {
331
+ const log = this.read();
332
+ return log.entries.filter((e) => e.migrationId === migrationId);
333
+ }
334
+ /**
335
+ * Get summary statistics for the audit log.
336
+ */
337
+ getSummary() {
338
+ const log = this.read();
339
+ const migrationIds = new Set(log.entries.map((e) => e.migrationId));
340
+ const migrationPaths = [...new Set(log.entries.map((e) => `${e.from}->${e.to}`))];
341
+ return {
342
+ totalMigrations: migrationIds.size,
343
+ totalFiles: log.entries.length,
344
+ successCount: log.entries.filter((e) => e.success).length,
345
+ failureCount: log.entries.filter((e) => !e.success).length,
346
+ migrationPaths
347
+ };
348
+ }
349
+ /**
350
+ * Clear the audit log.
351
+ */
352
+ clear() {
353
+ this.write({ version: AUDIT_VERSION, entries: [] });
354
+ }
355
+ write(log) {
356
+ if (!existsSync(this.logDir)) {
357
+ mkdirSync(this.logDir, { recursive: true });
358
+ }
359
+ writeFileSync(this.logPath, JSON.stringify(log, null, 2));
360
+ }
361
+ hashContent(content) {
362
+ return createHash("sha256").update(content).digest("hex").substring(0, 16);
363
+ }
364
+ getCurrentUser() {
365
+ return process.env.USER || process.env.USERNAME || void 0;
366
+ }
367
+ };
368
+
369
+ // src/behavioral-warnings.ts
370
+ var BEHAVIORAL_RULES = [
371
+ // Yup -> Zod: Type coercion differences
372
+ {
373
+ category: "type-coercion",
374
+ migrations: ["yup->zod"],
375
+ detect: (text, filePath) => {
376
+ const warnings = [];
377
+ if (/yup\.(number|date)\s*\(\)/.test(text)) {
378
+ warnings.push({
379
+ category: "type-coercion",
380
+ message: "Yup silently coerces types; Zod rejects mismatches.",
381
+ detail: `Yup's number() accepts strings like "42" and coerces them. Zod's number() rejects strings. Use z.coerce.number() for equivalent behavior, especially for HTML form inputs which always return strings.`,
382
+ filePath,
383
+ severity: "warning",
384
+ migration: "yup->zod"
385
+ });
386
+ }
387
+ return warnings;
388
+ }
389
+ },
390
+ // Yup -> Zod: Form input string values
391
+ {
392
+ category: "form-input",
393
+ migrations: ["yup->zod"],
394
+ detect: (text, filePath) => {
395
+ const warnings = [];
396
+ const hasFormImport = /yupResolver|useFormik|from\s+['"]formik['"]|from\s+['"]@hookform/.test(
397
+ text
398
+ );
399
+ const hasNumberOrDate = /yup\.(number|date)\s*\(\)/.test(text);
400
+ if (hasFormImport && hasNumberOrDate) {
401
+ warnings.push({
402
+ category: "form-input",
403
+ message: "HTML inputs return strings \u2014 Zod will reject unless using z.coerce.*",
404
+ detail: 'HTML <input type="number"> returns strings. Yup coerces automatically, but Zod requires explicit coercion. Use z.coerce.number() or register({ valueAsNumber: true }) in React Hook Form.',
405
+ filePath,
406
+ severity: "error",
407
+ migration: "yup->zod"
408
+ });
409
+ }
410
+ return warnings;
411
+ }
412
+ },
413
+ // Joi -> Zod: Error handling paradigm shift
414
+ {
415
+ category: "error-handling",
416
+ migrations: ["joi->zod"],
417
+ detect: (text, filePath) => {
418
+ const warnings = [];
419
+ if (/\.validate\s*\(/.test(text) && /[Jj]oi/.test(text)) {
420
+ warnings.push({
421
+ category: "error-handling",
422
+ message: "Joi .validate() returns { value, error }; Zod .parse() throws.",
423
+ detail: "Joi uses an inspection pattern: .validate() returns an object with value and error. Zod .parse() throws a ZodError on failure. Use .safeParse() for a non-throwing equivalent that returns { success, data, error }.",
424
+ filePath,
425
+ severity: "warning",
426
+ migration: "joi->zod"
427
+ });
428
+ }
429
+ return warnings;
430
+ }
431
+ },
432
+ // Joi -> Zod: Null handling differences
433
+ {
434
+ category: "null-handling",
435
+ migrations: ["joi->zod"],
436
+ detect: (text, filePath) => {
437
+ const warnings = [];
438
+ if (/\.allow\s*\(\s*null\s*\)/.test(text)) {
439
+ warnings.push({
440
+ category: "null-handling",
441
+ message: "Joi .allow(null) vs Zod .nullable() have subtle differences.",
442
+ detail: 'Joi .allow(null) permits null alongside the base type. Zod .nullable() wraps the type in a union with null. Joi .allow("", null) has no single Zod equivalent \u2014 use z.union() or .transform().',
443
+ filePath,
444
+ severity: "info",
445
+ migration: "joi->zod"
446
+ });
447
+ }
448
+ return warnings;
449
+ }
450
+ },
451
+ // Zod v3 -> v4: Default value behavior change
452
+ {
453
+ category: "default-values",
454
+ migrations: ["zod-v3->v4"],
455
+ detect: (text, filePath) => {
456
+ const warnings = [];
457
+ if (/\.default\s*\(/.test(text) && /\.optional\s*\(\)/.test(text)) {
458
+ warnings.push({
459
+ category: "default-values",
460
+ message: ".default() + .optional() behavior changed silently in Zod v4.",
461
+ detail: "In Zod v3, .default(val).optional() returned undefined when property was missing. In Zod v4, it always returns the default value. This can cause unexpected behavior in API responses and form handling.",
462
+ filePath,
463
+ severity: "error",
464
+ migration: "zod-v3->v4"
465
+ });
466
+ }
467
+ if (/\.catch\s*\(/.test(text) && /\.optional\s*\(\)/.test(text)) {
468
+ warnings.push({
469
+ category: "default-values",
470
+ message: ".catch() + .optional() behavior changed in Zod v4.",
471
+ detail: "In Zod v4, object properties with .catch() that are .optional() now always return the caught value, even when the property is missing from input.",
472
+ filePath,
473
+ severity: "warning",
474
+ migration: "zod-v3->v4"
475
+ });
476
+ }
477
+ return warnings;
478
+ }
479
+ },
480
+ // Zod v3 -> v4: Error format differences
481
+ {
482
+ category: "error-format",
483
+ migrations: ["zod-v3->v4"],
484
+ detect: (text, filePath) => {
485
+ const warnings = [];
486
+ if (/ZodError/.test(text) && /instanceof\s+Error/.test(text)) {
487
+ warnings.push({
488
+ category: "error-format",
489
+ message: "ZodError no longer extends Error in Zod v4.",
490
+ detail: 'In Zod v4, ZodError no longer extends Error. Code using "instanceof Error" to catch ZodErrors will silently miss them. Use "instanceof ZodError" or z.isZodError() instead.',
491
+ filePath,
492
+ severity: "error",
493
+ migration: "zod-v3->v4"
494
+ });
495
+ }
496
+ return warnings;
497
+ }
498
+ },
499
+ // Zod v3 -> v4: Validation behavior differences
500
+ {
501
+ category: "validation-behavior",
502
+ migrations: ["zod-v3->v4"],
503
+ detect: (text, filePath) => {
504
+ const warnings = [];
505
+ if (/\.transform\s*\(/.test(text) && /\.refine\s*\(/.test(text)) {
506
+ warnings.push({
507
+ category: "validation-behavior",
508
+ message: ".transform() after .refine() behavior changed in Zod v4.",
509
+ detail: "In Zod v4, .transform() after .refine() may execute even if the refinement fails. Previously, transform was skipped on refinement failure.",
510
+ filePath,
511
+ severity: "warning",
512
+ migration: "zod-v3->v4"
513
+ });
514
+ }
515
+ return warnings;
516
+ }
517
+ },
518
+ // Zod -> Valibot: Error handling differences
519
+ {
520
+ category: "error-handling",
521
+ migrations: ["zod->valibot"],
522
+ detect: (text, filePath) => {
523
+ const warnings = [];
524
+ if (/\.parse\s*\(/.test(text) && /z\./.test(text)) {
525
+ warnings.push({
526
+ category: "error-handling",
527
+ message: "Zod .parse() throws ZodError; Valibot v.parse() throws ValiError.",
528
+ detail: "Error class and structure differ between Zod and Valibot. ZodError has .issues array; ValiError has .issues with different structure. Update all error handling code that inspects validation errors.",
529
+ filePath,
530
+ severity: "warning",
531
+ migration: "zod->valibot"
532
+ });
533
+ }
534
+ return warnings;
535
+ }
536
+ },
537
+ // io-ts -> Zod: Either monad vs throw/safeParse
538
+ {
539
+ category: "error-handling",
540
+ migrations: ["io-ts->zod"],
541
+ detect: (text, filePath) => {
542
+ const warnings = [];
543
+ if (/\bEither\b/.test(text) || /\b(fold|chain|map)\s*\(/.test(text)) {
544
+ warnings.push({
545
+ category: "error-handling",
546
+ message: "io-ts uses Either monad for errors; Zod uses throw/safeParse.",
547
+ detail: "io-ts returns Either<Errors, T> (Right for success, Left for failure). Zod .parse() throws, .safeParse() returns { success, data, error }. All fold/chain/map patterns over Either must be rewritten.",
548
+ filePath,
549
+ severity: "error",
550
+ migration: "io-ts->zod"
551
+ });
552
+ }
553
+ return warnings;
554
+ }
555
+ }
556
+ ];
557
+ var BehavioralWarningAnalyzer = class {
558
+ analyze(sourceFiles, from, to) {
559
+ const migration = `${from}->${to}`;
560
+ const warnings = [];
561
+ const applicableRules = BEHAVIORAL_RULES.filter((r) => r.migrations.includes(migration));
562
+ for (const sourceFile of sourceFiles) {
563
+ const filePath = sourceFile.getFilePath();
564
+ const text = sourceFile.getFullText();
565
+ const hasSourceLib = this.fileUsesLibrary(sourceFile, from);
566
+ if (!hasSourceLib) continue;
567
+ for (const rule of applicableRules) {
568
+ const ruleWarnings = rule.detect(text, filePath);
569
+ warnings.push(...ruleWarnings);
570
+ }
571
+ }
572
+ const summary = this.generateSummary(warnings, migration);
573
+ return { warnings, migrationPath: migration, summary };
574
+ }
575
+ fileUsesLibrary(sourceFile, library) {
576
+ for (const imp of sourceFile.getImportDeclarations()) {
577
+ const detected = detectSchemaLibrary(imp.getModuleSpecifierValue());
578
+ if (detected === library) return true;
579
+ if (library === "zod-v3" && detected === "zod") return true;
580
+ if (library === "zod" && detected === "zod") return true;
581
+ }
582
+ return false;
583
+ }
584
+ generateSummary(warnings, migration) {
585
+ if (warnings.length === 0) {
586
+ return `No behavioral differences detected for ${migration} migration.`;
587
+ }
588
+ const errorCount = warnings.filter((w) => w.severity === "error").length;
589
+ const warningCount = warnings.filter((w) => w.severity === "warning").length;
590
+ const infoCount = warnings.filter((w) => w.severity === "info").length;
591
+ const parts = [];
592
+ if (errorCount > 0) parts.push(`${errorCount} critical`);
593
+ if (warningCount > 0) parts.push(`${warningCount} warnings`);
594
+ if (infoCount > 0) parts.push(`${infoCount} info`);
595
+ return `Found ${warnings.length} behavioral difference(s) for ${migration}: ${parts.join(", ")}. Review before migrating.`;
596
+ }
597
+ };
598
+
599
+ // src/bundle-estimator.ts
600
+ var LIBRARY_SIZES = {
601
+ zod: { fullKb: 14, baseKb: 14, treeShakable: false },
602
+ "zod-v3": { fullKb: 14, baseKb: 14, treeShakable: false },
603
+ v4: { fullKb: 17.7, baseKb: 17.7, treeShakable: false },
604
+ "zod-v4": { fullKb: 17.7, baseKb: 17.7, treeShakable: false },
605
+ "zod-mini": { fullKb: 7.5, baseKb: 3.5, treeShakable: true },
606
+ yup: { fullKb: 13.6, baseKb: 13.6, treeShakable: false },
607
+ joi: { fullKb: 29.7, baseKb: 29.7, treeShakable: false },
608
+ "io-ts": { fullKb: 6.5, baseKb: 6.5, treeShakable: true },
609
+ valibot: { fullKb: 5.8, baseKb: 1.4, treeShakable: true }
610
+ };
611
+ var VALIDATOR_OVERHEAD = {
612
+ valibot: 0.05
613
+ };
614
+ var COMMON_VALIDATORS = /* @__PURE__ */ new Set([
615
+ "string",
616
+ "number",
617
+ "boolean",
618
+ "object",
619
+ "array",
620
+ "optional",
621
+ "nullable",
622
+ "enum",
623
+ "union",
624
+ "literal",
625
+ "date",
626
+ "email",
627
+ "url",
628
+ "uuid",
629
+ "min",
630
+ "max",
631
+ "regex",
632
+ "transform",
633
+ "refine",
634
+ "default",
635
+ "record",
636
+ "tuple",
637
+ "lazy",
638
+ "discriminatedUnion",
639
+ "intersection",
640
+ "partial",
641
+ "pick",
642
+ "omit",
643
+ "brand",
644
+ "pipe"
645
+ ]);
646
+ var BundleEstimator = class {
647
+ estimate(sourceFiles, from, to) {
648
+ const usedValidators = this.countUsedValidators(sourceFiles);
649
+ const fromInfo = this.getLibraryInfo(from, usedValidators);
650
+ const toInfo = this.getLibraryInfo(to, usedValidators);
651
+ const estimatedDelta = toInfo.estimatedUsedKb - fromInfo.estimatedUsedKb;
652
+ const deltaPercent = fromInfo.estimatedUsedKb > 0 ? Math.round(estimatedDelta / fromInfo.estimatedUsedKb * 100) : 0;
653
+ const caveats = this.generateCaveats(from, to, usedValidators);
654
+ const summary = this.generateSummary(fromInfo, toInfo, estimatedDelta, deltaPercent);
655
+ return {
656
+ from: fromInfo,
657
+ to: toInfo,
658
+ estimatedDelta,
659
+ deltaPercent,
660
+ summary,
661
+ caveats
662
+ };
663
+ }
664
+ countUsedValidators(sourceFiles) {
665
+ const usedSet = /* @__PURE__ */ new Set();
666
+ for (const file of sourceFiles) {
667
+ const text = file.getFullText();
668
+ for (const validator of COMMON_VALIDATORS) {
669
+ const pattern = new RegExp(`\\.${validator}\\s*[(<]`, "g");
670
+ if (pattern.test(text)) {
671
+ usedSet.add(validator);
672
+ }
673
+ }
674
+ }
675
+ return usedSet.size;
676
+ }
677
+ getLibraryInfo(library, usedValidators) {
678
+ const sizeKey = library === "zod-v3" ? "zod" : library;
679
+ const sizes = LIBRARY_SIZES[sizeKey] ?? { fullKb: 10, baseKb: 10, treeShakable: false };
680
+ let estimatedUsedKb;
681
+ if (sizes.treeShakable) {
682
+ const overhead = VALIDATOR_OVERHEAD[sizeKey] ?? 0.05;
683
+ estimatedUsedKb = Math.min(sizes.baseKb + usedValidators * overhead, sizes.fullKb);
684
+ } else {
685
+ estimatedUsedKb = sizes.fullKb;
686
+ }
687
+ return {
688
+ library: sizeKey,
689
+ minifiedGzipKb: sizes.fullKb,
690
+ treeShakable: sizes.treeShakable,
691
+ estimatedUsedKb: Math.round(estimatedUsedKb * 10) / 10
692
+ };
693
+ }
694
+ generateCaveats(from, to, _usedValidators) {
695
+ const caveats = [
696
+ "Sizes are estimates based on minified+gzipped bundle analysis.",
697
+ "Actual impact depends on bundler configuration, tree-shaking, and code splitting."
698
+ ];
699
+ if (to === "valibot") {
700
+ caveats.push(
701
+ "Valibot is fully tree-shakable \u2014 actual size depends on which validators you use."
702
+ );
703
+ caveats.push(
704
+ "Some developers report smaller-than-expected savings (6kB or less) in real projects."
705
+ );
706
+ }
707
+ if (from === "zod-v3" && to === "v4") {
708
+ caveats.push(
709
+ "Zod v4 is ~26% larger than v3 due to JIT compilation engine. Consider zod/mini for size-sensitive apps."
710
+ );
711
+ }
712
+ if (from === "joi") {
713
+ caveats.push(
714
+ "Joi is the largest schema library. Any migration will likely reduce bundle size."
715
+ );
716
+ }
717
+ return caveats;
718
+ }
719
+ generateSummary(from, to, delta, deltaPercent) {
720
+ const direction = delta > 0 ? "increase" : delta < 0 ? "decrease" : "no change";
721
+ const absDelta = Math.abs(Math.round(delta * 10) / 10);
722
+ return `Estimated bundle ${direction}: ${from.library} (${from.estimatedUsedKb}kB) \u2192 ${to.library} (${to.estimatedUsedKb}kB) = ${delta > 0 ? "+" : delta < 0 ? "-" : ""}${absDelta}kB (${deltaPercent > 0 ? "+" : ""}${deltaPercent}%)`;
723
+ }
724
+ };
725
+
267
726
  // src/chain.ts
268
727
  import { Project as Project2 } from "ts-morph";
269
728
  var MigrationChain = class {
@@ -330,12 +789,12 @@ var MigrationChain = class {
330
789
  };
331
790
 
332
791
  // src/compatibility.ts
333
- import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
334
- import { join as join2 } from "path";
792
+ import { existsSync as existsSync3, readFileSync as readFileSync3 } from "fs";
793
+ import { join as join3 } from "path";
335
794
 
336
795
  // src/ecosystem.ts
337
- import { existsSync, readFileSync } from "fs";
338
- import { join } from "path";
796
+ import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
797
+ import { join as join2 } from "path";
339
798
  var ECOSYSTEM_RULES = [
340
799
  // ORM integrations
341
800
  {
@@ -488,6 +947,58 @@ var ECOSYSTEM_RULES = [
488
947
  severity: "warning",
489
948
  upgradeCommand: "npm install @asteasolutions/zod-to-openapi@latest"
490
949
  })
950
+ },
951
+ // AI/MCP integrations
952
+ {
953
+ package: "@modelcontextprotocol/sdk",
954
+ category: "api",
955
+ migrations: ["zod-v3->v4"],
956
+ check: () => ({
957
+ issue: "MCP SDK may have Zod v4 compatibility issues. MCP servers typically expect Zod v3 schemas.",
958
+ suggestion: "Check MCP SDK release notes for Zod v4 support before upgrading. Consider staying on Zod v3 for MCP servers.",
959
+ severity: "warning",
960
+ upgradeCommand: "npm install @modelcontextprotocol/sdk@latest"
961
+ })
962
+ },
963
+ {
964
+ package: "@openai/agents",
965
+ category: "api",
966
+ migrations: ["zod-v3->v4"],
967
+ check: () => ({
968
+ issue: "OpenAI Agents SDK recommends pinning to zod@3.25.67 due to TS2589 errors with newer versions.",
969
+ suggestion: "Pin zod to 3.25.67 for OpenAI Agents SDK compatibility, or wait for an SDK update with Zod v4 support.",
970
+ severity: "error"
971
+ })
972
+ },
973
+ // Additional validation utilities
974
+ {
975
+ package: "zod-to-json-schema",
976
+ category: "validation-util",
977
+ migrations: ["zod-v3->v4"],
978
+ check: (version) => {
979
+ const majorMatch = version.match(/(\d+)/);
980
+ const major = majorMatch?.[1] ? Number.parseInt(majorMatch[1], 10) : 0;
981
+ if (major < 4) {
982
+ return {
983
+ issue: "zod-to-json-schema v3 may not fully support Zod v4 schemas.",
984
+ suggestion: "Upgrade to zod-to-json-schema v4+ for full Zod v4 support.",
985
+ severity: "warning",
986
+ upgradeCommand: "npm install zod-to-json-schema@latest"
987
+ };
988
+ }
989
+ return null;
990
+ }
991
+ },
992
+ {
993
+ package: "react-hook-form",
994
+ category: "form",
995
+ migrations: ["zod-v3->v4"],
996
+ check: () => ({
997
+ issue: "React Hook Form with zodResolver may throw uncaught ZodError instead of populating formState.errors with Zod v4.",
998
+ suggestion: "Upgrade @hookform/resolvers to the latest version and test form validation thoroughly.",
999
+ severity: "warning",
1000
+ upgradeCommand: "npm install @hookform/resolvers@latest react-hook-form@latest"
1001
+ })
491
1002
  }
492
1003
  ];
493
1004
  var EcosystemAnalyzer = class {
@@ -496,13 +1007,13 @@ var EcosystemAnalyzer = class {
496
1007
  const dependencies = [];
497
1008
  const warnings = [];
498
1009
  const blockers = [];
499
- const pkgPath = join(projectPath, "package.json");
500
- if (!existsSync(pkgPath)) {
1010
+ const pkgPath = join2(projectPath, "package.json");
1011
+ if (!existsSync2(pkgPath)) {
501
1012
  return { dependencies, warnings, blockers };
502
1013
  }
503
1014
  let allDeps = {};
504
1015
  try {
505
- const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
1016
+ const pkg = JSON.parse(readFileSync2(pkgPath, "utf-8"));
506
1017
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
507
1018
  } catch {
508
1019
  return { dependencies, warnings, blockers };
@@ -532,6 +1043,20 @@ var EcosystemAnalyzer = class {
532
1043
  }
533
1044
  return { dependencies, warnings, blockers };
534
1045
  }
1046
+ /**
1047
+ * Returns a list of npm install commands needed to resolve ecosystem issues.
1048
+ */
1049
+ getUpgradeCommands(report) {
1050
+ const commands = [];
1051
+ const seen = /* @__PURE__ */ new Set();
1052
+ for (const dep of report.dependencies) {
1053
+ if (dep.upgradeCommand && !seen.has(dep.upgradeCommand)) {
1054
+ seen.add(dep.upgradeCommand);
1055
+ commands.push(dep.upgradeCommand);
1056
+ }
1057
+ }
1058
+ return commands;
1059
+ }
535
1060
  };
536
1061
 
537
1062
  // src/compatibility.ts
@@ -609,10 +1134,10 @@ var CompatibilityAnalyzer = class {
609
1134
  ecosystemAnalyzer = new EcosystemAnalyzer();
610
1135
  detectVersions(projectPath) {
611
1136
  const versions = [];
612
- const pkgPath = join2(projectPath, "package.json");
613
- if (!existsSync2(pkgPath)) return versions;
1137
+ const pkgPath = join3(projectPath, "package.json");
1138
+ if (!existsSync3(pkgPath)) return versions;
614
1139
  try {
615
- const pkg = JSON.parse(readFileSync2(pkgPath, "utf-8"));
1140
+ const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
616
1141
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
617
1142
  const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
618
1143
  for (const lib of knownLibs) {
@@ -834,8 +1359,8 @@ async function loadConfig(configPath) {
834
1359
  }
835
1360
 
836
1361
  // src/dependency-graph.ts
837
- import { existsSync as existsSync3, readdirSync, readFileSync as readFileSync3 } from "fs";
838
- import { join as join3, resolve } from "path";
1362
+ import { existsSync as existsSync4, readdirSync, readFileSync as readFileSync4 } from "fs";
1363
+ import { join as join4, resolve } from "path";
839
1364
  var SchemaDependencyResolver = class {
840
1365
  resolve(project, filePaths) {
841
1366
  const fileSet = new Set(filePaths);
@@ -922,39 +1447,96 @@ var SchemaDependencyResolver = class {
922
1447
  }
923
1448
  };
924
1449
  var SCHEMA_PACKAGES = /* @__PURE__ */ new Set(["zod", "yup", "joi", "io-ts", "valibot", "@effect/schema"]);
1450
+ function computeParallelBatches(packages, suggestedOrder) {
1451
+ const nameSet = new Set(packages.map((p) => p.name));
1452
+ const depMap = /* @__PURE__ */ new Map();
1453
+ for (const pkg of packages) {
1454
+ depMap.set(pkg.name, new Set(pkg.dependencies.filter((d) => nameSet.has(d))));
1455
+ }
1456
+ const depths = /* @__PURE__ */ new Map();
1457
+ const getDepth = (name, visited) => {
1458
+ const cached = depths.get(name);
1459
+ if (cached !== void 0) return cached;
1460
+ if (visited.has(name)) return 0;
1461
+ visited.add(name);
1462
+ const deps = depMap.get(name) ?? /* @__PURE__ */ new Set();
1463
+ let maxDepth = 0;
1464
+ for (const dep of deps) {
1465
+ maxDepth = Math.max(maxDepth, getDepth(dep, visited) + 1);
1466
+ }
1467
+ depths.set(name, maxDepth);
1468
+ return maxDepth;
1469
+ };
1470
+ for (const name of suggestedOrder) {
1471
+ getDepth(name, /* @__PURE__ */ new Set());
1472
+ }
1473
+ const batchMap = /* @__PURE__ */ new Map();
1474
+ for (const name of suggestedOrder) {
1475
+ const depth = depths.get(name) ?? 0;
1476
+ const batch = batchMap.get(depth) ?? [];
1477
+ batch.push(name);
1478
+ batchMap.set(depth, batch);
1479
+ }
1480
+ const batches = [];
1481
+ const sortedDepths = [...batchMap.keys()].sort((a, b) => a - b);
1482
+ for (const depth of sortedDepths) {
1483
+ const pkgs = batchMap.get(depth);
1484
+ if (pkgs) batches.push({ index: batches.length, packages: pkgs });
1485
+ }
1486
+ return batches;
1487
+ }
925
1488
  var MonorepoResolver = class {
926
1489
  detect(projectPath) {
927
- const pkgPath = join3(projectPath, "package.json");
928
- if (!existsSync3(pkgPath)) return false;
929
- try {
930
- const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
931
- return !!pkg.workspaces;
932
- } catch {
933
- return false;
1490
+ const pkgPath = join4(projectPath, "package.json");
1491
+ if (existsSync4(pkgPath)) {
1492
+ try {
1493
+ const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
1494
+ if (pkg.workspaces) return true;
1495
+ } catch {
1496
+ }
1497
+ }
1498
+ if (existsSync4(join4(projectPath, "pnpm-workspace.yaml"))) return true;
1499
+ return false;
1500
+ }
1501
+ /**
1502
+ * Detect which workspace manager is being used.
1503
+ */
1504
+ detectManager(projectPath) {
1505
+ if (existsSync4(join4(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
1506
+ const pkgPath = join4(projectPath, "package.json");
1507
+ if (existsSync4(pkgPath)) {
1508
+ try {
1509
+ const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
1510
+ if (pkg.packageManager?.startsWith("yarn")) return "yarn";
1511
+ if (pkg.packageManager?.startsWith("pnpm")) return "pnpm";
1512
+ } catch {
1513
+ }
934
1514
  }
1515
+ if (existsSync4(join4(projectPath, "pnpm-lock.yaml"))) return "pnpm";
1516
+ if (existsSync4(join4(projectPath, "yarn.lock"))) return "yarn";
1517
+ return "npm";
935
1518
  }
936
1519
  analyze(projectPath) {
937
- const pkgPath = join3(projectPath, "package.json");
938
- if (!existsSync3(pkgPath)) {
1520
+ const pkgPath = join4(projectPath, "package.json");
1521
+ if (!existsSync4(pkgPath)) {
939
1522
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
940
1523
  }
941
1524
  let workspaceGlobs;
942
1525
  try {
943
- const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
944
- if (!pkg.workspaces) {
1526
+ workspaceGlobs = this.resolveWorkspaceGlobs(projectPath);
1527
+ if (workspaceGlobs.length === 0) {
945
1528
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
946
1529
  }
947
- workspaceGlobs = Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
948
1530
  } catch {
949
1531
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
950
1532
  }
951
1533
  const packages = [];
952
1534
  const resolvedDirs = this.resolveWorkspaceDirs(projectPath, workspaceGlobs);
953
1535
  for (const dir of resolvedDirs) {
954
- const wsPkgPath = join3(dir, "package.json");
955
- if (!existsSync3(wsPkgPath)) continue;
1536
+ const wsPkgPath = join4(dir, "package.json");
1537
+ if (!existsSync4(wsPkgPath)) continue;
956
1538
  try {
957
- const wsPkg = JSON.parse(readFileSync3(wsPkgPath, "utf-8"));
1539
+ const wsPkg = JSON.parse(readFileSync4(wsPkgPath, "utf-8"));
958
1540
  if (!wsPkg.name) continue;
959
1541
  const allDeps = { ...wsPkg.dependencies, ...wsPkg.devDependencies };
960
1542
  const depNames = Object.keys(allDeps);
@@ -993,18 +1575,70 @@ var MonorepoResolver = class {
993
1575
  }
994
1576
  return sorted;
995
1577
  }
1578
+ /**
1579
+ * Resolve workspace glob patterns from any supported format.
1580
+ * Supports: npm/yarn workspaces (package.json), pnpm-workspace.yaml
1581
+ */
1582
+ resolveWorkspaceGlobs(projectPath) {
1583
+ const pnpmPath = join4(projectPath, "pnpm-workspace.yaml");
1584
+ if (existsSync4(pnpmPath)) {
1585
+ return this.parsePnpmWorkspace(pnpmPath);
1586
+ }
1587
+ const pkgPath = join4(projectPath, "package.json");
1588
+ if (existsSync4(pkgPath)) {
1589
+ try {
1590
+ const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
1591
+ if (pkg.workspaces) {
1592
+ return Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
1593
+ }
1594
+ } catch {
1595
+ }
1596
+ }
1597
+ return [];
1598
+ }
1599
+ /**
1600
+ * Parse pnpm-workspace.yaml to extract workspace package globs.
1601
+ * Simple YAML parsing for the common format:
1602
+ * ```
1603
+ * packages:
1604
+ * - 'packages/*'
1605
+ * - 'apps/*'
1606
+ * ```
1607
+ */
1608
+ parsePnpmWorkspace(filePath) {
1609
+ const content = readFileSync4(filePath, "utf-8");
1610
+ const globs = [];
1611
+ let inPackages = false;
1612
+ for (const line of content.split("\n")) {
1613
+ const trimmed = line.trim();
1614
+ if (trimmed === "packages:") {
1615
+ inPackages = true;
1616
+ continue;
1617
+ }
1618
+ if (inPackages && /^\w/.test(trimmed) && !trimmed.startsWith("-")) {
1619
+ break;
1620
+ }
1621
+ if (inPackages && trimmed.startsWith("-")) {
1622
+ const pattern = trimmed.replace(/^-\s*/, "").replace(/^['"]|['"]$/g, "");
1623
+ if (pattern) {
1624
+ globs.push(pattern);
1625
+ }
1626
+ }
1627
+ }
1628
+ return globs;
1629
+ }
996
1630
  resolveWorkspaceDirs(projectPath, globs) {
997
1631
  const dirs = [];
998
1632
  for (const glob of globs) {
999
1633
  const clean = glob.replace(/\/?\*$/, "");
1000
1634
  const base = resolve(projectPath, clean);
1001
- if (!existsSync3(base)) continue;
1635
+ if (!existsSync4(base)) continue;
1002
1636
  if (glob.endsWith("*")) {
1003
1637
  try {
1004
1638
  const entries = readdirSync(base, { withFileTypes: true });
1005
1639
  for (const entry of entries) {
1006
1640
  if (entry.isDirectory()) {
1007
- dirs.push(join3(base, entry.name));
1641
+ dirs.push(join4(base, entry.name));
1008
1642
  }
1009
1643
  }
1010
1644
  } catch {
@@ -1018,8 +1652,8 @@ var MonorepoResolver = class {
1018
1652
  };
1019
1653
 
1020
1654
  // src/detailed-analyzer.ts
1021
- import { existsSync as existsSync4, readFileSync as readFileSync4 } from "fs";
1022
- import { join as join4 } from "path";
1655
+ import { existsSync as existsSync5, readFileSync as readFileSync5 } from "fs";
1656
+ import { join as join5 } from "path";
1023
1657
  var COMPLEXITY_CHAIN_WEIGHT = 2;
1024
1658
  var COMPLEXITY_DEPTH_WEIGHT = 3;
1025
1659
  var COMPLEXITY_VALIDATION_WEIGHT = 1;
@@ -1084,10 +1718,10 @@ var DetailedAnalyzer = class {
1084
1718
  }
1085
1719
  detectLibraryVersions(projectPath) {
1086
1720
  const versions = [];
1087
- const pkgPath = join4(projectPath, "package.json");
1088
- if (!existsSync4(pkgPath)) return versions;
1721
+ const pkgPath = join5(projectPath, "package.json");
1722
+ if (!existsSync5(pkgPath)) return versions;
1089
1723
  try {
1090
- const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
1724
+ const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
1091
1725
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
1092
1726
  const allDeps = {
1093
1727
  ...pkg.dependencies,
@@ -1347,6 +1981,7 @@ var FormResolverMigrator = class {
1347
1981
  // src/governance.ts
1348
1982
  var GovernanceEngine = class {
1349
1983
  rules = /* @__PURE__ */ new Map();
1984
+ customRuleFunctions = /* @__PURE__ */ new Map();
1350
1985
  configure(rules) {
1351
1986
  this.rules.clear();
1352
1987
  for (const [name, config] of Object.entries(rules)) {
@@ -1355,6 +1990,13 @@ var GovernanceEngine = class {
1355
1990
  }
1356
1991
  }
1357
1992
  }
1993
+ /**
1994
+ * Register a custom governance rule function.
1995
+ * Custom rules are executed per-file alongside built-in rules.
1996
+ */
1997
+ registerRule(name, fn) {
1998
+ this.customRuleFunctions.set(name, fn);
1999
+ }
1358
2000
  analyze(project) {
1359
2001
  const violations = [];
1360
2002
  let schemasChecked = 0;
@@ -1430,6 +2072,104 @@ var GovernanceEngine = class {
1430
2072
  });
1431
2073
  }
1432
2074
  }
2075
+ if (this.rules.has("require-safeParse")) {
2076
+ if (text.includes(".parse(") && !text.includes(".safeParse(")) {
2077
+ violations.push({
2078
+ rule: "require-safeParse",
2079
+ message: `Schema "${schemaName}" uses .parse() \u2014 prefer .safeParse() for safer error handling`,
2080
+ filePath,
2081
+ lineNumber,
2082
+ schemaName,
2083
+ severity: "warning",
2084
+ fixable: true
2085
+ });
2086
+ }
2087
+ }
2088
+ if (this.rules.has("require-description")) {
2089
+ if (!text.includes(".describe(")) {
2090
+ violations.push({
2091
+ rule: "require-description",
2092
+ message: `Schema "${schemaName}" missing .describe() \u2014 add a description for documentation`,
2093
+ filePath,
2094
+ lineNumber,
2095
+ schemaName,
2096
+ severity: "warning",
2097
+ fixable: true
2098
+ });
2099
+ }
2100
+ }
2101
+ if (this.rules.has("no-coerce-in-api")) {
2102
+ if (/\.coerce\./.test(text)) {
2103
+ violations.push({
2104
+ rule: "no-coerce-in-api",
2105
+ message: `Schema "${schemaName}" uses z.coerce.* \u2014 coercion in API validation is a security risk`,
2106
+ filePath,
2107
+ lineNumber,
2108
+ schemaName,
2109
+ severity: "error",
2110
+ fixable: false
2111
+ });
2112
+ }
2113
+ }
2114
+ if (this.rules.has("require-max-length")) {
2115
+ if (text.includes(".string()") && !text.includes(".max(") && !text.includes(".length(")) {
2116
+ violations.push({
2117
+ rule: "require-max-length",
2118
+ message: `Schema "${schemaName}" has string without max length \u2014 required for DoS prevention`,
2119
+ filePath,
2120
+ lineNumber,
2121
+ schemaName,
2122
+ severity: "error",
2123
+ fixable: true
2124
+ });
2125
+ }
2126
+ }
2127
+ if (this.rules.has("max-nesting-depth")) {
2128
+ const config = this.rules.get("max-nesting-depth") ?? {};
2129
+ const maxDepth = config.threshold ?? 5;
2130
+ const depth = this.measureNestingDepth(text);
2131
+ if (depth > maxDepth) {
2132
+ violations.push({
2133
+ rule: "max-nesting-depth",
2134
+ message: `Schema "${schemaName}" nesting depth (${depth}) exceeds limit (${maxDepth})`,
2135
+ filePath,
2136
+ lineNumber,
2137
+ schemaName,
2138
+ severity: "warning",
2139
+ fixable: false
2140
+ });
2141
+ }
2142
+ }
2143
+ }
2144
+ }
2145
+ for (const sourceFile of project.getSourceFiles()) {
2146
+ const library = this.detectFileLibrary(sourceFile);
2147
+ if (library === "unknown") continue;
2148
+ const filePath = sourceFile.getFilePath();
2149
+ const text = sourceFile.getFullText();
2150
+ if (this.rules.has("no-dynamic-schemas")) {
2151
+ const dynamicPatterns = this.detectDynamicSchemas(text, library);
2152
+ for (const lineNumber of dynamicPatterns) {
2153
+ violations.push({
2154
+ rule: "no-dynamic-schemas",
2155
+ message: "Schema created inside function body \u2014 move to module level for performance",
2156
+ filePath,
2157
+ lineNumber,
2158
+ schemaName: "(dynamic)",
2159
+ severity: "warning",
2160
+ fixable: false
2161
+ });
2162
+ }
2163
+ }
2164
+ }
2165
+ for (const [ruleName, ruleFn] of this.customRuleFunctions) {
2166
+ const config = this.rules.get(ruleName);
2167
+ if (!config) continue;
2168
+ for (const sourceFile of project.getSourceFiles()) {
2169
+ const library = this.detectFileLibrary(sourceFile);
2170
+ if (library === "unknown") continue;
2171
+ const ruleViolations = ruleFn(sourceFile, config);
2172
+ violations.push(...ruleViolations);
1433
2173
  }
1434
2174
  }
1435
2175
  return {
@@ -1446,6 +2186,57 @@ var GovernanceEngine = class {
1446
2186
  }
1447
2187
  return "unknown";
1448
2188
  }
2189
+ measureNestingDepth(text) {
2190
+ let maxDepth = 0;
2191
+ let current = 0;
2192
+ for (const char of text) {
2193
+ if (char === "(") {
2194
+ current++;
2195
+ if (current > maxDepth) maxDepth = current;
2196
+ } else if (char === ")") {
2197
+ current--;
2198
+ }
2199
+ }
2200
+ return maxDepth;
2201
+ }
2202
+ detectDynamicSchemas(text, library) {
2203
+ const lineNumbers = [];
2204
+ const prefix = this.getSchemaPrefix(library);
2205
+ if (!prefix) return lineNumbers;
2206
+ const lines = text.split("\n");
2207
+ let insideFunction = 0;
2208
+ for (let i = 0; i < lines.length; i++) {
2209
+ const line = lines[i] ?? "";
2210
+ const opens = (line.match(/\{/g) || []).length;
2211
+ const closes = (line.match(/\}/g) || []).length;
2212
+ if (/(?:function\s+\w+|=>)\s*\{/.test(line)) {
2213
+ insideFunction += opens;
2214
+ insideFunction -= closes;
2215
+ continue;
2216
+ }
2217
+ insideFunction += opens - closes;
2218
+ if (insideFunction > 0 && line.includes(prefix)) {
2219
+ lineNumbers.push(i + 1);
2220
+ }
2221
+ }
2222
+ return lineNumbers;
2223
+ }
2224
+ getSchemaPrefix(library) {
2225
+ switch (library) {
2226
+ case "zod":
2227
+ return "z.";
2228
+ case "yup":
2229
+ return "yup.";
2230
+ case "joi":
2231
+ return "Joi.";
2232
+ case "io-ts":
2233
+ return "t.";
2234
+ case "valibot":
2235
+ return "v.";
2236
+ default:
2237
+ return null;
2238
+ }
2239
+ }
1449
2240
  isSchemaExpression(text, library) {
1450
2241
  switch (library) {
1451
2242
  case "zod":
@@ -1465,16 +2256,16 @@ var GovernanceEngine = class {
1465
2256
  };
1466
2257
 
1467
2258
  // src/incremental.ts
1468
- import { existsSync as existsSync5, mkdirSync, readFileSync as readFileSync5, writeFileSync } from "fs";
1469
- import { join as join5 } from "path";
2259
+ import { existsSync as existsSync6, mkdirSync as mkdirSync2, readFileSync as readFileSync6, unlinkSync, writeFileSync as writeFileSync2 } from "fs";
2260
+ import { join as join6 } from "path";
1470
2261
  var STATE_DIR = ".schemashift";
1471
2262
  var STATE_FILE = "incremental.json";
1472
2263
  var IncrementalTracker = class {
1473
2264
  stateDir;
1474
2265
  statePath;
1475
2266
  constructor(projectPath) {
1476
- this.stateDir = join5(projectPath, STATE_DIR);
1477
- this.statePath = join5(this.stateDir, STATE_FILE);
2267
+ this.stateDir = join6(projectPath, STATE_DIR);
2268
+ this.statePath = join6(this.stateDir, STATE_FILE);
1478
2269
  }
1479
2270
  start(files, from, to) {
1480
2271
  const state = {
@@ -1509,9 +2300,9 @@ var IncrementalTracker = class {
1509
2300
  this.saveState(state);
1510
2301
  }
1511
2302
  getState() {
1512
- if (!existsSync5(this.statePath)) return null;
2303
+ if (!existsSync6(this.statePath)) return null;
1513
2304
  try {
1514
- return JSON.parse(readFileSync5(this.statePath, "utf-8"));
2305
+ return JSON.parse(readFileSync6(this.statePath, "utf-8"));
1515
2306
  } catch {
1516
2307
  return null;
1517
2308
  }
@@ -1538,21 +2329,21 @@ var IncrementalTracker = class {
1538
2329
  };
1539
2330
  }
1540
2331
  clear() {
1541
- if (existsSync5(this.statePath)) {
1542
- writeFileSync(this.statePath, "");
2332
+ if (existsSync6(this.statePath)) {
2333
+ unlinkSync(this.statePath);
1543
2334
  }
1544
2335
  }
1545
2336
  saveState(state) {
1546
- if (!existsSync5(this.stateDir)) {
1547
- mkdirSync(this.stateDir, { recursive: true });
2337
+ if (!existsSync6(this.stateDir)) {
2338
+ mkdirSync2(this.stateDir, { recursive: true });
1548
2339
  }
1549
- writeFileSync(this.statePath, JSON.stringify(state, null, 2));
2340
+ writeFileSync2(this.statePath, JSON.stringify(state, null, 2));
1550
2341
  }
1551
2342
  };
1552
2343
 
1553
2344
  // src/package-updater.ts
1554
- import { existsSync as existsSync6, readFileSync as readFileSync6, writeFileSync as writeFileSync2 } from "fs";
1555
- import { join as join6 } from "path";
2345
+ import { existsSync as existsSync7, readFileSync as readFileSync7, writeFileSync as writeFileSync3 } from "fs";
2346
+ import { join as join7 } from "path";
1556
2347
  var TARGET_VERSIONS = {
1557
2348
  "yup->zod": { zod: "^3.24.0" },
1558
2349
  "joi->zod": { zod: "^3.24.0" },
@@ -1573,14 +2364,14 @@ var PackageUpdater = class {
1573
2364
  const add = {};
1574
2365
  const remove = [];
1575
2366
  const warnings = [];
1576
- const pkgPath = join6(projectPath, "package.json");
1577
- if (!existsSync6(pkgPath)) {
2367
+ const pkgPath = join7(projectPath, "package.json");
2368
+ if (!existsSync7(pkgPath)) {
1578
2369
  warnings.push("No package.json found. Cannot plan dependency updates.");
1579
2370
  return { add, remove, warnings };
1580
2371
  }
1581
2372
  let pkg;
1582
2373
  try {
1583
- pkg = JSON.parse(readFileSync6(pkgPath, "utf-8"));
2374
+ pkg = JSON.parse(readFileSync7(pkgPath, "utf-8"));
1584
2375
  } catch {
1585
2376
  warnings.push("Could not parse package.json.");
1586
2377
  return { add, remove, warnings };
@@ -1610,9 +2401,9 @@ var PackageUpdater = class {
1610
2401
  return { add, remove, warnings };
1611
2402
  }
1612
2403
  apply(projectPath, plan) {
1613
- const pkgPath = join6(projectPath, "package.json");
1614
- if (!existsSync6(pkgPath)) return;
1615
- const pkgText = readFileSync6(pkgPath, "utf-8");
2404
+ const pkgPath = join7(projectPath, "package.json");
2405
+ if (!existsSync7(pkgPath)) return;
2406
+ const pkgText = readFileSync7(pkgPath, "utf-8");
1616
2407
  const pkg = JSON.parse(pkgText);
1617
2408
  if (!pkg.dependencies) pkg.dependencies = {};
1618
2409
  for (const [name, version] of Object.entries(plan.add)) {
@@ -1622,11 +2413,133 @@ var PackageUpdater = class {
1622
2413
  pkg.dependencies[name] = version;
1623
2414
  }
1624
2415
  }
1625
- writeFileSync2(pkgPath, `${JSON.stringify(pkg, null, 2)}
2416
+ writeFileSync3(pkgPath, `${JSON.stringify(pkg, null, 2)}
1626
2417
  `);
1627
2418
  }
1628
2419
  };
1629
2420
 
2421
+ // src/performance-analyzer.ts
2422
+ var PerformanceAnalyzer = class {
2423
+ analyze(sourceFiles, from, to) {
2424
+ const warnings = [];
2425
+ let parseCallSites = 0;
2426
+ let dynamicSchemaCount = 0;
2427
+ for (const file of sourceFiles) {
2428
+ const text = file.getFullText();
2429
+ const filePath = file.getFilePath();
2430
+ const parseMatches = text.match(/\.(parse|safeParse)\s*\(/g);
2431
+ if (parseMatches) {
2432
+ parseCallSites += parseMatches.length;
2433
+ }
2434
+ const dynamicResult = this.detectDynamicSchemas(text, filePath);
2435
+ dynamicSchemaCount += dynamicResult.count;
2436
+ warnings.push(...dynamicResult.warnings);
2437
+ this.addMigrationWarnings(text, filePath, from, to, warnings);
2438
+ }
2439
+ const recommendation = this.getRecommendation(from, to, parseCallSites, dynamicSchemaCount);
2440
+ const summary = this.generateSummary(warnings, parseCallSites, dynamicSchemaCount);
2441
+ return {
2442
+ warnings,
2443
+ parseCallSites,
2444
+ dynamicSchemaCount,
2445
+ recommendation,
2446
+ summary
2447
+ };
2448
+ }
2449
+ detectDynamicSchemas(text, filePath) {
2450
+ const warnings = [];
2451
+ let count = 0;
2452
+ const functionBodyPattern = /(?:function\s+\w+\s*\([^)]*\)|const\s+\w+\s*=\s*(?:async\s+)?(?:\([^)]*\)|[a-zA-Z_]\w*)\s*=>)\s*\{[^}]*(?:z\.|yup\.|Joi\.|v\.)\w+\s*\(/g;
2453
+ for (const match of text.matchAll(functionBodyPattern)) {
2454
+ count++;
2455
+ const lineNumber = text.substring(0, match.index).split("\n").length;
2456
+ warnings.push({
2457
+ category: "dynamic-schemas",
2458
+ message: "Schema created inside function body \u2014 may cause performance issues with Zod v4.",
2459
+ detail: "Zod v4 uses JIT compilation, making schema creation ~17x slower than v3. Move schema definitions to module level to avoid re-creation on every call.",
2460
+ filePath,
2461
+ lineNumber,
2462
+ severity: "warning"
2463
+ });
2464
+ }
2465
+ const reactComponentPattern = /(?:function\s+[A-Z]\w*\s*\([^)]*\)|const\s+[A-Z]\w*\s*[:=])[^{]*\{[^}]*(?:z\.|yup\.|Joi\.)\w+\s*\(/g;
2466
+ for (const match of text.matchAll(reactComponentPattern)) {
2467
+ count++;
2468
+ const lineNumber = text.substring(0, match.index).split("\n").length;
2469
+ warnings.push({
2470
+ category: "schema-creation",
2471
+ message: "Schema appears to be created inside a React component.",
2472
+ detail: "Schemas created inside React components are re-created on every render. Move schema definitions outside the component or wrap in useMemo(). This is especially important for Zod v4 due to JIT compilation overhead.",
2473
+ filePath,
2474
+ lineNumber,
2475
+ severity: "warning"
2476
+ });
2477
+ }
2478
+ return { count, warnings };
2479
+ }
2480
+ addMigrationWarnings(text, filePath, from, to, warnings) {
2481
+ const migration = `${from}->${to}`;
2482
+ if (migration === "zod-v3->v4") {
2483
+ if (/edge-runtime|@vercel\/edge|cloudflare.*workers|deno\.serve|Deno\.serve/i.test(text) || /export\s+const\s+runtime\s*=\s*['"]edge['"]/i.test(text)) {
2484
+ warnings.push({
2485
+ category: "cold-start",
2486
+ message: "Edge/serverless environment detected \u2014 Zod v4 JIT compilation increases cold start time.",
2487
+ detail: "Zod v4 JIT trades slower schema creation for faster repeated parsing. In serverless/edge environments with short-lived instances, the JIT cost may not amortize. Consider Valibot or staying on Zod v3 for cold-start-sensitive code.",
2488
+ filePath,
2489
+ severity: "warning"
2490
+ });
2491
+ }
2492
+ const parseCount = (text.match(/\.parse\s*\(/g) || []).length;
2493
+ if (parseCount > 10) {
2494
+ warnings.push({
2495
+ category: "repeated-parsing",
2496
+ message: `High parse() usage (${parseCount} call sites) \u2014 Zod v4 JIT will benefit here.`,
2497
+ detail: "Zod v4 JIT compilation makes repeated parsing ~8x faster. This file has many parse() calls and will see performance improvement.",
2498
+ filePath,
2499
+ severity: "info"
2500
+ });
2501
+ }
2502
+ }
2503
+ if (migration === "zod->valibot" && /\.parse\s*\(/.test(text)) {
2504
+ warnings.push({
2505
+ category: "repeated-parsing",
2506
+ message: "Valibot parsing performance is comparable to Zod v4 for most schemas.",
2507
+ detail: "Valibot v1+ offers similar runtime performance to Zod v4 with significantly smaller bundle size. No JIT overhead means consistent performance across all environments.",
2508
+ filePath,
2509
+ severity: "info"
2510
+ });
2511
+ }
2512
+ }
2513
+ getRecommendation(from, to, parseCallSites, dynamicSchemaCount) {
2514
+ const migration = `${from}->${to}`;
2515
+ if (migration === "zod-v3->v4") {
2516
+ if (dynamicSchemaCount > 5) {
2517
+ return "Many dynamic schemas detected. Zod v4 JIT makes schema creation 17x slower. Move schemas to module level before migrating, or consider Valibot for size-sensitive apps.";
2518
+ }
2519
+ if (parseCallSites > 50) {
2520
+ return "High parse() volume detected. Zod v4 JIT will significantly benefit repeated parsing (up to 8x faster). Migration recommended for performance.";
2521
+ }
2522
+ return "Moderate usage detected. Zod v4 trades slower startup for faster runtime parsing.";
2523
+ }
2524
+ if (migration === "zod->valibot") {
2525
+ return "Valibot offers similar runtime performance with significantly smaller bundle size. Best suited for bundle-size-sensitive applications.";
2526
+ }
2527
+ if (from === "yup" || from === "joi") {
2528
+ return `Migrating from ${from} to ${to} should have neutral or positive performance impact.`;
2529
+ }
2530
+ return "Performance impact depends on usage patterns. Review warnings for details.";
2531
+ }
2532
+ generateSummary(warnings, parseCallSites, dynamicSchemaCount) {
2533
+ const parts = [];
2534
+ parts.push(`${parseCallSites} parse/safeParse call sites`);
2535
+ if (dynamicSchemaCount > 0) {
2536
+ parts.push(`${dynamicSchemaCount} dynamic schema creation sites`);
2537
+ }
2538
+ parts.push(`${warnings.length} performance warning(s)`);
2539
+ return parts.join(", ");
2540
+ }
2541
+ };
2542
+
1630
2543
  // src/plugin-loader.ts
1631
2544
  var PluginLoader = class {
1632
2545
  async loadPlugins(pluginPaths) {
@@ -1672,8 +2585,8 @@ var PluginLoader = class {
1672
2585
  };
1673
2586
 
1674
2587
  // src/standard-schema.ts
1675
- import { existsSync as existsSync7, readFileSync as readFileSync7 } from "fs";
1676
- import { join as join7 } from "path";
2588
+ import { existsSync as existsSync8, readFileSync as readFileSync8 } from "fs";
2589
+ import { join as join8 } from "path";
1677
2590
  var STANDARD_SCHEMA_LIBRARIES = {
1678
2591
  zod: { minMajor: 3, minMinor: 23 },
1679
2592
  // Zod v3.23+ and v4+
@@ -1702,16 +2615,16 @@ function isVersionCompatible(version, minMajor, minMinor) {
1702
2615
  return false;
1703
2616
  }
1704
2617
  function detectStandardSchema(projectPath) {
1705
- const pkgPath = join7(projectPath, "package.json");
1706
- if (!existsSync7(pkgPath)) {
1707
- return { detected: false, compatibleLibraries: [], recommendation: "" };
2618
+ const pkgPath = join8(projectPath, "package.json");
2619
+ if (!existsSync8(pkgPath)) {
2620
+ return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
1708
2621
  }
1709
2622
  let allDeps = {};
1710
2623
  try {
1711
- const pkg = JSON.parse(readFileSync7(pkgPath, "utf-8"));
2624
+ const pkg = JSON.parse(readFileSync8(pkgPath, "utf-8"));
1712
2625
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
1713
2626
  } catch {
1714
- return { detected: false, compatibleLibraries: [], recommendation: "" };
2627
+ return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
1715
2628
  }
1716
2629
  const hasExplicitStandardSchema = "@standard-schema/spec" in allDeps;
1717
2630
  const compatibleLibraries = [];
@@ -1730,9 +2643,155 @@ function detectStandardSchema(projectPath) {
1730
2643
  } else if (hasExplicitStandardSchema) {
1731
2644
  recommendation = "Standard Schema spec detected. Ensure your validation library supports Standard Schema for maximum interoperability.";
1732
2645
  }
1733
- return { detected, compatibleLibraries, recommendation };
2646
+ let adoptionPath;
2647
+ if (detected && !hasExplicitStandardSchema) {
2648
+ adoptionPath = "Install @standard-schema/spec for explicit Standard Schema support. This enables library-agnostic validation consumers to accept your schemas without depending on a specific library. Run: npm install @standard-schema/spec";
2649
+ } else if (!detected) {
2650
+ adoptionPath = "Consider migrating to a Standard Schema-compatible library (Zod v3.23+, Valibot v1+, ArkType v2+) to future-proof your validation layer and reduce library lock-in.";
2651
+ }
2652
+ const interopTools = detected ? [
2653
+ "tRPC v11+ (Standard Schema input validation)",
2654
+ "TanStack Form (schema-agnostic validation)",
2655
+ "TanStack Router (route parameter validation)",
2656
+ "Hono (request validation middleware)",
2657
+ "Conform (progressive form validation)",
2658
+ "Nuxt (runtime config validation)"
2659
+ ] : [];
2660
+ return { detected, compatibleLibraries, recommendation, adoptionPath, interopTools };
1734
2661
  }
1735
2662
 
2663
+ // src/test-scaffolder.ts
2664
+ var TestScaffolder = class {
2665
+ scaffold(sourceFiles, from, to) {
2666
+ const tests = [];
2667
+ let totalSchemas = 0;
2668
+ for (const file of sourceFiles) {
2669
+ const schemas = this.extractSchemaNames(file, from);
2670
+ if (schemas.length === 0) continue;
2671
+ totalSchemas += schemas.length;
2672
+ const testCode = this.generateTestFile(file, schemas, from, to);
2673
+ const filePath = file.getFilePath().replace(/\.tsx?$/, ".migration-test.ts");
2674
+ tests.push({ filePath, testCode, schemaCount: schemas.length });
2675
+ }
2676
+ const summary = tests.length > 0 ? `Generated ${tests.length} test file(s) covering ${totalSchemas} schema(s) for ${from}->${to} migration.` : "No schemas found to generate tests for.";
2677
+ return { tests, totalSchemas, summary };
2678
+ }
2679
+ extractSchemaNames(file, library) {
2680
+ const names = [];
2681
+ const prefixes = this.getLibraryPrefixes(library);
2682
+ for (const varDecl of file.getVariableDeclarations()) {
2683
+ const initializer = varDecl.getInitializer();
2684
+ if (!initializer) continue;
2685
+ const text = initializer.getText();
2686
+ if (prefixes.some((p) => text.startsWith(p))) {
2687
+ names.push(varDecl.getName());
2688
+ }
2689
+ }
2690
+ return names;
2691
+ }
2692
+ getLibraryPrefixes(library) {
2693
+ switch (library) {
2694
+ case "zod":
2695
+ case "zod-v3":
2696
+ return ["z.", "zod."];
2697
+ case "yup":
2698
+ return ["yup.", "Yup."];
2699
+ case "joi":
2700
+ return ["Joi.", "joi."];
2701
+ case "io-ts":
2702
+ return ["t."];
2703
+ case "valibot":
2704
+ return ["v.", "valibot."];
2705
+ default:
2706
+ return ["z."];
2707
+ }
2708
+ }
2709
+ generateTestFile(file, schemaNames, from, to) {
2710
+ const relativePath = file.getFilePath();
2711
+ const schemaImports = schemaNames.join(", ");
2712
+ const parseMethod = this.getParseMethod(to);
2713
+ const errorClass = this.getErrorClass(to);
2714
+ const testCases = schemaNames.map((name) => this.generateSchemaTests(name, to, parseMethod, errorClass)).join("\n\n");
2715
+ return `/**
2716
+ * Migration validation tests for ${from} -> ${to}
2717
+ * Auto-generated by SchemaShift
2718
+ *
2719
+ * These tests verify that schema behavior is preserved after migration.
2720
+ * Run before and after migration to ensure equivalence.
2721
+ *
2722
+ * Source: ${relativePath}
2723
+ */
2724
+ import { describe, expect, it } from 'vitest';
2725
+ import { ${schemaImports} } from '${relativePath.replace(/\.ts$/, ".js")}';
2726
+
2727
+ describe('Migration validation: ${relativePath}', () => {
2728
+ ${testCases}
2729
+ });
2730
+ `;
2731
+ }
2732
+ getParseMethod(to) {
2733
+ switch (to) {
2734
+ case "valibot":
2735
+ return "v.safeParse";
2736
+ default:
2737
+ return ".safeParse";
2738
+ }
2739
+ }
2740
+ getErrorClass(to) {
2741
+ switch (to) {
2742
+ case "valibot":
2743
+ return "ValiError";
2744
+ case "zod":
2745
+ case "v4":
2746
+ return "ZodError";
2747
+ default:
2748
+ return "Error";
2749
+ }
2750
+ }
2751
+ generateSchemaTests(schemaName, to, _parseMethod, _errorClass) {
2752
+ if (to === "valibot") {
2753
+ return ` describe('${schemaName}', () => {
2754
+ it('should accept valid data', () => {
2755
+ // TODO(schemashift): Add valid test data for ${schemaName}
2756
+ // const result = v.safeParse(${schemaName}, validData);
2757
+ // expect(result.success).toBe(true);
2758
+ });
2759
+
2760
+ it('should reject invalid data', () => {
2761
+ // TODO(schemashift): Add invalid test data for ${schemaName}
2762
+ // const result = v.safeParse(${schemaName}, invalidData);
2763
+ // expect(result.success).toBe(false);
2764
+ });
2765
+
2766
+ it('should preserve error messages', () => {
2767
+ // TODO(schemashift): Verify custom error messages are preserved
2768
+ // const result = v.safeParse(${schemaName}, invalidData);
2769
+ // expect(result.issues?.[0]?.message).toContain('expected message');
2770
+ });
2771
+ });`;
2772
+ }
2773
+ return ` describe('${schemaName}', () => {
2774
+ it('should accept valid data', () => {
2775
+ // TODO(schemashift): Add valid test data for ${schemaName}
2776
+ // const result = ${schemaName}.safeParse(validData);
2777
+ // expect(result.success).toBe(true);
2778
+ });
2779
+
2780
+ it('should reject invalid data', () => {
2781
+ // TODO(schemashift): Add invalid test data for ${schemaName}
2782
+ // const result = ${schemaName}.safeParse(invalidData);
2783
+ // expect(result.success).toBe(false);
2784
+ });
2785
+
2786
+ it('should preserve error messages', () => {
2787
+ // TODO(schemashift): Verify custom error messages are preserved
2788
+ // const result = ${schemaName}.safeParse(invalidData);
2789
+ // expect(result.error?.issues[0]?.message).toContain('expected message');
2790
+ });
2791
+ });`;
2792
+ }
2793
+ };
2794
+
1736
2795
  // src/transform.ts
1737
2796
  var TransformEngine = class {
1738
2797
  handlers = /* @__PURE__ */ new Map();
@@ -1747,9 +2806,10 @@ var TransformEngine = class {
1747
2806
  }
1748
2807
  getSupportedPaths() {
1749
2808
  return Array.from(this.handlers.keys()).map((key) => {
1750
- const [from, to] = key.split("->");
1751
- return { from, to };
1752
- });
2809
+ const parts = key.split("->");
2810
+ if (parts.length !== 2) return null;
2811
+ return { from: parts[0], to: parts[1] };
2812
+ }).filter((entry) => entry !== null);
1753
2813
  }
1754
2814
  transform(sourceFile, from, to, options) {
1755
2815
  const handler = this.getHandler(from, to);
@@ -1765,7 +2825,136 @@ var TransformEngine = class {
1765
2825
  return handler.transform(sourceFile, options);
1766
2826
  }
1767
2827
  };
2828
+
2829
+ // src/type-dedup-detector.ts
2830
+ import { Node } from "ts-morph";
2831
+ var TypeDedupDetector = class {
2832
+ detect(sourceFiles) {
2833
+ const typeDefinitions = this.collectTypeDefinitions(sourceFiles);
2834
+ const schemaDefinitions = this.collectSchemaDefinitions(sourceFiles);
2835
+ const candidates = this.findMatches(typeDefinitions, schemaDefinitions);
2836
+ const summary = candidates.length > 0 ? `Found ${candidates.length} type definition(s) that may duplicate schema shapes. After migration, replace with z.infer<typeof schema>.` : "No duplicate type definitions detected.";
2837
+ return { candidates, summary };
2838
+ }
2839
+ collectTypeDefinitions(sourceFiles) {
2840
+ const types = [];
2841
+ for (const file of sourceFiles) {
2842
+ const filePath = file.getFilePath();
2843
+ for (const iface of file.getInterfaces()) {
2844
+ const fields = iface.getProperties().map((p) => p.getName());
2845
+ if (fields.length > 0) {
2846
+ types.push({
2847
+ name: iface.getName(),
2848
+ fields,
2849
+ filePath,
2850
+ lineNumber: iface.getStartLineNumber()
2851
+ });
2852
+ }
2853
+ }
2854
+ for (const typeAlias of file.getTypeAliases()) {
2855
+ const typeNode = typeAlias.getTypeNode();
2856
+ if (!typeNode) continue;
2857
+ if (Node.isTypeLiteral(typeNode)) {
2858
+ const fields = typeNode.getProperties().map((p) => p.getName());
2859
+ if (fields.length > 0) {
2860
+ types.push({
2861
+ name: typeAlias.getName(),
2862
+ fields,
2863
+ filePath,
2864
+ lineNumber: typeAlias.getStartLineNumber()
2865
+ });
2866
+ }
2867
+ }
2868
+ }
2869
+ }
2870
+ return types;
2871
+ }
2872
+ collectSchemaDefinitions(sourceFiles) {
2873
+ const schemas = [];
2874
+ for (const file of sourceFiles) {
2875
+ const filePath = file.getFilePath();
2876
+ for (const varDecl of file.getVariableDeclarations()) {
2877
+ const initializer = varDecl.getInitializer();
2878
+ if (!initializer) continue;
2879
+ const text = initializer.getText();
2880
+ const isSchema = /(?:z|zod|yup|Yup|Joi|joi|t|v|valibot)\.object\s*\(/.test(text) || /Joi\.object\s*\(/.test(text);
2881
+ if (!isSchema) continue;
2882
+ const fields = this.extractSchemaFields(text);
2883
+ if (fields.length > 0) {
2884
+ schemas.push({
2885
+ name: varDecl.getName(),
2886
+ fields,
2887
+ filePath,
2888
+ lineNumber: varDecl.getStartLineNumber()
2889
+ });
2890
+ }
2891
+ }
2892
+ }
2893
+ return schemas;
2894
+ }
2895
+ extractSchemaFields(text) {
2896
+ const fields = [];
2897
+ const fieldPattern = /\b(\w+)\s*:\s*(?:z|zod|yup|Yup|Joi|joi|t|v|valibot)\./g;
2898
+ for (const match of text.matchAll(fieldPattern)) {
2899
+ if (match[1]) {
2900
+ fields.push(match[1]);
2901
+ }
2902
+ }
2903
+ return fields;
2904
+ }
2905
+ findMatches(types, schemas) {
2906
+ const candidates = [];
2907
+ for (const typeDef of types) {
2908
+ for (const schemaDef of schemas) {
2909
+ const matchedFields = this.getMatchedFields(typeDef.fields, schemaDef.fields);
2910
+ if (matchedFields.length < 2) continue;
2911
+ const typeFieldCount = typeDef.fields.length;
2912
+ const schemaFieldCount = schemaDef.fields.length;
2913
+ const matchRatio = matchedFields.length / Math.max(typeFieldCount, schemaFieldCount);
2914
+ let confidence;
2915
+ if (matchRatio >= 0.8) {
2916
+ confidence = "high";
2917
+ } else if (matchRatio >= 0.5) {
2918
+ confidence = "medium";
2919
+ } else {
2920
+ confidence = "low";
2921
+ }
2922
+ if (confidence === "low" && !this.namesRelated(typeDef.name, schemaDef.name)) {
2923
+ continue;
2924
+ }
2925
+ candidates.push({
2926
+ typeName: typeDef.name,
2927
+ typeFilePath: typeDef.filePath,
2928
+ typeLineNumber: typeDef.lineNumber,
2929
+ schemaName: schemaDef.name,
2930
+ schemaFilePath: schemaDef.filePath,
2931
+ schemaLineNumber: schemaDef.lineNumber,
2932
+ matchedFields,
2933
+ confidence,
2934
+ suggestion: `Replace "type/interface ${typeDef.name}" with "type ${typeDef.name} = z.infer<typeof ${schemaDef.name}>" (${matchedFields.length}/${typeFieldCount} fields match).`
2935
+ });
2936
+ }
2937
+ }
2938
+ candidates.sort((a, b) => {
2939
+ const confidenceOrder = { high: 0, medium: 1, low: 2 };
2940
+ const diff = confidenceOrder[a.confidence] - confidenceOrder[b.confidence];
2941
+ if (diff !== 0) return diff;
2942
+ return b.matchedFields.length - a.matchedFields.length;
2943
+ });
2944
+ return candidates;
2945
+ }
2946
+ getMatchedFields(typeFields, schemaFields) {
2947
+ const schemaSet = new Set(schemaFields);
2948
+ return typeFields.filter((f) => schemaSet.has(f));
2949
+ }
2950
+ namesRelated(typeName, schemaName) {
2951
+ const normalize = (name) => name.toLowerCase().replace(/schema|type|interface|i$/gi, "");
2952
+ return normalize(typeName) === normalize(schemaName);
2953
+ }
2954
+ };
1768
2955
  export {
2956
+ BehavioralWarningAnalyzer,
2957
+ BundleEstimator,
1769
2958
  CompatibilityAnalyzer,
1770
2959
  ComplexityEstimator,
1771
2960
  DetailedAnalyzer,
@@ -1773,14 +2962,19 @@ export {
1773
2962
  FormResolverMigrator,
1774
2963
  GovernanceEngine,
1775
2964
  IncrementalTracker,
2965
+ MigrationAuditLog,
1776
2966
  MigrationChain,
1777
2967
  MonorepoResolver,
1778
2968
  PackageUpdater,
2969
+ PerformanceAnalyzer,
1779
2970
  PluginLoader,
1780
2971
  SchemaAnalyzer,
1781
2972
  SchemaDependencyResolver,
2973
+ TestScaffolder,
1782
2974
  TransformEngine,
2975
+ TypeDedupDetector,
1783
2976
  buildCallChain,
2977
+ computeParallelBatches,
1784
2978
  detectFormLibraries,
1785
2979
  detectSchemaLibrary,
1786
2980
  detectStandardSchema,