@schemashift/core 0.8.0 → 0.10.0

This diff shows the changes between publicly released versions of the package as they appear in the public registry, and is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -10,6 +10,9 @@ var LIBRARY_PATTERNS = {
10
10
  joi: [/^joi$/, /^@hapi\/joi$/],
11
11
  "io-ts": [/^io-ts$/, /^io-ts\//],
12
12
  valibot: [/^valibot$/],
13
+ arktype: [/^arktype$/],
14
+ superstruct: [/^superstruct$/],
15
+ effect: [/^@effect\/schema$/],
13
16
  v4: [],
14
17
  // Target version, not detectable from imports
15
18
  unknown: []
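The three new entries extend import-based detection to ArkType, Superstruct, and Effect Schema. A minimal sketch of how a specifier lookup over this kind of table behaves; `detectLibrary` below is an illustrative stand-in, not necessarily the package's exported API:

```ts
// Illustrative only: maps an import specifier to a library key using the same
// kind of regex table shown above (trimmed to the newly added entries).
const PATTERNS: Record<string, RegExp[]> = {
  arktype: [/^arktype$/],
  superstruct: [/^superstruct$/],
  effect: [/^@effect\/schema$/],
};

function detectLibrary(specifier: string): string | undefined {
  for (const [library, patterns] of Object.entries(PATTERNS)) {
    if (patterns.some((p) => p.test(specifier))) return library;
  }
  return undefined;
}

detectLibrary("@effect/schema"); // "effect"
detectLibrary("arktype");        // "arktype"
detectLibrary("zod");            // undefined (not in this trimmed table)
```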
@@ -264,6 +267,521 @@ function transformMethodChain(chain, newBase, factoryMapper, methodMapper) {
264
267
  return buildCallChain(newBase, factory.name, factory.args, mappedMethods);
265
268
  }
266
269
 
270
+ // src/audit-log.ts
271
+ import { createHash } from "crypto";
272
+ import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
273
+ import { join } from "path";
274
+ var AUDIT_DIR = ".schemashift";
275
+ var AUDIT_FILE = "audit-log.json";
276
+ var AUDIT_VERSION = 1;
277
+ var MigrationAuditLog = class {
278
+ logDir;
279
+ logPath;
280
+ constructor(projectPath) {
281
+ this.logDir = join(projectPath, AUDIT_DIR);
282
+ this.logPath = join(this.logDir, AUDIT_FILE);
283
+ }
284
+ /**
285
+ * Append a new entry to the audit log.
286
+ */
287
+ append(entry) {
288
+ const log = this.read();
289
+ log.entries.push(entry);
290
+ this.write(log);
291
+ }
292
+ /**
293
+ * Create an audit entry for a file transformation.
294
+ */
295
+ createEntry(params) {
296
+ return {
297
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
298
+ migrationId: params.migrationId,
299
+ filePath: params.filePath,
300
+ action: "transform",
301
+ from: params.from,
302
+ to: params.to,
303
+ success: params.success,
304
+ beforeHash: this.hashContent(params.originalCode),
305
+ afterHash: params.transformedCode ? this.hashContent(params.transformedCode) : void 0,
306
+ warningCount: params.warningCount,
307
+ errorCount: params.errorCount,
308
+ riskScore: params.riskScore,
309
+ duration: params.duration,
310
+ user: this.getCurrentUser(),
311
+ metadata: params.metadata || this.collectMetadata()
312
+ };
313
+ }
314
+ /**
315
+ * Read the current audit log.
316
+ */
317
+ read() {
318
+ if (!existsSync(this.logPath)) {
319
+ return { version: AUDIT_VERSION, entries: [] };
320
+ }
321
+ try {
322
+ const content = readFileSync(this.logPath, "utf-8");
323
+ if (!content.trim()) {
324
+ return { version: AUDIT_VERSION, entries: [] };
325
+ }
326
+ return JSON.parse(content);
327
+ } catch {
328
+ return { version: AUDIT_VERSION, entries: [] };
329
+ }
330
+ }
331
+ /**
332
+ * Get entries for a specific migration.
333
+ */
334
+ getByMigration(migrationId) {
335
+ const log = this.read();
336
+ return log.entries.filter((e) => e.migrationId === migrationId);
337
+ }
338
+ /**
339
+ * Get summary statistics for the audit log.
340
+ */
341
+ getSummary() {
342
+ const log = this.read();
343
+ const migrationIds = new Set(log.entries.map((e) => e.migrationId));
344
+ const migrationPaths = [...new Set(log.entries.map((e) => `${e.from}->${e.to}`))];
345
+ return {
346
+ totalMigrations: migrationIds.size,
347
+ totalFiles: log.entries.length,
348
+ successCount: log.entries.filter((e) => e.success).length,
349
+ failureCount: log.entries.filter((e) => !e.success).length,
350
+ migrationPaths
351
+ };
352
+ }
353
+ /**
354
+ * Export audit log as JSON string.
355
+ */
356
+ exportJson() {
357
+ const log = this.read();
358
+ return JSON.stringify(log, null, 2);
359
+ }
360
+ /**
361
+ * Export audit log as CSV string.
362
+ */
363
+ exportCsv() {
364
+ const log = this.read();
365
+ const headers = [
366
+ "timestamp",
367
+ "migrationId",
368
+ "filePath",
369
+ "action",
370
+ "from",
371
+ "to",
372
+ "success",
373
+ "warningCount",
374
+ "errorCount",
375
+ "riskScore",
376
+ "user",
377
+ "duration"
378
+ ];
379
+ const rows = log.entries.map(
380
+ (e) => headers.map((h) => {
381
+ const val = e[h];
382
+ if (val === void 0 || val === null) return "";
383
+ return String(val).includes(",") ? `"${String(val)}"` : String(val);
384
+ }).join(",")
385
+ );
386
+ return [headers.join(","), ...rows].join("\n");
387
+ }
388
+ /**
389
+ * Get entries filtered by date range.
390
+ */
391
+ getByDateRange(start, end) {
392
+ const log = this.read();
393
+ return log.entries.filter((e) => {
394
+ const ts = new Date(e.timestamp);
395
+ return ts >= start && ts <= end;
396
+ });
397
+ }
398
+ /**
399
+ * Clear the audit log.
400
+ */
401
+ clear() {
402
+ this.write({ version: AUDIT_VERSION, entries: [] });
403
+ }
404
+ collectMetadata() {
405
+ return {
406
+ hostname: process.env.HOSTNAME || void 0,
407
+ nodeVersion: process.version,
408
+ ciJobId: process.env.CI_JOB_ID || process.env.GITHUB_RUN_ID || void 0,
409
+ ciProvider: process.env.GITHUB_ACTIONS ? "github" : process.env.GITLAB_CI ? "gitlab" : process.env.CIRCLECI ? "circleci" : process.env.JENKINS_URL ? "jenkins" : void 0,
410
+ gitBranch: process.env.GITHUB_REF_NAME || process.env.CI_COMMIT_BRANCH || void 0,
411
+ gitCommit: process.env.GITHUB_SHA || process.env.CI_COMMIT_SHA || void 0
412
+ };
413
+ }
414
+ write(log) {
415
+ if (!existsSync(this.logDir)) {
416
+ mkdirSync(this.logDir, { recursive: true });
417
+ }
418
+ writeFileSync(this.logPath, JSON.stringify(log, null, 2));
419
+ }
420
+ hashContent(content) {
421
+ return createHash("sha256").update(content).digest("hex").substring(0, 16);
422
+ }
423
+ getCurrentUser() {
424
+ return process.env.USER || process.env.USERNAME || void 0;
425
+ }
426
+ };
427
+
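The audit log records one entry per transformed file under `.schemashift/audit-log.json`. A usage sketch, assuming `MigrationAuditLog` is re-exported from the package entry point (only the bundled `dist/index.js` is visible in this diff) and using hypothetical file names and values:

```ts
import { MigrationAuditLog } from "@schemashift/core"; // assumed export

const audit = new MigrationAuditLog(process.cwd()); // writes .schemashift/audit-log.json

const entry = audit.createEntry({
  migrationId: "yup-to-zod-2024-06-01",  // hypothetical id
  filePath: "src/schemas/user.ts",       // hypothetical file
  from: "yup",
  to: "zod",
  success: true,
  originalCode: "const user = yup.object({ name: yup.string() });",
  transformedCode: "const user = z.object({ name: z.string() });",
  warningCount: 0,
  errorCount: 0,
  riskScore: 1,
  duration: 42,
});

audit.append(entry);

console.log(audit.getSummary()); // { totalMigrations, totalFiles, successCount, failureCount, migrationPaths }
console.log(audit.exportCsv());  // header row plus one data row
```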
428
+ // src/behavioral-warnings.ts
429
+ var BEHAVIORAL_RULES = [
430
+ // Yup -> Zod: Type coercion differences
431
+ {
432
+ category: "type-coercion",
433
+ migrations: ["yup->zod"],
434
+ detect: (text, filePath) => {
435
+ const warnings = [];
436
+ if (/yup\.(number|date)\s*\(\)/.test(text)) {
437
+ warnings.push({
438
+ category: "type-coercion",
439
+ message: "Yup silently coerces types; Zod rejects mismatches.",
440
+ detail: `Yup's number() accepts strings like "42" and coerces them. Zod's number() rejects strings. Use z.coerce.number() for equivalent behavior, especially for HTML form inputs which always return strings.`,
441
+ filePath,
442
+ severity: "warning",
443
+ migration: "yup->zod"
444
+ });
445
+ }
446
+ return warnings;
447
+ }
448
+ },
449
+ // Yup -> Zod: Form input string values
450
+ {
451
+ category: "form-input",
452
+ migrations: ["yup->zod"],
453
+ detect: (text, filePath) => {
454
+ const warnings = [];
455
+ const hasFormImport = /yupResolver|useFormik|from\s+['"]formik['"]|from\s+['"]@hookform/.test(
456
+ text
457
+ );
458
+ const hasNumberOrDate = /yup\.(number|date)\s*\(\)/.test(text);
459
+ if (hasFormImport && hasNumberOrDate) {
460
+ warnings.push({
461
+ category: "form-input",
462
+ message: "HTML inputs return strings \u2014 Zod will reject unless using z.coerce.*",
463
+ detail: 'HTML <input type="number"> returns strings. Yup coerces automatically, but Zod requires explicit coercion. Use z.coerce.number() or register({ valueAsNumber: true }) in React Hook Form.',
464
+ filePath,
465
+ severity: "error",
466
+ migration: "yup->zod"
467
+ });
468
+ }
469
+ return warnings;
470
+ }
471
+ },
472
+ // Joi -> Zod: Error handling paradigm shift
473
+ {
474
+ category: "error-handling",
475
+ migrations: ["joi->zod"],
476
+ detect: (text, filePath) => {
477
+ const warnings = [];
478
+ if (/\.validate\s*\(/.test(text) && /[Jj]oi/.test(text)) {
479
+ warnings.push({
480
+ category: "error-handling",
481
+ message: "Joi .validate() returns { value, error }; Zod .parse() throws.",
482
+ detail: "Joi uses an inspection pattern: .validate() returns an object with value and error. Zod .parse() throws a ZodError on failure. Use .safeParse() for a non-throwing equivalent that returns { success, data, error }.",
483
+ filePath,
484
+ severity: "warning",
485
+ migration: "joi->zod"
486
+ });
487
+ }
488
+ return warnings;
489
+ }
490
+ },
491
+ // Joi -> Zod: Null handling differences
492
+ {
493
+ category: "null-handling",
494
+ migrations: ["joi->zod"],
495
+ detect: (text, filePath) => {
496
+ const warnings = [];
497
+ if (/\.allow\s*\(\s*null\s*\)/.test(text)) {
498
+ warnings.push({
499
+ category: "null-handling",
500
+ message: "Joi .allow(null) vs Zod .nullable() have subtle differences.",
501
+ detail: 'Joi .allow(null) permits null alongside the base type. Zod .nullable() wraps the type in a union with null. Joi .allow("", null) has no single Zod equivalent \u2014 use z.union() or .transform().',
502
+ filePath,
503
+ severity: "info",
504
+ migration: "joi->zod"
505
+ });
506
+ }
507
+ return warnings;
508
+ }
509
+ },
510
+ // Zod v3 -> v4: Default value behavior change
511
+ {
512
+ category: "default-values",
513
+ migrations: ["zod-v3->v4"],
514
+ detect: (text, filePath) => {
515
+ const warnings = [];
516
+ if (/\.default\s*\(/.test(text) && /\.optional\s*\(\)/.test(text)) {
517
+ warnings.push({
518
+ category: "default-values",
519
+ message: ".default() + .optional() behavior changed silently in Zod v4.",
520
+ detail: "In Zod v3, .default(val).optional() returned undefined when property was missing. In Zod v4, it always returns the default value. This can cause unexpected behavior in API responses and form handling.",
521
+ filePath,
522
+ severity: "error",
523
+ migration: "zod-v3->v4"
524
+ });
525
+ }
526
+ if (/\.catch\s*\(/.test(text) && /\.optional\s*\(\)/.test(text)) {
527
+ warnings.push({
528
+ category: "default-values",
529
+ message: ".catch() + .optional() behavior changed in Zod v4.",
530
+ detail: "In Zod v4, object properties with .catch() that are .optional() now always return the caught value, even when the property is missing from input.",
531
+ filePath,
532
+ severity: "warning",
533
+ migration: "zod-v3->v4"
534
+ });
535
+ }
536
+ return warnings;
537
+ }
538
+ },
539
+ // Zod v3 -> v4: Error format differences
540
+ {
541
+ category: "error-format",
542
+ migrations: ["zod-v3->v4"],
543
+ detect: (text, filePath) => {
544
+ const warnings = [];
545
+ if (/ZodError/.test(text) && /instanceof\s+Error/.test(text)) {
546
+ warnings.push({
547
+ category: "error-format",
548
+ message: "ZodError no longer extends Error in Zod v4.",
549
+ detail: 'In Zod v4, ZodError no longer extends Error. Code using "instanceof Error" to catch ZodErrors will silently miss them. Use "instanceof ZodError" or z.isZodError() instead.',
550
+ filePath,
551
+ severity: "error",
552
+ migration: "zod-v3->v4"
553
+ });
554
+ }
555
+ return warnings;
556
+ }
557
+ },
558
+ // Zod v3 -> v4: Validation behavior differences
559
+ {
560
+ category: "validation-behavior",
561
+ migrations: ["zod-v3->v4"],
562
+ detect: (text, filePath) => {
563
+ const warnings = [];
564
+ if (/\.transform\s*\(/.test(text) && /\.refine\s*\(/.test(text)) {
565
+ warnings.push({
566
+ category: "validation-behavior",
567
+ message: ".transform() after .refine() behavior changed in Zod v4.",
568
+ detail: "In Zod v4, .transform() after .refine() may execute even if the refinement fails. Previously, transform was skipped on refinement failure.",
569
+ filePath,
570
+ severity: "warning",
571
+ migration: "zod-v3->v4"
572
+ });
573
+ }
574
+ return warnings;
575
+ }
576
+ },
577
+ // Zod -> Valibot: Error handling differences
578
+ {
579
+ category: "error-handling",
580
+ migrations: ["zod->valibot"],
581
+ detect: (text, filePath) => {
582
+ const warnings = [];
583
+ if (/\.parse\s*\(/.test(text) && /z\./.test(text)) {
584
+ warnings.push({
585
+ category: "error-handling",
586
+ message: "Zod .parse() throws ZodError; Valibot v.parse() throws ValiError.",
587
+ detail: "Error class and structure differ between Zod and Valibot. ZodError has .issues array; ValiError has .issues with different structure. Update all error handling code that inspects validation errors.",
588
+ filePath,
589
+ severity: "warning",
590
+ migration: "zod->valibot"
591
+ });
592
+ }
593
+ return warnings;
594
+ }
595
+ },
596
+ // io-ts -> Zod: Either monad vs throw/safeParse
597
+ {
598
+ category: "error-handling",
599
+ migrations: ["io-ts->zod"],
600
+ detect: (text, filePath) => {
601
+ const warnings = [];
602
+ if (/\bEither\b/.test(text) || /\b(fold|chain|map)\s*\(/.test(text)) {
603
+ warnings.push({
604
+ category: "error-handling",
605
+ message: "io-ts uses Either monad for errors; Zod uses throw/safeParse.",
606
+ detail: "io-ts returns Either<Errors, T> (Right for success, Left for failure). Zod .parse() throws, .safeParse() returns { success, data, error }. All fold/chain/map patterns over Either must be rewritten.",
607
+ filePath,
608
+ severity: "error",
609
+ migration: "io-ts->zod"
610
+ });
611
+ }
612
+ return warnings;
613
+ }
614
+ }
615
+ ];
616
+ var BehavioralWarningAnalyzer = class {
617
+ analyze(sourceFiles, from, to) {
618
+ const migration = `${from}->${to}`;
619
+ const warnings = [];
620
+ const applicableRules = BEHAVIORAL_RULES.filter((r) => r.migrations.includes(migration));
621
+ for (const sourceFile of sourceFiles) {
622
+ const filePath = sourceFile.getFilePath();
623
+ const text = sourceFile.getFullText();
624
+ const hasSourceLib = this.fileUsesLibrary(sourceFile, from);
625
+ if (!hasSourceLib) continue;
626
+ for (const rule of applicableRules) {
627
+ const ruleWarnings = rule.detect(text, filePath);
628
+ warnings.push(...ruleWarnings);
629
+ }
630
+ }
631
+ const summary = this.generateSummary(warnings, migration);
632
+ return { warnings, migrationPath: migration, summary };
633
+ }
634
+ fileUsesLibrary(sourceFile, library) {
635
+ for (const imp of sourceFile.getImportDeclarations()) {
636
+ const detected = detectSchemaLibrary(imp.getModuleSpecifierValue());
637
+ if (detected === library) return true;
638
+ if (library === "zod-v3" && detected === "zod") return true;
639
+ if (library === "zod" && detected === "zod") return true;
640
+ }
641
+ return false;
642
+ }
643
+ generateSummary(warnings, migration) {
644
+ if (warnings.length === 0) {
645
+ return `No behavioral differences detected for ${migration} migration.`;
646
+ }
647
+ const errorCount = warnings.filter((w) => w.severity === "error").length;
648
+ const warningCount = warnings.filter((w) => w.severity === "warning").length;
649
+ const infoCount = warnings.filter((w) => w.severity === "info").length;
650
+ const parts = [];
651
+ if (errorCount > 0) parts.push(`${errorCount} critical`);
652
+ if (warningCount > 0) parts.push(`${warningCount} warnings`);
653
+ if (infoCount > 0) parts.push(`${infoCount} info`);
654
+ return `Found ${warnings.length} behavioral difference(s) for ${migration}: ${parts.join(", ")}. Review before migrating.`;
655
+ }
656
+ };
657
+
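The analyzer walks ts-morph source files and applies only the rules whose `migrations` list matches the requested path. A sketch, assuming `BehavioralWarningAnalyzer` is exported and that source files come from a ts-morph `Project` (the package already uses ts-morph, as seen in `src/chain.ts`):

```ts
import { Project } from "ts-morph";
import { BehavioralWarningAnalyzer } from "@schemashift/core"; // assumed export

const project = new Project({ tsConfigFilePath: "tsconfig.json" });
const analyzer = new BehavioralWarningAnalyzer();

// Only files that actually import yup are scanned; rules for other
// migration paths (joi->zod, zod-v3->v4, ...) are skipped.
const report = analyzer.analyze(project.getSourceFiles("src/**/*.ts"), "yup", "zod");

console.log(report.summary);
for (const w of report.warnings) {
  console.log(`[${w.severity}] ${w.filePath}: ${w.message}`);
}
```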
658
+ // src/bundle-estimator.ts
659
+ var LIBRARY_SIZES = {
660
+ zod: { fullKb: 14, baseKb: 14, treeShakable: false },
661
+ "zod-v3": { fullKb: 14, baseKb: 14, treeShakable: false },
662
+ v4: { fullKb: 17.7, baseKb: 17.7, treeShakable: false },
663
+ "zod-v4": { fullKb: 17.7, baseKb: 17.7, treeShakable: false },
664
+ "zod-mini": { fullKb: 7.5, baseKb: 3.5, treeShakable: true },
665
+ yup: { fullKb: 13.6, baseKb: 13.6, treeShakable: false },
666
+ joi: { fullKb: 29.7, baseKb: 29.7, treeShakable: false },
667
+ "io-ts": { fullKb: 6.5, baseKb: 6.5, treeShakable: true },
668
+ valibot: { fullKb: 5.8, baseKb: 1.4, treeShakable: true }
669
+ };
670
+ var VALIDATOR_OVERHEAD = {
671
+ valibot: 0.05
672
+ };
673
+ var COMMON_VALIDATORS = /* @__PURE__ */ new Set([
674
+ "string",
675
+ "number",
676
+ "boolean",
677
+ "object",
678
+ "array",
679
+ "optional",
680
+ "nullable",
681
+ "enum",
682
+ "union",
683
+ "literal",
684
+ "date",
685
+ "email",
686
+ "url",
687
+ "uuid",
688
+ "min",
689
+ "max",
690
+ "regex",
691
+ "transform",
692
+ "refine",
693
+ "default",
694
+ "record",
695
+ "tuple",
696
+ "lazy",
697
+ "discriminatedUnion",
698
+ "intersection",
699
+ "partial",
700
+ "pick",
701
+ "omit",
702
+ "brand",
703
+ "pipe"
704
+ ]);
705
+ var BundleEstimator = class {
706
+ estimate(sourceFiles, from, to) {
707
+ const usedValidators = this.countUsedValidators(sourceFiles);
708
+ const fromInfo = this.getLibraryInfo(from, usedValidators);
709
+ const toInfo = this.getLibraryInfo(to, usedValidators);
710
+ const estimatedDelta = toInfo.estimatedUsedKb - fromInfo.estimatedUsedKb;
711
+ const deltaPercent = fromInfo.estimatedUsedKb > 0 ? Math.round(estimatedDelta / fromInfo.estimatedUsedKb * 100) : 0;
712
+ const caveats = this.generateCaveats(from, to, usedValidators);
713
+ const summary = this.generateSummary(fromInfo, toInfo, estimatedDelta, deltaPercent);
714
+ return {
715
+ from: fromInfo,
716
+ to: toInfo,
717
+ estimatedDelta,
718
+ deltaPercent,
719
+ summary,
720
+ caveats
721
+ };
722
+ }
723
+ countUsedValidators(sourceFiles) {
724
+ const usedSet = /* @__PURE__ */ new Set();
725
+ for (const file of sourceFiles) {
726
+ const text = file.getFullText();
727
+ for (const validator of COMMON_VALIDATORS) {
728
+ const pattern = new RegExp(`\\.${validator}\\s*[(<]`, "g");
729
+ if (pattern.test(text)) {
730
+ usedSet.add(validator);
731
+ }
732
+ }
733
+ }
734
+ return usedSet.size;
735
+ }
736
+ getLibraryInfo(library, usedValidators) {
737
+ const sizeKey = library === "zod-v3" ? "zod" : library;
738
+ const sizes = LIBRARY_SIZES[sizeKey] ?? { fullKb: 10, baseKb: 10, treeShakable: false };
739
+ let estimatedUsedKb;
740
+ if (sizes.treeShakable) {
741
+ const overhead = VALIDATOR_OVERHEAD[sizeKey] ?? 0.05;
742
+ estimatedUsedKb = Math.min(sizes.baseKb + usedValidators * overhead, sizes.fullKb);
743
+ } else {
744
+ estimatedUsedKb = sizes.fullKb;
745
+ }
746
+ return {
747
+ library: sizeKey,
748
+ minifiedGzipKb: sizes.fullKb,
749
+ treeShakable: sizes.treeShakable,
750
+ estimatedUsedKb: Math.round(estimatedUsedKb * 10) / 10
751
+ };
752
+ }
753
+ generateCaveats(from, to, _usedValidators) {
754
+ const caveats = [
755
+ "Sizes are estimates based on minified+gzipped bundle analysis.",
756
+ "Actual impact depends on bundler configuration, tree-shaking, and code splitting."
757
+ ];
758
+ if (to === "valibot") {
759
+ caveats.push(
760
+ "Valibot is fully tree-shakable \u2014 actual size depends on which validators you use."
761
+ );
762
+ caveats.push(
763
+ "Some developers report smaller-than-expected savings (6kB or less) in real projects."
764
+ );
765
+ }
766
+ if (from === "zod-v3" && to === "v4") {
767
+ caveats.push(
768
+ "Zod v4 is ~26% larger than v3 due to JIT compilation engine. Consider zod/mini for size-sensitive apps."
769
+ );
770
+ }
771
+ if (from === "joi") {
772
+ caveats.push(
773
+ "Joi is the largest schema library. Any migration will likely reduce bundle size."
774
+ );
775
+ }
776
+ return caveats;
777
+ }
778
+ generateSummary(from, to, delta, deltaPercent) {
779
+ const direction = delta > 0 ? "increase" : delta < 0 ? "decrease" : "no change";
780
+ const absDelta = Math.abs(Math.round(delta * 10) / 10);
781
+ return `Estimated bundle ${direction}: ${from.library} (${from.estimatedUsedKb}kB) \u2192 ${to.library} (${to.estimatedUsedKb}kB) = ${delta > 0 ? "+" : delta < 0 ? "-" : ""}${absDelta}kB (${deltaPercent > 0 ? "+" : ""}${deltaPercent}%)`;
782
+ }
783
+ };
784
+
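The estimator combines a hard-coded size table with a rough count of validator kinds used in the project; for tree-shakable targets it adds a small per-validator overhead to the base size instead of charging the full bundle. A sketch, again assuming the class is exported:

```ts
import { Project } from "ts-morph";
import { BundleEstimator } from "@schemashift/core"; // assumed export

const project = new Project({ tsConfigFilePath: "tsconfig.json" });
const estimator = new BundleEstimator();

const result = estimator.estimate(project.getSourceFiles("src/**/*.ts"), "zod", "valibot");

console.log(result.summary);
// e.g. "Estimated bundle decrease: zod (14kB) → valibot (2.8kB) = -11.2kB (-80%)"
// (figures depend on how many validator kinds the project uses)
for (const caveat of result.caveats) console.log(`- ${caveat}`);
```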
267
785
  // src/chain.ts
268
786
  import { Project as Project2 } from "ts-morph";
269
787
  var MigrationChain = class {
@@ -330,12 +848,12 @@ var MigrationChain = class {
330
848
  };
331
849
 
332
850
  // src/compatibility.ts
333
- import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
334
- import { join as join2 } from "path";
851
+ import { existsSync as existsSync3, readFileSync as readFileSync3 } from "fs";
852
+ import { join as join3 } from "path";
335
853
 
336
854
  // src/ecosystem.ts
337
- import { existsSync, readFileSync } from "fs";
338
- import { join } from "path";
855
+ import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
856
+ import { join as join2 } from "path";
339
857
  var ECOSYSTEM_RULES = [
340
858
  // ORM integrations
341
859
  {
@@ -488,6 +1006,177 @@ var ECOSYSTEM_RULES = [
488
1006
  severity: "warning",
489
1007
  upgradeCommand: "npm install @asteasolutions/zod-to-openapi@latest"
490
1008
  })
1009
+ },
1010
+ // AI/MCP integrations
1011
+ {
1012
+ package: "@modelcontextprotocol/sdk",
1013
+ category: "api",
1014
+ migrations: ["zod-v3->v4"],
1015
+ check: () => ({
1016
+ issue: "MCP SDK may have Zod v4 compatibility issues. MCP servers typically expect Zod v3 schemas.",
1017
+ suggestion: "Check MCP SDK release notes for Zod v4 support before upgrading. Consider staying on Zod v3 for MCP servers.",
1018
+ severity: "warning",
1019
+ upgradeCommand: "npm install @modelcontextprotocol/sdk@latest"
1020
+ })
1021
+ },
1022
+ {
1023
+ package: "@openai/agents",
1024
+ category: "api",
1025
+ migrations: ["zod-v3->v4"],
1026
+ check: () => ({
1027
+ issue: "OpenAI Agents SDK recommends pinning to zod@3.25.67 due to TS2589 errors with newer versions.",
1028
+ suggestion: "Pin zod to 3.25.67 for OpenAI Agents SDK compatibility, or wait for an SDK update with Zod v4 support.",
1029
+ severity: "error"
1030
+ })
1031
+ },
1032
+ // Zod-based HTTP/API clients
1033
+ {
1034
+ package: "zodios",
1035
+ category: "api",
1036
+ migrations: ["zod-v3->v4"],
1037
+ check: () => ({
1038
+ issue: "Zodios uses Zod schemas for API contract definitions. Zod v4 type changes may break contracts.",
1039
+ suggestion: "Upgrade Zodios to a Zod v4-compatible version and verify all API contracts.",
1040
+ severity: "warning",
1041
+ upgradeCommand: "npm install @zodios/core@latest"
1042
+ })
1043
+ },
1044
+ {
1045
+ package: "@zodios/core",
1046
+ category: "api",
1047
+ migrations: ["zod-v3->v4"],
1048
+ check: () => ({
1049
+ issue: "@zodios/core uses Zod schemas for API contract definitions. Zod v4 type changes may break contracts.",
1050
+ suggestion: "Upgrade @zodios/core to a Zod v4-compatible version and verify all API contracts.",
1051
+ severity: "warning",
1052
+ upgradeCommand: "npm install @zodios/core@latest"
1053
+ })
1054
+ },
1055
+ {
1056
+ package: "@ts-rest/core",
1057
+ category: "api",
1058
+ migrations: ["zod-v3->v4"],
1059
+ check: () => ({
1060
+ issue: "@ts-rest/core uses Zod for contract definitions. Zod v4 type incompatibilities may break runtime validation.",
1061
+ suggestion: "Upgrade @ts-rest/core to a version with Zod v4 support.",
1062
+ severity: "warning",
1063
+ upgradeCommand: "npm install @ts-rest/core@latest"
1064
+ })
1065
+ },
1066
+ {
1067
+ package: "trpc-openapi",
1068
+ category: "openapi",
1069
+ migrations: ["zod-v3->v4"],
1070
+ check: () => ({
1071
+ issue: "trpc-openapi needs a v4-compatible version for Zod v4.",
1072
+ suggestion: "Check for a Zod v4-compatible version of trpc-openapi before upgrading.",
1073
+ severity: "warning",
1074
+ upgradeCommand: "npm install trpc-openapi@latest"
1075
+ })
1076
+ },
1077
+ // Form data and URL state libraries
1078
+ {
1079
+ package: "zod-form-data",
1080
+ category: "form",
1081
+ migrations: ["zod-v3->v4"],
1082
+ check: () => ({
1083
+ issue: "zod-form-data relies on Zod v3 internals (_def) which moved to _zod.def in v4.",
1084
+ suggestion: "Upgrade zod-form-data to a Zod v4-compatible version.",
1085
+ severity: "error",
1086
+ upgradeCommand: "npm install zod-form-data@latest"
1087
+ })
1088
+ },
1089
+ {
1090
+ package: "@conform-to/zod",
1091
+ category: "form",
1092
+ migrations: ["zod-v3->v4"],
1093
+ check: () => ({
1094
+ issue: "@conform-to/zod may have Zod v4 compatibility issues.",
1095
+ suggestion: "Upgrade @conform-to/zod to the latest version with Zod v4 support.",
1096
+ severity: "warning",
1097
+ upgradeCommand: "npm install @conform-to/zod@latest"
1098
+ })
1099
+ },
1100
+ {
1101
+ package: "nuqs",
1102
+ category: "validation-util",
1103
+ migrations: ["zod-v3->v4"],
1104
+ check: () => ({
1105
+ issue: "nuqs uses Zod for URL state parsing. Zod v4 changes may affect URL parameter validation.",
1106
+ suggestion: "Upgrade nuqs to a version with Zod v4 support.",
1107
+ severity: "warning",
1108
+ upgradeCommand: "npm install nuqs@latest"
1109
+ })
1110
+ },
1111
+ // Schema library detection for cross-library migrations
1112
+ {
1113
+ package: "@effect/schema",
1114
+ category: "validation-util",
1115
+ migrations: ["io-ts->zod"],
1116
+ check: () => ({
1117
+ issue: "@effect/schema detected \u2014 this is the successor to io-ts/fp-ts. Consider migrating to Effect Schema instead of Zod if you prefer FP patterns.",
1118
+ suggestion: "If using fp-ts patterns heavily, consider Effect Schema as the migration target instead of Zod.",
1119
+ severity: "info"
1120
+ })
1121
+ },
1122
+ {
1123
+ package: "arktype",
1124
+ category: "validation-util",
1125
+ migrations: ["zod->valibot", "zod-v3->v4"],
1126
+ check: (_version, migration) => {
1127
+ if (migration === "zod->valibot") {
1128
+ return {
1129
+ issue: "ArkType detected alongside Zod. Consider ArkType as a migration target \u2014 it offers 100x faster validation and Standard Schema support.",
1130
+ suggestion: "Consider migrating to ArkType for performance-critical paths, or keep Zod for ecosystem compatibility.",
1131
+ severity: "info"
1132
+ };
1133
+ }
1134
+ return {
1135
+ issue: "ArkType detected alongside Zod. ArkType supports Standard Schema, making it interoperable with Zod v4.",
1136
+ suggestion: "No action needed \u2014 ArkType and Zod v4 can coexist via Standard Schema.",
1137
+ severity: "info"
1138
+ };
1139
+ }
1140
+ },
1141
+ {
1142
+ package: "superstruct",
1143
+ category: "validation-util",
1144
+ migrations: ["yup->zod", "joi->zod"],
1145
+ check: () => ({
1146
+ issue: "Superstruct detected in the project. Consider migrating Superstruct schemas to Zod as well for a unified validation approach.",
1147
+ suggestion: "Use SchemaShift to migrate Superstruct schemas alongside Yup/Joi schemas.",
1148
+ severity: "info"
1149
+ })
1150
+ },
1151
+ // Additional validation utilities
1152
+ {
1153
+ package: "zod-to-json-schema",
1154
+ category: "validation-util",
1155
+ migrations: ["zod-v3->v4"],
1156
+ check: (version) => {
1157
+ const majorMatch = version.match(/(\d+)/);
1158
+ const major = majorMatch?.[1] ? Number.parseInt(majorMatch[1], 10) : 0;
1159
+ if (major < 4) {
1160
+ return {
1161
+ issue: "zod-to-json-schema v3 may not fully support Zod v4 schemas.",
1162
+ suggestion: "Upgrade to zod-to-json-schema v4+ for full Zod v4 support.",
1163
+ severity: "warning",
1164
+ upgradeCommand: "npm install zod-to-json-schema@latest"
1165
+ };
1166
+ }
1167
+ return null;
1168
+ }
1169
+ },
1170
+ {
1171
+ package: "react-hook-form",
1172
+ category: "form",
1173
+ migrations: ["zod-v3->v4"],
1174
+ check: () => ({
1175
+ issue: "React Hook Form with zodResolver may throw uncaught ZodError instead of populating formState.errors with Zod v4.",
1176
+ suggestion: "Upgrade @hookform/resolvers to the latest version and test form validation thoroughly.",
1177
+ severity: "warning",
1178
+ upgradeCommand: "npm install @hookform/resolvers@latest react-hook-form@latest"
1179
+ })
491
1180
  }
492
1181
  ];
493
1182
  var EcosystemAnalyzer = class {
@@ -496,13 +1185,13 @@ var EcosystemAnalyzer = class {
496
1185
  const dependencies = [];
497
1186
  const warnings = [];
498
1187
  const blockers = [];
499
- const pkgPath = join(projectPath, "package.json");
500
- if (!existsSync(pkgPath)) {
1188
+ const pkgPath = join2(projectPath, "package.json");
1189
+ if (!existsSync2(pkgPath)) {
501
1190
  return { dependencies, warnings, blockers };
502
1191
  }
503
1192
  let allDeps = {};
504
1193
  try {
505
- const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
1194
+ const pkg = JSON.parse(readFileSync2(pkgPath, "utf-8"));
506
1195
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
507
1196
  } catch {
508
1197
  return { dependencies, warnings, blockers };
@@ -532,6 +1221,20 @@ var EcosystemAnalyzer = class {
532
1221
  }
533
1222
  return { dependencies, warnings, blockers };
534
1223
  }
1224
+ /**
1225
+ * Returns a list of npm install commands needed to resolve ecosystem issues.
1226
+ */
1227
+ getUpgradeCommands(report) {
1228
+ const commands = [];
1229
+ const seen = /* @__PURE__ */ new Set();
1230
+ for (const dep of report.dependencies) {
1231
+ if (dep.upgradeCommand && !seen.has(dep.upgradeCommand)) {
1232
+ seen.add(dep.upgradeCommand);
1233
+ commands.push(dep.upgradeCommand);
1234
+ }
1235
+ }
1236
+ return commands;
1237
+ }
535
1238
  };
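The new `getUpgradeCommands()` helper flattens an ecosystem report into a deduplicated list of install commands. The report would normally come from the analyzer's `analyze()` method, whose full signature is not shown in this hunk, so the sketch below hand-builds a report-shaped object with hypothetical entries:

```ts
import { EcosystemAnalyzer } from "@schemashift/core"; // assumed export

const analyzer = new EcosystemAnalyzer();

// Hand-built stand-in for an ecosystem report; field names are inferred from this diff.
const report = {
  dependencies: [
    { package: "zod-form-data", upgradeCommand: "npm install zod-form-data@latest" },
    { package: "zodios", upgradeCommand: "npm install @zodios/core@latest" },
    { package: "@zodios/core", upgradeCommand: "npm install @zodios/core@latest" }, // duplicate command
  ],
  warnings: [],
  blockers: [],
};

console.log(analyzer.getUpgradeCommands(report));
// ["npm install zod-form-data@latest", "npm install @zodios/core@latest"]
```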
536
1239
 
537
1240
  // src/compatibility.ts
@@ -609,10 +1312,10 @@ var CompatibilityAnalyzer = class {
609
1312
  ecosystemAnalyzer = new EcosystemAnalyzer();
610
1313
  detectVersions(projectPath) {
611
1314
  const versions = [];
612
- const pkgPath = join2(projectPath, "package.json");
613
- if (!existsSync2(pkgPath)) return versions;
1315
+ const pkgPath = join3(projectPath, "package.json");
1316
+ if (!existsSync3(pkgPath)) return versions;
614
1317
  try {
615
- const pkg = JSON.parse(readFileSync2(pkgPath, "utf-8"));
1318
+ const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
616
1319
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
617
1320
  const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
618
1321
  for (const lib of knownLibs) {
@@ -834,8 +1537,8 @@ async function loadConfig(configPath) {
834
1537
  }
835
1538
 
836
1539
  // src/dependency-graph.ts
837
- import { existsSync as existsSync3, readdirSync, readFileSync as readFileSync3 } from "fs";
838
- import { join as join3, resolve } from "path";
1540
+ import { existsSync as existsSync4, readdirSync, readFileSync as readFileSync4 } from "fs";
1541
+ import { join as join4, resolve } from "path";
839
1542
  var SchemaDependencyResolver = class {
840
1543
  resolve(project, filePaths) {
841
1544
  const fileSet = new Set(filePaths);
@@ -922,39 +1625,96 @@ var SchemaDependencyResolver = class {
922
1625
  }
923
1626
  };
924
1627
  var SCHEMA_PACKAGES = /* @__PURE__ */ new Set(["zod", "yup", "joi", "io-ts", "valibot", "@effect/schema"]);
1628
+ function computeParallelBatches(packages, suggestedOrder) {
1629
+ const nameSet = new Set(packages.map((p) => p.name));
1630
+ const depMap = /* @__PURE__ */ new Map();
1631
+ for (const pkg of packages) {
1632
+ depMap.set(pkg.name, new Set(pkg.dependencies.filter((d) => nameSet.has(d))));
1633
+ }
1634
+ const depths = /* @__PURE__ */ new Map();
1635
+ const getDepth = (name, visited) => {
1636
+ const cached = depths.get(name);
1637
+ if (cached !== void 0) return cached;
1638
+ if (visited.has(name)) return 0;
1639
+ visited.add(name);
1640
+ const deps = depMap.get(name) ?? /* @__PURE__ */ new Set();
1641
+ let maxDepth = 0;
1642
+ for (const dep of deps) {
1643
+ maxDepth = Math.max(maxDepth, getDepth(dep, visited) + 1);
1644
+ }
1645
+ depths.set(name, maxDepth);
1646
+ return maxDepth;
1647
+ };
1648
+ for (const name of suggestedOrder) {
1649
+ getDepth(name, /* @__PURE__ */ new Set());
1650
+ }
1651
+ const batchMap = /* @__PURE__ */ new Map();
1652
+ for (const name of suggestedOrder) {
1653
+ const depth = depths.get(name) ?? 0;
1654
+ const batch = batchMap.get(depth) ?? [];
1655
+ batch.push(name);
1656
+ batchMap.set(depth, batch);
1657
+ }
1658
+ const batches = [];
1659
+ const sortedDepths = [...batchMap.keys()].sort((a, b) => a - b);
1660
+ for (const depth of sortedDepths) {
1661
+ const pkgs = batchMap.get(depth);
1662
+ if (pkgs) batches.push({ index: batches.length, packages: pkgs });
1663
+ }
1664
+ return batches;
1665
+ }
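`computeParallelBatches` groups workspace packages by dependency depth so that each batch depends only on earlier batches. A worked example on a toy workspace graph; whether the function is re-exported from the package entry point is not visible in this diff, so the import is an assumption:

```ts
import { computeParallelBatches } from "@schemashift/core"; // assumed export

// schemas has no workspace deps; api and cli depend on schemas; web depends on both.
const packages = [
  { name: "@acme/schemas", dependencies: [] },
  { name: "@acme/api", dependencies: ["@acme/schemas"] },
  { name: "@acme/cli", dependencies: ["@acme/schemas"] },
  { name: "@acme/web", dependencies: ["@acme/schemas", "@acme/api"] },
];

const batches = computeParallelBatches(packages, [
  "@acme/schemas",
  "@acme/api",
  "@acme/cli",
  "@acme/web",
]);

// Expected grouping by depth:
//   { index: 0, packages: ["@acme/schemas"] }
//   { index: 1, packages: ["@acme/api", "@acme/cli"] }
//   { index: 2, packages: ["@acme/web"] }
console.log(batches);
```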
925
1666
  var MonorepoResolver = class {
926
1667
  detect(projectPath) {
927
- const pkgPath = join3(projectPath, "package.json");
928
- if (!existsSync3(pkgPath)) return false;
929
- try {
930
- const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
931
- return !!pkg.workspaces;
932
- } catch {
933
- return false;
1668
+ const pkgPath = join4(projectPath, "package.json");
1669
+ if (existsSync4(pkgPath)) {
1670
+ try {
1671
+ const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
1672
+ if (pkg.workspaces) return true;
1673
+ } catch {
1674
+ }
934
1675
  }
1676
+ if (existsSync4(join4(projectPath, "pnpm-workspace.yaml"))) return true;
1677
+ return false;
1678
+ }
1679
+ /**
1680
+ * Detect which workspace manager is being used.
1681
+ */
1682
+ detectManager(projectPath) {
1683
+ if (existsSync4(join4(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
1684
+ const pkgPath = join4(projectPath, "package.json");
1685
+ if (existsSync4(pkgPath)) {
1686
+ try {
1687
+ const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
1688
+ if (pkg.packageManager?.startsWith("yarn")) return "yarn";
1689
+ if (pkg.packageManager?.startsWith("pnpm")) return "pnpm";
1690
+ } catch {
1691
+ }
1692
+ }
1693
+ if (existsSync4(join4(projectPath, "pnpm-lock.yaml"))) return "pnpm";
1694
+ if (existsSync4(join4(projectPath, "yarn.lock"))) return "yarn";
1695
+ return "npm";
935
1696
  }
936
1697
  analyze(projectPath) {
937
- const pkgPath = join3(projectPath, "package.json");
938
- if (!existsSync3(pkgPath)) {
1698
+ const pkgPath = join4(projectPath, "package.json");
1699
+ if (!existsSync4(pkgPath)) {
939
1700
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
940
1701
  }
941
1702
  let workspaceGlobs;
942
1703
  try {
943
- const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
944
- if (!pkg.workspaces) {
1704
+ workspaceGlobs = this.resolveWorkspaceGlobs(projectPath);
1705
+ if (workspaceGlobs.length === 0) {
945
1706
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
946
1707
  }
947
- workspaceGlobs = Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
948
1708
  } catch {
949
1709
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
950
1710
  }
951
1711
  const packages = [];
952
1712
  const resolvedDirs = this.resolveWorkspaceDirs(projectPath, workspaceGlobs);
953
1713
  for (const dir of resolvedDirs) {
954
- const wsPkgPath = join3(dir, "package.json");
955
- if (!existsSync3(wsPkgPath)) continue;
1714
+ const wsPkgPath = join4(dir, "package.json");
1715
+ if (!existsSync4(wsPkgPath)) continue;
956
1716
  try {
957
- const wsPkg = JSON.parse(readFileSync3(wsPkgPath, "utf-8"));
1717
+ const wsPkg = JSON.parse(readFileSync4(wsPkgPath, "utf-8"));
958
1718
  if (!wsPkg.name) continue;
959
1719
  const allDeps = { ...wsPkg.dependencies, ...wsPkg.devDependencies };
960
1720
  const depNames = Object.keys(allDeps);
@@ -993,18 +1753,70 @@ var MonorepoResolver = class {
993
1753
  }
994
1754
  return sorted;
995
1755
  }
1756
+ /**
1757
+ * Resolve workspace glob patterns from any supported format.
1758
+ * Supports: npm/yarn workspaces (package.json), pnpm-workspace.yaml
1759
+ */
1760
+ resolveWorkspaceGlobs(projectPath) {
1761
+ const pnpmPath = join4(projectPath, "pnpm-workspace.yaml");
1762
+ if (existsSync4(pnpmPath)) {
1763
+ return this.parsePnpmWorkspace(pnpmPath);
1764
+ }
1765
+ const pkgPath = join4(projectPath, "package.json");
1766
+ if (existsSync4(pkgPath)) {
1767
+ try {
1768
+ const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
1769
+ if (pkg.workspaces) {
1770
+ return Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
1771
+ }
1772
+ } catch {
1773
+ }
1774
+ }
1775
+ return [];
1776
+ }
1777
+ /**
1778
+ * Parse pnpm-workspace.yaml to extract workspace package globs.
1779
+ * Simple YAML parsing for the common format:
1780
+ * ```
1781
+ * packages:
1782
+ * - 'packages/*'
1783
+ * - 'apps/*'
1784
+ * ```
1785
+ */
1786
+ parsePnpmWorkspace(filePath) {
1787
+ const content = readFileSync4(filePath, "utf-8");
1788
+ const globs = [];
1789
+ let inPackages = false;
1790
+ for (const line of content.split("\n")) {
1791
+ const trimmed = line.trim();
1792
+ if (trimmed === "packages:") {
1793
+ inPackages = true;
1794
+ continue;
1795
+ }
1796
+ if (inPackages && /^\w/.test(trimmed) && !trimmed.startsWith("-")) {
1797
+ break;
1798
+ }
1799
+ if (inPackages && trimmed.startsWith("-")) {
1800
+ const pattern = trimmed.replace(/^-\s*/, "").replace(/^['"]|['"]$/g, "");
1801
+ if (pattern) {
1802
+ globs.push(pattern);
1803
+ }
1804
+ }
1805
+ }
1806
+ return globs;
1807
+ }
996
1808
  resolveWorkspaceDirs(projectPath, globs) {
997
1809
  const dirs = [];
998
1810
  for (const glob of globs) {
999
1811
  const clean = glob.replace(/\/?\*$/, "");
1000
1812
  const base = resolve(projectPath, clean);
1001
- if (!existsSync3(base)) continue;
1813
+ if (!existsSync4(base)) continue;
1002
1814
  if (glob.endsWith("*")) {
1003
1815
  try {
1004
1816
  const entries = readdirSync(base, { withFileTypes: true });
1005
1817
  for (const entry of entries) {
1006
1818
  if (entry.isDirectory()) {
1007
- dirs.push(join3(base, entry.name));
1819
+ dirs.push(join4(base, entry.name));
1008
1820
  }
1009
1821
  }
1010
1822
  } catch {
@@ -1018,8 +1830,8 @@ var MonorepoResolver = class {
1018
1830
  };
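With this release the resolver recognizes pnpm workspaces (`pnpm-workspace.yaml`) in addition to the npm/yarn `workspaces` field, and can report which workspace manager is in use. A sketch, assuming `MonorepoResolver` is exported:

```ts
import { MonorepoResolver } from "@schemashift/core"; // assumed export

const resolver = new MonorepoResolver();
const root = "/path/to/monorepo"; // hypothetical path

if (resolver.detect(root)) {
  console.log(resolver.detectManager(root)); // "pnpm" | "yarn" | "npm"
  const { packages, suggestedOrder } = resolver.analyze(root);
  console.log(suggestedOrder); // workspace packages ordered so dependencies migrate first
  console.log(packages.length);
}
```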
1019
1831
 
1020
1832
  // src/detailed-analyzer.ts
1021
- import { existsSync as existsSync4, readFileSync as readFileSync4 } from "fs";
1022
- import { join as join4 } from "path";
1833
+ import { existsSync as existsSync5, readFileSync as readFileSync5 } from "fs";
1834
+ import { join as join5 } from "path";
1023
1835
  var COMPLEXITY_CHAIN_WEIGHT = 2;
1024
1836
  var COMPLEXITY_DEPTH_WEIGHT = 3;
1025
1837
  var COMPLEXITY_VALIDATION_WEIGHT = 1;
@@ -1084,10 +1896,10 @@ var DetailedAnalyzer = class {
1084
1896
  }
1085
1897
  detectLibraryVersions(projectPath) {
1086
1898
  const versions = [];
1087
- const pkgPath = join4(projectPath, "package.json");
1088
- if (!existsSync4(pkgPath)) return versions;
1899
+ const pkgPath = join5(projectPath, "package.json");
1900
+ if (!existsSync5(pkgPath)) return versions;
1089
1901
  try {
1090
- const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
1902
+ const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
1091
1903
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
1092
1904
  const allDeps = {
1093
1905
  ...pkg.dependencies,
@@ -1260,6 +2072,165 @@ var DetailedAnalyzer = class {
1260
2072
  }
1261
2073
  };
1262
2074
 
2075
+ // src/drift-detector.ts
2076
+ import { createHash as createHash2 } from "crypto";
2077
+ import { existsSync as existsSync6, mkdirSync as mkdirSync2, readFileSync as readFileSync6, writeFileSync as writeFileSync2 } from "fs";
2078
+ import { join as join6, relative } from "path";
2079
+ var SNAPSHOT_DIR = ".schemashift";
2080
+ var SNAPSHOT_FILE = "schema-snapshot.json";
2081
+ var SNAPSHOT_VERSION = 1;
2082
+ var DriftDetector = class {
2083
+ snapshotDir;
2084
+ snapshotPath;
2085
+ constructor(projectPath) {
2086
+ this.snapshotDir = join6(projectPath, SNAPSHOT_DIR);
2087
+ this.snapshotPath = join6(this.snapshotDir, SNAPSHOT_FILE);
2088
+ }
2089
+ /**
2090
+ * Take a snapshot of the current schema state
2091
+ */
2092
+ snapshot(files, projectPath) {
2093
+ const schemas = [];
2094
+ for (const filePath of files) {
2095
+ if (!existsSync6(filePath)) continue;
2096
+ const content = readFileSync6(filePath, "utf-8");
2097
+ const library = this.detectLibraryFromContent(content);
2098
+ if (library === "unknown") continue;
2099
+ const schemaNames = this.extractSchemaNames(content);
2100
+ schemas.push({
2101
+ filePath: relative(projectPath, filePath),
2102
+ library,
2103
+ contentHash: this.hashContent(content),
2104
+ schemaCount: schemaNames.length,
2105
+ schemaNames
2106
+ });
2107
+ }
2108
+ const snapshot = {
2109
+ version: SNAPSHOT_VERSION,
2110
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
2111
+ projectPath,
2112
+ schemas
2113
+ };
2114
+ return snapshot;
2115
+ }
2116
+ /**
2117
+ * Save a snapshot to disk
2118
+ */
2119
+ saveSnapshot(snapshot) {
2120
+ if (!existsSync6(this.snapshotDir)) {
2121
+ mkdirSync2(this.snapshotDir, { recursive: true });
2122
+ }
2123
+ writeFileSync2(this.snapshotPath, JSON.stringify(snapshot, null, 2));
2124
+ }
2125
+ /**
2126
+ * Load saved snapshot from disk
2127
+ */
2128
+ loadSnapshot() {
2129
+ if (!existsSync6(this.snapshotPath)) {
2130
+ return null;
2131
+ }
2132
+ try {
2133
+ const content = readFileSync6(this.snapshotPath, "utf-8");
2134
+ return JSON.parse(content);
2135
+ } catch {
2136
+ return null;
2137
+ }
2138
+ }
2139
+ /**
2140
+ * Compare current state against saved snapshot
2141
+ */
2142
+ detect(currentFiles, projectPath) {
2143
+ const saved = this.loadSnapshot();
2144
+ if (!saved) {
2145
+ return {
2146
+ hasDrift: false,
2147
+ added: [],
2148
+ removed: [],
2149
+ modified: [],
2150
+ unchanged: 0,
2151
+ totalFiles: 0,
2152
+ snapshotTimestamp: ""
2153
+ };
2154
+ }
2155
+ const current = this.snapshot(currentFiles, projectPath);
2156
+ return this.compareSnapshots(saved, current);
2157
+ }
2158
+ /**
2159
+ * Compare two snapshots and return drift results
2160
+ */
2161
+ compareSnapshots(baseline, current) {
2162
+ const baselineMap = new Map(baseline.schemas.map((s) => [s.filePath, s]));
2163
+ const currentMap = new Map(current.schemas.map((s) => [s.filePath, s]));
2164
+ const added = [];
2165
+ const removed = [];
2166
+ const modified = [];
2167
+ let unchanged = 0;
2168
+ for (const [path, currentFile] of currentMap) {
2169
+ const baselineFile = baselineMap.get(path);
2170
+ if (!baselineFile) {
2171
+ added.push(currentFile);
2172
+ } else if (currentFile.contentHash !== baselineFile.contentHash) {
2173
+ const addedSchemas = currentFile.schemaNames.filter(
2174
+ (n) => !baselineFile.schemaNames.includes(n)
2175
+ );
2176
+ const removedSchemas = baselineFile.schemaNames.filter(
2177
+ (n) => !currentFile.schemaNames.includes(n)
2178
+ );
2179
+ modified.push({
2180
+ filePath: path,
2181
+ library: currentFile.library,
2182
+ previousHash: baselineFile.contentHash,
2183
+ currentHash: currentFile.contentHash,
2184
+ previousSchemaCount: baselineFile.schemaCount,
2185
+ currentSchemaCount: currentFile.schemaCount,
2186
+ addedSchemas,
2187
+ removedSchemas
2188
+ });
2189
+ } else {
2190
+ unchanged++;
2191
+ }
2192
+ }
2193
+ for (const [path, baselineFile] of baselineMap) {
2194
+ if (!currentMap.has(path)) {
2195
+ removed.push(baselineFile);
2196
+ }
2197
+ }
2198
+ return {
2199
+ hasDrift: added.length > 0 || removed.length > 0 || modified.length > 0,
2200
+ added,
2201
+ removed,
2202
+ modified,
2203
+ unchanged,
2204
+ totalFiles: currentMap.size,
2205
+ snapshotTimestamp: baseline.timestamp
2206
+ };
2207
+ }
2208
+ extractSchemaNames(content) {
2209
+ const names = [];
2210
+ const pattern = /(?:const|let|var)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|t\.|v\.|type\(|object\(|string\(|S\.)/g;
2211
+ for (const match of content.matchAll(pattern)) {
2212
+ if (match[1]) names.push(match[1]);
2213
+ }
2214
+ return names;
2215
+ }
2216
+ detectLibraryFromContent(content) {
2217
+ if (/from\s*['"]zod['"]/.test(content) || /\bz\./.test(content)) return "zod";
2218
+ if (/from\s*['"]yup['"]/.test(content) || /\byup\./.test(content)) return "yup";
2219
+ if (/from\s*['"]joi['"]/.test(content) || /\bJoi\./.test(content)) return "joi";
2220
+ if (/from\s*['"]io-ts['"]/.test(content) || /\bt\./.test(content) && /from\s*['"]io-ts/.test(content))
2221
+ return "io-ts";
2222
+ if (/from\s*['"]valibot['"]/.test(content) || /\bv\./.test(content) && /from\s*['"]valibot/.test(content))
2223
+ return "valibot";
2224
+ if (/from\s*['"]arktype['"]/.test(content)) return "arktype";
2225
+ if (/from\s*['"]superstruct['"]/.test(content)) return "superstruct";
2226
+ if (/from\s*['"]@effect\/schema['"]/.test(content)) return "effect";
2227
+ return "unknown";
2228
+ }
2229
+ hashContent(content) {
2230
+ return createHash2("sha256").update(content).digest("hex").substring(0, 16);
2231
+ }
2232
+ };
2233
+
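The drift detector hashes schema files into `.schemashift/schema-snapshot.json` and later reports files that were added, removed, or modified relative to that baseline. A sketch, assuming `DriftDetector` is exported and using hypothetical file paths:

```ts
import { DriftDetector } from "@schemashift/core"; // assumed export

const projectPath = process.cwd();
const detector = new DriftDetector(projectPath);
const schemaFiles = ["src/schemas/user.ts", "src/schemas/order.ts"]; // hypothetical paths

// Right after a migration: record the baseline.
detector.saveSnapshot(detector.snapshot(schemaFiles, projectPath));

// Later, e.g. in CI: compare the current files against the baseline.
const drift = detector.detect(schemaFiles, projectPath);
if (drift.hasDrift) {
  console.log(`Schema drift since ${drift.snapshotTimestamp}:`);
  for (const m of drift.modified) {
    console.log(`  ~ ${m.filePath} (+${m.addedSchemas.length}/-${m.removedSchemas.length} schemas)`);
  }
  for (const f of drift.added) console.log(`  + ${f.filePath}`);
  for (const f of drift.removed) console.log(`  - ${f.filePath}`);
}
```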
1263
2234
  // src/form-resolver-migrator.ts
1264
2235
  var RESOLVER_MAPPINGS = {
1265
2236
  "yup->zod": [
@@ -1347,6 +2318,7 @@ var FormResolverMigrator = class {
1347
2318
  // src/governance.ts
1348
2319
  var GovernanceEngine = class {
1349
2320
  rules = /* @__PURE__ */ new Map();
2321
+ customRuleFunctions = /* @__PURE__ */ new Map();
1350
2322
  configure(rules) {
1351
2323
  this.rules.clear();
1352
2324
  for (const [name, config] of Object.entries(rules)) {
@@ -1355,6 +2327,13 @@ var GovernanceEngine = class {
1355
2327
  }
1356
2328
  }
1357
2329
  }
2330
+ /**
2331
+ * Register a custom governance rule function.
2332
+ * Custom rules are executed per-file alongside built-in rules.
2333
+ */
2334
+ registerRule(name, fn) {
2335
+ this.customRuleFunctions.set(name, fn);
2336
+ }
1358
2337
  analyze(project) {
1359
2338
  const violations = [];
1360
2339
  let schemasChecked = 0;
@@ -1430,6 +2409,104 @@ var GovernanceEngine = class {
1430
2409
  });
1431
2410
  }
1432
2411
  }
2412
+ if (this.rules.has("require-safeParse")) {
2413
+ if (text.includes(".parse(") && !text.includes(".safeParse(")) {
2414
+ violations.push({
2415
+ rule: "require-safeParse",
2416
+ message: `Schema "${schemaName}" uses .parse() \u2014 prefer .safeParse() for safer error handling`,
2417
+ filePath,
2418
+ lineNumber,
2419
+ schemaName,
2420
+ severity: "warning",
2421
+ fixable: true
2422
+ });
2423
+ }
2424
+ }
2425
+ if (this.rules.has("require-description")) {
2426
+ if (!text.includes(".describe(")) {
2427
+ violations.push({
2428
+ rule: "require-description",
2429
+ message: `Schema "${schemaName}" missing .describe() \u2014 add a description for documentation`,
2430
+ filePath,
2431
+ lineNumber,
2432
+ schemaName,
2433
+ severity: "warning",
2434
+ fixable: true
2435
+ });
2436
+ }
2437
+ }
2438
+ if (this.rules.has("no-coerce-in-api")) {
2439
+ if (/\.coerce\./.test(text)) {
2440
+ violations.push({
2441
+ rule: "no-coerce-in-api",
2442
+ message: `Schema "${schemaName}" uses z.coerce.* \u2014 coercion in API validation is a security risk`,
2443
+ filePath,
2444
+ lineNumber,
2445
+ schemaName,
2446
+ severity: "error",
2447
+ fixable: false
2448
+ });
2449
+ }
2450
+ }
2451
+ if (this.rules.has("require-max-length")) {
2452
+ if (text.includes(".string()") && !text.includes(".max(") && !text.includes(".length(")) {
2453
+ violations.push({
2454
+ rule: "require-max-length",
2455
+ message: `Schema "${schemaName}" has string without max length \u2014 required for DoS prevention`,
2456
+ filePath,
2457
+ lineNumber,
2458
+ schemaName,
2459
+ severity: "error",
2460
+ fixable: true
2461
+ });
2462
+ }
2463
+ }
2464
+ if (this.rules.has("max-nesting-depth")) {
2465
+ const config = this.rules.get("max-nesting-depth") ?? {};
2466
+ const maxDepth = config.threshold ?? 5;
2467
+ const depth = this.measureNestingDepth(text);
2468
+ if (depth > maxDepth) {
2469
+ violations.push({
2470
+ rule: "max-nesting-depth",
2471
+ message: `Schema "${schemaName}" nesting depth (${depth}) exceeds limit (${maxDepth})`,
2472
+ filePath,
2473
+ lineNumber,
2474
+ schemaName,
2475
+ severity: "warning",
2476
+ fixable: false
2477
+ });
2478
+ }
2479
+ }
2480
+ }
2481
+ }
2482
+ for (const sourceFile of project.getSourceFiles()) {
2483
+ const library = this.detectFileLibrary(sourceFile);
2484
+ if (library === "unknown") continue;
2485
+ const filePath = sourceFile.getFilePath();
2486
+ const text = sourceFile.getFullText();
2487
+ if (this.rules.has("no-dynamic-schemas")) {
2488
+ const dynamicPatterns = this.detectDynamicSchemas(text, library);
2489
+ for (const lineNumber of dynamicPatterns) {
2490
+ violations.push({
2491
+ rule: "no-dynamic-schemas",
2492
+ message: "Schema created inside function body \u2014 move to module level for performance",
2493
+ filePath,
2494
+ lineNumber,
2495
+ schemaName: "(dynamic)",
2496
+ severity: "warning",
2497
+ fixable: false
2498
+ });
2499
+ }
2500
+ }
2501
+ }
2502
+ for (const [ruleName, ruleFn] of this.customRuleFunctions) {
2503
+ const config = this.rules.get(ruleName);
2504
+ if (!config) continue;
2505
+ for (const sourceFile of project.getSourceFiles()) {
2506
+ const library = this.detectFileLibrary(sourceFile);
2507
+ if (library === "unknown") continue;
2508
+ const ruleViolations = ruleFn(sourceFile, config);
2509
+ violations.push(...ruleViolations);
1433
2510
  }
1434
2511
  }
1435
2512
  return {
@@ -1446,6 +2523,57 @@ var GovernanceEngine = class {
1446
2523
  }
1447
2524
  return "unknown";
1448
2525
  }
2526
+ measureNestingDepth(text) {
2527
+ let maxDepth = 0;
2528
+ let current = 0;
2529
+ for (const char of text) {
2530
+ if (char === "(") {
2531
+ current++;
2532
+ if (current > maxDepth) maxDepth = current;
2533
+ } else if (char === ")") {
2534
+ current--;
2535
+ }
2536
+ }
2537
+ return maxDepth;
2538
+ }
2539
+ detectDynamicSchemas(text, library) {
2540
+ const lineNumbers = [];
2541
+ const prefix = this.getSchemaPrefix(library);
2542
+ if (!prefix) return lineNumbers;
2543
+ const lines = text.split("\n");
2544
+ let insideFunction = 0;
2545
+ for (let i = 0; i < lines.length; i++) {
2546
+ const line = lines[i] ?? "";
2547
+ const opens = (line.match(/\{/g) || []).length;
2548
+ const closes = (line.match(/\}/g) || []).length;
2549
+ if (/(?:function\s+\w+|=>)\s*\{/.test(line)) {
2550
+ insideFunction += opens;
2551
+ insideFunction -= closes;
2552
+ continue;
2553
+ }
2554
+ insideFunction += opens - closes;
2555
+ if (insideFunction > 0 && line.includes(prefix)) {
2556
+ lineNumbers.push(i + 1);
2557
+ }
2558
+ }
2559
+ return lineNumbers;
2560
+ }
2561
+ getSchemaPrefix(library) {
2562
+ switch (library) {
2563
+ case "zod":
2564
+ return "z.";
2565
+ case "yup":
2566
+ return "yup.";
2567
+ case "joi":
2568
+ return "Joi.";
2569
+ case "io-ts":
2570
+ return "t.";
2571
+ case "valibot":
2572
+ return "v.";
2573
+ default:
2574
+ return null;
2575
+ }
2576
+ }
1449
2577
  isSchemaExpression(text, library) {
1450
2578
  switch (library) {
1451
2579
  case "zod":
@@ -1464,17 +2592,265 @@ var GovernanceEngine = class {
1464
2592
  }
1465
2593
  };
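This release adds several built-in rules (require-safeParse, require-description, no-coerce-in-api, require-max-length, max-nesting-depth, no-dynamic-schemas) plus a `registerRule()` hook for custom rules. A configuration sketch; the exact rule-config shape is not visible in this diff (only `threshold` is read for max-nesting-depth), so the `enabled` fields below are assumptions, as is the shape of the value returned by `analyze()`:

```ts
import { Project } from "ts-morph";
import { GovernanceEngine } from "@schemashift/core"; // assumed export

const engine = new GovernanceEngine();
engine.configure({
  // Config shape is inferred; an explicit `enabled` flag may or may not be required.
  "require-safeParse": { enabled: true },
  "no-coerce-in-api": { enabled: true },
  "require-max-length": { enabled: true },
  "max-nesting-depth": { enabled: true, threshold: 4 },
  "no-dynamic-schemas": { enabled: true },
});

const project = new Project({ tsConfigFilePath: "tsconfig.json" });
const result = engine.analyze(project); // return shape partially inferred from this diff
for (const v of result.violations) {
  console.log(`${v.filePath}:${v.lineNumber} [${v.rule}] ${v.severity}: ${v.message}`);
}
```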
1466
2594
 
2595
+ // src/governance-templates.ts
2596
+ var GOVERNANCE_TEMPLATES = [
2597
+ {
2598
+ name: "no-any-schemas",
2599
+ description: "Disallow z.any(), yup.mixed() without constraints, and similar unrestricted types",
2600
+ category: "security",
2601
+ rule: (sourceFile, _config) => {
2602
+ const violations = [];
2603
+ const text = sourceFile.getFullText();
2604
+ const filePath = sourceFile.getFilePath();
2605
+ const lines = text.split("\n");
2606
+ const anyPatterns = [
2607
+ /\bz\.any\(\)/,
2608
+ /\byup\.mixed\(\)/,
2609
+ /\bt\.any\b/,
2610
+ /\bv\.any\(\)/,
2611
+ /\bunknown\(\)/
2612
+ ];
2613
+ for (let i = 0; i < lines.length; i++) {
2614
+ const line = lines[i] ?? "";
2615
+ for (const pattern of anyPatterns) {
2616
+ if (pattern.test(line)) {
2617
+ violations.push({
2618
+ rule: "no-any-schemas",
2619
+ message: "Unrestricted type (any/mixed/unknown) found. Use a specific type with constraints.",
2620
+ filePath,
2621
+ lineNumber: i + 1,
2622
+ schemaName: "",
2623
+ severity: "error",
2624
+ fixable: false
2625
+ });
2626
+ }
2627
+ }
2628
+ }
2629
+ return violations;
2630
+ }
2631
+ },
2632
+ {
2633
+ name: "require-descriptions",
2634
+ description: "All exported schemas must have .describe() for documentation",
2635
+ category: "quality",
2636
+ rule: (sourceFile, _config) => {
2637
+ const violations = [];
2638
+ const text = sourceFile.getFullText();
2639
+ const filePath = sourceFile.getFilePath();
2640
+ const lines = text.split("\n");
2641
+ for (let i = 0; i < lines.length; i++) {
2642
+ const line = lines[i] ?? "";
2643
+ if (/export\s+(const|let)\s+\w+.*=\s*(z\.|yup\.)/.test(line)) {
2644
+ let fullStatement = line;
2645
+ let j = i + 1;
2646
+ while (j < lines.length && !lines[j]?.includes(";") && j < i + 10) {
2647
+ fullStatement += lines[j] ?? "";
2648
+ j++;
2649
+ }
2650
+ if (j < lines.length) fullStatement += lines[j] ?? "";
2651
+ if (!fullStatement.includes(".describe(")) {
2652
+ const nameMatch = line.match(/(?:const|let)\s+(\w+)/);
2653
+ violations.push({
2654
+ rule: "require-descriptions",
2655
+ message: `Exported schema ${nameMatch?.[1] || "unknown"} should include .describe() for documentation.`,
2656
+ filePath,
2657
+ lineNumber: i + 1,
2658
+ schemaName: nameMatch?.[1] || "",
2659
+ severity: "warning",
2660
+ fixable: true
2661
+ });
2662
+ }
2663
+ }
2664
+ }
2665
+ return violations;
2666
+ }
2667
+ },
2668
+ {
2669
+ name: "max-nesting-depth",
2670
+ description: "Limit schema nesting depth to prevent TypeScript performance issues",
2671
+ category: "performance",
2672
+ rule: (sourceFile, config) => {
2673
+ const violations = [];
2674
+ const text = sourceFile.getFullText();
2675
+ const filePath = sourceFile.getFilePath();
2676
+ const maxDepth = config.threshold || 5;
2677
+ const lines = text.split("\n");
2678
+ let currentDepth = 0;
2679
+ let maxFoundDepth = 0;
2680
+ let deepestLine = 0;
2681
+ for (let i = 0; i < lines.length; i++) {
2682
+ const line = lines[i] ?? "";
2683
+ for (const char of line) {
2684
+ if (char === "(" || char === "{" || char === "[") {
2685
+ currentDepth++;
2686
+ if (currentDepth > maxFoundDepth) {
2687
+ maxFoundDepth = currentDepth;
2688
+ deepestLine = i + 1;
2689
+ }
2690
+ }
2691
+ if (char === ")" || char === "}" || char === "]") {
2692
+ currentDepth = Math.max(0, currentDepth - 1);
2693
+ }
2694
+ }
2695
+ }
2696
+ if (maxFoundDepth > maxDepth) {
2697
+ violations.push({
2698
+ rule: "max-nesting-depth",
2699
+ message: `Schema nesting depth ${maxFoundDepth} exceeds maximum of ${maxDepth}. Consider breaking into smaller schemas.`,
2700
+ filePath,
2701
+ lineNumber: deepestLine,
2702
+ schemaName: "",
2703
+ severity: "warning",
2704
+ fixable: false
2705
+ });
2706
+ }
2707
+ return violations;
2708
+ }
2709
+ },
2710
+ {
2711
+ name: "no-deprecated-methods",
2712
+ description: "Flag usage of deprecated schema methods",
2713
+ category: "quality",
2714
+ rule: (sourceFile, _config) => {
2715
+ const violations = [];
2716
+ const text = sourceFile.getFullText();
2717
+ const filePath = sourceFile.getFilePath();
2718
+ const lines = text.split("\n");
2719
+ const deprecatedPatterns = [
2720
+ {
2721
+ pattern: /\.deepPartial\(\)/,
2722
+ message: ".deepPartial() is removed in Zod v4. Use recursive .partial() instead."
2723
+ },
2724
+ {
2725
+ pattern: /\.strip\(\)/,
2726
+ message: ".strip() is deprecated. Use z.strictObject() or explicit stripping."
2727
+ },
2728
+ {
2729
+ pattern: /z\.promise\(/,
2730
+ message: "z.promise() is deprecated in Zod v4. Use native Promise types."
2731
+ },
2732
+ {
2733
+ pattern: /z\.ostring\(\)/,
2734
+ message: "z.ostring() is removed in Zod v4. Use z.string().optional()."
2735
+ },
2736
+ {
2737
+ pattern: /z\.onumber\(\)/,
2738
+ message: "z.onumber() is removed in Zod v4. Use z.number().optional()."
2739
+ },
2740
+ {
2741
+ pattern: /z\.oboolean\(\)/,
2742
+ message: "z.oboolean() is removed in Zod v4. Use z.boolean().optional()."
2743
+ },
2744
+ {
2745
+ pattern: /z\.preprocess\(/,
2746
+ message: "z.preprocess() is removed in Zod v4. Use z.coerce.* instead."
2747
+ }
2748
+ ];
2749
+ for (let i = 0; i < lines.length; i++) {
2750
+ const line = lines[i] ?? "";
2751
+ for (const { pattern, message } of deprecatedPatterns) {
2752
+ if (pattern.test(line)) {
2753
+ violations.push({
2754
+ rule: "no-deprecated-methods",
2755
+ message,
2756
+ filePath,
2757
+ lineNumber: i + 1,
2758
+ schemaName: "",
2759
+ severity: "warning",
2760
+ fixable: false
2761
+ });
2762
+ }
2763
+ }
2764
+ }
2765
+ return violations;
2766
+ }
2767
+ },
2768
+ {
2769
+ name: "naming-convention",
2770
+ description: "Enforce schema naming pattern (e.g., must end with Schema)",
2771
+ category: "quality",
2772
+ rule: (sourceFile, config) => {
2773
+ const violations = [];
2774
+ const text = sourceFile.getFullText();
2775
+ const filePath = sourceFile.getFilePath();
2776
+ const lines = text.split("\n");
2777
+ const pattern = new RegExp(config.pattern || ".*Schema$");
2778
+ for (let i = 0; i < lines.length; i++) {
2779
+ const line = lines[i] ?? "";
2780
+ const match = line.match(
2781
+ /(?:const|let)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|t\.|v\.|type\(|object\(|string\()/
2782
+ );
2783
+ if (match?.[1] && !pattern.test(match[1])) {
2784
+ violations.push({
2785
+ rule: "naming-convention",
2786
+ message: `Schema "${match[1]}" does not match naming pattern ${pattern.source}.`,
2787
+ filePath,
2788
+ lineNumber: i + 1,
2789
+ schemaName: match[1],
2790
+ severity: "warning",
2791
+ fixable: false
2792
+ });
2793
+ }
2794
+ }
2795
+ return violations;
2796
+ }
2797
+ },
2798
+ {
2799
+ name: "require-max-length",
2800
+ description: "String schemas must have .max() to prevent DoS via unbounded input",
2801
+ category: "security",
2802
+ rule: (sourceFile, _config) => {
2803
+ const violations = [];
2804
+ const text = sourceFile.getFullText();
2805
+ const filePath = sourceFile.getFilePath();
2806
+ const lines = text.split("\n");
2807
+ for (let i = 0; i < lines.length; i++) {
2808
+ const line = lines[i] ?? "";
2809
+ if (/z\.string\(\)/.test(line) && !line.includes(".max(") && !line.includes(".length(")) {
2810
+ let fullChain = line;
2811
+ let j = i + 1;
2812
+ while (j < lines.length && j < i + 5 && /^\s*\./.test(lines[j] ?? "")) {
2813
+ fullChain += lines[j] ?? "";
2814
+ j++;
2815
+ }
2816
+ if (!fullChain.includes(".max(") && !fullChain.includes(".length(")) {
2817
+ violations.push({
2818
+ rule: "require-max-length",
2819
+ message: "String schema should have .max() to prevent unbounded input (DoS protection).",
2820
+ filePath,
2821
+ lineNumber: i + 1,
2822
+ schemaName: "",
2823
+ severity: "warning",
2824
+ fixable: true
2825
+ });
2826
+ }
2827
+ }
2828
+ }
2829
+ return violations;
2830
+ }
2831
+ }
2832
+ ];
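The optional-shorthand removals flagged by the no-deprecated-methods rule above translate mechanically; a minimal before/after sketch (variable names are illustrative and not taken from this package):

import { z } from "zod";

// Zod v3 shorthands flagged by the rule
const nickname = z.ostring();   // equivalent to z.string().optional()
const age = z.onumber();        // equivalent to z.number().optional()
const active = z.oboolean();    // equivalent to z.boolean().optional()

// Explicit forms that remain valid in Zod v4
const nicknameV4 = z.string().optional();
const ageV4 = z.number().optional();
const activeV4 = z.boolean().optional();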
2833
+ function getGovernanceTemplate(name) {
2834
+ return GOVERNANCE_TEMPLATES.find((t) => t.name === name);
2835
+ }
2836
+ function getGovernanceTemplatesByCategory(category) {
2837
+ return GOVERNANCE_TEMPLATES.filter((t) => t.category === category);
2838
+ }
2839
+ function getGovernanceTemplateNames() {
2840
+ return GOVERNANCE_TEMPLATES.map((t) => t.name);
2841
+ }
2842
+
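A minimal sketch of the governance template helpers defined above, assuming a ts-morph Project over the consumer's own sources (the glob and the config object passed to the rule are illustrative):

import { Project } from "ts-morph";
import { getGovernanceTemplate, getGovernanceTemplateNames } from "@schemashift/core";

const project = new Project();
project.addSourceFilesAtPaths("src/**/*.ts");

console.log(getGovernanceTemplateNames()); // includes "no-deprecated-methods", "naming-convention", ...

const template = getGovernanceTemplate("naming-convention");
if (template) {
  for (const file of project.getSourceFiles()) {
    // Each template exposes rule(sourceFile, config); naming-convention reads config.pattern.
    const violations = template.rule(file, { pattern: ".*Schema$" });
    for (const v of violations) {
      console.warn(`${v.filePath}:${v.lineNumber} [${v.rule}] ${v.message}`);
    }
  }
}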
1467
2843
  // src/incremental.ts
1468
- import { existsSync as existsSync5, mkdirSync, readFileSync as readFileSync5, writeFileSync } from "fs";
1469
- import { join as join5 } from "path";
2844
+ import { existsSync as existsSync7, mkdirSync as mkdirSync3, readFileSync as readFileSync7, unlinkSync, writeFileSync as writeFileSync3 } from "fs";
2845
+ import { join as join7 } from "path";
1470
2846
  var STATE_DIR = ".schemashift";
1471
2847
  var STATE_FILE = "incremental.json";
1472
2848
  var IncrementalTracker = class {
1473
2849
  stateDir;
1474
2850
  statePath;
1475
2851
  constructor(projectPath) {
1476
- this.stateDir = join5(projectPath, STATE_DIR);
1477
- this.statePath = join5(this.stateDir, STATE_FILE);
2852
+ this.stateDir = join7(projectPath, STATE_DIR);
2853
+ this.statePath = join7(this.stateDir, STATE_FILE);
1478
2854
  }
1479
2855
  start(files, from, to) {
1480
2856
  const state = {
@@ -1509,9 +2885,9 @@ var IncrementalTracker = class {
1509
2885
  this.saveState(state);
1510
2886
  }
1511
2887
  getState() {
1512
- if (!existsSync5(this.statePath)) return null;
2888
+ if (!existsSync7(this.statePath)) return null;
1513
2889
  try {
1514
- return JSON.parse(readFileSync5(this.statePath, "utf-8"));
2890
+ return JSON.parse(readFileSync7(this.statePath, "utf-8"));
1515
2891
  } catch {
1516
2892
  return null;
1517
2893
  }
@@ -1538,21 +2914,21 @@ var IncrementalTracker = class {
1538
2914
  };
1539
2915
  }
1540
2916
  clear() {
1541
- if (existsSync5(this.statePath)) {
1542
- writeFileSync(this.statePath, "");
2917
+ if (existsSync7(this.statePath)) {
2918
+ unlinkSync(this.statePath);
1543
2919
  }
1544
2920
  }
1545
2921
  saveState(state) {
1546
- if (!existsSync5(this.stateDir)) {
1547
- mkdirSync(this.stateDir, { recursive: true });
2922
+ if (!existsSync7(this.stateDir)) {
2923
+ mkdirSync3(this.stateDir, { recursive: true });
1548
2924
  }
1549
- writeFileSync(this.statePath, JSON.stringify(state, null, 2));
2925
+ writeFileSync3(this.statePath, JSON.stringify(state, null, 2));
1550
2926
  }
1551
2927
  };
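Only the parts of the incremental tracker touched by this diff are visible here; a hedged sketch of that surface (the file names are illustrative, and the full saved-state shape is not shown in this hunk):

import { IncrementalTracker } from "@schemashift/core";

const tracker = new IncrementalTracker(process.cwd()); // state lives in .schemashift/incremental.json
tracker.start(["src/user.schema.ts", "src/order.schema.ts"], "yup", "zod");

const state = tracker.getState(); // null when the state file is missing or unreadable
if (state) {
  // ...resume whichever files the saved state still marks as pending...
}

tracker.clear(); // 0.10.0 deletes the state file (unlinkSync) instead of truncating it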
1552
2928
 
1553
2929
  // src/package-updater.ts
1554
- import { existsSync as existsSync6, readFileSync as readFileSync6, writeFileSync as writeFileSync2 } from "fs";
1555
- import { join as join6 } from "path";
2930
+ import { existsSync as existsSync8, readFileSync as readFileSync8, writeFileSync as writeFileSync4 } from "fs";
2931
+ import { join as join8 } from "path";
1556
2932
  var TARGET_VERSIONS = {
1557
2933
  "yup->zod": { zod: "^3.24.0" },
1558
2934
  "joi->zod": { zod: "^3.24.0" },
@@ -1573,14 +2949,14 @@ var PackageUpdater = class {
1573
2949
  const add = {};
1574
2950
  const remove = [];
1575
2951
  const warnings = [];
1576
- const pkgPath = join6(projectPath, "package.json");
1577
- if (!existsSync6(pkgPath)) {
2952
+ const pkgPath = join8(projectPath, "package.json");
2953
+ if (!existsSync8(pkgPath)) {
1578
2954
  warnings.push("No package.json found. Cannot plan dependency updates.");
1579
2955
  return { add, remove, warnings };
1580
2956
  }
1581
2957
  let pkg;
1582
2958
  try {
1583
- pkg = JSON.parse(readFileSync6(pkgPath, "utf-8"));
2959
+ pkg = JSON.parse(readFileSync8(pkgPath, "utf-8"));
1584
2960
  } catch {
1585
2961
  warnings.push("Could not parse package.json.");
1586
2962
  return { add, remove, warnings };
@@ -1610,9 +2986,9 @@ var PackageUpdater = class {
1610
2986
  return { add, remove, warnings };
1611
2987
  }
1612
2988
  apply(projectPath, plan) {
1613
- const pkgPath = join6(projectPath, "package.json");
1614
- if (!existsSync6(pkgPath)) return;
1615
- const pkgText = readFileSync6(pkgPath, "utf-8");
2989
+ const pkgPath = join8(projectPath, "package.json");
2990
+ if (!existsSync8(pkgPath)) return;
2991
+ const pkgText = readFileSync8(pkgPath, "utf-8");
1616
2992
  const pkg = JSON.parse(pkgText);
1617
2993
  if (!pkg.dependencies) pkg.dependencies = {};
1618
2994
  for (const [name, version] of Object.entries(plan.add)) {
@@ -1622,11 +2998,133 @@ var PackageUpdater = class {
1622
2998
  pkg.dependencies[name] = version;
1623
2999
  }
1624
3000
  }
1625
- writeFileSync2(pkgPath, `${JSON.stringify(pkg, null, 2)}
3001
+ writeFileSync4(pkgPath, `${JSON.stringify(pkg, null, 2)}
1626
3002
  `);
1627
3003
  }
1628
3004
  };
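Only apply(projectPath, plan) and the { add, remove, warnings } plan shape are fully visible in this hunk, so the sketch below constructs a plan by hand rather than guessing at the planning method's name; the constructor, the removal entry, and the paths are assumptions:

import { PackageUpdater } from "@schemashift/core";

const updater = new PackageUpdater(); // no constructor appears in this hunk; a no-arg one is assumed
const plan = {
  add: { zod: "^3.24.0" },   // mirrors TARGET_VERSIONS["yup->zod"] above
  remove: ["yup"],           // hypothetical removal
  warnings: [] as string[],
};
updater.apply(process.cwd(), plan); // merges plan.add into package.json dependencies and rewrites the file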
1629
3005
 
3006
+ // src/performance-analyzer.ts
3007
+ var PerformanceAnalyzer = class {
3008
+ analyze(sourceFiles, from, to) {
3009
+ const warnings = [];
3010
+ let parseCallSites = 0;
3011
+ let dynamicSchemaCount = 0;
3012
+ for (const file of sourceFiles) {
3013
+ const text = file.getFullText();
3014
+ const filePath = file.getFilePath();
3015
+ const parseMatches = text.match(/\.(parse|safeParse)\s*\(/g);
3016
+ if (parseMatches) {
3017
+ parseCallSites += parseMatches.length;
3018
+ }
3019
+ const dynamicResult = this.detectDynamicSchemas(text, filePath);
3020
+ dynamicSchemaCount += dynamicResult.count;
3021
+ warnings.push(...dynamicResult.warnings);
3022
+ this.addMigrationWarnings(text, filePath, from, to, warnings);
3023
+ }
3024
+ const recommendation = this.getRecommendation(from, to, parseCallSites, dynamicSchemaCount);
3025
+ const summary = this.generateSummary(warnings, parseCallSites, dynamicSchemaCount);
3026
+ return {
3027
+ warnings,
3028
+ parseCallSites,
3029
+ dynamicSchemaCount,
3030
+ recommendation,
3031
+ summary
3032
+ };
3033
+ }
3034
+ detectDynamicSchemas(text, filePath) {
3035
+ const warnings = [];
3036
+ let count = 0;
3037
+ const functionBodyPattern = /(?:function\s+\w+\s*\([^)]*\)|const\s+\w+\s*=\s*(?:async\s+)?(?:\([^)]*\)|[a-zA-Z_]\w*)\s*=>)\s*\{[^}]*(?:z\.|yup\.|Joi\.|v\.)\w+\s*\(/g;
3038
+ for (const match of text.matchAll(functionBodyPattern)) {
3039
+ count++;
3040
+ const lineNumber = text.substring(0, match.index).split("\n").length;
3041
+ warnings.push({
3042
+ category: "dynamic-schemas",
3043
+ message: "Schema created inside function body \u2014 may cause performance issues with Zod v4.",
3044
+ detail: "Zod v4 uses JIT compilation, making schema creation ~17x slower than v3. Move schema definitions to module level to avoid re-creation on every call.",
3045
+ filePath,
3046
+ lineNumber,
3047
+ severity: "warning"
3048
+ });
3049
+ }
3050
+ const reactComponentPattern = /(?:function\s+[A-Z]\w*\s*\([^)]*\)|const\s+[A-Z]\w*\s*[:=])[^{]*\{[^}]*(?:z\.|yup\.|Joi\.)\w+\s*\(/g;
3051
+ for (const match of text.matchAll(reactComponentPattern)) {
3052
+ count++;
3053
+ const lineNumber = text.substring(0, match.index).split("\n").length;
3054
+ warnings.push({
3055
+ category: "schema-creation",
3056
+ message: "Schema appears to be created inside a React component.",
3057
+ detail: "Schemas created inside React components are re-created on every render. Move schema definitions outside the component or wrap in useMemo(). This is especially important for Zod v4 due to JIT compilation overhead.",
3058
+ filePath,
3059
+ lineNumber,
3060
+ severity: "warning"
3061
+ });
3062
+ }
3063
+ return { count, warnings };
3064
+ }
3065
+ addMigrationWarnings(text, filePath, from, to, warnings) {
3066
+ const migration = `${from}->${to}`;
3067
+ if (migration === "zod-v3->v4") {
3068
+ if (/edge-runtime|@vercel\/edge|cloudflare.*workers|deno\.serve|Deno\.serve/i.test(text) || /export\s+const\s+runtime\s*=\s*['"]edge['"]/i.test(text)) {
3069
+ warnings.push({
3070
+ category: "cold-start",
3071
+ message: "Edge/serverless environment detected \u2014 Zod v4 JIT compilation increases cold start time.",
3072
+ detail: "Zod v4 JIT trades slower schema creation for faster repeated parsing. In serverless/edge environments with short-lived instances, the JIT cost may not amortize. Consider Valibot or staying on Zod v3 for cold-start-sensitive code.",
3073
+ filePath,
3074
+ severity: "warning"
3075
+ });
3076
+ }
3077
+ const parseCount = (text.match(/\.parse\s*\(/g) || []).length;
3078
+ if (parseCount > 10) {
3079
+ warnings.push({
3080
+ category: "repeated-parsing",
3081
+ message: `High parse() usage (${parseCount} call sites) \u2014 Zod v4 JIT will benefit here.`,
3082
+ detail: "Zod v4 JIT compilation makes repeated parsing ~8x faster. This file has many parse() calls and will see performance improvement.",
3083
+ filePath,
3084
+ severity: "info"
3085
+ });
3086
+ }
3087
+ }
3088
+ if (migration === "zod->valibot" && /\.parse\s*\(/.test(text)) {
3089
+ warnings.push({
3090
+ category: "repeated-parsing",
3091
+ message: "Valibot parsing performance is comparable to Zod v4 for most schemas.",
3092
+ detail: "Valibot v1+ offers similar runtime performance to Zod v4 with significantly smaller bundle size. No JIT overhead means consistent performance across all environments.",
3093
+ filePath,
3094
+ severity: "info"
3095
+ });
3096
+ }
3097
+ }
3098
+ getRecommendation(from, to, parseCallSites, dynamicSchemaCount) {
3099
+ const migration = `${from}->${to}`;
3100
+ if (migration === "zod-v3->v4") {
3101
+ if (dynamicSchemaCount > 5) {
3102
+ return "Many dynamic schemas detected. Zod v4 JIT makes schema creation 17x slower. Move schemas to module level before migrating, or consider Valibot for size-sensitive apps.";
3103
+ }
3104
+ if (parseCallSites > 50) {
3105
+ return "High parse() volume detected. Zod v4 JIT will significantly benefit repeated parsing (up to 8x faster). Migration recommended for performance.";
3106
+ }
3107
+ return "Moderate usage detected. Zod v4 trades slower startup for faster runtime parsing.";
3108
+ }
3109
+ if (migration === "zod->valibot") {
3110
+ return "Valibot offers similar runtime performance with significantly smaller bundle size. Best suited for bundle-size-sensitive applications.";
3111
+ }
3112
+ if (from === "yup" || from === "joi") {
3113
+ return `Migrating from ${from} to ${to} should have neutral or positive performance impact.`;
3114
+ }
3115
+ return "Performance impact depends on usage patterns. Review warnings for details.";
3116
+ }
3117
+ generateSummary(warnings, parseCallSites, dynamicSchemaCount) {
3118
+ const parts = [];
3119
+ parts.push(`${parseCallSites} parse/safeParse call sites`);
3120
+ if (dynamicSchemaCount > 0) {
3121
+ parts.push(`${dynamicSchemaCount} dynamic schema creation sites`);
3122
+ }
3123
+ parts.push(`${warnings.length} performance warning(s)`);
3124
+ return parts.join(", ");
3125
+ }
3126
+ };
3127
+
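A minimal sketch of PerformanceAnalyzer.analyze() as defined above, run over a ts-morph project (the glob is illustrative):

import { Project } from "ts-morph";
import { PerformanceAnalyzer } from "@schemashift/core";

const project = new Project();
project.addSourceFilesAtPaths("src/**/*.ts");

const analyzer = new PerformanceAnalyzer();
const report = analyzer.analyze(project.getSourceFiles(), "zod-v3", "v4");

console.log(report.summary);        // e.g. "12 parse/safeParse call sites, 1 performance warning(s)"
console.log(report.recommendation); // path-specific guidance on JIT trade-offs, Valibot, etc.
for (const w of report.warnings) {
  console.log(`[${w.severity}] ${w.category}: ${w.message}`);
}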
1630
3128
  // src/plugin-loader.ts
1631
3129
  var PluginLoader = class {
1632
3130
  async loadPlugins(pluginPaths) {
@@ -1672,8 +3170,8 @@ var PluginLoader = class {
1672
3170
  };
1673
3171
 
1674
3172
  // src/standard-schema.ts
1675
- import { existsSync as existsSync7, readFileSync as readFileSync7 } from "fs";
1676
- import { join as join7 } from "path";
3173
+ import { existsSync as existsSync9, readFileSync as readFileSync9 } from "fs";
3174
+ import { join as join9 } from "path";
1677
3175
  var STANDARD_SCHEMA_LIBRARIES = {
1678
3176
  zod: { minMajor: 3, minMinor: 23 },
1679
3177
  // Zod v3.23+ and v4+
@@ -1702,16 +3200,16 @@ function isVersionCompatible(version, minMajor, minMinor) {
1702
3200
  return false;
1703
3201
  }
1704
3202
  function detectStandardSchema(projectPath) {
1705
- const pkgPath = join7(projectPath, "package.json");
1706
- if (!existsSync7(pkgPath)) {
1707
- return { detected: false, compatibleLibraries: [], recommendation: "" };
3203
+ const pkgPath = join9(projectPath, "package.json");
3204
+ if (!existsSync9(pkgPath)) {
3205
+ return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
1708
3206
  }
1709
3207
  let allDeps = {};
1710
3208
  try {
1711
- const pkg = JSON.parse(readFileSync7(pkgPath, "utf-8"));
3209
+ const pkg = JSON.parse(readFileSync9(pkgPath, "utf-8"));
1712
3210
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
1713
3211
  } catch {
1714
- return { detected: false, compatibleLibraries: [], recommendation: "" };
3212
+ return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
1715
3213
  }
1716
3214
  const hasExplicitStandardSchema = "@standard-schema/spec" in allDeps;
1717
3215
  const compatibleLibraries = [];
@@ -1730,9 +3228,155 @@ function detectStandardSchema(projectPath) {
1730
3228
  } else if (hasExplicitStandardSchema) {
1731
3229
  recommendation = "Standard Schema spec detected. Ensure your validation library supports Standard Schema for maximum interoperability.";
1732
3230
  }
1733
- return { detected, compatibleLibraries, recommendation };
3231
+ let adoptionPath;
3232
+ if (detected && !hasExplicitStandardSchema) {
3233
+ adoptionPath = "Install @standard-schema/spec for explicit Standard Schema support. This enables library-agnostic validation consumers to accept your schemas without depending on a specific library. Run: npm install @standard-schema/spec";
3234
+ } else if (!detected) {
3235
+ adoptionPath = "Consider migrating to a Standard Schema-compatible library (Zod v3.23+, Valibot v1+, ArkType v2+) to future-proof your validation layer and reduce library lock-in.";
3236
+ }
3237
+ const interopTools = detected ? [
3238
+ "tRPC v11+ (Standard Schema input validation)",
3239
+ "TanStack Form (schema-agnostic validation)",
3240
+ "TanStack Router (route parameter validation)",
3241
+ "Hono (request validation middleware)",
3242
+ "Conform (progressive form validation)",
3243
+ "Nuxt (runtime config validation)"
3244
+ ] : [];
3245
+ return { detected, compatibleLibraries, recommendation, adoptionPath, interopTools };
1734
3246
  }
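The detectStandardSchema() result gains adoptionPath and interopTools in 0.10.0; a short sketch of reading it (the project path is illustrative):

import { detectStandardSchema } from "@schemashift/core";

const result = detectStandardSchema(process.cwd()); // inspects package.json in that directory
if (result.detected) {
  console.log("Standard Schema-compatible libraries:", result.compatibleLibraries);
  console.log("Interop-ready tooling:", result.interopTools); // tRPC v11+, TanStack Form, Hono, ...
}
if (result.adoptionPath) {
  console.log(result.adoptionPath); // suggested next step toward explicit @standard-schema/spec support
}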
1735
3247
 
3248
+ // src/test-scaffolder.ts
3249
+ var TestScaffolder = class {
3250
+ scaffold(sourceFiles, from, to) {
3251
+ const tests = [];
3252
+ let totalSchemas = 0;
3253
+ for (const file of sourceFiles) {
3254
+ const schemas = this.extractSchemaNames(file, from);
3255
+ if (schemas.length === 0) continue;
3256
+ totalSchemas += schemas.length;
3257
+ const testCode = this.generateTestFile(file, schemas, from, to);
3258
+ const filePath = file.getFilePath().replace(/\.tsx?$/, ".migration-test.ts");
3259
+ tests.push({ filePath, testCode, schemaCount: schemas.length });
3260
+ }
3261
+ const summary = tests.length > 0 ? `Generated ${tests.length} test file(s) covering ${totalSchemas} schema(s) for ${from}->${to} migration.` : "No schemas found to generate tests for.";
3262
+ return { tests, totalSchemas, summary };
3263
+ }
3264
+ extractSchemaNames(file, library) {
3265
+ const names = [];
3266
+ const prefixes = this.getLibraryPrefixes(library);
3267
+ for (const varDecl of file.getVariableDeclarations()) {
3268
+ const initializer = varDecl.getInitializer();
3269
+ if (!initializer) continue;
3270
+ const text = initializer.getText();
3271
+ if (prefixes.some((p) => text.startsWith(p))) {
3272
+ names.push(varDecl.getName());
3273
+ }
3274
+ }
3275
+ return names;
3276
+ }
3277
+ getLibraryPrefixes(library) {
3278
+ switch (library) {
3279
+ case "zod":
3280
+ case "zod-v3":
3281
+ return ["z.", "zod."];
3282
+ case "yup":
3283
+ return ["yup.", "Yup."];
3284
+ case "joi":
3285
+ return ["Joi.", "joi."];
3286
+ case "io-ts":
3287
+ return ["t."];
3288
+ case "valibot":
3289
+ return ["v.", "valibot."];
3290
+ default:
3291
+ return ["z."];
3292
+ }
3293
+ }
3294
+ generateTestFile(file, schemaNames, from, to) {
3295
+ const relativePath = file.getFilePath();
3296
+ const schemaImports = schemaNames.join(", ");
3297
+ const parseMethod = this.getParseMethod(to);
3298
+ const errorClass = this.getErrorClass(to);
3299
+ const testCases = schemaNames.map((name) => this.generateSchemaTests(name, to, parseMethod, errorClass)).join("\n\n");
3300
+ return `/**
3301
+ * Migration validation tests for ${from} -> ${to}
3302
+ * Auto-generated by SchemaShift
3303
+ *
3304
+ * These tests verify that schema behavior is preserved after migration.
3305
+ * Run before and after migration to ensure equivalence.
3306
+ *
3307
+ * Source: ${relativePath}
3308
+ */
3309
+ import { describe, expect, it } from 'vitest';
3310
+ import { ${schemaImports} } from '${relativePath.replace(/\.ts$/, ".js")}';
3311
+
3312
+ describe('Migration validation: ${relativePath}', () => {
3313
+ ${testCases}
3314
+ });
3315
+ `;
3316
+ }
3317
+ getParseMethod(to) {
3318
+ switch (to) {
3319
+ case "valibot":
3320
+ return "v.safeParse";
3321
+ default:
3322
+ return ".safeParse";
3323
+ }
3324
+ }
3325
+ getErrorClass(to) {
3326
+ switch (to) {
3327
+ case "valibot":
3328
+ return "ValiError";
3329
+ case "zod":
3330
+ case "v4":
3331
+ return "ZodError";
3332
+ default:
3333
+ return "Error";
3334
+ }
3335
+ }
3336
+ generateSchemaTests(schemaName, to, _parseMethod, _errorClass) {
3337
+ if (to === "valibot") {
3338
+ return ` describe('${schemaName}', () => {
3339
+ it('should accept valid data', () => {
3340
+ // TODO(schemashift): Add valid test data for ${schemaName}
3341
+ // const result = v.safeParse(${schemaName}, validData);
3342
+ // expect(result.success).toBe(true);
3343
+ });
3344
+
3345
+ it('should reject invalid data', () => {
3346
+ // TODO(schemashift): Add invalid test data for ${schemaName}
3347
+ // const result = v.safeParse(${schemaName}, invalidData);
3348
+ // expect(result.success).toBe(false);
3349
+ });
3350
+
3351
+ it('should preserve error messages', () => {
3352
+ // TODO(schemashift): Verify custom error messages are preserved
3353
+ // const result = v.safeParse(${schemaName}, invalidData);
3354
+ // expect(result.issues?.[0]?.message).toContain('expected message');
3355
+ });
3356
+ });`;
3357
+ }
3358
+ return ` describe('${schemaName}', () => {
3359
+ it('should accept valid data', () => {
3360
+ // TODO(schemashift): Add valid test data for ${schemaName}
3361
+ // const result = ${schemaName}.safeParse(validData);
3362
+ // expect(result.success).toBe(true);
3363
+ });
3364
+
3365
+ it('should reject invalid data', () => {
3366
+ // TODO(schemashift): Add invalid test data for ${schemaName}
3367
+ // const result = ${schemaName}.safeParse(invalidData);
3368
+ // expect(result.success).toBe(false);
3369
+ });
3370
+
3371
+ it('should preserve error messages', () => {
3372
+ // TODO(schemashift): Verify custom error messages are preserved
3373
+ // const result = ${schemaName}.safeParse(invalidData);
3374
+ // expect(result.error?.issues[0]?.message).toContain('expected message');
3375
+ });
3376
+ });`;
3377
+ }
3378
+ };
3379
+
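A sketch of TestScaffolder.scaffold() as defined above, writing the generated *.migration-test.ts files to disk (the glob and migration path are illustrative):

import { writeFileSync } from "fs";
import { Project } from "ts-morph";
import { TestScaffolder } from "@schemashift/core";

const project = new Project();
project.addSourceFilesAtPaths("src/**/*.ts");

const scaffolder = new TestScaffolder();
const { tests, summary } = scaffolder.scaffold(project.getSourceFiles(), "yup", "zod");

console.log(summary); // "Generated N test file(s) covering M schema(s) for yup->zod migration."
for (const test of tests) {
  writeFileSync(test.filePath, test.testCode); // vitest suites with TODO(schemashift) placeholders
}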
1736
3380
  // src/transform.ts
1737
3381
  var TransformEngine = class {
1738
3382
  handlers = /* @__PURE__ */ new Map();
@@ -1747,9 +3391,10 @@ var TransformEngine = class {
1747
3391
  }
1748
3392
  getSupportedPaths() {
1749
3393
  return Array.from(this.handlers.keys()).map((key) => {
1750
- const [from, to] = key.split("->");
1751
- return { from, to };
1752
- });
3394
+ const parts = key.split("->");
3395
+ if (parts.length !== 2) return null;
3396
+ return { from: parts[0], to: parts[1] };
3397
+ }).filter((entry) => entry !== null);
1753
3398
  }
1754
3399
  transform(sourceFile, from, to, options) {
1755
3400
  const handler = this.getHandler(from, to);
@@ -1765,25 +3410,164 @@ var TransformEngine = class {
1765
3410
  return handler.transform(sourceFile, options);
1766
3411
  }
1767
3412
  };
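getSupportedPaths() is hardened above to skip malformed handler keys; handler registration is not shown in this hunk, so the sketch below assumes an engine that has already been populated elsewhere:

import { TransformEngine } from "@schemashift/core";

declare const engine: TransformEngine; // assumed to have transform handlers registered elsewhere

for (const { from, to } of engine.getSupportedPaths()) {
  // Keys that do not split into exactly "from->to" are now filtered out
  // instead of producing entries with undefined halves.
  console.log(`${from} -> ${to}`);
}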
3413
+
3414
+ // src/type-dedup-detector.ts
3415
+ import { Node } from "ts-morph";
3416
+ var TypeDedupDetector = class {
3417
+ detect(sourceFiles) {
3418
+ const typeDefinitions = this.collectTypeDefinitions(sourceFiles);
3419
+ const schemaDefinitions = this.collectSchemaDefinitions(sourceFiles);
3420
+ const candidates = this.findMatches(typeDefinitions, schemaDefinitions);
3421
+ const summary = candidates.length > 0 ? `Found ${candidates.length} type definition(s) that may duplicate schema shapes. After migration, replace with z.infer<typeof schema>.` : "No duplicate type definitions detected.";
3422
+ return { candidates, summary };
3423
+ }
3424
+ collectTypeDefinitions(sourceFiles) {
3425
+ const types = [];
3426
+ for (const file of sourceFiles) {
3427
+ const filePath = file.getFilePath();
3428
+ for (const iface of file.getInterfaces()) {
3429
+ const fields = iface.getProperties().map((p) => p.getName());
3430
+ if (fields.length > 0) {
3431
+ types.push({
3432
+ name: iface.getName(),
3433
+ fields,
3434
+ filePath,
3435
+ lineNumber: iface.getStartLineNumber()
3436
+ });
3437
+ }
3438
+ }
3439
+ for (const typeAlias of file.getTypeAliases()) {
3440
+ const typeNode = typeAlias.getTypeNode();
3441
+ if (!typeNode) continue;
3442
+ if (Node.isTypeLiteral(typeNode)) {
3443
+ const fields = typeNode.getProperties().map((p) => p.getName());
3444
+ if (fields.length > 0) {
3445
+ types.push({
3446
+ name: typeAlias.getName(),
3447
+ fields,
3448
+ filePath,
3449
+ lineNumber: typeAlias.getStartLineNumber()
3450
+ });
3451
+ }
3452
+ }
3453
+ }
3454
+ }
3455
+ return types;
3456
+ }
3457
+ collectSchemaDefinitions(sourceFiles) {
3458
+ const schemas = [];
3459
+ for (const file of sourceFiles) {
3460
+ const filePath = file.getFilePath();
3461
+ for (const varDecl of file.getVariableDeclarations()) {
3462
+ const initializer = varDecl.getInitializer();
3463
+ if (!initializer) continue;
3464
+ const text = initializer.getText();
3465
+ const isSchema = /(?:z|zod|yup|Yup|Joi|joi|t|v|valibot)\.object\s*\(/.test(text) || /Joi\.object\s*\(/.test(text);
3466
+ if (!isSchema) continue;
3467
+ const fields = this.extractSchemaFields(text);
3468
+ if (fields.length > 0) {
3469
+ schemas.push({
3470
+ name: varDecl.getName(),
3471
+ fields,
3472
+ filePath,
3473
+ lineNumber: varDecl.getStartLineNumber()
3474
+ });
3475
+ }
3476
+ }
3477
+ }
3478
+ return schemas;
3479
+ }
3480
+ extractSchemaFields(text) {
3481
+ const fields = [];
3482
+ const fieldPattern = /\b(\w+)\s*:\s*(?:z|zod|yup|Yup|Joi|joi|t|v|valibot)\./g;
3483
+ for (const match of text.matchAll(fieldPattern)) {
3484
+ if (match[1]) {
3485
+ fields.push(match[1]);
3486
+ }
3487
+ }
3488
+ return fields;
3489
+ }
3490
+ findMatches(types, schemas) {
3491
+ const candidates = [];
3492
+ for (const typeDef of types) {
3493
+ for (const schemaDef of schemas) {
3494
+ const matchedFields = this.getMatchedFields(typeDef.fields, schemaDef.fields);
3495
+ if (matchedFields.length < 2) continue;
3496
+ const typeFieldCount = typeDef.fields.length;
3497
+ const schemaFieldCount = schemaDef.fields.length;
3498
+ const matchRatio = matchedFields.length / Math.max(typeFieldCount, schemaFieldCount);
3499
+ let confidence;
3500
+ if (matchRatio >= 0.8) {
3501
+ confidence = "high";
3502
+ } else if (matchRatio >= 0.5) {
3503
+ confidence = "medium";
3504
+ } else {
3505
+ confidence = "low";
3506
+ }
3507
+ if (confidence === "low" && !this.namesRelated(typeDef.name, schemaDef.name)) {
3508
+ continue;
3509
+ }
3510
+ candidates.push({
3511
+ typeName: typeDef.name,
3512
+ typeFilePath: typeDef.filePath,
3513
+ typeLineNumber: typeDef.lineNumber,
3514
+ schemaName: schemaDef.name,
3515
+ schemaFilePath: schemaDef.filePath,
3516
+ schemaLineNumber: schemaDef.lineNumber,
3517
+ matchedFields,
3518
+ confidence,
3519
+ suggestion: `Replace "type/interface ${typeDef.name}" with "type ${typeDef.name} = z.infer<typeof ${schemaDef.name}>" (${matchedFields.length}/${typeFieldCount} fields match).`
3520
+ });
3521
+ }
3522
+ }
3523
+ candidates.sort((a, b) => {
3524
+ const confidenceOrder = { high: 0, medium: 1, low: 2 };
3525
+ const diff = confidenceOrder[a.confidence] - confidenceOrder[b.confidence];
3526
+ if (diff !== 0) return diff;
3527
+ return b.matchedFields.length - a.matchedFields.length;
3528
+ });
3529
+ return candidates;
3530
+ }
3531
+ getMatchedFields(typeFields, schemaFields) {
3532
+ const schemaSet = new Set(schemaFields);
3533
+ return typeFields.filter((f) => schemaSet.has(f));
3534
+ }
3535
+ namesRelated(typeName, schemaName) {
3536
+ const normalize = (name) => name.toLowerCase().replace(/schema|type|interface|i$/gi, "");
3537
+ return normalize(typeName) === normalize(schemaName);
3538
+ }
3539
+ };
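A minimal sketch of TypeDedupDetector.detect(), which pairs hand-written interfaces and type aliases with matching schema object shapes so they can be replaced with z.infer<> after migration (the glob is illustrative):

import { Project } from "ts-morph";
import { TypeDedupDetector } from "@schemashift/core";

const project = new Project();
project.addSourceFilesAtPaths("src/**/*.ts");

const detector = new TypeDedupDetector();
const { candidates, summary } = detector.detect(project.getSourceFiles());

console.log(summary);
for (const c of candidates) {
  // Sorted high > medium > low confidence, then by number of matched fields.
  console.log(`[${c.confidence}] ${c.suggestion}`);
}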
1768
3540
  export {
3541
+ BehavioralWarningAnalyzer,
3542
+ BundleEstimator,
1769
3543
  CompatibilityAnalyzer,
1770
3544
  ComplexityEstimator,
1771
3545
  DetailedAnalyzer,
3546
+ DriftDetector,
1772
3547
  EcosystemAnalyzer,
1773
3548
  FormResolverMigrator,
3549
+ GOVERNANCE_TEMPLATES,
1774
3550
  GovernanceEngine,
1775
3551
  IncrementalTracker,
3552
+ MigrationAuditLog,
1776
3553
  MigrationChain,
1777
3554
  MonorepoResolver,
1778
3555
  PackageUpdater,
3556
+ PerformanceAnalyzer,
1779
3557
  PluginLoader,
1780
3558
  SchemaAnalyzer,
1781
3559
  SchemaDependencyResolver,
3560
+ TestScaffolder,
1782
3561
  TransformEngine,
3562
+ TypeDedupDetector,
1783
3563
  buildCallChain,
3564
+ computeParallelBatches,
1784
3565
  detectFormLibraries,
1785
3566
  detectSchemaLibrary,
1786
3567
  detectStandardSchema,
3568
+ getGovernanceTemplate,
3569
+ getGovernanceTemplateNames,
3570
+ getGovernanceTemplatesByCategory,
1787
3571
  isInsideComment,
1788
3572
  isInsideStringLiteral,
1789
3573
  loadConfig,