@schemashift/core 0.10.0 → 0.11.0

package/dist/index.cjs CHANGED
@@ -1,7 +1,9 @@
1
1
  "use strict";
2
+ var __create = Object.create;
2
3
  var __defProp = Object.defineProperty;
3
4
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
5
  var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
5
7
  var __hasOwnProp = Object.prototype.hasOwnProperty;
6
8
  var __export = (target, all) => {
7
9
  for (var name in all)
@@ -15,11 +17,20 @@ var __copyProps = (to, from, except, desc) => {
15
17
  }
16
18
  return to;
17
19
  };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
18
28
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
29
 
20
30
  // src/index.ts
21
31
  var index_exports = {};
22
32
  __export(index_exports, {
33
+ ApprovalManager: () => ApprovalManager,
23
34
  BehavioralWarningAnalyzer: () => BehavioralWarningAnalyzer,
24
35
  BundleEstimator: () => BundleEstimator,
25
36
  CompatibilityAnalyzer: () => CompatibilityAnalyzer,
@@ -30,6 +41,8 @@ __export(index_exports, {
30
41
  FormResolverMigrator: () => FormResolverMigrator,
31
42
  GOVERNANCE_TEMPLATES: () => GOVERNANCE_TEMPLATES,
32
43
  GovernanceEngine: () => GovernanceEngine,
44
+ GovernanceFixer: () => GovernanceFixer,
45
+ GraphExporter: () => GraphExporter,
33
46
  IncrementalTracker: () => IncrementalTracker,
34
47
  MigrationAuditLog: () => MigrationAuditLog,
35
48
  MigrationChain: () => MigrationChain,
@@ -39,25 +52,38 @@ __export(index_exports, {
39
52
  PluginLoader: () => PluginLoader,
40
53
  SchemaAnalyzer: () => SchemaAnalyzer,
41
54
  SchemaDependencyResolver: () => SchemaDependencyResolver,
55
+ StandardSchemaAdvisor: () => StandardSchemaAdvisor,
42
56
  TestScaffolder: () => TestScaffolder,
43
57
  TransformEngine: () => TransformEngine,
44
58
  TypeDedupDetector: () => TypeDedupDetector,
59
+ WebhookNotifier: () => WebhookNotifier,
45
60
  buildCallChain: () => buildCallChain,
46
61
  computeParallelBatches: () => computeParallelBatches,
62
+ conditionalValidation: () => conditionalValidation,
63
+ dependentFields: () => dependentFields,
47
64
  detectFormLibraries: () => detectFormLibraries,
48
65
  detectSchemaLibrary: () => detectSchemaLibrary,
49
66
  detectStandardSchema: () => detectStandardSchema,
67
+ getAllMigrationTemplates: () => getAllMigrationTemplates,
50
68
  getGovernanceTemplate: () => getGovernanceTemplate,
51
69
  getGovernanceTemplateNames: () => getGovernanceTemplateNames,
52
70
  getGovernanceTemplatesByCategory: () => getGovernanceTemplatesByCategory,
71
+ getMigrationTemplate: () => getMigrationTemplate,
72
+ getMigrationTemplateNames: () => getMigrationTemplateNames,
73
+ getMigrationTemplatesByCategory: () => getMigrationTemplatesByCategory,
53
74
  isInsideComment: () => isInsideComment,
54
75
  isInsideStringLiteral: () => isInsideStringLiteral,
55
76
  loadConfig: () => loadConfig,
77
+ mutuallyExclusive: () => mutuallyExclusive,
56
78
  parseCallChain: () => parseCallChain,
79
+ requireIf: () => requireIf,
80
+ requireOneOf: () => requireOneOf,
57
81
  shouldSuppressWarning: () => shouldSuppressWarning,
58
82
  startsWithBase: () => startsWithBase,
83
+ suggestCrossFieldPattern: () => suggestCrossFieldPattern,
59
84
  transformMethodChain: () => transformMethodChain,
60
- validateConfig: () => validateConfig
85
+ validateConfig: () => validateConfig,
86
+ validateMigrationTemplate: () => validateMigrationTemplate
61
87
  });
62
88
  module.exports = __toCommonJS(index_exports);
63
89
 
@@ -196,6 +222,110 @@ var SchemaAnalyzer = class {
196
222
  }
197
223
  };
198
224
 
225
+ // src/approval.ts
226
+ var import_node_fs = require("fs");
227
+ var import_node_path = require("path");
228
+ var ApprovalManager = class {
229
+ pendingDir;
230
+ constructor(projectPath) {
231
+ this.pendingDir = (0, import_node_path.join)(projectPath, ".schemashift", "pending");
232
+ }
233
+ /**
234
+ * Create a new migration request for review.
235
+ */
236
+ createRequest(from, to, files, requestedBy, metadata) {
237
+ const id = `mig-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
238
+ const request = {
239
+ id,
240
+ from,
241
+ to,
242
+ files,
243
+ requestedBy,
244
+ requestedAt: (/* @__PURE__ */ new Date()).toISOString(),
245
+ status: "pending",
246
+ metadata
247
+ };
248
+ this.ensureDir();
249
+ const filePath = (0, import_node_path.join)(this.pendingDir, `${id}.json`);
250
+ (0, import_node_fs.writeFileSync)(filePath, JSON.stringify(request, null, 2), "utf-8");
251
+ return request;
252
+ }
253
+ /**
254
+ * Review (approve or reject) a pending migration request.
255
+ */
256
+ review(decision) {
257
+ const request = this.getRequest(decision.requestId);
258
+ if (!request) {
259
+ throw new Error(`Migration request ${decision.requestId} not found`);
260
+ }
261
+ if (request.status !== "pending") {
262
+ throw new Error(`Migration request ${decision.requestId} is already ${request.status}`);
263
+ }
264
+ request.status = decision.status;
265
+ request.reviewedBy = decision.reviewedBy;
266
+ request.reviewedAt = (/* @__PURE__ */ new Date()).toISOString();
267
+ request.reason = decision.reason;
268
+ const filePath = (0, import_node_path.join)(this.pendingDir, `${decision.requestId}.json`);
269
+ (0, import_node_fs.writeFileSync)(filePath, JSON.stringify(request, null, 2), "utf-8");
270
+ return request;
271
+ }
272
+ /**
273
+ * Get a specific migration request by ID.
274
+ */
275
+ getRequest(id) {
276
+ const filePath = (0, import_node_path.join)(this.pendingDir, `${id}.json`);
277
+ if (!(0, import_node_fs.existsSync)(filePath)) {
278
+ return null;
279
+ }
280
+ const content = (0, import_node_fs.readFileSync)(filePath, "utf-8");
281
+ return JSON.parse(content);
282
+ }
283
+ /**
284
+ * List all migration requests, optionally filtered by status.
285
+ */
286
+ listRequests(status) {
287
+ if (!(0, import_node_fs.existsSync)(this.pendingDir)) {
288
+ return [];
289
+ }
290
+ const files = (0, import_node_fs.readdirSync)(this.pendingDir).filter((f) => f.endsWith(".json"));
291
+ const requests = [];
292
+ for (const file of files) {
293
+ const content = (0, import_node_fs.readFileSync)((0, import_node_path.join)(this.pendingDir, file), "utf-8");
294
+ const request = JSON.parse(content);
295
+ if (!status || request.status === status) {
296
+ requests.push(request);
297
+ }
298
+ }
299
+ return requests.sort(
300
+ (a, b) => new Date(b.requestedAt).getTime() - new Date(a.requestedAt).getTime()
301
+ );
302
+ }
303
+ /**
304
+ * Get summary counts of all requests.
305
+ */
306
+ getSummary() {
307
+ const all = this.listRequests();
308
+ return {
309
+ pending: all.filter((r) => r.status === "pending").length,
310
+ approved: all.filter((r) => r.status === "approved").length,
311
+ rejected: all.filter((r) => r.status === "rejected").length,
312
+ total: all.length
313
+ };
314
+ }
315
+ /**
316
+ * Check if a migration has been approved.
317
+ */
318
+ isApproved(requestId) {
319
+ const request = this.getRequest(requestId);
320
+ return request?.status === "approved";
321
+ }
322
+ ensureDir() {
323
+ if (!(0, import_node_fs.existsSync)(this.pendingDir)) {
324
+ (0, import_node_fs.mkdirSync)(this.pendingDir, { recursive: true });
325
+ }
326
+ }
327
+ };
328
+
199
329
  // src/ast-utils.ts
200
330
  var import_ts_morph2 = require("ts-morph");
201
331
  function parseCallChain(node) {
@@ -332,8 +462,8 @@ function transformMethodChain(chain, newBase, factoryMapper, methodMapper) {
332
462
 
333
463
  // src/audit-log.ts
334
464
  var import_node_crypto = require("crypto");
335
- var import_node_fs = require("fs");
336
- var import_node_path = require("path");
465
+ var import_node_fs2 = require("fs");
466
+ var import_node_path2 = require("path");
337
467
  var AUDIT_DIR = ".schemashift";
338
468
  var AUDIT_FILE = "audit-log.json";
339
469
  var AUDIT_VERSION = 1;
@@ -341,8 +471,8 @@ var MigrationAuditLog = class {
341
471
  logDir;
342
472
  logPath;
343
473
  constructor(projectPath) {
344
- this.logDir = (0, import_node_path.join)(projectPath, AUDIT_DIR);
345
- this.logPath = (0, import_node_path.join)(this.logDir, AUDIT_FILE);
474
+ this.logDir = (0, import_node_path2.join)(projectPath, AUDIT_DIR);
475
+ this.logPath = (0, import_node_path2.join)(this.logDir, AUDIT_FILE);
346
476
  }
347
477
  /**
348
478
  * Append a new entry to the audit log.
@@ -378,11 +508,11 @@ var MigrationAuditLog = class {
378
508
  * Read the current audit log.
379
509
  */
380
510
  read() {
381
- if (!(0, import_node_fs.existsSync)(this.logPath)) {
511
+ if (!(0, import_node_fs2.existsSync)(this.logPath)) {
382
512
  return { version: AUDIT_VERSION, entries: [] };
383
513
  }
384
514
  try {
385
- const content = (0, import_node_fs.readFileSync)(this.logPath, "utf-8");
515
+ const content = (0, import_node_fs2.readFileSync)(this.logPath, "utf-8");
386
516
  if (!content.trim()) {
387
517
  return { version: AUDIT_VERSION, entries: [] };
388
518
  }
@@ -475,10 +605,10 @@ var MigrationAuditLog = class {
475
605
  };
476
606
  }
477
607
  write(log) {
478
- if (!(0, import_node_fs.existsSync)(this.logDir)) {
479
- (0, import_node_fs.mkdirSync)(this.logDir, { recursive: true });
608
+ if (!(0, import_node_fs2.existsSync)(this.logDir)) {
609
+ (0, import_node_fs2.mkdirSync)(this.logDir, { recursive: true });
480
610
  }
481
- (0, import_node_fs.writeFileSync)(this.logPath, JSON.stringify(log, null, 2));
611
+ (0, import_node_fs2.writeFileSync)(this.logPath, JSON.stringify(log, null, 2));
482
612
  }
483
613
  hashContent(content) {
484
614
  return (0, import_node_crypto.createHash)("sha256").update(content).digest("hex").substring(0, 16);
@@ -911,12 +1041,12 @@ var MigrationChain = class {
911
1041
  };
912
1042
 
913
1043
  // src/compatibility.ts
914
- var import_node_fs3 = require("fs");
915
- var import_node_path3 = require("path");
1044
+ var import_node_fs4 = require("fs");
1045
+ var import_node_path4 = require("path");
916
1046
 
917
1047
  // src/ecosystem.ts
918
- var import_node_fs2 = require("fs");
919
- var import_node_path2 = require("path");
1048
+ var import_node_fs3 = require("fs");
1049
+ var import_node_path3 = require("path");
920
1050
  var ECOSYSTEM_RULES = [
921
1051
  // ORM integrations
922
1052
  {
@@ -1171,6 +1301,72 @@ var ECOSYSTEM_RULES = [
1171
1301
  upgradeCommand: "npm install nuqs@latest"
1172
1302
  })
1173
1303
  },
1304
+ // Server action / routing integrations
1305
+ {
1306
+ package: "next-safe-action",
1307
+ category: "api",
1308
+ migrations: ["zod-v3->v4"],
1309
+ check: () => ({
1310
+ issue: "next-safe-action uses Zod for input validation. Zod v4 type changes may break action definitions.",
1311
+ suggestion: "Upgrade next-safe-action to the latest version with Zod v4 support.",
1312
+ severity: "warning",
1313
+ upgradeCommand: "npm install next-safe-action@latest"
1314
+ })
1315
+ },
1316
+ {
1317
+ package: "@tanstack/router",
1318
+ category: "api",
1319
+ migrations: ["zod-v3->v4"],
1320
+ check: () => ({
1321
+ issue: "@tanstack/router uses Zod for route parameter validation. Zod v4 changes may affect type inference.",
1322
+ suggestion: "Upgrade @tanstack/router to a version with Zod v4 support.",
1323
+ severity: "warning",
1324
+ upgradeCommand: "npm install @tanstack/router@latest"
1325
+ })
1326
+ },
1327
+ {
1328
+ package: "@tanstack/react-query",
1329
+ category: "api",
1330
+ migrations: ["zod-v3->v4"],
1331
+ check: () => ({
1332
+ issue: "@tanstack/react-query may use Zod for query key/param validation via integrations.",
1333
+ suggestion: "Verify any Zod-based query validation still works after the Zod v4 upgrade.",
1334
+ severity: "info"
1335
+ })
1336
+ },
1337
+ {
1338
+ package: "fastify-type-provider-zod",
1339
+ category: "api",
1340
+ migrations: ["zod-v3->v4"],
1341
+ check: () => ({
1342
+ issue: "fastify-type-provider-zod needs a Zod v4-compatible version.",
1343
+ suggestion: "Upgrade fastify-type-provider-zod to a version supporting Zod v4.",
1344
+ severity: "warning",
1345
+ upgradeCommand: "npm install fastify-type-provider-zod@latest"
1346
+ })
1347
+ },
1348
+ {
1349
+ package: "zod-i18n-map",
1350
+ category: "validation-util",
1351
+ migrations: ["zod-v3->v4"],
1352
+ check: () => ({
1353
+ issue: 'zod-i18n-map uses Zod v3 error map format. Error messages changed in v4 (e.g., "Required" is now descriptive).',
1354
+ suggestion: "Check for a Zod v4-compatible version of zod-i18n-map or update custom error maps.",
1355
+ severity: "warning",
1356
+ upgradeCommand: "npm install zod-i18n-map@latest"
1357
+ })
1358
+ },
1359
+ {
1360
+ package: "openapi-zod-client",
1361
+ category: "openapi",
1362
+ migrations: ["zod-v3->v4"],
1363
+ check: () => ({
1364
+ issue: "openapi-zod-client generates Zod v3 schemas from OpenAPI specs. Generated code may need regeneration.",
1365
+ suggestion: "Upgrade openapi-zod-client and regenerate schemas for Zod v4 compatibility.",
1366
+ severity: "warning",
1367
+ upgradeCommand: "npm install openapi-zod-client@latest"
1368
+ })
1369
+ },
1174
1370
  // Schema library detection for cross-library migrations
1175
1371
  {
1176
1372
  package: "@effect/schema",
@@ -1248,13 +1444,13 @@ var EcosystemAnalyzer = class {
1248
1444
  const dependencies = [];
1249
1445
  const warnings = [];
1250
1446
  const blockers = [];
1251
- const pkgPath = (0, import_node_path2.join)(projectPath, "package.json");
1252
- if (!(0, import_node_fs2.existsSync)(pkgPath)) {
1447
+ const pkgPath = (0, import_node_path3.join)(projectPath, "package.json");
1448
+ if (!(0, import_node_fs3.existsSync)(pkgPath)) {
1253
1449
  return { dependencies, warnings, blockers };
1254
1450
  }
1255
1451
  let allDeps = {};
1256
1452
  try {
1257
- const pkg = JSON.parse((0, import_node_fs2.readFileSync)(pkgPath, "utf-8"));
1453
+ const pkg = JSON.parse((0, import_node_fs3.readFileSync)(pkgPath, "utf-8"));
1258
1454
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
1259
1455
  } catch {
1260
1456
  return { dependencies, warnings, blockers };
@@ -1375,10 +1571,10 @@ var CompatibilityAnalyzer = class {
1375
1571
  ecosystemAnalyzer = new EcosystemAnalyzer();
1376
1572
  detectVersions(projectPath) {
1377
1573
  const versions = [];
1378
- const pkgPath = (0, import_node_path3.join)(projectPath, "package.json");
1379
- if (!(0, import_node_fs3.existsSync)(pkgPath)) return versions;
1574
+ const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1575
+ if (!(0, import_node_fs4.existsSync)(pkgPath)) return versions;
1380
1576
  try {
1381
- const pkg = JSON.parse((0, import_node_fs3.readFileSync)(pkgPath, "utf-8"));
1577
+ const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
1382
1578
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
1383
1579
  const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
1384
1580
  for (const lib of knownLibs) {
@@ -1599,9 +1795,115 @@ async function loadConfig(configPath) {
1599
1795
  };
1600
1796
  }
1601
1797
 
1798
+ // src/cross-field-patterns.ts
1799
+ function requireIf(conditionField, requiredField) {
1800
+ return {
1801
+ name: `requireIf(${conditionField}, ${requiredField})`,
1802
+ description: `${requiredField} is required when ${conditionField} is truthy`,
1803
+ zodCode: [
1804
+ ".superRefine((data, ctx) => {",
1805
+ ` if (data.${conditionField} && !data.${requiredField}) {`,
1806
+ " ctx.addIssue({",
1807
+ " code: z.ZodIssueCode.custom,",
1808
+ ` message: '${requiredField} is required when ${conditionField} is set',`,
1809
+ ` path: ['${requiredField}'],`,
1810
+ " });",
1811
+ " }",
1812
+ "})"
1813
+ ].join("\n")
1814
+ };
1815
+ }
1816
+ function requireOneOf(fields) {
1817
+ const fieldList = fields.map((f) => `'${f}'`).join(", ");
1818
+ const conditions = fields.map((f) => `data.${f}`).join(" || ");
1819
+ return {
1820
+ name: `requireOneOf(${fields.join(", ")})`,
1821
+ description: `At least one of [${fields.join(", ")}] must be provided`,
1822
+ zodCode: [
1823
+ ".superRefine((data, ctx) => {",
1824
+ ` if (!(${conditions})) {`,
1825
+ " ctx.addIssue({",
1826
+ " code: z.ZodIssueCode.custom,",
1827
+ ` message: 'At least one of [${fields.join(", ")}] is required',`,
1828
+ ` path: [${fieldList}],`,
1829
+ " });",
1830
+ " }",
1831
+ "})"
1832
+ ].join("\n")
1833
+ };
1834
+ }
1835
+ function mutuallyExclusive(fields) {
1836
+ const checks = fields.map((f) => `(data.${f} ? 1 : 0)`).join(" + ");
1837
+ return {
1838
+ name: `mutuallyExclusive(${fields.join(", ")})`,
1839
+ description: `Only one of [${fields.join(", ")}] can be set at a time`,
1840
+ zodCode: [
1841
+ ".superRefine((data, ctx) => {",
1842
+ ` const count = ${checks};`,
1843
+ " if (count > 1) {",
1844
+ " ctx.addIssue({",
1845
+ " code: z.ZodIssueCode.custom,",
1846
+ ` message: 'Only one of [${fields.join(", ")}] can be set at a time',`,
1847
+ " });",
1848
+ " }",
1849
+ "})"
1850
+ ].join("\n")
1851
+ };
1852
+ }
1853
+ function dependentFields(primaryField, dependents) {
1854
+ const checks = dependents.map(
1855
+ (f) => ` if (!data.${f}) {
1856
+ ctx.addIssue({ code: z.ZodIssueCode.custom, message: '${f} is required when ${primaryField} is set', path: ['${f}'] });
1857
+ }`
1858
+ ).join("\n");
1859
+ return {
1860
+ name: `dependentFields(${primaryField} -> ${dependents.join(", ")})`,
1861
+ description: `When ${primaryField} is set, [${dependents.join(", ")}] are required`,
1862
+ zodCode: [
1863
+ ".superRefine((data, ctx) => {",
1864
+ ` if (data.${primaryField}) {`,
1865
+ checks,
1866
+ " }",
1867
+ "})"
1868
+ ].join("\n")
1869
+ };
1870
+ }
1871
+ function conditionalValidation(conditionField, conditionValue, targetField, validationMessage) {
1872
+ return {
1873
+ name: `conditionalValidation(${conditionField}=${conditionValue} -> ${targetField})`,
1874
+ description: `Validate ${targetField} when ${conditionField} equals ${conditionValue}`,
1875
+ zodCode: [
1876
+ ".superRefine((data, ctx) => {",
1877
+ ` if (data.${conditionField} === ${conditionValue} && !data.${targetField}) {`,
1878
+ " ctx.addIssue({",
1879
+ " code: z.ZodIssueCode.custom,",
1880
+ ` message: '${validationMessage}',`,
1881
+ ` path: ['${targetField}'],`,
1882
+ " });",
1883
+ " }",
1884
+ "})"
1885
+ ].join("\n")
1886
+ };
1887
+ }
1888
+ function suggestCrossFieldPattern(whenCode) {
1889
+ const booleanMatch = whenCode.match(/\.when\(['"](\w+)['"]\s*,\s*\{[^}]*is:\s*true/);
1890
+ if (booleanMatch?.[1]) {
1891
+ const field = booleanMatch[1];
1892
+ return requireIf(field, "targetField");
1893
+ }
1894
+ const multiFieldMatch = whenCode.match(/\.when\(\[([^\]]+)\]/);
1895
+ if (multiFieldMatch?.[1]) {
1896
+ const fields = multiFieldMatch[1].split(",").map((f) => f.trim().replace(/['"]/g, "")).filter(Boolean);
1897
+ if (fields.length > 1) {
1898
+ return dependentFields(fields[0] ?? "primary", fields.slice(1));
1899
+ }
1900
+ }
1901
+ return null;
1902
+ }
1903
+
1602
1904
  // src/dependency-graph.ts
1603
- var import_node_fs4 = require("fs");
1604
- var import_node_path4 = require("path");
1905
+ var import_node_fs5 = require("fs");
1906
+ var import_node_path5 = require("path");
1605
1907
  var SchemaDependencyResolver = class {
1606
1908
  resolve(project, filePaths) {
1607
1909
  const fileSet = new Set(filePaths);
@@ -1728,38 +2030,38 @@ function computeParallelBatches(packages, suggestedOrder) {
1728
2030
  }
1729
2031
  var MonorepoResolver = class {
1730
2032
  detect(projectPath) {
1731
- const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1732
- if ((0, import_node_fs4.existsSync)(pkgPath)) {
2033
+ const pkgPath = (0, import_node_path5.join)(projectPath, "package.json");
2034
+ if ((0, import_node_fs5.existsSync)(pkgPath)) {
1733
2035
  try {
1734
- const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
2036
+ const pkg = JSON.parse((0, import_node_fs5.readFileSync)(pkgPath, "utf-8"));
1735
2037
  if (pkg.workspaces) return true;
1736
2038
  } catch {
1737
2039
  }
1738
2040
  }
1739
- if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "pnpm-workspace.yaml"))) return true;
2041
+ if ((0, import_node_fs5.existsSync)((0, import_node_path5.join)(projectPath, "pnpm-workspace.yaml"))) return true;
1740
2042
  return false;
1741
2043
  }
1742
2044
  /**
1743
2045
  * Detect which workspace manager is being used.
1744
2046
  */
1745
2047
  detectManager(projectPath) {
1746
- if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
1747
- const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1748
- if ((0, import_node_fs4.existsSync)(pkgPath)) {
2048
+ if ((0, import_node_fs5.existsSync)((0, import_node_path5.join)(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
2049
+ const pkgPath = (0, import_node_path5.join)(projectPath, "package.json");
2050
+ if ((0, import_node_fs5.existsSync)(pkgPath)) {
1749
2051
  try {
1750
- const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
2052
+ const pkg = JSON.parse((0, import_node_fs5.readFileSync)(pkgPath, "utf-8"));
1751
2053
  if (pkg.packageManager?.startsWith("yarn")) return "yarn";
1752
2054
  if (pkg.packageManager?.startsWith("pnpm")) return "pnpm";
1753
2055
  } catch {
1754
2056
  }
1755
2057
  }
1756
- if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "pnpm-lock.yaml"))) return "pnpm";
1757
- if ((0, import_node_fs4.existsSync)((0, import_node_path4.join)(projectPath, "yarn.lock"))) return "yarn";
2058
+ if ((0, import_node_fs5.existsSync)((0, import_node_path5.join)(projectPath, "pnpm-lock.yaml"))) return "pnpm";
2059
+ if ((0, import_node_fs5.existsSync)((0, import_node_path5.join)(projectPath, "yarn.lock"))) return "yarn";
1758
2060
  return "npm";
1759
2061
  }
1760
2062
  analyze(projectPath) {
1761
- const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1762
- if (!(0, import_node_fs4.existsSync)(pkgPath)) {
2063
+ const pkgPath = (0, import_node_path5.join)(projectPath, "package.json");
2064
+ if (!(0, import_node_fs5.existsSync)(pkgPath)) {
1763
2065
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
1764
2066
  }
1765
2067
  let workspaceGlobs;
@@ -1774,10 +2076,10 @@ var MonorepoResolver = class {
1774
2076
  const packages = [];
1775
2077
  const resolvedDirs = this.resolveWorkspaceDirs(projectPath, workspaceGlobs);
1776
2078
  for (const dir of resolvedDirs) {
1777
- const wsPkgPath = (0, import_node_path4.join)(dir, "package.json");
1778
- if (!(0, import_node_fs4.existsSync)(wsPkgPath)) continue;
2079
+ const wsPkgPath = (0, import_node_path5.join)(dir, "package.json");
2080
+ if (!(0, import_node_fs5.existsSync)(wsPkgPath)) continue;
1779
2081
  try {
1780
- const wsPkg = JSON.parse((0, import_node_fs4.readFileSync)(wsPkgPath, "utf-8"));
2082
+ const wsPkg = JSON.parse((0, import_node_fs5.readFileSync)(wsPkgPath, "utf-8"));
1781
2083
  if (!wsPkg.name) continue;
1782
2084
  const allDeps = { ...wsPkg.dependencies, ...wsPkg.devDependencies };
1783
2085
  const depNames = Object.keys(allDeps);
@@ -1821,14 +2123,14 @@ var MonorepoResolver = class {
1821
2123
  * Supports: npm/yarn workspaces (package.json), pnpm-workspace.yaml
1822
2124
  */
1823
2125
  resolveWorkspaceGlobs(projectPath) {
1824
- const pnpmPath = (0, import_node_path4.join)(projectPath, "pnpm-workspace.yaml");
1825
- if ((0, import_node_fs4.existsSync)(pnpmPath)) {
2126
+ const pnpmPath = (0, import_node_path5.join)(projectPath, "pnpm-workspace.yaml");
2127
+ if ((0, import_node_fs5.existsSync)(pnpmPath)) {
1826
2128
  return this.parsePnpmWorkspace(pnpmPath);
1827
2129
  }
1828
- const pkgPath = (0, import_node_path4.join)(projectPath, "package.json");
1829
- if ((0, import_node_fs4.existsSync)(pkgPath)) {
2130
+ const pkgPath = (0, import_node_path5.join)(projectPath, "package.json");
2131
+ if ((0, import_node_fs5.existsSync)(pkgPath)) {
1830
2132
  try {
1831
- const pkg = JSON.parse((0, import_node_fs4.readFileSync)(pkgPath, "utf-8"));
2133
+ const pkg = JSON.parse((0, import_node_fs5.readFileSync)(pkgPath, "utf-8"));
1832
2134
  if (pkg.workspaces) {
1833
2135
  return Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
1834
2136
  }
@@ -1847,7 +2149,7 @@ var MonorepoResolver = class {
1847
2149
  * ```
1848
2150
  */
1849
2151
  parsePnpmWorkspace(filePath) {
1850
- const content = (0, import_node_fs4.readFileSync)(filePath, "utf-8");
2152
+ const content = (0, import_node_fs5.readFileSync)(filePath, "utf-8");
1851
2153
  const globs = [];
1852
2154
  let inPackages = false;
1853
2155
  for (const line of content.split("\n")) {
@@ -1872,14 +2174,14 @@ var MonorepoResolver = class {
1872
2174
  const dirs = [];
1873
2175
  for (const glob of globs) {
1874
2176
  const clean = glob.replace(/\/?\*$/, "");
1875
- const base = (0, import_node_path4.resolve)(projectPath, clean);
1876
- if (!(0, import_node_fs4.existsSync)(base)) continue;
2177
+ const base = (0, import_node_path5.resolve)(projectPath, clean);
2178
+ if (!(0, import_node_fs5.existsSync)(base)) continue;
1877
2179
  if (glob.endsWith("*")) {
1878
2180
  try {
1879
- const entries = (0, import_node_fs4.readdirSync)(base, { withFileTypes: true });
2181
+ const entries = (0, import_node_fs5.readdirSync)(base, { withFileTypes: true });
1880
2182
  for (const entry of entries) {
1881
2183
  if (entry.isDirectory()) {
1882
- dirs.push((0, import_node_path4.join)(base, entry.name));
2184
+ dirs.push((0, import_node_path5.join)(base, entry.name));
1883
2185
  }
1884
2186
  }
1885
2187
  } catch {
@@ -1893,8 +2195,8 @@ var MonorepoResolver = class {
1893
2195
  };
1894
2196
 
1895
2197
  // src/detailed-analyzer.ts
1896
- var import_node_fs5 = require("fs");
1897
- var import_node_path5 = require("path");
2198
+ var import_node_fs6 = require("fs");
2199
+ var import_node_path6 = require("path");
1898
2200
  var COMPLEXITY_CHAIN_WEIGHT = 2;
1899
2201
  var COMPLEXITY_DEPTH_WEIGHT = 3;
1900
2202
  var COMPLEXITY_VALIDATION_WEIGHT = 1;
@@ -1959,10 +2261,10 @@ var DetailedAnalyzer = class {
1959
2261
  }
1960
2262
  detectLibraryVersions(projectPath) {
1961
2263
  const versions = [];
1962
- const pkgPath = (0, import_node_path5.join)(projectPath, "package.json");
1963
- if (!(0, import_node_fs5.existsSync)(pkgPath)) return versions;
2264
+ const pkgPath = (0, import_node_path6.join)(projectPath, "package.json");
2265
+ if (!(0, import_node_fs6.existsSync)(pkgPath)) return versions;
1964
2266
  try {
1965
- const pkg = JSON.parse((0, import_node_fs5.readFileSync)(pkgPath, "utf-8"));
2267
+ const pkg = JSON.parse((0, import_node_fs6.readFileSync)(pkgPath, "utf-8"));
1966
2268
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
1967
2269
  const allDeps = {
1968
2270
  ...pkg.dependencies,
@@ -2137,8 +2439,8 @@ var DetailedAnalyzer = class {
2137
2439
 
2138
2440
  // src/drift-detector.ts
2139
2441
  var import_node_crypto2 = require("crypto");
2140
- var import_node_fs6 = require("fs");
2141
- var import_node_path6 = require("path");
2442
+ var import_node_fs7 = require("fs");
2443
+ var import_node_path7 = require("path");
2142
2444
  var SNAPSHOT_DIR = ".schemashift";
2143
2445
  var SNAPSHOT_FILE = "schema-snapshot.json";
2144
2446
  var SNAPSHOT_VERSION = 1;
@@ -2146,8 +2448,8 @@ var DriftDetector = class {
2146
2448
  snapshotDir;
2147
2449
  snapshotPath;
2148
2450
  constructor(projectPath) {
2149
- this.snapshotDir = (0, import_node_path6.join)(projectPath, SNAPSHOT_DIR);
2150
- this.snapshotPath = (0, import_node_path6.join)(this.snapshotDir, SNAPSHOT_FILE);
2451
+ this.snapshotDir = (0, import_node_path7.join)(projectPath, SNAPSHOT_DIR);
2452
+ this.snapshotPath = (0, import_node_path7.join)(this.snapshotDir, SNAPSHOT_FILE);
2151
2453
  }
2152
2454
  /**
2153
2455
  * Take a snapshot of the current schema state
@@ -2155,13 +2457,13 @@ var DriftDetector = class {
2155
2457
  snapshot(files, projectPath) {
2156
2458
  const schemas = [];
2157
2459
  for (const filePath of files) {
2158
- if (!(0, import_node_fs6.existsSync)(filePath)) continue;
2159
- const content = (0, import_node_fs6.readFileSync)(filePath, "utf-8");
2460
+ if (!(0, import_node_fs7.existsSync)(filePath)) continue;
2461
+ const content = (0, import_node_fs7.readFileSync)(filePath, "utf-8");
2160
2462
  const library = this.detectLibraryFromContent(content);
2161
2463
  if (library === "unknown") continue;
2162
2464
  const schemaNames = this.extractSchemaNames(content);
2163
2465
  schemas.push({
2164
- filePath: (0, import_node_path6.relative)(projectPath, filePath),
2466
+ filePath: (0, import_node_path7.relative)(projectPath, filePath),
2165
2467
  library,
2166
2468
  contentHash: this.hashContent(content),
2167
2469
  schemaCount: schemaNames.length,
@@ -2180,20 +2482,20 @@ var DriftDetector = class {
2180
2482
  * Save a snapshot to disk
2181
2483
  */
2182
2484
  saveSnapshot(snapshot) {
2183
- if (!(0, import_node_fs6.existsSync)(this.snapshotDir)) {
2184
- (0, import_node_fs6.mkdirSync)(this.snapshotDir, { recursive: true });
2485
+ if (!(0, import_node_fs7.existsSync)(this.snapshotDir)) {
2486
+ (0, import_node_fs7.mkdirSync)(this.snapshotDir, { recursive: true });
2185
2487
  }
2186
- (0, import_node_fs6.writeFileSync)(this.snapshotPath, JSON.stringify(snapshot, null, 2));
2488
+ (0, import_node_fs7.writeFileSync)(this.snapshotPath, JSON.stringify(snapshot, null, 2));
2187
2489
  }
2188
2490
  /**
2189
2491
  * Load saved snapshot from disk
2190
2492
  */
2191
2493
  loadSnapshot() {
2192
- if (!(0, import_node_fs6.existsSync)(this.snapshotPath)) {
2494
+ if (!(0, import_node_fs7.existsSync)(this.snapshotPath)) {
2193
2495
  return null;
2194
2496
  }
2195
2497
  try {
2196
- const content = (0, import_node_fs6.readFileSync)(this.snapshotPath, "utf-8");
2498
+ const content = (0, import_node_fs7.readFileSync)(this.snapshotPath, "utf-8");
2197
2499
  return JSON.parse(content);
2198
2500
  } catch {
2199
2501
  return null;
@@ -2655,6 +2957,250 @@ var GovernanceEngine = class {
2655
2957
  }
2656
2958
  };
2657
2959
 
2960
+ // src/governance-fixer.ts
2961
+ var GovernanceFixer = class {
2962
+ defaultMaxLength = 1e4;
2963
+ /**
2964
+ * Set the default max length appended by the require-max-length fix.
2965
+ */
2966
+ setDefaultMaxLength(length) {
2967
+ this.defaultMaxLength = length;
2968
+ }
2969
+ /**
2970
+ * Check if a violation is auto-fixable.
2971
+ */
2972
+ canFix(violation) {
2973
+ return [
2974
+ "no-any-schemas",
2975
+ "require-descriptions",
2976
+ "require-max-length",
2977
+ "naming-convention",
2978
+ "no-any",
2979
+ "require-description",
2980
+ "required-validations",
2981
+ "require-safeParse"
2982
+ ].includes(violation.rule);
2983
+ }
2984
+ /**
2985
+ * Fix a single violation in a source file.
2986
+ * Returns the fixed code for the entire file.
2987
+ */
2988
+ fix(violation, sourceCode) {
2989
+ switch (violation.rule) {
2990
+ case "no-any-schemas":
2991
+ case "no-any":
2992
+ return this.fixNoAny(violation, sourceCode);
2993
+ case "require-descriptions":
2994
+ case "require-description":
2995
+ return this.fixRequireDescription(violation, sourceCode);
2996
+ case "require-max-length":
2997
+ case "required-validations":
2998
+ return this.fixRequireMaxLength(violation, sourceCode);
2999
+ case "naming-convention":
3000
+ return this.fixNamingConvention(violation, sourceCode);
3001
+ case "require-safeParse":
3002
+ return this.fixRequireSafeParse(violation, sourceCode);
3003
+ default:
3004
+ return {
3005
+ success: false,
3006
+ explanation: `No auto-fix available for rule: ${violation.rule}`,
3007
+ rule: violation.rule,
3008
+ lineNumber: violation.lineNumber
3009
+ };
3010
+ }
3011
+ }
3012
+ /**
3013
+ * Fix all fixable violations in a source file.
3014
+ * Applies fixes from bottom to top to preserve line numbers.
3015
+ */
3016
+ fixAll(violations, sourceCode) {
3017
+ const fixable = violations.filter((v) => this.canFix(v));
3018
+ const results = [];
3019
+ let currentCode = sourceCode;
3020
+ let fixed = 0;
3021
+ const sorted = [...fixable].sort((a, b) => b.lineNumber - a.lineNumber);
3022
+ for (const violation of sorted) {
3023
+ const result = this.fix(violation, currentCode);
3024
+ results.push(result);
3025
+ if (result.success && result.fixedCode) {
3026
+ currentCode = result.fixedCode;
3027
+ fixed++;
3028
+ }
3029
+ }
3030
+ return {
3031
+ totalViolations: violations.length,
3032
+ fixed,
3033
+ skipped: violations.length - fixed,
3034
+ results
3035
+ };
3036
+ }
3037
+ fixNoAny(violation, sourceCode) {
3038
+ const lines = sourceCode.split("\n");
3039
+ const lineIndex = violation.lineNumber - 1;
3040
+ const line = lines[lineIndex];
3041
+ if (!line) {
3042
+ return {
3043
+ success: false,
3044
+ explanation: `Line ${violation.lineNumber} not found`,
3045
+ rule: violation.rule,
3046
+ lineNumber: violation.lineNumber
3047
+ };
3048
+ }
3049
+ let fixedLine = line;
3050
+ let explanation = "";
3051
+ if (/\bz\.any\(\)/.test(line)) {
3052
+ fixedLine = line.replace(/\bz\.any\(\)/, "z.unknown()");
3053
+ explanation = "Replaced z.any() with z.unknown() for type safety";
3054
+ } else if (/\byup\.mixed\(\)/.test(line)) {
3055
+ fixedLine = line.replace(/\byup\.mixed\(\)/, "yup.mixed().required()");
3056
+ explanation = "Added .required() constraint to yup.mixed()";
3057
+ } else if (/\bt\.any\b/.test(line)) {
3058
+ fixedLine = line.replace(/\bt\.any\b/, "t.unknown");
3059
+ explanation = "Replaced t.any with t.unknown for type safety";
3060
+ } else if (/\bv\.any\(\)/.test(line)) {
3061
+ fixedLine = line.replace(/\bv\.any\(\)/, "v.unknown()");
3062
+ explanation = "Replaced v.any() with v.unknown() for type safety";
3063
+ } else {
3064
+ return {
3065
+ success: false,
3066
+ explanation: "Could not identify any-type pattern to fix",
3067
+ rule: violation.rule,
3068
+ lineNumber: violation.lineNumber
3069
+ };
3070
+ }
3071
+ lines[lineIndex] = fixedLine;
3072
+ return {
3073
+ success: true,
3074
+ fixedCode: lines.join("\n"),
3075
+ explanation,
3076
+ rule: violation.rule,
3077
+ lineNumber: violation.lineNumber
3078
+ };
3079
+ }
3080
+ fixRequireDescription(violation, sourceCode) {
3081
+ const lines = sourceCode.split("\n");
3082
+ const lineIndex = violation.lineNumber - 1;
3083
+ const line = lines[lineIndex];
3084
+ if (!line) {
3085
+ return {
3086
+ success: false,
3087
+ explanation: `Line ${violation.lineNumber} not found`,
3088
+ rule: violation.rule,
3089
+ lineNumber: violation.lineNumber
3090
+ };
3091
+ }
3092
+ let endLineIndex = lineIndex;
3093
+ for (let i = lineIndex; i < lines.length && i < lineIndex + 20; i++) {
3094
+ if (lines[i]?.includes(";")) {
3095
+ endLineIndex = i;
3096
+ break;
3097
+ }
3098
+ }
3099
+ const endLine = lines[endLineIndex] ?? "";
3100
+ const schemaName = violation.schemaName || "schema";
3101
+ const description = `${schemaName} schema`;
3102
+ const semicolonIndex = endLine.lastIndexOf(";");
3103
+ if (semicolonIndex >= 0) {
3104
+ lines[endLineIndex] = `${endLine.slice(0, semicolonIndex)}.describe('${description}')${endLine.slice(semicolonIndex)}`;
3105
+ } else {
3106
+ lines[endLineIndex] = `${endLine}.describe('${description}')`;
3107
+ }
3108
+ return {
3109
+ success: true,
3110
+ fixedCode: lines.join("\n"),
3111
+ explanation: `Added .describe('${description}') to ${schemaName}`,
3112
+ rule: violation.rule,
3113
+ lineNumber: violation.lineNumber
3114
+ };
3115
+ }
3116
+ fixRequireMaxLength(violation, sourceCode) {
3117
+ const lines = sourceCode.split("\n");
3118
+ const lineIndex = violation.lineNumber - 1;
3119
+ const line = lines[lineIndex];
3120
+ if (!line) {
3121
+ return {
3122
+ success: false,
3123
+ explanation: `Line ${violation.lineNumber} not found`,
3124
+ rule: violation.rule,
3125
+ lineNumber: violation.lineNumber
3126
+ };
3127
+ }
3128
+ if (/z\.string\(\)/.test(line)) {
3129
+ lines[lineIndex] = line.replace(/z\.string\(\)/, `z.string().max(${this.defaultMaxLength})`);
3130
+ return {
3131
+ success: true,
3132
+ fixedCode: lines.join("\n"),
3133
+ explanation: `Added .max(${this.defaultMaxLength}) to string schema`,
3134
+ rule: violation.rule,
3135
+ lineNumber: violation.lineNumber
3136
+ };
3137
+ }
3138
+ return {
3139
+ success: false,
3140
+ explanation: "Could not find z.string() pattern to fix on this line",
3141
+ rule: violation.rule,
3142
+ lineNumber: violation.lineNumber
3143
+ };
3144
+ }
3145
+ fixNamingConvention(violation, sourceCode) {
3146
+ const schemaName = violation.schemaName;
3147
+ if (!schemaName) {
3148
+ return {
3149
+ success: false,
3150
+ explanation: "No schema name available for renaming",
3151
+ rule: violation.rule,
3152
+ lineNumber: violation.lineNumber
3153
+ };
3154
+ }
3155
+ const newName = schemaName.endsWith("Schema") ? schemaName : `${schemaName}Schema`;
3156
+ if (newName === schemaName) {
3157
+ return {
3158
+ success: false,
3159
+ explanation: "Schema already matches naming convention",
3160
+ rule: violation.rule,
3161
+ lineNumber: violation.lineNumber
3162
+ };
3163
+ }
3164
+ const fixedCode = sourceCode.replace(new RegExp(`\\b${schemaName}\\b`, "g"), newName);
3165
+ return {
3166
+ success: true,
3167
+ fixedCode,
3168
+ explanation: `Renamed "${schemaName}" to "${newName}"`,
3169
+ rule: violation.rule,
3170
+ lineNumber: violation.lineNumber
3171
+ };
3172
+ }
3173
+ fixRequireSafeParse(violation, sourceCode) {
3174
+ const lines = sourceCode.split("\n");
3175
+ const lineIndex = violation.lineNumber - 1;
3176
+ const line = lines[lineIndex];
3177
+ if (!line) {
3178
+ return {
3179
+ success: false,
3180
+ explanation: `Line ${violation.lineNumber} not found`,
3181
+ rule: violation.rule,
3182
+ lineNumber: violation.lineNumber
3183
+ };
3184
+ }
3185
+ if (line.includes(".parse(") && !line.includes(".safeParse(")) {
3186
+ lines[lineIndex] = line.replace(".parse(", ".safeParse(");
3187
+ return {
3188
+ success: true,
3189
+ fixedCode: lines.join("\n"),
3190
+ explanation: "Replaced .parse() with .safeParse() for safer error handling",
3191
+ rule: violation.rule,
3192
+ lineNumber: violation.lineNumber
3193
+ };
3194
+ }
3195
+ return {
3196
+ success: false,
3197
+ explanation: "Could not find .parse() pattern to fix",
3198
+ rule: violation.rule,
3199
+ lineNumber: violation.lineNumber
3200
+ };
3201
+ }
3202
+ };
3203
+
2658
3204
  // src/governance-templates.ts
2659
3205
  var GOVERNANCE_TEMPLATES = [
2660
3206
  {
@@ -2903,17 +3449,184 @@ function getGovernanceTemplateNames() {
2903
3449
  return GOVERNANCE_TEMPLATES.map((t) => t.name);
2904
3450
  }
2905
3451
 
3452
+ // src/graph-exporter.ts
3453
+ var LIBRARY_COLORS = {
3454
+ zod: "#3068B7",
3455
+ yup: "#32CD32",
3456
+ joi: "#FF6347",
3457
+ "io-ts": "#9370DB",
3458
+ valibot: "#FF8C00",
3459
+ arktype: "#20B2AA",
3460
+ superstruct: "#DAA520",
3461
+ effect: "#6A5ACD"
3462
+ };
3463
+ var LIBRARY_MERMAID_STYLES = {
3464
+ zod: "fill:#3068B7,color:#fff",
3465
+ yup: "fill:#32CD32,color:#000",
3466
+ joi: "fill:#FF6347,color:#fff",
3467
+ "io-ts": "fill:#9370DB,color:#fff",
3468
+ valibot: "fill:#FF8C00,color:#000",
3469
+ arktype: "fill:#20B2AA,color:#fff",
3470
+ superstruct: "fill:#DAA520,color:#000",
3471
+ effect: "fill:#6A5ACD,color:#fff"
3472
+ };
3473
+ var GraphExporter = class {
3474
+ /**
3475
+ * Export dependency graph as DOT format for Graphviz.
3476
+ */
3477
+ exportDot(graph, options = {}) {
3478
+ const lines = [];
3479
+ lines.push("digraph SchemaShiftDependencies {");
3480
+ lines.push(" rankdir=LR;");
3481
+ lines.push(' node [shape=box, style=filled, fontname="monospace"];');
3482
+ lines.push(' edge [color="#666666"];');
3483
+ lines.push("");
3484
+ const circularFiles = /* @__PURE__ */ new Set();
3485
+ if (options.highlightCircular && graph.circularWarnings.length > 0) {
3486
+ for (const warning of graph.circularWarnings) {
3487
+ const match = warning.match(/Circular dependency: (.+)/);
3488
+ if (match?.[1]) {
3489
+ for (const part of match[1].split(" -> ")) {
3490
+ for (const file of graph.sortedFiles) {
3491
+ if (file.endsWith(part.trim()) || this.shortenPath(file) === part.trim()) {
3492
+ circularFiles.add(file);
3493
+ }
3494
+ }
3495
+ }
3496
+ }
3497
+ }
3498
+ }
3499
+ for (const filePath of graph.sortedFiles) {
3500
+ const meta = options.nodeMetadata?.get(filePath);
3501
+ const library = meta?.library;
3502
+ if (options.filterLibrary && library !== options.filterLibrary) continue;
3503
+ const shortPath = this.shortenPath(filePath);
3504
+ const nodeId = this.toNodeId(filePath);
3505
+ const attrs = [];
3506
+ attrs.push(`label="${shortPath}"`);
3507
+ if (circularFiles.has(filePath)) {
3508
+ attrs.push('color="#FF0000"');
3509
+ attrs.push("penwidth=2");
3510
+ }
3511
+ if (options.colorByLibrary && library && LIBRARY_COLORS[library]) {
3512
+ attrs.push(`fillcolor="${LIBRARY_COLORS[library]}"`);
3513
+ attrs.push('fontcolor="white"');
3514
+ } else {
3515
+ attrs.push('fillcolor="#E8E8E8"');
3516
+ }
3517
+ if (meta?.schemaCount) {
3518
+ attrs.push(`tooltip="${meta.schemaCount} schema(s)"`);
3519
+ }
3520
+ lines.push(` ${nodeId} [${attrs.join(", ")}];`);
3521
+ }
3522
+ lines.push("");
3523
+ const filterSet = options.filterLibrary ? new Set(
3524
+ graph.sortedFiles.filter((f) => {
3525
+ const meta = options.nodeMetadata?.get(f);
3526
+ return meta?.library === options.filterLibrary;
3527
+ })
3528
+ ) : void 0;
3529
+ for (const [file, deps] of graph.dependencies) {
3530
+ if (filterSet && !filterSet.has(file)) continue;
3531
+ const fromId = this.toNodeId(file);
3532
+ for (const dep of deps) {
3533
+ if (filterSet && !filterSet.has(dep)) continue;
3534
+ const toId = this.toNodeId(dep);
3535
+ const edgeAttrs = [];
3536
+ if (options.highlightCircular && circularFiles.has(file) && circularFiles.has(dep)) {
3537
+ edgeAttrs.push('color="#FF0000"');
3538
+ edgeAttrs.push("penwidth=2");
3539
+ }
3540
+ lines.push(
3541
+ ` ${fromId} -> ${toId}${edgeAttrs.length > 0 ? ` [${edgeAttrs.join(", ")}]` : ""};`
3542
+ );
3543
+ }
3544
+ }
3545
+ lines.push("}");
3546
+ return lines.join("\n");
3547
+ }
3548
+ /**
3549
+ * Export dependency graph as Mermaid diagram syntax.
3550
+ */
3551
+ exportMermaid(graph, options = {}) {
3552
+ const lines = [];
3553
+ lines.push("graph LR");
3554
+ const styledNodes = /* @__PURE__ */ new Map();
3555
+ for (const [file, deps] of graph.dependencies) {
3556
+ const meta = options.nodeMetadata?.get(file);
3557
+ if (options.filterLibrary && meta?.library !== options.filterLibrary) continue;
3558
+ const fromId = this.toMermaidId(file);
3559
+ const fromLabel = this.shortenPath(file);
3560
+ if (meta?.library) {
3561
+ styledNodes.set(fromId, meta.library);
3562
+ }
3563
+ if (deps.length === 0) {
3564
+ lines.push(` ${fromId}["${fromLabel}"]`);
3565
+ }
3566
+ for (const dep of deps) {
3567
+ const depMeta = options.nodeMetadata?.get(dep);
3568
+ if (options.filterLibrary && depMeta?.library !== options.filterLibrary) continue;
3569
+ const toId = this.toMermaidId(dep);
3570
+ const toLabel = this.shortenPath(dep);
3571
+ if (depMeta?.library) {
3572
+ styledNodes.set(toId, depMeta.library);
3573
+ }
3574
+ lines.push(` ${fromId}["${fromLabel}"] --> ${toId}["${toLabel}"]`);
3575
+ }
3576
+ }
3577
+ for (const file of graph.sortedFiles) {
3578
+ const meta = options.nodeMetadata?.get(file);
3579
+ if (options.filterLibrary && meta?.library !== options.filterLibrary) continue;
3580
+ const id = this.toMermaidId(file);
3581
+ if (!lines.some((l) => l.includes(id))) {
3582
+ lines.push(` ${id}["${this.shortenPath(file)}"]`);
3583
+ if (meta?.library) {
3584
+ styledNodes.set(id, meta.library);
3585
+ }
3586
+ }
3587
+ }
3588
+ if (options.colorByLibrary && styledNodes.size > 0) {
3589
+ lines.push("");
3590
+ const libraryGroups = /* @__PURE__ */ new Map();
3591
+ for (const [nodeId, library] of styledNodes) {
3592
+ const group = libraryGroups.get(library) ?? [];
3593
+ group.push(nodeId);
3594
+ libraryGroups.set(library, group);
3595
+ }
3596
+ for (const [library, nodeIds] of libraryGroups) {
3597
+ const style = LIBRARY_MERMAID_STYLES[library];
3598
+ if (style) {
3599
+ for (const nodeId of nodeIds) {
3600
+ lines.push(` style ${nodeId} ${style}`);
3601
+ }
3602
+ }
3603
+ }
3604
+ }
3605
+ return lines.join("\n");
3606
+ }
3607
+ shortenPath(filePath) {
3608
+ const parts = filePath.split("/");
3609
+ return parts.slice(-2).join("/");
3610
+ }
3611
+ toNodeId(filePath) {
3612
+ return filePath.replace(/[^a-zA-Z0-9]/g, "_").replace(/^_+/, "").replace(/_+$/, "");
3613
+ }
3614
+ toMermaidId(filePath) {
3615
+ return filePath.replace(/[^a-zA-Z0-9]/g, "_").replace(/^_+/, "n_").replace(/_+$/, "");
3616
+ }
3617
+ };
3618
+
2906
3619
  // src/incremental.ts
2907
- var import_node_fs7 = require("fs");
2908
- var import_node_path7 = require("path");
3620
+ var import_node_fs8 = require("fs");
3621
+ var import_node_path8 = require("path");
2909
3622
  var STATE_DIR = ".schemashift";
2910
3623
  var STATE_FILE = "incremental.json";
2911
3624
  var IncrementalTracker = class {
2912
3625
  stateDir;
2913
3626
  statePath;
2914
3627
  constructor(projectPath) {
2915
- this.stateDir = (0, import_node_path7.join)(projectPath, STATE_DIR);
2916
- this.statePath = (0, import_node_path7.join)(this.stateDir, STATE_FILE);
3628
+ this.stateDir = (0, import_node_path8.join)(projectPath, STATE_DIR);
3629
+ this.statePath = (0, import_node_path8.join)(this.stateDir, STATE_FILE);
2917
3630
  }
2918
3631
  start(files, from, to) {
2919
3632
  const state = {
@@ -2948,9 +3661,9 @@ var IncrementalTracker = class {
2948
3661
  this.saveState(state);
2949
3662
  }
2950
3663
  getState() {
2951
- if (!(0, import_node_fs7.existsSync)(this.statePath)) return null;
3664
+ if (!(0, import_node_fs8.existsSync)(this.statePath)) return null;
2952
3665
  try {
2953
- return JSON.parse((0, import_node_fs7.readFileSync)(this.statePath, "utf-8"));
3666
+ return JSON.parse((0, import_node_fs8.readFileSync)(this.statePath, "utf-8"));
2954
3667
  } catch {
2955
3668
  return null;
2956
3669
  }
@@ -2977,21 +3690,299 @@ var IncrementalTracker = class {
2977
3690
  };
2978
3691
  }
2979
3692
  clear() {
2980
- if ((0, import_node_fs7.existsSync)(this.statePath)) {
2981
- (0, import_node_fs7.unlinkSync)(this.statePath);
3693
+ if ((0, import_node_fs8.existsSync)(this.statePath)) {
3694
+ (0, import_node_fs8.unlinkSync)(this.statePath);
2982
3695
  }
2983
3696
  }
2984
3697
  saveState(state) {
2985
- if (!(0, import_node_fs7.existsSync)(this.stateDir)) {
2986
- (0, import_node_fs7.mkdirSync)(this.stateDir, { recursive: true });
3698
+ if (!(0, import_node_fs8.existsSync)(this.stateDir)) {
3699
+ (0, import_node_fs8.mkdirSync)(this.stateDir, { recursive: true });
2987
3700
  }
2988
- (0, import_node_fs7.writeFileSync)(this.statePath, JSON.stringify(state, null, 2));
3701
+ (0, import_node_fs8.writeFileSync)(this.statePath, JSON.stringify(state, null, 2));
3702
+ }
3703
+ };
3704
+
3705
+ // src/migration-templates.ts
3706
+ var BUILT_IN_TEMPLATES = [
3707
+ {
3708
+ name: "react-hook-form-yup-to-zod",
3709
+ description: "Migrate React Hook Form project from Yup to Zod validation",
3710
+ category: "form-migration",
3711
+ migrationSteps: [{ from: "yup", to: "zod", description: "Convert Yup schemas to Zod schemas" }],
3712
+ preChecks: [
3713
+ { description: "Ensure @hookform/resolvers is installed" },
3714
+ { description: "Check for .when() conditional validations that need manual review" }
3715
+ ],
3716
+ postSteps: [
3717
+ {
3718
+ description: "Update resolver imports: yupResolver \u2192 zodResolver",
3719
+ command: void 0
3720
+ },
3721
+ {
3722
+ description: "Run tests to verify form validation behavior",
3723
+ command: "npm test"
3724
+ },
3725
+ {
3726
+ description: "Remove Yup dependency if no longer used",
3727
+ command: "npm uninstall yup"
3728
+ }
3729
+ ],
3730
+ packageChanges: [
3731
+ { action: "install", package: "zod", version: "^3.24.0" },
3732
+ { action: "upgrade", package: "@hookform/resolvers", version: "latest" }
3733
+ ],
3734
+ recommendedFlags: ["--cross-file", "--scaffold-tests", "--verbose"],
3735
+ estimatedEffort: "moderate"
3736
+ },
3737
+ {
3738
+ name: "trpc-zod-v3-to-v4",
3739
+ description: "Upgrade tRPC project from Zod v3 to Zod v4",
3740
+ category: "framework-upgrade",
3741
+ migrationSteps: [
3742
+ { from: "zod-v3", to: "v4", description: "Upgrade Zod v3 schemas to v4 syntax" }
3743
+ ],
3744
+ preChecks: [
3745
+ { description: "Check tRPC version \u2014 v11+ required for Zod v4 compatibility" },
3746
+ { description: "Check zod-validation-error version \u2014 v5.0.0+ required" },
3747
+ { description: "Run existing test suite to establish baseline", command: "npm test" }
3748
+ ],
3749
+ postSteps: [
3750
+ {
3751
+ description: "Update tRPC to v11 if not already",
3752
+ command: "npm install @trpc/server@latest @trpc/client@latest"
3753
+ },
3754
+ {
3755
+ description: "Update zod-validation-error if used",
3756
+ command: "npm install zod-validation-error@^5.0.0"
3757
+ },
3758
+ { description: "Review TODO(schemashift) comments for manual fixes" },
3759
+ { description: "Run tests to verify tRPC router behavior", command: "npm test" }
3760
+ ],
3761
+ packageChanges: [
3762
+ { action: "upgrade", package: "zod", version: "^3.25.0" },
3763
+ { action: "upgrade", package: "@trpc/server", version: "^11.0.0" }
3764
+ ],
3765
+ recommendedFlags: ["--compat-check", "--scaffold-tests", "--verbose"],
3766
+ estimatedEffort: "high"
3767
+ },
3768
+ {
3769
+ name: "express-joi-to-zod",
3770
+ description: "Migrate Express.js API validators from Joi to Zod",
3771
+ category: "library-switch",
3772
+ migrationSteps: [{ from: "joi", to: "zod", description: "Convert Joi schemas to Zod schemas" }],
3773
+ preChecks: [
3774
+ { description: "Identify middleware using Joi validation" },
3775
+ { description: "Check for Joi.extend() custom validators that need manual migration" }
3776
+ ],
3777
+ postSteps: [
3778
+ { description: "Update Express middleware to use Zod schemas" },
3779
+ { description: "Replace celebrate/express-validation with custom Zod middleware" },
3780
+ { description: "Run API integration tests", command: "npm test" },
3781
+ { description: "Remove Joi dependency", command: "npm uninstall joi" }
3782
+ ],
3783
+ packageChanges: [
3784
+ { action: "install", package: "zod", version: "^3.24.0" },
3785
+ { action: "remove", package: "celebrate" }
3786
+ ],
3787
+ recommendedFlags: ["--cross-file", "--verbose"],
3788
+ estimatedEffort: "moderate"
3789
+ },
3790
+ {
3791
+ name: "nextjs-form-migration",
3792
+ description: "Migrate Next.js form validation from Yup/Formik to Zod/React Hook Form",
3793
+ category: "form-migration",
3794
+ migrationSteps: [{ from: "yup", to: "zod", description: "Convert Yup schemas to Zod schemas" }],
3795
+ preChecks: [
3796
+ { description: "Identify all Formik form components" },
3797
+ { description: "Check for server-side validation using Yup" },
3798
+ { description: "Run existing tests to establish baseline", command: "npm test" }
3799
+ ],
3800
+ postSteps: [
3801
+ { description: "Replace Formik with React Hook Form + zodResolver" },
3802
+ { description: "Update server actions to use Zod for validation" },
3803
+ {
3804
+ description: "Install next-safe-action if using server actions",
3805
+ command: "npm install next-safe-action"
3806
+ },
3807
+ { description: "Run full test suite", command: "npm test" }
3808
+ ],
3809
+ packageChanges: [
3810
+ { action: "install", package: "zod", version: "^3.24.0" },
3811
+ { action: "install", package: "react-hook-form", version: "^7.0.0" },
3812
+ { action: "install", package: "@hookform/resolvers", version: "latest" }
3813
+ ],
3814
+ recommendedFlags: ["--cross-file", "--scaffold-tests"],
3815
+ estimatedEffort: "high"
3816
+ },
3817
+ {
3818
+ name: "monorepo-staged-migration",
3819
+ description: "Phased monorepo migration with incremental tracking",
3820
+ category: "monorepo",
3821
+ migrationSteps: [
3822
+ { from: "yup", to: "zod", description: "Convert shared packages first, then applications" }
3823
+ ],
3824
+ preChecks: [
3825
+ { description: "Analyze monorepo workspace structure" },
3826
+ { description: "Identify shared schema packages used by multiple apps" },
3827
+ { description: "Ensure all packages build successfully", command: "npm run build" }
3828
+ ],
3829
+ postSteps: [
3830
+ { description: "Run incremental migration starting with leaf packages" },
3831
+ { description: "Build all packages after each batch", command: "npm run build" },
3832
+ { description: "Run full test suite", command: "npm test" },
3833
+ { description: "Review cross-package type compatibility" }
3834
+ ],
3835
+ packageChanges: [],
3836
+ recommendedFlags: ["--cross-file", "--incremental", "--compat-check", "--audit"],
3837
+ estimatedEffort: "high"
3838
+ }
3839
+ ];
3840
+ function getMigrationTemplate(name) {
3841
+ return BUILT_IN_TEMPLATES.find((t) => t.name === name);
3842
+ }
3843
+ function getMigrationTemplateNames() {
3844
+ return BUILT_IN_TEMPLATES.map((t) => t.name);
3845
+ }
3846
+ function getMigrationTemplatesByCategory(category) {
3847
+ return BUILT_IN_TEMPLATES.filter((t) => t.category === category);
3848
+ }
3849
+ function getAllMigrationTemplates() {
3850
+ return [...BUILT_IN_TEMPLATES];
3851
+ }
3852
+ function validateMigrationTemplate(template) {
3853
+ const errors = [];
3854
+ if (!template.name || template.name.trim().length === 0) {
3855
+ errors.push("Template name is required");
3856
+ }
3857
+ if (!template.description || template.description.trim().length === 0) {
3858
+ errors.push("Template description is required");
3859
+ }
3860
+ if (!template.migrationSteps || template.migrationSteps.length === 0) {
3861
+ errors.push("At least one migration step is required");
3862
+ }
3863
+ for (const step of template.migrationSteps ?? []) {
3864
+ if (!step.from || !step.to) {
3865
+ errors.push(`Migration step must have from and to: ${JSON.stringify(step)}`);
3866
+ }
3867
+ }
3868
+ return { valid: errors.length === 0, errors };
3869
+ }
3870
+
3871
+ // src/notifications.ts
3872
+ async function computeSignature(payload, secret) {
3873
+ const { createHmac } = await import("crypto");
3874
+ return createHmac("sha256", secret).update(payload).digest("hex");
3875
+ }
3876
+ var WebhookNotifier = class {
3877
+ webhooks;
3878
+ constructor(webhooks) {
3879
+ this.webhooks = webhooks;
3880
+ }
3881
+ /**
3882
+ * Create a migration event with current timestamp.
3883
+ */
3884
+ createEvent(type, details, project) {
3885
+ return {
3886
+ type,
3887
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
3888
+ project,
3889
+ details
3890
+ };
3891
+ }
3892
+ /**
3893
+ * Send an event to all matching webhooks.
3894
+ */
3895
+ async send(event) {
3896
+ const results = [];
3897
+ for (const webhook of this.webhooks) {
3898
+ if (webhook.events && !webhook.events.includes(event.type)) {
3899
+ continue;
3900
+ }
3901
+ const result = await this.sendToWebhook(webhook, event);
3902
+ results.push(result);
3903
+ }
3904
+ return results;
3905
+ }
3906
+ /**
3907
+ * Send event to a single webhook endpoint.
3908
+ */
3909
+ async sendToWebhook(webhook, event) {
3910
+ const payload = JSON.stringify(event);
3911
+ const headers = {
3912
+ "Content-Type": "application/json",
3913
+ "User-Agent": "SchemaShift-Webhook/1.0",
3914
+ ...webhook.headers
3915
+ };
3916
+ if (webhook.secret) {
3917
+ const signature = await computeSignature(payload, webhook.secret);
3918
+ headers["X-SchemaShift-Signature"] = `sha256=${signature}`;
3919
+ }
3920
+ try {
3921
+ const response = await fetch(webhook.url, {
3922
+ method: "POST",
3923
+ headers,
3924
+ body: payload
3925
+ });
3926
+ return {
3927
+ success: response.ok,
3928
+ statusCode: response.status,
3929
+ error: response.ok ? void 0 : `HTTP ${response.status}: ${response.statusText}`
3930
+ };
3931
+ } catch (err) {
3932
+ return {
3933
+ success: false,
3934
+ error: err instanceof Error ? err.message : String(err)
3935
+ };
3936
+ }
3937
+ }
3938
+ /**
3939
+ * Convenience: send a migration_started event.
3940
+ */
3941
+ async notifyMigrationStarted(from, to, fileCount, project) {
3942
+ const event = this.createEvent("migration_started", { from, to, fileCount }, project);
3943
+ return this.send(event);
3944
+ }
3945
+ /**
3946
+ * Convenience: send a migration_completed event.
3947
+ */
3948
+ async notifyMigrationCompleted(from, to, fileCount, warningCount, project) {
3949
+ const event = this.createEvent(
3950
+ "migration_completed",
3951
+ { from, to, fileCount, warningCount },
3952
+ project
3953
+ );
3954
+ return this.send(event);
3955
+ }
3956
+ /**
3957
+ * Convenience: send a migration_failed event.
3958
+ */
3959
+ async notifyMigrationFailed(from, to, error, project) {
3960
+ const event = this.createEvent("migration_failed", { from, to, error }, project);
3961
+ return this.send(event);
3962
+ }
3963
+ /**
3964
+ * Convenience: send a governance_violation event.
3965
+ */
3966
+ async notifyGovernanceViolation(violationCount, rules, project) {
3967
+ const event = this.createEvent("governance_violation", { violationCount, rules }, project);
3968
+ return this.send(event);
3969
+ }
3970
+ /**
3971
+ * Convenience: send a drift_detected event.
3972
+ */
3973
+ async notifyDriftDetected(modifiedFiles, addedFiles, removedFiles, project) {
3974
+ const event = this.createEvent(
3975
+ "drift_detected",
3976
+ { modifiedFiles, addedFiles, removedFiles },
3977
+ project
3978
+ );
3979
+ return this.send(event);
2989
3980
  }
2990
3981
  };
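A usage sketch for the new WebhookNotifier; the webhook config fields (url, secret, events, headers) are inferred from how send() and sendToWebhook() read them above, and the endpoint URL and project name here are hypothetical:

import { WebhookNotifier } from "@schemashift/core";

const notifier = new WebhookNotifier([
  {
    url: "https://example.com/hooks/schemashift",     // hypothetical receiver
    secret: process.env.SCHEMASHIFT_WEBHOOK_SECRET,   // optional; enables the sha256 signature header
    events: ["migration_started", "migration_completed", "migration_failed"],
  },
]);

// Each notify* helper builds an event via createEvent() and POSTs it to every
// webhook whose `events` filter matches, resolving to one
// { success, statusCode?, error? } result per webhook.
async function announceStart(): Promise<void> {
  const results = await notifier.notifyMigrationStarted("yup", "zod", 42, "my-app");
  for (const r of results) {
    if (!r.success) console.warn("webhook delivery failed:", r.error);
  }
}

announceStart().catch(console.error);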
2991
3982
 
2992
3983
  // src/package-updater.ts
2993
- var import_node_fs8 = require("fs");
2994
- var import_node_path8 = require("path");
3984
+ var import_node_fs9 = require("fs");
3985
+ var import_node_path9 = require("path");
2995
3986
  var TARGET_VERSIONS = {
2996
3987
  "yup->zod": { zod: "^3.24.0" },
2997
3988
  "joi->zod": { zod: "^3.24.0" },
@@ -3012,14 +4003,14 @@ var PackageUpdater = class {
3012
4003
  const add = {};
3013
4004
  const remove = [];
3014
4005
  const warnings = [];
3015
- const pkgPath = (0, import_node_path8.join)(projectPath, "package.json");
3016
- if (!(0, import_node_fs8.existsSync)(pkgPath)) {
4006
+ const pkgPath = (0, import_node_path9.join)(projectPath, "package.json");
4007
+ if (!(0, import_node_fs9.existsSync)(pkgPath)) {
3017
4008
  warnings.push("No package.json found. Cannot plan dependency updates.");
3018
4009
  return { add, remove, warnings };
3019
4010
  }
3020
4011
  let pkg;
3021
4012
  try {
3022
- pkg = JSON.parse((0, import_node_fs8.readFileSync)(pkgPath, "utf-8"));
4013
+ pkg = JSON.parse((0, import_node_fs9.readFileSync)(pkgPath, "utf-8"));
3023
4014
  } catch {
3024
4015
  warnings.push("Could not parse package.json.");
3025
4016
  return { add, remove, warnings };
@@ -3049,9 +4040,9 @@ var PackageUpdater = class {
3049
4040
  return { add, remove, warnings };
3050
4041
  }
3051
4042
  apply(projectPath, plan) {
3052
- const pkgPath = (0, import_node_path8.join)(projectPath, "package.json");
3053
- if (!(0, import_node_fs8.existsSync)(pkgPath)) return;
3054
- const pkgText = (0, import_node_fs8.readFileSync)(pkgPath, "utf-8");
4043
+ const pkgPath = (0, import_node_path9.join)(projectPath, "package.json");
4044
+ if (!(0, import_node_fs9.existsSync)(pkgPath)) return;
4045
+ const pkgText = (0, import_node_fs9.readFileSync)(pkgPath, "utf-8");
3055
4046
  const pkg = JSON.parse(pkgText);
3056
4047
  if (!pkg.dependencies) pkg.dependencies = {};
3057
4048
  for (const [name, version] of Object.entries(plan.add)) {
@@ -3061,7 +4052,7 @@ var PackageUpdater = class {
3061
4052
  pkg.dependencies[name] = version;
3062
4053
  }
3063
4054
  }
3064
- (0, import_node_fs8.writeFileSync)(pkgPath, `${JSON.stringify(pkg, null, 2)}
4055
+ (0, import_node_fs9.writeFileSync)(pkgPath, `${JSON.stringify(pkg, null, 2)}
3065
4056
  `);
3066
4057
  }
3067
4058
  };
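The PackageUpdater changes in this hunk only renumber the bundled fs/path import bindings (import_node_fs8/path8 to import_node_fs9/path9); the behaviour is unchanged. For orientation, a sketch of driving apply() with a hand-built plan matching the { add, remove, warnings } shape returned above; the planning call itself and the PackageUpdater constructor are not visible in this hunk, so the no-argument construction below is an assumption:

import { PackageUpdater } from "@schemashift/core";

// Assumption: PackageUpdater can be constructed without arguments.
const updater = new PackageUpdater();

// Hand-built plan; "^3.24.0" mirrors the yup->zod entry in TARGET_VERSIONS above.
const plan = { add: { zod: "^3.24.0" }, remove: [], warnings: [] };

// apply() returns early when <projectPath>/package.json is missing; otherwise it
// adds the plan.add entries to dependencies (subject to checks elided from this
// hunk) and rewrites the file with 2-space indentation.
updater.apply(process.cwd(), plan);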
@@ -3233,8 +4224,8 @@ var PluginLoader = class {
3233
4224
  };
3234
4225
 
3235
4226
  // src/standard-schema.ts
3236
- var import_node_fs9 = require("fs");
3237
- var import_node_path9 = require("path");
4227
+ var import_node_fs10 = require("fs");
4228
+ var import_node_path10 = require("path");
3238
4229
  var STANDARD_SCHEMA_LIBRARIES = {
3239
4230
  zod: { minMajor: 3, minMinor: 23 },
3240
4231
  // Zod v3.23+ and v4+
@@ -3263,13 +4254,13 @@ function isVersionCompatible(version, minMajor, minMinor) {
3263
4254
  return false;
3264
4255
  }
3265
4256
  function detectStandardSchema(projectPath) {
3266
- const pkgPath = (0, import_node_path9.join)(projectPath, "package.json");
3267
- if (!(0, import_node_fs9.existsSync)(pkgPath)) {
4257
+ const pkgPath = (0, import_node_path10.join)(projectPath, "package.json");
4258
+ if (!(0, import_node_fs10.existsSync)(pkgPath)) {
3268
4259
  return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
3269
4260
  }
3270
4261
  let allDeps = {};
3271
4262
  try {
3272
- const pkg = JSON.parse((0, import_node_fs9.readFileSync)(pkgPath, "utf-8"));
4263
+ const pkg = JSON.parse((0, import_node_fs10.readFileSync)(pkgPath, "utf-8"));
3273
4264
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
3274
4265
  } catch {
3275
4266
  return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
@@ -3308,6 +4299,105 @@ function detectStandardSchema(projectPath) {
3308
4299
  return { detected, compatibleLibraries, recommendation, adoptionPath, interopTools };
3309
4300
  }
3310
4301
 
4302
+ // src/standard-schema-advisor.ts
4303
+ var STANDARD_SCHEMA_LIBS = /* @__PURE__ */ new Set(["zod", "valibot", "arktype"]);
4304
+ var StandardSchemaAdvisor = class {
4305
+ /**
4306
+ * Check if a schema library supports Standard Schema.
4307
+ */
4308
+ supportsStandardSchema(library) {
4309
+ return STANDARD_SCHEMA_LIBS.has(library);
4310
+ }
4311
+ /**
4312
+ * Generate advisory for a given migration path.
4313
+ */
4314
+ advise(from, to) {
4315
+ const fromSupports = this.supportsStandardSchema(from);
4316
+ const toSupports = this.supportsStandardSchema(to);
4317
+ if (!fromSupports && !toSupports) {
4318
+ return {
4319
+ shouldConsiderAdapter: false,
4320
+ reason: `Neither ${from} nor ${to} supports Standard Schema. Full migration is recommended.`,
4321
+ migrationAdvantages: [
4322
+ "Complete type safety with target library",
4323
+ "Access to target library ecosystem",
4324
+ "No runtime adapter overhead"
4325
+ ],
4326
+ adapterAdvantages: [],
4327
+ recommendation: "migrate"
4328
+ };
4329
+ }
4330
+ if (fromSupports && toSupports) {
4331
+ return {
4332
+ shouldConsiderAdapter: true,
4333
+ reason: `Both ${from} and ${to} support Standard Schema 1.0. You may be able to use adapters for ecosystem tools (tRPC, TanStack Form, etc.) instead of migrating all schemas.`,
4334
+ adapterExample: this.generateAdapterExample(from, to),
4335
+ migrationAdvantages: [
4336
+ "Full target library API and ergonomics",
4337
+ "Consistent codebase (single library)",
4338
+ "Better IDE support for one library",
4339
+ "Smaller bundle (avoid loading two libraries)"
4340
+ ],
4341
+ adapterAdvantages: [
4342
+ "No code changes needed for existing schemas",
4343
+ "Gradual migration possible",
4344
+ "Ecosystem tools work with both libraries via Standard Schema",
4345
+ "Lower risk \u2014 existing validation behavior preserved"
4346
+ ],
4347
+ recommendation: "either"
4348
+ };
4349
+ }
4350
+ if (toSupports && !fromSupports) {
4351
+ return {
4352
+ shouldConsiderAdapter: false,
4353
+ reason: `${from} does not support Standard Schema, but ${to} does. Migrating to ${to} gives you Standard Schema interoperability.`,
4354
+ migrationAdvantages: [
4355
+ "Standard Schema interoperability with ecosystem tools",
4356
+ "Future-proof validation layer",
4357
+ `Access to ${to} API and type inference`
4358
+ ],
4359
+ adapterAdvantages: [],
4360
+ recommendation: "migrate"
4361
+ };
4362
+ }
4363
+ return {
4364
+ shouldConsiderAdapter: false,
4365
+ reason: `${from} supports Standard Schema but ${to} does not. Consider if you need the specific features of ${to} that justify losing Standard Schema interoperability.`,
4366
+ migrationAdvantages: [`Access to ${to}-specific features`],
4367
+ adapterAdvantages: [`Keeping ${from} preserves Standard Schema interoperability`],
4368
+ recommendation: "migrate"
4369
+ };
4370
+ }
4371
+ /**
4372
+ * Analyze a project and provide advisory based on detected libraries.
4373
+ */
4374
+ adviseFromProject(projectPath, from, to) {
4375
+ const projectInfo = detectStandardSchema(projectPath);
4376
+ const advisory = this.advise(from, to);
4377
+ return { ...advisory, projectInfo };
4378
+ }
4379
+ generateAdapterExample(from, to) {
4380
+ return [
4381
+ `// Instead of migrating all ${from} schemas to ${to},`,
4382
+ `// you can use Standard Schema adapters for ecosystem tools:`,
4383
+ `//`,
4384
+ `// Example with tRPC (v11+):`,
4385
+ `// tRPC accepts any Standard Schema-compatible schema.`,
4386
+ `// Both ${from} and ${to} schemas work without conversion:`,
4387
+ `//`,
4388
+ `// import { ${from}Schema } from './existing-${from}-schemas';`,
4389
+ `// import { ${to}Schema } from './new-${to}-schemas';`,
4390
+ `//`,
4391
+ `// const router = t.router({`,
4392
+ `// // Works with ${from} schema (Standard Schema compatible)`,
4393
+ `// getUser: t.procedure.input(${from}Schema).query(...)`,
4394
+ `// // Also works with ${to} schema`,
4395
+ `// createUser: t.procedure.input(${to}Schema).mutation(...)`,
4396
+ `// });`
4397
+ ].join("\n");
4398
+ }
4399
+ };
4400
+
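A sketch of the new StandardSchemaAdvisor; the library names and advisory fields are read directly from the code above:

import { StandardSchemaAdvisor } from "@schemashift/core";

const advisor = new StandardSchemaAdvisor();

// zod, valibot and arktype are treated as Standard Schema-capable, so a
// valibot -> zod question yields recommendation "either" plus an adapter example.
const advisory = advisor.advise("valibot", "zod");
console.log(advisory.recommendation);         // "migrate" or "either"
console.log(advisory.shouldConsiderAdapter);  // true only when both libraries support Standard Schema
console.log(advisory.adapterExample);         // commented tRPC-style illustration

// adviseFromProject() also runs detectStandardSchema() against the project and
// attaches its result as projectInfo.
const withProject = advisor.adviseFromProject(process.cwd(), "yup", "zod");
console.log(withProject.projectInfo.compatibleLibraries);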
3311
4401
  // src/test-scaffolder.ts
3312
4402
  var TestScaffolder = class {
3313
4403
  scaffold(sourceFiles, from, to) {
@@ -3602,6 +4692,7 @@ var TypeDedupDetector = class {
3602
4692
  };
3603
4693
  // Annotate the CommonJS export names for ESM import in node:
3604
4694
  0 && (module.exports = {
4695
+ ApprovalManager,
3605
4696
  BehavioralWarningAnalyzer,
3606
4697
  BundleEstimator,
3607
4698
  CompatibilityAnalyzer,
@@ -3612,6 +4703,8 @@ var TypeDedupDetector = class {
3612
4703
  FormResolverMigrator,
3613
4704
  GOVERNANCE_TEMPLATES,
3614
4705
  GovernanceEngine,
4706
+ GovernanceFixer,
4707
+ GraphExporter,
3615
4708
  IncrementalTracker,
3616
4709
  MigrationAuditLog,
3617
4710
  MigrationChain,
@@ -3621,24 +4714,37 @@ var TypeDedupDetector = class {
3621
4714
  PluginLoader,
3622
4715
  SchemaAnalyzer,
3623
4716
  SchemaDependencyResolver,
4717
+ StandardSchemaAdvisor,
3624
4718
  TestScaffolder,
3625
4719
  TransformEngine,
3626
4720
  TypeDedupDetector,
4721
+ WebhookNotifier,
3627
4722
  buildCallChain,
3628
4723
  computeParallelBatches,
4724
+ conditionalValidation,
4725
+ dependentFields,
3629
4726
  detectFormLibraries,
3630
4727
  detectSchemaLibrary,
3631
4728
  detectStandardSchema,
4729
+ getAllMigrationTemplates,
3632
4730
  getGovernanceTemplate,
3633
4731
  getGovernanceTemplateNames,
3634
4732
  getGovernanceTemplatesByCategory,
4733
+ getMigrationTemplate,
4734
+ getMigrationTemplateNames,
4735
+ getMigrationTemplatesByCategory,
3635
4736
  isInsideComment,
3636
4737
  isInsideStringLiteral,
3637
4738
  loadConfig,
4739
+ mutuallyExclusive,
3638
4740
  parseCallChain,
4741
+ requireIf,
4742
+ requireOneOf,
3639
4743
  shouldSuppressWarning,
3640
4744
  startsWithBase,
4745
+ suggestCrossFieldPattern,
3641
4746
  transformMethodChain,
3642
- validateConfig
4747
+ validateConfig,
4748
+ validateMigrationTemplate
3643
4749
  });
3644
4750
  //# sourceMappingURL=index.cjs.map
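Because the new names are included in the export annotation above, Node can expose them as named ESM exports of the CJS bundle as well; for example (names only, taken from the export list):

import { getAllMigrationTemplates, WebhookNotifier } from "@schemashift/core";

console.log(getAllMigrationTemplates().map((t) => t.name));
console.log(typeof WebhookNotifier); // "function"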