@schemashift/core 0.10.0 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -133,6 +133,110 @@ var SchemaAnalyzer = class {
133
133
  }
134
134
  };
135
135
 
136
+ // src/approval.ts
137
+ import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from "fs";
138
+ import { join } from "path";
139
+ var ApprovalManager = class {
140
+ pendingDir;
141
+ constructor(projectPath) {
142
+ this.pendingDir = join(projectPath, ".schemashift", "pending");
143
+ }
144
+ /**
145
+ * Create a new migration request for review.
146
+ */
147
+ createRequest(from, to, files, requestedBy, metadata) {
148
+ const id = `mig-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
149
+ const request = {
150
+ id,
151
+ from,
152
+ to,
153
+ files,
154
+ requestedBy,
155
+ requestedAt: (/* @__PURE__ */ new Date()).toISOString(),
156
+ status: "pending",
157
+ metadata
158
+ };
159
+ this.ensureDir();
160
+ const filePath = join(this.pendingDir, `${id}.json`);
161
+ writeFileSync(filePath, JSON.stringify(request, null, 2), "utf-8");
162
+ return request;
163
+ }
164
+ /**
165
+ * Review (approve or reject) a pending migration request.
166
+ */
167
+ review(decision) {
168
+ const request = this.getRequest(decision.requestId);
169
+ if (!request) {
170
+ throw new Error(`Migration request ${decision.requestId} not found`);
171
+ }
172
+ if (request.status !== "pending") {
173
+ throw new Error(`Migration request ${decision.requestId} is already ${request.status}`);
174
+ }
175
+ request.status = decision.status;
176
+ request.reviewedBy = decision.reviewedBy;
177
+ request.reviewedAt = (/* @__PURE__ */ new Date()).toISOString();
178
+ request.reason = decision.reason;
179
+ const filePath = join(this.pendingDir, `${decision.requestId}.json`);
180
+ writeFileSync(filePath, JSON.stringify(request, null, 2), "utf-8");
181
+ return request;
182
+ }
183
+ /**
184
+ * Get a specific migration request by ID.
185
+ */
186
+ getRequest(id) {
187
+ const filePath = join(this.pendingDir, `${id}.json`);
188
+ if (!existsSync(filePath)) {
189
+ return null;
190
+ }
191
+ const content = readFileSync(filePath, "utf-8");
192
+ return JSON.parse(content);
193
+ }
194
+ /**
195
+ * List all migration requests, optionally filtered by status.
196
+ */
197
+ listRequests(status) {
198
+ if (!existsSync(this.pendingDir)) {
199
+ return [];
200
+ }
201
+ const files = readdirSync(this.pendingDir).filter((f) => f.endsWith(".json"));
202
+ const requests = [];
203
+ for (const file of files) {
204
+ const content = readFileSync(join(this.pendingDir, file), "utf-8");
205
+ const request = JSON.parse(content);
206
+ if (!status || request.status === status) {
207
+ requests.push(request);
208
+ }
209
+ }
210
+ return requests.sort(
211
+ (a, b) => new Date(b.requestedAt).getTime() - new Date(a.requestedAt).getTime()
212
+ );
213
+ }
214
+ /**
215
+ * Get summary counts of all requests.
216
+ */
217
+ getSummary() {
218
+ const all = this.listRequests();
219
+ return {
220
+ pending: all.filter((r) => r.status === "pending").length,
221
+ approved: all.filter((r) => r.status === "approved").length,
222
+ rejected: all.filter((r) => r.status === "rejected").length,
223
+ total: all.length
224
+ };
225
+ }
226
+ /**
227
+ * Check if a migration has been approved.
228
+ */
229
+ isApproved(requestId) {
230
+ const request = this.getRequest(requestId);
231
+ return request?.status === "approved";
232
+ }
233
+ ensureDir() {
234
+ if (!existsSync(this.pendingDir)) {
235
+ mkdirSync(this.pendingDir, { recursive: true });
236
+ }
237
+ }
238
+ };
239
+
136
240
  // src/ast-utils.ts
137
241
  import { Node as NodeUtils } from "ts-morph";
138
242
  function parseCallChain(node) {
@@ -269,8 +373,8 @@ function transformMethodChain(chain, newBase, factoryMapper, methodMapper) {
269
373
 
270
374
  // src/audit-log.ts
271
375
  import { createHash } from "crypto";
272
- import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
273
- import { join } from "path";
376
+ import { existsSync as existsSync2, mkdirSync as mkdirSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "fs";
377
+ import { join as join2 } from "path";
274
378
  var AUDIT_DIR = ".schemashift";
275
379
  var AUDIT_FILE = "audit-log.json";
276
380
  var AUDIT_VERSION = 1;
@@ -278,8 +382,8 @@ var MigrationAuditLog = class {
278
382
  logDir;
279
383
  logPath;
280
384
  constructor(projectPath) {
281
- this.logDir = join(projectPath, AUDIT_DIR);
282
- this.logPath = join(this.logDir, AUDIT_FILE);
385
+ this.logDir = join2(projectPath, AUDIT_DIR);
386
+ this.logPath = join2(this.logDir, AUDIT_FILE);
283
387
  }
284
388
  /**
285
389
  * Append a new entry to the audit log.
@@ -315,11 +419,11 @@ var MigrationAuditLog = class {
315
419
  * Read the current audit log.
316
420
  */
317
421
  read() {
318
- if (!existsSync(this.logPath)) {
422
+ if (!existsSync2(this.logPath)) {
319
423
  return { version: AUDIT_VERSION, entries: [] };
320
424
  }
321
425
  try {
322
- const content = readFileSync(this.logPath, "utf-8");
426
+ const content = readFileSync2(this.logPath, "utf-8");
323
427
  if (!content.trim()) {
324
428
  return { version: AUDIT_VERSION, entries: [] };
325
429
  }
@@ -401,6 +505,88 @@ var MigrationAuditLog = class {
401
505
  clear() {
402
506
  this.write({ version: AUDIT_VERSION, entries: [] });
403
507
  }
508
+ /**
509
+ * Export a compliance report in SOC2 or HIPAA format.
510
+ */
511
+ exportComplianceReport(format) {
512
+ const log = this.read();
513
+ const summary = this.getSummary();
514
+ if (format === "soc2") {
515
+ return this.generateSoc2Report(log, summary);
516
+ }
517
+ return this.generateHipaaReport(log, summary);
518
+ }
519
+ generateSoc2Report(log, summary) {
520
+ const sections = [];
521
+ const now = (/* @__PURE__ */ new Date()).toISOString();
522
+ sections.push("# SOC2 Compliance Report \u2014 Schema Migration");
523
+ sections.push(`Generated: ${now}`);
524
+ sections.push("");
525
+ sections.push("## Change Control Summary");
526
+ sections.push(`- Total Migrations: ${summary.totalMigrations}`);
527
+ sections.push(`- Total Files Processed: ${summary.totalFiles}`);
528
+ sections.push(`- Successful: ${summary.successCount}`);
529
+ sections.push(`- Failed: ${summary.failureCount}`);
530
+ sections.push(`- Migration Paths: ${summary.migrationPaths.join(", ")}`);
531
+ sections.push("");
532
+ sections.push("## Change Control Entries");
533
+ for (const entry of log.entries) {
534
+ sections.push("");
535
+ sections.push(`### ${entry.filePath}`);
536
+ sections.push(`- Change ID: ${entry.migrationId}`);
537
+ sections.push(`- Timestamp: ${entry.timestamp}`);
538
+ sections.push(`- Action: ${entry.action}`);
539
+ sections.push(`- Migration: ${entry.from} \u2192 ${entry.to}`);
540
+ sections.push(`- Status: ${entry.success ? "Success" : "Failed"}`);
541
+ sections.push(`- Implementer: ${entry.user || "Unknown"}`);
542
+ sections.push(`- Before Hash: ${entry.beforeHash}`);
543
+ if (entry.afterHash) sections.push(`- After Hash: ${entry.afterHash}`);
544
+ sections.push(`- Warnings: ${entry.warningCount}`);
545
+ sections.push(`- Errors: ${entry.errorCount}`);
546
+ if (entry.riskScore !== void 0) sections.push(`- Risk Score: ${entry.riskScore}`);
547
+ if (entry.metadata?.ciProvider) sections.push(`- CI Provider: ${entry.metadata.ciProvider}`);
548
+ if (entry.metadata?.gitCommit) sections.push(`- Git Commit: ${entry.metadata.gitCommit}`);
549
+ if (entry.metadata?.gitBranch) sections.push(`- Git Branch: ${entry.metadata.gitBranch}`);
550
+ }
551
+ sections.push("");
552
+ sections.push("## Rollback Procedure");
553
+ sections.push("SchemaShift maintains automatic backups in `.schemashift/backups/`.");
554
+ sections.push("Use `schemashift rollback [backupId]` to restore files from any backup.");
555
+ sections.push("");
556
+ return sections.join("\n");
557
+ }
558
+ generateHipaaReport(log, summary) {
559
+ const sections = [];
560
+ const now = (/* @__PURE__ */ new Date()).toISOString();
561
+ sections.push("# HIPAA Compliance Audit Trail \u2014 Schema Migration");
562
+ sections.push(`Generated: ${now}`);
563
+ sections.push("");
564
+ sections.push("## Data Transformation Summary");
565
+ sections.push(`- Total Transformations: ${summary.totalFiles}`);
566
+ sections.push(`- Successful: ${summary.successCount}`);
567
+ sections.push(`- Failed: ${summary.failureCount}`);
568
+ sections.push("");
569
+ sections.push("## Integrity Verification");
570
+ for (const entry of log.entries) {
571
+ sections.push("");
572
+ sections.push(`### ${entry.filePath}`);
573
+ sections.push(`- Timestamp: ${entry.timestamp}`);
574
+ sections.push(`- User: ${entry.user || "Unknown"}`);
575
+ sections.push(`- Action: ${entry.action} (${entry.from} \u2192 ${entry.to})`);
576
+ sections.push(`- Integrity Before: SHA256:${entry.beforeHash}`);
577
+ if (entry.afterHash) sections.push(`- Integrity After: SHA256:${entry.afterHash}`);
578
+ sections.push(`- Status: ${entry.success ? "Completed" : "Failed"}`);
579
+ if (entry.metadata?.hostname) sections.push(`- Host: ${entry.metadata.hostname}`);
580
+ if (entry.metadata?.nodeVersion)
581
+ sections.push(`- Runtime: Node.js ${entry.metadata.nodeVersion}`);
582
+ }
583
+ sections.push("");
584
+ sections.push("## Access Control");
585
+ const users = [...new Set(log.entries.map((e) => e.user).filter(Boolean))];
586
+ sections.push(`- Users Who Performed Migrations: ${users.join(", ") || "Unknown"}`);
587
+ sections.push("");
588
+ return sections.join("\n");
589
+ }
404
590
  collectMetadata() {
405
591
  return {
406
592
  hostname: process.env.HOSTNAME || void 0,
@@ -412,10 +598,10 @@ var MigrationAuditLog = class {
412
598
  };
413
599
  }
414
600
  write(log) {
415
- if (!existsSync(this.logDir)) {
416
- mkdirSync(this.logDir, { recursive: true });
601
+ if (!existsSync2(this.logDir)) {
602
+ mkdirSync2(this.logDir, { recursive: true });
417
603
  }
418
- writeFileSync(this.logPath, JSON.stringify(log, null, 2));
604
+ writeFileSync2(this.logPath, JSON.stringify(log, null, 2));
419
605
  }
420
606
  hashContent(content) {
421
607
  return createHash("sha256").update(content).digest("hex").substring(0, 16);
@@ -848,12 +1034,12 @@ var MigrationChain = class {
848
1034
  };
849
1035
 
850
1036
  // src/compatibility.ts
851
- import { existsSync as existsSync3, readFileSync as readFileSync3 } from "fs";
852
- import { join as join3 } from "path";
1037
+ import { existsSync as existsSync4, readFileSync as readFileSync4 } from "fs";
1038
+ import { join as join4 } from "path";
853
1039
 
854
1040
  // src/ecosystem.ts
855
- import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
856
- import { join as join2 } from "path";
1041
+ import { existsSync as existsSync3, readFileSync as readFileSync3 } from "fs";
1042
+ import { join as join3 } from "path";
857
1043
  var ECOSYSTEM_RULES = [
858
1044
  // ORM integrations
859
1045
  {
@@ -1108,6 +1294,72 @@ var ECOSYSTEM_RULES = [
1108
1294
  upgradeCommand: "npm install nuqs@latest"
1109
1295
  })
1110
1296
  },
1297
+ // Server action / routing integrations
1298
+ {
1299
+ package: "next-safe-action",
1300
+ category: "api",
1301
+ migrations: ["zod-v3->v4"],
1302
+ check: () => ({
1303
+ issue: "next-safe-action uses Zod for input validation. Zod v4 type changes may break action definitions.",
1304
+ suggestion: "Upgrade next-safe-action to the latest version with Zod v4 support.",
1305
+ severity: "warning",
1306
+ upgradeCommand: "npm install next-safe-action@latest"
1307
+ })
1308
+ },
1309
+ {
1310
+ package: "@tanstack/router",
1311
+ category: "api",
1312
+ migrations: ["zod-v3->v4"],
1313
+ check: () => ({
1314
+ issue: "@tanstack/router uses Zod for route parameter validation. Zod v4 changes may affect type inference.",
1315
+ suggestion: "Upgrade @tanstack/router to a version with Zod v4 support.",
1316
+ severity: "warning",
1317
+ upgradeCommand: "npm install @tanstack/router@latest"
1318
+ })
1319
+ },
1320
+ {
1321
+ package: "@tanstack/react-query",
1322
+ category: "api",
1323
+ migrations: ["zod-v3->v4"],
1324
+ check: () => ({
1325
+ issue: "@tanstack/react-query may use Zod for query key/param validation via integrations.",
1326
+ suggestion: "Verify any Zod-based query validation still works after the Zod v4 upgrade.",
1327
+ severity: "info"
1328
+ })
1329
+ },
1330
+ {
1331
+ package: "fastify-type-provider-zod",
1332
+ category: "api",
1333
+ migrations: ["zod-v3->v4"],
1334
+ check: () => ({
1335
+ issue: "fastify-type-provider-zod needs a Zod v4-compatible version.",
1336
+ suggestion: "Upgrade fastify-type-provider-zod to a version supporting Zod v4.",
1337
+ severity: "warning",
1338
+ upgradeCommand: "npm install fastify-type-provider-zod@latest"
1339
+ })
1340
+ },
1341
+ {
1342
+ package: "zod-i18n-map",
1343
+ category: "validation-util",
1344
+ migrations: ["zod-v3->v4"],
1345
+ check: () => ({
1346
+ issue: 'zod-i18n-map uses Zod v3 error map format. Error messages changed in v4 (e.g., "Required" is now descriptive).',
1347
+ suggestion: "Check for a Zod v4-compatible version of zod-i18n-map or update custom error maps.",
1348
+ severity: "warning",
1349
+ upgradeCommand: "npm install zod-i18n-map@latest"
1350
+ })
1351
+ },
1352
+ {
1353
+ package: "openapi-zod-client",
1354
+ category: "openapi",
1355
+ migrations: ["zod-v3->v4"],
1356
+ check: () => ({
1357
+ issue: "openapi-zod-client generates Zod v3 schemas from OpenAPI specs. Generated code may need regeneration.",
1358
+ suggestion: "Upgrade openapi-zod-client and regenerate schemas for Zod v4 compatibility.",
1359
+ severity: "warning",
1360
+ upgradeCommand: "npm install openapi-zod-client@latest"
1361
+ })
1362
+ },
1111
1363
  // Schema library detection for cross-library migrations
1112
1364
  {
1113
1365
  package: "@effect/schema",
@@ -1185,13 +1437,13 @@ var EcosystemAnalyzer = class {
1185
1437
  const dependencies = [];
1186
1438
  const warnings = [];
1187
1439
  const blockers = [];
1188
- const pkgPath = join2(projectPath, "package.json");
1189
- if (!existsSync2(pkgPath)) {
1440
+ const pkgPath = join3(projectPath, "package.json");
1441
+ if (!existsSync3(pkgPath)) {
1190
1442
  return { dependencies, warnings, blockers };
1191
1443
  }
1192
1444
  let allDeps = {};
1193
1445
  try {
1194
- const pkg = JSON.parse(readFileSync2(pkgPath, "utf-8"));
1446
+ const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
1195
1447
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
1196
1448
  } catch {
1197
1449
  return { dependencies, warnings, blockers };
@@ -1312,10 +1564,10 @@ var CompatibilityAnalyzer = class {
1312
1564
  ecosystemAnalyzer = new EcosystemAnalyzer();
1313
1565
  detectVersions(projectPath) {
1314
1566
  const versions = [];
1315
- const pkgPath = join3(projectPath, "package.json");
1316
- if (!existsSync3(pkgPath)) return versions;
1567
+ const pkgPath = join4(projectPath, "package.json");
1568
+ if (!existsSync4(pkgPath)) return versions;
1317
1569
  try {
1318
- const pkg = JSON.parse(readFileSync3(pkgPath, "utf-8"));
1570
+ const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
1319
1571
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
1320
1572
  const allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
1321
1573
  for (const lib of knownLibs) {
@@ -1467,6 +1719,25 @@ var ComplexityEstimator = class {
1467
1719
  riskAreas
1468
1720
  };
1469
1721
  }
1722
+ estimateDuration(estimate) {
1723
+ const EFFORT_RANGES = {
1724
+ trivial: { label: "1\u20135 minutes", range: [1, 5] },
1725
+ low: { label: "5\u201315 minutes", range: [5, 15] },
1726
+ moderate: { label: "15\u201345 minutes", range: [15, 45] },
1727
+ high: { label: "1\u20133 hours", range: [60, 180] },
1728
+ extreme: { label: "3\u20138 hours", range: [180, 480] }
1729
+ };
1730
+ const base = EFFORT_RANGES[estimate.effort];
1731
+ const fileMultiplier = Math.max(1, Math.log2(estimate.totalFiles + 1));
1732
+ const low = Math.round(base.range[0] * fileMultiplier);
1733
+ const high = Math.round(base.range[1] * fileMultiplier);
1734
+ if (high >= 120) {
1735
+ const lowHours = Math.round(low / 60 * 10) / 10;
1736
+ const highHours = Math.round(high / 60 * 10) / 10;
1737
+ return { label: `${lowHours}\u2013${highHours} hours`, rangeMinutes: [low, high] };
1738
+ }
1739
+ return { label: `${low}\u2013${high} minutes`, rangeMinutes: [low, high] };
1740
+ }
1470
1741
  calculateEffort(totalSchemas, advancedCount, hasDeepDU) {
1471
1742
  if (totalSchemas >= 500 && hasDeepDU) return "extreme";
1472
1743
  if (totalSchemas >= 200 || advancedCount >= 20) return "high";
@@ -1536,9 +1807,115 @@ async function loadConfig(configPath) {
1536
1807
  };
1537
1808
  }
1538
1809
 
1810
+ // src/cross-field-patterns.ts
1811
+ function requireIf(conditionField, requiredField) {
1812
+ return {
1813
+ name: `requireIf(${conditionField}, ${requiredField})`,
1814
+ description: `${requiredField} is required when ${conditionField} is truthy`,
1815
+ zodCode: [
1816
+ ".superRefine((data, ctx) => {",
1817
+ ` if (data.${conditionField} && !data.${requiredField}) {`,
1818
+ " ctx.addIssue({",
1819
+ " code: z.ZodIssueCode.custom,",
1820
+ ` message: '${requiredField} is required when ${conditionField} is set',`,
1821
+ ` path: ['${requiredField}'],`,
1822
+ " });",
1823
+ " }",
1824
+ "})"
1825
+ ].join("\n")
1826
+ };
1827
+ }
1828
+ function requireOneOf(fields) {
1829
+ const fieldList = fields.map((f) => `'${f}'`).join(", ");
1830
+ const conditions = fields.map((f) => `data.${f}`).join(" || ");
1831
+ return {
1832
+ name: `requireOneOf(${fields.join(", ")})`,
1833
+ description: `At least one of [${fields.join(", ")}] must be provided`,
1834
+ zodCode: [
1835
+ ".superRefine((data, ctx) => {",
1836
+ ` if (!(${conditions})) {`,
1837
+ " ctx.addIssue({",
1838
+ " code: z.ZodIssueCode.custom,",
1839
+ ` message: 'At least one of [${fields.join(", ")}] is required',`,
1840
+ ` path: [${fieldList}],`,
1841
+ " });",
1842
+ " }",
1843
+ "})"
1844
+ ].join("\n")
1845
+ };
1846
+ }
1847
+ function mutuallyExclusive(fields) {
1848
+ const checks = fields.map((f) => `(data.${f} ? 1 : 0)`).join(" + ");
1849
+ return {
1850
+ name: `mutuallyExclusive(${fields.join(", ")})`,
1851
+ description: `Only one of [${fields.join(", ")}] can be set at a time`,
1852
+ zodCode: [
1853
+ ".superRefine((data, ctx) => {",
1854
+ ` const count = ${checks};`,
1855
+ " if (count > 1) {",
1856
+ " ctx.addIssue({",
1857
+ " code: z.ZodIssueCode.custom,",
1858
+ ` message: 'Only one of [${fields.join(", ")}] can be set at a time',`,
1859
+ " });",
1860
+ " }",
1861
+ "})"
1862
+ ].join("\n")
1863
+ };
1864
+ }
1865
+ function dependentFields(primaryField, dependents) {
1866
+ const checks = dependents.map(
1867
+ (f) => ` if (!data.${f}) {
1868
+ ctx.addIssue({ code: z.ZodIssueCode.custom, message: '${f} is required when ${primaryField} is set', path: ['${f}'] });
1869
+ }`
1870
+ ).join("\n");
1871
+ return {
1872
+ name: `dependentFields(${primaryField} -> ${dependents.join(", ")})`,
1873
+ description: `When ${primaryField} is set, [${dependents.join(", ")}] are required`,
1874
+ zodCode: [
1875
+ ".superRefine((data, ctx) => {",
1876
+ ` if (data.${primaryField}) {`,
1877
+ checks,
1878
+ " }",
1879
+ "})"
1880
+ ].join("\n")
1881
+ };
1882
+ }
1883
+ function conditionalValidation(conditionField, conditionValue, targetField, validationMessage) {
1884
+ return {
1885
+ name: `conditionalValidation(${conditionField}=${conditionValue} -> ${targetField})`,
1886
+ description: `Validate ${targetField} when ${conditionField} equals ${conditionValue}`,
1887
+ zodCode: [
1888
+ ".superRefine((data, ctx) => {",
1889
+ ` if (data.${conditionField} === ${conditionValue} && !data.${targetField}) {`,
1890
+ " ctx.addIssue({",
1891
+ " code: z.ZodIssueCode.custom,",
1892
+ ` message: '${validationMessage}',`,
1893
+ ` path: ['${targetField}'],`,
1894
+ " });",
1895
+ " }",
1896
+ "})"
1897
+ ].join("\n")
1898
+ };
1899
+ }
1900
+ function suggestCrossFieldPattern(whenCode) {
1901
+ const booleanMatch = whenCode.match(/\.when\(['"](\w+)['"]\s*,\s*\{[^}]*is:\s*true/);
1902
+ if (booleanMatch?.[1]) {
1903
+ const field = booleanMatch[1];
1904
+ return requireIf(field, "targetField");
1905
+ }
1906
+ const multiFieldMatch = whenCode.match(/\.when\(\[([^\]]+)\]/);
1907
+ if (multiFieldMatch?.[1]) {
1908
+ const fields = multiFieldMatch[1].split(",").map((f) => f.trim().replace(/['"]/g, "")).filter(Boolean);
1909
+ if (fields.length > 1) {
1910
+ return dependentFields(fields[0] ?? "primary", fields.slice(1));
1911
+ }
1912
+ }
1913
+ return null;
1914
+ }
1915
+
1539
1916
  // src/dependency-graph.ts
1540
- import { existsSync as existsSync4, readdirSync, readFileSync as readFileSync4 } from "fs";
1541
- import { join as join4, resolve } from "path";
1917
+ import { existsSync as existsSync5, readdirSync as readdirSync2, readFileSync as readFileSync5 } from "fs";
1918
+ import { join as join5, resolve } from "path";
1542
1919
  var SchemaDependencyResolver = class {
1543
1920
  resolve(project, filePaths) {
1544
1921
  const fileSet = new Set(filePaths);
@@ -1665,38 +2042,38 @@ function computeParallelBatches(packages, suggestedOrder) {
1665
2042
  }
1666
2043
  var MonorepoResolver = class {
1667
2044
  detect(projectPath) {
1668
- const pkgPath = join4(projectPath, "package.json");
1669
- if (existsSync4(pkgPath)) {
2045
+ const pkgPath = join5(projectPath, "package.json");
2046
+ if (existsSync5(pkgPath)) {
1670
2047
  try {
1671
- const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
2048
+ const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
1672
2049
  if (pkg.workspaces) return true;
1673
2050
  } catch {
1674
2051
  }
1675
2052
  }
1676
- if (existsSync4(join4(projectPath, "pnpm-workspace.yaml"))) return true;
2053
+ if (existsSync5(join5(projectPath, "pnpm-workspace.yaml"))) return true;
1677
2054
  return false;
1678
2055
  }
1679
2056
  /**
1680
2057
  * Detect which workspace manager is being used.
1681
2058
  */
1682
2059
  detectManager(projectPath) {
1683
- if (existsSync4(join4(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
1684
- const pkgPath = join4(projectPath, "package.json");
1685
- if (existsSync4(pkgPath)) {
2060
+ if (existsSync5(join5(projectPath, "pnpm-workspace.yaml"))) return "pnpm";
2061
+ const pkgPath = join5(projectPath, "package.json");
2062
+ if (existsSync5(pkgPath)) {
1686
2063
  try {
1687
- const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
2064
+ const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
1688
2065
  if (pkg.packageManager?.startsWith("yarn")) return "yarn";
1689
2066
  if (pkg.packageManager?.startsWith("pnpm")) return "pnpm";
1690
2067
  } catch {
1691
2068
  }
1692
2069
  }
1693
- if (existsSync4(join4(projectPath, "pnpm-lock.yaml"))) return "pnpm";
1694
- if (existsSync4(join4(projectPath, "yarn.lock"))) return "yarn";
2070
+ if (existsSync5(join5(projectPath, "pnpm-lock.yaml"))) return "pnpm";
2071
+ if (existsSync5(join5(projectPath, "yarn.lock"))) return "yarn";
1695
2072
  return "npm";
1696
2073
  }
1697
2074
  analyze(projectPath) {
1698
- const pkgPath = join4(projectPath, "package.json");
1699
- if (!existsSync4(pkgPath)) {
2075
+ const pkgPath = join5(projectPath, "package.json");
2076
+ if (!existsSync5(pkgPath)) {
1700
2077
  return { isMonorepo: false, packages: [], suggestedOrder: [] };
1701
2078
  }
1702
2079
  let workspaceGlobs;
@@ -1711,10 +2088,10 @@ var MonorepoResolver = class {
1711
2088
  const packages = [];
1712
2089
  const resolvedDirs = this.resolveWorkspaceDirs(projectPath, workspaceGlobs);
1713
2090
  for (const dir of resolvedDirs) {
1714
- const wsPkgPath = join4(dir, "package.json");
1715
- if (!existsSync4(wsPkgPath)) continue;
2091
+ const wsPkgPath = join5(dir, "package.json");
2092
+ if (!existsSync5(wsPkgPath)) continue;
1716
2093
  try {
1717
- const wsPkg = JSON.parse(readFileSync4(wsPkgPath, "utf-8"));
2094
+ const wsPkg = JSON.parse(readFileSync5(wsPkgPath, "utf-8"));
1718
2095
  if (!wsPkg.name) continue;
1719
2096
  const allDeps = { ...wsPkg.dependencies, ...wsPkg.devDependencies };
1720
2097
  const depNames = Object.keys(allDeps);
@@ -1758,14 +2135,14 @@ var MonorepoResolver = class {
1758
2135
  * Supports: npm/yarn workspaces (package.json), pnpm-workspace.yaml
1759
2136
  */
1760
2137
  resolveWorkspaceGlobs(projectPath) {
1761
- const pnpmPath = join4(projectPath, "pnpm-workspace.yaml");
1762
- if (existsSync4(pnpmPath)) {
2138
+ const pnpmPath = join5(projectPath, "pnpm-workspace.yaml");
2139
+ if (existsSync5(pnpmPath)) {
1763
2140
  return this.parsePnpmWorkspace(pnpmPath);
1764
2141
  }
1765
- const pkgPath = join4(projectPath, "package.json");
1766
- if (existsSync4(pkgPath)) {
2142
+ const pkgPath = join5(projectPath, "package.json");
2143
+ if (existsSync5(pkgPath)) {
1767
2144
  try {
1768
- const pkg = JSON.parse(readFileSync4(pkgPath, "utf-8"));
2145
+ const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
1769
2146
  if (pkg.workspaces) {
1770
2147
  return Array.isArray(pkg.workspaces) ? pkg.workspaces : pkg.workspaces.packages;
1771
2148
  }
@@ -1784,7 +2161,7 @@ var MonorepoResolver = class {
1784
2161
  * ```
1785
2162
  */
1786
2163
  parsePnpmWorkspace(filePath) {
1787
- const content = readFileSync4(filePath, "utf-8");
2164
+ const content = readFileSync5(filePath, "utf-8");
1788
2165
  const globs = [];
1789
2166
  let inPackages = false;
1790
2167
  for (const line of content.split("\n")) {
@@ -1810,13 +2187,13 @@ var MonorepoResolver = class {
1810
2187
  for (const glob of globs) {
1811
2188
  const clean = glob.replace(/\/?\*$/, "");
1812
2189
  const base = resolve(projectPath, clean);
1813
- if (!existsSync4(base)) continue;
2190
+ if (!existsSync5(base)) continue;
1814
2191
  if (glob.endsWith("*")) {
1815
2192
  try {
1816
- const entries = readdirSync(base, { withFileTypes: true });
2193
+ const entries = readdirSync2(base, { withFileTypes: true });
1817
2194
  for (const entry of entries) {
1818
2195
  if (entry.isDirectory()) {
1819
- dirs.push(join4(base, entry.name));
2196
+ dirs.push(join5(base, entry.name));
1820
2197
  }
1821
2198
  }
1822
2199
  } catch {
@@ -1830,8 +2207,8 @@ var MonorepoResolver = class {
1830
2207
  };
1831
2208
 
1832
2209
  // src/detailed-analyzer.ts
1833
- import { existsSync as existsSync5, readFileSync as readFileSync5 } from "fs";
1834
- import { join as join5 } from "path";
2210
+ import { existsSync as existsSync6, readFileSync as readFileSync6 } from "fs";
2211
+ import { join as join6 } from "path";
1835
2212
  var COMPLEXITY_CHAIN_WEIGHT = 2;
1836
2213
  var COMPLEXITY_DEPTH_WEIGHT = 3;
1837
2214
  var COMPLEXITY_VALIDATION_WEIGHT = 1;
@@ -1896,10 +2273,10 @@ var DetailedAnalyzer = class {
1896
2273
  }
1897
2274
  detectLibraryVersions(projectPath) {
1898
2275
  const versions = [];
1899
- const pkgPath = join5(projectPath, "package.json");
1900
- if (!existsSync5(pkgPath)) return versions;
2276
+ const pkgPath = join6(projectPath, "package.json");
2277
+ if (!existsSync6(pkgPath)) return versions;
1901
2278
  try {
1902
- const pkg = JSON.parse(readFileSync5(pkgPath, "utf-8"));
2279
+ const pkg = JSON.parse(readFileSync6(pkgPath, "utf-8"));
1903
2280
  const knownLibs = ["zod", "yup", "joi", "io-ts", "valibot"];
1904
2281
  const allDeps = {
1905
2282
  ...pkg.dependencies,
@@ -2074,8 +2451,8 @@ var DetailedAnalyzer = class {
2074
2451
 
2075
2452
  // src/drift-detector.ts
2076
2453
  import { createHash as createHash2 } from "crypto";
2077
- import { existsSync as existsSync6, mkdirSync as mkdirSync2, readFileSync as readFileSync6, writeFileSync as writeFileSync2 } from "fs";
2078
- import { join as join6, relative } from "path";
2454
+ import { existsSync as existsSync7, mkdirSync as mkdirSync3, readFileSync as readFileSync7, writeFileSync as writeFileSync3 } from "fs";
2455
+ import { join as join7, relative } from "path";
2079
2456
  var SNAPSHOT_DIR = ".schemashift";
2080
2457
  var SNAPSHOT_FILE = "schema-snapshot.json";
2081
2458
  var SNAPSHOT_VERSION = 1;
@@ -2083,8 +2460,8 @@ var DriftDetector = class {
2083
2460
  snapshotDir;
2084
2461
  snapshotPath;
2085
2462
  constructor(projectPath) {
2086
- this.snapshotDir = join6(projectPath, SNAPSHOT_DIR);
2087
- this.snapshotPath = join6(this.snapshotDir, SNAPSHOT_FILE);
2463
+ this.snapshotDir = join7(projectPath, SNAPSHOT_DIR);
2464
+ this.snapshotPath = join7(this.snapshotDir, SNAPSHOT_FILE);
2088
2465
  }
2089
2466
  /**
2090
2467
  * Take a snapshot of the current schema state
@@ -2092,8 +2469,8 @@ var DriftDetector = class {
2092
2469
  snapshot(files, projectPath) {
2093
2470
  const schemas = [];
2094
2471
  for (const filePath of files) {
2095
- if (!existsSync6(filePath)) continue;
2096
- const content = readFileSync6(filePath, "utf-8");
2472
+ if (!existsSync7(filePath)) continue;
2473
+ const content = readFileSync7(filePath, "utf-8");
2097
2474
  const library = this.detectLibraryFromContent(content);
2098
2475
  if (library === "unknown") continue;
2099
2476
  const schemaNames = this.extractSchemaNames(content);
@@ -2117,20 +2494,20 @@ var DriftDetector = class {
2117
2494
  * Save a snapshot to disk
2118
2495
  */
2119
2496
  saveSnapshot(snapshot) {
2120
- if (!existsSync6(this.snapshotDir)) {
2121
- mkdirSync2(this.snapshotDir, { recursive: true });
2497
+ if (!existsSync7(this.snapshotDir)) {
2498
+ mkdirSync3(this.snapshotDir, { recursive: true });
2122
2499
  }
2123
- writeFileSync2(this.snapshotPath, JSON.stringify(snapshot, null, 2));
2500
+ writeFileSync3(this.snapshotPath, JSON.stringify(snapshot, null, 2));
2124
2501
  }
2125
2502
  /**
2126
2503
  * Load saved snapshot from disk
2127
2504
  */
2128
2505
  loadSnapshot() {
2129
- if (!existsSync6(this.snapshotPath)) {
2506
+ if (!existsSync7(this.snapshotPath)) {
2130
2507
  return null;
2131
2508
  }
2132
2509
  try {
2133
- const content = readFileSync6(this.snapshotPath, "utf-8");
2510
+ const content = readFileSync7(this.snapshotPath, "utf-8");
2134
2511
  return JSON.parse(content);
2135
2512
  } catch {
2136
2513
  return null;
@@ -2592,6 +2969,250 @@ var GovernanceEngine = class {
2592
2969
  }
2593
2970
  };
2594
2971
 
2972
+ // src/governance-fixer.ts
2973
+ var GovernanceFixer = class {
2974
+ defaultMaxLength = 1e4;
2975
+ /**
2976
+ * Set the default max length appended by the require-max-length fix.
2977
+ */
2978
+ setDefaultMaxLength(length) {
2979
+ this.defaultMaxLength = length;
2980
+ }
2981
+ /**
2982
+ * Check if a violation is auto-fixable.
2983
+ */
2984
+ canFix(violation) {
2985
+ return [
2986
+ "no-any-schemas",
2987
+ "require-descriptions",
2988
+ "require-max-length",
2989
+ "naming-convention",
2990
+ "no-any",
2991
+ "require-description",
2992
+ "required-validations",
2993
+ "require-safeParse"
2994
+ ].includes(violation.rule);
2995
+ }
2996
+ /**
2997
+ * Fix a single violation in a source file.
2998
+ * Returns the fixed code for the entire file.
2999
+ */
3000
+ fix(violation, sourceCode) {
3001
+ switch (violation.rule) {
3002
+ case "no-any-schemas":
3003
+ case "no-any":
3004
+ return this.fixNoAny(violation, sourceCode);
3005
+ case "require-descriptions":
3006
+ case "require-description":
3007
+ return this.fixRequireDescription(violation, sourceCode);
3008
+ case "require-max-length":
3009
+ case "required-validations":
3010
+ return this.fixRequireMaxLength(violation, sourceCode);
3011
+ case "naming-convention":
3012
+ return this.fixNamingConvention(violation, sourceCode);
3013
+ case "require-safeParse":
3014
+ return this.fixRequireSafeParse(violation, sourceCode);
3015
+ default:
3016
+ return {
3017
+ success: false,
3018
+ explanation: `No auto-fix available for rule: ${violation.rule}`,
3019
+ rule: violation.rule,
3020
+ lineNumber: violation.lineNumber
3021
+ };
3022
+ }
3023
+ }
3024
+ /**
3025
+ * Fix all fixable violations in a source file.
3026
+ * Applies fixes from bottom to top to preserve line numbers.
3027
+ */
3028
+ fixAll(violations, sourceCode) {
3029
+ const fixable = violations.filter((v) => this.canFix(v));
3030
+ const results = [];
3031
+ let currentCode = sourceCode;
3032
+ let fixed = 0;
3033
+ const sorted = [...fixable].sort((a, b) => b.lineNumber - a.lineNumber);
3034
+ for (const violation of sorted) {
3035
+ const result = this.fix(violation, currentCode);
3036
+ results.push(result);
3037
+ if (result.success && result.fixedCode) {
3038
+ currentCode = result.fixedCode;
3039
+ fixed++;
3040
+ }
3041
+ }
3042
+ return {
3043
+ totalViolations: violations.length,
3044
+ fixed,
3045
+ skipped: violations.length - fixed,
3046
+ results
3047
+ };
3048
+ }
3049
+ fixNoAny(violation, sourceCode) {
3050
+ const lines = sourceCode.split("\n");
3051
+ const lineIndex = violation.lineNumber - 1;
3052
+ const line = lines[lineIndex];
3053
+ if (!line) {
3054
+ return {
3055
+ success: false,
3056
+ explanation: `Line ${violation.lineNumber} not found`,
3057
+ rule: violation.rule,
3058
+ lineNumber: violation.lineNumber
3059
+ };
3060
+ }
3061
+ let fixedLine = line;
3062
+ let explanation = "";
3063
+ if (/\bz\.any\(\)/.test(line)) {
3064
+ fixedLine = line.replace(/\bz\.any\(\)/, "z.unknown()");
3065
+ explanation = "Replaced z.any() with z.unknown() for type safety";
3066
+ } else if (/\byup\.mixed\(\)/.test(line)) {
3067
+ fixedLine = line.replace(/\byup\.mixed\(\)/, "yup.mixed().required()");
3068
+ explanation = "Added .required() constraint to yup.mixed()";
3069
+ } else if (/\bt\.any\b/.test(line)) {
3070
+ fixedLine = line.replace(/\bt\.any\b/, "t.unknown");
3071
+ explanation = "Replaced t.any with t.unknown for type safety";
3072
+ } else if (/\bv\.any\(\)/.test(line)) {
3073
+ fixedLine = line.replace(/\bv\.any\(\)/, "v.unknown()");
3074
+ explanation = "Replaced v.any() with v.unknown() for type safety";
3075
+ } else {
3076
+ return {
3077
+ success: false,
3078
+ explanation: "Could not identify any-type pattern to fix",
3079
+ rule: violation.rule,
3080
+ lineNumber: violation.lineNumber
3081
+ };
3082
+ }
3083
+ lines[lineIndex] = fixedLine;
3084
+ return {
3085
+ success: true,
3086
+ fixedCode: lines.join("\n"),
3087
+ explanation,
3088
+ rule: violation.rule,
3089
+ lineNumber: violation.lineNumber
3090
+ };
3091
+ }
3092
+ fixRequireDescription(violation, sourceCode) {
3093
+ const lines = sourceCode.split("\n");
3094
+ const lineIndex = violation.lineNumber - 1;
3095
+ const line = lines[lineIndex];
3096
+ if (!line) {
3097
+ return {
3098
+ success: false,
3099
+ explanation: `Line ${violation.lineNumber} not found`,
3100
+ rule: violation.rule,
3101
+ lineNumber: violation.lineNumber
3102
+ };
3103
+ }
3104
+ let endLineIndex = lineIndex;
3105
+ for (let i = lineIndex; i < lines.length && i < lineIndex + 20; i++) {
3106
+ if (lines[i]?.includes(";")) {
3107
+ endLineIndex = i;
3108
+ break;
3109
+ }
3110
+ }
3111
+ const endLine = lines[endLineIndex] ?? "";
3112
+ const schemaName = violation.schemaName || "schema";
3113
+ const description = `${schemaName} schema`;
3114
+ const semicolonIndex = endLine.lastIndexOf(";");
3115
+ if (semicolonIndex >= 0) {
3116
+ lines[endLineIndex] = `${endLine.slice(0, semicolonIndex)}.describe('${description}')${endLine.slice(semicolonIndex)}`;
3117
+ } else {
3118
+ lines[endLineIndex] = `${endLine}.describe('${description}')`;
3119
+ }
3120
+ return {
3121
+ success: true,
3122
+ fixedCode: lines.join("\n"),
3123
+ explanation: `Added .describe('${description}') to ${schemaName}`,
3124
+ rule: violation.rule,
3125
+ lineNumber: violation.lineNumber
3126
+ };
3127
+ }
3128
+ fixRequireMaxLength(violation, sourceCode) {
3129
+ const lines = sourceCode.split("\n");
3130
+ const lineIndex = violation.lineNumber - 1;
3131
+ const line = lines[lineIndex];
3132
+ if (!line) {
3133
+ return {
3134
+ success: false,
3135
+ explanation: `Line ${violation.lineNumber} not found`,
3136
+ rule: violation.rule,
3137
+ lineNumber: violation.lineNumber
3138
+ };
3139
+ }
3140
+ if (/z\.string\(\)/.test(line)) {
3141
+ lines[lineIndex] = line.replace(/z\.string\(\)/, `z.string().max(${this.defaultMaxLength})`);
3142
+ return {
3143
+ success: true,
3144
+ fixedCode: lines.join("\n"),
3145
+ explanation: `Added .max(${this.defaultMaxLength}) to string schema`,
3146
+ rule: violation.rule,
3147
+ lineNumber: violation.lineNumber
3148
+ };
3149
+ }
3150
+ return {
3151
+ success: false,
3152
+ explanation: "Could not find z.string() pattern to fix on this line",
3153
+ rule: violation.rule,
3154
+ lineNumber: violation.lineNumber
3155
+ };
3156
+ }
3157
+ fixNamingConvention(violation, sourceCode) {
3158
+ const schemaName = violation.schemaName;
3159
+ if (!schemaName) {
3160
+ return {
3161
+ success: false,
3162
+ explanation: "No schema name available for renaming",
3163
+ rule: violation.rule,
3164
+ lineNumber: violation.lineNumber
3165
+ };
3166
+ }
3167
+ const newName = schemaName.endsWith("Schema") ? schemaName : `${schemaName}Schema`;
3168
+ if (newName === schemaName) {
3169
+ return {
3170
+ success: false,
3171
+ explanation: "Schema already matches naming convention",
3172
+ rule: violation.rule,
3173
+ lineNumber: violation.lineNumber
3174
+ };
3175
+ }
3176
+ const fixedCode = sourceCode.replace(new RegExp(`\\b${schemaName}\\b`, "g"), newName);
3177
+ return {
3178
+ success: true,
3179
+ fixedCode,
3180
+ explanation: `Renamed "${schemaName}" to "${newName}"`,
3181
+ rule: violation.rule,
3182
+ lineNumber: violation.lineNumber
3183
+ };
3184
+ }
3185
+ fixRequireSafeParse(violation, sourceCode) {
3186
+ const lines = sourceCode.split("\n");
3187
+ const lineIndex = violation.lineNumber - 1;
3188
+ const line = lines[lineIndex];
3189
+ if (!line) {
3190
+ return {
3191
+ success: false,
3192
+ explanation: `Line ${violation.lineNumber} not found`,
3193
+ rule: violation.rule,
3194
+ lineNumber: violation.lineNumber
3195
+ };
3196
+ }
3197
+ if (line.includes(".parse(") && !line.includes(".safeParse(")) {
3198
+ lines[lineIndex] = line.replace(".parse(", ".safeParse(");
3199
+ return {
3200
+ success: true,
3201
+ fixedCode: lines.join("\n"),
3202
+ explanation: "Replaced .parse() with .safeParse() for safer error handling",
3203
+ rule: violation.rule,
3204
+ lineNumber: violation.lineNumber
3205
+ };
3206
+ }
3207
+ return {
3208
+ success: false,
3209
+ explanation: "Could not find .parse() pattern to fix",
3210
+ rule: violation.rule,
3211
+ lineNumber: violation.lineNumber
3212
+ };
3213
+ }
3214
+ };
3215
+
2595
3216
  // src/governance-templates.ts
2596
3217
  var GOVERNANCE_TEMPLATES = [
2597
3218
  {
@@ -2840,17 +3461,184 @@ function getGovernanceTemplateNames() {
2840
3461
  return GOVERNANCE_TEMPLATES.map((t) => t.name);
2841
3462
  }
2842
3463
 
3464
+ // src/graph-exporter.ts
3465
+ var LIBRARY_COLORS = {
3466
+ zod: "#3068B7",
3467
+ yup: "#32CD32",
3468
+ joi: "#FF6347",
3469
+ "io-ts": "#9370DB",
3470
+ valibot: "#FF8C00",
3471
+ arktype: "#20B2AA",
3472
+ superstruct: "#DAA520",
3473
+ effect: "#6A5ACD"
3474
+ };
3475
+ var LIBRARY_MERMAID_STYLES = {
3476
+ zod: "fill:#3068B7,color:#fff",
3477
+ yup: "fill:#32CD32,color:#000",
3478
+ joi: "fill:#FF6347,color:#fff",
3479
+ "io-ts": "fill:#9370DB,color:#fff",
3480
+ valibot: "fill:#FF8C00,color:#000",
3481
+ arktype: "fill:#20B2AA,color:#fff",
3482
+ superstruct: "fill:#DAA520,color:#000",
3483
+ effect: "fill:#6A5ACD,color:#fff"
3484
+ };
3485
+ var GraphExporter = class {
3486
+ /**
3487
+ * Export dependency graph as DOT format for Graphviz.
3488
+ */
3489
+ exportDot(graph, options = {}) {
3490
+ const lines = [];
3491
+ lines.push("digraph SchemaShiftDependencies {");
3492
+ lines.push(" rankdir=LR;");
3493
+ lines.push(' node [shape=box, style=filled, fontname="monospace"];');
3494
+ lines.push(' edge [color="#666666"];');
3495
+ lines.push("");
3496
+ const circularFiles = /* @__PURE__ */ new Set();
3497
+ if (options.highlightCircular && graph.circularWarnings.length > 0) {
3498
+ for (const warning of graph.circularWarnings) {
3499
+ const match = warning.match(/Circular dependency: (.+)/);
3500
+ if (match?.[1]) {
3501
+ for (const part of match[1].split(" -> ")) {
3502
+ for (const file of graph.sortedFiles) {
3503
+ if (file.endsWith(part.trim()) || this.shortenPath(file) === part.trim()) {
3504
+ circularFiles.add(file);
3505
+ }
3506
+ }
3507
+ }
3508
+ }
3509
+ }
3510
+ }
3511
+ for (const filePath of graph.sortedFiles) {
3512
+ const meta = options.nodeMetadata?.get(filePath);
3513
+ const library = meta?.library;
3514
+ if (options.filterLibrary && library !== options.filterLibrary) continue;
3515
+ const shortPath = this.shortenPath(filePath);
3516
+ const nodeId = this.toNodeId(filePath);
3517
+ const attrs = [];
3518
+ attrs.push(`label="${shortPath}"`);
3519
+ if (circularFiles.has(filePath)) {
3520
+ attrs.push('color="#FF0000"');
3521
+ attrs.push("penwidth=2");
3522
+ }
3523
+ if (options.colorByLibrary && library && LIBRARY_COLORS[library]) {
3524
+ attrs.push(`fillcolor="${LIBRARY_COLORS[library]}"`);
3525
+ attrs.push('fontcolor="white"');
3526
+ } else {
3527
+ attrs.push('fillcolor="#E8E8E8"');
3528
+ }
3529
+ if (meta?.schemaCount) {
3530
+ attrs.push(`tooltip="${meta.schemaCount} schema(s)"`);
3531
+ }
3532
+ lines.push(` ${nodeId} [${attrs.join(", ")}];`);
3533
+ }
3534
+ lines.push("");
3535
+ const filterSet = options.filterLibrary ? new Set(
3536
+ graph.sortedFiles.filter((f) => {
3537
+ const meta = options.nodeMetadata?.get(f);
3538
+ return meta?.library === options.filterLibrary;
3539
+ })
3540
+ ) : void 0;
3541
+ for (const [file, deps] of graph.dependencies) {
3542
+ if (filterSet && !filterSet.has(file)) continue;
3543
+ const fromId = this.toNodeId(file);
3544
+ for (const dep of deps) {
3545
+ if (filterSet && !filterSet.has(dep)) continue;
3546
+ const toId = this.toNodeId(dep);
3547
+ const edgeAttrs = [];
3548
+ if (options.highlightCircular && circularFiles.has(file) && circularFiles.has(dep)) {
3549
+ edgeAttrs.push('color="#FF0000"');
3550
+ edgeAttrs.push("penwidth=2");
3551
+ }
3552
+ lines.push(
3553
+ ` ${fromId} -> ${toId}${edgeAttrs.length > 0 ? ` [${edgeAttrs.join(", ")}]` : ""};`
3554
+ );
3555
+ }
3556
+ }
3557
+ lines.push("}");
3558
+ return lines.join("\n");
3559
+ }
3560
+ /**
3561
+ * Export dependency graph as Mermaid diagram syntax.
3562
+ */
3563
+ exportMermaid(graph, options = {}) {
3564
+ const lines = [];
3565
+ lines.push("graph LR");
3566
+ const styledNodes = /* @__PURE__ */ new Map();
3567
+ for (const [file, deps] of graph.dependencies) {
3568
+ const meta = options.nodeMetadata?.get(file);
3569
+ if (options.filterLibrary && meta?.library !== options.filterLibrary) continue;
3570
+ const fromId = this.toMermaidId(file);
3571
+ const fromLabel = this.shortenPath(file);
3572
+ if (meta?.library) {
3573
+ styledNodes.set(fromId, meta.library);
3574
+ }
3575
+ if (deps.length === 0) {
3576
+ lines.push(` ${fromId}["${fromLabel}"]`);
3577
+ }
3578
+ for (const dep of deps) {
3579
+ const depMeta = options.nodeMetadata?.get(dep);
3580
+ if (options.filterLibrary && depMeta?.library !== options.filterLibrary) continue;
3581
+ const toId = this.toMermaidId(dep);
3582
+ const toLabel = this.shortenPath(dep);
3583
+ if (depMeta?.library) {
3584
+ styledNodes.set(toId, depMeta.library);
3585
+ }
3586
+ lines.push(` ${fromId}["${fromLabel}"] --> ${toId}["${toLabel}"]`);
3587
+ }
3588
+ }
3589
+ for (const file of graph.sortedFiles) {
3590
+ const meta = options.nodeMetadata?.get(file);
3591
+ if (options.filterLibrary && meta?.library !== options.filterLibrary) continue;
3592
+ const id = this.toMermaidId(file);
3593
+ if (!lines.some((l) => l.includes(id))) {
3594
+ lines.push(` ${id}["${this.shortenPath(file)}"]`);
3595
+ if (meta?.library) {
3596
+ styledNodes.set(id, meta.library);
3597
+ }
3598
+ }
3599
+ }
3600
+ if (options.colorByLibrary && styledNodes.size > 0) {
3601
+ lines.push("");
3602
+ const libraryGroups = /* @__PURE__ */ new Map();
3603
+ for (const [nodeId, library] of styledNodes) {
3604
+ const group = libraryGroups.get(library) ?? [];
3605
+ group.push(nodeId);
3606
+ libraryGroups.set(library, group);
3607
+ }
3608
+ for (const [library, nodeIds] of libraryGroups) {
3609
+ const style = LIBRARY_MERMAID_STYLES[library];
3610
+ if (style) {
3611
+ for (const nodeId of nodeIds) {
3612
+ lines.push(` style ${nodeId} ${style}`);
3613
+ }
3614
+ }
3615
+ }
3616
+ }
3617
+ return lines.join("\n");
3618
+ }
3619
+ shortenPath(filePath) {
3620
+ const parts = filePath.split("/");
3621
+ return parts.slice(-2).join("/");
3622
+ }
3623
+ toNodeId(filePath) {
3624
+ return filePath.replace(/[^a-zA-Z0-9]/g, "_").replace(/^_+/, "").replace(/_+$/, "");
3625
+ }
3626
+ toMermaidId(filePath) {
3627
+ return filePath.replace(/[^a-zA-Z0-9]/g, "_").replace(/^_+/, "n_").replace(/_+$/, "");
3628
+ }
3629
+ };
3630
+
2843
3631
  // src/incremental.ts
2844
- import { existsSync as existsSync7, mkdirSync as mkdirSync3, readFileSync as readFileSync7, unlinkSync, writeFileSync as writeFileSync3 } from "fs";
2845
- import { join as join7 } from "path";
3632
+ import { existsSync as existsSync8, mkdirSync as mkdirSync4, readFileSync as readFileSync8, unlinkSync, writeFileSync as writeFileSync4 } from "fs";
3633
+ import { join as join8 } from "path";
2846
3634
  var STATE_DIR = ".schemashift";
2847
3635
  var STATE_FILE = "incremental.json";
2848
3636
  var IncrementalTracker = class {
2849
3637
  stateDir;
2850
3638
  statePath;
2851
3639
  constructor(projectPath) {
2852
- this.stateDir = join7(projectPath, STATE_DIR);
2853
- this.statePath = join7(this.stateDir, STATE_FILE);
3640
+ this.stateDir = join8(projectPath, STATE_DIR);
3641
+ this.statePath = join8(this.stateDir, STATE_FILE);
2854
3642
  }
2855
3643
  start(files, from, to) {
2856
3644
  const state = {
@@ -2885,9 +3673,9 @@ var IncrementalTracker = class {
2885
3673
  this.saveState(state);
2886
3674
  }
2887
3675
  getState() {
2888
- if (!existsSync7(this.statePath)) return null;
3676
+ if (!existsSync8(this.statePath)) return null;
2889
3677
  try {
2890
- return JSON.parse(readFileSync7(this.statePath, "utf-8"));
3678
+ return JSON.parse(readFileSync8(this.statePath, "utf-8"));
2891
3679
  } catch {
2892
3680
  return null;
2893
3681
  }
@@ -2913,22 +3701,416 @@ var IncrementalTracker = class {
2913
3701
  percent
2914
3702
  };
2915
3703
  }
3704
+ /**
3705
+ * Get a canary batch — a percentage of remaining files, sorted simplest first.
3706
+ * Used for phased rollouts where you migrate a small batch, verify, then continue.
3707
+ */
3708
+ getCanaryBatch(percent, fileSizes) {
3709
+ const state = this.getState();
3710
+ if (!state) return [];
3711
+ const count = Math.max(1, Math.ceil(state.remainingFiles.length * (percent / 100)));
3712
+ if (fileSizes) {
3713
+ const sorted = [...state.remainingFiles].sort((a, b) => {
3714
+ return (fileSizes.get(a) ?? 0) - (fileSizes.get(b) ?? 0);
3715
+ });
3716
+ return sorted.slice(0, count);
3717
+ }
3718
+ return state.remainingFiles.slice(0, count);
3719
+ }
2916
3720
  clear() {
2917
- if (existsSync7(this.statePath)) {
3721
+ if (existsSync8(this.statePath)) {
2918
3722
  unlinkSync(this.statePath);
2919
3723
  }
2920
3724
  }
2921
3725
  saveState(state) {
2922
- if (!existsSync7(this.stateDir)) {
2923
- mkdirSync3(this.stateDir, { recursive: true });
3726
+ if (!existsSync8(this.stateDir)) {
3727
+ mkdirSync4(this.stateDir, { recursive: true });
3728
+ }
3729
+ writeFileSync4(this.statePath, JSON.stringify(state, null, 2));
3730
+ }
3731
+ };
3732
+
3733
+ // src/migration-templates.ts
3734
+ var BUILT_IN_TEMPLATES = [
3735
+ {
3736
+ name: "react-hook-form-yup-to-zod",
3737
+ description: "Migrate React Hook Form project from Yup to Zod validation",
3738
+ category: "form-migration",
3739
+ migrationSteps: [{ from: "yup", to: "zod", description: "Convert Yup schemas to Zod schemas" }],
3740
+ preChecks: [
3741
+ { description: "Ensure @hookform/resolvers is installed" },
3742
+ { description: "Check for .when() conditional validations that need manual review" }
3743
+ ],
3744
+ postSteps: [
3745
+ {
3746
+ description: "Update resolver imports: yupResolver \u2192 zodResolver",
3747
+ command: void 0
3748
+ },
3749
+ {
3750
+ description: "Run tests to verify form validation behavior",
3751
+ command: "npm test"
3752
+ },
3753
+ {
3754
+ description: "Remove Yup dependency if no longer used",
3755
+ command: "npm uninstall yup"
3756
+ }
3757
+ ],
3758
+ packageChanges: [
3759
+ { action: "install", package: "zod", version: "^3.24.0" },
3760
+ { action: "upgrade", package: "@hookform/resolvers", version: "latest" }
3761
+ ],
3762
+ recommendedFlags: ["--cross-file", "--scaffold-tests", "--verbose"],
3763
+ estimatedEffort: "moderate"
3764
+ },
3765
+ {
3766
+ name: "trpc-zod-v3-to-v4",
3767
+ description: "Upgrade tRPC project from Zod v3 to Zod v4",
3768
+ category: "framework-upgrade",
3769
+ migrationSteps: [
3770
+ { from: "zod-v3", to: "v4", description: "Upgrade Zod v3 schemas to v4 syntax" }
3771
+ ],
3772
+ preChecks: [
3773
+ { description: "Check tRPC version \u2014 v11+ required for Zod v4 compatibility" },
3774
+ { description: "Check zod-validation-error version \u2014 v5.0.0+ required" },
3775
+ { description: "Run existing test suite to establish baseline", command: "npm test" }
3776
+ ],
3777
+ postSteps: [
3778
+ {
3779
+ description: "Update tRPC to v11 if not already",
3780
+ command: "npm install @trpc/server@latest @trpc/client@latest"
3781
+ },
3782
+ {
3783
+ description: "Update zod-validation-error if used",
3784
+ command: "npm install zod-validation-error@^5.0.0"
3785
+ },
3786
+ { description: "Review TODO(schemashift) comments for manual fixes" },
3787
+ { description: "Run tests to verify tRPC router behavior", command: "npm test" }
3788
+ ],
3789
+ packageChanges: [
3790
+ { action: "upgrade", package: "zod", version: "^3.25.0" },
3791
+ { action: "upgrade", package: "@trpc/server", version: "^11.0.0" }
3792
+ ],
3793
+ recommendedFlags: ["--compat-check", "--scaffold-tests", "--verbose"],
3794
+ estimatedEffort: "high"
3795
+ },
3796
+ {
3797
+ name: "express-joi-to-zod",
3798
+ description: "Migrate Express.js API validators from Joi to Zod",
3799
+ category: "library-switch",
3800
+ migrationSteps: [{ from: "joi", to: "zod", description: "Convert Joi schemas to Zod schemas" }],
3801
+ preChecks: [
3802
+ { description: "Identify middleware using Joi validation" },
3803
+ { description: "Check for Joi.extend() custom validators that need manual migration" }
3804
+ ],
3805
+ postSteps: [
3806
+ { description: "Update Express middleware to use Zod schemas" },
3807
+ { description: "Replace celebrate/express-validation with custom Zod middleware" },
3808
+ { description: "Run API integration tests", command: "npm test" },
3809
+ { description: "Remove Joi dependency", command: "npm uninstall joi" }
3810
+ ],
3811
+ packageChanges: [
3812
+ { action: "install", package: "zod", version: "^3.24.0" },
3813
+ { action: "remove", package: "celebrate" }
3814
+ ],
3815
+ recommendedFlags: ["--cross-file", "--verbose"],
3816
+ estimatedEffort: "moderate"
3817
+ },
3818
+ {
3819
+ name: "nextjs-form-migration",
3820
+ description: "Migrate Next.js form validation from Yup/Formik to Zod/React Hook Form",
3821
+ category: "form-migration",
3822
+ migrationSteps: [{ from: "yup", to: "zod", description: "Convert Yup schemas to Zod schemas" }],
3823
+ preChecks: [
3824
+ { description: "Identify all Formik form components" },
3825
+ { description: "Check for server-side validation using Yup" },
3826
+ { description: "Run existing tests to establish baseline", command: "npm test" }
3827
+ ],
3828
+ postSteps: [
3829
+ { description: "Replace Formik with React Hook Form + zodResolver" },
3830
+ { description: "Update server actions to use Zod for validation" },
3831
+ {
3832
+ description: "Install next-safe-action if using server actions",
3833
+ command: "npm install next-safe-action"
3834
+ },
3835
+ { description: "Run full test suite", command: "npm test" }
3836
+ ],
3837
+ packageChanges: [
3838
+ { action: "install", package: "zod", version: "^3.24.0" },
3839
+ { action: "install", package: "react-hook-form", version: "^7.0.0" },
3840
+ { action: "install", package: "@hookform/resolvers", version: "latest" }
3841
+ ],
3842
+ recommendedFlags: ["--cross-file", "--scaffold-tests"],
3843
+ estimatedEffort: "high"
3844
+ },
3845
+ {
3846
+ name: "monorepo-staged-migration",
3847
+ description: "Phased monorepo migration with incremental tracking",
3848
+ category: "monorepo",
3849
+ migrationSteps: [
3850
+ { from: "yup", to: "zod", description: "Convert shared packages first, then applications" }
3851
+ ],
3852
+ preChecks: [
3853
+ { description: "Analyze monorepo workspace structure" },
3854
+ { description: "Identify shared schema packages used by multiple apps" },
3855
+ { description: "Ensure all packages build successfully", command: "npm run build" }
3856
+ ],
3857
+ postSteps: [
3858
+ { description: "Run incremental migration starting with leaf packages" },
3859
+ { description: "Build all packages after each batch", command: "npm run build" },
3860
+ { description: "Run full test suite", command: "npm test" },
3861
+ { description: "Review cross-package type compatibility" }
3862
+ ],
3863
+ packageChanges: [],
3864
+ recommendedFlags: ["--cross-file", "--incremental", "--compat-check", "--audit"],
3865
+ estimatedEffort: "high"
3866
+ }
3867
+ ];
3868
+ function getMigrationTemplate(name) {
3869
+ return BUILT_IN_TEMPLATES.find((t) => t.name === name);
3870
+ }
3871
+ function getMigrationTemplateNames() {
3872
+ return BUILT_IN_TEMPLATES.map((t) => t.name);
3873
+ }
3874
+ function getMigrationTemplatesByCategory(category) {
3875
+ return BUILT_IN_TEMPLATES.filter((t) => t.category === category);
3876
+ }
3877
+ function getAllMigrationTemplates() {
3878
+ return [...BUILT_IN_TEMPLATES];
3879
+ }
3880
+ function validateMigrationTemplate(template) {
3881
+ const errors = [];
3882
+ if (!template.name || template.name.trim().length === 0) {
3883
+ errors.push("Template name is required");
3884
+ }
3885
+ if (!template.description || template.description.trim().length === 0) {
3886
+ errors.push("Template description is required");
3887
+ }
3888
+ if (!template.migrationSteps || template.migrationSteps.length === 0) {
3889
+ errors.push("At least one migration step is required");
3890
+ }
3891
+ for (const step of template.migrationSteps ?? []) {
3892
+ if (!step.from || !step.to) {
3893
+ errors.push(`Migration step must have from and to: ${JSON.stringify(step)}`);
3894
+ }
3895
+ }
3896
+ return { valid: errors.length === 0, errors };
3897
+ }
3898
+
3899
+ // src/notifications.ts
3900
+ async function computeSignature(payload, secret) {
3901
+ const { createHmac } = await import("crypto");
3902
+ return createHmac("sha256", secret).update(payload).digest("hex");
3903
+ }
3904
+ var WebhookNotifier = class {
3905
+ webhooks;
3906
+ constructor(webhooks) {
3907
+ this.webhooks = webhooks;
3908
+ }
3909
+ /**
3910
+ * Create a migration event with current timestamp.
3911
+ */
3912
+ createEvent(type, details, project) {
3913
+ return {
3914
+ type,
3915
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
3916
+ project,
3917
+ details
3918
+ };
3919
+ }
3920
+ /**
3921
+ * Send an event to all matching webhooks.
3922
+ */
3923
+ async send(event) {
3924
+ const results = [];
3925
+ for (const webhook of this.webhooks) {
3926
+ if (webhook.events && !webhook.events.includes(event.type)) {
3927
+ continue;
3928
+ }
3929
+ const result = await this.sendToWebhook(webhook, event);
3930
+ results.push(result);
3931
+ }
3932
+ return results;
3933
+ }
3934
+ /**
3935
+ * Format event as Slack Block Kit message.
3936
+ */
3937
+ formatSlackPayload(event) {
3938
+ const emoji = this.getEventEmoji(event.type);
3939
+ const title = this.getEventTitle(event.type);
3940
+ const details = event.details;
3941
+ const blocks = [
3942
+ {
3943
+ type: "header",
3944
+ text: { type: "plain_text", text: `${emoji} ${title}`, emoji: true }
3945
+ },
3946
+ {
3947
+ type: "section",
3948
+ fields: Object.entries(details).map(([key, value]) => ({
3949
+ type: "mrkdwn",
3950
+ text: `*${key}:* ${String(value)}`
3951
+ }))
3952
+ },
3953
+ {
3954
+ type: "context",
3955
+ elements: [
3956
+ {
3957
+ type: "mrkdwn",
3958
+ text: `SchemaShift | ${event.timestamp}${event.project ? ` | ${event.project}` : ""}`
3959
+ }
3960
+ ]
3961
+ }
3962
+ ];
3963
+ return { blocks };
3964
+ }
3965
+ /**
3966
+ * Format event as Microsoft Teams Adaptive Card.
3967
+ */
3968
+ formatTeamsPayload(event) {
3969
+ const title = this.getEventTitle(event.type);
3970
+ const details = event.details;
3971
+ const facts = Object.entries(details).map(([key, value]) => ({
3972
+ title: key,
3973
+ value: String(value)
3974
+ }));
3975
+ return {
3976
+ type: "message",
3977
+ attachments: [
3978
+ {
3979
+ contentType: "application/vnd.microsoft.card.adaptive",
3980
+ content: {
3981
+ $schema: "http://adaptivecards.io/schemas/adaptive-card.json",
3982
+ type: "AdaptiveCard",
3983
+ version: "1.4",
3984
+ body: [
3985
+ {
3986
+ type: "TextBlock",
3987
+ text: title,
3988
+ weight: "Bolder",
3989
+ size: "Medium"
3990
+ },
3991
+ {
3992
+ type: "FactSet",
3993
+ facts
3994
+ },
3995
+ {
3996
+ type: "TextBlock",
3997
+ text: `SchemaShift | ${event.timestamp}`,
3998
+ isSubtle: true,
3999
+ size: "Small"
4000
+ }
4001
+ ]
4002
+ }
4003
+ }
4004
+ ]
4005
+ };
4006
+ }
4007
+ getEventEmoji(type) {
4008
+ const emojis = {
4009
+ migration_started: "\u{1F504}",
4010
+ migration_completed: "\u2705",
4011
+ migration_failed: "\u274C",
4012
+ governance_violation: "\u26A0\uFE0F",
4013
+ drift_detected: "\u{1F50D}"
4014
+ };
4015
+ return emojis[type];
4016
+ }
4017
+ getEventTitle(type) {
4018
+ const titles = {
4019
+ migration_started: "Migration Started",
4020
+ migration_completed: "Migration Completed",
4021
+ migration_failed: "Migration Failed",
4022
+ governance_violation: "Governance Violation",
4023
+ drift_detected: "Schema Drift Detected"
4024
+ };
4025
+ return titles[type];
4026
+ }
4027
+ /**
4028
+ * Send event to a single webhook endpoint.
4029
+ */
4030
+ async sendToWebhook(webhook, event) {
4031
+ let payload;
4032
+ if (webhook.type === "slack") {
4033
+ payload = JSON.stringify(this.formatSlackPayload(event));
4034
+ } else if (webhook.type === "teams") {
4035
+ payload = JSON.stringify(this.formatTeamsPayload(event));
4036
+ } else {
4037
+ payload = JSON.stringify(event);
4038
+ }
4039
+ const headers = {
4040
+ "Content-Type": "application/json",
4041
+ "User-Agent": "SchemaShift-Webhook/1.0",
4042
+ ...webhook.headers
4043
+ };
4044
+ if (webhook.secret) {
4045
+ const signature = await computeSignature(payload, webhook.secret);
4046
+ headers["X-SchemaShift-Signature"] = `sha256=${signature}`;
2924
4047
  }
2925
- writeFileSync3(this.statePath, JSON.stringify(state, null, 2));
4048
+ try {
4049
+ const response = await fetch(webhook.url, {
4050
+ method: "POST",
4051
+ headers,
4052
+ body: payload
4053
+ });
4054
+ return {
4055
+ success: response.ok,
4056
+ statusCode: response.status,
4057
+ error: response.ok ? void 0 : `HTTP ${response.status}: ${response.statusText}`
4058
+ };
4059
+ } catch (err) {
4060
+ return {
4061
+ success: false,
4062
+ error: err instanceof Error ? err.message : String(err)
4063
+ };
4064
+ }
4065
+ }
4066
+ /**
4067
+ * Convenience: send a migration_started event.
4068
+ */
4069
+ async notifyMigrationStarted(from, to, fileCount, project) {
4070
+ const event = this.createEvent("migration_started", { from, to, fileCount }, project);
4071
+ return this.send(event);
4072
+ }
4073
+ /**
4074
+ * Convenience: send a migration_completed event.
4075
+ */
4076
+ async notifyMigrationCompleted(from, to, fileCount, warningCount, project) {
4077
+ const event = this.createEvent(
4078
+ "migration_completed",
4079
+ { from, to, fileCount, warningCount },
4080
+ project
4081
+ );
4082
+ return this.send(event);
4083
+ }
4084
+ /**
4085
+ * Convenience: send a migration_failed event.
4086
+ */
4087
+ async notifyMigrationFailed(from, to, error, project) {
4088
+ const event = this.createEvent("migration_failed", { from, to, error }, project);
4089
+ return this.send(event);
4090
+ }
4091
+ /**
4092
+ * Convenience: send a governance_violation event.
4093
+ */
4094
+ async notifyGovernanceViolation(violationCount, rules, project) {
4095
+ const event = this.createEvent("governance_violation", { violationCount, rules }, project);
4096
+ return this.send(event);
4097
+ }
4098
+ /**
4099
+ * Convenience: send a drift_detected event.
4100
+ */
4101
+ async notifyDriftDetected(modifiedFiles, addedFiles, removedFiles, project) {
4102
+ const event = this.createEvent(
4103
+ "drift_detected",
4104
+ { modifiedFiles, addedFiles, removedFiles },
4105
+ project
4106
+ );
4107
+ return this.send(event);
2926
4108
  }
2927
4109
  };
2928
4110
 
2929
4111
  // src/package-updater.ts
2930
- import { existsSync as existsSync8, readFileSync as readFileSync8, writeFileSync as writeFileSync4 } from "fs";
2931
- import { join as join8 } from "path";
4112
+ import { existsSync as existsSync9, readFileSync as readFileSync9, writeFileSync as writeFileSync5 } from "fs";
4113
+ import { join as join9 } from "path";
2932
4114
  var TARGET_VERSIONS = {
2933
4115
  "yup->zod": { zod: "^3.24.0" },
2934
4116
  "joi->zod": { zod: "^3.24.0" },
@@ -2949,14 +4131,14 @@ var PackageUpdater = class {
2949
4131
  const add = {};
2950
4132
  const remove = [];
2951
4133
  const warnings = [];
2952
- const pkgPath = join8(projectPath, "package.json");
2953
- if (!existsSync8(pkgPath)) {
4134
+ const pkgPath = join9(projectPath, "package.json");
4135
+ if (!existsSync9(pkgPath)) {
2954
4136
  warnings.push("No package.json found. Cannot plan dependency updates.");
2955
4137
  return { add, remove, warnings };
2956
4138
  }
2957
4139
  let pkg;
2958
4140
  try {
2959
- pkg = JSON.parse(readFileSync8(pkgPath, "utf-8"));
4141
+ pkg = JSON.parse(readFileSync9(pkgPath, "utf-8"));
2960
4142
  } catch {
2961
4143
  warnings.push("Could not parse package.json.");
2962
4144
  return { add, remove, warnings };
@@ -2986,9 +4168,9 @@ var PackageUpdater = class {
2986
4168
  return { add, remove, warnings };
2987
4169
  }
2988
4170
  apply(projectPath, plan) {
2989
- const pkgPath = join8(projectPath, "package.json");
2990
- if (!existsSync8(pkgPath)) return;
2991
- const pkgText = readFileSync8(pkgPath, "utf-8");
4171
+ const pkgPath = join9(projectPath, "package.json");
4172
+ if (!existsSync9(pkgPath)) return;
4173
+ const pkgText = readFileSync9(pkgPath, "utf-8");
2992
4174
  const pkg = JSON.parse(pkgText);
2993
4175
  if (!pkg.dependencies) pkg.dependencies = {};
2994
4176
  for (const [name, version] of Object.entries(plan.add)) {
@@ -2998,7 +4180,7 @@ var PackageUpdater = class {
2998
4180
  pkg.dependencies[name] = version;
2999
4181
  }
3000
4182
  }
3001
- writeFileSync4(pkgPath, `${JSON.stringify(pkg, null, 2)}
4183
+ writeFileSync5(pkgPath, `${JSON.stringify(pkg, null, 2)}
3002
4184
  `);
3003
4185
  }
3004
4186
  };
@@ -3169,9 +4351,164 @@ var PluginLoader = class {
3169
4351
  }
3170
4352
  };
3171
4353
 
4354
// src/schema-verifier.ts
// Canonical input/expectation tables used to probe schemas for behavioral
// parity between a source and a target validation library. Each sample
// records a display name, the raw input value, and whether a correctly
// translated schema is expected to accept that input.
var PRIMITIVE_SAMPLES = {
  // Plain string schemas: strict mode — non-string inputs must be rejected.
  string: [
    { name: "empty string", input: "", expectedValid: true },
    { name: "normal string", input: "hello world", expectedValid: true },
    { name: "number as string", input: "12345", expectedValid: true },
    { name: "null input", input: null, expectedValid: false },
    { name: "number input", input: 42, expectedValid: false },
    { name: "boolean input", input: true, expectedValid: false },
    { name: "undefined input", input: void 0, expectedValid: false }
  ],
  // Number schemas: NaN is deliberately listed as invalid even though
  // typeof NaN === "number".
  number: [
    { name: "zero", input: 0, expectedValid: true },
    { name: "positive int", input: 42, expectedValid: true },
    { name: "negative int", input: -1, expectedValid: true },
    { name: "float", input: 3.14, expectedValid: true },
    { name: "string input", input: "hello", expectedValid: false },
    { name: "null input", input: null, expectedValid: false },
    { name: "NaN input", input: Number.NaN, expectedValid: false }
  ],
  // Boolean schemas: no coercion — "true" and 1 must be rejected.
  boolean: [
    { name: "true", input: true, expectedValid: true },
    { name: "false", input: false, expectedValid: true },
    { name: "string input", input: "true", expectedValid: false },
    { name: "number input", input: 1, expectedValid: false },
    { name: "null input", input: null, expectedValid: false }
  ],
  // Date schemas: only real Date instances pass; ISO strings do not.
  date: [
    { name: "valid date", input: /* @__PURE__ */ new Date("2024-01-01"), expectedValid: true },
    { name: "string input", input: "2024-01-01", expectedValid: false },
    { name: "null input", input: null, expectedValid: false }
  ]
};
// Samples for schemas carrying an .email() refinement.
var EMAIL_SAMPLES = [
  { name: "valid email", input: "test@example.com", expectedValid: true },
  { name: "invalid email", input: "not-an-email", expectedValid: false },
  { name: "empty string", input: "", expectedValid: false }
];
// Samples for schemas carrying a .url() refinement.
var URL_SAMPLES = [
  { name: "valid url", input: "https://example.com", expectedValid: true },
  { name: "invalid url", input: "not a url", expectedValid: false }
];
// Samples for schemas carrying a .uuid() refinement.
var UUID_SAMPLES = [
  { name: "valid uuid", input: "550e8400-e29b-41d4-a716-446655440000", expectedValid: true },
  { name: "invalid uuid", input: "not-a-uuid", expectedValid: false }
];
4400
/**
 * Scan source text for variable declarations whose initializer looks like a
 * schema-builder call (z./yup./Joi./v./t./S./type(/object(/string() and
 * return the declared identifiers, deduplicated, in discovery order.
 */
function extractSchemaNames(sourceText) {
  const declarationPatterns = [
    /(?:const|let|var)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|v\.|t\.|S\.|type\(|object\(|string\()/g,
    /export\s+(?:const|let|var)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|v\.|t\.|S\.|type\(|object\(|string\()/g
  ];
  // A Set preserves insertion order, which matches the original
  // push-if-not-included behavior.
  const found = new Set();
  for (const re of declarationPatterns) {
    let hit = re.exec(sourceText);
    while (hit !== null) {
      if (hit[1]) {
        found.add(hit[1]);
      }
      hit = re.exec(sourceText);
    }
  }
  return [...found];
}
4416
/**
 * Heuristically generate verification samples for `schemaName` by
 * pattern-matching the text of its declaration in `sourceText`.
 *
 * Each recognized marker (.email()/.url()/.uuid() refinements; string(/
 * number(/boolean(/date( constructors; optional()/nullable() modifiers;
 * a numeric .min(n) bound) contributes canonical input/expectation pairs.
 * Falls back to the generic string samples when the declaration cannot be
 * located. Samples are deduplicated by name (first occurrence wins) and
 * capped at `maxSamples`.
 */
function generateSamples(sourceText, schemaName, maxSamples) {
  const samples = [];
  const schemaBlock = extractSchemaBlock(sourceText, schemaName);
  // Declaration not found: fall back to generic string probes.
  if (!schemaBlock) return PRIMITIVE_SAMPLES.string?.slice(0, maxSamples) ?? [];
  if (/\.email\s*\(/.test(schemaBlock)) {
    samples.push(...EMAIL_SAMPLES);
  }
  if (/\.url\s*\(/.test(schemaBlock)) {
    samples.push(...URL_SAMPLES);
  }
  if (/\.uuid\s*\(/.test(schemaBlock)) {
    samples.push(...UUID_SAMPLES);
  }
  if (/string\s*\(/.test(schemaBlock)) {
    samples.push(...PRIMITIVE_SAMPLES.string ?? []);
  }
  if (/number\s*\(/.test(schemaBlock) || /\.int\s*\(/.test(schemaBlock)) {
    samples.push(...PRIMITIVE_SAMPLES.number ?? []);
  }
  if (/boolean\s*\(/.test(schemaBlock)) {
    samples.push(...PRIMITIVE_SAMPLES.boolean ?? []);
  }
  if (/date\s*\(/.test(schemaBlock)) {
    samples.push(...PRIMITIVE_SAMPLES.date ?? []);
  }
  // `optional(` also matches `.optional(`, so a single test covers both the
  // method-chain and function-call spellings (same for `nullable(` below).
  if (/optional\s*\(/.test(schemaBlock)) {
    samples.push({ name: "undefined (optional)", input: void 0, expectedValid: true });
  }
  if (/nullable\s*\(/.test(schemaBlock)) {
    samples.push({ name: "null (nullable)", input: null, expectedValid: true });
  }
  // Fix: run the .min() regex once (previously it was .test()ed and then
  // re-matched), and skip the below-min probe when the bound is 0 — an
  // empty string satisfies min(0), so `expectedValid: false` was wrong
  // for that case.
  const minMatch = schemaBlock.match(/\.min\s*\(\s*(\d+)/);
  if (minMatch) {
    const minVal = Number.parseInt(minMatch[1] ?? "0", 10);
    if (minVal > 0) {
      samples.push({
        name: `below min (${minVal})`,
        // NOTE(review): assumes a string-length bound; a numeric .min()
        // schema would reject this input for type reasons instead —
        // confirm against the verifier's consumers.
        input: "a".repeat(minVal - 1),
        expectedValid: false
      });
    }
  }
  // Deduplicate by sample name, keeping the first occurrence, then cap.
  const seen = /* @__PURE__ */ new Set();
  const unique = [];
  for (const s of samples) {
    if (!seen.has(s.name)) {
      seen.add(s.name);
      unique.push(s);
    }
  }
  return unique.slice(0, maxSamples);
}
4466
/**
 * Locate the initializer expression of `schemaName`'s declaration inside
 * `sourceText`. Returns the raw text between `=` and the terminating
 * semicolon (either end-of-line or the semicolon preceding the next
 * top-level declaration keyword), or null when no declaration is found.
 */
function extractSchemaBlock(sourceText, schemaName) {
  // Escape regex metacharacters so arbitrary identifiers are matched literally.
  const safeName = schemaName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const head = `(?:const|let|var|export\\s+const)\\s+${safeName}\\s*=\\s*`;
  const tail = `(?:;\\s*$|;\\s*(?:const|let|var|export|function|class|type|interface))`;
  const declaration = new RegExp(`${head}([\\s\\S]*?)${tail}`, "m");
  const hit = declaration.exec(sourceText);
  return hit ? hit[1] : null;
}
4475
/**
 * Assemble a verification report for a `from` -> `to` migration.
 * The overall parity score is the mean of the per-schema scores rounded
 * to two decimal places; an empty result list counts as 100% parity.
 */
function createVerificationReport(from, to, results) {
  let meanParity = 100;
  if (results.length > 0) {
    let total = 0;
    for (const entry of results) {
      total += entry.parityScore;
    }
    meanParity = total / results.length;
  }
  return {
    from,
    to,
    totalSchemas: results.length,
    results,
    // Round to two decimals without toFixed (keeps it a number).
    overallParityScore: Math.round(meanParity * 100) / 100,
    timestamp: new Date().toISOString()
  };
}
4487
/**
 * Render a human-readable summary of a verification report: one line per
 * schema with a pass/warn/fail glyph (100% / >=80% / below), one indented
 * line per sample mismatch, bracketed by horizontal rules, ending with the
 * overall parity percentage.
 */
function formatVerificationReport(report) {
  const divider = "\u2500".repeat(50);
  const out = [`
Schema Verification Report: ${report.from} \u2192 ${report.to}`, divider];
  for (const result of report.results) {
    let icon = "\u2717";
    if (result.parityScore === 100) {
      icon = "\u2713";
    } else if (result.parityScore >= 80) {
      icon = "\u26A0";
    }
    out.push(
      `  ${icon} ${result.schemaName} \u2014 ${result.parityScore}% parity (${result.matchingSamples}/${result.totalSamples} samples)`
    );
    for (const m of result.mismatches) {
      const src = m.sourceResult.valid ? "valid" : "invalid";
      const tgt = m.targetResult.valid ? "valid" : "invalid";
      out.push(`    \u2514\u2500 ${m.sampleName}: source=${src}, target=${tgt}`);
    }
  }
  out.push(divider, `Overall Parity: ${report.overallParityScore}%`, "");
  return out.join("\n");
}
4508
+
3172
4509
  // src/standard-schema.ts
3173
- import { existsSync as existsSync9, readFileSync as readFileSync9 } from "fs";
3174
- import { join as join9 } from "path";
4510
+ import { existsSync as existsSync10, readFileSync as readFileSync10 } from "fs";
4511
+ import { join as join10 } from "path";
3175
4512
  var STANDARD_SCHEMA_LIBRARIES = {
3176
4513
  zod: { minMajor: 3, minMinor: 23 },
3177
4514
  // Zod v3.23+ and v4+
@@ -3200,13 +4537,13 @@ function isVersionCompatible(version, minMajor, minMinor) {
3200
4537
  return false;
3201
4538
  }
3202
4539
  function detectStandardSchema(projectPath) {
3203
- const pkgPath = join9(projectPath, "package.json");
3204
- if (!existsSync9(pkgPath)) {
4540
+ const pkgPath = join10(projectPath, "package.json");
4541
+ if (!existsSync10(pkgPath)) {
3205
4542
  return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
3206
4543
  }
3207
4544
  let allDeps = {};
3208
4545
  try {
3209
- const pkg = JSON.parse(readFileSync9(pkgPath, "utf-8"));
4546
+ const pkg = JSON.parse(readFileSync10(pkgPath, "utf-8"));
3210
4547
  allDeps = { ...pkg.dependencies, ...pkg.devDependencies };
3211
4548
  } catch {
3212
4549
  return { detected: false, compatibleLibraries: [], recommendation: "", interopTools: [] };
@@ -3245,6 +4582,105 @@ function detectStandardSchema(projectPath) {
3245
4582
  return { detected, compatibleLibraries, recommendation, adoptionPath, interopTools };
3246
4583
  }
3247
4584
 
4585
// src/standard-schema-advisor.ts
// Libraries known to implement the Standard Schema 1.0 interface.
var STANDARD_SCHEMA_LIBS = /* @__PURE__ */ new Set(["zod", "valibot", "arktype"]);
var StandardSchemaAdvisor = class {
  /**
   * Check if a schema library supports Standard Schema.
   */
  supportsStandardSchema(library) {
    return STANDARD_SCHEMA_LIBS.has(library);
  }
  /**
   * Generate advisory for a given migration path, weighing a full
   * migration against keeping existing schemas behind Standard Schema
   * adapters, based on which side of the migration implements the spec.
   */
  advise(from, to) {
    const sourceSupports = this.supportsStandardSchema(from);
    const targetSupports = this.supportsStandardSchema(to);
    // Both sides speak Standard Schema: adapters are a real alternative.
    if (sourceSupports && targetSupports) {
      return {
        shouldConsiderAdapter: true,
        reason: `Both ${from} and ${to} support Standard Schema 1.0. You may be able to use adapters for ecosystem tools (tRPC, TanStack Form, etc.) instead of migrating all schemas.`,
        adapterExample: this.generateAdapterExample(from, to),
        migrationAdvantages: [
          "Full target library API and ergonomics",
          "Consistent codebase (single library)",
          "Better IDE support for one library",
          "Smaller bundle (avoid loading two libraries)"
        ],
        adapterAdvantages: [
          "No code changes needed for existing schemas",
          "Gradual migration possible",
          "Ecosystem tools work with both libraries via Standard Schema",
          "Lower risk \u2014 existing validation behavior preserved"
        ],
        recommendation: "either"
      };
    }
    // Neither side speaks Standard Schema: adapters buy nothing.
    if (!sourceSupports && !targetSupports) {
      return {
        shouldConsiderAdapter: false,
        reason: `Neither ${from} nor ${to} supports Standard Schema. Full migration is recommended.`,
        migrationAdvantages: [
          "Complete type safety with target library",
          "Access to target library ecosystem",
          "No runtime adapter overhead"
        ],
        adapterAdvantages: [],
        recommendation: "migrate"
      };
    }
    // Only the target supports the spec: migrating gains interop.
    if (targetSupports) {
      return {
        shouldConsiderAdapter: false,
        reason: `${from} does not support Standard Schema, but ${to} does. Migrating to ${to} gives you Standard Schema interoperability.`,
        migrationAdvantages: [
          "Standard Schema interoperability with ecosystem tools",
          "Future-proof validation layer",
          `Access to ${to} API and type inference`
        ],
        adapterAdvantages: [],
        recommendation: "migrate"
      };
    }
    // Only the source supports the spec: migrating loses interop.
    return {
      shouldConsiderAdapter: false,
      reason: `${from} supports Standard Schema but ${to} does not. Consider if you need the specific features of ${to} that justify losing Standard Schema interoperability.`,
      migrationAdvantages: [`Access to ${to}-specific features`],
      adapterAdvantages: [`Keeping ${from} preserves Standard Schema interoperability`],
      recommendation: "migrate"
    };
  }
  /**
   * Analyze a project and provide advisory based on detected libraries.
   */
  adviseFromProject(projectPath, from, to) {
    const projectInfo = detectStandardSchema(projectPath);
    return { ...this.advise(from, to), projectInfo };
  }
  // Build an illustrative code comment showing how Standard Schema lets
  // ecosystem tools (here tRPC v11+) accept schemas from both libraries.
  generateAdapterExample(from, to) {
    const exampleLines = [
      `// Instead of migrating all ${from} schemas to ${to},`,
      `// you can use Standard Schema adapters for ecosystem tools:`,
      `//`,
      `// Example with tRPC (v11+):`,
      `// tRPC accepts any Standard Schema-compatible schema.`,
      `// Both ${from} and ${to} schemas work without conversion:`,
      `//`,
      `// import { ${from}Schema } from './existing-${from}-schemas';`,
      `// import { ${to}Schema } from './new-${to}-schemas';`,
      `//`,
      `// const router = t.router({`,
      `// // Works with ${from} schema (Standard Schema compatible)`,
      `// getUser: t.procedure.input(${from}Schema).query(...)`,
      `// // Also works with ${to} schema`,
      `// createUser: t.procedure.input(${to}Schema).mutation(...)`,
      `// });`
    ];
    return exampleLines.join("\n");
  }
};
+ };
4683
+
3248
4684
  // src/test-scaffolder.ts
3249
4685
  var TestScaffolder = class {
3250
4686
  scaffold(sourceFiles, from, to) {
@@ -3538,6 +4974,7 @@ var TypeDedupDetector = class {
3538
4974
  }
3539
4975
  };
3540
4976
  export {
4977
+ ApprovalManager,
3541
4978
  BehavioralWarningAnalyzer,
3542
4979
  BundleEstimator,
3543
4980
  CompatibilityAnalyzer,
@@ -3548,6 +4985,8 @@ export {
3548
4985
  FormResolverMigrator,
3549
4986
  GOVERNANCE_TEMPLATES,
3550
4987
  GovernanceEngine,
4988
+ GovernanceFixer,
4989
+ GraphExporter,
3551
4990
  IncrementalTracker,
3552
4991
  MigrationAuditLog,
3553
4992
  MigrationChain,
@@ -3557,24 +4996,41 @@ export {
3557
4996
  PluginLoader,
3558
4997
  SchemaAnalyzer,
3559
4998
  SchemaDependencyResolver,
4999
+ StandardSchemaAdvisor,
3560
5000
  TestScaffolder,
3561
5001
  TransformEngine,
3562
5002
  TypeDedupDetector,
5003
+ WebhookNotifier,
3563
5004
  buildCallChain,
3564
5005
  computeParallelBatches,
5006
+ conditionalValidation,
5007
+ createVerificationReport,
5008
+ dependentFields,
3565
5009
  detectFormLibraries,
3566
5010
  detectSchemaLibrary,
3567
5011
  detectStandardSchema,
5012
+ extractSchemaNames,
5013
+ formatVerificationReport,
5014
+ generateSamples,
5015
+ getAllMigrationTemplates,
3568
5016
  getGovernanceTemplate,
3569
5017
  getGovernanceTemplateNames,
3570
5018
  getGovernanceTemplatesByCategory,
5019
+ getMigrationTemplate,
5020
+ getMigrationTemplateNames,
5021
+ getMigrationTemplatesByCategory,
3571
5022
  isInsideComment,
3572
5023
  isInsideStringLiteral,
3573
5024
  loadConfig,
5025
+ mutuallyExclusive,
3574
5026
  parseCallChain,
5027
+ requireIf,
5028
+ requireOneOf,
3575
5029
  shouldSuppressWarning,
3576
5030
  startsWithBase,
5031
+ suggestCrossFieldPattern,
3577
5032
  transformMethodChain,
3578
- validateConfig
5033
+ validateConfig,
5034
+ validateMigrationTemplate
3579
5035
  };
3580
5036
  //# sourceMappingURL=index.js.map