@schemashift/core 0.11.0 → 0.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -30,11 +30,16 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // src/index.ts
  var index_exports = {};
  __export(index_exports, {
+ AUDIT_LOG_FILE: () => AUDIT_LOG_FILE,
  ApprovalManager: () => ApprovalManager,
+ BACKUP_DIR: () => BACKUP_DIR,
  BehavioralWarningAnalyzer: () => BehavioralWarningAnalyzer,
  BundleEstimator: () => BundleEstimator,
+ CONFIG_FILE_NAMES: () => CONFIG_FILE_NAMES,
  CompatibilityAnalyzer: () => CompatibilityAnalyzer,
  ComplexityEstimator: () => ComplexityEstimator,
+ DEFAULT_CONFIG_FILE: () => DEFAULT_CONFIG_FILE,
+ DeadSchemaDetector: () => DeadSchemaDetector,
  DetailedAnalyzer: () => DetailedAnalyzer,
  DriftDetector: () => DriftDetector,
  EcosystemAnalyzer: () => EcosystemAnalyzer,
@@ -43,16 +48,22 @@ __export(index_exports, {
  GovernanceEngine: () => GovernanceEngine,
  GovernanceFixer: () => GovernanceFixer,
  GraphExporter: () => GraphExporter,
+ INCREMENTAL_STATE_FILE: () => INCREMENTAL_STATE_FILE,
+ ImportDeduplicator: () => ImportDeduplicator,
  IncrementalTracker: () => IncrementalTracker,
  MigrationAuditLog: () => MigrationAuditLog,
  MigrationChain: () => MigrationChain,
  MonorepoResolver: () => MonorepoResolver,
+ PENDING_DIR: () => PENDING_DIR,
  PackageUpdater: () => PackageUpdater,
  PerformanceAnalyzer: () => PerformanceAnalyzer,
  PluginLoader: () => PluginLoader,
+ SCHEMASHIFT_DIR: () => SCHEMASHIFT_DIR,
+ SCHEMA_SNAPSHOT_FILE: () => SCHEMA_SNAPSHOT_FILE,
  SchemaAnalyzer: () => SchemaAnalyzer,
  SchemaDependencyResolver: () => SchemaDependencyResolver,
  StandardSchemaAdvisor: () => StandardSchemaAdvisor,
+ TESTS_DIR: () => TESTS_DIR,
  TestScaffolder: () => TestScaffolder,
  TransformEngine: () => TransformEngine,
  TypeDedupDetector: () => TypeDedupDetector,
@@ -60,10 +71,14 @@ __export(index_exports, {
  buildCallChain: () => buildCallChain,
  computeParallelBatches: () => computeParallelBatches,
  conditionalValidation: () => conditionalValidation,
+ createVerificationReport: () => createVerificationReport,
  dependentFields: () => dependentFields,
  detectFormLibraries: () => detectFormLibraries,
  detectSchemaLibrary: () => detectSchemaLibrary,
  detectStandardSchema: () => detectStandardSchema,
+ extractSchemaNames: () => extractSchemaNames,
+ formatVerificationReport: () => formatVerificationReport,
+ generateSamples: () => generateSamples,
  getAllMigrationTemplates: () => getAllMigrationTemplates,
  getGovernanceTemplate: () => getGovernanceTemplate,
  getGovernanceTemplateNames: () => getGovernanceTemplateNames,
@@ -225,10 +240,30 @@ var SchemaAnalyzer = class {
  // src/approval.ts
  var import_node_fs = require("fs");
  var import_node_path = require("path");
+
+ // src/constants.ts
+ var SCHEMASHIFT_DIR = ".schemashift";
+ var BACKUP_DIR = ".schemashift-backup";
+ var CONFIG_FILE_NAMES = [
+ ".schemashiftrc",
+ ".schemashiftrc.json",
+ ".schemashiftrc.yaml",
+ ".schemashiftrc.yml",
+ ".schemashiftrc.js",
+ ".schemashiftrc.cjs"
+ ];
+ var DEFAULT_CONFIG_FILE = ".schemashiftrc.json";
+ var INCREMENTAL_STATE_FILE = "incremental.json";
+ var AUDIT_LOG_FILE = "audit-log.json";
+ var SCHEMA_SNAPSHOT_FILE = "schema-snapshot.json";
+ var PENDING_DIR = "pending";
+ var TESTS_DIR = "tests";
+
+ // src/approval.ts
  var ApprovalManager = class {
  pendingDir;
  constructor(projectPath) {
- this.pendingDir = (0, import_node_path.join)(projectPath, ".schemashift", "pending");
+ this.pendingDir = (0, import_node_path.join)(projectPath, SCHEMASHIFT_DIR, PENDING_DIR);
  }
  /**
  * Create a new migration request for review.
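The new src/constants.ts block centralizes the on-disk layout that several classes previously hard-coded, and the constants are re-exported from the package root. A small sketch of resolving the same paths from consumer code, assuming the package's CJS entry point resolves normally; the setup around it is illustrative only:

const { join } = require("path");
const {
  SCHEMASHIFT_DIR, // ".schemashift"
  PENDING_DIR, // "pending"
  AUDIT_LOG_FILE, // "audit-log.json"
  INCREMENTAL_STATE_FILE // "incremental.json"
} = require("@schemashift/core");

// The same locations ApprovalManager, MigrationAuditLog and IncrementalTracker now derive internally.
const projectPath = process.cwd();
const pendingDir = join(projectPath, SCHEMASHIFT_DIR, PENDING_DIR);
const auditLogPath = join(projectPath, SCHEMASHIFT_DIR, AUDIT_LOG_FILE);
const incrementalStatePath = join(projectPath, SCHEMASHIFT_DIR, INCREMENTAL_STATE_FILE);
console.log({ pendingDir, auditLogPath, incrementalStatePath });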
@@ -277,8 +312,16 @@ var ApprovalManager = class {
  if (!(0, import_node_fs.existsSync)(filePath)) {
  return null;
  }
- const content = (0, import_node_fs.readFileSync)(filePath, "utf-8");
- return JSON.parse(content);
+ try {
+ const content = (0, import_node_fs.readFileSync)(filePath, "utf-8");
+ const parsed = JSON.parse(content);
+ if (!this.isValidRequest(parsed)) {
+ return null;
+ }
+ return parsed;
+ } catch {
+ return null;
+ }
  }
  /**
  * List all migration requests, optionally filtered by status.
@@ -290,10 +333,14 @@ var ApprovalManager = class {
  const files = (0, import_node_fs.readdirSync)(this.pendingDir).filter((f) => f.endsWith(".json"));
  const requests = [];
  for (const file of files) {
- const content = (0, import_node_fs.readFileSync)((0, import_node_path.join)(this.pendingDir, file), "utf-8");
- const request = JSON.parse(content);
- if (!status || request.status === status) {
- requests.push(request);
+ try {
+ const content = (0, import_node_fs.readFileSync)((0, import_node_path.join)(this.pendingDir, file), "utf-8");
+ const parsed = JSON.parse(content);
+ if (!this.isValidRequest(parsed)) continue;
+ if (!status || parsed.status === status) {
+ requests.push(parsed);
+ }
+ } catch {
  }
  }
  return requests.sort(
@@ -319,6 +366,11 @@ var ApprovalManager = class {
  const request = this.getRequest(requestId);
  return request?.status === "approved";
  }
+ isValidRequest(data) {
+ if (typeof data !== "object" || data === null) return false;
+ const obj = data;
+ return typeof obj.id === "string" && typeof obj.from === "string" && typeof obj.to === "string" && Array.isArray(obj.files) && typeof obj.requestedBy === "string" && typeof obj.status === "string" && ["pending", "approved", "rejected"].includes(obj.status);
+ }
  ensureDir() {
  if (!(0, import_node_fs.existsSync)(this.pendingDir)) {
  (0, import_node_fs.mkdirSync)(this.pendingDir, { recursive: true });
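getRequest and listRequests now wrap JSON.parse in try/catch and validate the parsed shape with isValidRequest, so a corrupted or hand-edited request file yields null or is skipped instead of throwing. A usage sketch based on the method signatures visible above; the request id is hypothetical:

const { ApprovalManager } = require("@schemashift/core");

const approvals = new ApprovalManager(process.cwd());
// A malformed .schemashift/pending/*.json file no longer throws here:
const request = approvals.getRequest("example-request-id"); // hypothetical id
if (request === null) {
  console.log("Request is missing, unreadable, or failed shape validation");
}
// Invalid files are skipped rather than aborting the whole listing:
const pending = approvals.listRequests("pending");
console.log(`${pending.length} pending request(s)`);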
@@ -464,15 +516,13 @@ function transformMethodChain(chain, newBase, factoryMapper, methodMapper) {
  var import_node_crypto = require("crypto");
  var import_node_fs2 = require("fs");
  var import_node_path2 = require("path");
- var AUDIT_DIR = ".schemashift";
- var AUDIT_FILE = "audit-log.json";
  var AUDIT_VERSION = 1;
  var MigrationAuditLog = class {
  logDir;
  logPath;
  constructor(projectPath) {
- this.logDir = (0, import_node_path2.join)(projectPath, AUDIT_DIR);
- this.logPath = (0, import_node_path2.join)(this.logDir, AUDIT_FILE);
+ this.logDir = (0, import_node_path2.join)(projectPath, SCHEMASHIFT_DIR);
+ this.logPath = (0, import_node_path2.join)(this.logDir, AUDIT_LOG_FILE);
  }
  /**
  * Append a new entry to the audit log.
@@ -516,7 +566,11 @@ var MigrationAuditLog = class {
  if (!content.trim()) {
  return { version: AUDIT_VERSION, entries: [] };
  }
- return JSON.parse(content);
+ const parsed = JSON.parse(content);
+ if (!this.isValidAuditLog(parsed)) {
+ return { version: AUDIT_VERSION, entries: [] };
+ }
+ return parsed;
  } catch {
  return { version: AUDIT_VERSION, entries: [] };
  }
@@ -594,6 +648,88 @@ var MigrationAuditLog = class {
  clear() {
  this.write({ version: AUDIT_VERSION, entries: [] });
  }
+ /**
+ * Export a compliance report in SOC2 or HIPAA format.
+ */
+ exportComplianceReport(format) {
+ const log = this.read();
+ const summary = this.getSummary();
+ if (format === "soc2") {
+ return this.generateSoc2Report(log, summary);
+ }
+ return this.generateHipaaReport(log, summary);
+ }
+ generateSoc2Report(log, summary) {
+ const sections = [];
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ sections.push("# SOC2 Compliance Report \u2014 Schema Migration");
+ sections.push(`Generated: ${now}`);
+ sections.push("");
+ sections.push("## Change Control Summary");
+ sections.push(`- Total Migrations: ${summary.totalMigrations}`);
+ sections.push(`- Total Files Processed: ${summary.totalFiles}`);
+ sections.push(`- Successful: ${summary.successCount}`);
+ sections.push(`- Failed: ${summary.failureCount}`);
+ sections.push(`- Migration Paths: ${summary.migrationPaths.join(", ")}`);
+ sections.push("");
+ sections.push("## Change Control Entries");
+ for (const entry of log.entries) {
+ sections.push("");
+ sections.push(`### ${entry.filePath}`);
+ sections.push(`- Change ID: ${entry.migrationId}`);
+ sections.push(`- Timestamp: ${entry.timestamp}`);
+ sections.push(`- Action: ${entry.action}`);
+ sections.push(`- Migration: ${entry.from} \u2192 ${entry.to}`);
+ sections.push(`- Status: ${entry.success ? "Success" : "Failed"}`);
+ sections.push(`- Implementer: ${entry.user || "Unknown"}`);
+ sections.push(`- Before Hash: ${entry.beforeHash}`);
+ if (entry.afterHash) sections.push(`- After Hash: ${entry.afterHash}`);
+ sections.push(`- Warnings: ${entry.warningCount}`);
+ sections.push(`- Errors: ${entry.errorCount}`);
+ if (entry.riskScore !== void 0) sections.push(`- Risk Score: ${entry.riskScore}`);
+ if (entry.metadata?.ciProvider) sections.push(`- CI Provider: ${entry.metadata.ciProvider}`);
+ if (entry.metadata?.gitCommit) sections.push(`- Git Commit: ${entry.metadata.gitCommit}`);
+ if (entry.metadata?.gitBranch) sections.push(`- Git Branch: ${entry.metadata.gitBranch}`);
+ }
+ sections.push("");
+ sections.push("## Rollback Procedure");
+ sections.push("SchemaShift maintains automatic backups in `.schemashift/backups/`.");
+ sections.push("Use `schemashift rollback [backupId]` to restore files from any backup.");
+ sections.push("");
+ return sections.join("\n");
+ }
+ generateHipaaReport(log, summary) {
+ const sections = [];
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ sections.push("# HIPAA Compliance Audit Trail \u2014 Schema Migration");
+ sections.push(`Generated: ${now}`);
+ sections.push("");
+ sections.push("## Data Transformation Summary");
+ sections.push(`- Total Transformations: ${summary.totalFiles}`);
+ sections.push(`- Successful: ${summary.successCount}`);
+ sections.push(`- Failed: ${summary.failureCount}`);
+ sections.push("");
+ sections.push("## Integrity Verification");
+ for (const entry of log.entries) {
+ sections.push("");
+ sections.push(`### ${entry.filePath}`);
+ sections.push(`- Timestamp: ${entry.timestamp}`);
+ sections.push(`- User: ${entry.user || "Unknown"}`);
+ sections.push(`- Action: ${entry.action} (${entry.from} \u2192 ${entry.to})`);
+ sections.push(`- Integrity Before: SHA256:${entry.beforeHash}`);
+ if (entry.afterHash) sections.push(`- Integrity After: SHA256:${entry.afterHash}`);
+ sections.push(`- Status: ${entry.success ? "Completed" : "Failed"}`);
+ if (entry.metadata?.hostname) sections.push(`- Host: ${entry.metadata.hostname}`);
+ if (entry.metadata?.nodeVersion)
+ sections.push(`- Runtime: Node.js ${entry.metadata.nodeVersion}`);
+ }
+ sections.push("");
+ sections.push("## Access Control");
+ const users = [...new Set(log.entries.map((e) => e.user).filter(Boolean))];
+ sections.push(`- Users Who Performed Migrations: ${users.join(", ") || "Unknown"}`);
+ sections.push("");
+ return sections.join("\n");
+ }
  collectMetadata() {
  return {
  hostname: process.env.HOSTNAME || void 0,
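exportComplianceReport renders the existing audit log as Markdown, either as a SOC2 change-control report or a HIPAA audit trail (any format other than "soc2" takes the HIPAA branch). A sketch, assuming MigrationAuditLog is constructed with a project path as shown above; the output file names are illustrative:

const { writeFileSync } = require("fs");
const { MigrationAuditLog } = require("@schemashift/core");

const auditLog = new MigrationAuditLog(process.cwd());
// Both reports are plain Markdown strings built from the same audit entries.
writeFileSync("soc2-report.md", auditLog.exportComplianceReport("soc2"));
writeFileSync("hipaa-report.md", auditLog.exportComplianceReport("hipaa"));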
@@ -604,6 +740,13 @@ var MigrationAuditLog = class {
  gitCommit: process.env.GITHUB_SHA || process.env.CI_COMMIT_SHA || void 0
  };
  }
+ isValidAuditLog(data) {
+ if (typeof data !== "object" || data === null) return false;
+ const obj = data;
+ if (typeof obj.version !== "number") return false;
+ if (!Array.isArray(obj.entries)) return false;
+ return true;
+ }
  write(log) {
  if (!(0, import_node_fs2.existsSync)(this.logDir)) {
  (0, import_node_fs2.mkdirSync)(this.logDir, { recursive: true });
@@ -1047,6 +1190,12 @@ var import_node_path4 = require("path");
  // src/ecosystem.ts
  var import_node_fs3 = require("fs");
  var import_node_path3 = require("path");
+ function parseMajorVersion(version) {
+ const match = version.match(/(\d+)/);
+ const num = match?.[1] ? Number.parseInt(match[1], 10) : 0;
+ if (!Number.isFinite(num) || num < 0 || num > 999) return 0;
+ return num;
+ }
  var ECOSYSTEM_RULES = [
  // ORM integrations
  {
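parseMajorVersion replaces three copies of the same inline regex in ECOSYSTEM_RULES and clamps implausible values to 0. It is module-internal, so the calls below only illustrate its behavior:

// The first run of digits wins, so range prefixes are handled:
parseMajorVersion("^11.4.0"); // 11
parseMajorVersion("~4.2.1"); // 4
// No digits, or a major version above 999, falls back to 0:
parseMajorVersion("latest"); // 0
parseMajorVersion("1000.0.0"); // 0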
@@ -1088,8 +1237,7 @@ var ECOSYSTEM_RULES = [
  category: "api",
  migrations: ["zod-v3->v4"],
  check: (version) => {
- const majorMatch = version.match(/(\d+)/);
- const major = majorMatch?.[1] ? Number.parseInt(majorMatch[1], 10) : 0;
+ const major = parseMajorVersion(version);
  if (major < 11) {
  return {
  issue: `tRPC v${major} expects Zod v3 types. A v3 ZodType is not assignable to a v4 ZodType.`,
@@ -1122,8 +1270,7 @@ var ECOSYSTEM_RULES = [
  category: "validation-util",
  migrations: ["zod-v3->v4"],
  check: (version) => {
- const majorMatch = version.match(/(\d+)/);
- const major = majorMatch?.[1] ? Number.parseInt(majorMatch[1], 10) : 0;
+ const major = parseMajorVersion(version);
  if (major < 4) {
  return {
  issue: `zod-validation-error v${major} is not compatible with Zod v4.`,
@@ -1413,8 +1560,7 @@ var ECOSYSTEM_RULES = [
  category: "validation-util",
  migrations: ["zod-v3->v4"],
  check: (version) => {
- const majorMatch = version.match(/(\d+)/);
- const major = majorMatch?.[1] ? Number.parseInt(majorMatch[1], 10) : 0;
+ const major = parseMajorVersion(version);
  if (major < 4) {
  return {
  issue: "zod-to-json-schema v3 may not fully support Zod v4 schemas.",
@@ -1726,6 +1872,25 @@ var ComplexityEstimator = class {
  riskAreas
  };
  }
+ estimateDuration(estimate) {
+ const EFFORT_RANGES = {
+ trivial: { label: "1\u20135 minutes", range: [1, 5] },
+ low: { label: "5\u201315 minutes", range: [5, 15] },
+ moderate: { label: "15\u201345 minutes", range: [15, 45] },
+ high: { label: "1\u20133 hours", range: [60, 180] },
+ extreme: { label: "3\u20138 hours", range: [180, 480] }
+ };
+ const base = EFFORT_RANGES[estimate.effort];
+ const fileMultiplier = Math.max(1, Math.log2(estimate.totalFiles + 1));
+ const low = Math.round(base.range[0] * fileMultiplier);
+ const high = Math.round(base.range[1] * fileMultiplier);
+ if (high >= 120) {
+ const lowHours = Math.round(low / 60 * 10) / 10;
+ const highHours = Math.round(high / 60 * 10) / 10;
+ return { label: `${lowHours}\u2013${highHours} hours`, rangeMinutes: [low, high] };
+ }
+ return { label: `${low}\u2013${high} minutes`, rangeMinutes: [low, high] };
+ }
  calculateEffort(totalSchemas, advancedCount, hasDeepDU) {
  if (totalSchemas >= 500 && hasDeepDU) return "extreme";
  if (totalSchemas >= 200 || advancedCount >= 20) return "high";
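estimateDuration scales a per-effort base range by log2(totalFiles + 1) and switches the label to hours once the upper bound reaches 120 minutes. A worked example, assuming ComplexityEstimator takes no constructor arguments and an estimate with effort "moderate" across 30 files (values chosen for illustration):

const { ComplexityEstimator } = require("@schemashift/core");

// moderate base range: [15, 45] minutes
// fileMultiplier = max(1, log2(30 + 1)) ≈ 4.95
// low = round(15 * 4.95) = 74 minutes, high = round(45 * 4.95) = 223 minutes
// high >= 120, so the label is reported in hours: "1.2–3.7 hours"
const estimator = new ComplexityEstimator();
console.log(estimator.estimateDuration({ effort: "moderate", totalFiles: 30 }));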
@@ -1790,7 +1955,7 @@ async function loadConfig(configPath) {
  include: ["**/*.ts", "**/*.tsx"],
  exclude: ["**/node_modules/**", "**/dist/**", "**/*.d.ts"],
  git: { enabled: false },
- backup: { enabled: true, dir: ".schemashift-backup" },
+ backup: { enabled: true, dir: BACKUP_DIR },
  ...result?.config
  };
  }
@@ -1901,6 +2066,75 @@ function suggestCrossFieldPattern(whenCode) {
  return null;
  }
 
+ // src/dead-schema-detector.ts
+ var DeadSchemaDetector = class {
+ detect(sourceFiles) {
+ const schemas = this.collectSchemaDefinitions(sourceFiles);
+ const unusedSchemas = this.findUnusedSchemas(schemas, sourceFiles);
+ return {
+ unusedSchemas,
+ totalSchemas: schemas.length,
+ summary: unusedSchemas.length > 0 ? `Found ${unusedSchemas.length} unused schema(s) out of ${schemas.length} total that may be safely removed.` : `All ${schemas.length} schema(s) are referenced.`
+ };
+ }
+ collectSchemaDefinitions(sourceFiles) {
+ const schemas = [];
+ const schemaPattern = /(?:const|let|var|export\s+(?:const|let|var))\s+(\w+)\s*=\s*(?:z\.|yup\.|Yup\.|Joi\.|t\.|v\.|type\(|object\(|string\(|S\.)/;
+ for (const file of sourceFiles) {
+ const text = file.getFullText();
+ const lines = text.split("\n");
+ const filePath = file.getFilePath();
+ for (let i = 0; i < lines.length; i++) {
+ const line = lines[i];
+ if (!line) continue;
+ const match = schemaPattern.exec(line);
+ if (match?.[1]) {
+ schemas.push({
+ schemaName: match[1],
+ filePath,
+ lineNumber: i + 1
+ });
+ }
+ }
+ }
+ return schemas;
+ }
+ findUnusedSchemas(schemas, sourceFiles) {
+ const fileContents = /* @__PURE__ */ new Map();
+ for (const file of sourceFiles) {
+ fileContents.set(file.getFilePath(), file.getFullText());
+ }
+ const unused = [];
+ for (const schema of schemas) {
+ const { schemaName, filePath } = schema;
+ let referenceCount = 0;
+ for (const [path, content] of fileContents) {
+ const pattern = new RegExp(`\\b${schemaName}\\b`, "g");
+ const matches = content.match(pattern);
+ const matchCount = matches?.length ?? 0;
+ if (path === filePath) {
+ if (matchCount > 1) {
+ referenceCount += matchCount - 1;
+ }
+ } else {
+ referenceCount += matchCount;
+ }
+ }
+ const fileContent = fileContents.get(filePath) ?? "";
+ const exportPattern = new RegExp(
+ `export\\s+(?:const|let|var)\\s+${schemaName}\\b|export\\s*\\{[^}]*\\b${schemaName}\\b`
+ );
+ if (exportPattern.test(fileContent)) {
+ referenceCount++;
+ }
+ if (referenceCount === 0) {
+ unused.push(schema);
+ }
+ }
+ return unused;
+ }
+ };
+
  // src/dependency-graph.ts
  var import_node_fs5 = require("fs");
  var import_node_path5 = require("path");
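DeadSchemaDetector scans source files for schema-style declarations (z., yup., Joi., and similar factory calls) and counts word-boundary references across the project, with an export counting as one implicit use. A usage sketch; it assumes ts-morph supplies the SourceFile objects, since the detector only relies on getFilePath() and getFullText():

const { Project } = require("ts-morph");
const { DeadSchemaDetector } = require("@schemashift/core");

const project = new Project({ tsConfigFilePath: "tsconfig.json" });
const detector = new DeadSchemaDetector();
const report = detector.detect(project.getSourceFiles("src/**/*.ts"));

console.log(report.summary);
for (const schema of report.unusedSchemas) {
  console.log(`  ${schema.schemaName} (${schema.filePath}:${schema.lineNumber})`);
}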
@@ -2441,15 +2675,13 @@ var DetailedAnalyzer = class {
  var import_node_crypto2 = require("crypto");
  var import_node_fs7 = require("fs");
  var import_node_path7 = require("path");
- var SNAPSHOT_DIR = ".schemashift";
- var SNAPSHOT_FILE = "schema-snapshot.json";
  var SNAPSHOT_VERSION = 1;
  var DriftDetector = class {
  snapshotDir;
  snapshotPath;
  constructor(projectPath) {
- this.snapshotDir = (0, import_node_path7.join)(projectPath, SNAPSHOT_DIR);
- this.snapshotPath = (0, import_node_path7.join)(this.snapshotDir, SNAPSHOT_FILE);
+ this.snapshotDir = (0, import_node_path7.join)(projectPath, SCHEMASHIFT_DIR);
+ this.snapshotPath = (0, import_node_path7.join)(this.snapshotDir, SCHEMA_SNAPSHOT_FILE);
  }
  /**
  * Take a snapshot of the current schema state
@@ -2719,7 +2951,8 @@ var GovernanceEngine = class {
  if (this.rules.has("naming-convention")) {
  const config = this.rules.get("naming-convention") ?? {};
  const pattern = config.pattern || ".*Schema$";
- if (!new RegExp(pattern).test(schemaName)) {
+ const regex = this.safeRegExp(pattern);
+ if (regex && !regex.test(schemaName)) {
  violations.push({
  rule: "naming-convention",
  message: `Schema "${schemaName}" does not match naming pattern: ${pattern}`,
@@ -2881,6 +3114,14 @@ var GovernanceEngine = class {
  passed: violations.filter((v) => v.severity === "error").length === 0
  };
  }
+ safeRegExp(pattern) {
+ if (pattern.length > 500) return null;
+ try {
+ return new RegExp(pattern);
+ } catch {
+ return null;
+ }
+ }
  detectFileLibrary(sourceFile) {
  for (const imp of sourceFile.getImportDeclarations()) {
  const lib = detectSchemaLibrary(imp.getModuleSpecifierValue());
@@ -3616,17 +3857,77 @@ var GraphExporter = class {
  }
  };
 
+ // src/import-deduplicator.ts
+ var ImportDeduplicator = class {
+ detect(sourceFiles) {
+ const allGroups = [];
+ for (const file of sourceFiles) {
+ const groups = this.findDuplicatesInFile(file);
+ allGroups.push(...groups);
+ }
+ const totalDuplicates = allGroups.reduce((sum, g) => sum + g.occurrences.length, 0);
+ return {
+ duplicateGroups: allGroups,
+ totalDuplicates,
+ summary: allGroups.length > 0 ? `Found ${allGroups.length} duplicate import group(s) across ${new Set(allGroups.map((g) => g.occurrences[0]?.filePath)).size} file(s). Merge them for cleaner imports.` : "No duplicate imports found."
+ };
+ }
+ findDuplicatesInFile(sourceFile) {
+ const imports = sourceFile.getImportDeclarations();
+ const filePath = sourceFile.getFilePath();
+ const bySource = /* @__PURE__ */ new Map();
+ for (const imp of imports) {
+ const source = imp.getModuleSpecifierValue();
+ const namedImports = imp.getNamedImports().map((n) => n.getName());
+ const namespaceImport = imp.getNamespaceImport()?.getText();
+ const defaultImport = imp.getDefaultImport()?.getText();
+ const importedNames = [];
+ if (defaultImport) importedNames.push(defaultImport);
+ if (namespaceImport) importedNames.push(`* as ${namespaceImport}`);
+ importedNames.push(...namedImports);
+ if (importedNames.length === 0) continue;
+ const entry = {
+ source,
+ filePath,
+ lineNumber: imp.getStartLineNumber(),
+ importedNames
+ };
+ const existing = bySource.get(source);
+ if (existing) {
+ existing.push(entry);
+ } else {
+ bySource.set(source, [entry]);
+ }
+ }
+ const groups = [];
+ for (const [source, occurrences] of bySource) {
+ if (occurrences.length <= 1) continue;
+ const allNames = /* @__PURE__ */ new Set();
+ for (const occ of occurrences) {
+ for (const name of occ.importedNames) {
+ allNames.add(name);
+ }
+ }
+ const mergedNames = [...allNames].sort().join(", ");
+ groups.push({
+ source,
+ occurrences,
+ suggestion: `Merge into single import: import { ${mergedNames} } from '${source}';`
+ });
+ }
+ return groups;
+ }
+ };
+
  // src/incremental.ts
  var import_node_fs8 = require("fs");
  var import_node_path8 = require("path");
- var STATE_DIR = ".schemashift";
- var STATE_FILE = "incremental.json";
  var IncrementalTracker = class {
  stateDir;
  statePath;
  constructor(projectPath) {
- this.stateDir = (0, import_node_path8.join)(projectPath, STATE_DIR);
- this.statePath = (0, import_node_path8.join)(this.stateDir, STATE_FILE);
+ this.stateDir = (0, import_node_path8.join)(projectPath, SCHEMASHIFT_DIR);
+ this.statePath = (0, import_node_path8.join)(this.stateDir, INCREMENTAL_STATE_FILE);
  }
  start(files, from, to) {
  const state = {
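ImportDeduplicator groups each file's import declarations by module specifier and, for any specifier imported more than once, suggests a single merged import statement. A sketch under the same ts-morph assumption as above:

const { Project } = require("ts-morph");
const { ImportDeduplicator } = require("@schemashift/core");

const project = new Project({ tsConfigFilePath: "tsconfig.json" });
const dedup = new ImportDeduplicator();
const result = dedup.detect(project.getSourceFiles("src/**/*.ts"));

console.log(result.summary);
for (const group of result.duplicateGroups) {
  console.log(`${group.source}: ${group.occurrences.length} import statements`);
  console.log(`  ${group.suggestion}`);
}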
@@ -3663,7 +3964,9 @@ var IncrementalTracker = class {
  getState() {
  if (!(0, import_node_fs8.existsSync)(this.statePath)) return null;
  try {
- return JSON.parse((0, import_node_fs8.readFileSync)(this.statePath, "utf-8"));
+ const parsed = JSON.parse((0, import_node_fs8.readFileSync)(this.statePath, "utf-8"));
+ if (!this.isValidState(parsed)) return null;
+ return parsed;
  } catch {
  return null;
  }
@@ -3689,11 +3992,32 @@ var IncrementalTracker = class {
  percent
  };
  }
+ /**
+ * Get a canary batch — a percentage of remaining files, sorted simplest first.
+ * Used for phased rollouts where you migrate a small batch, verify, then continue.
+ */
+ getCanaryBatch(percent, fileSizes) {
+ const state = this.getState();
+ if (!state) return [];
+ const count = Math.max(1, Math.ceil(state.remainingFiles.length * (percent / 100)));
+ if (fileSizes) {
+ const sorted = [...state.remainingFiles].sort((a, b) => {
+ return (fileSizes.get(a) ?? 0) - (fileSizes.get(b) ?? 0);
+ });
+ return sorted.slice(0, count);
+ }
+ return state.remainingFiles.slice(0, count);
+ }
  clear() {
  if ((0, import_node_fs8.existsSync)(this.statePath)) {
  (0, import_node_fs8.unlinkSync)(this.statePath);
  }
  }
+ isValidState(data) {
+ if (typeof data !== "object" || data === null) return false;
+ const obj = data;
+ return typeof obj.migrationId === "string" && typeof obj.from === "string" && typeof obj.to === "string" && typeof obj.startedAt === "string" && Array.isArray(obj.completedFiles) && Array.isArray(obj.remainingFiles) && Array.isArray(obj.failedFiles);
+ }
  saveState(state) {
  if (!(0, import_node_fs8.existsSync)(this.stateDir)) {
  (0, import_node_fs8.mkdirSync)(this.stateDir, { recursive: true });
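getCanaryBatch always returns at least one remaining file and, when a size map is supplied, picks the smallest files first so a phased rollout starts with the lowest-risk targets. A sketch, assuming an in-progress migration whose state was started elsewhere; building the size map from statSync is an illustrative choice, not the package's:

const { statSync } = require("fs");
const { IncrementalTracker } = require("@schemashift/core");

const tracker = new IncrementalTracker(process.cwd());
const state = tracker.getState();
if (state) {
  // Sort remaining files by byte size so the 10% canary batch hits the simplest ones first.
  const fileSizes = new Map(state.remainingFiles.map((f) => [f, statSync(f).size]));
  const canary = tracker.getCanaryBatch(10, fileSizes);
  console.log(`Canary batch: ${canary.length} of ${state.remainingFiles.length} remaining files`);
}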
@@ -3903,11 +4227,111 @@ var WebhookNotifier = class {
  }
  return results;
  }
+ /**
+ * Format event as Slack Block Kit message.
+ */
+ formatSlackPayload(event) {
+ const emoji = this.getEventEmoji(event.type);
+ const title = this.getEventTitle(event.type);
+ const details = event.details;
+ const blocks = [
+ {
+ type: "header",
+ text: { type: "plain_text", text: `${emoji} ${title}`, emoji: true }
+ },
+ {
+ type: "section",
+ fields: Object.entries(details).map(([key, value]) => ({
+ type: "mrkdwn",
+ text: `*${key}:* ${String(value)}`
+ }))
+ },
+ {
+ type: "context",
+ elements: [
+ {
+ type: "mrkdwn",
+ text: `SchemaShift | ${event.timestamp}${event.project ? ` | ${event.project}` : ""}`
+ }
+ ]
+ }
+ ];
+ return { blocks };
+ }
+ /**
+ * Format event as Microsoft Teams Adaptive Card.
+ */
+ formatTeamsPayload(event) {
+ const title = this.getEventTitle(event.type);
+ const details = event.details;
+ const facts = Object.entries(details).map(([key, value]) => ({
+ title: key,
+ value: String(value)
+ }));
+ return {
+ type: "message",
+ attachments: [
+ {
+ contentType: "application/vnd.microsoft.card.adaptive",
+ content: {
+ $schema: "http://adaptivecards.io/schemas/adaptive-card.json",
+ type: "AdaptiveCard",
+ version: "1.4",
+ body: [
+ {
+ type: "TextBlock",
+ text: title,
+ weight: "Bolder",
+ size: "Medium"
+ },
+ {
+ type: "FactSet",
+ facts
+ },
+ {
+ type: "TextBlock",
+ text: `SchemaShift | ${event.timestamp}`,
+ isSubtle: true,
+ size: "Small"
+ }
+ ]
+ }
+ }
+ ]
+ };
+ }
+ getEventEmoji(type) {
+ const emojis = {
+ migration_started: "\u{1F504}",
+ migration_completed: "\u2705",
+ migration_failed: "\u274C",
+ governance_violation: "\u26A0\uFE0F",
+ drift_detected: "\u{1F50D}"
+ };
+ return emojis[type];
+ }
+ getEventTitle(type) {
+ const titles = {
+ migration_started: "Migration Started",
+ migration_completed: "Migration Completed",
+ migration_failed: "Migration Failed",
+ governance_violation: "Governance Violation",
+ drift_detected: "Schema Drift Detected"
+ };
+ return titles[type];
+ }
  /**
  * Send event to a single webhook endpoint.
  */
  async sendToWebhook(webhook, event) {
- const payload = JSON.stringify(event);
+ let payload;
+ if (webhook.type === "slack") {
+ payload = JSON.stringify(this.formatSlackPayload(event));
+ } else if (webhook.type === "teams") {
+ payload = JSON.stringify(this.formatTeamsPayload(event));
+ } else {
+ payload = JSON.stringify(event);
+ }
  const headers = {
  "Content-Type": "application/json",
  "User-Agent": "SchemaShift-Webhook/1.0",
@@ -3917,11 +4341,15 @@ var WebhookNotifier = class {
  const signature = await computeSignature(payload, webhook.secret);
  headers["X-SchemaShift-Signature"] = `sha256=${signature}`;
  }
+ const timeoutMs = webhook.timeoutMs ?? 1e4;
+ const controller = new AbortController();
+ const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
  try {
  const response = await fetch(webhook.url, {
  method: "POST",
  headers,
- body: payload
+ body: payload,
+ signal: controller.signal
  });
  return {
  success: response.ok,
@@ -3929,10 +4357,13 @@ var WebhookNotifier = class {
  error: response.ok ? void 0 : `HTTP ${response.status}: ${response.statusText}`
  };
  } catch (err) {
+ const message = err instanceof Error && err.name === "AbortError" ? `Webhook request timed out after ${timeoutMs}ms` : err instanceof Error ? err.message : String(err);
  return {
  success: false,
- error: err instanceof Error ? err.message : String(err)
+ error: message
  };
+ } finally {
+ clearTimeout(timeoutId);
  }
  }
  /**
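sendToWebhook now shapes the payload per destination (Slack Block Kit, a Teams Adaptive Card, or the raw event for anything else) and aborts the fetch after webhook.timeoutMs, defaulting to 10000 ms. The notifier's constructor and event plumbing are outside this diff, so the objects below only sketch the fields the new code reads:

// Fields consumed by the new sendToWebhook / formatSlackPayload / formatTeamsPayload paths:
const webhook = {
  url: "https://hooks.slack.com/services/T000/B000/XXXX", // placeholder URL
  type: "slack", // "slack" | "teams" | anything else sends the raw JSON event
  secret: "example-secret", // optional; adds an X-SchemaShift-Signature header
  timeoutMs: 5000 // optional; default is 1e4 (10 seconds)
};
const event = {
  type: "migration_completed", // one of the keys in getEventEmoji/getEventTitle
  timestamp: new Date().toISOString(),
  project: "my-app", // optional; shown in the Slack context block
  details: { from: "zod-v3", to: "zod-v4", files: 42 }
};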
@@ -4223,6 +4654,161 @@ var PluginLoader = class {
  }
  };
 
+ // src/schema-verifier.ts
+ var PRIMITIVE_SAMPLES = {
+ string: [
+ { name: "empty string", input: "", expectedValid: true },
+ { name: "normal string", input: "hello world", expectedValid: true },
+ { name: "number as string", input: "12345", expectedValid: true },
+ { name: "null input", input: null, expectedValid: false },
+ { name: "number input", input: 42, expectedValid: false },
+ { name: "boolean input", input: true, expectedValid: false },
+ { name: "undefined input", input: void 0, expectedValid: false }
+ ],
+ number: [
+ { name: "zero", input: 0, expectedValid: true },
+ { name: "positive int", input: 42, expectedValid: true },
+ { name: "negative int", input: -1, expectedValid: true },
+ { name: "float", input: 3.14, expectedValid: true },
+ { name: "string input", input: "hello", expectedValid: false },
+ { name: "null input", input: null, expectedValid: false },
+ { name: "NaN input", input: Number.NaN, expectedValid: false }
+ ],
+ boolean: [
+ { name: "true", input: true, expectedValid: true },
+ { name: "false", input: false, expectedValid: true },
+ { name: "string input", input: "true", expectedValid: false },
+ { name: "number input", input: 1, expectedValid: false },
+ { name: "null input", input: null, expectedValid: false }
+ ],
+ date: [
+ { name: "valid date", input: /* @__PURE__ */ new Date("2024-01-01"), expectedValid: true },
+ { name: "string input", input: "2024-01-01", expectedValid: false },
+ { name: "null input", input: null, expectedValid: false }
+ ]
+ };
+ var EMAIL_SAMPLES = [
+ { name: "valid email", input: "test@example.com", expectedValid: true },
+ { name: "invalid email", input: "not-an-email", expectedValid: false },
+ { name: "empty string", input: "", expectedValid: false }
+ ];
+ var URL_SAMPLES = [
+ { name: "valid url", input: "https://example.com", expectedValid: true },
+ { name: "invalid url", input: "not a url", expectedValid: false }
+ ];
+ var UUID_SAMPLES = [
+ { name: "valid uuid", input: "550e8400-e29b-41d4-a716-446655440000", expectedValid: true },
+ { name: "invalid uuid", input: "not-a-uuid", expectedValid: false }
+ ];
+ function extractSchemaNames(sourceText) {
+ const schemas = [];
+ const patterns = [
+ /(?:const|let|var)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|v\.|t\.|S\.|type\(|object\(|string\()/g,
+ /export\s+(?:const|let|var)\s+(\w+)\s*=\s*(?:z\.|yup\.|Joi\.|v\.|t\.|S\.|type\(|object\(|string\()/g
+ ];
+ for (const pattern of patterns) {
+ for (const match of sourceText.matchAll(pattern)) {
+ const name = match[1];
+ if (name && !schemas.includes(name)) {
+ schemas.push(name);
+ }
+ }
+ }
+ return schemas;
+ }
+ function generateSamples(sourceText, schemaName, maxSamples) {
+ const samples = [];
+ const schemaBlock = extractSchemaBlock(sourceText, schemaName);
+ if (!schemaBlock) return PRIMITIVE_SAMPLES.string?.slice(0, maxSamples) ?? [];
+ if (/\.email\s*\(/.test(schemaBlock)) {
+ samples.push(...EMAIL_SAMPLES);
+ }
+ if (/\.url\s*\(/.test(schemaBlock)) {
+ samples.push(...URL_SAMPLES);
+ }
+ if (/\.uuid\s*\(/.test(schemaBlock)) {
+ samples.push(...UUID_SAMPLES);
+ }
+ if (/string\s*\(/.test(schemaBlock)) {
+ samples.push(...PRIMITIVE_SAMPLES.string ?? []);
+ }
+ if (/number\s*\(/.test(schemaBlock) || /\.int\s*\(/.test(schemaBlock)) {
+ samples.push(...PRIMITIVE_SAMPLES.number ?? []);
+ }
+ if (/boolean\s*\(/.test(schemaBlock)) {
+ samples.push(...PRIMITIVE_SAMPLES.boolean ?? []);
+ }
+ if (/date\s*\(/.test(schemaBlock)) {
+ samples.push(...PRIMITIVE_SAMPLES.date ?? []);
+ }
+ if (/\.optional\s*\(/.test(schemaBlock) || /optional\s*\(/.test(schemaBlock)) {
+ samples.push({ name: "undefined (optional)", input: void 0, expectedValid: true });
+ }
+ if (/\.nullable\s*\(/.test(schemaBlock) || /nullable\s*\(/.test(schemaBlock)) {
+ samples.push({ name: "null (nullable)", input: null, expectedValid: true });
+ }
+ if (/\.min\s*\(\s*(\d+)/.test(schemaBlock)) {
+ const minMatch = schemaBlock.match(/\.min\s*\(\s*(\d+)/);
+ const minVal = minMatch ? Number.parseInt(minMatch[1] ?? "0", 10) : 0;
+ samples.push({
+ name: `below min (${minVal})`,
+ input: minVal > 0 ? "a".repeat(minVal - 1) : "",
+ expectedValid: false
+ });
+ }
+ const seen = /* @__PURE__ */ new Set();
+ const unique = [];
+ for (const s of samples) {
+ if (!seen.has(s.name)) {
+ seen.add(s.name);
+ unique.push(s);
+ }
+ }
+ return unique.slice(0, maxSamples);
+ }
+ function extractSchemaBlock(sourceText, schemaName) {
+ const escapedName = schemaName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+ const pattern = new RegExp(
+ `(?:const|let|var|export\\s+const)\\s+${escapedName}\\s*=\\s*([\\s\\S]*?)(?:;\\s*$|;\\s*(?:const|let|var|export|function|class|type|interface))`,
+ "m"
+ );
+ const match = sourceText.match(pattern);
+ return match?.[1] ?? null;
+ }
+ function createVerificationReport(from, to, results) {
+ const totalSchemas = results.length;
+ const overallParityScore = totalSchemas > 0 ? results.reduce((sum, r) => sum + r.parityScore, 0) / totalSchemas : 100;
+ return {
+ from,
+ to,
+ totalSchemas,
+ results,
+ overallParityScore: Math.round(overallParityScore * 100) / 100,
+ timestamp: (/* @__PURE__ */ new Date()).toISOString()
+ };
+ }
+ function formatVerificationReport(report) {
+ const lines = [];
+ lines.push(`
+ Schema Verification Report: ${report.from} \u2192 ${report.to}`);
+ lines.push("\u2500".repeat(50));
+ for (const result of report.results) {
+ const icon = result.parityScore === 100 ? "\u2713" : result.parityScore >= 80 ? "\u26A0" : "\u2717";
+ lines.push(
+ ` ${icon} ${result.schemaName} \u2014 ${result.parityScore}% parity (${result.matchingSamples}/${result.totalSamples} samples)`
+ );
+ for (const mismatch of result.mismatches) {
+ lines.push(
+ ` \u2514\u2500 ${mismatch.sampleName}: source=${mismatch.sourceResult.valid ? "valid" : "invalid"}, target=${mismatch.targetResult.valid ? "valid" : "invalid"}`
+ );
+ }
+ }
+ lines.push("\u2500".repeat(50));
+ lines.push(`Overall Parity: ${report.overallParityScore}%`);
+ lines.push("");
+ return lines.join("\n");
+ }
+
  // src/standard-schema.ts
  var import_node_fs10 = require("fs");
  var import_node_path10 = require("path");
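The schema-verifier helpers are all exported: extractSchemaNames finds schema-style declarations in raw source text, generateSamples derives positive and negative inputs from the schema's method chain, and createVerificationReport plus formatVerificationReport summarize parity between the old and new library. Actually running the samples through both schema versions is left to the caller, so the per-schema result below is a hand-built placeholder using only the fields the report helpers read:

const { readFileSync } = require("fs");
const {
  extractSchemaNames,
  generateSamples,
  createVerificationReport,
  formatVerificationReport
} = require("@schemashift/core");

const source = readFileSync("src/schemas/user.ts", "utf-8"); // illustrative path
const names = extractSchemaNames(source); // e.g. ["userSchema", "emailSchema"]
const samples = generateSamples(source, names[0], 10); // [{ name, input, expectedValid }, ...]

// After validating each sample against both schema versions yourself, feed the
// per-schema results into the report helpers:
const report = createVerificationReport("zod-v3", "zod-v4", [
  {
    schemaName: names[0],
    parityScore: 100,
    matchingSamples: samples.length,
    totalSamples: samples.length,
    mismatches: []
  }
]);
console.log(formatVerificationReport(report));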
@@ -4692,11 +5278,16 @@ var TypeDedupDetector = class {
  };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ AUDIT_LOG_FILE,
  ApprovalManager,
+ BACKUP_DIR,
  BehavioralWarningAnalyzer,
  BundleEstimator,
+ CONFIG_FILE_NAMES,
  CompatibilityAnalyzer,
  ComplexityEstimator,
+ DEFAULT_CONFIG_FILE,
+ DeadSchemaDetector,
  DetailedAnalyzer,
  DriftDetector,
  EcosystemAnalyzer,
@@ -4705,16 +5296,22 @@ var TypeDedupDetector = class {
  GovernanceEngine,
  GovernanceFixer,
  GraphExporter,
+ INCREMENTAL_STATE_FILE,
+ ImportDeduplicator,
  IncrementalTracker,
  MigrationAuditLog,
  MigrationChain,
  MonorepoResolver,
+ PENDING_DIR,
  PackageUpdater,
  PerformanceAnalyzer,
  PluginLoader,
+ SCHEMASHIFT_DIR,
+ SCHEMA_SNAPSHOT_FILE,
  SchemaAnalyzer,
  SchemaDependencyResolver,
  StandardSchemaAdvisor,
+ TESTS_DIR,
  TestScaffolder,
  TransformEngine,
  TypeDedupDetector,
@@ -4722,10 +5319,14 @@ var TypeDedupDetector = class {
  buildCallChain,
  computeParallelBatches,
  conditionalValidation,
+ createVerificationReport,
  dependentFields,
  detectFormLibraries,
  detectSchemaLibrary,
  detectStandardSchema,
+ extractSchemaNames,
+ formatVerificationReport,
+ generateSamples,
  getAllMigrationTemplates,
  getGovernanceTemplate,
  getGovernanceTemplateNames,