@famgia/omnify-atlas 0.0.111 → 0.0.112

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -32,17 +32,26 @@ var index_exports = {};
  __export(index_exports, {
  LOCK_FILE_NAME: () => LOCK_FILE_NAME,
  LOCK_FILE_VERSION: () => LOCK_FILE_VERSION,
+ VERSION_CHAIN_FILE: () => VERSION_CHAIN_FILE,
  addEnhancedMigrationRecord: () => addEnhancedMigrationRecord,
  addMigrationRecord: () => addMigrationRecord,
  applySchema: () => applySchema,
+ buildCurrentSchemaEntries: () => buildCurrentSchemaEntries,
  buildSchemaHashes: () => buildSchemaHashes,
  buildSchemaSnapshots: () => buildSchemaSnapshots,
  checkAtlasVersion: () => checkAtlasVersion,
+ checkBulkLockViolation: () => checkBulkLockViolation,
+ checkLockViolation: () => checkLockViolation,
  compareSchemas: () => compareSchemas,
  compareSchemasDeep: () => compareSchemasDeep,
+ computeBlockHash: () => computeBlockHash,
  computeHash: () => computeHash,
  computeSchemaHash: () => computeSchemaHash,
+ computeSha256: () => computeSha256,
+ createDeployBlock: () => createDeployBlock,
+ createEmptyChain: () => createEmptyChain,
  createEmptyLockFile: () => createEmptyLockFile,
+ deployVersion: () => deployVersion,
  diffHclSchemas: () => diffHclSchemas,
  extractTableNameFromFilename: () => extractTableNameFromFilename,
  extractTimestampFromFilename: () => extractTimestampFromFilename,
@@ -52,6 +61,9 @@ __export(index_exports, {
  generateHclSchema: () => generateHclSchema,
  generateHclTable: () => generateHclTable,
  generatePreview: () => generatePreview,
+ generateVersionName: () => generateVersionName,
+ getChainSummary: () => getChainSummary,
+ getLockedSchemas: () => getLockedSchemas,
  getMigrationsToRegenerate: () => getMigrationsToRegenerate,
  getPrimaryKeyType: () => getPrimaryKeyType,
  getTimestampType: () => getTimestampType,
@@ -63,6 +75,7 @@ __export(index_exports, {
  propertyNameToColumnName: () => propertyNameToColumnName,
  propertyToSnapshot: () => propertyToSnapshot,
  readLockFile: () => readLockFile,
+ readVersionChain: () => readVersionChain,
  renderHcl: () => renderHcl,
  runAtlasDiff: () => runAtlasDiff,
  schemaNameToTableName: () => schemaNameToTableName,
@@ -71,7 +84,9 @@ __export(index_exports, {
  updateLockFileV1: () => updateLockFileV1,
  validateHcl: () => validateHcl,
  validateMigrations: () => validateMigrations,
- writeLockFile: () => writeLockFile
+ verifyChain: () => verifyChain,
+ writeLockFile: () => writeLockFile,
+ writeVersionChain: () => writeVersionChain
  });
  module.exports = __toCommonJS(index_exports);
 
@@ -557,8 +572,8 @@ async function validateMigrations(lockFile, migrationsDir) {
  if (migration.checksum) {
  try {
  const { readFileSync } = await import("fs");
- const { join: join3 } = await import("path");
- const content = readFileSync(join3(migrationsDir, fileName), "utf8");
+ const { join: join4 } = await import("path");
+ const content = readFileSync(join4(migrationsDir, fileName), "utf8");
  const currentChecksum = computeHash(content);
  if (currentChecksum !== migration.checksum) {
  modifiedFiles.push(fileName);
@@ -628,6 +643,326 @@ function getMigrationsToRegenerate(lockFile, missingFiles) {
  return result;
  }
 
+ // src/lock/version-chain.ts
+ var import_node_crypto2 = require("crypto");
+ var import_promises2 = require("fs/promises");
+ var import_node_fs = require("fs");
+ var import_node_path = require("path");
+ var VERSION_CHAIN_FILE = ".omnify.chain";
+ function computeSha256(content) {
+ return (0, import_node_crypto2.createHash)("sha256").update(content, "utf8").digest("hex");
+ }
+ function computeBlockHash(previousHash, version, lockedAt, environment, schemas) {
+ const content = JSON.stringify({
+ previousHash,
+ version,
+ lockedAt,
+ environment,
+ schemas: schemas.map((s) => ({
+ name: s.name,
+ relativePath: s.relativePath,
+ contentHash: s.contentHash
+ }))
+ });
+ return computeSha256(content);
+ }
+ function createEmptyChain() {
+ const now = (/* @__PURE__ */ new Date()).toISOString();
+ return {
+ version: 1,
+ type: "omnify-version-chain",
+ genesisHash: null,
+ latestHash: null,
+ blocks: [],
+ createdAt: now,
+ updatedAt: now
+ };
+ }
+ async function readVersionChain(chainFilePath) {
+ try {
+ const content = await (0, import_promises2.readFile)(chainFilePath, "utf8");
+ const parsed = JSON.parse(content);
+ const chain = parsed;
+ if (chain.type !== "omnify-version-chain" || chain.version !== 1) {
+ throw new Error("Invalid version chain file format");
+ }
+ return parsed;
+ } catch (error) {
+ if (error.code === "ENOENT") {
+ return null;
+ }
+ throw error;
+ }
+ }
+ async function writeVersionChain(chainFilePath, chain) {
+ const content = JSON.stringify(chain, null, 2) + "\n";
+ await (0, import_promises2.writeFile)(chainFilePath, content, "utf8");
+ }
+ async function getFileContentHash(filePath) {
+ try {
+ const content = await (0, import_promises2.readFile)(filePath, "utf8");
+ return computeSha256(content);
+ } catch {
+ return null;
+ }
+ }
+ async function buildCurrentSchemaEntries(schemasDir, schemaFiles) {
+ const entries = [];
+ for (const schema of schemaFiles) {
+ const contentHash = await getFileContentHash(schema.filePath);
+ if (contentHash) {
+ entries.push({
+ name: schema.name,
+ relativePath: schema.relativePath,
+ contentHash
+ });
+ }
+ }
+ return entries.sort((a, b) => a.name.localeCompare(b.name));
+ }
+ function generateVersionName() {
+ const now = /* @__PURE__ */ new Date();
+ const year = now.getFullYear();
+ const month = String(now.getMonth() + 1).padStart(2, "0");
+ const day = String(now.getDate()).padStart(2, "0");
+ const hour = String(now.getHours()).padStart(2, "0");
+ const minute = String(now.getMinutes()).padStart(2, "0");
+ const second = String(now.getSeconds()).padStart(2, "0");
+ return `v${year}.${month}.${day}-${hour}${minute}${second}`;
+ }
+ async function verifyChain(chain, schemasDir) {
+ const verifiedBlocks = [];
+ const corruptedBlocks = [];
+ const tamperedSchemas = [];
+ const deletedLockedSchemas = [];
+ const lockedSchemas = /* @__PURE__ */ new Map();
+ let previousHash = null;
+ for (const block of chain.blocks) {
+ const expectedHash = computeBlockHash(
+ previousHash,
+ block.version,
+ block.lockedAt,
+ block.environment,
+ block.schemas
+ );
+ if (expectedHash !== block.blockHash) {
+ corruptedBlocks.push({
+ version: block.version,
+ expectedHash,
+ actualHash: block.blockHash,
+ reason: "Block hash mismatch - chain integrity compromised"
+ });
+ }
+ if (block.previousHash !== previousHash) {
+ corruptedBlocks.push({
+ version: block.version,
+ expectedHash: previousHash ?? "null",
+ actualHash: block.previousHash ?? "null",
+ reason: "Previous hash chain broken"
+ });
+ }
+ for (const schema of block.schemas) {
+ lockedSchemas.set(schema.name, {
+ hash: schema.contentHash,
+ version: block.version,
+ relativePath: schema.relativePath
+ });
+ }
+ if (corruptedBlocks.length === 0 || corruptedBlocks[corruptedBlocks.length - 1]?.version !== block.version) {
+ verifiedBlocks.push(block.version);
+ }
+ previousHash = block.blockHash;
+ }
+ for (const [name, locked] of lockedSchemas) {
+ const filePath = (0, import_node_path.resolve)(schemasDir, locked.relativePath);
+ if (!(0, import_node_fs.existsSync)(filePath)) {
+ deletedLockedSchemas.push({
+ schemaName: name,
+ filePath: locked.relativePath,
+ lockedInVersion: locked.version,
+ lockedHash: locked.hash
+ });
+ } else {
+ const currentHash = await getFileContentHash(filePath);
+ if (currentHash && currentHash !== locked.hash) {
+ tamperedSchemas.push({
+ schemaName: name,
+ filePath: locked.relativePath,
+ lockedHash: locked.hash,
+ currentHash,
+ lockedInVersion: locked.version
+ });
+ }
+ }
+ }
+ return {
+ valid: corruptedBlocks.length === 0 && tamperedSchemas.length === 0 && deletedLockedSchemas.length === 0,
+ blockCount: chain.blocks.length,
+ verifiedBlocks,
+ corruptedBlocks,
+ tamperedSchemas,
+ deletedLockedSchemas
+ };
+ }
+ function checkLockViolation(chain, schemaName, action) {
+ const affectedVersions = [];
+ for (const block of chain.blocks) {
+ const schema = block.schemas.find((s) => s.name === schemaName);
+ if (schema) {
+ affectedVersions.push(block.version);
+ }
+ }
+ if (affectedVersions.length > 0) {
+ return {
+ allowed: false,
+ reason: `Schema '${schemaName}' is locked in production version(s): ${affectedVersions.join(", ")}. ${action === "delete" ? "Deletion" : "Modification"} is not allowed.`,
+ affectedSchemas: [schemaName],
+ lockedInVersions: affectedVersions
+ };
+ }
+ return {
+ allowed: true,
+ affectedSchemas: [],
+ lockedInVersions: []
+ };
+ }
+ function checkBulkLockViolation(chain, schemas) {
+ const violations = [];
+ for (const { name, action } of schemas) {
+ const result = checkLockViolation(chain, name, action);
+ if (!result.allowed) {
+ violations.push({
+ name,
+ versions: [...result.lockedInVersions]
+ });
+ }
+ }
+ if (violations.length > 0) {
+ const schemaList = violations.map((v) => v.name);
+ const allVersions = [...new Set(violations.flatMap((v) => v.versions))];
+ return {
+ allowed: false,
+ reason: `The following schemas are locked: ${schemaList.join(", ")}. They cannot be modified or deleted.`,
+ affectedSchemas: schemaList,
+ lockedInVersions: allVersions
+ };
+ }
+ return {
+ allowed: true,
+ affectedSchemas: [],
+ lockedInVersions: []
+ };
+ }
+ function createDeployBlock(chain, schemas, options) {
+ const version = options.version ?? generateVersionName();
+ const lockedAt = (/* @__PURE__ */ new Date()).toISOString();
+ const previousHash = chain.latestHash;
+ const blockHash = computeBlockHash(
+ previousHash,
+ version,
+ lockedAt,
+ options.environment,
+ schemas
+ );
+ const block = {
+ version,
+ blockHash,
+ previousHash,
+ lockedAt,
+ environment: options.environment,
+ deployedBy: options.deployedBy,
+ schemas,
+ comment: options.comment
+ };
+ const updatedChain = {
+ ...chain,
+ genesisHash: chain.genesisHash ?? blockHash,
+ latestHash: blockHash,
+ blocks: [...chain.blocks, block],
+ updatedAt: lockedAt
+ };
+ return { chain: updatedChain, block };
+ }
+ async function deployVersion(chainFilePath, schemasDir, schemaFiles, options) {
+ let chain = await readVersionChain(chainFilePath);
+ if (!chain) {
+ chain = createEmptyChain();
+ }
+ const currentSchemas = await buildCurrentSchemaEntries(schemasDir, schemaFiles);
+ if (currentSchemas.length === 0) {
+ return {
+ success: false,
+ error: "No schema files found to lock",
+ addedSchemas: [],
+ modifiedSchemas: [],
+ warnings: []
+ };
+ }
+ const previousSchemas = /* @__PURE__ */ new Map();
+ for (const block2 of chain.blocks) {
+ for (const schema of block2.schemas) {
+ previousSchemas.set(schema.name, schema.contentHash);
+ }
+ }
+ const addedSchemas = [];
+ const modifiedSchemas = [];
+ const warnings = [];
+ for (const schema of currentSchemas) {
+ const previousHash = previousSchemas.get(schema.name);
+ if (!previousHash) {
+ addedSchemas.push(schema.name);
+ } else if (previousHash !== schema.contentHash) {
+ modifiedSchemas.push(schema.name);
+ warnings.push(
+ `Schema '${schema.name}' has been modified since last lock. This version will include the new state.`
+ );
+ }
+ }
+ const { chain: updatedChain, block } = createDeployBlock(
+ chain,
+ currentSchemas,
+ options
+ );
+ await writeVersionChain(chainFilePath, updatedChain);
+ return {
+ success: true,
+ block,
+ addedSchemas,
+ modifiedSchemas,
+ warnings
+ };
+ }
+ function getLockedSchemas(chain) {
+ const locked = /* @__PURE__ */ new Map();
+ for (const block of chain.blocks) {
+ for (const schema of block.schemas) {
+ locked.set(schema.name, {
+ hash: schema.contentHash,
+ version: block.version,
+ relativePath: schema.relativePath
+ });
+ }
+ }
+ return locked;
+ }
+ function getChainSummary(chain) {
+ const schemaNames = /* @__PURE__ */ new Set();
+ const environments = /* @__PURE__ */ new Set();
+ for (const block of chain.blocks) {
+ environments.add(block.environment);
+ for (const schema of block.schemas) {
+ schemaNames.add(schema.name);
+ }
+ }
+ return {
+ blockCount: chain.blocks.length,
+ schemaCount: schemaNames.size,
+ firstVersion: chain.blocks[0]?.version ?? null,
+ latestVersion: chain.blocks[chain.blocks.length - 1]?.version ?? null,
+ environments: [...environments]
+ };
+ }
+
  // src/hcl/type-mapper.ts
  var MYSQL_TYPES = {
  String: (prop) => ({
@@ -1163,10 +1498,10 @@ function renderHcl(schema) {
 
  // src/atlas/runner.ts
  var import_execa = require("execa");
- var import_promises2 = require("fs/promises");
- var import_node_path = require("path");
+ var import_promises3 = require("fs/promises");
+ var import_node_path2 = require("path");
  var import_node_os = require("os");
- var import_node_crypto2 = require("crypto");
+ var import_node_crypto3 = require("crypto");
  var import_omnify_core = require("@famgia/omnify-core");
  var DEFAULT_CONFIG = {
  binaryPath: "atlas",
@@ -1192,8 +1527,8 @@ function normalizeDevUrl(devUrl, driver) {
  return devUrl;
  }
  async function createTempDir() {
- const tempPath = (0, import_node_path.join)((0, import_node_os.tmpdir)(), `omnify-atlas-${(0, import_node_crypto2.randomUUID)()}`);
- await (0, import_promises2.mkdir)(tempPath, { recursive: true });
+ const tempPath = (0, import_node_path2.join)((0, import_node_os.tmpdir)(), `omnify-atlas-${(0, import_node_crypto3.randomUUID)()}`);
+ await (0, import_promises3.mkdir)(tempPath, { recursive: true });
  return tempPath;
  }
  async function executeAtlas(config, args) {
@@ -1282,19 +1617,19 @@ async function runAtlasDiff(config, options) {
  async function diffHclSchemas(config, fromHcl, toHcl) {
  const tempDir = await createTempDir();
  try {
- const toPath = (0, import_node_path.join)(tempDir, "to.hcl");
- await (0, import_promises2.writeFile)(toPath, toHcl, "utf8");
+ const toPath = (0, import_node_path2.join)(tempDir, "to.hcl");
+ await (0, import_promises3.writeFile)(toPath, toHcl, "utf8");
  let fromPath;
  if (fromHcl) {
- fromPath = (0, import_node_path.join)(tempDir, "from.hcl");
- await (0, import_promises2.writeFile)(fromPath, fromHcl, "utf8");
+ fromPath = (0, import_node_path2.join)(tempDir, "from.hcl");
+ await (0, import_promises3.writeFile)(fromPath, fromHcl, "utf8");
  }
  return await runAtlasDiff(config, {
  fromPath,
  toPath
  });
  } finally {
- await (0, import_promises2.rm)(tempDir, { recursive: true, force: true });
+ await (0, import_promises3.rm)(tempDir, { recursive: true, force: true });
  }
  }
  async function validateHcl(config, hclPath) {
@@ -1701,7 +2036,7 @@ function formatDiffSummary(result) {
  }
 
  // src/preview/preview.ts
- var import_node_path2 = require("path");
+ var import_node_path3 = require("path");
  var import_omnify_core2 = require("@famgia/omnify-core");
  async function generatePreview(schemas, atlasConfig, options = {}) {
  const atlasVersion = await checkAtlasVersion(atlasConfig);
@@ -1709,7 +2044,7 @@ async function generatePreview(schemas, atlasConfig, options = {}) {
  throw (0, import_omnify_core2.atlasNotFoundError)();
  }
  const currentHashes = await buildSchemaHashes(schemas);
- const lockFilePath = (0, import_node_path2.join)(atlasConfig.workDir ?? process.cwd(), LOCK_FILE_NAME);
+ const lockFilePath = (0, import_node_path3.join)(atlasConfig.workDir ?? process.cwd(), LOCK_FILE_NAME);
  const existingLockFile = await readLockFile(lockFilePath);
  const schemaChanges = compareSchemas(currentHashes, existingLockFile);
  const currentHcl = renderHcl(
@@ -1788,17 +2123,26 @@ function hasBlockingIssues(_preview) {
  0 && (module.exports = {
  LOCK_FILE_NAME,
  LOCK_FILE_VERSION,
+ VERSION_CHAIN_FILE,
  addEnhancedMigrationRecord,
  addMigrationRecord,
  applySchema,
+ buildCurrentSchemaEntries,
  buildSchemaHashes,
  buildSchemaSnapshots,
  checkAtlasVersion,
+ checkBulkLockViolation,
+ checkLockViolation,
  compareSchemas,
  compareSchemasDeep,
+ computeBlockHash,
  computeHash,
  computeSchemaHash,
+ computeSha256,
+ createDeployBlock,
+ createEmptyChain,
  createEmptyLockFile,
+ deployVersion,
  diffHclSchemas,
  extractTableNameFromFilename,
  extractTimestampFromFilename,
@@ -1808,6 +2152,9 @@ function hasBlockingIssues(_preview) {
  generateHclSchema,
  generateHclTable,
  generatePreview,
+ generateVersionName,
+ getChainSummary,
+ getLockedSchemas,
  getMigrationsToRegenerate,
  getPrimaryKeyType,
  getTimestampType,
@@ -1819,6 +2166,7 @@ function hasBlockingIssues(_preview) {
  propertyNameToColumnName,
  propertyToSnapshot,
  readLockFile,
+ readVersionChain,
  renderHcl,
  runAtlasDiff,
  schemaNameToTableName,
@@ -1827,6 +2175,8 @@ function hasBlockingIssues(_preview) {
  updateLockFileV1,
  validateHcl,
  validateMigrations,
- writeLockFile
+ verifyChain,
+ writeLockFile,
+ writeVersionChain
  });
  //# sourceMappingURL=index.cjs.map
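
The new src/lock/version-chain.ts module bundled above adds an append-only, hash-linked record of deployed schema versions. Below is a minimal usage sketch, not taken from the package: it assumes only the function signatures visible in this diff, and the schemas directory, file names, and the "User" entry are hypothetical placeholders.

// Sketch only: exercises the newly exported version-chain API.
// Paths and the schema list are hypothetical examples.
const { join } = require("path");
const {
  VERSION_CHAIN_FILE,
  deployVersion,
  readVersionChain,
  verifyChain,
  checkLockViolation
} = require("@famgia/omnify-atlas");

async function main() {
  const schemasDir = join(process.cwd(), "schemas");
  const chainFilePath = join(process.cwd(), VERSION_CHAIN_FILE); // ".omnify.chain"

  // Entries mirror the { name, filePath, relativePath } shape consumed by
  // buildCurrentSchemaEntries inside deployVersion.
  const schemaFiles = [
    { name: "User", filePath: join(schemasDir, "User.yaml"), relativePath: "User.yaml" }
  ];

  // Append a deploy block that locks the current schema contents.
  const result = await deployVersion(chainFilePath, schemasDir, schemaFiles, {
    environment: "production",
    deployedBy: "ci",
    comment: "lock schemas for release"
  });
  if (!result.success) throw new Error(result.error);

  // Re-read the chain and verify block hashes and locked file contents.
  const chain = await readVersionChain(chainFilePath);
  const verification = await verifyChain(chain, schemasDir);
  console.log("chain valid:", verification.valid, "blocks:", verification.blockCount);

  // Ask whether a locked schema may be deleted.
  console.log(checkLockViolation(chain, "User", "delete"));
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});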