@famgia/omnify-atlas 0.0.110 → 0.0.112
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/index.cjs +366 -16
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +255 -1
- package/dist/index.d.ts +255 -1
- package/dist/index.js +347 -12
- package/dist/index.js.map +1 -1
- package/package.json +3 -3
package/dist/index.js
CHANGED
@@ -480,8 +480,8 @@ async function validateMigrations(lockFile, migrationsDir) {
 if (migration.checksum) {
   try {
     const { readFileSync } = await import("fs");
-    const { join:
-    const content = readFileSync(
+    const { join: join4 } = await import("path");
+    const content = readFileSync(join4(migrationsDir, fileName), "utf8");
     const currentChecksum = computeHash(content);
     if (currentChecksum !== migration.checksum) {
       modifiedFiles.push(fileName);
@@ -551,6 +551,326 @@ function getMigrationsToRegenerate(lockFile, missingFiles) {
   return result;
 }
 
+// src/lock/version-chain.ts
+import { createHash as createHash2 } from "crypto";
+import { readFile as readFile2, writeFile as writeFile2 } from "fs/promises";
+import { existsSync } from "fs";
+import { resolve } from "path";
+var VERSION_CHAIN_FILE = ".omnify.chain";
+function computeSha256(content) {
+  return createHash2("sha256").update(content, "utf8").digest("hex");
+}
+function computeBlockHash(previousHash, version, lockedAt, environment, schemas) {
+  const content = JSON.stringify({
+    previousHash,
+    version,
+    lockedAt,
+    environment,
+    schemas: schemas.map((s) => ({
+      name: s.name,
+      relativePath: s.relativePath,
+      contentHash: s.contentHash
+    }))
+  });
+  return computeSha256(content);
+}
+function createEmptyChain() {
+  const now = (/* @__PURE__ */ new Date()).toISOString();
+  return {
+    version: 1,
+    type: "omnify-version-chain",
+    genesisHash: null,
+    latestHash: null,
+    blocks: [],
+    createdAt: now,
+    updatedAt: now
+  };
+}
+async function readVersionChain(chainFilePath) {
+  try {
+    const content = await readFile2(chainFilePath, "utf8");
+    const parsed = JSON.parse(content);
+    const chain = parsed;
+    if (chain.type !== "omnify-version-chain" || chain.version !== 1) {
+      throw new Error("Invalid version chain file format");
+    }
+    return parsed;
+  } catch (error) {
+    if (error.code === "ENOENT") {
+      return null;
+    }
+    throw error;
+  }
+}
+async function writeVersionChain(chainFilePath, chain) {
+  const content = JSON.stringify(chain, null, 2) + "\n";
+  await writeFile2(chainFilePath, content, "utf8");
+}
+async function getFileContentHash(filePath) {
+  try {
+    const content = await readFile2(filePath, "utf8");
+    return computeSha256(content);
+  } catch {
+    return null;
+  }
+}
+async function buildCurrentSchemaEntries(schemasDir, schemaFiles) {
+  const entries = [];
+  for (const schema of schemaFiles) {
+    const contentHash = await getFileContentHash(schema.filePath);
+    if (contentHash) {
+      entries.push({
+        name: schema.name,
+        relativePath: schema.relativePath,
+        contentHash
+      });
+    }
+  }
+  return entries.sort((a, b) => a.name.localeCompare(b.name));
+}
+function generateVersionName() {
+  const now = /* @__PURE__ */ new Date();
+  const year = now.getFullYear();
+  const month = String(now.getMonth() + 1).padStart(2, "0");
+  const day = String(now.getDate()).padStart(2, "0");
+  const hour = String(now.getHours()).padStart(2, "0");
+  const minute = String(now.getMinutes()).padStart(2, "0");
+  const second = String(now.getSeconds()).padStart(2, "0");
+  return `v${year}.${month}.${day}-${hour}${minute}${second}`;
+}
+async function verifyChain(chain, schemasDir) {
+  const verifiedBlocks = [];
+  const corruptedBlocks = [];
+  const tamperedSchemas = [];
+  const deletedLockedSchemas = [];
+  const lockedSchemas = /* @__PURE__ */ new Map();
+  let previousHash = null;
+  for (const block of chain.blocks) {
+    const expectedHash = computeBlockHash(
+      previousHash,
+      block.version,
+      block.lockedAt,
+      block.environment,
+      block.schemas
+    );
+    if (expectedHash !== block.blockHash) {
+      corruptedBlocks.push({
+        version: block.version,
+        expectedHash,
+        actualHash: block.blockHash,
+        reason: "Block hash mismatch - chain integrity compromised"
+      });
+    }
+    if (block.previousHash !== previousHash) {
+      corruptedBlocks.push({
+        version: block.version,
+        expectedHash: previousHash ?? "null",
+        actualHash: block.previousHash ?? "null",
+        reason: "Previous hash chain broken"
+      });
+    }
+    for (const schema of block.schemas) {
+      lockedSchemas.set(schema.name, {
+        hash: schema.contentHash,
+        version: block.version,
+        relativePath: schema.relativePath
+      });
+    }
+    if (corruptedBlocks.length === 0 || corruptedBlocks[corruptedBlocks.length - 1]?.version !== block.version) {
+      verifiedBlocks.push(block.version);
+    }
+    previousHash = block.blockHash;
+  }
+  for (const [name, locked] of lockedSchemas) {
+    const filePath = resolve(schemasDir, locked.relativePath);
+    if (!existsSync(filePath)) {
+      deletedLockedSchemas.push({
+        schemaName: name,
+        filePath: locked.relativePath,
+        lockedInVersion: locked.version,
+        lockedHash: locked.hash
+      });
+    } else {
+      const currentHash = await getFileContentHash(filePath);
+      if (currentHash && currentHash !== locked.hash) {
+        tamperedSchemas.push({
+          schemaName: name,
+          filePath: locked.relativePath,
+          lockedHash: locked.hash,
+          currentHash,
+          lockedInVersion: locked.version
+        });
+      }
+    }
+  }
+  return {
+    valid: corruptedBlocks.length === 0 && tamperedSchemas.length === 0 && deletedLockedSchemas.length === 0,
+    blockCount: chain.blocks.length,
+    verifiedBlocks,
+    corruptedBlocks,
+    tamperedSchemas,
+    deletedLockedSchemas
+  };
+}
+function checkLockViolation(chain, schemaName, action) {
+  const affectedVersions = [];
+  for (const block of chain.blocks) {
+    const schema = block.schemas.find((s) => s.name === schemaName);
+    if (schema) {
+      affectedVersions.push(block.version);
+    }
+  }
+  if (affectedVersions.length > 0) {
+    return {
+      allowed: false,
+      reason: `Schema '${schemaName}' is locked in production version(s): ${affectedVersions.join(", ")}. ${action === "delete" ? "Deletion" : "Modification"} is not allowed.`,
+      affectedSchemas: [schemaName],
+      lockedInVersions: affectedVersions
+    };
+  }
+  return {
+    allowed: true,
+    affectedSchemas: [],
+    lockedInVersions: []
+  };
+}
+function checkBulkLockViolation(chain, schemas) {
+  const violations = [];
+  for (const { name, action } of schemas) {
+    const result = checkLockViolation(chain, name, action);
+    if (!result.allowed) {
+      violations.push({
+        name,
+        versions: [...result.lockedInVersions]
+      });
+    }
+  }
+  if (violations.length > 0) {
+    const schemaList = violations.map((v) => v.name);
+    const allVersions = [...new Set(violations.flatMap((v) => v.versions))];
+    return {
+      allowed: false,
+      reason: `The following schemas are locked: ${schemaList.join(", ")}. They cannot be modified or deleted.`,
+      affectedSchemas: schemaList,
+      lockedInVersions: allVersions
+    };
+  }
+  return {
+    allowed: true,
+    affectedSchemas: [],
+    lockedInVersions: []
+  };
+}
+function createDeployBlock(chain, schemas, options) {
+  const version = options.version ?? generateVersionName();
+  const lockedAt = (/* @__PURE__ */ new Date()).toISOString();
+  const previousHash = chain.latestHash;
+  const blockHash = computeBlockHash(
+    previousHash,
+    version,
+    lockedAt,
+    options.environment,
+    schemas
+  );
+  const block = {
+    version,
+    blockHash,
+    previousHash,
+    lockedAt,
+    environment: options.environment,
+    deployedBy: options.deployedBy,
+    schemas,
+    comment: options.comment
+  };
+  const updatedChain = {
+    ...chain,
+    genesisHash: chain.genesisHash ?? blockHash,
+    latestHash: blockHash,
+    blocks: [...chain.blocks, block],
+    updatedAt: lockedAt
+  };
+  return { chain: updatedChain, block };
+}
+async function deployVersion(chainFilePath, schemasDir, schemaFiles, options) {
+  let chain = await readVersionChain(chainFilePath);
+  if (!chain) {
+    chain = createEmptyChain();
+  }
+  const currentSchemas = await buildCurrentSchemaEntries(schemasDir, schemaFiles);
+  if (currentSchemas.length === 0) {
+    return {
+      success: false,
+      error: "No schema files found to lock",
+      addedSchemas: [],
+      modifiedSchemas: [],
+      warnings: []
+    };
+  }
+  const previousSchemas = /* @__PURE__ */ new Map();
+  for (const block2 of chain.blocks) {
+    for (const schema of block2.schemas) {
+      previousSchemas.set(schema.name, schema.contentHash);
+    }
+  }
+  const addedSchemas = [];
+  const modifiedSchemas = [];
+  const warnings = [];
+  for (const schema of currentSchemas) {
+    const previousHash = previousSchemas.get(schema.name);
+    if (!previousHash) {
+      addedSchemas.push(schema.name);
+    } else if (previousHash !== schema.contentHash) {
+      modifiedSchemas.push(schema.name);
+      warnings.push(
+        `Schema '${schema.name}' has been modified since last lock. This version will include the new state.`
+      );
+    }
+  }
+  const { chain: updatedChain, block } = createDeployBlock(
+    chain,
+    currentSchemas,
+    options
+  );
+  await writeVersionChain(chainFilePath, updatedChain);
+  return {
+    success: true,
+    block,
+    addedSchemas,
+    modifiedSchemas,
+    warnings
+  };
+}
+function getLockedSchemas(chain) {
+  const locked = /* @__PURE__ */ new Map();
+  for (const block of chain.blocks) {
+    for (const schema of block.schemas) {
+      locked.set(schema.name, {
+        hash: schema.contentHash,
+        version: block.version,
+        relativePath: schema.relativePath
+      });
+    }
+  }
+  return locked;
+}
+function getChainSummary(chain) {
+  const schemaNames = /* @__PURE__ */ new Set();
+  const environments = /* @__PURE__ */ new Set();
+  for (const block of chain.blocks) {
+    environments.add(block.environment);
+    for (const schema of block.schemas) {
+      schemaNames.add(schema.name);
+    }
+  }
+  return {
+    blockCount: chain.blocks.length,
+    schemaCount: schemaNames.size,
+    firstVersion: chain.blocks[0]?.version ?? null,
+    latestVersion: chain.blocks[chain.blocks.length - 1]?.version ?? null,
+    environments: [...environments]
+  };
+}
+
 // src/hcl/type-mapper.ts
 var MYSQL_TYPES = {
   String: (prop) => ({
@@ -1086,8 +1406,8 @@ function renderHcl(schema) {
 
 // src/atlas/runner.ts
 import { execa } from "execa";
-import { mkdir, writeFile as
-import { join } from "path";
+import { mkdir, writeFile as writeFile3, rm } from "fs/promises";
+import { join as join2 } from "path";
 import { tmpdir } from "os";
 import { randomUUID } from "crypto";
 import { atlasError, atlasNotFoundError } from "@famgia/omnify-core";
@@ -1115,7 +1435,7 @@ function normalizeDevUrl(devUrl, driver) {
   return devUrl;
 }
 async function createTempDir() {
-  const tempPath =
+  const tempPath = join2(tmpdir(), `omnify-atlas-${randomUUID()}`);
   await mkdir(tempPath, { recursive: true });
   return tempPath;
 }
@@ -1205,12 +1525,12 @@ async function runAtlasDiff(config, options) {
 async function diffHclSchemas(config, fromHcl, toHcl) {
   const tempDir = await createTempDir();
   try {
-    const toPath =
-    await
+    const toPath = join2(tempDir, "to.hcl");
+    await writeFile3(toPath, toHcl, "utf8");
     let fromPath;
     if (fromHcl) {
-      fromPath =
-      await
+      fromPath = join2(tempDir, "from.hcl");
+      await writeFile3(fromPath, fromHcl, "utf8");
     }
     return await runAtlasDiff(config, {
       fromPath,
@@ -1624,7 +1944,7 @@ function formatDiffSummary(result) {
 }
 
 // src/preview/preview.ts
-import { join as
+import { join as join3 } from "path";
 import { atlasNotFoundError as atlasNotFoundError2 } from "@famgia/omnify-core";
 async function generatePreview(schemas, atlasConfig, options = {}) {
   const atlasVersion = await checkAtlasVersion(atlasConfig);
@@ -1632,7 +1952,7 @@ async function generatePreview(schemas, atlasConfig, options = {}) {
     throw atlasNotFoundError2();
   }
   const currentHashes = await buildSchemaHashes(schemas);
-  const lockFilePath =
+  const lockFilePath = join3(atlasConfig.workDir ?? process.cwd(), LOCK_FILE_NAME);
   const existingLockFile = await readLockFile(lockFilePath);
   const schemaChanges = compareSchemas(currentHashes, existingLockFile);
   const currentHcl = renderHcl(
@@ -1710,17 +2030,26 @@ function hasBlockingIssues(_preview) {
 export {
   LOCK_FILE_NAME,
   LOCK_FILE_VERSION,
+  VERSION_CHAIN_FILE,
   addEnhancedMigrationRecord,
   addMigrationRecord,
   applySchema,
+  buildCurrentSchemaEntries,
   buildSchemaHashes,
   buildSchemaSnapshots,
   checkAtlasVersion,
+  checkBulkLockViolation,
+  checkLockViolation,
   compareSchemas,
   compareSchemasDeep,
+  computeBlockHash,
   computeHash,
   computeSchemaHash,
+  computeSha256,
+  createDeployBlock,
+  createEmptyChain,
   createEmptyLockFile,
+  deployVersion,
   diffHclSchemas,
   extractTableNameFromFilename,
   extractTimestampFromFilename,
@@ -1730,6 +2059,9 @@ export {
   generateHclSchema,
   generateHclTable,
   generatePreview,
+  generateVersionName,
+  getChainSummary,
+  getLockedSchemas,
   getMigrationsToRegenerate,
   getPrimaryKeyType,
   getTimestampType,
@@ -1741,6 +2073,7 @@ export {
   propertyNameToColumnName,
   propertyToSnapshot,
   readLockFile,
+  readVersionChain,
   renderHcl,
   runAtlasDiff,
   schemaNameToTableName,
@@ -1749,6 +2082,8 @@ export {
   updateLockFileV1,
   validateHcl,
   validateMigrations,
-
+  verifyChain,
+  writeLockFile,
+  writeVersionChain
 };
 //# sourceMappingURL=index.js.map
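
The bulk of this release is the new version-chain module and its exports. As a minimal usage sketch — the call shapes are inferred from the bundled output above, and the schema file entries, paths, and option values below are hypothetical placeholders, not anything shipped by the package — locking and later verifying schema versions might look like this:

// Sketch only: signatures inferred from the diffed dist output of @famgia/omnify-atlas.
// File names, schema metadata, and option values are illustrative assumptions.
import { resolve } from "path";
import {
  VERSION_CHAIN_FILE,
  deployVersion,
  readVersionChain,
  verifyChain,
  checkLockViolation
} from "@famgia/omnify-atlas";

async function main() {
  const schemasDir = resolve(process.cwd(), "schemas"); // assumed project layout
  const chainFilePath = resolve(process.cwd(), VERSION_CHAIN_FILE); // ".omnify.chain"

  // Append a new block that locks the current schema contents.
  const deployed = await deployVersion(
    chainFilePath,
    schemasDir,
    [{ name: "User", relativePath: "User.yaml", filePath: resolve(schemasDir, "User.yaml") }],
    { environment: "production", deployedBy: "ci", comment: "initial lock" }
  );
  if (!deployed.success) throw new Error(deployed.error);

  // Later: re-read the chain and check block hashes plus the on-disk schema files.
  const chain = await readVersionChain(chainFilePath);
  if (chain) {
    const report = await verifyChain(chain, schemasDir);
    console.log(report.valid, report.tamperedSchemas, report.deletedLockedSchemas);

    // Ask whether a locked schema may still be modified.
    const check = checkLockViolation(chain, "User", "modify");
    if (!check.allowed) console.warn(check.reason);
  }
}

main().catch(console.error);

Per the diffed code, each deployVersion call appends a hash-linked block to the .omnify.chain file, and verifyChain recomputes block hashes and compares locked schema hashes against the files currently on disk.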