@clef-sh/core 0.1.27 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -2
- package/dist/artifact/packer.d.ts +4 -3
- package/dist/artifact/packer.d.ts.map +1 -1
- package/dist/artifact/resolve.d.ts +3 -2
- package/dist/artifact/resolve.d.ts.map +1 -1
- package/dist/compliance/run.d.ts.map +1 -1
- package/dist/diff/engine.d.ts +18 -8
- package/dist/diff/engine.d.ts.map +1 -1
- package/dist/import/index.d.ts +5 -5
- package/dist/import/index.d.ts.map +1 -1
- package/dist/index.d.mts +14 -12
- package/dist/index.d.ts +14 -12
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1225 -872
- package/dist/index.js.map +4 -4
- package/dist/index.mjs +1213 -848
- package/dist/index.mjs.map +4 -4
- package/dist/kms/aws-arn.d.ts +29 -0
- package/dist/kms/aws-arn.d.ts.map +1 -0
- package/dist/kms/index.d.ts +2 -0
- package/dist/kms/index.d.ts.map +1 -1
- package/dist/lint/runner.d.ts +7 -7
- package/dist/lint/runner.d.ts.map +1 -1
- package/dist/manifest/io.d.ts +6 -0
- package/dist/manifest/io.d.ts.map +1 -1
- package/dist/manifest/parser.d.ts.map +1 -1
- package/dist/matrix/manager.d.ts +4 -16
- package/dist/matrix/manager.d.ts.map +1 -1
- package/dist/merge/driver.d.ts +2 -2
- package/dist/merge/driver.d.ts.map +1 -1
- package/dist/merge/metadata-driver.d.ts +5 -4
- package/dist/merge/metadata-driver.d.ts.map +1 -1
- package/dist/migration/backend.d.ts +10 -7
- package/dist/migration/backend.d.ts.map +1 -1
- package/dist/pack/backends/json-envelope.d.ts.map +1 -1
- package/dist/pack/types.d.ts +9 -3
- package/dist/pack/types.d.ts.map +1 -1
- package/dist/pending/metadata.d.ts +1 -3
- package/dist/pending/metadata.d.ts.map +1 -1
- package/dist/recipients/index.d.ts +4 -3
- package/dist/recipients/index.d.ts.map +1 -1
- package/dist/report/generator.d.ts +4 -3
- package/dist/report/generator.d.ts.map +1 -1
- package/dist/reset/manager.d.ts +21 -3
- package/dist/reset/manager.d.ts.map +1 -1
- package/dist/service-identity/manager.d.ts +6 -3
- package/dist/service-identity/manager.d.ts.map +1 -1
- package/dist/sops/client.d.ts +80 -55
- package/dist/sops/client.d.ts.map +1 -1
- package/dist/sops/linux-stdin-fifo.d.ts +31 -0
- package/dist/sops/linux-stdin-fifo.d.ts.map +1 -0
- package/dist/source/compose.d.ts +10 -0
- package/dist/source/compose.d.ts.map +1 -0
- package/dist/source/default-bulk.d.ts +12 -0
- package/dist/source/default-bulk.d.ts.map +1 -0
- package/dist/source/encryption-backend.d.ts +85 -0
- package/dist/source/encryption-backend.d.ts.map +1 -0
- package/dist/source/errors.d.ts +19 -0
- package/dist/source/errors.d.ts.map +1 -0
- package/dist/source/filesystem-storage-backend.d.ts +26 -0
- package/dist/source/filesystem-storage-backend.d.ts.map +1 -0
- package/dist/source/guards.d.ts +14 -0
- package/dist/source/guards.d.ts.map +1 -0
- package/dist/source/index.d.ts +10 -0
- package/dist/source/index.d.ts.map +1 -0
- package/dist/source/mock-source.d.ts +89 -0
- package/dist/source/mock-source.d.ts.map +1 -0
- package/dist/source/storage-backend.d.ts +61 -0
- package/dist/source/storage-backend.d.ts.map +1 -0
- package/dist/source/types.d.ts +212 -0
- package/dist/source/types.d.ts.map +1 -0
- package/dist/structure/manager.d.ts +17 -3
- package/dist/structure/manager.d.ts.map +1 -1
- package/dist/sync/manager.d.ts +7 -6
- package/dist/sync/manager.d.ts.map +1 -1
- package/dist/types/index.d.ts +10 -23
- package/dist/types/index.d.ts.map +1 -1
- package/package.json +3 -3
- package/dist/bulk/ops.d.ts +0 -57
- package/dist/bulk/ops.d.ts.map +0 -1
package/dist/index.mjs
CHANGED
|
@@ -306,13 +306,13 @@ var require_lib = __commonJS({
|
|
|
306
306
|
"../../node_modules/write-file-atomic/lib/index.js"(exports, module) {
|
|
307
307
|
"use strict";
|
|
308
308
|
module.exports = writeFile;
|
|
309
|
-
module.exports.sync =
|
|
309
|
+
module.exports.sync = writeFileSync8;
|
|
310
310
|
module.exports._getTmpname = getTmpname;
|
|
311
311
|
module.exports._cleanupOnExit = cleanupOnExit;
|
|
312
|
-
var
|
|
312
|
+
var fs23 = __require("fs");
|
|
313
313
|
var crypto6 = __require("node:crypto");
|
|
314
314
|
var { onExit } = require_cjs();
|
|
315
|
-
var
|
|
315
|
+
var path28 = __require("path");
|
|
316
316
|
var { promisify } = __require("util");
|
|
317
317
|
var activeFiles = {};
|
|
318
318
|
var threadId = (function getId() {
|
|
@@ -330,7 +330,7 @@ var require_lib = __commonJS({
|
|
|
330
330
|
function cleanupOnExit(tmpfile) {
|
|
331
331
|
return () => {
|
|
332
332
|
try {
|
|
333
|
-
|
|
333
|
+
fs23.unlinkSync(typeof tmpfile === "function" ? tmpfile() : tmpfile);
|
|
334
334
|
} catch {
|
|
335
335
|
}
|
|
336
336
|
};
|
|
@@ -365,13 +365,13 @@ var require_lib = __commonJS({
|
|
|
365
365
|
let fd;
|
|
366
366
|
let tmpfile;
|
|
367
367
|
const removeOnExitHandler = onExit(cleanupOnExit(() => tmpfile));
|
|
368
|
-
const absoluteName =
|
|
368
|
+
const absoluteName = path28.resolve(filename);
|
|
369
369
|
try {
|
|
370
370
|
await serializeActiveFile(absoluteName);
|
|
371
|
-
const truename = await promisify(
|
|
371
|
+
const truename = await promisify(fs23.realpath)(filename).catch(() => filename);
|
|
372
372
|
tmpfile = getTmpname(truename);
|
|
373
373
|
if (!options.mode || !options.chown) {
|
|
374
|
-
const stats = await promisify(
|
|
374
|
+
const stats = await promisify(fs23.stat)(truename).catch(() => {
|
|
375
375
|
});
|
|
376
376
|
if (stats) {
|
|
377
377
|
if (options.mode == null) {
|
|
@@ -382,45 +382,45 @@ var require_lib = __commonJS({
|
|
|
382
382
|
}
|
|
383
383
|
}
|
|
384
384
|
}
|
|
385
|
-
fd = await promisify(
|
|
385
|
+
fd = await promisify(fs23.open)(tmpfile, "w", options.mode);
|
|
386
386
|
if (options.tmpfileCreated) {
|
|
387
387
|
await options.tmpfileCreated(tmpfile);
|
|
388
388
|
}
|
|
389
389
|
if (ArrayBuffer.isView(data)) {
|
|
390
|
-
await promisify(
|
|
390
|
+
await promisify(fs23.write)(fd, data, 0, data.length, 0);
|
|
391
391
|
} else if (data != null) {
|
|
392
|
-
await promisify(
|
|
392
|
+
await promisify(fs23.write)(fd, String(data), 0, String(options.encoding || "utf8"));
|
|
393
393
|
}
|
|
394
394
|
if (options.fsync !== false) {
|
|
395
|
-
await promisify(
|
|
395
|
+
await promisify(fs23.fsync)(fd);
|
|
396
396
|
}
|
|
397
|
-
await promisify(
|
|
397
|
+
await promisify(fs23.close)(fd);
|
|
398
398
|
fd = null;
|
|
399
399
|
if (options.chown) {
|
|
400
|
-
await promisify(
|
|
400
|
+
await promisify(fs23.chown)(tmpfile, options.chown.uid, options.chown.gid).catch((err) => {
|
|
401
401
|
if (!isChownErrOk(err)) {
|
|
402
402
|
throw err;
|
|
403
403
|
}
|
|
404
404
|
});
|
|
405
405
|
}
|
|
406
406
|
if (options.mode) {
|
|
407
|
-
await promisify(
|
|
407
|
+
await promisify(fs23.chmod)(tmpfile, options.mode).catch((err) => {
|
|
408
408
|
if (!isChownErrOk(err)) {
|
|
409
409
|
throw err;
|
|
410
410
|
}
|
|
411
411
|
});
|
|
412
412
|
}
|
|
413
|
-
await promisify(
|
|
413
|
+
await promisify(fs23.rename)(tmpfile, truename);
|
|
414
414
|
} finally {
|
|
415
415
|
if (fd) {
|
|
416
|
-
await promisify(
|
|
416
|
+
await promisify(fs23.close)(fd).catch(
|
|
417
417
|
/* istanbul ignore next */
|
|
418
418
|
() => {
|
|
419
419
|
}
|
|
420
420
|
);
|
|
421
421
|
}
|
|
422
422
|
removeOnExitHandler();
|
|
423
|
-
await promisify(
|
|
423
|
+
await promisify(fs23.unlink)(tmpfile).catch(() => {
|
|
424
424
|
});
|
|
425
425
|
activeFiles[absoluteName].shift();
|
|
426
426
|
if (activeFiles[absoluteName].length > 0) {
|
|
@@ -446,20 +446,20 @@ var require_lib = __commonJS({
|
|
|
446
446
|
}
|
|
447
447
|
return promise;
|
|
448
448
|
}
|
|
449
|
-
function
|
|
449
|
+
function writeFileSync8(filename, data, options) {
|
|
450
450
|
if (typeof options === "string") {
|
|
451
451
|
options = { encoding: options };
|
|
452
452
|
} else if (!options) {
|
|
453
453
|
options = {};
|
|
454
454
|
}
|
|
455
455
|
try {
|
|
456
|
-
filename =
|
|
456
|
+
filename = fs23.realpathSync(filename);
|
|
457
457
|
} catch (ex) {
|
|
458
458
|
}
|
|
459
459
|
const tmpfile = getTmpname(filename);
|
|
460
460
|
if (!options.mode || !options.chown) {
|
|
461
461
|
try {
|
|
462
|
-
const stats =
|
|
462
|
+
const stats = fs23.statSync(filename);
|
|
463
463
|
options = Object.assign({}, options);
|
|
464
464
|
if (!options.mode) {
|
|
465
465
|
options.mode = stats.mode;
|
|
@@ -475,23 +475,23 @@ var require_lib = __commonJS({
|
|
|
475
475
|
const removeOnExitHandler = onExit(cleanup);
|
|
476
476
|
let threw = true;
|
|
477
477
|
try {
|
|
478
|
-
fd =
|
|
478
|
+
fd = fs23.openSync(tmpfile, "w", options.mode || 438);
|
|
479
479
|
if (options.tmpfileCreated) {
|
|
480
480
|
options.tmpfileCreated(tmpfile);
|
|
481
481
|
}
|
|
482
482
|
if (ArrayBuffer.isView(data)) {
|
|
483
|
-
|
|
483
|
+
fs23.writeSync(fd, data, 0, data.length, 0);
|
|
484
484
|
} else if (data != null) {
|
|
485
|
-
|
|
485
|
+
fs23.writeSync(fd, String(data), 0, String(options.encoding || "utf8"));
|
|
486
486
|
}
|
|
487
487
|
if (options.fsync !== false) {
|
|
488
|
-
|
|
488
|
+
fs23.fsyncSync(fd);
|
|
489
489
|
}
|
|
490
|
-
|
|
490
|
+
fs23.closeSync(fd);
|
|
491
491
|
fd = null;
|
|
492
492
|
if (options.chown) {
|
|
493
493
|
try {
|
|
494
|
-
|
|
494
|
+
fs23.chownSync(tmpfile, options.chown.uid, options.chown.gid);
|
|
495
495
|
} catch (err) {
|
|
496
496
|
if (!isChownErrOk(err)) {
|
|
497
497
|
throw err;
|
|
@@ -500,19 +500,19 @@ var require_lib = __commonJS({
|
|
|
500
500
|
}
|
|
501
501
|
if (options.mode) {
|
|
502
502
|
try {
|
|
503
|
-
|
|
503
|
+
fs23.chmodSync(tmpfile, options.mode);
|
|
504
504
|
} catch (err) {
|
|
505
505
|
if (!isChownErrOk(err)) {
|
|
506
506
|
throw err;
|
|
507
507
|
}
|
|
508
508
|
}
|
|
509
509
|
}
|
|
510
|
-
|
|
510
|
+
fs23.renameSync(tmpfile, filename);
|
|
511
511
|
threw = false;
|
|
512
512
|
} finally {
|
|
513
513
|
if (fd) {
|
|
514
514
|
try {
|
|
515
|
-
|
|
515
|
+
fs23.closeSync(fd);
|
|
516
516
|
} catch (ex) {
|
|
517
517
|
}
|
|
518
518
|
}
|
|
@@ -551,54 +551,54 @@ var require_polyfills = __commonJS({
|
|
|
551
551
|
}
|
|
552
552
|
var chdir;
|
|
553
553
|
module.exports = patch;
|
|
554
|
-
function patch(
|
|
554
|
+
function patch(fs23) {
|
|
555
555
|
if (constants.hasOwnProperty("O_SYMLINK") && process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
|
|
556
|
-
patchLchmod(
|
|
557
|
-
}
|
|
558
|
-
if (!
|
|
559
|
-
patchLutimes(
|
|
560
|
-
}
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
if (
|
|
580
|
-
|
|
556
|
+
patchLchmod(fs23);
|
|
557
|
+
}
|
|
558
|
+
if (!fs23.lutimes) {
|
|
559
|
+
patchLutimes(fs23);
|
|
560
|
+
}
|
|
561
|
+
fs23.chown = chownFix(fs23.chown);
|
|
562
|
+
fs23.fchown = chownFix(fs23.fchown);
|
|
563
|
+
fs23.lchown = chownFix(fs23.lchown);
|
|
564
|
+
fs23.chmod = chmodFix(fs23.chmod);
|
|
565
|
+
fs23.fchmod = chmodFix(fs23.fchmod);
|
|
566
|
+
fs23.lchmod = chmodFix(fs23.lchmod);
|
|
567
|
+
fs23.chownSync = chownFixSync(fs23.chownSync);
|
|
568
|
+
fs23.fchownSync = chownFixSync(fs23.fchownSync);
|
|
569
|
+
fs23.lchownSync = chownFixSync(fs23.lchownSync);
|
|
570
|
+
fs23.chmodSync = chmodFixSync(fs23.chmodSync);
|
|
571
|
+
fs23.fchmodSync = chmodFixSync(fs23.fchmodSync);
|
|
572
|
+
fs23.lchmodSync = chmodFixSync(fs23.lchmodSync);
|
|
573
|
+
fs23.stat = statFix(fs23.stat);
|
|
574
|
+
fs23.fstat = statFix(fs23.fstat);
|
|
575
|
+
fs23.lstat = statFix(fs23.lstat);
|
|
576
|
+
fs23.statSync = statFixSync(fs23.statSync);
|
|
577
|
+
fs23.fstatSync = statFixSync(fs23.fstatSync);
|
|
578
|
+
fs23.lstatSync = statFixSync(fs23.lstatSync);
|
|
579
|
+
if (fs23.chmod && !fs23.lchmod) {
|
|
580
|
+
fs23.lchmod = function(path28, mode, cb) {
|
|
581
581
|
if (cb) process.nextTick(cb);
|
|
582
582
|
};
|
|
583
|
-
|
|
583
|
+
fs23.lchmodSync = function() {
|
|
584
584
|
};
|
|
585
585
|
}
|
|
586
|
-
if (
|
|
587
|
-
|
|
586
|
+
if (fs23.chown && !fs23.lchown) {
|
|
587
|
+
fs23.lchown = function(path28, uid, gid, cb) {
|
|
588
588
|
if (cb) process.nextTick(cb);
|
|
589
589
|
};
|
|
590
|
-
|
|
590
|
+
fs23.lchownSync = function() {
|
|
591
591
|
};
|
|
592
592
|
}
|
|
593
593
|
if (platform === "win32") {
|
|
594
|
-
|
|
594
|
+
fs23.rename = typeof fs23.rename !== "function" ? fs23.rename : (function(fs$rename) {
|
|
595
595
|
function rename(from, to, cb) {
|
|
596
596
|
var start = Date.now();
|
|
597
597
|
var backoff = 0;
|
|
598
598
|
fs$rename(from, to, function CB(er) {
|
|
599
599
|
if (er && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") && Date.now() - start < 6e4) {
|
|
600
600
|
setTimeout(function() {
|
|
601
|
-
|
|
601
|
+
fs23.stat(to, function(stater, st) {
|
|
602
602
|
if (stater && stater.code === "ENOENT")
|
|
603
603
|
fs$rename(from, to, CB);
|
|
604
604
|
else
|
|
@@ -614,9 +614,9 @@ var require_polyfills = __commonJS({
|
|
|
614
614
|
}
|
|
615
615
|
if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename);
|
|
616
616
|
return rename;
|
|
617
|
-
})(
|
|
617
|
+
})(fs23.rename);
|
|
618
618
|
}
|
|
619
|
-
|
|
619
|
+
fs23.read = typeof fs23.read !== "function" ? fs23.read : (function(fs$read) {
|
|
620
620
|
function read(fd, buffer, offset, length, position, callback_) {
|
|
621
621
|
var callback;
|
|
622
622
|
if (callback_ && typeof callback_ === "function") {
|
|
@@ -624,22 +624,22 @@ var require_polyfills = __commonJS({
|
|
|
624
624
|
callback = function(er, _, __) {
|
|
625
625
|
if (er && er.code === "EAGAIN" && eagCounter < 10) {
|
|
626
626
|
eagCounter++;
|
|
627
|
-
return fs$read.call(
|
|
627
|
+
return fs$read.call(fs23, fd, buffer, offset, length, position, callback);
|
|
628
628
|
}
|
|
629
629
|
callback_.apply(this, arguments);
|
|
630
630
|
};
|
|
631
631
|
}
|
|
632
|
-
return fs$read.call(
|
|
632
|
+
return fs$read.call(fs23, fd, buffer, offset, length, position, callback);
|
|
633
633
|
}
|
|
634
634
|
if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read);
|
|
635
635
|
return read;
|
|
636
|
-
})(
|
|
637
|
-
|
|
636
|
+
})(fs23.read);
|
|
637
|
+
fs23.readSync = typeof fs23.readSync !== "function" ? fs23.readSync : /* @__PURE__ */ (function(fs$readSync) {
|
|
638
638
|
return function(fd, buffer, offset, length, position) {
|
|
639
639
|
var eagCounter = 0;
|
|
640
640
|
while (true) {
|
|
641
641
|
try {
|
|
642
|
-
return fs$readSync.call(
|
|
642
|
+
return fs$readSync.call(fs23, fd, buffer, offset, length, position);
|
|
643
643
|
} catch (er) {
|
|
644
644
|
if (er.code === "EAGAIN" && eagCounter < 10) {
|
|
645
645
|
eagCounter++;
|
|
@@ -649,11 +649,11 @@ var require_polyfills = __commonJS({
|
|
|
649
649
|
}
|
|
650
650
|
}
|
|
651
651
|
};
|
|
652
|
-
})(
|
|
653
|
-
function patchLchmod(
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
652
|
+
})(fs23.readSync);
|
|
653
|
+
function patchLchmod(fs24) {
|
|
654
|
+
fs24.lchmod = function(path28, mode, callback) {
|
|
655
|
+
fs24.open(
|
|
656
|
+
path28,
|
|
657
657
|
constants.O_WRONLY | constants.O_SYMLINK,
|
|
658
658
|
mode,
|
|
659
659
|
function(err, fd) {
|
|
@@ -661,80 +661,80 @@ var require_polyfills = __commonJS({
|
|
|
661
661
|
if (callback) callback(err);
|
|
662
662
|
return;
|
|
663
663
|
}
|
|
664
|
-
|
|
665
|
-
|
|
664
|
+
fs24.fchmod(fd, mode, function(err2) {
|
|
665
|
+
fs24.close(fd, function(err22) {
|
|
666
666
|
if (callback) callback(err2 || err22);
|
|
667
667
|
});
|
|
668
668
|
});
|
|
669
669
|
}
|
|
670
670
|
);
|
|
671
671
|
};
|
|
672
|
-
|
|
673
|
-
var fd =
|
|
672
|
+
fs24.lchmodSync = function(path28, mode) {
|
|
673
|
+
var fd = fs24.openSync(path28, constants.O_WRONLY | constants.O_SYMLINK, mode);
|
|
674
674
|
var threw = true;
|
|
675
675
|
var ret;
|
|
676
676
|
try {
|
|
677
|
-
ret =
|
|
677
|
+
ret = fs24.fchmodSync(fd, mode);
|
|
678
678
|
threw = false;
|
|
679
679
|
} finally {
|
|
680
680
|
if (threw) {
|
|
681
681
|
try {
|
|
682
|
-
|
|
682
|
+
fs24.closeSync(fd);
|
|
683
683
|
} catch (er) {
|
|
684
684
|
}
|
|
685
685
|
} else {
|
|
686
|
-
|
|
686
|
+
fs24.closeSync(fd);
|
|
687
687
|
}
|
|
688
688
|
}
|
|
689
689
|
return ret;
|
|
690
690
|
};
|
|
691
691
|
}
|
|
692
|
-
function patchLutimes(
|
|
693
|
-
if (constants.hasOwnProperty("O_SYMLINK") &&
|
|
694
|
-
|
|
695
|
-
|
|
692
|
+
function patchLutimes(fs24) {
|
|
693
|
+
if (constants.hasOwnProperty("O_SYMLINK") && fs24.futimes) {
|
|
694
|
+
fs24.lutimes = function(path28, at, mt, cb) {
|
|
695
|
+
fs24.open(path28, constants.O_SYMLINK, function(er, fd) {
|
|
696
696
|
if (er) {
|
|
697
697
|
if (cb) cb(er);
|
|
698
698
|
return;
|
|
699
699
|
}
|
|
700
|
-
|
|
701
|
-
|
|
700
|
+
fs24.futimes(fd, at, mt, function(er2) {
|
|
701
|
+
fs24.close(fd, function(er22) {
|
|
702
702
|
if (cb) cb(er2 || er22);
|
|
703
703
|
});
|
|
704
704
|
});
|
|
705
705
|
});
|
|
706
706
|
};
|
|
707
|
-
|
|
708
|
-
var fd =
|
|
707
|
+
fs24.lutimesSync = function(path28, at, mt) {
|
|
708
|
+
var fd = fs24.openSync(path28, constants.O_SYMLINK);
|
|
709
709
|
var ret;
|
|
710
710
|
var threw = true;
|
|
711
711
|
try {
|
|
712
|
-
ret =
|
|
712
|
+
ret = fs24.futimesSync(fd, at, mt);
|
|
713
713
|
threw = false;
|
|
714
714
|
} finally {
|
|
715
715
|
if (threw) {
|
|
716
716
|
try {
|
|
717
|
-
|
|
717
|
+
fs24.closeSync(fd);
|
|
718
718
|
} catch (er) {
|
|
719
719
|
}
|
|
720
720
|
} else {
|
|
721
|
-
|
|
721
|
+
fs24.closeSync(fd);
|
|
722
722
|
}
|
|
723
723
|
}
|
|
724
724
|
return ret;
|
|
725
725
|
};
|
|
726
|
-
} else if (
|
|
727
|
-
|
|
726
|
+
} else if (fs24.futimes) {
|
|
727
|
+
fs24.lutimes = function(_a, _b, _c, cb) {
|
|
728
728
|
if (cb) process.nextTick(cb);
|
|
729
729
|
};
|
|
730
|
-
|
|
730
|
+
fs24.lutimesSync = function() {
|
|
731
731
|
};
|
|
732
732
|
}
|
|
733
733
|
}
|
|
734
734
|
function chmodFix(orig) {
|
|
735
735
|
if (!orig) return orig;
|
|
736
736
|
return function(target, mode, cb) {
|
|
737
|
-
return orig.call(
|
|
737
|
+
return orig.call(fs23, target, mode, function(er) {
|
|
738
738
|
if (chownErOk(er)) er = null;
|
|
739
739
|
if (cb) cb.apply(this, arguments);
|
|
740
740
|
});
|
|
@@ -744,7 +744,7 @@ var require_polyfills = __commonJS({
|
|
|
744
744
|
if (!orig) return orig;
|
|
745
745
|
return function(target, mode) {
|
|
746
746
|
try {
|
|
747
|
-
return orig.call(
|
|
747
|
+
return orig.call(fs23, target, mode);
|
|
748
748
|
} catch (er) {
|
|
749
749
|
if (!chownErOk(er)) throw er;
|
|
750
750
|
}
|
|
@@ -753,7 +753,7 @@ var require_polyfills = __commonJS({
|
|
|
753
753
|
function chownFix(orig) {
|
|
754
754
|
if (!orig) return orig;
|
|
755
755
|
return function(target, uid, gid, cb) {
|
|
756
|
-
return orig.call(
|
|
756
|
+
return orig.call(fs23, target, uid, gid, function(er) {
|
|
757
757
|
if (chownErOk(er)) er = null;
|
|
758
758
|
if (cb) cb.apply(this, arguments);
|
|
759
759
|
});
|
|
@@ -763,7 +763,7 @@ var require_polyfills = __commonJS({
|
|
|
763
763
|
if (!orig) return orig;
|
|
764
764
|
return function(target, uid, gid) {
|
|
765
765
|
try {
|
|
766
|
-
return orig.call(
|
|
766
|
+
return orig.call(fs23, target, uid, gid);
|
|
767
767
|
} catch (er) {
|
|
768
768
|
if (!chownErOk(er)) throw er;
|
|
769
769
|
}
|
|
@@ -783,13 +783,13 @@ var require_polyfills = __commonJS({
|
|
|
783
783
|
}
|
|
784
784
|
if (cb) cb.apply(this, arguments);
|
|
785
785
|
}
|
|
786
|
-
return options ? orig.call(
|
|
786
|
+
return options ? orig.call(fs23, target, options, callback) : orig.call(fs23, target, callback);
|
|
787
787
|
};
|
|
788
788
|
}
|
|
789
789
|
function statFixSync(orig) {
|
|
790
790
|
if (!orig) return orig;
|
|
791
791
|
return function(target, options) {
|
|
792
|
-
var stats = options ? orig.call(
|
|
792
|
+
var stats = options ? orig.call(fs23, target, options) : orig.call(fs23, target);
|
|
793
793
|
if (stats) {
|
|
794
794
|
if (stats.uid < 0) stats.uid += 4294967296;
|
|
795
795
|
if (stats.gid < 0) stats.gid += 4294967296;
|
|
@@ -818,16 +818,16 @@ var require_legacy_streams = __commonJS({
|
|
|
818
818
|
"../../node_modules/graceful-fs/legacy-streams.js"(exports, module) {
|
|
819
819
|
var Stream = __require("stream").Stream;
|
|
820
820
|
module.exports = legacy;
|
|
821
|
-
function legacy(
|
|
821
|
+
function legacy(fs23) {
|
|
822
822
|
return {
|
|
823
823
|
ReadStream,
|
|
824
824
|
WriteStream
|
|
825
825
|
};
|
|
826
|
-
function ReadStream(
|
|
827
|
-
if (!(this instanceof ReadStream)) return new ReadStream(
|
|
826
|
+
function ReadStream(path28, options) {
|
|
827
|
+
if (!(this instanceof ReadStream)) return new ReadStream(path28, options);
|
|
828
828
|
Stream.call(this);
|
|
829
829
|
var self = this;
|
|
830
|
-
this.path =
|
|
830
|
+
this.path = path28;
|
|
831
831
|
this.fd = null;
|
|
832
832
|
this.readable = true;
|
|
833
833
|
this.paused = false;
|
|
@@ -861,7 +861,7 @@ var require_legacy_streams = __commonJS({
|
|
|
861
861
|
});
|
|
862
862
|
return;
|
|
863
863
|
}
|
|
864
|
-
|
|
864
|
+
fs23.open(this.path, this.flags, this.mode, function(err, fd) {
|
|
865
865
|
if (err) {
|
|
866
866
|
self.emit("error", err);
|
|
867
867
|
self.readable = false;
|
|
@@ -872,10 +872,10 @@ var require_legacy_streams = __commonJS({
|
|
|
872
872
|
self._read();
|
|
873
873
|
});
|
|
874
874
|
}
|
|
875
|
-
function WriteStream(
|
|
876
|
-
if (!(this instanceof WriteStream)) return new WriteStream(
|
|
875
|
+
function WriteStream(path28, options) {
|
|
876
|
+
if (!(this instanceof WriteStream)) return new WriteStream(path28, options);
|
|
877
877
|
Stream.call(this);
|
|
878
|
-
this.path =
|
|
878
|
+
this.path = path28;
|
|
879
879
|
this.fd = null;
|
|
880
880
|
this.writable = true;
|
|
881
881
|
this.flags = "w";
|
|
@@ -900,7 +900,7 @@ var require_legacy_streams = __commonJS({
|
|
|
900
900
|
this.busy = false;
|
|
901
901
|
this._queue = [];
|
|
902
902
|
if (this.fd === null) {
|
|
903
|
-
this._open =
|
|
903
|
+
this._open = fs23.open;
|
|
904
904
|
this._queue.push([this._open, this.path, this.flags, this.mode, void 0]);
|
|
905
905
|
this.flush();
|
|
906
906
|
}
|
|
@@ -935,7 +935,7 @@ var require_clone = __commonJS({
|
|
|
935
935
|
// ../../node_modules/graceful-fs/graceful-fs.js
|
|
936
936
|
var require_graceful_fs = __commonJS({
|
|
937
937
|
"../../node_modules/graceful-fs/graceful-fs.js"(exports, module) {
|
|
938
|
-
var
|
|
938
|
+
var fs23 = __require("fs");
|
|
939
939
|
var polyfills = require_polyfills();
|
|
940
940
|
var legacy = require_legacy_streams();
|
|
941
941
|
var clone = require_clone();
|
|
@@ -967,12 +967,12 @@ var require_graceful_fs = __commonJS({
|
|
|
967
967
|
m = "GFS4: " + m.split(/\n/).join("\nGFS4: ");
|
|
968
968
|
console.error(m);
|
|
969
969
|
};
|
|
970
|
-
if (!
|
|
970
|
+
if (!fs23[gracefulQueue]) {
|
|
971
971
|
queue = global[gracefulQueue] || [];
|
|
972
|
-
publishQueue(
|
|
973
|
-
|
|
972
|
+
publishQueue(fs23, queue);
|
|
973
|
+
fs23.close = (function(fs$close) {
|
|
974
974
|
function close(fd, cb) {
|
|
975
|
-
return fs$close.call(
|
|
975
|
+
return fs$close.call(fs23, fd, function(err) {
|
|
976
976
|
if (!err) {
|
|
977
977
|
resetQueue();
|
|
978
978
|
}
|
|
@@ -984,48 +984,48 @@ var require_graceful_fs = __commonJS({
|
|
|
984
984
|
value: fs$close
|
|
985
985
|
});
|
|
986
986
|
return close;
|
|
987
|
-
})(
|
|
988
|
-
|
|
989
|
-
function
|
|
990
|
-
fs$closeSync.apply(
|
|
987
|
+
})(fs23.close);
|
|
988
|
+
fs23.closeSync = (function(fs$closeSync) {
|
|
989
|
+
function closeSync3(fd) {
|
|
990
|
+
fs$closeSync.apply(fs23, arguments);
|
|
991
991
|
resetQueue();
|
|
992
992
|
}
|
|
993
|
-
Object.defineProperty(
|
|
993
|
+
Object.defineProperty(closeSync3, previousSymbol, {
|
|
994
994
|
value: fs$closeSync
|
|
995
995
|
});
|
|
996
|
-
return
|
|
997
|
-
})(
|
|
996
|
+
return closeSync3;
|
|
997
|
+
})(fs23.closeSync);
|
|
998
998
|
if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || "")) {
|
|
999
999
|
process.on("exit", function() {
|
|
1000
|
-
debug(
|
|
1001
|
-
__require("assert").equal(
|
|
1000
|
+
debug(fs23[gracefulQueue]);
|
|
1001
|
+
__require("assert").equal(fs23[gracefulQueue].length, 0);
|
|
1002
1002
|
});
|
|
1003
1003
|
}
|
|
1004
1004
|
}
|
|
1005
1005
|
var queue;
|
|
1006
1006
|
if (!global[gracefulQueue]) {
|
|
1007
|
-
publishQueue(global,
|
|
1008
|
-
}
|
|
1009
|
-
module.exports = patch(clone(
|
|
1010
|
-
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !
|
|
1011
|
-
module.exports = patch(
|
|
1012
|
-
|
|
1013
|
-
}
|
|
1014
|
-
function patch(
|
|
1015
|
-
polyfills(
|
|
1016
|
-
|
|
1017
|
-
|
|
1018
|
-
|
|
1019
|
-
var fs$readFile =
|
|
1020
|
-
|
|
1021
|
-
function readFile(
|
|
1007
|
+
publishQueue(global, fs23[gracefulQueue]);
|
|
1008
|
+
}
|
|
1009
|
+
module.exports = patch(clone(fs23));
|
|
1010
|
+
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs23.__patched) {
|
|
1011
|
+
module.exports = patch(fs23);
|
|
1012
|
+
fs23.__patched = true;
|
|
1013
|
+
}
|
|
1014
|
+
function patch(fs24) {
|
|
1015
|
+
polyfills(fs24);
|
|
1016
|
+
fs24.gracefulify = patch;
|
|
1017
|
+
fs24.createReadStream = createReadStream;
|
|
1018
|
+
fs24.createWriteStream = createWriteStream;
|
|
1019
|
+
var fs$readFile = fs24.readFile;
|
|
1020
|
+
fs24.readFile = readFile;
|
|
1021
|
+
function readFile(path28, options, cb) {
|
|
1022
1022
|
if (typeof options === "function")
|
|
1023
1023
|
cb = options, options = null;
|
|
1024
|
-
return go$readFile(
|
|
1025
|
-
function go$readFile(
|
|
1026
|
-
return fs$readFile(
|
|
1024
|
+
return go$readFile(path28, options, cb);
|
|
1025
|
+
function go$readFile(path29, options2, cb2, startTime) {
|
|
1026
|
+
return fs$readFile(path29, options2, function(err) {
|
|
1027
1027
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1028
|
-
enqueue([go$readFile, [
|
|
1028
|
+
enqueue([go$readFile, [path29, options2, cb2], err, startTime || Date.now(), Date.now()]);
|
|
1029
1029
|
else {
|
|
1030
1030
|
if (typeof cb2 === "function")
|
|
1031
1031
|
cb2.apply(this, arguments);
|
|
@@ -1033,16 +1033,16 @@ var require_graceful_fs = __commonJS({
|
|
|
1033
1033
|
});
|
|
1034
1034
|
}
|
|
1035
1035
|
}
|
|
1036
|
-
var fs$writeFile =
|
|
1037
|
-
|
|
1038
|
-
function writeFile(
|
|
1036
|
+
var fs$writeFile = fs24.writeFile;
|
|
1037
|
+
fs24.writeFile = writeFile;
|
|
1038
|
+
function writeFile(path28, data, options, cb) {
|
|
1039
1039
|
if (typeof options === "function")
|
|
1040
1040
|
cb = options, options = null;
|
|
1041
|
-
return go$writeFile(
|
|
1042
|
-
function go$writeFile(
|
|
1043
|
-
return fs$writeFile(
|
|
1041
|
+
return go$writeFile(path28, data, options, cb);
|
|
1042
|
+
function go$writeFile(path29, data2, options2, cb2, startTime) {
|
|
1043
|
+
return fs$writeFile(path29, data2, options2, function(err) {
|
|
1044
1044
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1045
|
-
enqueue([go$writeFile, [
|
|
1045
|
+
enqueue([go$writeFile, [path29, data2, options2, cb2], err, startTime || Date.now(), Date.now()]);
|
|
1046
1046
|
else {
|
|
1047
1047
|
if (typeof cb2 === "function")
|
|
1048
1048
|
cb2.apply(this, arguments);
|
|
@@ -1050,17 +1050,17 @@ var require_graceful_fs = __commonJS({
|
|
|
1050
1050
|
});
|
|
1051
1051
|
}
|
|
1052
1052
|
}
|
|
1053
|
-
var fs$appendFile =
|
|
1053
|
+
var fs$appendFile = fs24.appendFile;
|
|
1054
1054
|
if (fs$appendFile)
|
|
1055
|
-
|
|
1056
|
-
function appendFile(
|
|
1055
|
+
fs24.appendFile = appendFile;
|
|
1056
|
+
function appendFile(path28, data, options, cb) {
|
|
1057
1057
|
if (typeof options === "function")
|
|
1058
1058
|
cb = options, options = null;
|
|
1059
|
-
return go$appendFile(
|
|
1060
|
-
function go$appendFile(
|
|
1061
|
-
return fs$appendFile(
|
|
1059
|
+
return go$appendFile(path28, data, options, cb);
|
|
1060
|
+
function go$appendFile(path29, data2, options2, cb2, startTime) {
|
|
1061
|
+
return fs$appendFile(path29, data2, options2, function(err) {
|
|
1062
1062
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1063
|
-
enqueue([go$appendFile, [
|
|
1063
|
+
enqueue([go$appendFile, [path29, data2, options2, cb2], err, startTime || Date.now(), Date.now()]);
|
|
1064
1064
|
else {
|
|
1065
1065
|
if (typeof cb2 === "function")
|
|
1066
1066
|
cb2.apply(this, arguments);
|
|
@@ -1068,9 +1068,9 @@ var require_graceful_fs = __commonJS({
|
|
|
1068
1068
|
});
|
|
1069
1069
|
}
|
|
1070
1070
|
}
|
|
1071
|
-
var fs$copyFile =
|
|
1071
|
+
var fs$copyFile = fs24.copyFile;
|
|
1072
1072
|
if (fs$copyFile)
|
|
1073
|
-
|
|
1073
|
+
fs24.copyFile = copyFile;
|
|
1074
1074
|
function copyFile(src, dest, flags, cb) {
|
|
1075
1075
|
if (typeof flags === "function") {
|
|
1076
1076
|
cb = flags;
|
|
@@ -1088,34 +1088,34 @@ var require_graceful_fs = __commonJS({
|
|
|
1088
1088
|
});
|
|
1089
1089
|
}
|
|
1090
1090
|
}
|
|
1091
|
-
var fs$readdir =
|
|
1092
|
-
|
|
1091
|
+
var fs$readdir = fs24.readdir;
|
|
1092
|
+
fs24.readdir = readdir;
|
|
1093
1093
|
var noReaddirOptionVersions = /^v[0-5]\./;
|
|
1094
|
-
function readdir(
|
|
1094
|
+
function readdir(path28, options, cb) {
|
|
1095
1095
|
if (typeof options === "function")
|
|
1096
1096
|
cb = options, options = null;
|
|
1097
|
-
var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(
|
|
1098
|
-
return fs$readdir(
|
|
1099
|
-
|
|
1097
|
+
var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path29, options2, cb2, startTime) {
|
|
1098
|
+
return fs$readdir(path29, fs$readdirCallback(
|
|
1099
|
+
path29,
|
|
1100
1100
|
options2,
|
|
1101
1101
|
cb2,
|
|
1102
1102
|
startTime
|
|
1103
1103
|
));
|
|
1104
|
-
} : function go$readdir2(
|
|
1105
|
-
return fs$readdir(
|
|
1106
|
-
|
|
1104
|
+
} : function go$readdir2(path29, options2, cb2, startTime) {
|
|
1105
|
+
return fs$readdir(path29, options2, fs$readdirCallback(
|
|
1106
|
+
path29,
|
|
1107
1107
|
options2,
|
|
1108
1108
|
cb2,
|
|
1109
1109
|
startTime
|
|
1110
1110
|
));
|
|
1111
1111
|
};
|
|
1112
|
-
return go$readdir(
|
|
1113
|
-
function fs$readdirCallback(
|
|
1112
|
+
return go$readdir(path28, options, cb);
|
|
1113
|
+
function fs$readdirCallback(path29, options2, cb2, startTime) {
|
|
1114
1114
|
return function(err, files) {
|
|
1115
1115
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1116
1116
|
enqueue([
|
|
1117
1117
|
go$readdir,
|
|
1118
|
-
[
|
|
1118
|
+
[path29, options2, cb2],
|
|
1119
1119
|
err,
|
|
1120
1120
|
startTime || Date.now(),
|
|
1121
1121
|
Date.now()
|
|
@@ -1130,21 +1130,21 @@ var require_graceful_fs = __commonJS({
|
|
|
1130
1130
|
}
|
|
1131
1131
|
}
|
|
1132
1132
|
if (process.version.substr(0, 4) === "v0.8") {
|
|
1133
|
-
var legStreams = legacy(
|
|
1133
|
+
var legStreams = legacy(fs24);
|
|
1134
1134
|
ReadStream = legStreams.ReadStream;
|
|
1135
1135
|
WriteStream = legStreams.WriteStream;
|
|
1136
1136
|
}
|
|
1137
|
-
var fs$ReadStream =
|
|
1137
|
+
var fs$ReadStream = fs24.ReadStream;
|
|
1138
1138
|
if (fs$ReadStream) {
|
|
1139
1139
|
ReadStream.prototype = Object.create(fs$ReadStream.prototype);
|
|
1140
1140
|
ReadStream.prototype.open = ReadStream$open;
|
|
1141
1141
|
}
|
|
1142
|
-
var fs$WriteStream =
|
|
1142
|
+
var fs$WriteStream = fs24.WriteStream;
|
|
1143
1143
|
if (fs$WriteStream) {
|
|
1144
1144
|
WriteStream.prototype = Object.create(fs$WriteStream.prototype);
|
|
1145
1145
|
WriteStream.prototype.open = WriteStream$open;
|
|
1146
1146
|
}
|
|
1147
|
-
Object.defineProperty(
|
|
1147
|
+
Object.defineProperty(fs24, "ReadStream", {
|
|
1148
1148
|
get: function() {
|
|
1149
1149
|
return ReadStream;
|
|
1150
1150
|
},
|
|
@@ -1154,7 +1154,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1154
1154
|
enumerable: true,
|
|
1155
1155
|
configurable: true
|
|
1156
1156
|
});
|
|
1157
|
-
Object.defineProperty(
|
|
1157
|
+
Object.defineProperty(fs24, "WriteStream", {
|
|
1158
1158
|
get: function() {
|
|
1159
1159
|
return WriteStream;
|
|
1160
1160
|
},
|
|
@@ -1165,7 +1165,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1165
1165
|
configurable: true
|
|
1166
1166
|
});
|
|
1167
1167
|
var FileReadStream = ReadStream;
|
|
1168
|
-
Object.defineProperty(
|
|
1168
|
+
Object.defineProperty(fs24, "FileReadStream", {
|
|
1169
1169
|
get: function() {
|
|
1170
1170
|
return FileReadStream;
|
|
1171
1171
|
},
|
|
@@ -1176,7 +1176,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1176
1176
|
configurable: true
|
|
1177
1177
|
});
|
|
1178
1178
|
var FileWriteStream = WriteStream;
|
|
1179
|
-
Object.defineProperty(
|
|
1179
|
+
Object.defineProperty(fs24, "FileWriteStream", {
|
|
1180
1180
|
get: function() {
|
|
1181
1181
|
return FileWriteStream;
|
|
1182
1182
|
},
|
|
@@ -1186,7 +1186,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1186
1186
|
enumerable: true,
|
|
1187
1187
|
configurable: true
|
|
1188
1188
|
});
|
|
1189
|
-
function ReadStream(
|
|
1189
|
+
function ReadStream(path28, options) {
|
|
1190
1190
|
if (this instanceof ReadStream)
|
|
1191
1191
|
return fs$ReadStream.apply(this, arguments), this;
|
|
1192
1192
|
else
|
|
@@ -1206,7 +1206,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1206
1206
|
}
|
|
1207
1207
|
});
|
|
1208
1208
|
}
|
|
1209
|
-
function WriteStream(
|
|
1209
|
+
function WriteStream(path28, options) {
|
|
1210
1210
|
if (this instanceof WriteStream)
|
|
1211
1211
|
return fs$WriteStream.apply(this, arguments), this;
|
|
1212
1212
|
else
|
|
@@ -1224,22 +1224,22 @@ var require_graceful_fs = __commonJS({
|
|
|
1224
1224
|
}
|
|
1225
1225
|
});
|
|
1226
1226
|
}
|
|
1227
|
-
function createReadStream(
|
|
1228
|
-
return new
|
|
1227
|
+
function createReadStream(path28, options) {
|
|
1228
|
+
return new fs24.ReadStream(path28, options);
|
|
1229
1229
|
}
|
|
1230
|
-
function createWriteStream(
|
|
1231
|
-
return new
|
|
1230
|
+
function createWriteStream(path28, options) {
|
|
1231
|
+
return new fs24.WriteStream(path28, options);
|
|
1232
1232
|
}
|
|
1233
|
-
var fs$open =
|
|
1234
|
-
|
|
1235
|
-
function open(
|
|
1233
|
+
var fs$open = fs24.open;
|
|
1234
|
+
fs24.open = open;
|
|
1235
|
+
function open(path28, flags, mode, cb) {
|
|
1236
1236
|
if (typeof mode === "function")
|
|
1237
1237
|
cb = mode, mode = null;
|
|
1238
|
-
return go$open(
|
|
1239
|
-
function go$open(
|
|
1240
|
-
return fs$open(
|
|
1238
|
+
return go$open(path28, flags, mode, cb);
|
|
1239
|
+
function go$open(path29, flags2, mode2, cb2, startTime) {
|
|
1240
|
+
return fs$open(path29, flags2, mode2, function(err, fd) {
|
|
1241
1241
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1242
|
-
enqueue([go$open, [
|
|
1242
|
+
enqueue([go$open, [path29, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]);
|
|
1243
1243
|
else {
|
|
1244
1244
|
if (typeof cb2 === "function")
|
|
1245
1245
|
cb2.apply(this, arguments);
|
|
@@ -1247,20 +1247,20 @@ var require_graceful_fs = __commonJS({
|
|
|
1247
1247
|
});
|
|
1248
1248
|
}
|
|
1249
1249
|
}
|
|
1250
|
-
return
|
|
1250
|
+
return fs24;
|
|
1251
1251
|
}
|
|
1252
1252
|
function enqueue(elem) {
|
|
1253
1253
|
debug("ENQUEUE", elem[0].name, elem[1]);
|
|
1254
|
-
|
|
1254
|
+
fs23[gracefulQueue].push(elem);
|
|
1255
1255
|
retry();
|
|
1256
1256
|
}
|
|
1257
1257
|
var retryTimer;
|
|
1258
1258
|
function resetQueue() {
|
|
1259
1259
|
var now = Date.now();
|
|
1260
|
-
for (var i = 0; i <
|
|
1261
|
-
if (
|
|
1262
|
-
|
|
1263
|
-
|
|
1260
|
+
for (var i = 0; i < fs23[gracefulQueue].length; ++i) {
|
|
1261
|
+
if (fs23[gracefulQueue][i].length > 2) {
|
|
1262
|
+
fs23[gracefulQueue][i][3] = now;
|
|
1263
|
+
fs23[gracefulQueue][i][4] = now;
|
|
1264
1264
|
}
|
|
1265
1265
|
}
|
|
1266
1266
|
retry();
|
|
@@ -1268,9 +1268,9 @@ var require_graceful_fs = __commonJS({
|
|
|
1268
1268
|
function retry() {
|
|
1269
1269
|
clearTimeout(retryTimer);
|
|
1270
1270
|
retryTimer = void 0;
|
|
1271
|
-
if (
|
|
1271
|
+
if (fs23[gracefulQueue].length === 0)
|
|
1272
1272
|
return;
|
|
1273
|
-
var elem =
|
|
1273
|
+
var elem = fs23[gracefulQueue].shift();
|
|
1274
1274
|
var fn = elem[0];
|
|
1275
1275
|
var args = elem[1];
|
|
1276
1276
|
var err = elem[2];
|
|
@@ -1292,7 +1292,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1292
1292
|
debug("RETRY", fn.name, args);
|
|
1293
1293
|
fn.apply(null, args.concat([startTime]));
|
|
1294
1294
|
} else {
|
|
1295
|
-
|
|
1295
|
+
fs23[gracefulQueue].push(elem);
|
|
1296
1296
|
}
|
|
1297
1297
|
}
|
|
1298
1298
|
if (retryTimer === void 0) {
|
|
@@ -1727,10 +1727,10 @@ var require_mtime_precision = __commonJS({
|
|
|
1727
1727
|
"../../node_modules/proper-lockfile/lib/mtime-precision.js"(exports, module) {
|
|
1728
1728
|
"use strict";
|
|
1729
1729
|
var cacheSymbol = /* @__PURE__ */ Symbol();
|
|
1730
|
-
function probe(file,
|
|
1731
|
-
const cachedPrecision =
|
|
1730
|
+
function probe(file, fs23, callback) {
|
|
1731
|
+
const cachedPrecision = fs23[cacheSymbol];
|
|
1732
1732
|
if (cachedPrecision) {
|
|
1733
|
-
return
|
|
1733
|
+
return fs23.stat(file, (err, stat) => {
|
|
1734
1734
|
if (err) {
|
|
1735
1735
|
return callback(err);
|
|
1736
1736
|
}
|
|
@@ -1738,16 +1738,16 @@ var require_mtime_precision = __commonJS({
|
|
|
1738
1738
|
});
|
|
1739
1739
|
}
|
|
1740
1740
|
const mtime = new Date(Math.ceil(Date.now() / 1e3) * 1e3 + 5);
|
|
1741
|
-
|
|
1741
|
+
fs23.utimes(file, mtime, mtime, (err) => {
|
|
1742
1742
|
if (err) {
|
|
1743
1743
|
return callback(err);
|
|
1744
1744
|
}
|
|
1745
|
-
|
|
1745
|
+
fs23.stat(file, (err2, stat) => {
|
|
1746
1746
|
if (err2) {
|
|
1747
1747
|
return callback(err2);
|
|
1748
1748
|
}
|
|
1749
1749
|
const precision = stat.mtime.getTime() % 1e3 === 0 ? "s" : "ms";
|
|
1750
|
-
Object.defineProperty(
|
|
1750
|
+
Object.defineProperty(fs23, cacheSymbol, { value: precision });
|
|
1751
1751
|
callback(null, stat.mtime, precision);
|
|
1752
1752
|
});
|
|
1753
1753
|
});
|
|
@@ -1768,8 +1768,8 @@ var require_mtime_precision = __commonJS({
|
|
|
1768
1768
|
var require_lockfile = __commonJS({
|
|
1769
1769
|
"../../node_modules/proper-lockfile/lib/lockfile.js"(exports, module) {
|
|
1770
1770
|
"use strict";
|
|
1771
|
-
var
|
|
1772
|
-
var
|
|
1771
|
+
var path28 = __require("path");
|
|
1772
|
+
var fs23 = require_graceful_fs();
|
|
1773
1773
|
var retry = require_retry2();
|
|
1774
1774
|
var onExit = require_signal_exit();
|
|
1775
1775
|
var mtimePrecision = require_mtime_precision();
|
|
@@ -1779,7 +1779,7 @@ var require_lockfile = __commonJS({
|
|
|
1779
1779
|
}
|
|
1780
1780
|
function resolveCanonicalPath(file, options, callback) {
|
|
1781
1781
|
if (!options.realpath) {
|
|
1782
|
-
return callback(null,
|
|
1782
|
+
return callback(null, path28.resolve(file));
|
|
1783
1783
|
}
|
|
1784
1784
|
options.fs.realpath(file, callback);
|
|
1785
1785
|
}
|
|
@@ -1900,7 +1900,7 @@ var require_lockfile = __commonJS({
|
|
|
1900
1900
|
update: null,
|
|
1901
1901
|
realpath: true,
|
|
1902
1902
|
retries: 0,
|
|
1903
|
-
fs:
|
|
1903
|
+
fs: fs23,
|
|
1904
1904
|
onCompromised: (err) => {
|
|
1905
1905
|
throw err;
|
|
1906
1906
|
},
|
|
@@ -1944,7 +1944,7 @@ var require_lockfile = __commonJS({
|
|
|
1944
1944
|
}
|
|
1945
1945
|
function unlock(file, options, callback) {
|
|
1946
1946
|
options = {
|
|
1947
|
-
fs:
|
|
1947
|
+
fs: fs23,
|
|
1948
1948
|
realpath: true,
|
|
1949
1949
|
...options
|
|
1950
1950
|
};
|
|
@@ -1966,7 +1966,7 @@ var require_lockfile = __commonJS({
|
|
|
1966
1966
|
options = {
|
|
1967
1967
|
stale: 1e4,
|
|
1968
1968
|
realpath: true,
|
|
1969
|
-
fs:
|
|
1969
|
+
fs: fs23,
|
|
1970
1970
|
...options
|
|
1971
1971
|
};
|
|
1972
1972
|
options.stale = Math.max(options.stale || 0, 2e3);
|
|
@@ -2005,16 +2005,16 @@ var require_lockfile = __commonJS({
|
|
|
2005
2005
|
var require_adapter = __commonJS({
|
|
2006
2006
|
"../../node_modules/proper-lockfile/lib/adapter.js"(exports, module) {
|
|
2007
2007
|
"use strict";
|
|
2008
|
-
var
|
|
2009
|
-
function createSyncFs(
|
|
2008
|
+
var fs23 = require_graceful_fs();
|
|
2009
|
+
function createSyncFs(fs24) {
|
|
2010
2010
|
const methods = ["mkdir", "realpath", "stat", "rmdir", "utimes"];
|
|
2011
|
-
const newFs = { ...
|
|
2011
|
+
const newFs = { ...fs24 };
|
|
2012
2012
|
methods.forEach((method) => {
|
|
2013
2013
|
newFs[method] = (...args) => {
|
|
2014
2014
|
const callback = args.pop();
|
|
2015
2015
|
let ret;
|
|
2016
2016
|
try {
|
|
2017
|
-
ret =
|
|
2017
|
+
ret = fs24[`${method}Sync`](...args);
|
|
2018
2018
|
} catch (err) {
|
|
2019
2019
|
return callback(err);
|
|
2020
2020
|
}
|
|
@@ -2052,7 +2052,7 @@ var require_adapter = __commonJS({
|
|
|
2052
2052
|
}
|
|
2053
2053
|
function toSyncOptions(options) {
|
|
2054
2054
|
options = { ...options };
|
|
2055
|
-
options.fs = createSyncFs(options.fs ||
|
|
2055
|
+
options.fs = createSyncFs(options.fs || fs23);
|
|
2056
2056
|
if (typeof options.retries === "number" && options.retries > 0 || options.retries && typeof options.retries.retries === "number" && options.retries.retries > 0) {
|
|
2057
2057
|
throw Object.assign(new Error("Cannot use retries with the sync api"), { code: "ESYNC" });
|
|
2058
2058
|
}
|
|
@@ -2269,11 +2269,98 @@ function keyPreview(key) {
|
|
|
2269
2269
|
return `age1\u2026${last8}`;
|
|
2270
2270
|
}
|
|
2271
2271
|
|
|
2272
|
+
// src/kms/aws-arn.ts
|
|
2273
|
+
var PARTITION_PATTERN = /^aws(?:-[a-z]+)*$/;
|
|
2274
|
+
var REGION_PATTERN = /^[a-z]{2,}(?:-[a-z]+)+-\d+$/;
|
|
2275
|
+
var ACCOUNT_PATTERN = /^\d{12}$/;
|
|
2276
|
+
function validateAwsKmsArn(input) {
|
|
2277
|
+
if (typeof input !== "string") {
|
|
2278
|
+
return { ok: false, reason: "value must be a string" };
|
|
2279
|
+
}
|
|
2280
|
+
if (input.length === 0) {
|
|
2281
|
+
return { ok: false, reason: "value is empty" };
|
|
2282
|
+
}
|
|
2283
|
+
if (!input.startsWith("arn:")) {
|
|
2284
|
+
return {
|
|
2285
|
+
ok: false,
|
|
2286
|
+
reason: "expected an ARN starting with 'arn:' (got a bare key id, alias name, or other format). Use a full ARN like 'arn:aws:kms:us-east-1:123456789012:alias/<name>'."
|
|
2287
|
+
};
|
|
2288
|
+
}
|
|
2289
|
+
const segments = input.split(":");
|
|
2290
|
+
if (segments.length < 6) {
|
|
2291
|
+
return {
|
|
2292
|
+
ok: false,
|
|
2293
|
+
reason: `expected 6 colon-delimited segments (arn:aws:kms:<region>:<account>:<resource>), got ${segments.length}. Check that the region and account aren't missing.`
|
|
2294
|
+
};
|
|
2295
|
+
}
|
|
2296
|
+
if (segments.length > 6) {
|
|
2297
|
+
return {
|
|
2298
|
+
ok: false,
|
|
2299
|
+
reason: `expected exactly 6 colon-delimited segments, got ${segments.length}. Check for stray ':' characters.`
|
|
2300
|
+
};
|
|
2301
|
+
}
|
|
2302
|
+
const [, partition, service, region, account, resource] = segments;
|
|
2303
|
+
if (!PARTITION_PATTERN.test(partition)) {
|
|
2304
|
+
return {
|
|
2305
|
+
ok: false,
|
|
2306
|
+
reason: `partition segment '${partition}' is not recognized. Expected 'aws', 'aws-us-gov', 'aws-cn', etc.`
|
|
2307
|
+
};
|
|
2308
|
+
}
|
|
2309
|
+
if (service !== "kms") {
|
|
2310
|
+
return {
|
|
2311
|
+
ok: false,
|
|
2312
|
+
reason: `service segment must be 'kms', got '${service}'.`
|
|
2313
|
+
};
|
|
2314
|
+
}
|
|
2315
|
+
if (region.length === 0) {
|
|
2316
|
+
return {
|
|
2317
|
+
ok: false,
|
|
2318
|
+
reason: "region segment is empty (look for '::' between 'kms' and the account id). Set a region like 'us-east-1' before reconstructing the ARN \u2014 common cause: a $REGION shell variable was unset when the ARN was built."
|
|
2319
|
+
};
|
|
2320
|
+
}
|
|
2321
|
+
if (!REGION_PATTERN.test(region)) {
|
|
2322
|
+
return {
|
|
2323
|
+
ok: false,
|
|
2324
|
+
reason: `region segment '${region}' doesn't look like an AWS region (expected e.g. 'us-east-1', 'eu-west-2').`
|
|
2325
|
+
};
|
|
2326
|
+
}
|
|
2327
|
+
if (account.length === 0) {
|
|
2328
|
+
return {
|
|
2329
|
+
ok: false,
|
|
2330
|
+
reason: "account segment is empty. Provide the 12-digit AWS account id."
|
|
2331
|
+
};
|
|
2332
|
+
}
|
|
2333
|
+
if (!ACCOUNT_PATTERN.test(account)) {
|
|
2334
|
+
return {
|
|
2335
|
+
ok: false,
|
|
2336
|
+
reason: `account segment '${account}' must be exactly 12 digits.`
|
|
2337
|
+
};
|
|
2338
|
+
}
|
|
2339
|
+
if (!resource || resource.length === 0) {
|
|
2340
|
+
return {
|
|
2341
|
+
ok: false,
|
|
2342
|
+
reason: "resource segment is empty. Expected 'key/<id>' or 'alias/<name>' after the account."
|
|
2343
|
+
};
|
|
2344
|
+
}
|
|
2345
|
+
if (!resource.startsWith("key/") && !resource.startsWith("alias/")) {
|
|
2346
|
+
return {
|
|
2347
|
+
ok: false,
|
|
2348
|
+
reason: `resource '${resource}' must start with 'key/' or 'alias/'.`
|
|
2349
|
+
};
|
|
2350
|
+
}
|
|
2351
|
+
if (resource === "key/" || resource === "alias/") {
|
|
2352
|
+
return {
|
|
2353
|
+
ok: false,
|
|
2354
|
+
reason: "resource id is empty after 'key/' or 'alias/'."
|
|
2355
|
+
};
|
|
2356
|
+
}
|
|
2357
|
+
return { ok: true };
|
|
2358
|
+
}
|
|
2359
|
+
|
|
2272
2360
|
// src/manifest/parser.ts
|
|
2273
2361
|
var CLEF_MANIFEST_FILENAME = "clef.yaml";
|
|
2274
2362
|
var VALID_BACKENDS = ["age", "awskms", "gcpkms", "azurekv", "pgp", "hsm"];
|
|
2275
2363
|
var PKCS11_URI_PATTERN = /^pkcs11:[a-zA-Z][a-zA-Z0-9_-]*=[^;]+/;
|
|
2276
|
-
var AWS_KMS_ARN_PATTERN = /^arn:aws(?:-[a-z]+)*:kms:[a-z0-9-]+:\d+:(key|alias)\/.+$/;
|
|
2277
2364
|
var VALID_TOP_LEVEL_KEYS = [
|
|
2278
2365
|
"version",
|
|
2279
2366
|
"environments",
|
|
@@ -2796,11 +2883,14 @@ var ManifestParser = class {
|
|
|
2796
2883
|
"service_identities"
|
|
2797
2884
|
);
|
|
2798
2885
|
}
|
|
2799
|
-
if (kmsObj.provider === "aws"
|
|
2800
|
-
|
|
2801
|
-
|
|
2802
|
-
|
|
2803
|
-
|
|
2886
|
+
if (kmsObj.provider === "aws") {
|
|
2887
|
+
const arnValidation = validateAwsKmsArn(kmsObj.keyId);
|
|
2888
|
+
if (!arnValidation.ok) {
|
|
2889
|
+
throw new ManifestValidationError(
|
|
2890
|
+
`Service identity '${siName}' environment '${envName}': kms.keyId is not a valid AWS KMS ARN \u2014 ${arnValidation.reason} (got '${kmsObj.keyId}'). Expected shape: arn:aws:kms:<region>:<account>:key/<id> or arn:aws:kms:<region>:<account>:alias/<name>.`,
|
|
2891
|
+
"service_identities"
|
|
2892
|
+
);
|
|
2893
|
+
}
|
|
2804
2894
|
}
|
|
2805
2895
|
if (Object.prototype.hasOwnProperty.call(kmsObj, "region")) {
|
|
2806
2896
|
throw new ManifestValidationError(
|
|
@@ -2875,6 +2965,18 @@ function readManifestYaml(repoRoot) {
|
|
|
2875
2965
|
return YAML2.parse(raw);
|
|
2876
2966
|
}
|
|
2877
2967
|
function writeManifestYaml(repoRoot, doc) {
|
|
2968
|
+
const parser = new ManifestParser();
|
|
2969
|
+
try {
|
|
2970
|
+
parser.validate(doc);
|
|
2971
|
+
} catch (err) {
|
|
2972
|
+
if (err instanceof ManifestValidationError) {
|
|
2973
|
+
throw new ManifestValidationError(
|
|
2974
|
+
`Refusing to write invalid manifest: ${err.message}`,
|
|
2975
|
+
err.field
|
|
2976
|
+
);
|
|
2977
|
+
}
|
|
2978
|
+
throw err;
|
|
2979
|
+
}
|
|
2878
2980
|
const manifestPath = path.join(repoRoot, CLEF_MANIFEST_FILENAME);
|
|
2879
2981
|
import_write_file_atomic.default.sync(manifestPath, YAML2.stringify(doc));
|
|
2880
2982
|
}
|
|
@@ -3345,10 +3447,6 @@ async function getPendingKeys(filePath) {
|
|
|
3345
3447
|
const metadata = await loadMetadata(filePath);
|
|
3346
3448
|
return metadata.pending.map((p) => p.key);
|
|
3347
3449
|
}
|
|
3348
|
-
async function isPending(filePath, key) {
|
|
3349
|
-
const metadata = await loadMetadata(filePath);
|
|
3350
|
-
return metadata.pending.some((p) => p.key === key);
|
|
3351
|
-
}
|
|
3352
3450
|
async function recordRotation(filePath, keys, rotatedBy, now = /* @__PURE__ */ new Date()) {
|
|
3353
3451
|
const metadata = await loadMetadata(filePath);
|
|
3354
3452
|
for (const key of keys) {
|
|
@@ -3384,14 +3482,6 @@ async function getRotations(filePath) {
|
|
|
3384
3482
|
function generateRandomValue() {
|
|
3385
3483
|
return crypto.randomBytes(32).toString("hex");
|
|
3386
3484
|
}
|
|
3387
|
-
async function markPendingWithRetry(filePath, keys, setBy, retryDelayMs = 200) {
|
|
3388
|
-
try {
|
|
3389
|
-
await markPending(filePath, keys, setBy);
|
|
3390
|
-
} catch {
|
|
3391
|
-
await new Promise((r) => setTimeout(r, retryDelayMs));
|
|
3392
|
-
await markPending(filePath, keys, setBy);
|
|
3393
|
-
}
|
|
3394
|
-
}
|
|
3395
3485
|
|
|
3396
3486
|
// src/sops/keys.ts
|
|
3397
3487
|
import * as fs6 from "fs";
|
|
@@ -3441,34 +3531,17 @@ var MatrixManager = class {
|
|
|
3441
3531
|
detectMissingCells(manifest, repoRoot) {
|
|
3442
3532
|
return this.resolveMatrix(manifest, repoRoot).filter((cell) => !cell.exists);
|
|
3443
3533
|
}
|
|
3444
|
-
/**
|
|
3445
|
-
* Create an empty encrypted SOPS file for a missing matrix cell.
|
|
3446
|
-
*
|
|
3447
|
-
* @param cell - The cell to scaffold (must not already exist).
|
|
3448
|
-
* @param sopsClient - SOPS client used to write the initial encrypted file.
|
|
3449
|
-
* @param manifest - Parsed manifest used to determine the encryption backend.
|
|
3450
|
-
*/
|
|
3451
|
-
async scaffoldCell(cell, sopsClient, manifest) {
|
|
3452
|
-
const dir = path5.dirname(cell.filePath);
|
|
3453
|
-
if (!fs7.existsSync(dir)) {
|
|
3454
|
-
fs7.mkdirSync(dir, { recursive: true });
|
|
3455
|
-
}
|
|
3456
|
-
await sopsClient.encrypt(cell.filePath, {}, manifest, cell.environment);
|
|
3457
|
-
}
|
|
3458
3534
|
/**
|
|
3459
3535
|
* Read each cell and return key counts, pending counts, and cross-environment issues.
|
|
3460
3536
|
*
|
|
3461
|
-
*
|
|
3462
|
-
*
|
|
3463
|
-
*
|
|
3464
|
-
* decrypt-based implementation later (e.g. for backends that don't expose
|
|
3465
|
-
* key names without decryption).
|
|
3537
|
+
* Keys are read from the plaintext YAML structure directly — no
|
|
3538
|
+
* decryption needed. A future backend that doesn't expose key names
|
|
3539
|
+
* without decryption would need its own implementation.
|
|
3466
3540
|
*
|
|
3467
3541
|
* @param manifest - Parsed manifest.
|
|
3468
3542
|
* @param repoRoot - Absolute path to the repository root.
|
|
3469
|
-
* @param _sopsClient - Reserved for future use; pass any `EncryptionBackend`.
|
|
3470
3543
|
*/
|
|
3471
|
-
async getMatrixStatus(manifest, repoRoot
|
|
3544
|
+
async getMatrixStatus(manifest, repoRoot) {
|
|
3472
3545
|
const cells = this.resolveMatrix(manifest, repoRoot);
|
|
3473
3546
|
const statuses = [];
|
|
3474
3547
|
const cellKeys = /* @__PURE__ */ new Map();
|
|
@@ -3784,7 +3857,6 @@ function orderedKeys(keys) {
|
|
|
3784
3857
|
}
|
|
3785
3858
|
|
|
3786
3859
|
// src/diff/engine.ts
|
|
3787
|
-
import * as path7 from "path";
|
|
3788
3860
|
var DiffEngine = class {
|
|
3789
3861
|
/**
|
|
3790
3862
|
* Compare two in-memory value maps and produce a sorted diff result.
|
|
@@ -3835,131 +3907,21 @@ var DiffEngine = class {
|
|
|
3835
3907
|
* @param namespace - Namespace containing both cells.
|
|
3836
3908
|
* @param envA - Name of environment A.
|
|
3837
3909
|
* @param envB - Name of environment B.
|
|
3838
|
-
* @param
|
|
3839
|
-
* @
|
|
3840
|
-
* @param repoRoot - Absolute path to the repository root.
|
|
3841
|
-
* @throws {@link SopsDecryptionError} If either file cannot be decrypted.
|
|
3910
|
+
* @param source - SecretSource that resolves both cells (substrate-agnostic).
|
|
3911
|
+
* @throws {@link SopsDecryptionError} If either cell cannot be decrypted.
|
|
3842
3912
|
*/
|
|
3843
|
-
async
|
|
3844
|
-
const fileA = path7.join(
|
|
3845
|
-
repoRoot,
|
|
3846
|
-
manifest.file_pattern.replace("{namespace}", namespace).replace("{environment}", envA)
|
|
3847
|
-
);
|
|
3848
|
-
const fileB = path7.join(
|
|
3849
|
-
repoRoot,
|
|
3850
|
-
manifest.file_pattern.replace("{namespace}", namespace).replace("{environment}", envB)
|
|
3851
|
-
);
|
|
3913
|
+
async diffCells(namespace, envA, envB, source) {
|
|
3852
3914
|
const [decryptedA, decryptedB] = await Promise.all([
|
|
3853
|
-
|
|
3854
|
-
|
|
3915
|
+
source.readCell({ namespace, environment: envA }),
|
|
3916
|
+
source.readCell({ namespace, environment: envB })
|
|
3855
3917
|
]);
|
|
3856
3918
|
return this.diff(decryptedA.values, decryptedB.values, envA, envB, namespace);
|
|
3857
3919
|
}
|
|
3858
3920
|
};
|
|
3859
3921
|
|
|
3860
|
-
// src/bulk/ops.ts
|
|
3861
|
-
import * as path8 from "path";
|
|
3862
|
-
var BulkOps = class {
|
|
3863
|
-
constructor(tx) {
|
|
3864
|
-
this.tx = tx;
|
|
3865
|
-
}
|
|
3866
|
-
tx;
|
|
3867
|
-
/**
|
|
3868
|
-
* Set a key to different values in multiple environments at once.
|
|
3869
|
-
*
|
|
3870
|
-
* @param namespace - Target namespace.
|
|
3871
|
-
* @param key - Secret key name to set.
|
|
3872
|
-
* @param values - Map of `{ environment: value }` pairs.
|
|
3873
|
-
* @param manifest - Parsed manifest.
|
|
3874
|
-
* @param sopsClient - SOPS client used to decrypt and re-encrypt each file.
|
|
3875
|
-
* @param repoRoot - Absolute path to the repository root.
|
|
3876
|
-
* @throws Whatever the underlying encrypt throws — the transaction rolls back.
|
|
3877
|
-
*/
|
|
3878
|
-
async setAcrossEnvironments(namespace, key, values, manifest, sopsClient, repoRoot) {
|
|
3879
|
-
const targets = manifest.environments.filter((env) => env.name in values).map((env) => ({
|
|
3880
|
-
env: env.name,
|
|
3881
|
-
filePath: path8.join(
|
|
3882
|
-
repoRoot,
|
|
3883
|
-
manifest.file_pattern.replace("{namespace}", namespace).replace("{environment}", env.name)
|
|
3884
|
-
)
|
|
3885
|
-
}));
|
|
3886
|
-
if (targets.length === 0) return;
|
|
3887
|
-
await this.tx.run(repoRoot, {
|
|
3888
|
-
description: `clef set: ${namespace}/${key} across ${targets.length} env(s)`,
|
|
3889
|
-
paths: targets.map((t) => path8.relative(repoRoot, t.filePath)),
|
|
3890
|
-
mutate: async () => {
|
|
3891
|
-
for (const target of targets) {
|
|
3892
|
-
const decrypted = await sopsClient.decrypt(target.filePath);
|
|
3893
|
-
decrypted.values[key] = values[target.env];
|
|
3894
|
-
await sopsClient.encrypt(target.filePath, decrypted.values, manifest, target.env);
|
|
3895
|
-
}
|
|
3896
|
-
}
|
|
3897
|
-
});
|
|
3898
|
-
}
|
|
3899
|
-
/**
|
|
3900
|
-
* Delete a key from every environment in a namespace.
|
|
3901
|
-
*
|
|
3902
|
-
* @param namespace - Target namespace.
|
|
3903
|
-
* @param key - Secret key name to delete.
|
|
3904
|
-
* @param manifest - Parsed manifest.
|
|
3905
|
-
* @param sopsClient - SOPS client.
|
|
3906
|
-
* @param repoRoot - Absolute path to the repository root.
|
|
3907
|
-
*/
|
|
3908
|
-
async deleteAcrossEnvironments(namespace, key, manifest, sopsClient, repoRoot) {
|
|
3909
|
-
const targets = manifest.environments.map((env) => ({
|
|
3910
|
-
env: env.name,
|
|
3911
|
-
filePath: path8.join(
|
|
3912
|
-
repoRoot,
|
|
3913
|
-
manifest.file_pattern.replace("{namespace}", namespace).replace("{environment}", env.name)
|
|
3914
|
-
)
|
|
3915
|
-
}));
|
|
3916
|
-
await this.tx.run(repoRoot, {
|
|
3917
|
-
description: `clef delete: ${namespace}/${key} from ${targets.length} env(s)`,
|
|
3918
|
-
paths: targets.map((t) => path8.relative(repoRoot, t.filePath)),
|
|
3919
|
-
mutate: async () => {
|
|
3920
|
-
for (const target of targets) {
|
|
3921
|
-
const decrypted = await sopsClient.decrypt(target.filePath);
|
|
3922
|
-
if (key in decrypted.values) {
|
|
3923
|
-
delete decrypted.values[key];
|
|
3924
|
-
await sopsClient.encrypt(target.filePath, decrypted.values, manifest, target.env);
|
|
3925
|
-
}
|
|
3926
|
-
}
|
|
3927
|
-
}
|
|
3928
|
-
});
|
|
3929
|
-
}
|
|
3930
|
-
/**
|
|
3931
|
-
* Copy a single key's value from one matrix cell to another.
|
|
3932
|
-
*
|
|
3933
|
-
* @param key - Secret key name to copy.
|
|
3934
|
-
* @param fromCell - Source matrix cell.
|
|
3935
|
-
* @param toCell - Destination matrix cell.
|
|
3936
|
-
* @param sopsClient - SOPS client.
|
|
3937
|
-
* @param manifest - Parsed manifest.
|
|
3938
|
-
* @param repoRoot - Absolute path to the repository root.
|
|
3939
|
-
* @throws `Error` if the key does not exist in the source cell.
|
|
3940
|
-
*/
|
|
3941
|
-
async copyValue(key, fromCell, toCell, sopsClient, manifest, repoRoot) {
|
|
3942
|
-
const source = await sopsClient.decrypt(fromCell.filePath);
|
|
3943
|
-
if (!(key in source.values)) {
|
|
3944
|
-
throw new Error(
|
|
3945
|
-
`Key '${key}' does not exist in ${fromCell.namespace}/${fromCell.environment}.`
|
|
3946
|
-
);
|
|
3947
|
-
}
|
|
3948
|
-
await this.tx.run(repoRoot, {
|
|
3949
|
-
description: `clef copy: ${key} from ${fromCell.namespace}/${fromCell.environment} to ${toCell.namespace}/${toCell.environment}`,
|
|
3950
|
-
paths: [path8.relative(repoRoot, toCell.filePath)],
|
|
3951
|
-
mutate: async () => {
|
|
3952
|
-
const dest = await sopsClient.decrypt(toCell.filePath);
|
|
3953
|
-
dest.values[key] = source.values[key];
|
|
3954
|
-
await sopsClient.encrypt(toCell.filePath, dest.values, manifest, toCell.environment);
|
|
3955
|
-
}
|
|
3956
|
-
});
|
|
3957
|
-
}
|
|
3958
|
-
};
|
|
3959
|
-
|
|
3960
3922
|
// src/git/integration.ts
|
|
3961
3923
|
import * as fs10 from "fs";
|
|
3962
|
-
import * as
|
|
3924
|
+
import * as path7 from "path";
|
|
3963
3925
|
var PRE_COMMIT_HOOK = `#!/bin/sh
|
|
3964
3926
|
# Clef pre-commit hook \u2014 blocks commits of files missing SOPS encryption metadata
|
|
3965
3927
|
# and scans staged files for plaintext secrets.
|
|
@@ -4135,17 +4097,17 @@ var GitIntegration = class {
|
|
|
4135
4097
|
* @returns The kind of operation in progress, or null if none.
|
|
4136
4098
|
*/
|
|
4137
4099
|
async isMidOperation(repoRoot) {
|
|
4138
|
-
const gitDir =
|
|
4139
|
-
if (fs10.existsSync(
|
|
4100
|
+
const gitDir = path7.join(repoRoot, ".git");
|
|
4101
|
+
if (fs10.existsSync(path7.join(gitDir, "MERGE_HEAD"))) {
|
|
4140
4102
|
return { midOp: true, kind: "merge" };
|
|
4141
4103
|
}
|
|
4142
|
-
if (fs10.existsSync(
|
|
4104
|
+
if (fs10.existsSync(path7.join(gitDir, "rebase-merge")) || fs10.existsSync(path7.join(gitDir, "rebase-apply"))) {
|
|
4143
4105
|
return { midOp: true, kind: "rebase" };
|
|
4144
4106
|
}
|
|
4145
|
-
if (fs10.existsSync(
|
|
4107
|
+
if (fs10.existsSync(path7.join(gitDir, "CHERRY_PICK_HEAD"))) {
|
|
4146
4108
|
return { midOp: true, kind: "cherry-pick" };
|
|
4147
4109
|
}
|
|
4148
|
-
if (fs10.existsSync(
|
|
4110
|
+
if (fs10.existsSync(path7.join(gitDir, "REVERT_HEAD"))) {
|
|
4149
4111
|
return { midOp: true, kind: "revert" };
|
|
4150
4112
|
}
|
|
4151
4113
|
return { midOp: false };
|
|
@@ -4357,14 +4319,14 @@ var GitIntegration = class {
|
|
|
4357
4319
|
{ cwd: repoRoot }
|
|
4358
4320
|
);
|
|
4359
4321
|
const metadataGitConfig = metaConfig.exitCode === 0 && metaConfig.stdout.trim().length > 0;
|
|
4360
|
-
const attrFilePath =
|
|
4322
|
+
const attrFilePath = path7.join(repoRoot, ".gitattributes");
|
|
4361
4323
|
const attrContent = fs10.existsSync(attrFilePath) ? fs10.readFileSync(attrFilePath, "utf-8") : "";
|
|
4362
4324
|
const gitattributes = attrContent.includes("merge=sops");
|
|
4363
4325
|
const metadataGitattributes = attrContent.includes("merge=clef-metadata");
|
|
4364
4326
|
return { gitConfig, gitattributes, metadataGitConfig, metadataGitattributes };
|
|
4365
4327
|
}
|
|
4366
4328
|
async ensureGitattributes(repoRoot) {
|
|
4367
|
-
const attrPath =
|
|
4329
|
+
const attrPath = path7.join(repoRoot, ".gitattributes");
|
|
4368
4330
|
const existing = fs10.existsSync(attrPath) ? fs10.readFileSync(attrPath, "utf-8") : "";
|
|
4369
4331
|
let newContent = existing;
|
|
4370
4332
|
if (!existing.includes("merge=sops")) {
|
|
@@ -4399,9 +4361,9 @@ ${block}` : block;
|
|
|
4399
4361
|
* @throws {@link GitOperationError} On failure.
|
|
4400
4362
|
*/
|
|
4401
4363
|
async installPreCommitHook(repoRoot) {
|
|
4402
|
-
const hookPath =
|
|
4364
|
+
const hookPath = path7.join(repoRoot, ".git", "hooks", "pre-commit");
|
|
4403
4365
|
try {
|
|
4404
|
-
const hooksDir =
|
|
4366
|
+
const hooksDir = path7.dirname(hookPath);
|
|
4405
4367
|
if (!fs10.existsSync(hooksDir)) {
|
|
4406
4368
|
fs10.mkdirSync(hooksDir, { recursive: true });
|
|
4407
4369
|
}
|
|
@@ -4418,7 +4380,7 @@ ${block}` : block;
|
|
|
4418
4380
|
// src/tx/transaction-manager.ts
|
|
4419
4381
|
var lockfile = __toESM(require_proper_lockfile());
|
|
4420
4382
|
import * as fs11 from "fs";
|
|
4421
|
-
import * as
|
|
4383
|
+
import * as path8 from "path";
|
|
4422
4384
|
|
|
4423
4385
|
// src/tx/errors.ts
|
|
4424
4386
|
var TransactionLockError = class extends Error {
|
|
@@ -4466,15 +4428,15 @@ var TransactionManager = class {
|
|
|
4466
4428
|
async run(repoRoot, opts) {
|
|
4467
4429
|
const shouldCommit = opts.commit !== false;
|
|
4468
4430
|
const allowDirty = opts.allowDirty === true;
|
|
4469
|
-
const clefDir =
|
|
4431
|
+
const clefDir = path8.join(repoRoot, CLEF_DIR);
|
|
4470
4432
|
if (!fs11.existsSync(clefDir)) {
|
|
4471
4433
|
fs11.mkdirSync(clefDir, { recursive: true });
|
|
4472
4434
|
}
|
|
4473
|
-
const clefGitignore =
|
|
4435
|
+
const clefGitignore = path8.join(clefDir, ".gitignore");
|
|
4474
4436
|
if (!fs11.existsSync(clefGitignore)) {
|
|
4475
4437
|
fs11.writeFileSync(clefGitignore, "*\n");
|
|
4476
4438
|
}
|
|
4477
|
-
const lockPath =
|
|
4439
|
+
const lockPath = path8.join(clefDir, LOCK_FILE);
|
|
4478
4440
|
if (!fs11.existsSync(lockPath)) {
|
|
4479
4441
|
fs11.writeFileSync(lockPath, "");
|
|
4480
4442
|
}
|
|
@@ -4612,7 +4574,6 @@ var TransactionManager = class {
|
|
|
4612
4574
|
};
|
|
4613
4575
|
|
|
4614
4576
|
// src/sops/client.ts
|
|
4615
|
-
var import_write_file_atomic3 = __toESM(require_lib());
|
|
4616
4577
|
import * as fs14 from "fs";
|
|
4617
4578
|
import * as net from "net";
|
|
4618
4579
|
import { randomBytes as randomBytes2 } from "crypto";
|
|
@@ -4620,11 +4581,11 @@ import * as YAML8 from "yaml";
|
|
|
4620
4581
|
|
|
4621
4582
|
// src/sops/resolver.ts
|
|
4622
4583
|
import * as fs13 from "fs";
|
|
4623
|
-
import * as
|
|
4584
|
+
import * as path10 from "path";
|
|
4624
4585
|
|
|
4625
4586
|
// src/sops/bundled.ts
|
|
4626
4587
|
import * as fs12 from "fs";
|
|
4627
|
-
import * as
|
|
4588
|
+
import * as path9 from "path";
|
|
4628
4589
|
function tryBundled() {
|
|
4629
4590
|
const platform = process.platform;
|
|
4630
4591
|
const arch = process.arch;
|
|
@@ -4636,8 +4597,8 @@ function tryBundled() {
|
|
|
4636
4597
|
const binName = platform === "win32" ? "sops.exe" : "sops";
|
|
4637
4598
|
try {
|
|
4638
4599
|
const packageMain = __require.resolve(`${packageName}/package.json`);
|
|
4639
|
-
const packageDir =
|
|
4640
|
-
const binPath =
|
|
4600
|
+
const packageDir = path9.dirname(packageMain);
|
|
4601
|
+
const binPath = path9.join(packageDir, "bin", binName);
|
|
4641
4602
|
return fs12.existsSync(binPath) ? binPath : null;
|
|
4642
4603
|
} catch {
|
|
4643
4604
|
return null;
|
|
@@ -4646,7 +4607,7 @@ function tryBundled() {
|
|
|
4646
4607
|
|
|
4647
4608
|
// src/sops/resolver.ts
|
|
4648
4609
|
function validateSopsPath(candidate) {
|
|
4649
|
-
if (!
|
|
4610
|
+
if (!path10.isAbsolute(candidate)) {
|
|
4650
4611
|
throw new Error(`CLEF_SOPS_PATH must be an absolute path, got '${candidate}'.`);
|
|
4651
4612
|
}
|
|
4652
4613
|
const segments = candidate.split(/[/\\]/);
|
|
@@ -4825,6 +4786,17 @@ function isClefHsmArn(arn) {
|
|
|
4825
4786
|
function formatFromPath(filePath) {
|
|
4826
4787
|
return filePath.endsWith(".json") ? "json" : "yaml";
|
|
4827
4788
|
}
|
|
4789
|
+
async function openInputPipe(content) {
|
|
4790
|
+
if (process.platform === "win32") {
|
|
4791
|
+
const pipe = await openWindowsInputPipe(content);
|
|
4792
|
+
return { inputArg: pipe.inputArg, cleanup: pipe.cleanup };
|
|
4793
|
+
}
|
|
4794
|
+
return { inputArg: "/dev/stdin", cleanup: () => {
|
|
4795
|
+
}, runnerStdin: content };
|
|
4796
|
+
}
|
|
4797
|
+
function nullConfigPath() {
|
|
4798
|
+
return process.platform === "win32" ? "NUL" : "/dev/null";
|
|
4799
|
+
}
|
|
4828
4800
|
function openWindowsInputPipe(content) {
|
|
4829
4801
|
const pipeName = `\\\\.\\pipe\\clef-sops-${randomBytes2(8).toString("hex")}`;
|
|
4830
4802
|
return new Promise((resolve2, reject) => {
|
|
@@ -4872,6 +4844,10 @@ var SopsClient = class {
|
|
|
4872
4844
|
runner;
|
|
4873
4845
|
ageKeyFile;
|
|
4874
4846
|
ageKey;
|
|
4847
|
+
/** {@link EncryptionBackend} identifier. */
|
|
4848
|
+
id = "sops";
|
|
4849
|
+
/** {@link EncryptionBackend} short description (used by `clef doctor`). */
|
|
4850
|
+
description = "SOPS-based encryption via the bundled `sops` binary";
|
|
4875
4851
|
sopsCommand;
|
|
4876
4852
|
keyserviceArgs;
|
|
4877
4853
|
buildSopsEnv() {
|
|
@@ -4885,14 +4861,18 @@ var SopsClient = class {
|
|
|
4885
4861
|
return Object.keys(env).length > 0 ? env : void 0;
|
|
4886
4862
|
}
|
|
4887
4863
|
/**
|
|
4888
|
-
* Decrypt a SOPS-encrypted file
|
|
4864
|
+
* Decrypt a SOPS-encrypted file by path. The only remaining file-path
|
|
4865
|
+
* entry point on this class — kept for the merge driver, which
|
|
4866
|
+
* receives temp filesystem paths from git that don't map onto a
|
|
4867
|
+
* `CellRef`. Production `SecretSource` consumers should call
|
|
4868
|
+
* `source.readCell` instead.
|
|
4889
4869
|
*
|
|
4890
4870
|
* @param filePath - Path to the `.enc.yaml` or `.enc.json` file.
|
|
4891
4871
|
* @returns {@link DecryptedFile} with plaintext values in memory only.
|
|
4892
4872
|
* @throws {@link SopsKeyNotFoundError} If no matching decryption key is available.
|
|
4893
4873
|
* @throws {@link SopsDecryptionError} On any other decryption failure.
|
|
4894
4874
|
*/
|
|
4895
|
-
async
|
|
4875
|
+
async decryptFile(filePath) {
|
|
4896
4876
|
await assertSops(this.runner, this.sopsCommand);
|
|
4897
4877
|
const fmt = formatFromPath(filePath);
|
|
4898
4878
|
const env = this.buildSopsEnv();
|
|
@@ -4928,170 +4908,9 @@ var SopsClient = class {
|
|
|
4928
4908
|
for (const [key, value] of Object.entries(parsed)) {
|
|
4929
4909
|
values[key] = String(value);
|
|
4930
4910
|
}
|
|
4931
|
-
const metadata =
|
|
4911
|
+
const metadata = this.parseMetadataFromFile(filePath);
|
|
4932
4912
|
return { values, metadata };
|
|
4933
4913
|
}
|
|
4934
|
-
/**
|
|
4935
|
-
* Encrypt a key/value map and write it to an encrypted SOPS file.
|
|
4936
|
-
*
|
|
4937
|
-
* @param filePath - Destination path for the encrypted file.
|
|
4938
|
-
* @param values - Flat key/value map to encrypt.
|
|
4939
|
-
* @param manifest - Manifest used to determine the encryption backend and key configuration.
|
|
4940
|
-
* @param environment - Optional environment name. When provided, per-env backend overrides
|
|
4941
|
-
* are resolved from the manifest. When omitted, the global `sops.default_backend` is used.
|
|
4942
|
-
* @throws {@link SopsEncryptionError} On encryption or write failure.
|
|
4943
|
-
*/
|
|
4944
|
-
async encrypt(filePath, values, manifest, environment) {
|
|
4945
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
4946
|
-
const fmt = formatFromPath(filePath);
|
|
4947
|
-
const content = fmt === "json" ? JSON.stringify(values, null, 2) : YAML8.stringify(values);
|
|
4948
|
-
const args = this.buildEncryptArgs(filePath, manifest, environment);
|
|
4949
|
-
const env = this.buildSopsEnv();
|
|
4950
|
-
let inputArg;
|
|
4951
|
-
let pipeCleanup;
|
|
4952
|
-
if (process.platform === "win32") {
|
|
4953
|
-
const pipe = await openWindowsInputPipe(content);
|
|
4954
|
-
inputArg = pipe.inputArg;
|
|
4955
|
-
pipeCleanup = pipe.cleanup;
|
|
4956
|
-
} else {
|
|
4957
|
-
inputArg = "/dev/stdin";
|
|
4958
|
-
}
|
|
4959
|
-
let result;
|
|
4960
|
-
try {
|
|
4961
|
-
const configPath = process.platform === "win32" ? "NUL" : "/dev/null";
|
|
4962
|
-
result = await this.runner.run(
|
|
4963
|
-
this.sopsCommand,
|
|
4964
|
-
[
|
|
4965
|
-
"--config",
|
|
4966
|
-
configPath,
|
|
4967
|
-
"encrypt",
|
|
4968
|
-
...this.keyserviceArgs,
|
|
4969
|
-
...args,
|
|
4970
|
-
"--input-type",
|
|
4971
|
-
fmt,
|
|
4972
|
-
"--output-type",
|
|
4973
|
-
fmt,
|
|
4974
|
-
"--filename-override",
|
|
4975
|
-
filePath,
|
|
4976
|
-
inputArg
|
|
4977
|
-
],
|
|
4978
|
-
{
|
|
4979
|
-
// stdin is still piped on Unix (/dev/stdin reads from it);
|
|
4980
|
-
// on Windows the named pipe server feeds content directly.
|
|
4981
|
-
...process.platform !== "win32" ? { stdin: content } : {},
|
|
4982
|
-
...env ? { env } : {}
|
|
4983
|
-
}
|
|
4984
|
-
);
|
|
4985
|
-
} finally {
|
|
4986
|
-
pipeCleanup?.();
|
|
4987
|
-
}
|
|
4988
|
-
if (result.exitCode !== 0) {
|
|
4989
|
-
throw new SopsEncryptionError(
|
|
4990
|
-
`Failed to encrypt '${filePath}': ${result.stderr.trim()}`,
|
|
4991
|
-
filePath
|
|
4992
|
-
);
|
|
4993
|
-
}
|
|
4994
|
-
try {
|
|
4995
|
-
await (0, import_write_file_atomic3.default)(filePath, result.stdout);
|
|
4996
|
-
} catch (err) {
|
|
4997
|
-
throw new SopsEncryptionError(
|
|
4998
|
-
`Failed to write encrypted data to '${filePath}': ${err.message}`,
|
|
4999
|
-
filePath
|
|
5000
|
-
);
|
|
5001
|
-
}
|
|
5002
|
-
}
|
|
5003
|
-
/**
|
|
5004
|
-
* Rotate encryption by adding a new age recipient key to an existing SOPS file.
|
|
5005
|
-
*
|
|
5006
|
-
* @param filePath - Path to the encrypted file to re-encrypt.
|
|
5007
|
-
* @param newKey - New age public key to add as a recipient.
|
|
5008
|
-
* @throws {@link SopsEncryptionError} On failure.
|
|
5009
|
-
*/
|
|
5010
|
-
async reEncrypt(filePath, newKey) {
|
|
5011
|
-
await this.addRecipient(filePath, newKey);
|
|
5012
|
-
}
|
|
5013
|
-
/**
|
|
5014
|
-
* Add an age recipient to an existing SOPS file.
|
|
5015
|
-
*
|
|
5016
|
-
* @param filePath - Path to the encrypted file.
|
|
5017
|
-
* @param key - age public key to add as a recipient.
|
|
5018
|
-
* @throws {@link SopsEncryptionError} On failure.
|
|
5019
|
-
*/
|
|
5020
|
-
async addRecipient(filePath, key) {
|
|
5021
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
5022
|
-
const env = this.buildSopsEnv();
|
|
5023
|
-
const result = await this.runner.run(
|
|
5024
|
-
this.sopsCommand,
|
|
5025
|
-
["rotate", ...this.keyserviceArgs, "-i", "--add-age", key, filePath],
|
|
5026
|
-
{
|
|
5027
|
-
...env ? { env } : {}
|
|
5028
|
-
}
|
|
5029
|
-
);
|
|
5030
|
-
if (result.exitCode !== 0) {
|
|
5031
|
-
throw new SopsEncryptionError(
|
|
5032
|
-
`Failed to add recipient to '${filePath}': ${result.stderr.trim()}`,
|
|
5033
|
-
filePath
|
|
5034
|
-
);
|
|
5035
|
-
}
|
|
5036
|
-
}
|
|
5037
|
-
/**
|
|
5038
|
-
* Remove an age recipient from an existing SOPS file.
|
|
5039
|
-
*
|
|
5040
|
-
* @param filePath - Path to the encrypted file.
|
|
5041
|
-
* @param key - age public key to remove.
|
|
5042
|
-
* @throws {@link SopsEncryptionError} On failure.
|
|
5043
|
-
*/
|
|
5044
|
-
async removeRecipient(filePath, key) {
|
|
5045
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
5046
|
-
const env = this.buildSopsEnv();
|
|
5047
|
-
const result = await this.runner.run(
|
|
5048
|
-
this.sopsCommand,
|
|
5049
|
-
["rotate", ...this.keyserviceArgs, "-i", "--rm-age", key, filePath],
|
|
5050
|
-
{
|
|
5051
|
-
...env ? { env } : {}
|
|
5052
|
-
}
|
|
5053
|
-
);
|
|
5054
|
-
if (result.exitCode !== 0) {
|
|
5055
|
-
throw new SopsEncryptionError(
|
|
5056
|
-
`Failed to remove recipient from '${filePath}': ${result.stderr.trim()}`,
|
|
5057
|
-
filePath
|
|
5058
|
-
);
|
|
5059
|
-
}
|
|
5060
|
-
}
|
|
5061
|
-
/**
|
|
5062
|
-
* Check whether a file contains valid SOPS encryption metadata.
|
|
5063
|
-
*
|
|
5064
|
-
* @param filePath - Path to the file to check.
|
|
5065
|
-
* @returns `true` if valid SOPS metadata is present; `false` otherwise. Never throws.
|
|
5066
|
-
*/
|
|
5067
|
-
async validateEncryption(filePath) {
|
|
5068
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
5069
|
-
try {
|
|
5070
|
-
await this.getMetadata(filePath);
|
|
5071
|
-
return true;
|
|
5072
|
-
} catch {
|
|
5073
|
-
return false;
|
|
5074
|
-
}
|
|
5075
|
-
}
|
|
5076
|
-
/**
|
|
5077
|
-
* Extract SOPS metadata (backend, recipients, last-modified timestamp) from an encrypted file
|
|
5078
|
-
* without decrypting its values.
|
|
5079
|
-
*
|
|
5080
|
-
* @param filePath - Path to the encrypted file.
|
|
5081
|
-
* @returns {@link SopsMetadata} parsed from the file's `sops:` block.
|
|
5082
|
-
* @throws {@link SopsDecryptionError} If the file cannot be read or parsed.
|
|
5083
|
-
*/
|
|
5084
|
-
async getMetadata(filePath) {
|
|
5085
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
5086
|
-
const env = this.buildSopsEnv();
|
|
5087
|
-
const result = await this.runner.run(this.sopsCommand, ["filestatus", filePath], {
|
|
5088
|
-
...env ? { env } : {}
|
|
5089
|
-
});
|
|
5090
|
-
if (result.exitCode !== 0) {
|
|
5091
|
-
return this.parseMetadataFromFile(filePath);
|
|
5092
|
-
}
|
|
5093
|
-
return this.parseMetadataFromFile(filePath);
|
|
5094
|
-
}
|
|
5095
4914
|
/**
|
|
5096
4915
|
* Determine whether a decrypt failure is caused by a missing/mismatched key (vs. some other
|
|
5097
4916
|
* SOPS error) without relying on stderr message text.
|
|
@@ -5135,20 +4954,30 @@ var SopsClient = class {
|
|
|
5135
4954
|
filePath
|
|
5136
4955
|
);
|
|
5137
4956
|
}
|
|
4957
|
+
return this.parseMetadataFromContent(content, filePath);
|
|
4958
|
+
}
|
|
4959
|
+
/**
|
|
4960
|
+
* Parse SOPS metadata from a string (no IO). Used by both
|
|
4961
|
+
* `parseMetadataFromFile` (after reading from disk) and the blob-shaped
|
|
4962
|
+
* `getMetadataFromBlob` (which receives ciphertext directly from a
|
|
4963
|
+
* BlobStore). The `label` is woven into error messages so callers can
|
|
4964
|
+
* include the file path or cell ref the content came from.
|
|
4965
|
+
*/
|
|
4966
|
+
parseMetadataFromContent(content, label) {
|
|
5138
4967
|
let parsed;
|
|
5139
4968
|
try {
|
|
5140
4969
|
parsed = YAML8.parse(content);
|
|
5141
4970
|
} catch {
|
|
5142
4971
|
throw new SopsDecryptionError(
|
|
5143
|
-
|
|
5144
|
-
|
|
4972
|
+
`${label} is not valid YAML. Cannot extract SOPS metadata.`,
|
|
4973
|
+
label
|
|
5145
4974
|
);
|
|
5146
4975
|
}
|
|
5147
4976
|
const sops = parsed?.sops;
|
|
5148
4977
|
if (!sops) {
|
|
5149
4978
|
throw new SopsDecryptionError(
|
|
5150
|
-
|
|
5151
|
-
|
|
4979
|
+
`${label} does not contain SOPS metadata. It may not be encrypted.`,
|
|
4980
|
+
label
|
|
5152
4981
|
);
|
|
5153
4982
|
}
|
|
5154
4983
|
const backend = this.detectBackend(sops);
|
|
@@ -5211,59 +5040,303 @@ var SopsClient = class {
|
|
|
5211
5040
|
}
|
|
5212
5041
|
}
|
|
5213
5042
|
}
|
|
5214
|
-
buildEncryptArgs(
|
|
5215
|
-
const args = [];
|
|
5216
|
-
const config = environment ? resolveBackendConfig(manifest, environment) : {
|
|
5217
|
-
backend: manifest.sops.default_backend,
|
|
5218
|
-
aws_kms_arn: manifest.sops.aws_kms_arn,
|
|
5219
|
-
gcp_kms_resource_id: manifest.sops.gcp_kms_resource_id,
|
|
5220
|
-
azure_kv_url: manifest.sops.azure_kv_url,
|
|
5221
|
-
pgp_fingerprint: manifest.sops.pgp_fingerprint,
|
|
5222
|
-
pkcs11_uri: manifest.sops.pkcs11_uri
|
|
5223
|
-
};
|
|
5224
|
-
switch (config.backend) {
|
|
5225
|
-
case "age": {
|
|
5226
|
-
const envRecipients = environment ? resolveRecipientsForEnvironment(manifest, environment) : void 0;
|
|
5227
|
-
const recipients = envRecipients ?? manifest.sops.age?.recipients ?? [];
|
|
5228
|
-
const keys = recipients.map((r) => typeof r === "string" ? r : r.key);
|
|
5229
|
-
if (keys.length > 0) {
|
|
5230
|
-
args.push("--age", keys.join(","));
|
|
5231
|
-
}
|
|
5232
|
-
break;
|
|
5233
|
-
}
|
|
5234
|
-
case "awskms":
|
|
5235
|
-
if (config.aws_kms_arn) {
|
|
5236
|
-
args.push("--kms", config.aws_kms_arn);
|
|
5237
|
-
}
|
|
5238
|
-
break;
|
|
5239
|
-
case "gcpkms":
|
|
5240
|
-
if (config.gcp_kms_resource_id) {
|
|
5241
|
-
args.push("--gcp-kms", config.gcp_kms_resource_id);
|
|
5242
|
-
}
|
|
5243
|
-
break;
|
|
5244
|
-
case "azurekv":
|
|
5245
|
-
if (config.azure_kv_url) {
|
|
5246
|
-
args.push("--azure-kv", config.azure_kv_url);
|
|
5247
|
-
}
|
|
5248
|
-
break;
|
|
5249
|
-
case "pgp":
|
|
5250
|
-
if (config.pgp_fingerprint) {
|
|
5251
|
-
args.push("--pgp", config.pgp_fingerprint);
|
|
5043
|
+
buildEncryptArgs(manifest, environment) {
|
|
5044
|
+
const args = [];
|
|
5045
|
+
const config = environment ? resolveBackendConfig(manifest, environment) : {
|
|
5046
|
+
backend: manifest.sops.default_backend,
|
|
5047
|
+
aws_kms_arn: manifest.sops.aws_kms_arn,
|
|
5048
|
+
gcp_kms_resource_id: manifest.sops.gcp_kms_resource_id,
|
|
5049
|
+
azure_kv_url: manifest.sops.azure_kv_url,
|
|
5050
|
+
pgp_fingerprint: manifest.sops.pgp_fingerprint,
|
|
5051
|
+
pkcs11_uri: manifest.sops.pkcs11_uri
|
|
5052
|
+
};
|
|
5053
|
+
switch (config.backend) {
|
|
5054
|
+
case "age": {
|
|
5055
|
+
const envRecipients = environment ? resolveRecipientsForEnvironment(manifest, environment) : void 0;
|
|
5056
|
+
const recipients = envRecipients ?? manifest.sops.age?.recipients ?? [];
|
|
5057
|
+
const keys = recipients.map((r) => typeof r === "string" ? r : r.key);
|
|
5058
|
+
if (keys.length > 0) {
|
|
5059
|
+
args.push("--age", keys.join(","));
|
|
5060
|
+
}
|
|
5061
|
+
break;
|
|
5062
|
+
}
|
|
5063
|
+
case "awskms":
|
|
5064
|
+
if (config.aws_kms_arn) {
|
|
5065
|
+
args.push("--kms", config.aws_kms_arn);
|
|
5066
|
+
}
|
|
5067
|
+
break;
|
|
5068
|
+
case "gcpkms":
|
|
5069
|
+
if (config.gcp_kms_resource_id) {
|
|
5070
|
+
args.push("--gcp-kms", config.gcp_kms_resource_id);
|
|
5071
|
+
}
|
|
5072
|
+
break;
|
|
5073
|
+
case "azurekv":
|
|
5074
|
+
if (config.azure_kv_url) {
|
|
5075
|
+
args.push("--azure-kv", config.azure_kv_url);
|
|
5076
|
+
}
|
|
5077
|
+
break;
|
|
5078
|
+
case "pgp":
|
|
5079
|
+
if (config.pgp_fingerprint) {
|
|
5080
|
+
args.push("--pgp", config.pgp_fingerprint);
|
|
5081
|
+
}
|
|
5082
|
+
break;
|
|
5083
|
+
case "hsm":
|
|
5084
|
+
if (config.pkcs11_uri) {
|
|
5085
|
+
args.push("--kms", pkcs11UriToSyntheticArn(config.pkcs11_uri));
|
|
5086
|
+
}
|
|
5087
|
+
break;
|
|
5088
|
+
}
|
|
5089
|
+
return args;
|
|
5090
|
+
}
|
|
5091
|
+
// ── Blob-shaped methods ─────────────────────────────────────────────────
|
|
5092
|
+
//
|
|
5093
|
+
// These mirror the file-path methods above but operate on opaque
|
|
5094
|
+
// ciphertext bytes via SOPS' stdin/stdout. They are the substrate-
|
|
5095
|
+
// agnostic primitives used by the `composeSecretSource` factory to
|
|
5096
|
+
// wrap any `BlobStore` (filesystem, postgres, etc.) into a full
|
|
5097
|
+
// `SecretSource`. Plaintext never leaves the SOPS subprocess.
|
|
5098
|
+
/**
|
|
5099
|
+
* {@link EncryptionBackend.decrypt} — decrypt SOPS-encrypted bytes (e.g.
|
|
5100
|
+
* read from a `StorageBackend`) and return plaintext values + metadata.
|
|
5101
|
+
* Plaintext lives only in memory.
|
|
5102
|
+
*/
|
|
5103
|
+
async decrypt(blob, ctx) {
|
|
5104
|
+
await assertSops(this.runner, this.sopsCommand);
|
|
5105
|
+
const env = this.buildSopsEnv();
|
|
5106
|
+
const pipe = await openInputPipe(blob);
|
|
5107
|
+
let result;
|
|
5108
|
+
try {
|
|
5109
|
+
result = await this.runner.run(
|
|
5110
|
+
this.sopsCommand,
|
|
5111
|
+
[
|
|
5112
|
+
"decrypt",
|
|
5113
|
+
...this.keyserviceArgs,
|
|
5114
|
+
"--input-type",
|
|
5115
|
+
ctx.format,
|
|
5116
|
+
"--output-type",
|
|
5117
|
+
ctx.format,
|
|
5118
|
+
pipe.inputArg
|
|
5119
|
+
],
|
|
5120
|
+
{
|
|
5121
|
+
...pipe.runnerStdin !== void 0 ? { stdin: pipe.runnerStdin } : {},
|
|
5122
|
+
...env ? { env } : {}
|
|
5123
|
+
}
|
|
5124
|
+
);
|
|
5125
|
+
} finally {
|
|
5126
|
+
pipe.cleanup();
|
|
5127
|
+
}
|
|
5128
|
+
if (result.exitCode !== 0) {
|
|
5129
|
+
const errorType = await this.classifyDecryptErrorFromContent(blob);
|
|
5130
|
+
if (errorType === "key-not-found") {
|
|
5131
|
+
throw new SopsKeyNotFoundError(`No decryption key found for cell. ${result.stderr.trim()}`);
|
|
5132
|
+
}
|
|
5133
|
+
throw new SopsDecryptionError(`Failed to decrypt cell: ${result.stderr.trim()}`);
|
|
5134
|
+
}
|
|
5135
|
+
let parsed;
|
|
5136
|
+
try {
|
|
5137
|
+
parsed = YAML8.parse(result.stdout) ?? {};
|
|
5138
|
+
} catch {
|
|
5139
|
+
throw new SopsDecryptionError("Decrypted content is not valid YAML.");
|
|
5140
|
+
}
|
|
5141
|
+
const values = {};
|
|
5142
|
+
for (const [key, value] of Object.entries(parsed)) {
|
|
5143
|
+
values[key] = String(value);
|
|
5144
|
+
}
|
|
5145
|
+
const metadata = this.parseMetadataFromContent(blob, "<cell>");
|
|
5146
|
+
return { values, metadata };
|
|
5147
|
+
}
|
|
5148
|
+
/**
|
|
5149
|
+
* {@link EncryptionBackend.encrypt} — encrypt plaintext values into a
|
|
5150
|
+
* SOPS-formatted ciphertext blob. Returns the bytes as a string;
|
|
5151
|
+
* caller (typically a `StorageBackend`) decides where to put them.
|
|
5152
|
+
* Plaintext is piped via stdin only.
|
|
5153
|
+
*/
|
|
5154
|
+
async encrypt(values, ctx) {
|
|
5155
|
+
await assertSops(this.runner, this.sopsCommand);
|
|
5156
|
+
const content = ctx.format === "json" ? JSON.stringify(values, null, 2) : YAML8.stringify(values);
|
|
5157
|
+
const args = this.buildEncryptArgs(ctx.manifest, ctx.environment);
|
|
5158
|
+
const env = this.buildSopsEnv();
|
|
5159
|
+
const pipe = await openInputPipe(content);
|
|
5160
|
+
let result;
|
|
5161
|
+
try {
|
|
5162
|
+
result = await this.runner.run(
|
|
5163
|
+
this.sopsCommand,
|
|
5164
|
+
[
|
|
5165
|
+
"--config",
|
|
5166
|
+
nullConfigPath(),
|
|
5167
|
+
"encrypt",
|
|
5168
|
+
...this.keyserviceArgs,
|
|
5169
|
+
...args,
|
|
5170
|
+
"--input-type",
|
|
5171
|
+
ctx.format,
|
|
5172
|
+
"--output-type",
|
|
5173
|
+
ctx.format,
|
|
5174
|
+
pipe.inputArg
|
|
5175
|
+
],
|
|
5176
|
+
{
|
|
5177
|
+
...pipe.runnerStdin !== void 0 ? { stdin: pipe.runnerStdin } : {},
|
|
5178
|
+
...env ? { env } : {}
|
|
5179
|
+
}
|
|
5180
|
+
);
|
|
5181
|
+
} finally {
|
|
5182
|
+
pipe.cleanup();
|
|
5183
|
+
}
|
|
5184
|
+
if (result.exitCode !== 0) {
|
|
5185
|
+
throw new SopsEncryptionError(`Failed to encrypt cell: ${result.stderr.trim()}`);
|
|
5186
|
+
}
|
|
5187
|
+
return result.stdout;
|
|
5188
|
+
}
|
|
5189
|
+
/**
|
|
5190
|
+
* {@link EncryptionBackend.rotate} — add or remove recipients from an
|
|
5191
|
+
* encrypted SOPS blob via stdin/stdout. Drops the in-place `-i` flag
|
|
5192
|
+
* the deleted file-path-shaped methods used, so SOPS writes the
|
|
5193
|
+
* rotated ciphertext to stdout instead of back to a file. Plaintext
|
|
5194
|
+
* stays inside the SOPS subprocess; no plaintext window exists in
|
|
5195
|
+
* this Node process.
|
|
5196
|
+
*
|
|
5197
|
+
* Single SOPS invocation can both add and remove recipients
|
|
5198
|
+
* simultaneously (matches the CLI flag set).
|
|
5199
|
+
*/
|
|
5200
|
+
async rotate(blob, opts, ctx) {
|
|
5201
|
+
await assertSops(this.runner, this.sopsCommand);
|
|
5202
|
+
const env = this.buildSopsEnv();
|
|
5203
|
+
const pipe = await openInputPipe(blob);
|
|
5204
|
+
const flagArgs = [];
|
|
5205
|
+
if (opts.addAge) flagArgs.push("--add-age", opts.addAge);
|
|
5206
|
+
if (opts.rmAge) flagArgs.push("--rm-age", opts.rmAge);
|
|
5207
|
+
if (opts.addKms) flagArgs.push("--add-kms", opts.addKms);
|
|
5208
|
+
if (opts.rmKms) flagArgs.push("--rm-kms", opts.rmKms);
|
|
5209
|
+
if (opts.addGcpKms) flagArgs.push("--add-gcp-kms", opts.addGcpKms);
|
|
5210
|
+
if (opts.rmGcpKms) flagArgs.push("--rm-gcp-kms", opts.rmGcpKms);
|
|
5211
|
+
if (opts.addAzureKv) flagArgs.push("--add-azure-kv", opts.addAzureKv);
|
|
5212
|
+
if (opts.rmAzureKv) flagArgs.push("--rm-azure-kv", opts.rmAzureKv);
|
|
5213
|
+
if (opts.addPgp) flagArgs.push("--add-pgp", opts.addPgp);
|
|
5214
|
+
if (opts.rmPgp) flagArgs.push("--rm-pgp", opts.rmPgp);
|
|
5215
|
+
let result;
|
|
5216
|
+
try {
|
|
5217
|
+
result = await this.runner.run(
|
|
5218
|
+
this.sopsCommand,
|
|
5219
|
+
[
|
|
5220
|
+
"--config",
|
|
5221
|
+
nullConfigPath(),
|
|
5222
|
+
"rotate",
|
|
5223
|
+
...this.keyserviceArgs,
|
|
5224
|
+
...flagArgs,
|
|
5225
|
+
"--input-type",
|
|
5226
|
+
ctx.format,
|
|
5227
|
+
"--output-type",
|
|
5228
|
+
ctx.format,
|
|
5229
|
+
pipe.inputArg
|
|
5230
|
+
],
|
|
5231
|
+
{
|
|
5232
|
+
...pipe.runnerStdin !== void 0 ? { stdin: pipe.runnerStdin } : {},
|
|
5233
|
+
...env ? { env } : {}
|
|
5234
|
+
}
|
|
5235
|
+
);
|
|
5236
|
+
} finally {
|
|
5237
|
+
pipe.cleanup();
|
|
5238
|
+
}
|
|
5239
|
+
if (result.exitCode !== 0) {
|
|
5240
|
+
throw new SopsEncryptionError(`Failed to rotate cell: ${result.stderr.trim()}`);
|
|
5241
|
+
}
|
|
5242
|
+
return result.stdout;
|
|
5243
|
+
}
|
|
5244
|
+
/**
|
|
5245
|
+
* {@link EncryptionBackend.getMetadata} — extract SOPS metadata from a
|
|
5246
|
+
* ciphertext blob without decrypting. Pure parser, no IO, no
|
|
5247
|
+
* subprocess.
|
|
5248
|
+
*/
|
|
5249
|
+
getMetadata(content) {
|
|
5250
|
+
return this.parseMetadataFromContent(content, "<cell>");
|
|
5251
|
+
}
|
|
5252
|
+
/**
|
|
5253
|
+
* {@link EncryptionBackend.validateEncryption} — whether `content` is a
|
|
5254
|
+
* valid SOPS-encrypted blob (parses + has the `sops:` metadata
|
|
5255
|
+
* block). Never throws.
|
|
5256
|
+
*/
|
|
5257
|
+
validateEncryption(content) {
|
|
5258
|
+
try {
|
|
5259
|
+
this.parseMetadataFromContent(content, "<cell>");
|
|
5260
|
+
return true;
|
|
5261
|
+
} catch {
|
|
5262
|
+
return false;
|
|
5263
|
+
}
|
|
5264
|
+
}
|
|
5265
|
+
/**
|
|
5266
|
+
* Blob-shaped variant of `classifyDecryptError`. Same logic as the
|
|
5267
|
+
* file-path version but reads metadata from the in-memory ciphertext
|
|
5268
|
+
* instead of disk.
|
|
5269
|
+
*/
|
|
5270
|
+
async classifyDecryptErrorFromContent(content) {
|
|
5271
|
+
let metadata;
|
|
5272
|
+
try {
|
|
5273
|
+
metadata = this.parseMetadataFromContent(content, "<cell>");
|
|
5274
|
+
} catch {
|
|
5275
|
+
return "other";
|
|
5276
|
+
}
|
|
5277
|
+
if (metadata.backend !== "age") return "other";
|
|
5278
|
+
if (!this.ageKey && !this.ageKeyFile) return "key-not-found";
|
|
5279
|
+
let keyContent;
|
|
5280
|
+
try {
|
|
5281
|
+
keyContent = this.ageKey ?? fs14.readFileSync(this.ageKeyFile, "utf-8");
|
|
5282
|
+
} catch {
|
|
5283
|
+
return "key-not-found";
|
|
5284
|
+
}
|
|
5285
|
+
const privateKeys = keyContent.split("\n").map((line) => line.trim()).filter((line) => line.startsWith("AGE-SECRET-KEY-"));
|
|
5286
|
+
if (privateKeys.length === 0) return "key-not-found";
|
|
5287
|
+
try {
|
|
5288
|
+
const publicKeys = await Promise.all(privateKeys.map((k) => deriveAgePublicKey(k)));
|
|
5289
|
+
const recipients = new Set(metadata.recipients);
|
|
5290
|
+
return publicKeys.some((pk) => recipients.has(pk)) ? "other" : "key-not-found";
|
|
5291
|
+
} catch {
|
|
5292
|
+
return "other";
|
|
5293
|
+
}
|
|
5294
|
+
}
|
|
5295
|
+
};
|
|
5296
|
+
|
|
5297
|
+
// src/sops/linux-stdin-fifo.ts
|
|
5298
|
+
import * as os from "os";
|
|
5299
|
+
import * as path11 from "path";
|
|
5300
|
+
import { execFileSync, spawn } from "child_process";
|
|
5301
|
+
function shouldUseLinuxStdinFifo() {
|
|
5302
|
+
return process.platform === "linux" && !process.env.JEST_WORKER_ID;
|
|
5303
|
+
}
|
|
5304
|
+
function wrapWithLinuxStdinFifo(runner) {
|
|
5305
|
+
if (!shouldUseLinuxStdinFifo()) return runner;
|
|
5306
|
+
return {
|
|
5307
|
+
run: (cmd, args, opts) => {
|
|
5308
|
+
const stdinIdx = args.indexOf("/dev/stdin");
|
|
5309
|
+
if (stdinIdx < 0 || opts?.stdin === void 0) {
|
|
5310
|
+
return runner.run(cmd, args, opts);
|
|
5311
|
+
}
|
|
5312
|
+
const fifoDir = execFileSync("mktemp", ["-d", path11.join(os.tmpdir(), "clef-fifo-XXXXXX")]).toString().trim();
|
|
5313
|
+
const fifoPath = path11.join(fifoDir, "input");
|
|
5314
|
+
execFileSync("mkfifo", [fifoPath]);
|
|
5315
|
+
const writer = spawn("dd", [`of=${fifoPath}`, "status=none"], {
|
|
5316
|
+
stdio: ["pipe", "ignore", "ignore"]
|
|
5317
|
+
});
|
|
5318
|
+
writer.stdin.write(opts.stdin);
|
|
5319
|
+
writer.stdin.end();
|
|
5320
|
+
const patchedArgs = [...args];
|
|
5321
|
+
patchedArgs[stdinIdx] = fifoPath;
|
|
5322
|
+
const { stdin: _stdin, ...restOpts } = opts;
|
|
5323
|
+
return runner.run(cmd, patchedArgs, restOpts).finally(() => {
|
|
5324
|
+
try {
|
|
5325
|
+
writer.kill();
|
|
5326
|
+
} catch {
|
|
5252
5327
|
}
|
|
5253
|
-
|
|
5254
|
-
|
|
5255
|
-
|
|
5256
|
-
args.push("--kms", pkcs11UriToSyntheticArn(config.pkcs11_uri));
|
|
5328
|
+
try {
|
|
5329
|
+
execFileSync("rm", ["-rf", fifoDir]);
|
|
5330
|
+
} catch {
|
|
5257
5331
|
}
|
|
5258
|
-
|
|
5332
|
+
});
|
|
5259
5333
|
}
|
|
5260
|
-
|
|
5261
|
-
|
|
5262
|
-
};
|
|
5334
|
+
};
|
|
5335
|
+
}
|
|
5263
5336
|
|
|
5264
5337
|
// src/hsm/bundled.ts
|
|
5265
5338
|
import * as fs15 from "fs";
|
|
5266
|
-
import * as
|
|
5339
|
+
import * as path12 from "path";
|
|
5267
5340
|
function tryBundledKeyservice() {
|
|
5268
5341
|
const platform = process.platform;
|
|
5269
5342
|
const arch = process.arch;
|
|
@@ -5275,8 +5348,8 @@ function tryBundledKeyservice() {
|
|
|
5275
5348
|
const binName = "clef-keyservice";
|
|
5276
5349
|
try {
|
|
5277
5350
|
const packageMain = __require.resolve(`${packageName}/package.json`);
|
|
5278
|
-
const packageDir =
|
|
5279
|
-
const binPath =
|
|
5351
|
+
const packageDir = path12.dirname(packageMain);
|
|
5352
|
+
const binPath = path12.join(packageDir, "bin", binName);
|
|
5280
5353
|
return fs15.existsSync(binPath) ? binPath : null;
|
|
5281
5354
|
} catch {
|
|
5282
5355
|
return null;
|
|
@@ -5285,9 +5358,9 @@ function tryBundledKeyservice() {
|
|
|
5285
5358
|
|
|
5286
5359
|
// src/hsm/resolver.ts
|
|
5287
5360
|
import * as fs16 from "fs";
|
|
5288
|
-
import * as
|
|
5361
|
+
import * as path13 from "path";
|
|
5289
5362
|
function validateKeyservicePath(candidate) {
|
|
5290
|
-
if (!
|
|
5363
|
+
if (!path13.isAbsolute(candidate)) {
|
|
5291
5364
|
throw new Error(`CLEF_KEYSERVICE_PATH must be an absolute path, got '${candidate}'.`);
|
|
5292
5365
|
}
|
|
5293
5366
|
const segments = candidate.split(/[/\\]/);
|
|
@@ -5322,7 +5395,7 @@ function resetKeyserviceResolution() {
|
|
|
5322
5395
|
}
|
|
5323
5396
|
|
|
5324
5397
|
// src/hsm/keyservice.ts
|
|
5325
|
-
import { spawn } from "child_process";
|
|
5398
|
+
import { spawn as spawn2 } from "child_process";
|
|
5326
5399
|
import * as readline from "readline";
|
|
5327
5400
|
var PORT_REGEX = /^PORT=(\d+)$/;
|
|
5328
5401
|
var STARTUP_TIMEOUT_MS = 5e3;
|
|
@@ -5340,7 +5413,7 @@ async function spawnKeyservice(options) {
|
|
|
5340
5413
|
...options.pin ? { CLEF_PKCS11_PIN: options.pin } : {},
|
|
5341
5414
|
...options.pinFile ? { CLEF_PKCS11_PIN_FILE: options.pinFile } : {}
|
|
5342
5415
|
};
|
|
5343
|
-
const child =
|
|
5416
|
+
const child = spawn2(options.binaryPath, args, {
|
|
5344
5417
|
stdio: ["ignore", "pipe", "pipe"],
|
|
5345
5418
|
env: childEnv
|
|
5346
5419
|
});
|
|
@@ -5416,16 +5489,16 @@ function killGracefully(child) {
|
|
|
5416
5489
|
}
|
|
5417
5490
|
|
|
5418
5491
|
// src/lint/runner.ts
|
|
5419
|
-
import * as
|
|
5492
|
+
import * as path14 from "path";
|
|
5420
5493
|
var LintRunner = class {
|
|
5421
|
-
constructor(matrixManager, schemaValidator,
|
|
5494
|
+
constructor(matrixManager, schemaValidator, source) {
|
|
5422
5495
|
this.matrixManager = matrixManager;
|
|
5423
5496
|
this.schemaValidator = schemaValidator;
|
|
5424
|
-
this.
|
|
5497
|
+
this.source = source;
|
|
5425
5498
|
}
|
|
5426
5499
|
matrixManager;
|
|
5427
5500
|
schemaValidator;
|
|
5428
|
-
|
|
5501
|
+
source;
|
|
5429
5502
|
/**
|
|
5430
5503
|
* Lint the entire matrix: check missing files, schema errors, SOPS integrity,
|
|
5431
5504
|
* single-recipient warnings, and cross-environment key drift.
|
|
@@ -5452,8 +5525,9 @@ var LintRunner = class {
|
|
|
5452
5525
|
fileCount = existingCells.length;
|
|
5453
5526
|
const namespaceKeys = {};
|
|
5454
5527
|
for (const cell of existingCells) {
|
|
5528
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
5455
5529
|
try {
|
|
5456
|
-
const isValid = await this.
|
|
5530
|
+
const isValid = await this.source.validateEncryption(ref);
|
|
5457
5531
|
if (!isValid) {
|
|
5458
5532
|
issues.push({
|
|
5459
5533
|
severity: "error",
|
|
@@ -5474,7 +5548,7 @@ var LintRunner = class {
|
|
|
5474
5548
|
continue;
|
|
5475
5549
|
}
|
|
5476
5550
|
try {
|
|
5477
|
-
const decrypted = await this.
|
|
5551
|
+
const decrypted = await this.source.readCell(ref);
|
|
5478
5552
|
const keys = Object.keys(decrypted.values);
|
|
5479
5553
|
if (!namespaceKeys[cell.namespace]) {
|
|
5480
5554
|
namespaceKeys[cell.namespace] = {};
|
|
@@ -5519,7 +5593,7 @@ var LintRunner = class {
|
|
|
5519
5593
|
}
|
|
5520
5594
|
const ns = manifest.namespaces.find((n) => n.name === cell.namespace);
|
|
5521
5595
|
if (ns?.schema) {
|
|
5522
|
-
const schemaPath =
|
|
5596
|
+
const schemaPath = path14.join(repoRoot, ns.schema);
|
|
5523
5597
|
try {
|
|
5524
5598
|
const schema = this.schemaValidator.loadSchema(schemaPath);
|
|
5525
5599
|
const result = this.schemaValidator.validate(decrypted.values, schema);
|
|
@@ -5562,7 +5636,8 @@ var LintRunner = class {
|
|
|
5562
5636
|
}
|
|
5563
5637
|
}
|
|
5564
5638
|
try {
|
|
5565
|
-
const
|
|
5639
|
+
const meta = await this.source.getPendingMetadata(ref);
|
|
5640
|
+
const pendingKeys = meta.pending.map((p) => p.key);
|
|
5566
5641
|
pendingCount += pendingKeys.length;
|
|
5567
5642
|
for (const pendingKey of pendingKeys) {
|
|
5568
5643
|
issues.push({
|
|
@@ -5615,7 +5690,6 @@ var LintRunner = class {
|
|
|
5615
5690
|
const siIssues = await this.lintServiceIdentities(
|
|
5616
5691
|
manifest.service_identities,
|
|
5617
5692
|
manifest,
|
|
5618
|
-
repoRoot,
|
|
5619
5693
|
existingCells
|
|
5620
5694
|
);
|
|
5621
5695
|
issues.push(...siIssues);
|
|
@@ -5625,18 +5699,27 @@ var LintRunner = class {
|
|
|
5625
5699
|
return { issues, fileCount: fileCount + missingCells.length, pendingCount };
|
|
5626
5700
|
}
|
|
5627
5701
|
/**
|
|
5628
|
-
* Cross-reference
|
|
5702
|
+
* Cross-reference cell metadata against the cipher's plaintext key
|
|
5629
5703
|
* names for each existing cell. Reports orphan rotation records and
|
|
5630
|
-
* dual-state (pending + rotation) inconsistencies. Uses
|
|
5631
|
-
*
|
|
5704
|
+
* dual-state (pending + rotation) inconsistencies. Uses the source's
|
|
5705
|
+
* `listKeys` (no decryption).
|
|
5632
5706
|
*/
|
|
5633
5707
|
async lintMetadataConsistency(cells) {
|
|
5634
5708
|
const issues = [];
|
|
5635
5709
|
for (const cell of cells) {
|
|
5636
|
-
const
|
|
5637
|
-
|
|
5638
|
-
|
|
5639
|
-
|
|
5710
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
5711
|
+
let cipherKeys;
|
|
5712
|
+
try {
|
|
5713
|
+
cipherKeys = new Set(await this.source.listKeys(ref));
|
|
5714
|
+
} catch {
|
|
5715
|
+
continue;
|
|
5716
|
+
}
|
|
5717
|
+
let metadata;
|
|
5718
|
+
try {
|
|
5719
|
+
metadata = await this.source.getPendingMetadata(ref);
|
|
5720
|
+
} catch {
|
|
5721
|
+
continue;
|
|
5722
|
+
}
|
|
5640
5723
|
for (const record of metadata.rotations) {
|
|
5641
5724
|
if (!cipherKeys.has(record.key)) {
|
|
5642
5725
|
issues.push({
|
|
@@ -5667,7 +5750,7 @@ var LintRunner = class {
|
|
|
5667
5750
|
/**
|
|
5668
5751
|
* Lint service identity configurations for drift issues.
|
|
5669
5752
|
*/
|
|
5670
|
-
async lintServiceIdentities(identities, manifest,
|
|
5753
|
+
async lintServiceIdentities(identities, manifest, existingCells) {
|
|
5671
5754
|
const issues = [];
|
|
5672
5755
|
const declaredEnvNames = new Set(manifest.environments.map((e) => e.name));
|
|
5673
5756
|
const declaredNsNames = new Set(manifest.namespaces.map((ns) => ns.name));
|
|
@@ -5708,9 +5791,10 @@ var LintRunner = class {
|
|
|
5708
5791
|
const envConfig = si.environments[cell.environment];
|
|
5709
5792
|
if (!envConfig) continue;
|
|
5710
5793
|
if (!envConfig.recipient) continue;
|
|
5794
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
5711
5795
|
if (si.namespaces.includes(cell.namespace)) {
|
|
5712
5796
|
try {
|
|
5713
|
-
const metadata = await this.
|
|
5797
|
+
const metadata = await this.source.getCellMetadata(ref);
|
|
5714
5798
|
if (!metadata.recipients.includes(envConfig.recipient)) {
|
|
5715
5799
|
issues.push({
|
|
5716
5800
|
severity: "warning",
|
|
@@ -5724,7 +5808,7 @@ var LintRunner = class {
|
|
|
5724
5808
|
}
|
|
5725
5809
|
} else {
|
|
5726
5810
|
try {
|
|
5727
|
-
const metadata = await this.
|
|
5811
|
+
const metadata = await this.source.getCellMetadata(ref);
|
|
5728
5812
|
if (metadata.recipients.includes(envConfig.recipient)) {
|
|
5729
5813
|
issues.push({
|
|
5730
5814
|
severity: "warning",
|
|
@@ -5750,7 +5834,10 @@ var LintRunner = class {
|
|
|
5750
5834
|
async fix(manifest, repoRoot) {
|
|
5751
5835
|
const missingCells = this.matrixManager.detectMissingCells(manifest, repoRoot);
|
|
5752
5836
|
for (const cell of missingCells) {
|
|
5753
|
-
await this.
|
|
5837
|
+
await this.source.scaffoldCell(
|
|
5838
|
+
{ namespace: cell.namespace, environment: cell.environment },
|
|
5839
|
+
manifest
|
|
5840
|
+
);
|
|
5754
5841
|
}
|
|
5755
5842
|
return this.run(manifest, repoRoot);
|
|
5756
5843
|
}
|
|
@@ -5805,15 +5892,12 @@ Use 'clef exec' to inject secrets directly into a process, or 'clef export --for
|
|
|
5805
5892
|
}
|
|
5806
5893
|
};
|
|
5807
5894
|
|
|
5808
|
-
// src/import/index.ts
|
|
5809
|
-
import * as path17 from "path";
|
|
5810
|
-
|
|
5811
5895
|
// src/import/parsers.ts
|
|
5812
|
-
import * as
|
|
5896
|
+
import * as path15 from "path";
|
|
5813
5897
|
import * as YAML9 from "yaml";
|
|
5814
5898
|
function detectFormat(filePath, content) {
|
|
5815
|
-
const base =
|
|
5816
|
-
const ext =
|
|
5899
|
+
const base = path15.basename(filePath);
|
|
5900
|
+
const ext = path15.extname(filePath).toLowerCase();
|
|
5817
5901
|
if (base === ".env" || base.startsWith(".env.")) {
|
|
5818
5902
|
return "dotenv";
|
|
5819
5903
|
}
|
|
@@ -5963,11 +6047,11 @@ function parse9(content, format, filePath) {
|
|
|
5963
6047
|
|
|
5964
6048
|
// src/import/index.ts
|
|
5965
6049
|
var ImportRunner = class {
|
|
5966
|
-
constructor(
|
|
5967
|
-
this.
|
|
6050
|
+
constructor(source, tx) {
|
|
6051
|
+
this.source = source;
|
|
5968
6052
|
this.tx = tx;
|
|
5969
6053
|
}
|
|
5970
|
-
|
|
6054
|
+
source;
|
|
5971
6055
|
tx;
|
|
5972
6056
|
/**
|
|
5973
6057
|
* Parse a source file and import its key/value pairs into a target `namespace/environment` cell.
|
|
@@ -5981,10 +6065,8 @@ var ImportRunner = class {
|
|
|
5981
6065
|
*/
|
|
5982
6066
|
async import(target, sourcePath, content, manifest, repoRoot, options) {
|
|
5983
6067
|
const [ns, env] = target.split("/");
|
|
5984
|
-
const
|
|
5985
|
-
|
|
5986
|
-
manifest.file_pattern.replace("{namespace}", ns).replace("{environment}", env)
|
|
5987
|
-
);
|
|
6068
|
+
const ref = { namespace: ns, environment: env };
|
|
6069
|
+
const relCellPath = manifest.file_pattern.replace("{namespace}", ns).replace("{environment}", env);
|
|
5988
6070
|
const parsed = parse9(content, options.format ?? "auto", sourcePath ?? "");
|
|
5989
6071
|
let candidates = Object.entries(parsed.pairs);
|
|
5990
6072
|
if (options.prefix) {
|
|
@@ -6002,7 +6084,7 @@ var ImportRunner = class {
|
|
|
6002
6084
|
if (options.dryRun) {
|
|
6003
6085
|
let existingKeys;
|
|
6004
6086
|
try {
|
|
6005
|
-
const decrypted2 = await this.
|
|
6087
|
+
const decrypted2 = await this.source.readCell(ref);
|
|
6006
6088
|
existingKeys = new Set(Object.keys(decrypted2.values));
|
|
6007
6089
|
} catch {
|
|
6008
6090
|
existingKeys = /* @__PURE__ */ new Set();
|
|
@@ -6016,7 +6098,7 @@ var ImportRunner = class {
|
|
|
6016
6098
|
}
|
|
6017
6099
|
return { imported, skipped, failed, warnings, dryRun: true };
|
|
6018
6100
|
}
|
|
6019
|
-
const decrypted = await this.
|
|
6101
|
+
const decrypted = await this.source.readCell(ref);
|
|
6020
6102
|
const newValues = { ...decrypted.values };
|
|
6021
6103
|
const rotatedKeys = [];
|
|
6022
6104
|
for (const [key, value] of candidates) {
|
|
@@ -6033,7 +6115,6 @@ var ImportRunner = class {
|
|
|
6033
6115
|
if (imported.length === 0) {
|
|
6034
6116
|
return { imported, skipped, failed, warnings, dryRun: false };
|
|
6035
6117
|
}
|
|
6036
|
-
const relCellPath = path17.relative(repoRoot, filePath);
|
|
6037
6118
|
const relMetaPath = relCellPath.replace(/\.enc\.(yaml|json)$/, ".clef-meta.yaml");
|
|
6038
6119
|
await this.tx.run(repoRoot, {
|
|
6039
6120
|
description: `clef import ${target}: ${imported.length} key(s)`,
|
|
@@ -6041,9 +6122,9 @@ var ImportRunner = class {
|
|
|
6041
6122
|
// callback are staged and rolled back atomically with the ciphertext.
|
|
6042
6123
|
paths: [relCellPath, relMetaPath],
|
|
6043
6124
|
mutate: async () => {
|
|
6044
|
-
await this.
|
|
6125
|
+
await this.source.writeCell(ref, newValues);
|
|
6045
6126
|
if (options.rotatedBy && rotatedKeys.length > 0) {
|
|
6046
|
-
await recordRotation(
|
|
6127
|
+
await this.source.recordRotation(ref, rotatedKeys, options.rotatedBy);
|
|
6047
6128
|
}
|
|
6048
6129
|
}
|
|
6049
6130
|
});
|
|
@@ -6052,7 +6133,7 @@ var ImportRunner = class {
|
|
|
6052
6133
|
};
|
|
6053
6134
|
|
|
6054
6135
|
// src/recipients/index.ts
|
|
6055
|
-
import * as
|
|
6136
|
+
import * as path16 from "path";
|
|
6056
6137
|
function parseRecipientEntry(entry) {
|
|
6057
6138
|
if (typeof entry === "string") {
|
|
6058
6139
|
return { key: entry };
|
|
@@ -6120,12 +6201,12 @@ function ensureEnvironmentRecipientsArray(doc, envName) {
|
|
|
6120
6201
|
return env.recipients;
|
|
6121
6202
|
}
|
|
6122
6203
|
var RecipientManager = class {
|
|
6123
|
-
constructor(
|
|
6124
|
-
this.
|
|
6204
|
+
constructor(source, matrixManager, tx) {
|
|
6205
|
+
this.source = source;
|
|
6125
6206
|
this.matrixManager = matrixManager;
|
|
6126
6207
|
this.tx = tx;
|
|
6127
6208
|
}
|
|
6128
|
-
|
|
6209
|
+
source;
|
|
6129
6210
|
matrixManager;
|
|
6130
6211
|
tx;
|
|
6131
6212
|
/**
|
|
@@ -6180,7 +6261,7 @@ var RecipientManager = class {
|
|
|
6180
6261
|
const reEncryptedFiles = [];
|
|
6181
6262
|
await this.tx.run(repoRoot, {
|
|
6182
6263
|
description: environment ? `clef recipients add ${keyPreview(normalizedKey)} -e ${environment}` : `clef recipients add ${keyPreview(normalizedKey)}`,
|
|
6183
|
-
paths: [...cells.map((c) =>
|
|
6264
|
+
paths: [...cells.map((c) => path16.relative(repoRoot, c.filePath)), CLEF_MANIFEST_FILENAME],
|
|
6184
6265
|
mutate: async () => {
|
|
6185
6266
|
const doc = readManifestYaml(repoRoot);
|
|
6186
6267
|
const recipients = environment ? ensureEnvironmentRecipientsArray(doc, environment) : ensureRecipientsArray(doc);
|
|
@@ -6191,7 +6272,8 @@ var RecipientManager = class {
|
|
|
6191
6272
|
}
|
|
6192
6273
|
writeManifestYaml(repoRoot, doc);
|
|
6193
6274
|
for (const cell of cells) {
|
|
6194
|
-
|
|
6275
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
6276
|
+
await this.source.rotate(ref, { addAge: normalizedKey });
|
|
6195
6277
|
reEncryptedFiles.push(cell.filePath);
|
|
6196
6278
|
}
|
|
6197
6279
|
}
|
|
@@ -6239,7 +6321,7 @@ var RecipientManager = class {
|
|
|
6239
6321
|
const reEncryptedFiles = [];
|
|
6240
6322
|
await this.tx.run(repoRoot, {
|
|
6241
6323
|
description: environment ? `clef recipients remove ${keyPreview(trimmedKey)} -e ${environment}` : `clef recipients remove ${keyPreview(trimmedKey)}`,
|
|
6242
|
-
paths: [...cells.map((c) =>
|
|
6324
|
+
paths: [...cells.map((c) => path16.relative(repoRoot, c.filePath)), CLEF_MANIFEST_FILENAME],
|
|
6243
6325
|
mutate: async () => {
|
|
6244
6326
|
const doc = readManifestYaml(repoRoot);
|
|
6245
6327
|
const recipients = environment ? ensureEnvironmentRecipientsArray(doc, environment) : ensureRecipientsArray(doc);
|
|
@@ -6247,7 +6329,8 @@ var RecipientManager = class {
|
|
|
6247
6329
|
recipients.splice(idx, 1);
|
|
6248
6330
|
writeManifestYaml(repoRoot, doc);
|
|
6249
6331
|
for (const cell of cells) {
|
|
6250
|
-
|
|
6332
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
6333
|
+
await this.source.rotate(ref, { rmAge: trimmedKey });
|
|
6251
6334
|
reEncryptedFiles.push(cell.filePath);
|
|
6252
6335
|
}
|
|
6253
6336
|
}
|
|
@@ -6269,12 +6352,12 @@ var RecipientManager = class {
|
|
|
6269
6352
|
|
|
6270
6353
|
// src/recipients/requests.ts
|
|
6271
6354
|
import * as fs17 from "fs";
|
|
6272
|
-
import * as
|
|
6355
|
+
import * as path17 from "path";
|
|
6273
6356
|
import * as YAML10 from "yaml";
|
|
6274
6357
|
var REQUESTS_FILENAME = ".clef-requests.yaml";
|
|
6275
6358
|
var HEADER_COMMENT2 = "# Pending recipient access requests. Approve with: clef recipients approve <label>\n";
|
|
6276
6359
|
function requestsFilePath(repoRoot) {
|
|
6277
|
-
return
|
|
6360
|
+
return path17.join(repoRoot, REQUESTS_FILENAME);
|
|
6278
6361
|
}
|
|
6279
6362
|
function loadRequests(repoRoot) {
|
|
6280
6363
|
const filePath = requestsFilePath(repoRoot);
|
|
@@ -6349,7 +6432,7 @@ function findInList(requests, identifier) {
|
|
|
6349
6432
|
}
|
|
6350
6433
|
|
|
6351
6434
|
// src/drift/detector.ts
|
|
6352
|
-
import * as
|
|
6435
|
+
import * as path18 from "path";
|
|
6353
6436
|
var DriftDetector = class {
|
|
6354
6437
|
parser = new ManifestParser();
|
|
6355
6438
|
matrix = new MatrixManager();
|
|
@@ -6362,8 +6445,8 @@ var DriftDetector = class {
|
|
|
6362
6445
|
* @returns Drift result with any issues found.
|
|
6363
6446
|
*/
|
|
6364
6447
|
detect(localRoot, remoteRoot, namespaceFilter) {
|
|
6365
|
-
const localManifest = this.parser.parse(
|
|
6366
|
-
const remoteManifest = this.parser.parse(
|
|
6448
|
+
const localManifest = this.parser.parse(path18.join(localRoot, CLEF_MANIFEST_FILENAME));
|
|
6449
|
+
const remoteManifest = this.parser.parse(path18.join(remoteRoot, CLEF_MANIFEST_FILENAME));
|
|
6367
6450
|
const localCells = this.matrix.resolveMatrix(localManifest, localRoot);
|
|
6368
6451
|
const remoteCells = this.matrix.resolveMatrix(remoteManifest, remoteRoot);
|
|
6369
6452
|
const localEnvNames = localManifest.environments.map((e) => e.name);
|
|
@@ -6427,7 +6510,7 @@ var DriftDetector = class {
|
|
|
6427
6510
|
};
|
|
6428
6511
|
|
|
6429
6512
|
// src/report/generator.ts
|
|
6430
|
-
import * as
|
|
6513
|
+
import * as path19 from "path";
|
|
6431
6514
|
|
|
6432
6515
|
// src/report/sanitizer.ts
|
|
6433
6516
|
var ReportSanitizer = class {
|
|
@@ -6564,14 +6647,14 @@ var ReportSanitizer = class {
|
|
|
6564
6647
|
|
|
6565
6648
|
// src/report/generator.ts
|
|
6566
6649
|
var ReportGenerator = class {
|
|
6567
|
-
constructor(runner,
|
|
6650
|
+
constructor(runner, source, matrixManager, schemaValidator) {
|
|
6568
6651
|
this.runner = runner;
|
|
6569
|
-
this.
|
|
6652
|
+
this.source = source;
|
|
6570
6653
|
this.matrixManager = matrixManager;
|
|
6571
6654
|
this.schemaValidator = schemaValidator;
|
|
6572
6655
|
}
|
|
6573
6656
|
runner;
|
|
6574
|
-
|
|
6657
|
+
source;
|
|
6575
6658
|
matrixManager;
|
|
6576
6659
|
schemaValidator;
|
|
6577
6660
|
/**
|
|
@@ -6587,7 +6670,7 @@ var ReportGenerator = class {
|
|
|
6587
6670
|
let manifest = null;
|
|
6588
6671
|
try {
|
|
6589
6672
|
const parser = new ManifestParser();
|
|
6590
|
-
manifest = parser.parse(
|
|
6673
|
+
manifest = parser.parse(path19.join(repoRoot, "clef.yaml"));
|
|
6591
6674
|
} catch {
|
|
6592
6675
|
const emptyManifest = {
|
|
6593
6676
|
manifestVersion: 0,
|
|
@@ -6708,16 +6791,17 @@ var ReportGenerator = class {
|
|
|
6708
6791
|
metadata: null
|
|
6709
6792
|
};
|
|
6710
6793
|
}
|
|
6794
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
6711
6795
|
const keyCount = this.readKeyCount(cell.filePath);
|
|
6712
6796
|
let pendingCount = 0;
|
|
6713
6797
|
try {
|
|
6714
|
-
const
|
|
6715
|
-
pendingCount = pending.length;
|
|
6798
|
+
const meta = await this.source.getPendingMetadata(ref);
|
|
6799
|
+
pendingCount = meta.pending.length;
|
|
6716
6800
|
} catch {
|
|
6717
6801
|
}
|
|
6718
6802
|
let metadata = null;
|
|
6719
6803
|
try {
|
|
6720
|
-
const sopsMetadata = await this.
|
|
6804
|
+
const sopsMetadata = await this.source.getCellMetadata(ref);
|
|
6721
6805
|
metadata = {
|
|
6722
6806
|
backend: sopsMetadata.backend,
|
|
6723
6807
|
recipients: sopsMetadata.recipients,
|
|
@@ -6740,7 +6824,7 @@ var ReportGenerator = class {
|
|
|
6740
6824
|
}
|
|
6741
6825
|
async buildPolicy(manifest, repoRoot) {
|
|
6742
6826
|
try {
|
|
6743
|
-
const lintRunner = new LintRunner(this.matrixManager, this.schemaValidator, this.
|
|
6827
|
+
const lintRunner = new LintRunner(this.matrixManager, this.schemaValidator, this.source);
|
|
6744
6828
|
const lintResult = await lintRunner.run(manifest, repoRoot);
|
|
6745
6829
|
return new ReportSanitizer().sanitize(lintResult.issues);
|
|
6746
6830
|
} catch {
|
|
@@ -7057,9 +7141,9 @@ var SopsMergeDriver = class {
|
|
|
7057
7141
|
*/
|
|
7058
7142
|
async mergeFiles(basePath, oursPath, theirsPath) {
|
|
7059
7143
|
const [baseDecrypted, oursDecrypted, theirsDecrypted] = await Promise.all([
|
|
7060
|
-
this.sopsClient.
|
|
7061
|
-
this.sopsClient.
|
|
7062
|
-
this.sopsClient.
|
|
7144
|
+
this.sopsClient.decryptFile(basePath),
|
|
7145
|
+
this.sopsClient.decryptFile(oursPath),
|
|
7146
|
+
this.sopsClient.decryptFile(theirsPath)
|
|
7063
7147
|
]);
|
|
7064
7148
|
return this.merge(baseDecrypted.values, oursDecrypted.values, theirsDecrypted.values);
|
|
7065
7149
|
}
|
|
@@ -7176,22 +7260,26 @@ function mergeMetadataFiles(_basePath, oursPath, theirsPath) {
|
|
|
7176
7260
|
}
|
|
7177
7261
|
|
|
7178
7262
|
// src/service-identity/manager.ts
|
|
7179
|
-
import * as
|
|
7263
|
+
import * as path20 from "path";
|
|
7180
7264
|
var ServiceIdentityManager = class {
|
|
7181
|
-
constructor(
|
|
7182
|
-
this.
|
|
7265
|
+
constructor(source, matrixManager, tx) {
|
|
7266
|
+
this.source = source;
|
|
7183
7267
|
this.matrixManager = matrixManager;
|
|
7184
7268
|
this.tx = tx;
|
|
7185
7269
|
}
|
|
7186
|
-
|
|
7270
|
+
source;
|
|
7187
7271
|
matrixManager;
|
|
7188
7272
|
tx;
|
|
7273
|
+
/** Helper: cell → ref for the source seam. */
|
|
7274
|
+
ref(cell) {
|
|
7275
|
+
return { namespace: cell.namespace, environment: cell.environment };
|
|
7276
|
+
}
|
|
7189
7277
|
/**
|
|
7190
7278
|
* Compute repo-relative paths for a set of cells plus the manifest. Used
|
|
7191
7279
|
* to seed TransactionManager.run's `paths` argument.
|
|
7192
7280
|
*/
|
|
7193
7281
|
txPaths(repoRoot, cells) {
|
|
7194
|
-
return [...cells.map((c) =>
|
|
7282
|
+
return [...cells.map((c) => path20.relative(repoRoot, c.filePath)), CLEF_MANIFEST_FILENAME];
|
|
7195
7283
|
}
|
|
7196
7284
|
/**
|
|
7197
7285
|
* Create a new service identity with per-environment age key pairs or KMS envelope config.
|
|
@@ -7283,7 +7371,7 @@ var ServiceIdentityManager = class {
|
|
|
7283
7371
|
if (!envConfig?.recipient) continue;
|
|
7284
7372
|
if (isKmsEnvelope(envConfig)) continue;
|
|
7285
7373
|
try {
|
|
7286
|
-
await this.
|
|
7374
|
+
await this.source.rotate(this.ref(cell), { rmAge: envConfig.recipient });
|
|
7287
7375
|
} catch {
|
|
7288
7376
|
}
|
|
7289
7377
|
}
|
|
@@ -7333,7 +7421,7 @@ var ServiceIdentityManager = class {
|
|
|
7333
7421
|
const scopedCells = cells.filter((c) => c.environment === envName);
|
|
7334
7422
|
for (const cell of scopedCells) {
|
|
7335
7423
|
try {
|
|
7336
|
-
await this.
|
|
7424
|
+
await this.source.rotate(this.ref(cell), { rmAge: oldConfig.recipient });
|
|
7337
7425
|
} catch {
|
|
7338
7426
|
}
|
|
7339
7427
|
}
|
|
@@ -7360,7 +7448,7 @@ var ServiceIdentityManager = class {
|
|
|
7360
7448
|
if (isKmsEnvelope(envConfig)) continue;
|
|
7361
7449
|
if (!envConfig.recipient) continue;
|
|
7362
7450
|
try {
|
|
7363
|
-
await this.
|
|
7451
|
+
await this.source.rotate(this.ref(cell), { addAge: envConfig.recipient });
|
|
7364
7452
|
} catch (err) {
|
|
7365
7453
|
const message = err instanceof Error ? err.message : String(err);
|
|
7366
7454
|
if (!message.includes("already")) {
|
|
@@ -7408,7 +7496,7 @@ var ServiceIdentityManager = class {
|
|
|
7408
7496
|
if (isKmsEnvelope(envConfig)) continue;
|
|
7409
7497
|
if (!envConfig.recipient) continue;
|
|
7410
7498
|
try {
|
|
7411
|
-
await this.
|
|
7499
|
+
await this.source.rotate(this.ref(cell), { addAge: envConfig.recipient });
|
|
7412
7500
|
affectedFiles.push(cell.filePath);
|
|
7413
7501
|
} catch (err) {
|
|
7414
7502
|
const message = err instanceof Error ? err.message : String(err);
|
|
@@ -7469,7 +7557,7 @@ var ServiceIdentityManager = class {
|
|
|
7469
7557
|
if (isKmsEnvelope(envConfig)) continue;
|
|
7470
7558
|
if (!envConfig.recipient) continue;
|
|
7471
7559
|
try {
|
|
7472
|
-
await this.
|
|
7560
|
+
await this.source.rotate(this.ref(cell), { rmAge: envConfig.recipient });
|
|
7473
7561
|
affectedFiles.push(cell.filePath);
|
|
7474
7562
|
} catch {
|
|
7475
7563
|
}
|
|
@@ -7538,7 +7626,7 @@ var ServiceIdentityManager = class {
|
|
|
7538
7626
|
if (!identity.pack_only && !isKmsEnvelope(envConfig) && envConfig.recipient) {
|
|
7539
7627
|
for (const cell of cells) {
|
|
7540
7628
|
try {
|
|
7541
|
-
await this.
|
|
7629
|
+
await this.source.rotate(this.ref(cell), { addAge: envConfig.recipient });
|
|
7542
7630
|
} catch (err) {
|
|
7543
7631
|
const message = err instanceof Error ? err.message : String(err);
|
|
7544
7632
|
if (!message.includes("already")) {
|
|
@@ -7615,10 +7703,10 @@ var ServiceIdentityManager = class {
|
|
|
7615
7703
|
const scopedCells = cells.filter((c) => c.environment === envName);
|
|
7616
7704
|
for (const cell of scopedCells) {
|
|
7617
7705
|
try {
|
|
7618
|
-
await this.
|
|
7706
|
+
await this.source.rotate(this.ref(cell), { rmAge: oldRecipient });
|
|
7619
7707
|
} catch {
|
|
7620
7708
|
}
|
|
7621
|
-
await this.
|
|
7709
|
+
await this.source.rotate(this.ref(cell), { addAge: newPublicKey });
|
|
7622
7710
|
}
|
|
7623
7711
|
}
|
|
7624
7712
|
}
|
|
@@ -7677,7 +7765,7 @@ var ServiceIdentityManager = class {
|
|
|
7677
7765
|
if (!envConfig.recipient) continue;
|
|
7678
7766
|
if (si.namespaces.includes(cell.namespace)) {
|
|
7679
7767
|
try {
|
|
7680
|
-
const metadata = await this.
|
|
7768
|
+
const metadata = await this.source.getCellMetadata(this.ref(cell));
|
|
7681
7769
|
if (!metadata.recipients.includes(envConfig.recipient)) {
|
|
7682
7770
|
issues.push({
|
|
7683
7771
|
identity: si.name,
|
|
@@ -7692,7 +7780,7 @@ var ServiceIdentityManager = class {
|
|
|
7692
7780
|
}
|
|
7693
7781
|
} else {
|
|
7694
7782
|
try {
|
|
7695
|
-
const metadata = await this.
|
|
7783
|
+
const metadata = await this.source.getCellMetadata(this.ref(cell));
|
|
7696
7784
|
if (metadata.recipients.includes(envConfig.recipient)) {
|
|
7697
7785
|
issues.push({
|
|
7698
7786
|
identity: si.name,
|
|
@@ -7714,15 +7802,15 @@ var ServiceIdentityManager = class {
|
|
|
7714
7802
|
|
|
7715
7803
|
// src/structure/manager.ts
|
|
7716
7804
|
import * as fs19 from "fs";
|
|
7717
|
-
import * as
|
|
7805
|
+
import * as path21 from "path";
|
|
7718
7806
|
var StructureManager = class {
|
|
7719
|
-
constructor(matrixManager,
|
|
7807
|
+
constructor(matrixManager, buildSource, tx) {
|
|
7720
7808
|
this.matrixManager = matrixManager;
|
|
7721
|
-
this.
|
|
7809
|
+
this.buildSource = buildSource;
|
|
7722
7810
|
this.tx = tx;
|
|
7723
7811
|
}
|
|
7724
7812
|
matrixManager;
|
|
7725
|
-
|
|
7813
|
+
buildSource;
|
|
7726
7814
|
tx;
|
|
7727
7815
|
// ── add ──────────────────────────────────────────────────────────────────
|
|
7728
7816
|
/**
|
|
@@ -7738,7 +7826,7 @@ var StructureManager = class {
|
|
|
7738
7826
|
this.assertValidIdentifier("namespace", name);
|
|
7739
7827
|
const newCellPaths = manifest.environments.map((env) => ({
|
|
7740
7828
|
environment: env.name,
|
|
7741
|
-
filePath:
|
|
7829
|
+
filePath: path21.join(
|
|
7742
7830
|
repoRoot,
|
|
7743
7831
|
manifest.file_pattern.replace("{namespace}", name).replace("{environment}", env.name)
|
|
7744
7832
|
)
|
|
@@ -7746,7 +7834,7 @@ var StructureManager = class {
|
|
|
7746
7834
|
for (const cell of newCellPaths) {
|
|
7747
7835
|
if (fs19.existsSync(cell.filePath)) {
|
|
7748
7836
|
throw new Error(
|
|
7749
|
-
`Cannot add namespace '${name}': file '${
|
|
7837
|
+
`Cannot add namespace '${name}': file '${path21.relative(repoRoot, cell.filePath)}' already exists.`
|
|
7750
7838
|
);
|
|
7751
7839
|
}
|
|
7752
7840
|
}
|
|
@@ -7765,21 +7853,14 @@ var StructureManager = class {
|
|
|
7765
7853
|
await this.tx.run(repoRoot, {
|
|
7766
7854
|
description: `clef namespace add ${name}`,
|
|
7767
7855
|
paths: [
|
|
7768
|
-
...newCellPaths.map((c) =>
|
|
7856
|
+
...newCellPaths.map((c) => path21.relative(repoRoot, c.filePath)),
|
|
7769
7857
|
CLEF_MANIFEST_FILENAME
|
|
7770
7858
|
],
|
|
7771
7859
|
mutate: async () => {
|
|
7860
|
+
const source = this.buildSource(updatedManifest);
|
|
7772
7861
|
for (const cell of newCellPaths) {
|
|
7773
|
-
|
|
7774
|
-
|
|
7775
|
-
namespace: name,
|
|
7776
|
-
environment: cell.environment,
|
|
7777
|
-
filePath: cell.filePath,
|
|
7778
|
-
exists: false
|
|
7779
|
-
},
|
|
7780
|
-
this.encryption,
|
|
7781
|
-
updatedManifest
|
|
7782
|
-
);
|
|
7862
|
+
const ref = { namespace: name, environment: cell.environment };
|
|
7863
|
+
await source.scaffoldCell(ref, updatedManifest);
|
|
7783
7864
|
}
|
|
7784
7865
|
const doc = readManifestYaml(repoRoot);
|
|
7785
7866
|
const namespaces = doc.namespaces;
|
|
@@ -7810,7 +7891,7 @@ var StructureManager = class {
|
|
|
7810
7891
|
this.assertValidIdentifier("environment", name);
|
|
7811
7892
|
const newCellPaths = manifest.namespaces.map((ns) => ({
|
|
7812
7893
|
namespace: ns.name,
|
|
7813
|
-
filePath:
|
|
7894
|
+
filePath: path21.join(
|
|
7814
7895
|
repoRoot,
|
|
7815
7896
|
manifest.file_pattern.replace("{namespace}", ns.name).replace("{environment}", name)
|
|
7816
7897
|
)
|
|
@@ -7818,7 +7899,7 @@ var StructureManager = class {
|
|
|
7818
7899
|
for (const cell of newCellPaths) {
|
|
7819
7900
|
if (fs19.existsSync(cell.filePath)) {
|
|
7820
7901
|
throw new Error(
|
|
7821
|
-
`Cannot add environment '${name}': file '${
|
|
7902
|
+
`Cannot add environment '${name}': file '${path21.relative(repoRoot, cell.filePath)}' already exists.`
|
|
7822
7903
|
);
|
|
7823
7904
|
}
|
|
7824
7905
|
}
|
|
@@ -7837,21 +7918,14 @@ var StructureManager = class {
|
|
|
7837
7918
|
await this.tx.run(repoRoot, {
|
|
7838
7919
|
description: `clef env add ${name}`,
|
|
7839
7920
|
paths: [
|
|
7840
|
-
...newCellPaths.map((c) =>
|
|
7921
|
+
...newCellPaths.map((c) => path21.relative(repoRoot, c.filePath)),
|
|
7841
7922
|
CLEF_MANIFEST_FILENAME
|
|
7842
7923
|
],
|
|
7843
7924
|
mutate: async () => {
|
|
7925
|
+
const source = this.buildSource(updatedManifest);
|
|
7844
7926
|
for (const cell of newCellPaths) {
|
|
7845
|
-
|
|
7846
|
-
|
|
7847
|
-
namespace: cell.namespace,
|
|
7848
|
-
environment: name,
|
|
7849
|
-
filePath: cell.filePath,
|
|
7850
|
-
exists: false
|
|
7851
|
-
},
|
|
7852
|
-
this.encryption,
|
|
7853
|
-
updatedManifest
|
|
7854
|
-
);
|
|
7927
|
+
const ref = { namespace: cell.namespace, environment: name };
|
|
7928
|
+
await source.scaffoldCell(ref, updatedManifest);
|
|
7855
7929
|
}
|
|
7856
7930
|
const doc = readManifestYaml(repoRoot);
|
|
7857
7931
|
const environments = doc.environments;
|
|
@@ -7994,7 +8068,7 @@ var StructureManager = class {
|
|
|
7994
8068
|
for (const pair of renamePairs) {
|
|
7995
8069
|
if (fs19.existsSync(pair.to)) {
|
|
7996
8070
|
throw new Error(
|
|
7997
|
-
`Rename target '${
|
|
8071
|
+
`Rename target '${path21.relative(repoRoot, pair.to)}' already exists. Move or remove it first.`
|
|
7998
8072
|
);
|
|
7999
8073
|
}
|
|
8000
8074
|
}
|
|
@@ -8037,7 +8111,7 @@ var StructureManager = class {
|
|
|
8037
8111
|
for (const pair of renamePairs) {
|
|
8038
8112
|
if (fs19.existsSync(pair.to)) {
|
|
8039
8113
|
throw new Error(
|
|
8040
|
-
`Rename target '${
|
|
8114
|
+
`Rename target '${path21.relative(repoRoot, pair.to)}' already exists. Move or remove it first.`
|
|
8041
8115
|
);
|
|
8042
8116
|
}
|
|
8043
8117
|
}
|
|
@@ -8087,7 +8161,7 @@ var StructureManager = class {
|
|
|
8087
8161
|
swapAxisInCellPath(repoRoot, manifest, cell, axis, newName) {
|
|
8088
8162
|
const ns = axis === "namespace" ? newName : cell.namespace;
|
|
8089
8163
|
const env = axis === "environment" ? newName : cell.environment;
|
|
8090
|
-
return
|
|
8164
|
+
return path21.join(
|
|
8091
8165
|
repoRoot,
|
|
8092
8166
|
manifest.file_pattern.replace("{namespace}", ns).replace("{environment}", env)
|
|
8093
8167
|
);
|
|
@@ -8099,8 +8173,8 @@ var StructureManager = class {
|
|
|
8099
8173
|
txPaths(repoRoot, renamePairs) {
|
|
8100
8174
|
const paths = /* @__PURE__ */ new Set();
|
|
8101
8175
|
for (const pair of renamePairs) {
|
|
8102
|
-
paths.add(
|
|
8103
|
-
paths.add(
|
|
8176
|
+
paths.add(path21.relative(repoRoot, pair.from));
|
|
8177
|
+
paths.add(path21.relative(repoRoot, pair.to));
|
|
8104
8178
|
}
|
|
8105
8179
|
paths.add(CLEF_MANIFEST_FILENAME);
|
|
8106
8180
|
return [...paths];
|
|
@@ -8111,7 +8185,7 @@ var StructureManager = class {
|
|
|
8111
8185
|
*/
|
|
8112
8186
|
applyRenames(pairs) {
|
|
8113
8187
|
for (const pair of pairs) {
|
|
8114
|
-
const targetDir =
|
|
8188
|
+
const targetDir = path21.dirname(pair.to);
|
|
8115
8189
|
if (!fs19.existsSync(targetDir)) {
|
|
8116
8190
|
fs19.mkdirSync(targetDir, { recursive: true });
|
|
8117
8191
|
}
|
|
@@ -8126,10 +8200,10 @@ var StructureManager = class {
|
|
|
8126
8200
|
deletePaths(repoRoot, cells) {
|
|
8127
8201
|
const paths = /* @__PURE__ */ new Set();
|
|
8128
8202
|
for (const cell of cells) {
|
|
8129
|
-
paths.add(
|
|
8203
|
+
paths.add(path21.relative(repoRoot, cell.filePath));
|
|
8130
8204
|
const meta = cell.filePath.replace(/\.enc\.(yaml|json)$/, ".clef-meta.yaml");
|
|
8131
8205
|
if (fs19.existsSync(meta)) {
|
|
8132
|
-
paths.add(
|
|
8206
|
+
paths.add(path21.relative(repoRoot, meta));
|
|
8133
8207
|
}
|
|
8134
8208
|
}
|
|
8135
8209
|
paths.add(CLEF_MANIFEST_FILENAME);
|
|
@@ -8244,7 +8318,7 @@ function renameKeyPreservingOrder(obj, oldKey, newKey) {
|
|
|
8244
8318
|
}
|
|
8245
8319
|
|
|
8246
8320
|
// src/artifact/resolve.ts
|
|
8247
|
-
async function resolveIdentitySecrets(identityName, environment, manifest, repoRoot,
|
|
8321
|
+
async function resolveIdentitySecrets(identityName, environment, manifest, repoRoot, source, matrixManager) {
|
|
8248
8322
|
const identity = manifest.service_identities?.find((si) => si.name === identityName);
|
|
8249
8323
|
if (!identity) {
|
|
8250
8324
|
throw new Error(`Service identity '${identityName}' not found in manifest.`);
|
|
@@ -8260,7 +8334,10 @@ async function resolveIdentitySecrets(identityName, environment, manifest, repoR
|
|
|
8260
8334
|
(c) => c.exists && identity.namespaces.includes(c.namespace) && c.environment === environment
|
|
8261
8335
|
);
|
|
8262
8336
|
for (const cell of cells) {
|
|
8263
|
-
const decrypted = await
|
|
8337
|
+
const decrypted = await source.readCell({
|
|
8338
|
+
namespace: cell.namespace,
|
|
8339
|
+
environment: cell.environment
|
|
8340
|
+
});
|
|
8264
8341
|
const bucket = allValues[cell.namespace] ??= {};
|
|
8265
8342
|
for (const [key, value] of Object.entries(decrypted.values)) {
|
|
8266
8343
|
if (key in bucket && bucket[key] !== value) {
|
|
@@ -8284,14 +8361,14 @@ import * as crypto4 from "crypto";
|
|
|
8284
8361
|
|
|
8285
8362
|
// src/artifact/output.ts
|
|
8286
8363
|
import * as fs20 from "fs";
|
|
8287
|
-
import * as
|
|
8364
|
+
import * as path22 from "path";
|
|
8288
8365
|
var FilePackOutput = class {
|
|
8289
8366
|
constructor(outputPath) {
|
|
8290
8367
|
this.outputPath = outputPath;
|
|
8291
8368
|
}
|
|
8292
8369
|
outputPath;
|
|
8293
8370
|
async write(_artifact, json) {
|
|
8294
|
-
const outputDir =
|
|
8371
|
+
const outputDir = path22.dirname(this.outputPath);
|
|
8295
8372
|
if (!fs20.existsSync(outputDir)) {
|
|
8296
8373
|
fs20.mkdirSync(outputDir, { recursive: true });
|
|
8297
8374
|
}
|
|
@@ -8338,17 +8415,6 @@ function buildSigningPayload(artifact) {
|
|
|
8338
8415
|
];
|
|
8339
8416
|
return Buffer.from(fields.join("\n"), "utf-8");
|
|
8340
8417
|
}
|
|
8341
|
-
function generateSigningKeyPair() {
|
|
8342
|
-
const pair = crypto2.generateKeyPairSync("ed25519");
|
|
8343
|
-
return {
|
|
8344
|
-
publicKey: pair.publicKey.export({ type: "spki", format: "der" }).toString(
|
|
8345
|
-
"base64"
|
|
8346
|
-
),
|
|
8347
|
-
privateKey: pair.privateKey.export({ type: "pkcs8", format: "der" }).toString(
|
|
8348
|
-
"base64"
|
|
8349
|
-
)
|
|
8350
|
-
};
|
|
8351
|
-
}
|
|
8352
8418
|
function signEd25519(payload, privateKeyBase64) {
|
|
8353
8419
|
const keyObj = crypto2.createPrivateKey({
|
|
8354
8420
|
key: Buffer.from(privateKeyBase64, "base64"),
|
|
@@ -8384,17 +8450,6 @@ function verifySignature(payload, signatureBase64, publicKeyBase64) {
|
|
|
8384
8450
|
}
|
|
8385
8451
|
throw new Error(`Unsupported key type for signature verification: ${keyType}`);
|
|
8386
8452
|
}
|
|
8387
|
-
function detectAlgorithm(publicKeyBase64) {
|
|
8388
|
-
const keyObj = crypto2.createPublicKey({
|
|
8389
|
-
key: Buffer.from(publicKeyBase64, "base64"),
|
|
8390
|
-
format: "der",
|
|
8391
|
-
type: "spki"
|
|
8392
|
-
});
|
|
8393
|
-
const keyType = keyObj.asymmetricKeyType;
|
|
8394
|
-
if (keyType === "ed25519") return "Ed25519";
|
|
8395
|
-
if (keyType === "ec") return "ECDSA_SHA256";
|
|
8396
|
-
throw new Error(`Unsupported key type: ${keyType}`);
|
|
8397
|
-
}
|
|
8398
8453
|
|
|
8399
8454
|
// src/artifact/hash.ts
|
|
8400
8455
|
import * as crypto3 from "crypto";
|
|
@@ -8404,12 +8459,12 @@ function computeCiphertextHash(ciphertext) {
|
|
|
8404
8459
|
|
|
8405
8460
|
// src/artifact/packer.ts
|
|
8406
8461
|
var ArtifactPacker = class {
|
|
8407
|
-
constructor(
|
|
8408
|
-
this.
|
|
8462
|
+
constructor(source, matrixManager, kms) {
|
|
8463
|
+
this.source = source;
|
|
8409
8464
|
this.matrixManager = matrixManager;
|
|
8410
8465
|
this.kms = kms;
|
|
8411
8466
|
}
|
|
8412
|
-
|
|
8467
|
+
source;
|
|
8413
8468
|
matrixManager;
|
|
8414
8469
|
kms;
|
|
8415
8470
|
/**
|
|
@@ -8427,7 +8482,7 @@ var ArtifactPacker = class {
|
|
|
8427
8482
|
config.environment,
|
|
8428
8483
|
manifest,
|
|
8429
8484
|
repoRoot,
|
|
8430
|
-
this.
|
|
8485
|
+
this.source,
|
|
8431
8486
|
this.matrixManager
|
|
8432
8487
|
);
|
|
8433
8488
|
const plaintext = JSON.stringify(resolved.values);
|
|
@@ -8812,11 +8867,7 @@ var JsonEnvelopeBackend = class {
|
|
|
8812
8867
|
}
|
|
8813
8868
|
async pack(req) {
|
|
8814
8869
|
const opts = req.backendOptions;
|
|
8815
|
-
const packer = new ArtifactPacker(
|
|
8816
|
-
req.services.encryption,
|
|
8817
|
-
new MatrixManager(),
|
|
8818
|
-
req.services.kms
|
|
8819
|
-
);
|
|
8870
|
+
const packer = new ArtifactPacker(req.services.source, new MatrixManager(), req.services.kms);
|
|
8820
8871
|
const output = opts.output ?? (opts.outputPath ? new FilePackOutput(opts.outputPath) : void 0);
|
|
8821
8872
|
const result = await packer.pack(
|
|
8822
8873
|
{
|
|
@@ -8845,7 +8896,7 @@ var JsonEnvelopeBackend = class {
|
|
|
8845
8896
|
var VALID_KMS_PROVIDERS = ["aws", "gcp", "azure"];
|
|
8846
8897
|
|
|
8847
8898
|
// src/migration/backend.ts
|
|
8848
|
-
import * as
|
|
8899
|
+
import * as path23 from "path";
|
|
8849
8900
|
import * as YAML12 from "yaml";
|
|
8850
8901
|
var BACKEND_KEY_FIELDS = {
|
|
8851
8902
|
age: void 0,
|
|
@@ -8873,23 +8924,24 @@ function metadataMatchesTarget(meta, target) {
|
|
|
8873
8924
|
}
|
|
8874
8925
|
var BackendMigrator = class {
|
|
8875
8926
|
/**
|
|
8876
|
-
* @param
|
|
8927
|
+
* @param buildSource - Factory that builds a `SecretSource` bound to a
|
|
8928
|
+
* given manifest. Called twice during a real migration: once with the
|
|
8929
|
+
* pre-migration manifest (for classification + decrypt) and once with
|
|
8930
|
+
* the post-mutation manifest (for re-encrypt + verify). The factory
|
|
8931
|
+
* pattern is required because the encryption layer of a composed
|
|
8932
|
+
* source is bound to a manifest at construction.
|
|
8877
8933
|
* @param matrixManager - Matrix resolver.
|
|
8878
8934
|
* @param tx - Transaction manager that wraps the migration in a single git commit
|
|
8879
8935
|
* so a partial failure rolls back ALL files + the manifest via `git reset --hard`.
|
|
8880
|
-
* @param targetEncryption - Optional separate backend for encrypt. Use when migrating
|
|
8881
|
-
* from cloud (decrypt via keyservice) to another backend (encrypt via local credentials).
|
|
8882
8936
|
*/
|
|
8883
|
-
constructor(
|
|
8937
|
+
constructor(buildSource, matrixManager, tx) {
|
|
8938
|
+
this.buildSource = buildSource;
|
|
8884
8939
|
this.matrixManager = matrixManager;
|
|
8885
8940
|
this.tx = tx;
|
|
8886
|
-
this.decryptBackend = encryption;
|
|
8887
|
-
this.encryptBackend = targetEncryption ?? encryption;
|
|
8888
8941
|
}
|
|
8942
|
+
buildSource;
|
|
8889
8943
|
matrixManager;
|
|
8890
8944
|
tx;
|
|
8891
|
-
decryptBackend;
|
|
8892
|
-
encryptBackend;
|
|
8893
8945
|
async migrate(manifest, repoRoot, options, onProgress) {
|
|
8894
8946
|
const { target, environment, dryRun, skipVerify } = options;
|
|
8895
8947
|
if (environment) {
|
|
@@ -8909,10 +8961,12 @@ var BackendMigrator = class {
|
|
|
8909
8961
|
warnings: ["No encrypted files found to migrate."]
|
|
8910
8962
|
};
|
|
8911
8963
|
}
|
|
8964
|
+
const sourceBefore = this.buildSource(manifest);
|
|
8912
8965
|
const toMigrate = [];
|
|
8913
8966
|
const skippedFiles = [];
|
|
8914
8967
|
for (const cell of targetCells) {
|
|
8915
|
-
const
|
|
8968
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
8969
|
+
const meta = await sourceBefore.getCellMetadata(ref);
|
|
8916
8970
|
if (metadataMatchesTarget(meta, target)) {
|
|
8917
8971
|
skippedFiles.push(cell.filePath);
|
|
8918
8972
|
onProgress?.({
|
|
@@ -8933,6 +8987,8 @@ var BackendMigrator = class {
|
|
|
8933
8987
|
warnings: ["All files already use the target backend and key. Nothing to migrate."]
|
|
8934
8988
|
};
|
|
8935
8989
|
}
|
|
8990
|
+
const preMigrationWarnings = [];
|
|
8991
|
+
this.checkAgeRecipientsWarning(manifest, target, environment, preMigrationWarnings);
|
|
8936
8992
|
if (dryRun) {
|
|
8937
8993
|
const warnings2 = [];
|
|
8938
8994
|
for (const cell of toMigrate) {
|
|
@@ -8949,7 +9005,7 @@ var BackendMigrator = class {
|
|
|
8949
9005
|
} else {
|
|
8950
9006
|
warnings2.push(`Would update global default_backend \u2192 ${target.backend}`);
|
|
8951
9007
|
}
|
|
8952
|
-
|
|
9008
|
+
warnings2.push(...preMigrationWarnings);
|
|
8953
9009
|
return {
|
|
8954
9010
|
migratedFiles: [],
|
|
8955
9011
|
skippedFiles,
|
|
@@ -8961,11 +9017,12 @@ var BackendMigrator = class {
|
|
|
8961
9017
|
const migratedFiles = [];
|
|
8962
9018
|
let migrationFailed = false;
|
|
8963
9019
|
let migrationError;
|
|
9020
|
+
let sourceAfter;
|
|
8964
9021
|
try {
|
|
8965
9022
|
await this.tx.run(repoRoot, {
|
|
8966
9023
|
description: environment ? `clef migrate-backend ${target.backend}: ${environment}` : `clef migrate-backend ${target.backend}`,
|
|
8967
9024
|
paths: [
|
|
8968
|
-
...toMigrate.map((c) =>
|
|
9025
|
+
...toMigrate.map((c) => path23.relative(repoRoot, c.filePath)),
|
|
8969
9026
|
CLEF_MANIFEST_FILENAME
|
|
8970
9027
|
],
|
|
8971
9028
|
mutate: async () => {
|
|
@@ -8973,19 +9030,16 @@ var BackendMigrator = class {
|
|
|
8973
9030
|
this.updateManifestDoc(doc, target, environment);
|
|
8974
9031
|
writeManifestYaml(repoRoot, doc);
|
|
8975
9032
|
const updatedManifest = YAML12.parse(YAML12.stringify(doc));
|
|
9033
|
+
sourceAfter = this.buildSource(updatedManifest);
|
|
8976
9034
|
for (const cell of toMigrate) {
|
|
8977
9035
|
onProgress?.({
|
|
8978
9036
|
type: "migrate",
|
|
8979
9037
|
file: cell.filePath,
|
|
8980
9038
|
message: `Migrating ${cell.namespace}/${cell.environment}...`
|
|
8981
9039
|
});
|
|
8982
|
-
const
|
|
8983
|
-
await
|
|
8984
|
-
|
|
8985
|
-
decrypted.values,
|
|
8986
|
-
updatedManifest,
|
|
8987
|
-
cell.environment
|
|
8988
|
-
);
|
|
9040
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
9041
|
+
const decrypted = await sourceBefore.readCell(ref);
|
|
9042
|
+
await sourceAfter.writeCell(ref, decrypted.values);
|
|
8989
9043
|
migratedFiles.push(cell.filePath);
|
|
8990
9044
|
}
|
|
8991
9045
|
}
|
|
@@ -9005,12 +9059,17 @@ var BackendMigrator = class {
|
|
|
9005
9059
|
rolledBack: true,
|
|
9006
9060
|
error: migrationError.message,
|
|
9007
9061
|
verifiedFiles: [],
|
|
9008
|
-
warnings
|
|
9062
|
+
// Surface pre-migration warnings even on rollback. The new manifest
|
|
9063
|
+
// validator can reject the write (e.g. per-env recipients vs.
|
|
9064
|
+
// non-age backend), and without these warnings the user only sees
|
|
9065
|
+
// an opaque "rolled back" message — not the actionable hint about
|
|
9066
|
+
// what to clean up first.
|
|
9067
|
+
warnings: ["All changes have been rolled back.", ...preMigrationWarnings]
|
|
9009
9068
|
};
|
|
9010
9069
|
}
|
|
9011
9070
|
const verifiedFiles = [];
|
|
9012
9071
|
const warnings = [];
|
|
9013
|
-
if (!skipVerify) {
|
|
9072
|
+
if (!skipVerify && sourceAfter) {
|
|
9014
9073
|
for (const cell of toMigrate) {
|
|
9015
9074
|
try {
|
|
9016
9075
|
onProgress?.({
|
|
@@ -9018,7 +9077,8 @@ var BackendMigrator = class {
|
|
|
9018
9077
|
file: cell.filePath,
|
|
9019
9078
|
message: `Verifying ${cell.namespace}/${cell.environment}...`
|
|
9020
9079
|
});
|
|
9021
|
-
|
|
9080
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
9081
|
+
await sourceAfter.readCell(ref);
|
|
9022
9082
|
verifiedFiles.push(cell.filePath);
|
|
9023
9083
|
} catch (err) {
|
|
9024
9084
|
const errorMsg = err instanceof Error ? err.message : String(err);
|
|
@@ -9028,7 +9088,7 @@ var BackendMigrator = class {
|
|
|
9028
9088
|
}
|
|
9029
9089
|
}
|
|
9030
9090
|
}
|
|
9031
|
-
|
|
9091
|
+
warnings.push(...preMigrationWarnings);
|
|
9032
9092
|
return { migratedFiles, skippedFiles, rolledBack: false, verifiedFiles, warnings };
|
|
9033
9093
|
}
|
|
9034
9094
|
// ── Private helpers ──────────────────────────────────────────────────
|
|
@@ -9063,16 +9123,16 @@ var BackendMigrator = class {
|
|
|
9063
9123
|
};
|
|
9064
9124
|
|
|
9065
9125
|
// src/reset/manager.ts
|
|
9066
|
-
import * as
|
|
9126
|
+
import * as path24 from "path";
|
|
9067
9127
|
var ResetManager = class {
|
|
9068
|
-
constructor(matrixManager,
|
|
9128
|
+
constructor(matrixManager, buildSource, schemaValidator, tx) {
|
|
9069
9129
|
this.matrixManager = matrixManager;
|
|
9070
|
-
this.
|
|
9130
|
+
this.buildSource = buildSource;
|
|
9071
9131
|
this.schemaValidator = schemaValidator;
|
|
9072
9132
|
this.tx = tx;
|
|
9073
9133
|
}
|
|
9074
9134
|
matrixManager;
|
|
9075
|
-
|
|
9135
|
+
buildSource;
|
|
9076
9136
|
schemaValidator;
|
|
9077
9137
|
tx;
|
|
9078
9138
|
async reset(opts, manifest, repoRoot) {
|
|
@@ -9092,11 +9152,11 @@ var ResetManager = class {
|
|
|
9092
9152
|
txPaths.push(CLEF_MANIFEST_FILENAME);
|
|
9093
9153
|
}
|
|
9094
9154
|
for (const cell of targetCells) {
|
|
9095
|
-
txPaths.push(
|
|
9155
|
+
txPaths.push(path24.relative(repoRoot, cell.filePath));
|
|
9096
9156
|
const cellKeys = keyPlan.get(cell.namespace) ?? [];
|
|
9097
9157
|
if (cellKeys.length > 0) {
|
|
9098
9158
|
txPaths.push(
|
|
9099
|
-
|
|
9159
|
+
path24.relative(repoRoot, cell.filePath.replace(/\.enc\.(yaml|json)$/, ".clef-meta.yaml"))
|
|
9100
9160
|
);
|
|
9101
9161
|
}
|
|
9102
9162
|
}
|
|
@@ -9113,17 +9173,14 @@ var ResetManager = class {
|
|
|
9113
9173
|
writeManifestYaml(repoRoot, doc);
|
|
9114
9174
|
effectiveManifest = withBackendOverride(manifest, affectedEnvs, opts.backend, opts.key);
|
|
9115
9175
|
}
|
|
9176
|
+
const source = this.buildSource(effectiveManifest);
|
|
9116
9177
|
for (const cell of targetCells) {
|
|
9117
9178
|
const keys = keyPlan.get(cell.namespace) ?? [];
|
|
9118
9179
|
const placeholders = this.buildPlaceholders(keys);
|
|
9119
|
-
|
|
9120
|
-
|
|
9121
|
-
placeholders,
|
|
9122
|
-
effectiveManifest,
|
|
9123
|
-
cell.environment
|
|
9124
|
-
);
|
|
9180
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
9181
|
+
await source.writeCell(ref, placeholders);
|
|
9125
9182
|
if (keys.length > 0) {
|
|
9126
|
-
await
|
|
9183
|
+
await source.markPending(ref, keys, "clef reset");
|
|
9127
9184
|
pendingKeysByCell[cell.filePath] = keys;
|
|
9128
9185
|
}
|
|
9129
9186
|
scaffoldedCells.push(cell.filePath);
|
|
@@ -9184,7 +9241,7 @@ var ResetManager = class {
|
|
|
9184
9241
|
for (const namespace of namespaces) {
|
|
9185
9242
|
const nsDef = manifest.namespaces.find((n) => n.name === namespace);
|
|
9186
9243
|
if (nsDef?.schema) {
|
|
9187
|
-
const schema = this.schemaValidator.loadSchema(
|
|
9244
|
+
const schema = this.schemaValidator.loadSchema(path24.join(repoRoot, nsDef.schema));
|
|
9188
9245
|
plan.set(namespace, Object.keys(schema.keys));
|
|
9189
9246
|
continue;
|
|
9190
9247
|
}
|
|
@@ -9263,15 +9320,15 @@ function withBackendOverride(manifest, envNames, backend, key) {
|
|
|
9263
9320
|
}
|
|
9264
9321
|
|
|
9265
9322
|
// src/sync/manager.ts
|
|
9266
|
-
import * as
|
|
9323
|
+
import * as path25 from "path";
|
|
9267
9324
|
var SyncManager = class {
|
|
9268
|
-
constructor(matrixManager,
|
|
9325
|
+
constructor(matrixManager, source, tx) {
|
|
9269
9326
|
this.matrixManager = matrixManager;
|
|
9270
|
-
this.
|
|
9327
|
+
this.source = source;
|
|
9271
9328
|
this.tx = tx;
|
|
9272
9329
|
}
|
|
9273
9330
|
matrixManager;
|
|
9274
|
-
|
|
9331
|
+
source;
|
|
9275
9332
|
tx;
|
|
9276
9333
|
/**
|
|
9277
9334
|
* Compute what sync would do without mutating anything.
|
|
@@ -9288,8 +9345,13 @@ var SyncManager = class {
|
|
|
9288
9345
|
const targetCells = opts.namespace ? existingCells.filter((c) => c.namespace === opts.namespace) : existingCells;
|
|
9289
9346
|
const keysByNsEnv = {};
|
|
9290
9347
|
for (const cell of targetCells) {
|
|
9291
|
-
const
|
|
9292
|
-
|
|
9348
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
9349
|
+
let keys;
|
|
9350
|
+
try {
|
|
9351
|
+
keys = await this.source.listKeys(ref);
|
|
9352
|
+
} catch {
|
|
9353
|
+
continue;
|
|
9354
|
+
}
|
|
9293
9355
|
if (!keysByNsEnv[cell.namespace]) keysByNsEnv[cell.namespace] = {};
|
|
9294
9356
|
keysByNsEnv[cell.namespace][cell.environment] = new Set(keys);
|
|
9295
9357
|
}
|
|
@@ -9335,7 +9397,7 @@ var SyncManager = class {
|
|
|
9335
9397
|
}
|
|
9336
9398
|
const txPaths = [];
|
|
9337
9399
|
for (const cell of syncPlan.cells) {
|
|
9338
|
-
const rel =
|
|
9400
|
+
const rel = path25.relative(repoRoot, cell.filePath);
|
|
9339
9401
|
txPaths.push(rel);
|
|
9340
9402
|
txPaths.push(rel.replace(/\.enc\.(yaml|json)$/, ".clef-meta.yaml"));
|
|
9341
9403
|
}
|
|
@@ -9348,17 +9410,13 @@ var SyncManager = class {
|
|
|
9348
9410
|
paths: txPaths,
|
|
9349
9411
|
mutate: async () => {
|
|
9350
9412
|
for (const cell of syncPlan.cells) {
|
|
9351
|
-
const
|
|
9413
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
9414
|
+
const decrypted = await this.source.readCell(ref);
|
|
9352
9415
|
for (const key of cell.missingKeys) {
|
|
9353
9416
|
decrypted.values[key] = generateRandomValue();
|
|
9354
9417
|
}
|
|
9355
|
-
await this.
|
|
9356
|
-
|
|
9357
|
-
decrypted.values,
|
|
9358
|
-
manifest,
|
|
9359
|
-
cell.environment
|
|
9360
|
-
);
|
|
9361
|
-
await markPendingWithRetry(cell.filePath, cell.missingKeys, "clef sync");
|
|
9418
|
+
await this.source.writeCell(ref, decrypted.values);
|
|
9419
|
+
await this.source.markPending(ref, cell.missingKeys, "clef sync");
|
|
9362
9420
|
const cellLabel = `${cell.namespace}/${cell.environment}`;
|
|
9363
9421
|
modifiedCells.push(cellLabel);
|
|
9364
9422
|
scaffoldedKeys[cellLabel] = cell.missingKeys;
|
|
@@ -9618,13 +9676,272 @@ var ComplianceGenerator = class {
|
|
|
9618
9676
|
};
|
|
9619
9677
|
|
|
9620
9678
|
// src/compliance/run.ts
|
|
9621
|
-
import * as
|
|
9679
|
+
import * as path27 from "path";
|
|
9680
|
+
|
|
9681
|
+
// src/source/compose.ts
|
|
9682
|
+
import * as YAML14 from "yaml";
|
|
9683
|
+
|
|
9684
|
+
// src/source/default-bulk.ts
|
|
9685
|
+
/**
 * Looped fallback implementations of the bulk operations, built purely on a
 * source's primitive readCell / writeCell / cellExists methods. Backends with
 * native batching can supply their own versions instead.
 *
 * @param source - Any object exposing `cellExists`, `readCell`, `writeCell`.
 * @returns `{ bulkSet, bulkDelete, copyValue }` delegating to `source`.
 */
function defaultBulk(source) {
  // Read a cell's plaintext values, or an empty object when the cell is absent.
  const readValuesOrEmpty = async (cell) => {
    if (!(await source.cellExists(cell))) return {};
    const { values } = await source.readCell(cell);
    return values;
  };
  return {
    /** Set `key` to its per-environment value in each listed environment. */
    async bulkSet(namespace, key, valuesByEnv, _manifest) {
      for (const [environment, value] of Object.entries(valuesByEnv)) {
        const cell = { namespace, environment };
        const current = await readValuesOrEmpty(cell);
        await source.writeCell(cell, { ...current, [key]: value });
      }
    },
    /** Remove `key` from every manifest environment where it is present. */
    async bulkDelete(namespace, key, manifest) {
      for (const env of manifest.environments) {
        const cell = { namespace, environment: env.name };
        if (!(await source.cellExists(cell))) continue;
        const { values } = await source.readCell(cell);
        if (!(key in values)) continue;
        // Drop the key via rest-destructuring rather than mutating in place.
        const { [key]: _removed, ...remaining } = values;
        await source.writeCell(cell, remaining);
      }
    },
    /** Copy one key's value from `from` to `to`; throws when absent at `from`. */
    async copyValue(key, from, to, _manifest) {
      const origin = await source.readCell(from);
      if (!(key in origin.values)) {
        throw new Error(
          `Cannot copy: key '${key}' not present in ${from.namespace}/${from.environment}`
        );
      }
      const destination = await readValuesOrEmpty(to);
      await source.writeCell(to, { ...destination, [key]: origin.values[key] });
    }
  };
}
|
|
9717
|
+
|
|
9718
|
+
// src/source/compose.ts
|
|
9719
|
+
/**
 * Compose a StorageBackend (where sealed blobs live) with an
 * EncryptionBackend (how blobs are sealed and opened) into a full
 * SecretSource bound to a single manifest.
 */
function composeSecretSource(storage, encryption, manifest) {
  return new ComposedSecretSource(storage, encryption, manifest);
}
var ComposedSecretSource = class {
  constructor(storage, encryption, manifest) {
    this.storage = storage;
    this.encryption = encryption;
    this.manifest = manifest;
    this.id = `${storage.id}+${encryption.id}`;
    this.description = `${storage.description} / ${encryption.description}`;
  }
  storage;
  encryption;
  manifest;
  id;
  description;
  /** Encryption context for one cell: bound manifest + environment + blob format. */
  context(cell) {
    return {
      manifest: this.manifest,
      environment: cell.environment,
      format: this.storage.blobFormat(cell)
    };
  }
  // ── Core SecretSource ──────────────────────────────────────────────────
  /** Read the cell's blob and decrypt it to plaintext values. */
  async readCell(cell) {
    const sealed = await this.storage.readBlob(cell);
    return this.encryption.decrypt(sealed, this.context(cell));
  }
  /** Encrypt plaintext values and persist the resulting blob. */
  async writeCell(cell, values) {
    const sealed = await this.encryption.encrypt(values, this.context(cell));
    await this.storage.writeBlob(cell, sealed);
  }
  /** Delegate blob deletion to the storage backend. */
  async deleteCell(cell) {
    await this.storage.deleteBlob(cell);
  }
  /** Whether a blob exists for this cell. */
  async cellExists(cell) {
    return this.storage.blobExists(cell);
  }
  /**
   * List cell keys WITHOUT decrypting. SOPS files store key names in
   * plaintext at the top level of the YAML/JSON document — we read the
   * blob and return everything except the `sops:` metadata block.
   *
   * NOTE: this is currently SOPS-shaped. A future non-SOPS
   * `EncryptionBackend` whose ciphertext doesn't expose key names in
   * the clear would need its own listing strategy — likely a
   * `listKeys(blob)` method on `EncryptionBackend`. Deferred until a
   * second backend exists.
   */
  async listKeys(cell) {
    const exists = await this.storage.blobExists(cell);
    if (!exists) return [];
    const doc = YAML14.parse(await this.storage.readBlob(cell));
    if (!doc || typeof doc !== "object") return [];
    return Object.keys(doc).filter((name) => name !== "sops");
  }
  /** Encryption metadata extracted from the raw (still-sealed) blob. */
  async getCellMetadata(cell) {
    const sealed = await this.storage.readBlob(cell);
    return this.encryption.getMetadata(sealed);
  }
  /**
   * Create an empty encrypted cell if none exists yet; no-op otherwise.
   * Uses the manifest passed in (not the bound one) for the encrypt context.
   */
  async scaffoldCell(cell, manifest) {
    if (await this.storage.blobExists(cell)) return;
    const sealed = await this.encryption.encrypt(
      {},
      {
        manifest,
        environment: cell.environment,
        format: this.storage.blobFormat(cell)
      }
    );
    await this.storage.writeBlob(cell, sealed);
  }
  // ── Pending / rotation metadata ────────────────────────────────────────
  async getPendingMetadata(cell) {
    return this.storage.readPendingMetadata(cell);
  }
  /** Add `keys` to the pending list; keys already pending keep their entry. */
  async markPending(cell, keys, setBy) {
    const state = await this.storage.readPendingMetadata(cell);
    const since = /* @__PURE__ */ new Date();
    const tracked = new Set(state.pending.map((p) => p.key));
    for (const key of keys) {
      if (tracked.has(key)) continue;
      tracked.add(key);
      state.pending.push({ key, since, setBy });
    }
    await this.storage.writePendingMetadata(cell, state);
  }
  /** Drop `keys` from the pending list. */
  async markResolved(cell, keys) {
    const state = await this.storage.readPendingMetadata(cell);
    const resolved = new Set(keys);
    state.pending = state.pending.filter((p) => !resolved.has(p.key));
    await this.storage.writePendingMetadata(cell, state);
  }
  /** Record a rotation for each key and clear its pending entry, if any. */
  async recordRotation(cell, keys, rotatedBy) {
    const state = await this.storage.readPendingMetadata(cell);
    const now = /* @__PURE__ */ new Date();
    for (const key of keys) {
      const entry = state.rotations.find((r) => r.key === key);
      if (entry) {
        entry.lastRotatedAt = now;
        entry.rotatedBy = rotatedBy;
        entry.rotationCount += 1;
      } else {
        state.rotations.push({ key, lastRotatedAt: now, rotatedBy, rotationCount: 1 });
      }
    }
    const rotated = new Set(keys);
    state.pending = state.pending.filter((p) => !rotated.has(p.key));
    await this.storage.writePendingMetadata(cell, state);
  }
  /** Forget rotation history for `keys`. */
  async removeRotation(cell, keys) {
    const state = await this.storage.readPendingMetadata(cell);
    const dropped = new Set(keys);
    state.rotations = state.rotations.filter((r) => !dropped.has(r.key));
    await this.storage.writePendingMetadata(cell, state);
  }
  // ── Lintable ───────────────────────────────────────────────────────────
  /** False when the blob is missing; otherwise defer to the encryption backend. */
  async validateEncryption(cell) {
    const exists = await this.storage.blobExists(cell);
    if (!exists) return false;
    const sealed = await this.storage.readBlob(cell);
    return this.encryption.validateEncryption(sealed);
  }
  /** Diff the blob's actual recipients against the expected recipient list. */
  async checkRecipientDrift(cell, expected) {
    const sealed = await this.storage.readBlob(cell);
    const { recipients } = this.encryption.getMetadata(sealed);
    const actual = new Set(recipients);
    const wanted = new Set(expected);
    return {
      missing: expected.filter((r) => !actual.has(r)),
      unexpected: recipients.filter((r) => !wanted.has(r))
    };
  }
  // ── Rotatable ──────────────────────────────────────────────────────────
  /** Re-encrypt the stored blob via the encryption backend and write it back. */
  async rotate(cell, opts) {
    const sealed = await this.storage.readBlob(cell);
    const rotated = await this.encryption.rotate(sealed, opts, this.context(cell));
    await this.storage.writeBlob(cell, rotated);
  }
  // ── Bulk ───────────────────────────────────────────────────────────────
  //
  // Default looped implementation. A future StorageBackend that supports
  // batch operations (e.g. PostgresStorageBackend with row-level UPDATE
  // batching) can override these by wrapping `composeSecretSource`'s
  // output and replacing just the bulk methods.
  bulkSet = (namespace, key, valuesByEnv, manifest) => defaultBulk(this).bulkSet(namespace, key, valuesByEnv, manifest);
  bulkDelete = (namespace, key, manifest) => defaultBulk(this).bulkDelete(namespace, key, manifest);
  copyValue = (key, from, to, manifest) => defaultBulk(this).copyValue(key, from, to, manifest);
};
|
|
9863
|
+
|
|
9864
|
+
// src/source/filesystem-storage-backend.ts
|
|
9865
|
+
import * as fs22 from "fs";
|
|
9866
|
+
import * as path26 from "path";
|
|
9867
|
+
import { randomBytes as randomBytes4 } from "crypto";
|
|
9868
|
+
var FilesystemStorageBackend = class {
  /**
   * @param manifest - Parsed clef manifest; its `file_pattern` (with
   *   `{namespace}`/`{environment}` placeholders) locates each cell's
   *   encrypted file relative to the repo root.
   * @param repoRoot - Repository root path all cell paths are resolved against.
   */
  constructor(manifest, repoRoot) {
    this.manifest = manifest;
    this.repoRoot = repoRoot;
  }
  manifest;
  repoRoot;
  id = "filesystem";
  description = "Filesystem-backed cell storage (default substrate)";
  /**
   * Resolve a cell reference to its absolute filesystem path. Public —
   * used by substrate-specific trait implementations.
   */
  cellPath(cell) {
    const rel = this.manifest.file_pattern
      .replace("{namespace}", cell.namespace)
      .replace("{environment}", cell.environment);
    return path26.join(this.repoRoot, rel);
  }
  /** The repo root, exposed for filesystem-shaped trait implementations. */
  getRepoRoot() {
    return this.repoRoot;
  }
  /** "json" when the resolved path ends in `.json`; "yaml" otherwise. */
  blobFormat(cell) {
    const target = this.cellPath(cell);
    return target.endsWith(".json") ? "json" : "yaml";
  }
  /** Read the cell's blob as UTF-8 text. */
  async readBlob(cell) {
    return fs22.readFileSync(this.cellPath(cell), "utf-8");
  }
  /**
   * Write the blob atomically: ensure the parent directory exists, write and
   * fsync a uniquely-named temp file, then rename it over the target so a
   * reader never observes a partially-written blob.
   */
  async writeBlob(cell, blob) {
    const target = this.cellPath(cell);
    const parent = path26.dirname(target);
    if (!fs22.existsSync(parent)) {
      fs22.mkdirSync(parent, { recursive: true });
    }
    const tmpPath = `${target}.${Date.now()}.${randomBytes4(4).toString("hex")}.tmp`;
    const handle = fs22.openSync(tmpPath, "w");
    try {
      fs22.writeFileSync(handle, blob);
      fs22.fsyncSync(handle);
    } finally {
      fs22.closeSync(handle);
    }
    fs22.renameSync(tmpPath, target);
  }
  /** Remove the blob, then its metadata sidecar, when each exists. */
  async deleteBlob(cell) {
    const target = this.cellPath(cell);
    for (const victim of [target, this.sidecarPath(target)]) {
      if (fs22.existsSync(victim)) {
        fs22.unlinkSync(victim);
      }
    }
  }
  async blobExists(cell) {
    return fs22.existsSync(this.cellPath(cell));
  }
  async readPendingMetadata(cell) {
    return loadMetadata(this.cellPath(cell));
  }
  async writePendingMetadata(cell, meta) {
    await saveMetadata(this.cellPath(cell), meta);
  }
  /** Path of the `.clef-meta.yaml` sidecar that sits next to an encrypted file. */
  sidecarPath(filePath) {
    const parent = path26.dirname(filePath);
    const stem = path26.basename(filePath).replace(/\.enc\.(yaml|json)$/, "");
    return path26.join(parent, `${stem}.clef-meta.yaml`);
  }
};
|
|
9937
|
+
|
|
9938
|
+
// src/compliance/run.ts
|
|
9622
9939
|
var UNKNOWN = "unknown";
|
|
9623
9940
|
async function runCompliance(opts) {
|
|
9624
9941
|
const start = Date.now();
|
|
9625
9942
|
const repoRoot = opts.repoRoot ?? process.cwd();
|
|
9626
|
-
const manifestPath = opts.manifestPath ??
|
|
9627
|
-
const policyPath = opts.policyPath ??
|
|
9943
|
+
const manifestPath = opts.manifestPath ?? path27.join(repoRoot, "clef.yaml");
|
|
9944
|
+
const policyPath = opts.policyPath ?? path27.join(repoRoot, CLEF_POLICY_FILENAME);
|
|
9628
9945
|
const include = {
|
|
9629
9946
|
scan: opts.include?.scan ?? true,
|
|
9630
9947
|
lint: opts.include?.lint ?? true,
|
|
@@ -9636,6 +9953,11 @@ async function runCompliance(opts) {
|
|
|
9636
9953
|
const sopsClient = new SopsClient(opts.runner, opts.ageKeyFile, opts.ageKey, opts.sopsPath);
|
|
9637
9954
|
const matrixManager = new MatrixManager();
|
|
9638
9955
|
const schemaValidator = new SchemaValidator();
|
|
9956
|
+
const lintSource = composeSecretSource(
|
|
9957
|
+
new FilesystemStorageBackend(manifest, repoRoot),
|
|
9958
|
+
sopsClient,
|
|
9959
|
+
manifest
|
|
9960
|
+
);
|
|
9639
9961
|
const [sha, repo, files, scanResult, lintResult] = await Promise.all([
|
|
9640
9962
|
opts.sha !== void 0 ? Promise.resolve(opts.sha) : detectSha(opts.runner, repoRoot),
|
|
9641
9963
|
opts.repo !== void 0 ? Promise.resolve(opts.repo) : detectRepo(opts.runner, repoRoot),
|
|
@@ -9644,12 +9966,12 @@ async function runCompliance(opts) {
|
|
|
9644
9966
|
repoRoot,
|
|
9645
9967
|
policy,
|
|
9646
9968
|
matrixManager,
|
|
9647
|
-
|
|
9969
|
+
source: lintSource,
|
|
9648
9970
|
filter: opts.filter,
|
|
9649
9971
|
now
|
|
9650
9972
|
}) : Promise.resolve([]),
|
|
9651
9973
|
include.scan ? new ScanRunner(opts.runner).scan(repoRoot, manifest) : Promise.resolve(emptyScan()),
|
|
9652
|
-
include.lint ? new LintRunner(matrixManager, schemaValidator,
|
|
9974
|
+
include.lint ? new LintRunner(matrixManager, schemaValidator, lintSource).run(manifest, repoRoot) : Promise.resolve(emptyLint())
|
|
9653
9975
|
]);
|
|
9654
9976
|
const adjustedLint = downgradeDecryptIssues(lintResult);
|
|
9655
9977
|
const document = new ComplianceGenerator().generate({
|
|
@@ -9669,8 +9991,11 @@ async function evaluateMatrix(args) {
|
|
|
9669
9991
|
const cells = args.matrixManager.resolveMatrix(args.manifest, args.repoRoot).filter((c) => applyFilter(c.namespace, c.environment, args.filter)).filter((c) => c.exists);
|
|
9670
9992
|
return Promise.all(
|
|
9671
9993
|
cells.map(async (cell) => {
|
|
9672
|
-
const metadata = await args.
|
|
9673
|
-
|
|
9994
|
+
const metadata = await args.source.getCellMetadata({
|
|
9995
|
+
namespace: cell.namespace,
|
|
9996
|
+
environment: cell.environment
|
|
9997
|
+
});
|
|
9998
|
+
const relPath = path27.relative(args.repoRoot, cell.filePath).replace(/\\/g, "/");
|
|
9674
9999
|
const keys = readSopsKeyNames(cell.filePath) ?? [];
|
|
9675
10000
|
const rotations = await getRotations(cell.filePath);
|
|
9676
10001
|
return evaluator.evaluateFile(relPath, cell.environment, metadata, keys, rotations, args.now);
|
|
@@ -9728,10 +10053,61 @@ async function detectRepo(runner, repoRoot) {
|
|
|
9728
10053
|
const match = url.match(/[:/]([^/:]+)\/([^/]+?)(?:\.git)?\/?$/);
|
|
9729
10054
|
return match ? `${match[1]}/${match[2]}` : UNKNOWN;
|
|
9730
10055
|
}
|
|
10056
|
+
|
|
10057
|
+
// src/source/guards.ts
|
|
10058
|
+
/** True when `o` is a non-null object exposing `name` as a function. */
function isFn(o, name) {
  if (typeof o !== "object" || o === null) return false;
  return typeof o[name] === "function";
}
/** Lintable: exposes `validateEncryption` and `checkRecipientDrift`. */
function isLintable(s) {
  return ["validateEncryption", "checkRecipientDrift"].every((m) => isFn(s, m));
}
/** Rotatable: exposes `rotate`. */
function isRotatable(s) {
  return isFn(s, "rotate");
}
/** RecipientManaged: exposes list/add/remove recipient methods. */
function isRecipientManaged(s) {
  return ["listRecipients", "addRecipient", "removeRecipient"].every((m) => isFn(s, m));
}
/** MergeAware: exposes `mergeCells` and `installMergeDriver`. */
function isMergeAware(s) {
  return ["mergeCells", "installMergeDriver"].every((m) => isFn(s, m));
}
/** Migratable: exposes `migrateBackend`. */
function isMigratable(s) {
  return isFn(s, "migrateBackend");
}
/** Bulk: exposes `bulkSet`, `bulkDelete`, and `copyValue`. */
function isBulk(s) {
  return ["bulkSet", "bulkDelete", "copyValue"].every((m) => isFn(s, m));
}
/** Structural: exposes add/rename methods for namespaces and environments. */
function isStructural(s) {
  return ["addNamespace", "addEnvironment", "renameNamespace", "renameEnvironment"].every(
    (m) => isFn(s, m)
  );
}
/** Structural snapshot of every optional capability a source implements. */
function describeCapabilities(s) {
  return {
    lint: isLintable(s),
    rotate: isRotatable(s),
    recipients: isRecipientManaged(s),
    merge: isMergeAware(s),
    migrate: isMigratable(s),
    bulk: isBulk(s),
    structural: isStructural(s)
  };
}
|
|
10093
|
+
|
|
10094
|
+
// src/source/errors.ts
|
|
10095
|
+
/**
 * Raised when an operation requires an optional source capability that the
 * active source does not implement. Carries the capability name and the
 * source id so callers can render an actionable message.
 */
var SourceCapabilityUnsupportedError = class extends ClefError {
  capability;
  sourceId;
  constructor(capability, sourceId) {
    super(
      `'${capability}' is not supported by the '${sourceId}' source.`,
      `Switch to a source that implements ${capability}, or use a different command.`
    );
    this.capability = capability;
    this.sourceId = sourceId;
    this.name = "SourceCapabilityUnsupportedError";
  }
};
|
|
9731
10108
|
export {
|
|
9732
10109
|
ArtifactPacker,
|
|
9733
10110
|
BackendMigrator,
|
|
9734
|
-
BulkOps,
|
|
9735
10111
|
CLEF_MANIFEST_FILENAME,
|
|
9736
10112
|
CLEF_POLICY_FILENAME,
|
|
9737
10113
|
CLEF_REPORT_SCHEMA_VERSION,
|
|
@@ -9745,6 +10121,7 @@ export {
|
|
|
9745
10121
|
DiffEngine,
|
|
9746
10122
|
DriftDetector,
|
|
9747
10123
|
FilePackOutput,
|
|
10124
|
+
FilesystemStorageBackend,
|
|
9748
10125
|
GitIntegration,
|
|
9749
10126
|
GitOperationError,
|
|
9750
10127
|
ImportRunner,
|
|
@@ -9761,7 +10138,6 @@ export {
|
|
|
9761
10138
|
PolicyValidationError,
|
|
9762
10139
|
REQUESTS_FILENAME,
|
|
9763
10140
|
REQUIREMENTS,
|
|
9764
|
-
REVEAL_WARNING,
|
|
9765
10141
|
RecipientManager,
|
|
9766
10142
|
ReportGenerator,
|
|
9767
10143
|
ReportSanitizer,
|
|
@@ -9778,6 +10154,7 @@ export {
|
|
|
9778
10154
|
SopsMergeDriver,
|
|
9779
10155
|
SopsMissingError,
|
|
9780
10156
|
SopsVersionError,
|
|
10157
|
+
SourceCapabilityUnsupportedError,
|
|
9781
10158
|
StructureManager,
|
|
9782
10159
|
SyncManager,
|
|
9783
10160
|
TransactionLockError,
|
|
@@ -9797,11 +10174,11 @@ export {
|
|
|
9797
10174
|
checkAll,
|
|
9798
10175
|
checkDependency,
|
|
9799
10176
|
collectCIContext,
|
|
10177
|
+
composeSecretSource,
|
|
9800
10178
|
computeCiphertextHash,
|
|
9801
10179
|
deriveAgePublicKey,
|
|
10180
|
+
describeCapabilities,
|
|
9802
10181
|
describeScope,
|
|
9803
|
-
detectAlgorithm,
|
|
9804
|
-
detectFormat,
|
|
9805
10182
|
emptyTemplate,
|
|
9806
10183
|
exampleTemplate,
|
|
9807
10184
|
findRequest,
|
|
@@ -9809,35 +10186,27 @@ export {
|
|
|
9809
10186
|
formatRevealWarning,
|
|
9810
10187
|
generateAgeIdentity,
|
|
9811
10188
|
generateRandomValue,
|
|
9812
|
-
|
|
9813
|
-
getPendingKeys,
|
|
9814
|
-
getRotations,
|
|
10189
|
+
isBulk,
|
|
9815
10190
|
isClefHsmArn,
|
|
9816
|
-
isHighEntropy,
|
|
9817
10191
|
isKmsEnvelope,
|
|
10192
|
+
isLintable,
|
|
10193
|
+
isMergeAware,
|
|
10194
|
+
isMigratable,
|
|
9818
10195
|
isPackedArtifact,
|
|
9819
|
-
|
|
10196
|
+
isRecipientManaged,
|
|
10197
|
+
isRotatable,
|
|
10198
|
+
isStructural,
|
|
9820
10199
|
keyPreview,
|
|
9821
|
-
loadIgnoreRules,
|
|
9822
|
-
loadMetadata,
|
|
9823
10200
|
loadRequests,
|
|
9824
10201
|
markPending,
|
|
9825
|
-
markPendingWithRetry,
|
|
9826
10202
|
markResolved,
|
|
9827
|
-
matchPatterns,
|
|
9828
|
-
mergeMetadataContents,
|
|
9829
10203
|
mergeMetadataFiles,
|
|
9830
|
-
metadataPath,
|
|
9831
10204
|
parse9 as parse,
|
|
9832
|
-
parseDotenv,
|
|
9833
|
-
parseIgnoreContent,
|
|
9834
|
-
parseJson,
|
|
9835
10205
|
parseSignerKey,
|
|
9836
10206
|
parseYaml,
|
|
9837
10207
|
pkcs11UriToSyntheticArn,
|
|
9838
10208
|
readManifestYaml,
|
|
9839
10209
|
recordRotation,
|
|
9840
|
-
redactValue,
|
|
9841
10210
|
removeRequest as removeAccessRequest,
|
|
9842
10211
|
removeRotation,
|
|
9843
10212
|
requestsFilePath,
|
|
@@ -9849,22 +10218,18 @@ export {
|
|
|
9849
10218
|
resolveRecipientsForEnvironment,
|
|
9850
10219
|
resolveSopsPath,
|
|
9851
10220
|
runCompliance,
|
|
9852
|
-
saveMetadata,
|
|
9853
10221
|
saveRequests,
|
|
9854
|
-
|
|
9855
|
-
shannonEntropy,
|
|
9856
|
-
shouldIgnoreFile,
|
|
9857
|
-
shouldIgnoreMatch,
|
|
9858
|
-
signEd25519,
|
|
9859
|
-
signKms,
|
|
10222
|
+
shouldUseLinuxStdinFifo,
|
|
9860
10223
|
spawnKeyservice,
|
|
9861
10224
|
syntheticArnToPkcs11Uri,
|
|
9862
10225
|
tryBundledKeyservice,
|
|
9863
10226
|
upsertRequest,
|
|
9864
10227
|
validateAgePublicKey,
|
|
10228
|
+
validateAwsKmsArn,
|
|
9865
10229
|
validatePackedArtifact,
|
|
9866
10230
|
validateResetScope,
|
|
9867
10231
|
verifySignature,
|
|
10232
|
+
wrapWithLinuxStdinFifo,
|
|
9868
10233
|
writeManifestYaml,
|
|
9869
10234
|
writeManifestYamlRaw,
|
|
9870
10235
|
writeSchema,
|