@clef-sh/core 0.1.27 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -2
- package/dist/artifact/packer.d.ts +4 -3
- package/dist/artifact/packer.d.ts.map +1 -1
- package/dist/artifact/resolve.d.ts +3 -2
- package/dist/artifact/resolve.d.ts.map +1 -1
- package/dist/compliance/run.d.ts.map +1 -1
- package/dist/diff/engine.d.ts +18 -8
- package/dist/diff/engine.d.ts.map +1 -1
- package/dist/import/index.d.ts +5 -5
- package/dist/import/index.d.ts.map +1 -1
- package/dist/index.d.mts +14 -12
- package/dist/index.d.ts +14 -12
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1225 -872
- package/dist/index.js.map +4 -4
- package/dist/index.mjs +1213 -848
- package/dist/index.mjs.map +4 -4
- package/dist/kms/aws-arn.d.ts +29 -0
- package/dist/kms/aws-arn.d.ts.map +1 -0
- package/dist/kms/index.d.ts +2 -0
- package/dist/kms/index.d.ts.map +1 -1
- package/dist/lint/runner.d.ts +7 -7
- package/dist/lint/runner.d.ts.map +1 -1
- package/dist/manifest/io.d.ts +6 -0
- package/dist/manifest/io.d.ts.map +1 -1
- package/dist/manifest/parser.d.ts.map +1 -1
- package/dist/matrix/manager.d.ts +4 -16
- package/dist/matrix/manager.d.ts.map +1 -1
- package/dist/merge/driver.d.ts +2 -2
- package/dist/merge/driver.d.ts.map +1 -1
- package/dist/merge/metadata-driver.d.ts +5 -4
- package/dist/merge/metadata-driver.d.ts.map +1 -1
- package/dist/migration/backend.d.ts +10 -7
- package/dist/migration/backend.d.ts.map +1 -1
- package/dist/pack/backends/json-envelope.d.ts.map +1 -1
- package/dist/pack/types.d.ts +9 -3
- package/dist/pack/types.d.ts.map +1 -1
- package/dist/pending/metadata.d.ts +1 -3
- package/dist/pending/metadata.d.ts.map +1 -1
- package/dist/recipients/index.d.ts +4 -3
- package/dist/recipients/index.d.ts.map +1 -1
- package/dist/report/generator.d.ts +4 -3
- package/dist/report/generator.d.ts.map +1 -1
- package/dist/reset/manager.d.ts +21 -3
- package/dist/reset/manager.d.ts.map +1 -1
- package/dist/service-identity/manager.d.ts +6 -3
- package/dist/service-identity/manager.d.ts.map +1 -1
- package/dist/sops/client.d.ts +80 -55
- package/dist/sops/client.d.ts.map +1 -1
- package/dist/sops/linux-stdin-fifo.d.ts +31 -0
- package/dist/sops/linux-stdin-fifo.d.ts.map +1 -0
- package/dist/source/compose.d.ts +10 -0
- package/dist/source/compose.d.ts.map +1 -0
- package/dist/source/default-bulk.d.ts +12 -0
- package/dist/source/default-bulk.d.ts.map +1 -0
- package/dist/source/encryption-backend.d.ts +85 -0
- package/dist/source/encryption-backend.d.ts.map +1 -0
- package/dist/source/errors.d.ts +19 -0
- package/dist/source/errors.d.ts.map +1 -0
- package/dist/source/filesystem-storage-backend.d.ts +26 -0
- package/dist/source/filesystem-storage-backend.d.ts.map +1 -0
- package/dist/source/guards.d.ts +14 -0
- package/dist/source/guards.d.ts.map +1 -0
- package/dist/source/index.d.ts +10 -0
- package/dist/source/index.d.ts.map +1 -0
- package/dist/source/mock-source.d.ts +89 -0
- package/dist/source/mock-source.d.ts.map +1 -0
- package/dist/source/storage-backend.d.ts +61 -0
- package/dist/source/storage-backend.d.ts.map +1 -0
- package/dist/source/types.d.ts +212 -0
- package/dist/source/types.d.ts.map +1 -0
- package/dist/structure/manager.d.ts +17 -3
- package/dist/structure/manager.d.ts.map +1 -1
- package/dist/sync/manager.d.ts +7 -6
- package/dist/sync/manager.d.ts.map +1 -1
- package/dist/types/index.d.ts +10 -23
- package/dist/types/index.d.ts.map +1 -1
- package/package.json +3 -3
- package/dist/bulk/ops.d.ts +0 -57
- package/dist/bulk/ops.d.ts.map +0 -1
package/dist/index.js
CHANGED
|
@@ -301,13 +301,13 @@ var require_lib = __commonJS({
|
|
|
301
301
|
"../../node_modules/write-file-atomic/lib/index.js"(exports2, module2) {
|
|
302
302
|
"use strict";
|
|
303
303
|
module2.exports = writeFile;
|
|
304
|
-
module2.exports.sync =
|
|
304
|
+
module2.exports.sync = writeFileSync8;
|
|
305
305
|
module2.exports._getTmpname = getTmpname;
|
|
306
306
|
module2.exports._cleanupOnExit = cleanupOnExit;
|
|
307
|
-
var
|
|
307
|
+
var fs23 = require("fs");
|
|
308
308
|
var crypto7 = require("node:crypto");
|
|
309
309
|
var { onExit } = require_cjs();
|
|
310
|
-
var
|
|
310
|
+
var path28 = require("path");
|
|
311
311
|
var { promisify } = require("util");
|
|
312
312
|
var activeFiles = {};
|
|
313
313
|
var threadId = (function getId() {
|
|
@@ -325,7 +325,7 @@ var require_lib = __commonJS({
|
|
|
325
325
|
function cleanupOnExit(tmpfile) {
|
|
326
326
|
return () => {
|
|
327
327
|
try {
|
|
328
|
-
|
|
328
|
+
fs23.unlinkSync(typeof tmpfile === "function" ? tmpfile() : tmpfile);
|
|
329
329
|
} catch {
|
|
330
330
|
}
|
|
331
331
|
};
|
|
@@ -360,13 +360,13 @@ var require_lib = __commonJS({
|
|
|
360
360
|
let fd;
|
|
361
361
|
let tmpfile;
|
|
362
362
|
const removeOnExitHandler = onExit(cleanupOnExit(() => tmpfile));
|
|
363
|
-
const absoluteName =
|
|
363
|
+
const absoluteName = path28.resolve(filename);
|
|
364
364
|
try {
|
|
365
365
|
await serializeActiveFile(absoluteName);
|
|
366
|
-
const truename = await promisify(
|
|
366
|
+
const truename = await promisify(fs23.realpath)(filename).catch(() => filename);
|
|
367
367
|
tmpfile = getTmpname(truename);
|
|
368
368
|
if (!options.mode || !options.chown) {
|
|
369
|
-
const stats = await promisify(
|
|
369
|
+
const stats = await promisify(fs23.stat)(truename).catch(() => {
|
|
370
370
|
});
|
|
371
371
|
if (stats) {
|
|
372
372
|
if (options.mode == null) {
|
|
@@ -377,45 +377,45 @@ var require_lib = __commonJS({
|
|
|
377
377
|
}
|
|
378
378
|
}
|
|
379
379
|
}
|
|
380
|
-
fd = await promisify(
|
|
380
|
+
fd = await promisify(fs23.open)(tmpfile, "w", options.mode);
|
|
381
381
|
if (options.tmpfileCreated) {
|
|
382
382
|
await options.tmpfileCreated(tmpfile);
|
|
383
383
|
}
|
|
384
384
|
if (ArrayBuffer.isView(data)) {
|
|
385
|
-
await promisify(
|
|
385
|
+
await promisify(fs23.write)(fd, data, 0, data.length, 0);
|
|
386
386
|
} else if (data != null) {
|
|
387
|
-
await promisify(
|
|
387
|
+
await promisify(fs23.write)(fd, String(data), 0, String(options.encoding || "utf8"));
|
|
388
388
|
}
|
|
389
389
|
if (options.fsync !== false) {
|
|
390
|
-
await promisify(
|
|
390
|
+
await promisify(fs23.fsync)(fd);
|
|
391
391
|
}
|
|
392
|
-
await promisify(
|
|
392
|
+
await promisify(fs23.close)(fd);
|
|
393
393
|
fd = null;
|
|
394
394
|
if (options.chown) {
|
|
395
|
-
await promisify(
|
|
395
|
+
await promisify(fs23.chown)(tmpfile, options.chown.uid, options.chown.gid).catch((err) => {
|
|
396
396
|
if (!isChownErrOk(err)) {
|
|
397
397
|
throw err;
|
|
398
398
|
}
|
|
399
399
|
});
|
|
400
400
|
}
|
|
401
401
|
if (options.mode) {
|
|
402
|
-
await promisify(
|
|
402
|
+
await promisify(fs23.chmod)(tmpfile, options.mode).catch((err) => {
|
|
403
403
|
if (!isChownErrOk(err)) {
|
|
404
404
|
throw err;
|
|
405
405
|
}
|
|
406
406
|
});
|
|
407
407
|
}
|
|
408
|
-
await promisify(
|
|
408
|
+
await promisify(fs23.rename)(tmpfile, truename);
|
|
409
409
|
} finally {
|
|
410
410
|
if (fd) {
|
|
411
|
-
await promisify(
|
|
411
|
+
await promisify(fs23.close)(fd).catch(
|
|
412
412
|
/* istanbul ignore next */
|
|
413
413
|
() => {
|
|
414
414
|
}
|
|
415
415
|
);
|
|
416
416
|
}
|
|
417
417
|
removeOnExitHandler();
|
|
418
|
-
await promisify(
|
|
418
|
+
await promisify(fs23.unlink)(tmpfile).catch(() => {
|
|
419
419
|
});
|
|
420
420
|
activeFiles[absoluteName].shift();
|
|
421
421
|
if (activeFiles[absoluteName].length > 0) {
|
|
@@ -441,20 +441,20 @@ var require_lib = __commonJS({
|
|
|
441
441
|
}
|
|
442
442
|
return promise;
|
|
443
443
|
}
|
|
444
|
-
function
|
|
444
|
+
function writeFileSync8(filename, data, options) {
|
|
445
445
|
if (typeof options === "string") {
|
|
446
446
|
options = { encoding: options };
|
|
447
447
|
} else if (!options) {
|
|
448
448
|
options = {};
|
|
449
449
|
}
|
|
450
450
|
try {
|
|
451
|
-
filename =
|
|
451
|
+
filename = fs23.realpathSync(filename);
|
|
452
452
|
} catch (ex) {
|
|
453
453
|
}
|
|
454
454
|
const tmpfile = getTmpname(filename);
|
|
455
455
|
if (!options.mode || !options.chown) {
|
|
456
456
|
try {
|
|
457
|
-
const stats =
|
|
457
|
+
const stats = fs23.statSync(filename);
|
|
458
458
|
options = Object.assign({}, options);
|
|
459
459
|
if (!options.mode) {
|
|
460
460
|
options.mode = stats.mode;
|
|
@@ -470,23 +470,23 @@ var require_lib = __commonJS({
|
|
|
470
470
|
const removeOnExitHandler = onExit(cleanup);
|
|
471
471
|
let threw = true;
|
|
472
472
|
try {
|
|
473
|
-
fd =
|
|
473
|
+
fd = fs23.openSync(tmpfile, "w", options.mode || 438);
|
|
474
474
|
if (options.tmpfileCreated) {
|
|
475
475
|
options.tmpfileCreated(tmpfile);
|
|
476
476
|
}
|
|
477
477
|
if (ArrayBuffer.isView(data)) {
|
|
478
|
-
|
|
478
|
+
fs23.writeSync(fd, data, 0, data.length, 0);
|
|
479
479
|
} else if (data != null) {
|
|
480
|
-
|
|
480
|
+
fs23.writeSync(fd, String(data), 0, String(options.encoding || "utf8"));
|
|
481
481
|
}
|
|
482
482
|
if (options.fsync !== false) {
|
|
483
|
-
|
|
483
|
+
fs23.fsyncSync(fd);
|
|
484
484
|
}
|
|
485
|
-
|
|
485
|
+
fs23.closeSync(fd);
|
|
486
486
|
fd = null;
|
|
487
487
|
if (options.chown) {
|
|
488
488
|
try {
|
|
489
|
-
|
|
489
|
+
fs23.chownSync(tmpfile, options.chown.uid, options.chown.gid);
|
|
490
490
|
} catch (err) {
|
|
491
491
|
if (!isChownErrOk(err)) {
|
|
492
492
|
throw err;
|
|
@@ -495,19 +495,19 @@ var require_lib = __commonJS({
|
|
|
495
495
|
}
|
|
496
496
|
if (options.mode) {
|
|
497
497
|
try {
|
|
498
|
-
|
|
498
|
+
fs23.chmodSync(tmpfile, options.mode);
|
|
499
499
|
} catch (err) {
|
|
500
500
|
if (!isChownErrOk(err)) {
|
|
501
501
|
throw err;
|
|
502
502
|
}
|
|
503
503
|
}
|
|
504
504
|
}
|
|
505
|
-
|
|
505
|
+
fs23.renameSync(tmpfile, filename);
|
|
506
506
|
threw = false;
|
|
507
507
|
} finally {
|
|
508
508
|
if (fd) {
|
|
509
509
|
try {
|
|
510
|
-
|
|
510
|
+
fs23.closeSync(fd);
|
|
511
511
|
} catch (ex) {
|
|
512
512
|
}
|
|
513
513
|
}
|
|
@@ -546,54 +546,54 @@ var require_polyfills = __commonJS({
|
|
|
546
546
|
}
|
|
547
547
|
var chdir;
|
|
548
548
|
module2.exports = patch;
|
|
549
|
-
function patch(
|
|
549
|
+
function patch(fs23) {
|
|
550
550
|
if (constants.hasOwnProperty("O_SYMLINK") && process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
|
|
551
|
-
patchLchmod(
|
|
552
|
-
}
|
|
553
|
-
if (!
|
|
554
|
-
patchLutimes(
|
|
555
|
-
}
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
if (
|
|
575
|
-
|
|
551
|
+
patchLchmod(fs23);
|
|
552
|
+
}
|
|
553
|
+
if (!fs23.lutimes) {
|
|
554
|
+
patchLutimes(fs23);
|
|
555
|
+
}
|
|
556
|
+
fs23.chown = chownFix(fs23.chown);
|
|
557
|
+
fs23.fchown = chownFix(fs23.fchown);
|
|
558
|
+
fs23.lchown = chownFix(fs23.lchown);
|
|
559
|
+
fs23.chmod = chmodFix(fs23.chmod);
|
|
560
|
+
fs23.fchmod = chmodFix(fs23.fchmod);
|
|
561
|
+
fs23.lchmod = chmodFix(fs23.lchmod);
|
|
562
|
+
fs23.chownSync = chownFixSync(fs23.chownSync);
|
|
563
|
+
fs23.fchownSync = chownFixSync(fs23.fchownSync);
|
|
564
|
+
fs23.lchownSync = chownFixSync(fs23.lchownSync);
|
|
565
|
+
fs23.chmodSync = chmodFixSync(fs23.chmodSync);
|
|
566
|
+
fs23.fchmodSync = chmodFixSync(fs23.fchmodSync);
|
|
567
|
+
fs23.lchmodSync = chmodFixSync(fs23.lchmodSync);
|
|
568
|
+
fs23.stat = statFix(fs23.stat);
|
|
569
|
+
fs23.fstat = statFix(fs23.fstat);
|
|
570
|
+
fs23.lstat = statFix(fs23.lstat);
|
|
571
|
+
fs23.statSync = statFixSync(fs23.statSync);
|
|
572
|
+
fs23.fstatSync = statFixSync(fs23.fstatSync);
|
|
573
|
+
fs23.lstatSync = statFixSync(fs23.lstatSync);
|
|
574
|
+
if (fs23.chmod && !fs23.lchmod) {
|
|
575
|
+
fs23.lchmod = function(path28, mode, cb) {
|
|
576
576
|
if (cb) process.nextTick(cb);
|
|
577
577
|
};
|
|
578
|
-
|
|
578
|
+
fs23.lchmodSync = function() {
|
|
579
579
|
};
|
|
580
580
|
}
|
|
581
|
-
if (
|
|
582
|
-
|
|
581
|
+
if (fs23.chown && !fs23.lchown) {
|
|
582
|
+
fs23.lchown = function(path28, uid, gid, cb) {
|
|
583
583
|
if (cb) process.nextTick(cb);
|
|
584
584
|
};
|
|
585
|
-
|
|
585
|
+
fs23.lchownSync = function() {
|
|
586
586
|
};
|
|
587
587
|
}
|
|
588
588
|
if (platform === "win32") {
|
|
589
|
-
|
|
589
|
+
fs23.rename = typeof fs23.rename !== "function" ? fs23.rename : (function(fs$rename) {
|
|
590
590
|
function rename(from, to, cb) {
|
|
591
591
|
var start = Date.now();
|
|
592
592
|
var backoff = 0;
|
|
593
593
|
fs$rename(from, to, function CB(er) {
|
|
594
594
|
if (er && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") && Date.now() - start < 6e4) {
|
|
595
595
|
setTimeout(function() {
|
|
596
|
-
|
|
596
|
+
fs23.stat(to, function(stater, st) {
|
|
597
597
|
if (stater && stater.code === "ENOENT")
|
|
598
598
|
fs$rename(from, to, CB);
|
|
599
599
|
else
|
|
@@ -609,9 +609,9 @@ var require_polyfills = __commonJS({
|
|
|
609
609
|
}
|
|
610
610
|
if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename);
|
|
611
611
|
return rename;
|
|
612
|
-
})(
|
|
612
|
+
})(fs23.rename);
|
|
613
613
|
}
|
|
614
|
-
|
|
614
|
+
fs23.read = typeof fs23.read !== "function" ? fs23.read : (function(fs$read) {
|
|
615
615
|
function read(fd, buffer, offset, length, position, callback_) {
|
|
616
616
|
var callback;
|
|
617
617
|
if (callback_ && typeof callback_ === "function") {
|
|
@@ -619,22 +619,22 @@ var require_polyfills = __commonJS({
|
|
|
619
619
|
callback = function(er, _, __) {
|
|
620
620
|
if (er && er.code === "EAGAIN" && eagCounter < 10) {
|
|
621
621
|
eagCounter++;
|
|
622
|
-
return fs$read.call(
|
|
622
|
+
return fs$read.call(fs23, fd, buffer, offset, length, position, callback);
|
|
623
623
|
}
|
|
624
624
|
callback_.apply(this, arguments);
|
|
625
625
|
};
|
|
626
626
|
}
|
|
627
|
-
return fs$read.call(
|
|
627
|
+
return fs$read.call(fs23, fd, buffer, offset, length, position, callback);
|
|
628
628
|
}
|
|
629
629
|
if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read);
|
|
630
630
|
return read;
|
|
631
|
-
})(
|
|
632
|
-
|
|
631
|
+
})(fs23.read);
|
|
632
|
+
fs23.readSync = typeof fs23.readSync !== "function" ? fs23.readSync : /* @__PURE__ */ (function(fs$readSync) {
|
|
633
633
|
return function(fd, buffer, offset, length, position) {
|
|
634
634
|
var eagCounter = 0;
|
|
635
635
|
while (true) {
|
|
636
636
|
try {
|
|
637
|
-
return fs$readSync.call(
|
|
637
|
+
return fs$readSync.call(fs23, fd, buffer, offset, length, position);
|
|
638
638
|
} catch (er) {
|
|
639
639
|
if (er.code === "EAGAIN" && eagCounter < 10) {
|
|
640
640
|
eagCounter++;
|
|
@@ -644,11 +644,11 @@ var require_polyfills = __commonJS({
|
|
|
644
644
|
}
|
|
645
645
|
}
|
|
646
646
|
};
|
|
647
|
-
})(
|
|
648
|
-
function patchLchmod(
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
647
|
+
})(fs23.readSync);
|
|
648
|
+
function patchLchmod(fs24) {
|
|
649
|
+
fs24.lchmod = function(path28, mode, callback) {
|
|
650
|
+
fs24.open(
|
|
651
|
+
path28,
|
|
652
652
|
constants.O_WRONLY | constants.O_SYMLINK,
|
|
653
653
|
mode,
|
|
654
654
|
function(err, fd) {
|
|
@@ -656,80 +656,80 @@ var require_polyfills = __commonJS({
|
|
|
656
656
|
if (callback) callback(err);
|
|
657
657
|
return;
|
|
658
658
|
}
|
|
659
|
-
|
|
660
|
-
|
|
659
|
+
fs24.fchmod(fd, mode, function(err2) {
|
|
660
|
+
fs24.close(fd, function(err22) {
|
|
661
661
|
if (callback) callback(err2 || err22);
|
|
662
662
|
});
|
|
663
663
|
});
|
|
664
664
|
}
|
|
665
665
|
);
|
|
666
666
|
};
|
|
667
|
-
|
|
668
|
-
var fd =
|
|
667
|
+
fs24.lchmodSync = function(path28, mode) {
|
|
668
|
+
var fd = fs24.openSync(path28, constants.O_WRONLY | constants.O_SYMLINK, mode);
|
|
669
669
|
var threw = true;
|
|
670
670
|
var ret;
|
|
671
671
|
try {
|
|
672
|
-
ret =
|
|
672
|
+
ret = fs24.fchmodSync(fd, mode);
|
|
673
673
|
threw = false;
|
|
674
674
|
} finally {
|
|
675
675
|
if (threw) {
|
|
676
676
|
try {
|
|
677
|
-
|
|
677
|
+
fs24.closeSync(fd);
|
|
678
678
|
} catch (er) {
|
|
679
679
|
}
|
|
680
680
|
} else {
|
|
681
|
-
|
|
681
|
+
fs24.closeSync(fd);
|
|
682
682
|
}
|
|
683
683
|
}
|
|
684
684
|
return ret;
|
|
685
685
|
};
|
|
686
686
|
}
|
|
687
|
-
function patchLutimes(
|
|
688
|
-
if (constants.hasOwnProperty("O_SYMLINK") &&
|
|
689
|
-
|
|
690
|
-
|
|
687
|
+
function patchLutimes(fs24) {
|
|
688
|
+
if (constants.hasOwnProperty("O_SYMLINK") && fs24.futimes) {
|
|
689
|
+
fs24.lutimes = function(path28, at, mt, cb) {
|
|
690
|
+
fs24.open(path28, constants.O_SYMLINK, function(er, fd) {
|
|
691
691
|
if (er) {
|
|
692
692
|
if (cb) cb(er);
|
|
693
693
|
return;
|
|
694
694
|
}
|
|
695
|
-
|
|
696
|
-
|
|
695
|
+
fs24.futimes(fd, at, mt, function(er2) {
|
|
696
|
+
fs24.close(fd, function(er22) {
|
|
697
697
|
if (cb) cb(er2 || er22);
|
|
698
698
|
});
|
|
699
699
|
});
|
|
700
700
|
});
|
|
701
701
|
};
|
|
702
|
-
|
|
703
|
-
var fd =
|
|
702
|
+
fs24.lutimesSync = function(path28, at, mt) {
|
|
703
|
+
var fd = fs24.openSync(path28, constants.O_SYMLINK);
|
|
704
704
|
var ret;
|
|
705
705
|
var threw = true;
|
|
706
706
|
try {
|
|
707
|
-
ret =
|
|
707
|
+
ret = fs24.futimesSync(fd, at, mt);
|
|
708
708
|
threw = false;
|
|
709
709
|
} finally {
|
|
710
710
|
if (threw) {
|
|
711
711
|
try {
|
|
712
|
-
|
|
712
|
+
fs24.closeSync(fd);
|
|
713
713
|
} catch (er) {
|
|
714
714
|
}
|
|
715
715
|
} else {
|
|
716
|
-
|
|
716
|
+
fs24.closeSync(fd);
|
|
717
717
|
}
|
|
718
718
|
}
|
|
719
719
|
return ret;
|
|
720
720
|
};
|
|
721
|
-
} else if (
|
|
722
|
-
|
|
721
|
+
} else if (fs24.futimes) {
|
|
722
|
+
fs24.lutimes = function(_a, _b, _c, cb) {
|
|
723
723
|
if (cb) process.nextTick(cb);
|
|
724
724
|
};
|
|
725
|
-
|
|
725
|
+
fs24.lutimesSync = function() {
|
|
726
726
|
};
|
|
727
727
|
}
|
|
728
728
|
}
|
|
729
729
|
function chmodFix(orig) {
|
|
730
730
|
if (!orig) return orig;
|
|
731
731
|
return function(target, mode, cb) {
|
|
732
|
-
return orig.call(
|
|
732
|
+
return orig.call(fs23, target, mode, function(er) {
|
|
733
733
|
if (chownErOk(er)) er = null;
|
|
734
734
|
if (cb) cb.apply(this, arguments);
|
|
735
735
|
});
|
|
@@ -739,7 +739,7 @@ var require_polyfills = __commonJS({
|
|
|
739
739
|
if (!orig) return orig;
|
|
740
740
|
return function(target, mode) {
|
|
741
741
|
try {
|
|
742
|
-
return orig.call(
|
|
742
|
+
return orig.call(fs23, target, mode);
|
|
743
743
|
} catch (er) {
|
|
744
744
|
if (!chownErOk(er)) throw er;
|
|
745
745
|
}
|
|
@@ -748,7 +748,7 @@ var require_polyfills = __commonJS({
|
|
|
748
748
|
function chownFix(orig) {
|
|
749
749
|
if (!orig) return orig;
|
|
750
750
|
return function(target, uid, gid, cb) {
|
|
751
|
-
return orig.call(
|
|
751
|
+
return orig.call(fs23, target, uid, gid, function(er) {
|
|
752
752
|
if (chownErOk(er)) er = null;
|
|
753
753
|
if (cb) cb.apply(this, arguments);
|
|
754
754
|
});
|
|
@@ -758,7 +758,7 @@ var require_polyfills = __commonJS({
|
|
|
758
758
|
if (!orig) return orig;
|
|
759
759
|
return function(target, uid, gid) {
|
|
760
760
|
try {
|
|
761
|
-
return orig.call(
|
|
761
|
+
return orig.call(fs23, target, uid, gid);
|
|
762
762
|
} catch (er) {
|
|
763
763
|
if (!chownErOk(er)) throw er;
|
|
764
764
|
}
|
|
@@ -778,13 +778,13 @@ var require_polyfills = __commonJS({
|
|
|
778
778
|
}
|
|
779
779
|
if (cb) cb.apply(this, arguments);
|
|
780
780
|
}
|
|
781
|
-
return options ? orig.call(
|
|
781
|
+
return options ? orig.call(fs23, target, options, callback) : orig.call(fs23, target, callback);
|
|
782
782
|
};
|
|
783
783
|
}
|
|
784
784
|
function statFixSync(orig) {
|
|
785
785
|
if (!orig) return orig;
|
|
786
786
|
return function(target, options) {
|
|
787
|
-
var stats = options ? orig.call(
|
|
787
|
+
var stats = options ? orig.call(fs23, target, options) : orig.call(fs23, target);
|
|
788
788
|
if (stats) {
|
|
789
789
|
if (stats.uid < 0) stats.uid += 4294967296;
|
|
790
790
|
if (stats.gid < 0) stats.gid += 4294967296;
|
|
@@ -813,16 +813,16 @@ var require_legacy_streams = __commonJS({
|
|
|
813
813
|
"../../node_modules/graceful-fs/legacy-streams.js"(exports2, module2) {
|
|
814
814
|
var Stream = require("stream").Stream;
|
|
815
815
|
module2.exports = legacy;
|
|
816
|
-
function legacy(
|
|
816
|
+
function legacy(fs23) {
|
|
817
817
|
return {
|
|
818
818
|
ReadStream,
|
|
819
819
|
WriteStream
|
|
820
820
|
};
|
|
821
|
-
function ReadStream(
|
|
822
|
-
if (!(this instanceof ReadStream)) return new ReadStream(
|
|
821
|
+
function ReadStream(path28, options) {
|
|
822
|
+
if (!(this instanceof ReadStream)) return new ReadStream(path28, options);
|
|
823
823
|
Stream.call(this);
|
|
824
824
|
var self = this;
|
|
825
|
-
this.path =
|
|
825
|
+
this.path = path28;
|
|
826
826
|
this.fd = null;
|
|
827
827
|
this.readable = true;
|
|
828
828
|
this.paused = false;
|
|
@@ -856,7 +856,7 @@ var require_legacy_streams = __commonJS({
|
|
|
856
856
|
});
|
|
857
857
|
return;
|
|
858
858
|
}
|
|
859
|
-
|
|
859
|
+
fs23.open(this.path, this.flags, this.mode, function(err, fd) {
|
|
860
860
|
if (err) {
|
|
861
861
|
self.emit("error", err);
|
|
862
862
|
self.readable = false;
|
|
@@ -867,10 +867,10 @@ var require_legacy_streams = __commonJS({
|
|
|
867
867
|
self._read();
|
|
868
868
|
});
|
|
869
869
|
}
|
|
870
|
-
function WriteStream(
|
|
871
|
-
if (!(this instanceof WriteStream)) return new WriteStream(
|
|
870
|
+
function WriteStream(path28, options) {
|
|
871
|
+
if (!(this instanceof WriteStream)) return new WriteStream(path28, options);
|
|
872
872
|
Stream.call(this);
|
|
873
|
-
this.path =
|
|
873
|
+
this.path = path28;
|
|
874
874
|
this.fd = null;
|
|
875
875
|
this.writable = true;
|
|
876
876
|
this.flags = "w";
|
|
@@ -895,7 +895,7 @@ var require_legacy_streams = __commonJS({
|
|
|
895
895
|
this.busy = false;
|
|
896
896
|
this._queue = [];
|
|
897
897
|
if (this.fd === null) {
|
|
898
|
-
this._open =
|
|
898
|
+
this._open = fs23.open;
|
|
899
899
|
this._queue.push([this._open, this.path, this.flags, this.mode, void 0]);
|
|
900
900
|
this.flush();
|
|
901
901
|
}
|
|
@@ -930,7 +930,7 @@ var require_clone = __commonJS({
|
|
|
930
930
|
// ../../node_modules/graceful-fs/graceful-fs.js
|
|
931
931
|
var require_graceful_fs = __commonJS({
|
|
932
932
|
"../../node_modules/graceful-fs/graceful-fs.js"(exports2, module2) {
|
|
933
|
-
var
|
|
933
|
+
var fs23 = require("fs");
|
|
934
934
|
var polyfills = require_polyfills();
|
|
935
935
|
var legacy = require_legacy_streams();
|
|
936
936
|
var clone = require_clone();
|
|
@@ -962,12 +962,12 @@ var require_graceful_fs = __commonJS({
|
|
|
962
962
|
m = "GFS4: " + m.split(/\n/).join("\nGFS4: ");
|
|
963
963
|
console.error(m);
|
|
964
964
|
};
|
|
965
|
-
if (!
|
|
965
|
+
if (!fs23[gracefulQueue]) {
|
|
966
966
|
queue = global[gracefulQueue] || [];
|
|
967
|
-
publishQueue(
|
|
968
|
-
|
|
967
|
+
publishQueue(fs23, queue);
|
|
968
|
+
fs23.close = (function(fs$close) {
|
|
969
969
|
function close(fd, cb) {
|
|
970
|
-
return fs$close.call(
|
|
970
|
+
return fs$close.call(fs23, fd, function(err) {
|
|
971
971
|
if (!err) {
|
|
972
972
|
resetQueue();
|
|
973
973
|
}
|
|
@@ -979,48 +979,48 @@ var require_graceful_fs = __commonJS({
|
|
|
979
979
|
value: fs$close
|
|
980
980
|
});
|
|
981
981
|
return close;
|
|
982
|
-
})(
|
|
983
|
-
|
|
984
|
-
function
|
|
985
|
-
fs$closeSync.apply(
|
|
982
|
+
})(fs23.close);
|
|
983
|
+
fs23.closeSync = (function(fs$closeSync) {
|
|
984
|
+
function closeSync3(fd) {
|
|
985
|
+
fs$closeSync.apply(fs23, arguments);
|
|
986
986
|
resetQueue();
|
|
987
987
|
}
|
|
988
|
-
Object.defineProperty(
|
|
988
|
+
Object.defineProperty(closeSync3, previousSymbol, {
|
|
989
989
|
value: fs$closeSync
|
|
990
990
|
});
|
|
991
|
-
return
|
|
992
|
-
})(
|
|
991
|
+
return closeSync3;
|
|
992
|
+
})(fs23.closeSync);
|
|
993
993
|
if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || "")) {
|
|
994
994
|
process.on("exit", function() {
|
|
995
|
-
debug(
|
|
996
|
-
require("assert").equal(
|
|
995
|
+
debug(fs23[gracefulQueue]);
|
|
996
|
+
require("assert").equal(fs23[gracefulQueue].length, 0);
|
|
997
997
|
});
|
|
998
998
|
}
|
|
999
999
|
}
|
|
1000
1000
|
var queue;
|
|
1001
1001
|
if (!global[gracefulQueue]) {
|
|
1002
|
-
publishQueue(global,
|
|
1003
|
-
}
|
|
1004
|
-
module2.exports = patch(clone(
|
|
1005
|
-
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !
|
|
1006
|
-
module2.exports = patch(
|
|
1007
|
-
|
|
1008
|
-
}
|
|
1009
|
-
function patch(
|
|
1010
|
-
polyfills(
|
|
1011
|
-
|
|
1012
|
-
|
|
1013
|
-
|
|
1014
|
-
var fs$readFile =
|
|
1015
|
-
|
|
1016
|
-
function readFile(
|
|
1002
|
+
publishQueue(global, fs23[gracefulQueue]);
|
|
1003
|
+
}
|
|
1004
|
+
module2.exports = patch(clone(fs23));
|
|
1005
|
+
if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs23.__patched) {
|
|
1006
|
+
module2.exports = patch(fs23);
|
|
1007
|
+
fs23.__patched = true;
|
|
1008
|
+
}
|
|
1009
|
+
function patch(fs24) {
|
|
1010
|
+
polyfills(fs24);
|
|
1011
|
+
fs24.gracefulify = patch;
|
|
1012
|
+
fs24.createReadStream = createReadStream;
|
|
1013
|
+
fs24.createWriteStream = createWriteStream;
|
|
1014
|
+
var fs$readFile = fs24.readFile;
|
|
1015
|
+
fs24.readFile = readFile;
|
|
1016
|
+
function readFile(path28, options, cb) {
|
|
1017
1017
|
if (typeof options === "function")
|
|
1018
1018
|
cb = options, options = null;
|
|
1019
|
-
return go$readFile(
|
|
1020
|
-
function go$readFile(
|
|
1021
|
-
return fs$readFile(
|
|
1019
|
+
return go$readFile(path28, options, cb);
|
|
1020
|
+
function go$readFile(path29, options2, cb2, startTime) {
|
|
1021
|
+
return fs$readFile(path29, options2, function(err) {
|
|
1022
1022
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1023
|
-
enqueue([go$readFile, [
|
|
1023
|
+
enqueue([go$readFile, [path29, options2, cb2], err, startTime || Date.now(), Date.now()]);
|
|
1024
1024
|
else {
|
|
1025
1025
|
if (typeof cb2 === "function")
|
|
1026
1026
|
cb2.apply(this, arguments);
|
|
@@ -1028,16 +1028,16 @@ var require_graceful_fs = __commonJS({
|
|
|
1028
1028
|
});
|
|
1029
1029
|
}
|
|
1030
1030
|
}
|
|
1031
|
-
var fs$writeFile =
|
|
1032
|
-
|
|
1033
|
-
function writeFile(
|
|
1031
|
+
var fs$writeFile = fs24.writeFile;
|
|
1032
|
+
fs24.writeFile = writeFile;
|
|
1033
|
+
function writeFile(path28, data, options, cb) {
|
|
1034
1034
|
if (typeof options === "function")
|
|
1035
1035
|
cb = options, options = null;
|
|
1036
|
-
return go$writeFile(
|
|
1037
|
-
function go$writeFile(
|
|
1038
|
-
return fs$writeFile(
|
|
1036
|
+
return go$writeFile(path28, data, options, cb);
|
|
1037
|
+
function go$writeFile(path29, data2, options2, cb2, startTime) {
|
|
1038
|
+
return fs$writeFile(path29, data2, options2, function(err) {
|
|
1039
1039
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1040
|
-
enqueue([go$writeFile, [
|
|
1040
|
+
enqueue([go$writeFile, [path29, data2, options2, cb2], err, startTime || Date.now(), Date.now()]);
|
|
1041
1041
|
else {
|
|
1042
1042
|
if (typeof cb2 === "function")
|
|
1043
1043
|
cb2.apply(this, arguments);
|
|
@@ -1045,17 +1045,17 @@ var require_graceful_fs = __commonJS({
|
|
|
1045
1045
|
});
|
|
1046
1046
|
}
|
|
1047
1047
|
}
|
|
1048
|
-
var fs$appendFile =
|
|
1048
|
+
var fs$appendFile = fs24.appendFile;
|
|
1049
1049
|
if (fs$appendFile)
|
|
1050
|
-
|
|
1051
|
-
function appendFile(
|
|
1050
|
+
fs24.appendFile = appendFile;
|
|
1051
|
+
function appendFile(path28, data, options, cb) {
|
|
1052
1052
|
if (typeof options === "function")
|
|
1053
1053
|
cb = options, options = null;
|
|
1054
|
-
return go$appendFile(
|
|
1055
|
-
function go$appendFile(
|
|
1056
|
-
return fs$appendFile(
|
|
1054
|
+
return go$appendFile(path28, data, options, cb);
|
|
1055
|
+
function go$appendFile(path29, data2, options2, cb2, startTime) {
|
|
1056
|
+
return fs$appendFile(path29, data2, options2, function(err) {
|
|
1057
1057
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1058
|
-
enqueue([go$appendFile, [
|
|
1058
|
+
enqueue([go$appendFile, [path29, data2, options2, cb2], err, startTime || Date.now(), Date.now()]);
|
|
1059
1059
|
else {
|
|
1060
1060
|
if (typeof cb2 === "function")
|
|
1061
1061
|
cb2.apply(this, arguments);
|
|
@@ -1063,9 +1063,9 @@ var require_graceful_fs = __commonJS({
|
|
|
1063
1063
|
});
|
|
1064
1064
|
}
|
|
1065
1065
|
}
|
|
1066
|
-
var fs$copyFile =
|
|
1066
|
+
var fs$copyFile = fs24.copyFile;
|
|
1067
1067
|
if (fs$copyFile)
|
|
1068
|
-
|
|
1068
|
+
fs24.copyFile = copyFile;
|
|
1069
1069
|
function copyFile(src, dest, flags, cb) {
|
|
1070
1070
|
if (typeof flags === "function") {
|
|
1071
1071
|
cb = flags;
|
|
@@ -1083,34 +1083,34 @@ var require_graceful_fs = __commonJS({
|
|
|
1083
1083
|
});
|
|
1084
1084
|
}
|
|
1085
1085
|
}
|
|
1086
|
-
var fs$readdir =
|
|
1087
|
-
|
|
1086
|
+
var fs$readdir = fs24.readdir;
|
|
1087
|
+
fs24.readdir = readdir;
|
|
1088
1088
|
var noReaddirOptionVersions = /^v[0-5]\./;
|
|
1089
|
-
function readdir(
|
|
1089
|
+
function readdir(path28, options, cb) {
|
|
1090
1090
|
if (typeof options === "function")
|
|
1091
1091
|
cb = options, options = null;
|
|
1092
|
-
var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(
|
|
1093
|
-
return fs$readdir(
|
|
1094
|
-
|
|
1092
|
+
var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path29, options2, cb2, startTime) {
|
|
1093
|
+
return fs$readdir(path29, fs$readdirCallback(
|
|
1094
|
+
path29,
|
|
1095
1095
|
options2,
|
|
1096
1096
|
cb2,
|
|
1097
1097
|
startTime
|
|
1098
1098
|
));
|
|
1099
|
-
} : function go$readdir2(
|
|
1100
|
-
return fs$readdir(
|
|
1101
|
-
|
|
1099
|
+
} : function go$readdir2(path29, options2, cb2, startTime) {
|
|
1100
|
+
return fs$readdir(path29, options2, fs$readdirCallback(
|
|
1101
|
+
path29,
|
|
1102
1102
|
options2,
|
|
1103
1103
|
cb2,
|
|
1104
1104
|
startTime
|
|
1105
1105
|
));
|
|
1106
1106
|
};
|
|
1107
|
-
return go$readdir(
|
|
1108
|
-
function fs$readdirCallback(
|
|
1107
|
+
return go$readdir(path28, options, cb);
|
|
1108
|
+
function fs$readdirCallback(path29, options2, cb2, startTime) {
|
|
1109
1109
|
return function(err, files) {
|
|
1110
1110
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1111
1111
|
enqueue([
|
|
1112
1112
|
go$readdir,
|
|
1113
|
-
[
|
|
1113
|
+
[path29, options2, cb2],
|
|
1114
1114
|
err,
|
|
1115
1115
|
startTime || Date.now(),
|
|
1116
1116
|
Date.now()
|
|
@@ -1125,21 +1125,21 @@ var require_graceful_fs = __commonJS({
|
|
|
1125
1125
|
}
|
|
1126
1126
|
}
|
|
1127
1127
|
if (process.version.substr(0, 4) === "v0.8") {
|
|
1128
|
-
var legStreams = legacy(
|
|
1128
|
+
var legStreams = legacy(fs24);
|
|
1129
1129
|
ReadStream = legStreams.ReadStream;
|
|
1130
1130
|
WriteStream = legStreams.WriteStream;
|
|
1131
1131
|
}
|
|
1132
|
-
var fs$ReadStream =
|
|
1132
|
+
var fs$ReadStream = fs24.ReadStream;
|
|
1133
1133
|
if (fs$ReadStream) {
|
|
1134
1134
|
ReadStream.prototype = Object.create(fs$ReadStream.prototype);
|
|
1135
1135
|
ReadStream.prototype.open = ReadStream$open;
|
|
1136
1136
|
}
|
|
1137
|
-
var fs$WriteStream =
|
|
1137
|
+
var fs$WriteStream = fs24.WriteStream;
|
|
1138
1138
|
if (fs$WriteStream) {
|
|
1139
1139
|
WriteStream.prototype = Object.create(fs$WriteStream.prototype);
|
|
1140
1140
|
WriteStream.prototype.open = WriteStream$open;
|
|
1141
1141
|
}
|
|
1142
|
-
Object.defineProperty(
|
|
1142
|
+
Object.defineProperty(fs24, "ReadStream", {
|
|
1143
1143
|
get: function() {
|
|
1144
1144
|
return ReadStream;
|
|
1145
1145
|
},
|
|
@@ -1149,7 +1149,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1149
1149
|
enumerable: true,
|
|
1150
1150
|
configurable: true
|
|
1151
1151
|
});
|
|
1152
|
-
Object.defineProperty(
|
|
1152
|
+
Object.defineProperty(fs24, "WriteStream", {
|
|
1153
1153
|
get: function() {
|
|
1154
1154
|
return WriteStream;
|
|
1155
1155
|
},
|
|
@@ -1160,7 +1160,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1160
1160
|
configurable: true
|
|
1161
1161
|
});
|
|
1162
1162
|
var FileReadStream = ReadStream;
|
|
1163
|
-
Object.defineProperty(
|
|
1163
|
+
Object.defineProperty(fs24, "FileReadStream", {
|
|
1164
1164
|
get: function() {
|
|
1165
1165
|
return FileReadStream;
|
|
1166
1166
|
},
|
|
@@ -1171,7 +1171,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1171
1171
|
configurable: true
|
|
1172
1172
|
});
|
|
1173
1173
|
var FileWriteStream = WriteStream;
|
|
1174
|
-
Object.defineProperty(
|
|
1174
|
+
Object.defineProperty(fs24, "FileWriteStream", {
|
|
1175
1175
|
get: function() {
|
|
1176
1176
|
return FileWriteStream;
|
|
1177
1177
|
},
|
|
@@ -1181,7 +1181,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1181
1181
|
enumerable: true,
|
|
1182
1182
|
configurable: true
|
|
1183
1183
|
});
|
|
1184
|
-
function ReadStream(
|
|
1184
|
+
function ReadStream(path28, options) {
|
|
1185
1185
|
if (this instanceof ReadStream)
|
|
1186
1186
|
return fs$ReadStream.apply(this, arguments), this;
|
|
1187
1187
|
else
|
|
@@ -1201,7 +1201,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1201
1201
|
}
|
|
1202
1202
|
});
|
|
1203
1203
|
}
|
|
1204
|
-
function WriteStream(
|
|
1204
|
+
function WriteStream(path28, options) {
|
|
1205
1205
|
if (this instanceof WriteStream)
|
|
1206
1206
|
return fs$WriteStream.apply(this, arguments), this;
|
|
1207
1207
|
else
|
|
@@ -1219,22 +1219,22 @@ var require_graceful_fs = __commonJS({
|
|
|
1219
1219
|
}
|
|
1220
1220
|
});
|
|
1221
1221
|
}
|
|
1222
|
-
function createReadStream(
|
|
1223
|
-
return new
|
|
1222
|
+
function createReadStream(path28, options) {
|
|
1223
|
+
return new fs24.ReadStream(path28, options);
|
|
1224
1224
|
}
|
|
1225
|
-
function createWriteStream(
|
|
1226
|
-
return new
|
|
1225
|
+
function createWriteStream(path28, options) {
|
|
1226
|
+
return new fs24.WriteStream(path28, options);
|
|
1227
1227
|
}
|
|
1228
|
-
var fs$open =
|
|
1229
|
-
|
|
1230
|
-
function open(
|
|
1228
|
+
var fs$open = fs24.open;
|
|
1229
|
+
fs24.open = open;
|
|
1230
|
+
function open(path28, flags, mode, cb) {
|
|
1231
1231
|
if (typeof mode === "function")
|
|
1232
1232
|
cb = mode, mode = null;
|
|
1233
|
-
return go$open(
|
|
1234
|
-
function go$open(
|
|
1235
|
-
return fs$open(
|
|
1233
|
+
return go$open(path28, flags, mode, cb);
|
|
1234
|
+
function go$open(path29, flags2, mode2, cb2, startTime) {
|
|
1235
|
+
return fs$open(path29, flags2, mode2, function(err, fd) {
|
|
1236
1236
|
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
|
|
1237
|
-
enqueue([go$open, [
|
|
1237
|
+
enqueue([go$open, [path29, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]);
|
|
1238
1238
|
else {
|
|
1239
1239
|
if (typeof cb2 === "function")
|
|
1240
1240
|
cb2.apply(this, arguments);
|
|
@@ -1242,20 +1242,20 @@ var require_graceful_fs = __commonJS({
|
|
|
1242
1242
|
});
|
|
1243
1243
|
}
|
|
1244
1244
|
}
|
|
1245
|
-
return
|
|
1245
|
+
return fs24;
|
|
1246
1246
|
}
|
|
1247
1247
|
function enqueue(elem) {
|
|
1248
1248
|
debug("ENQUEUE", elem[0].name, elem[1]);
|
|
1249
|
-
|
|
1249
|
+
fs23[gracefulQueue].push(elem);
|
|
1250
1250
|
retry();
|
|
1251
1251
|
}
|
|
1252
1252
|
var retryTimer;
|
|
1253
1253
|
function resetQueue() {
|
|
1254
1254
|
var now = Date.now();
|
|
1255
|
-
for (var i = 0; i <
|
|
1256
|
-
if (
|
|
1257
|
-
|
|
1258
|
-
|
|
1255
|
+
for (var i = 0; i < fs23[gracefulQueue].length; ++i) {
|
|
1256
|
+
if (fs23[gracefulQueue][i].length > 2) {
|
|
1257
|
+
fs23[gracefulQueue][i][3] = now;
|
|
1258
|
+
fs23[gracefulQueue][i][4] = now;
|
|
1259
1259
|
}
|
|
1260
1260
|
}
|
|
1261
1261
|
retry();
|
|
@@ -1263,9 +1263,9 @@ var require_graceful_fs = __commonJS({
|
|
|
1263
1263
|
function retry() {
|
|
1264
1264
|
clearTimeout(retryTimer);
|
|
1265
1265
|
retryTimer = void 0;
|
|
1266
|
-
if (
|
|
1266
|
+
if (fs23[gracefulQueue].length === 0)
|
|
1267
1267
|
return;
|
|
1268
|
-
var elem =
|
|
1268
|
+
var elem = fs23[gracefulQueue].shift();
|
|
1269
1269
|
var fn = elem[0];
|
|
1270
1270
|
var args = elem[1];
|
|
1271
1271
|
var err = elem[2];
|
|
@@ -1287,7 +1287,7 @@ var require_graceful_fs = __commonJS({
|
|
|
1287
1287
|
debug("RETRY", fn.name, args);
|
|
1288
1288
|
fn.apply(null, args.concat([startTime]));
|
|
1289
1289
|
} else {
|
|
1290
|
-
|
|
1290
|
+
fs23[gracefulQueue].push(elem);
|
|
1291
1291
|
}
|
|
1292
1292
|
}
|
|
1293
1293
|
if (retryTimer === void 0) {
|
|
@@ -1722,10 +1722,10 @@ var require_mtime_precision = __commonJS({
|
|
|
1722
1722
|
"../../node_modules/proper-lockfile/lib/mtime-precision.js"(exports2, module2) {
|
|
1723
1723
|
"use strict";
|
|
1724
1724
|
var cacheSymbol = /* @__PURE__ */ Symbol();
|
|
1725
|
-
function probe(file,
|
|
1726
|
-
const cachedPrecision =
|
|
1725
|
+
function probe(file, fs23, callback) {
|
|
1726
|
+
const cachedPrecision = fs23[cacheSymbol];
|
|
1727
1727
|
if (cachedPrecision) {
|
|
1728
|
-
return
|
|
1728
|
+
return fs23.stat(file, (err, stat) => {
|
|
1729
1729
|
if (err) {
|
|
1730
1730
|
return callback(err);
|
|
1731
1731
|
}
|
|
@@ -1733,16 +1733,16 @@ var require_mtime_precision = __commonJS({
|
|
|
1733
1733
|
});
|
|
1734
1734
|
}
|
|
1735
1735
|
const mtime = new Date(Math.ceil(Date.now() / 1e3) * 1e3 + 5);
|
|
1736
|
-
|
|
1736
|
+
fs23.utimes(file, mtime, mtime, (err) => {
|
|
1737
1737
|
if (err) {
|
|
1738
1738
|
return callback(err);
|
|
1739
1739
|
}
|
|
1740
|
-
|
|
1740
|
+
fs23.stat(file, (err2, stat) => {
|
|
1741
1741
|
if (err2) {
|
|
1742
1742
|
return callback(err2);
|
|
1743
1743
|
}
|
|
1744
1744
|
const precision = stat.mtime.getTime() % 1e3 === 0 ? "s" : "ms";
|
|
1745
|
-
Object.defineProperty(
|
|
1745
|
+
Object.defineProperty(fs23, cacheSymbol, { value: precision });
|
|
1746
1746
|
callback(null, stat.mtime, precision);
|
|
1747
1747
|
});
|
|
1748
1748
|
});
|
|
@@ -1763,8 +1763,8 @@ var require_mtime_precision = __commonJS({
|
|
|
1763
1763
|
var require_lockfile = __commonJS({
|
|
1764
1764
|
"../../node_modules/proper-lockfile/lib/lockfile.js"(exports2, module2) {
|
|
1765
1765
|
"use strict";
|
|
1766
|
-
var
|
|
1767
|
-
var
|
|
1766
|
+
var path28 = require("path");
|
|
1767
|
+
var fs23 = require_graceful_fs();
|
|
1768
1768
|
var retry = require_retry2();
|
|
1769
1769
|
var onExit = require_signal_exit();
|
|
1770
1770
|
var mtimePrecision = require_mtime_precision();
|
|
@@ -1774,7 +1774,7 @@ var require_lockfile = __commonJS({
|
|
|
1774
1774
|
}
|
|
1775
1775
|
function resolveCanonicalPath(file, options, callback) {
|
|
1776
1776
|
if (!options.realpath) {
|
|
1777
|
-
return callback(null,
|
|
1777
|
+
return callback(null, path28.resolve(file));
|
|
1778
1778
|
}
|
|
1779
1779
|
options.fs.realpath(file, callback);
|
|
1780
1780
|
}
|
|
@@ -1895,7 +1895,7 @@ var require_lockfile = __commonJS({
|
|
|
1895
1895
|
update: null,
|
|
1896
1896
|
realpath: true,
|
|
1897
1897
|
retries: 0,
|
|
1898
|
-
fs:
|
|
1898
|
+
fs: fs23,
|
|
1899
1899
|
onCompromised: (err) => {
|
|
1900
1900
|
throw err;
|
|
1901
1901
|
},
|
|
@@ -1939,7 +1939,7 @@ var require_lockfile = __commonJS({
|
|
|
1939
1939
|
}
|
|
1940
1940
|
function unlock(file, options, callback) {
|
|
1941
1941
|
options = {
|
|
1942
|
-
fs:
|
|
1942
|
+
fs: fs23,
|
|
1943
1943
|
realpath: true,
|
|
1944
1944
|
...options
|
|
1945
1945
|
};
|
|
@@ -1961,7 +1961,7 @@ var require_lockfile = __commonJS({
|
|
|
1961
1961
|
options = {
|
|
1962
1962
|
stale: 1e4,
|
|
1963
1963
|
realpath: true,
|
|
1964
|
-
fs:
|
|
1964
|
+
fs: fs23,
|
|
1965
1965
|
...options
|
|
1966
1966
|
};
|
|
1967
1967
|
options.stale = Math.max(options.stale || 0, 2e3);
|
|
@@ -2000,16 +2000,16 @@ var require_lockfile = __commonJS({
|
|
|
2000
2000
|
var require_adapter = __commonJS({
|
|
2001
2001
|
"../../node_modules/proper-lockfile/lib/adapter.js"(exports2, module2) {
|
|
2002
2002
|
"use strict";
|
|
2003
|
-
var
|
|
2004
|
-
function createSyncFs(
|
|
2003
|
+
var fs23 = require_graceful_fs();
|
|
2004
|
+
function createSyncFs(fs24) {
|
|
2005
2005
|
const methods = ["mkdir", "realpath", "stat", "rmdir", "utimes"];
|
|
2006
|
-
const newFs = { ...
|
|
2006
|
+
const newFs = { ...fs24 };
|
|
2007
2007
|
methods.forEach((method) => {
|
|
2008
2008
|
newFs[method] = (...args) => {
|
|
2009
2009
|
const callback = args.pop();
|
|
2010
2010
|
let ret;
|
|
2011
2011
|
try {
|
|
2012
|
-
ret =
|
|
2012
|
+
ret = fs24[`${method}Sync`](...args);
|
|
2013
2013
|
} catch (err) {
|
|
2014
2014
|
return callback(err);
|
|
2015
2015
|
}
|
|
@@ -2047,7 +2047,7 @@ var require_adapter = __commonJS({
|
|
|
2047
2047
|
}
|
|
2048
2048
|
function toSyncOptions(options) {
|
|
2049
2049
|
options = { ...options };
|
|
2050
|
-
options.fs = createSyncFs(options.fs ||
|
|
2050
|
+
options.fs = createSyncFs(options.fs || fs23);
|
|
2051
2051
|
if (typeof options.retries === "number" && options.retries > 0 || options.retries && typeof options.retries.retries === "number" && options.retries.retries > 0) {
|
|
2052
2052
|
throw Object.assign(new Error("Cannot use retries with the sync api"), { code: "ESYNC" });
|
|
2053
2053
|
}
|
|
@@ -2303,7 +2303,7 @@ var require_age_encryption = __commonJS({
|
|
|
2303
2303
|
Object.assign(hashC, info);
|
|
2304
2304
|
return Object.freeze(hashC);
|
|
2305
2305
|
}
|
|
2306
|
-
function
|
|
2306
|
+
function randomBytes5(bytesLength = 32) {
|
|
2307
2307
|
anumber(bytesLength, "bytesLength");
|
|
2308
2308
|
const cr = typeof globalThis === "object" ? globalThis.crypto : null;
|
|
2309
2309
|
if (typeof cr?.getRandomValues !== "function")
|
|
@@ -3030,7 +3030,7 @@ var require_age_encryption = __commonJS({
|
|
|
3030
3030
|
};
|
|
3031
3031
|
}
|
|
3032
3032
|
// @__NO_SIDE_EFFECTS__
|
|
3033
|
-
function
|
|
3033
|
+
function join20(separator = "") {
|
|
3034
3034
|
astr("join", separator);
|
|
3035
3035
|
return {
|
|
3036
3036
|
encode: (from) => {
|
|
@@ -3160,9 +3160,9 @@ var require_age_encryption = __commonJS({
|
|
|
3160
3160
|
decode(s) {
|
|
3161
3161
|
return decodeBase64Builtin(s, false);
|
|
3162
3162
|
}
|
|
3163
|
-
} : /* @__PURE__ */ chain(/* @__PURE__ */ radix2(6), /* @__PURE__ */ alphabet("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"), /* @__PURE__ */ padding(6), /* @__PURE__ */
|
|
3164
|
-
var base64nopad = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ chain(/* @__PURE__ */ radix2(6), /* @__PURE__ */ alphabet("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"), /* @__PURE__ */
|
|
3165
|
-
var BECH_ALPHABET = /* @__PURE__ */ chain(/* @__PURE__ */ alphabet("qpzry9x8gf2tvdw0s3jn54khce6mua7l"), /* @__PURE__ */
|
|
3163
|
+
} : /* @__PURE__ */ chain(/* @__PURE__ */ radix2(6), /* @__PURE__ */ alphabet("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"), /* @__PURE__ */ padding(6), /* @__PURE__ */ join20("")));
|
|
3164
|
+
var base64nopad = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ chain(/* @__PURE__ */ radix2(6), /* @__PURE__ */ alphabet("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"), /* @__PURE__ */ join20("")));
|
|
3165
|
+
var BECH_ALPHABET = /* @__PURE__ */ chain(/* @__PURE__ */ alphabet("qpzry9x8gf2tvdw0s3jn54khce6mua7l"), /* @__PURE__ */ join20(""));
|
|
3166
3166
|
var POLYMOD_GENERATORS = [996825010, 642813549, 513874426, 1027748829, 705979059];
|
|
3167
3167
|
function bech32Polymod(pre) {
|
|
3168
3168
|
const b = pre >> 25;
|
|
@@ -7655,7 +7655,7 @@ var require_age_encryption = __commonJS({
|
|
|
7655
7655
|
var concatBytes4 = (...arrays) => concatBytes(...arrays);
|
|
7656
7656
|
var hexToBytes3 = (hex) => hexToBytes(hex);
|
|
7657
7657
|
var isBytes5 = isBytes;
|
|
7658
|
-
var
|
|
7658
|
+
var randomBytes52 = (bytesLength) => randomBytes5(bytesLength);
|
|
7659
7659
|
var _0n7 = /* @__PURE__ */ BigInt(0);
|
|
7660
7660
|
var _1n8 = /* @__PURE__ */ BigInt(1);
|
|
7661
7661
|
function abool3(value, title = "") {
|
|
@@ -9127,7 +9127,7 @@ var require_age_encryption = __commonJS({
|
|
|
9127
9127
|
}
|
|
9128
9128
|
function ecdh2(Point, ecdhOpts = {}) {
|
|
9129
9129
|
const { Fn: Fn2 } = Point;
|
|
9130
|
-
const randomBytes_ = ecdhOpts.randomBytes === void 0 ?
|
|
9130
|
+
const randomBytes_ = ecdhOpts.randomBytes === void 0 ? randomBytes52 : ecdhOpts.randomBytes;
|
|
9131
9131
|
const lengths = Object.assign(getWLengths2(Point.Fp, Fn2), {
|
|
9132
9132
|
seed: Math.max(getMinHashLength2(Fn2.ORDER), 16)
|
|
9133
9133
|
});
|
|
@@ -9201,7 +9201,7 @@ var require_age_encryption = __commonJS({
|
|
|
9201
9201
|
bits2int_modN: "function"
|
|
9202
9202
|
});
|
|
9203
9203
|
ecdsaOpts = Object.assign({}, ecdsaOpts);
|
|
9204
|
-
const randomBytes6 = ecdsaOpts.randomBytes === void 0 ?
|
|
9204
|
+
const randomBytes6 = ecdsaOpts.randomBytes === void 0 ? randomBytes52 : ecdsaOpts.randomBytes;
|
|
9205
9205
|
const hmac3 = ecdsaOpts.hmac === void 0 ? (key, msg) => hmac(hash_, key, msg) : ecdsaOpts.hmac;
|
|
9206
9206
|
const { Fp: Fp2, Fn: Fn2 } = Point;
|
|
9207
9207
|
const { ORDER: CURVE_ORDER, BITS: fnBits } = Fn2;
|
|
@@ -9815,7 +9815,7 @@ var require_age_encryption = __commonJS({
|
|
|
9815
9815
|
const is25519 = type === "x25519";
|
|
9816
9816
|
if (!is25519 && type !== "x448")
|
|
9817
9817
|
throw new Error("invalid type");
|
|
9818
|
-
const randomBytes_ = rand === void 0 ?
|
|
9818
|
+
const randomBytes_ = rand === void 0 ? randomBytes52 : rand;
|
|
9819
9819
|
const montgomeryBits = is25519 ? 255 : 448;
|
|
9820
9820
|
const fieldLen = is25519 ? 32 : 56;
|
|
9821
9821
|
const Gu = is25519 ? BigInt(9) : BigInt(5);
|
|
@@ -10449,12 +10449,12 @@ var require_age_encryption = __commonJS({
|
|
|
10449
10449
|
return generateX25519Identity();
|
|
10450
10450
|
}
|
|
10451
10451
|
function generateX25519Identity() {
|
|
10452
|
-
const scalar =
|
|
10452
|
+
const scalar = randomBytes5(32);
|
|
10453
10453
|
const identity = bech32.encodeFromBytes("AGE-SECRET-KEY-", scalar).toUpperCase();
|
|
10454
10454
|
return Promise.resolve(identity);
|
|
10455
10455
|
}
|
|
10456
10456
|
function generateHybridIdentity() {
|
|
10457
|
-
const scalar =
|
|
10457
|
+
const scalar = randomBytes5(32);
|
|
10458
10458
|
const identity = bech32.encodeFromBytes("AGE-SECRET-KEY-PQ-", scalar).toUpperCase();
|
|
10459
10459
|
return Promise.resolve(identity);
|
|
10460
10460
|
}
|
|
@@ -10662,7 +10662,7 @@ var require_age_encryption = __commonJS({
|
|
|
10662
10662
|
this.recipient = res.bytes;
|
|
10663
10663
|
}
|
|
10664
10664
|
async wrapFileKey(fileKey) {
|
|
10665
|
-
const ephemeral =
|
|
10665
|
+
const ephemeral = randomBytes5(32);
|
|
10666
10666
|
const share = await scalarMultBase(ephemeral);
|
|
10667
10667
|
const secret = await scalarMult(ephemeral, this.recipient);
|
|
10668
10668
|
const salt = new Uint8Array(share.length + this.recipient.length);
|
|
@@ -10723,7 +10723,7 @@ var require_age_encryption = __commonJS({
|
|
|
10723
10723
|
this.logN = logN;
|
|
10724
10724
|
}
|
|
10725
10725
|
wrapFileKey(fileKey) {
|
|
10726
|
-
const salt =
|
|
10726
|
+
const salt = randomBytes5(16);
|
|
10727
10727
|
const label2 = "age-encryption.org/v1/scrypt";
|
|
10728
10728
|
const labelAndSalt = new Uint8Array(label2.length + 16);
|
|
10729
10729
|
labelAndSalt.set(new TextEncoder().encode(label2));
|
|
@@ -11058,7 +11058,7 @@ var require_age_encryption = __commonJS({
|
|
|
11058
11058
|
rp: { name: "", id: options.rpId },
|
|
11059
11059
|
user: {
|
|
11060
11060
|
name: options.keyName,
|
|
11061
|
-
id: domBuffer2(
|
|
11061
|
+
id: domBuffer2(randomBytes5(8)),
|
|
11062
11062
|
// avoid overwriting existing keys
|
|
11063
11063
|
displayName: ""
|
|
11064
11064
|
},
|
|
@@ -11128,7 +11128,7 @@ var require_age_encryption = __commonJS({
|
|
|
11128
11128
|
transports: this.transports,
|
|
11129
11129
|
type: "public-key"
|
|
11130
11130
|
}] : [],
|
|
11131
|
-
challenge: domBuffer2(
|
|
11131
|
+
challenge: domBuffer2(randomBytes5(16)),
|
|
11132
11132
|
extensions: { prf: { eval: prfInputs(nonce) } },
|
|
11133
11133
|
userVerification: "required",
|
|
11134
11134
|
// prf requires UV
|
|
@@ -11147,7 +11147,7 @@ var require_age_encryption = __commonJS({
|
|
|
11147
11147
|
* Implements {@link Recipient.wrapFileKey}.
|
|
11148
11148
|
*/
|
|
11149
11149
|
async wrapFileKey(fileKey) {
|
|
11150
|
-
const nonce =
|
|
11150
|
+
const nonce = randomBytes5(16);
|
|
11151
11151
|
const results = await this.getCredential(nonce);
|
|
11152
11152
|
const key = deriveKey(results);
|
|
11153
11153
|
return [new Stanza([label, base64nopad.encode(nonce)], encryptFileKey(fileKey, key))];
|
|
@@ -11270,7 +11270,7 @@ var require_age_encryption = __commonJS({
|
|
|
11270
11270
|
}
|
|
11271
11271
|
}
|
|
11272
11272
|
async encrypt(file) {
|
|
11273
|
-
const fileKey =
|
|
11273
|
+
const fileKey = randomBytes5(16);
|
|
11274
11274
|
const stanzas = [];
|
|
11275
11275
|
let recipients = this.recipients;
|
|
11276
11276
|
if (this.passphrase !== null) {
|
|
@@ -11283,7 +11283,7 @@ var require_age_encryption = __commonJS({
|
|
|
11283
11283
|
const hmacKey = hkdf(sha256, fileKey, void 0, labelHeader, 32);
|
|
11284
11284
|
const mac = hmac(sha256, hmacKey, encodeHeaderNoMAC(stanzas));
|
|
11285
11285
|
const header = encodeHeader(stanzas, mac);
|
|
11286
|
-
const nonce =
|
|
11286
|
+
const nonce = randomBytes5(16);
|
|
11287
11287
|
const labelPayload = new TextEncoder().encode("payload");
|
|
11288
11288
|
const streamKey = hkdf(sha256, fileKey, nonce, labelPayload, 32);
|
|
11289
11289
|
const encrypter = encryptSTREAM(streamKey);
|
|
@@ -11420,7 +11420,6 @@ var index_exports = {};
|
|
|
11420
11420
|
__export(index_exports, {
|
|
11421
11421
|
ArtifactPacker: () => ArtifactPacker,
|
|
11422
11422
|
BackendMigrator: () => BackendMigrator,
|
|
11423
|
-
BulkOps: () => BulkOps,
|
|
11424
11423
|
CLEF_MANIFEST_FILENAME: () => CLEF_MANIFEST_FILENAME,
|
|
11425
11424
|
CLEF_POLICY_FILENAME: () => CLEF_POLICY_FILENAME,
|
|
11426
11425
|
CLEF_REPORT_SCHEMA_VERSION: () => CLEF_REPORT_SCHEMA_VERSION,
|
|
@@ -11434,6 +11433,7 @@ __export(index_exports, {
|
|
|
11434
11433
|
DiffEngine: () => DiffEngine,
|
|
11435
11434
|
DriftDetector: () => DriftDetector,
|
|
11436
11435
|
FilePackOutput: () => FilePackOutput,
|
|
11436
|
+
FilesystemStorageBackend: () => FilesystemStorageBackend,
|
|
11437
11437
|
GitIntegration: () => GitIntegration,
|
|
11438
11438
|
GitOperationError: () => GitOperationError,
|
|
11439
11439
|
ImportRunner: () => ImportRunner,
|
|
@@ -11450,7 +11450,6 @@ __export(index_exports, {
|
|
|
11450
11450
|
PolicyValidationError: () => PolicyValidationError,
|
|
11451
11451
|
REQUESTS_FILENAME: () => REQUESTS_FILENAME,
|
|
11452
11452
|
REQUIREMENTS: () => REQUIREMENTS,
|
|
11453
|
-
REVEAL_WARNING: () => REVEAL_WARNING,
|
|
11454
11453
|
RecipientManager: () => RecipientManager,
|
|
11455
11454
|
ReportGenerator: () => ReportGenerator,
|
|
11456
11455
|
ReportSanitizer: () => ReportSanitizer,
|
|
@@ -11467,6 +11466,7 @@ __export(index_exports, {
|
|
|
11467
11466
|
SopsMergeDriver: () => SopsMergeDriver,
|
|
11468
11467
|
SopsMissingError: () => SopsMissingError,
|
|
11469
11468
|
SopsVersionError: () => SopsVersionError,
|
|
11469
|
+
SourceCapabilityUnsupportedError: () => SourceCapabilityUnsupportedError,
|
|
11470
11470
|
StructureManager: () => StructureManager,
|
|
11471
11471
|
SyncManager: () => SyncManager,
|
|
11472
11472
|
TransactionLockError: () => TransactionLockError,
|
|
@@ -11486,11 +11486,11 @@ __export(index_exports, {
|
|
|
11486
11486
|
checkAll: () => checkAll,
|
|
11487
11487
|
checkDependency: () => checkDependency,
|
|
11488
11488
|
collectCIContext: () => collectCIContext,
|
|
11489
|
+
composeSecretSource: () => composeSecretSource,
|
|
11489
11490
|
computeCiphertextHash: () => computeCiphertextHash,
|
|
11490
11491
|
deriveAgePublicKey: () => deriveAgePublicKey,
|
|
11492
|
+
describeCapabilities: () => describeCapabilities,
|
|
11491
11493
|
describeScope: () => describeScope,
|
|
11492
|
-
detectAlgorithm: () => detectAlgorithm,
|
|
11493
|
-
detectFormat: () => detectFormat,
|
|
11494
11494
|
emptyTemplate: () => emptyTemplate,
|
|
11495
11495
|
exampleTemplate: () => exampleTemplate,
|
|
11496
11496
|
findRequest: () => findRequest,
|
|
@@ -11498,35 +11498,27 @@ __export(index_exports, {
|
|
|
11498
11498
|
formatRevealWarning: () => formatRevealWarning,
|
|
11499
11499
|
generateAgeIdentity: () => generateAgeIdentity,
|
|
11500
11500
|
generateRandomValue: () => generateRandomValue,
|
|
11501
|
-
|
|
11502
|
-
getPendingKeys: () => getPendingKeys,
|
|
11503
|
-
getRotations: () => getRotations,
|
|
11501
|
+
isBulk: () => isBulk,
|
|
11504
11502
|
isClefHsmArn: () => isClefHsmArn,
|
|
11505
|
-
isHighEntropy: () => isHighEntropy,
|
|
11506
11503
|
isKmsEnvelope: () => isKmsEnvelope,
|
|
11504
|
+
isLintable: () => isLintable,
|
|
11505
|
+
isMergeAware: () => isMergeAware,
|
|
11506
|
+
isMigratable: () => isMigratable,
|
|
11507
11507
|
isPackedArtifact: () => isPackedArtifact,
|
|
11508
|
-
|
|
11508
|
+
isRecipientManaged: () => isRecipientManaged,
|
|
11509
|
+
isRotatable: () => isRotatable,
|
|
11510
|
+
isStructural: () => isStructural,
|
|
11509
11511
|
keyPreview: () => keyPreview,
|
|
11510
|
-
loadIgnoreRules: () => loadIgnoreRules,
|
|
11511
|
-
loadMetadata: () => loadMetadata,
|
|
11512
11512
|
loadRequests: () => loadRequests,
|
|
11513
11513
|
markPending: () => markPending,
|
|
11514
|
-
markPendingWithRetry: () => markPendingWithRetry,
|
|
11515
11514
|
markResolved: () => markResolved,
|
|
11516
|
-
matchPatterns: () => matchPatterns,
|
|
11517
|
-
mergeMetadataContents: () => mergeMetadataContents,
|
|
11518
11515
|
mergeMetadataFiles: () => mergeMetadataFiles,
|
|
11519
|
-
metadataPath: () => metadataPath,
|
|
11520
11516
|
parse: () => parse9,
|
|
11521
|
-
parseDotenv: () => parseDotenv,
|
|
11522
|
-
parseIgnoreContent: () => parseIgnoreContent,
|
|
11523
|
-
parseJson: () => parseJson,
|
|
11524
11517
|
parseSignerKey: () => parseSignerKey,
|
|
11525
11518
|
parseYaml: () => parseYaml,
|
|
11526
11519
|
pkcs11UriToSyntheticArn: () => pkcs11UriToSyntheticArn,
|
|
11527
11520
|
readManifestYaml: () => readManifestYaml,
|
|
11528
11521
|
recordRotation: () => recordRotation,
|
|
11529
|
-
redactValue: () => redactValue,
|
|
11530
11522
|
removeAccessRequest: () => removeRequest,
|
|
11531
11523
|
removeRotation: () => removeRotation,
|
|
11532
11524
|
requestsFilePath: () => requestsFilePath,
|
|
@@ -11538,22 +11530,18 @@ __export(index_exports, {
|
|
|
11538
11530
|
resolveRecipientsForEnvironment: () => resolveRecipientsForEnvironment,
|
|
11539
11531
|
resolveSopsPath: () => resolveSopsPath,
|
|
11540
11532
|
runCompliance: () => runCompliance,
|
|
11541
|
-
saveMetadata: () => saveMetadata,
|
|
11542
11533
|
saveRequests: () => saveRequests,
|
|
11543
|
-
|
|
11544
|
-
shannonEntropy: () => shannonEntropy,
|
|
11545
|
-
shouldIgnoreFile: () => shouldIgnoreFile,
|
|
11546
|
-
shouldIgnoreMatch: () => shouldIgnoreMatch,
|
|
11547
|
-
signEd25519: () => signEd25519,
|
|
11548
|
-
signKms: () => signKms,
|
|
11534
|
+
shouldUseLinuxStdinFifo: () => shouldUseLinuxStdinFifo,
|
|
11549
11535
|
spawnKeyservice: () => spawnKeyservice,
|
|
11550
11536
|
syntheticArnToPkcs11Uri: () => syntheticArnToPkcs11Uri,
|
|
11551
11537
|
tryBundledKeyservice: () => tryBundledKeyservice,
|
|
11552
11538
|
upsertRequest: () => upsertRequest,
|
|
11553
11539
|
validateAgePublicKey: () => validateAgePublicKey,
|
|
11540
|
+
validateAwsKmsArn: () => validateAwsKmsArn,
|
|
11554
11541
|
validatePackedArtifact: () => validatePackedArtifact,
|
|
11555
11542
|
validateResetScope: () => validateResetScope,
|
|
11556
11543
|
verifySignature: () => verifySignature,
|
|
11544
|
+
wrapWithLinuxStdinFifo: () => wrapWithLinuxStdinFifo,
|
|
11557
11545
|
writeManifestYaml: () => writeManifestYaml,
|
|
11558
11546
|
writeManifestYamlRaw: () => writeManifestYamlRaw,
|
|
11559
11547
|
writeSchema: () => writeSchema,
|
|
@@ -11728,11 +11716,98 @@ function keyPreview(key) {
|
|
|
11728
11716
|
return `age1\u2026${last8}`;
|
|
11729
11717
|
}
|
|
11730
11718
|
|
|
11719
|
+
// src/kms/aws-arn.ts
|
|
11720
|
+
var PARTITION_PATTERN = /^aws(?:-[a-z]+)*$/;
|
|
11721
|
+
var REGION_PATTERN = /^[a-z]{2,}(?:-[a-z]+)+-\d+$/;
|
|
11722
|
+
var ACCOUNT_PATTERN = /^\d{12}$/;
|
|
11723
|
+
function validateAwsKmsArn(input) {
|
|
11724
|
+
if (typeof input !== "string") {
|
|
11725
|
+
return { ok: false, reason: "value must be a string" };
|
|
11726
|
+
}
|
|
11727
|
+
if (input.length === 0) {
|
|
11728
|
+
return { ok: false, reason: "value is empty" };
|
|
11729
|
+
}
|
|
11730
|
+
if (!input.startsWith("arn:")) {
|
|
11731
|
+
return {
|
|
11732
|
+
ok: false,
|
|
11733
|
+
reason: "expected an ARN starting with 'arn:' (got a bare key id, alias name, or other format). Use a full ARN like 'arn:aws:kms:us-east-1:123456789012:alias/<name>'."
|
|
11734
|
+
};
|
|
11735
|
+
}
|
|
11736
|
+
const segments = input.split(":");
|
|
11737
|
+
if (segments.length < 6) {
|
|
11738
|
+
return {
|
|
11739
|
+
ok: false,
|
|
11740
|
+
reason: `expected 6 colon-delimited segments (arn:aws:kms:<region>:<account>:<resource>), got ${segments.length}. Check that the region and account aren't missing.`
|
|
11741
|
+
};
|
|
11742
|
+
}
|
|
11743
|
+
if (segments.length > 6) {
|
|
11744
|
+
return {
|
|
11745
|
+
ok: false,
|
|
11746
|
+
reason: `expected exactly 6 colon-delimited segments, got ${segments.length}. Check for stray ':' characters.`
|
|
11747
|
+
};
|
|
11748
|
+
}
|
|
11749
|
+
const [, partition, service, region, account, resource] = segments;
|
|
11750
|
+
if (!PARTITION_PATTERN.test(partition)) {
|
|
11751
|
+
return {
|
|
11752
|
+
ok: false,
|
|
11753
|
+
reason: `partition segment '${partition}' is not recognized. Expected 'aws', 'aws-us-gov', 'aws-cn', etc.`
|
|
11754
|
+
};
|
|
11755
|
+
}
|
|
11756
|
+
if (service !== "kms") {
|
|
11757
|
+
return {
|
|
11758
|
+
ok: false,
|
|
11759
|
+
reason: `service segment must be 'kms', got '${service}'.`
|
|
11760
|
+
};
|
|
11761
|
+
}
|
|
11762
|
+
if (region.length === 0) {
|
|
11763
|
+
return {
|
|
11764
|
+
ok: false,
|
|
11765
|
+
reason: "region segment is empty (look for '::' between 'kms' and the account id). Set a region like 'us-east-1' before reconstructing the ARN \u2014 common cause: a $REGION shell variable was unset when the ARN was built."
|
|
11766
|
+
};
|
|
11767
|
+
}
|
|
11768
|
+
if (!REGION_PATTERN.test(region)) {
|
|
11769
|
+
return {
|
|
11770
|
+
ok: false,
|
|
11771
|
+
reason: `region segment '${region}' doesn't look like an AWS region (expected e.g. 'us-east-1', 'eu-west-2').`
|
|
11772
|
+
};
|
|
11773
|
+
}
|
|
11774
|
+
if (account.length === 0) {
|
|
11775
|
+
return {
|
|
11776
|
+
ok: false,
|
|
11777
|
+
reason: "account segment is empty. Provide the 12-digit AWS account id."
|
|
11778
|
+
};
|
|
11779
|
+
}
|
|
11780
|
+
if (!ACCOUNT_PATTERN.test(account)) {
|
|
11781
|
+
return {
|
|
11782
|
+
ok: false,
|
|
11783
|
+
reason: `account segment '${account}' must be exactly 12 digits.`
|
|
11784
|
+
};
|
|
11785
|
+
}
|
|
11786
|
+
if (!resource || resource.length === 0) {
|
|
11787
|
+
return {
|
|
11788
|
+
ok: false,
|
|
11789
|
+
reason: "resource segment is empty. Expected 'key/<id>' or 'alias/<name>' after the account."
|
|
11790
|
+
};
|
|
11791
|
+
}
|
|
11792
|
+
if (!resource.startsWith("key/") && !resource.startsWith("alias/")) {
|
|
11793
|
+
return {
|
|
11794
|
+
ok: false,
|
|
11795
|
+
reason: `resource '${resource}' must start with 'key/' or 'alias/'.`
|
|
11796
|
+
};
|
|
11797
|
+
}
|
|
11798
|
+
if (resource === "key/" || resource === "alias/") {
|
|
11799
|
+
return {
|
|
11800
|
+
ok: false,
|
|
11801
|
+
reason: "resource id is empty after 'key/' or 'alias/'."
|
|
11802
|
+
};
|
|
11803
|
+
}
|
|
11804
|
+
return { ok: true };
|
|
11805
|
+
}
|
|
11806
|
+
|
|
11731
11807
|
// src/manifest/parser.ts
|
|
11732
11808
|
var CLEF_MANIFEST_FILENAME = "clef.yaml";
|
|
11733
11809
|
var VALID_BACKENDS = ["age", "awskms", "gcpkms", "azurekv", "pgp", "hsm"];
|
|
11734
11810
|
var PKCS11_URI_PATTERN = /^pkcs11:[a-zA-Z][a-zA-Z0-9_-]*=[^;]+/;
|
|
11735
|
-
var AWS_KMS_ARN_PATTERN = /^arn:aws(?:-[a-z]+)*:kms:[a-z0-9-]+:\d+:(key|alias)\/.+$/;
|
|
11736
11811
|
var VALID_TOP_LEVEL_KEYS = [
|
|
11737
11812
|
"version",
|
|
11738
11813
|
"environments",
|
|
@@ -12255,11 +12330,14 @@ var ManifestParser = class {
|
|
|
12255
12330
|
"service_identities"
|
|
12256
12331
|
);
|
|
12257
12332
|
}
|
|
12258
|
-
if (kmsObj.provider === "aws"
|
|
12259
|
-
|
|
12260
|
-
|
|
12261
|
-
|
|
12262
|
-
|
|
12333
|
+
if (kmsObj.provider === "aws") {
|
|
12334
|
+
const arnValidation = validateAwsKmsArn(kmsObj.keyId);
|
|
12335
|
+
if (!arnValidation.ok) {
|
|
12336
|
+
throw new ManifestValidationError(
|
|
12337
|
+
`Service identity '${siName}' environment '${envName}': kms.keyId is not a valid AWS KMS ARN \u2014 ${arnValidation.reason} (got '${kmsObj.keyId}'). Expected shape: arn:aws:kms:<region>:<account>:key/<id> or arn:aws:kms:<region>:<account>:alias/<name>.`,
|
|
12338
|
+
"service_identities"
|
|
12339
|
+
);
|
|
12340
|
+
}
|
|
12263
12341
|
}
|
|
12264
12342
|
if (Object.prototype.hasOwnProperty.call(kmsObj, "region")) {
|
|
12265
12343
|
throw new ManifestValidationError(
|
|
@@ -12334,6 +12412,18 @@ function readManifestYaml(repoRoot) {
|
|
|
12334
12412
|
return YAML2.parse(raw);
|
|
12335
12413
|
}
|
|
12336
12414
|
function writeManifestYaml(repoRoot, doc) {
|
|
12415
|
+
const parser = new ManifestParser();
|
|
12416
|
+
try {
|
|
12417
|
+
parser.validate(doc);
|
|
12418
|
+
} catch (err) {
|
|
12419
|
+
if (err instanceof ManifestValidationError) {
|
|
12420
|
+
throw new ManifestValidationError(
|
|
12421
|
+
`Refusing to write invalid manifest: ${err.message}`,
|
|
12422
|
+
err.field
|
|
12423
|
+
);
|
|
12424
|
+
}
|
|
12425
|
+
throw err;
|
|
12426
|
+
}
|
|
12337
12427
|
const manifestPath = path.join(repoRoot, CLEF_MANIFEST_FILENAME);
|
|
12338
12428
|
import_write_file_atomic.default.sync(manifestPath, YAML2.stringify(doc));
|
|
12339
12429
|
}
|
|
@@ -12804,10 +12894,6 @@ async function getPendingKeys(filePath) {
|
|
|
12804
12894
|
const metadata = await loadMetadata(filePath);
|
|
12805
12895
|
return metadata.pending.map((p) => p.key);
|
|
12806
12896
|
}
|
|
12807
|
-
async function isPending(filePath, key) {
|
|
12808
|
-
const metadata = await loadMetadata(filePath);
|
|
12809
|
-
return metadata.pending.some((p) => p.key === key);
|
|
12810
|
-
}
|
|
12811
12897
|
async function recordRotation(filePath, keys, rotatedBy, now = /* @__PURE__ */ new Date()) {
|
|
12812
12898
|
const metadata = await loadMetadata(filePath);
|
|
12813
12899
|
for (const key of keys) {
|
|
@@ -12843,14 +12929,6 @@ async function getRotations(filePath) {
|
|
|
12843
12929
|
function generateRandomValue() {
|
|
12844
12930
|
return crypto2.randomBytes(32).toString("hex");
|
|
12845
12931
|
}
|
|
12846
|
-
async function markPendingWithRetry(filePath, keys, setBy, retryDelayMs = 200) {
|
|
12847
|
-
try {
|
|
12848
|
-
await markPending(filePath, keys, setBy);
|
|
12849
|
-
} catch {
|
|
12850
|
-
await new Promise((r) => setTimeout(r, retryDelayMs));
|
|
12851
|
-
await markPending(filePath, keys, setBy);
|
|
12852
|
-
}
|
|
12853
|
-
}
|
|
12854
12932
|
|
|
12855
12933
|
// src/sops/keys.ts
|
|
12856
12934
|
var fs6 = __toESM(require("fs"));
|
|
@@ -12900,34 +12978,17 @@ var MatrixManager = class {
|
|
|
12900
12978
|
detectMissingCells(manifest, repoRoot) {
|
|
12901
12979
|
return this.resolveMatrix(manifest, repoRoot).filter((cell) => !cell.exists);
|
|
12902
12980
|
}
|
|
12903
|
-
/**
|
|
12904
|
-
* Create an empty encrypted SOPS file for a missing matrix cell.
|
|
12905
|
-
*
|
|
12906
|
-
* @param cell - The cell to scaffold (must not already exist).
|
|
12907
|
-
* @param sopsClient - SOPS client used to write the initial encrypted file.
|
|
12908
|
-
* @param manifest - Parsed manifest used to determine the encryption backend.
|
|
12909
|
-
*/
|
|
12910
|
-
async scaffoldCell(cell, sopsClient, manifest) {
|
|
12911
|
-
const dir = path5.dirname(cell.filePath);
|
|
12912
|
-
if (!fs7.existsSync(dir)) {
|
|
12913
|
-
fs7.mkdirSync(dir, { recursive: true });
|
|
12914
|
-
}
|
|
12915
|
-
await sopsClient.encrypt(cell.filePath, {}, manifest, cell.environment);
|
|
12916
|
-
}
|
|
12917
12981
|
/**
|
|
12918
12982
|
* Read each cell and return key counts, pending counts, and cross-environment issues.
|
|
12919
12983
|
*
|
|
12920
|
-
*
|
|
12921
|
-
*
|
|
12922
|
-
*
|
|
12923
|
-
* decrypt-based implementation later (e.g. for backends that don't expose
|
|
12924
|
-
* key names without decryption).
|
|
12984
|
+
* Keys are read from the plaintext YAML structure directly — no
|
|
12985
|
+
* decryption needed. A future backend that doesn't expose key names
|
|
12986
|
+
* without decryption would need its own implementation.
|
|
12925
12987
|
*
|
|
12926
12988
|
* @param manifest - Parsed manifest.
|
|
12927
12989
|
* @param repoRoot - Absolute path to the repository root.
|
|
12928
|
-
* @param _sopsClient - Reserved for future use; pass any `EncryptionBackend`.
|
|
12929
12990
|
*/
|
|
12930
|
-
async getMatrixStatus(manifest, repoRoot
|
|
12991
|
+
async getMatrixStatus(manifest, repoRoot) {
|
|
12931
12992
|
const cells = this.resolveMatrix(manifest, repoRoot);
|
|
12932
12993
|
const statuses = [];
|
|
12933
12994
|
const cellKeys = /* @__PURE__ */ new Map();
|
|
@@ -13243,7 +13304,6 @@ function orderedKeys(keys) {
|
|
|
13243
13304
|
}
|
|
13244
13305
|
|
|
13245
13306
|
// src/diff/engine.ts
|
|
13246
|
-
var path7 = __toESM(require("path"));
|
|
13247
13307
|
var DiffEngine = class {
|
|
13248
13308
|
/**
|
|
13249
13309
|
* Compare two in-memory value maps and produce a sorted diff result.
|
|
@@ -13294,131 +13354,21 @@ var DiffEngine = class {
|
|
|
13294
13354
|
* @param namespace - Namespace containing both cells.
|
|
13295
13355
|
* @param envA - Name of environment A.
|
|
13296
13356
|
* @param envB - Name of environment B.
|
|
13297
|
-
* @param
|
|
13298
|
-
* @
|
|
13299
|
-
* @param repoRoot - Absolute path to the repository root.
|
|
13300
|
-
* @throws {@link SopsDecryptionError} If either file cannot be decrypted.
|
|
13357
|
+
* @param source - SecretSource that resolves both cells (substrate-agnostic).
|
|
13358
|
+
* @throws {@link SopsDecryptionError} If either cell cannot be decrypted.
|
|
13301
13359
|
*/
|
|
13302
|
-
async
|
|
13303
|
-
const fileA = path7.join(
|
|
13304
|
-
repoRoot,
|
|
13305
|
-
manifest.file_pattern.replace("{namespace}", namespace).replace("{environment}", envA)
|
|
13306
|
-
);
|
|
13307
|
-
const fileB = path7.join(
|
|
13308
|
-
repoRoot,
|
|
13309
|
-
manifest.file_pattern.replace("{namespace}", namespace).replace("{environment}", envB)
|
|
13310
|
-
);
|
|
13360
|
+
async diffCells(namespace, envA, envB, source) {
|
|
13311
13361
|
const [decryptedA, decryptedB] = await Promise.all([
|
|
13312
|
-
|
|
13313
|
-
|
|
13362
|
+
source.readCell({ namespace, environment: envA }),
|
|
13363
|
+
source.readCell({ namespace, environment: envB })
|
|
13314
13364
|
]);
|
|
13315
13365
|
return this.diff(decryptedA.values, decryptedB.values, envA, envB, namespace);
|
|
13316
13366
|
}
|
|
13317
13367
|
};
|
|
13318
13368
|
|
|
13319
|
-
// src/bulk/ops.ts
|
|
13320
|
-
var path8 = __toESM(require("path"));
|
|
13321
|
-
var BulkOps = class {
|
|
13322
|
-
constructor(tx) {
|
|
13323
|
-
this.tx = tx;
|
|
13324
|
-
}
|
|
13325
|
-
tx;
|
|
13326
|
-
/**
|
|
13327
|
-
* Set a key to different values in multiple environments at once.
|
|
13328
|
-
*
|
|
13329
|
-
* @param namespace - Target namespace.
|
|
13330
|
-
* @param key - Secret key name to set.
|
|
13331
|
-
* @param values - Map of `{ environment: value }` pairs.
|
|
13332
|
-
* @param manifest - Parsed manifest.
|
|
13333
|
-
* @param sopsClient - SOPS client used to decrypt and re-encrypt each file.
|
|
13334
|
-
* @param repoRoot - Absolute path to the repository root.
|
|
13335
|
-
* @throws Whatever the underlying encrypt throws — the transaction rolls back.
|
|
13336
|
-
*/
|
|
13337
|
-
async setAcrossEnvironments(namespace, key, values, manifest, sopsClient, repoRoot) {
|
|
13338
|
-
const targets = manifest.environments.filter((env) => env.name in values).map((env) => ({
|
|
13339
|
-
env: env.name,
|
|
13340
|
-
filePath: path8.join(
|
|
13341
|
-
repoRoot,
|
|
13342
|
-
manifest.file_pattern.replace("{namespace}", namespace).replace("{environment}", env.name)
|
|
13343
|
-
)
|
|
13344
|
-
}));
|
|
13345
|
-
if (targets.length === 0) return;
|
|
13346
|
-
await this.tx.run(repoRoot, {
|
|
13347
|
-
description: `clef set: ${namespace}/${key} across ${targets.length} env(s)`,
|
|
13348
|
-
paths: targets.map((t) => path8.relative(repoRoot, t.filePath)),
|
|
13349
|
-
mutate: async () => {
|
|
13350
|
-
for (const target of targets) {
|
|
13351
|
-
const decrypted = await sopsClient.decrypt(target.filePath);
|
|
13352
|
-
decrypted.values[key] = values[target.env];
|
|
13353
|
-
await sopsClient.encrypt(target.filePath, decrypted.values, manifest, target.env);
|
|
13354
|
-
}
|
|
13355
|
-
}
|
|
13356
|
-
});
|
|
13357
|
-
}
|
|
13358
|
-
/**
|
|
13359
|
-
* Delete a key from every environment in a namespace.
|
|
13360
|
-
*
|
|
13361
|
-
* @param namespace - Target namespace.
|
|
13362
|
-
* @param key - Secret key name to delete.
|
|
13363
|
-
* @param manifest - Parsed manifest.
|
|
13364
|
-
* @param sopsClient - SOPS client.
|
|
13365
|
-
* @param repoRoot - Absolute path to the repository root.
|
|
13366
|
-
*/
|
|
13367
|
-
async deleteAcrossEnvironments(namespace, key, manifest, sopsClient, repoRoot) {
|
|
13368
|
-
const targets = manifest.environments.map((env) => ({
|
|
13369
|
-
env: env.name,
|
|
13370
|
-
filePath: path8.join(
|
|
13371
|
-
repoRoot,
|
|
13372
|
-
manifest.file_pattern.replace("{namespace}", namespace).replace("{environment}", env.name)
|
|
13373
|
-
)
|
|
13374
|
-
}));
|
|
13375
|
-
await this.tx.run(repoRoot, {
|
|
13376
|
-
description: `clef delete: ${namespace}/${key} from ${targets.length} env(s)`,
|
|
13377
|
-
paths: targets.map((t) => path8.relative(repoRoot, t.filePath)),
|
|
13378
|
-
mutate: async () => {
|
|
13379
|
-
for (const target of targets) {
|
|
13380
|
-
const decrypted = await sopsClient.decrypt(target.filePath);
|
|
13381
|
-
if (key in decrypted.values) {
|
|
13382
|
-
delete decrypted.values[key];
|
|
13383
|
-
await sopsClient.encrypt(target.filePath, decrypted.values, manifest, target.env);
|
|
13384
|
-
}
|
|
13385
|
-
}
|
|
13386
|
-
}
|
|
13387
|
-
});
|
|
13388
|
-
}
|
|
13389
|
-
/**
|
|
13390
|
-
* Copy a single key's value from one matrix cell to another.
|
|
13391
|
-
*
|
|
13392
|
-
* @param key - Secret key name to copy.
|
|
13393
|
-
* @param fromCell - Source matrix cell.
|
|
13394
|
-
* @param toCell - Destination matrix cell.
|
|
13395
|
-
* @param sopsClient - SOPS client.
|
|
13396
|
-
* @param manifest - Parsed manifest.
|
|
13397
|
-
* @param repoRoot - Absolute path to the repository root.
|
|
13398
|
-
* @throws `Error` if the key does not exist in the source cell.
|
|
13399
|
-
*/
|
|
13400
|
-
async copyValue(key, fromCell, toCell, sopsClient, manifest, repoRoot) {
|
|
13401
|
-
const source = await sopsClient.decrypt(fromCell.filePath);
|
|
13402
|
-
if (!(key in source.values)) {
|
|
13403
|
-
throw new Error(
|
|
13404
|
-
`Key '${key}' does not exist in ${fromCell.namespace}/${fromCell.environment}.`
|
|
13405
|
-
);
|
|
13406
|
-
}
|
|
13407
|
-
await this.tx.run(repoRoot, {
|
|
13408
|
-
description: `clef copy: ${key} from ${fromCell.namespace}/${fromCell.environment} to ${toCell.namespace}/${toCell.environment}`,
|
|
13409
|
-
paths: [path8.relative(repoRoot, toCell.filePath)],
|
|
13410
|
-
mutate: async () => {
|
|
13411
|
-
const dest = await sopsClient.decrypt(toCell.filePath);
|
|
13412
|
-
dest.values[key] = source.values[key];
|
|
13413
|
-
await sopsClient.encrypt(toCell.filePath, dest.values, manifest, toCell.environment);
|
|
13414
|
-
}
|
|
13415
|
-
});
|
|
13416
|
-
}
|
|
13417
|
-
};
|
|
13418
|
-
|
|
13419
13369
|
// src/git/integration.ts
|
|
13420
13370
|
var fs10 = __toESM(require("fs"));
|
|
13421
|
-
var
|
|
13371
|
+
var path7 = __toESM(require("path"));
|
|
13422
13372
|
var PRE_COMMIT_HOOK = `#!/bin/sh
|
|
13423
13373
|
# Clef pre-commit hook \u2014 blocks commits of files missing SOPS encryption metadata
|
|
13424
13374
|
# and scans staged files for plaintext secrets.
|
|
@@ -13594,17 +13544,17 @@ var GitIntegration = class {
|
|
|
13594
13544
|
* @returns The kind of operation in progress, or null if none.
|
|
13595
13545
|
*/
|
|
13596
13546
|
async isMidOperation(repoRoot) {
|
|
13597
|
-
const gitDir =
|
|
13598
|
-
if (fs10.existsSync(
|
|
13547
|
+
const gitDir = path7.join(repoRoot, ".git");
|
|
13548
|
+
if (fs10.existsSync(path7.join(gitDir, "MERGE_HEAD"))) {
|
|
13599
13549
|
return { midOp: true, kind: "merge" };
|
|
13600
13550
|
}
|
|
13601
|
-
if (fs10.existsSync(
|
|
13551
|
+
if (fs10.existsSync(path7.join(gitDir, "rebase-merge")) || fs10.existsSync(path7.join(gitDir, "rebase-apply"))) {
|
|
13602
13552
|
return { midOp: true, kind: "rebase" };
|
|
13603
13553
|
}
|
|
13604
|
-
if (fs10.existsSync(
|
|
13554
|
+
if (fs10.existsSync(path7.join(gitDir, "CHERRY_PICK_HEAD"))) {
|
|
13605
13555
|
return { midOp: true, kind: "cherry-pick" };
|
|
13606
13556
|
}
|
|
13607
|
-
if (fs10.existsSync(
|
|
13557
|
+
if (fs10.existsSync(path7.join(gitDir, "REVERT_HEAD"))) {
|
|
13608
13558
|
return { midOp: true, kind: "revert" };
|
|
13609
13559
|
}
|
|
13610
13560
|
return { midOp: false };
|
|
@@ -13816,14 +13766,14 @@ var GitIntegration = class {
|
|
|
13816
13766
|
{ cwd: repoRoot }
|
|
13817
13767
|
);
|
|
13818
13768
|
const metadataGitConfig = metaConfig.exitCode === 0 && metaConfig.stdout.trim().length > 0;
|
|
13819
|
-
const attrFilePath =
|
|
13769
|
+
const attrFilePath = path7.join(repoRoot, ".gitattributes");
|
|
13820
13770
|
const attrContent = fs10.existsSync(attrFilePath) ? fs10.readFileSync(attrFilePath, "utf-8") : "";
|
|
13821
13771
|
const gitattributes = attrContent.includes("merge=sops");
|
|
13822
13772
|
const metadataGitattributes = attrContent.includes("merge=clef-metadata");
|
|
13823
13773
|
return { gitConfig, gitattributes, metadataGitConfig, metadataGitattributes };
|
|
13824
13774
|
}
|
|
13825
13775
|
async ensureGitattributes(repoRoot) {
|
|
13826
|
-
const attrPath =
|
|
13776
|
+
const attrPath = path7.join(repoRoot, ".gitattributes");
|
|
13827
13777
|
const existing = fs10.existsSync(attrPath) ? fs10.readFileSync(attrPath, "utf-8") : "";
|
|
13828
13778
|
let newContent = existing;
|
|
13829
13779
|
if (!existing.includes("merge=sops")) {
|
|
@@ -13858,9 +13808,9 @@ ${block}` : block;
|
|
|
13858
13808
|
* @throws {@link GitOperationError} On failure.
|
|
13859
13809
|
*/
|
|
13860
13810
|
async installPreCommitHook(repoRoot) {
|
|
13861
|
-
const hookPath =
|
|
13811
|
+
const hookPath = path7.join(repoRoot, ".git", "hooks", "pre-commit");
|
|
13862
13812
|
try {
|
|
13863
|
-
const hooksDir =
|
|
13813
|
+
const hooksDir = path7.dirname(hookPath);
|
|
13864
13814
|
if (!fs10.existsSync(hooksDir)) {
|
|
13865
13815
|
fs10.mkdirSync(hooksDir, { recursive: true });
|
|
13866
13816
|
}
|
|
@@ -13876,7 +13826,7 @@ ${block}` : block;
|
|
|
13876
13826
|
|
|
13877
13827
|
// src/tx/transaction-manager.ts
|
|
13878
13828
|
var fs11 = __toESM(require("fs"));
|
|
13879
|
-
var
|
|
13829
|
+
var path8 = __toESM(require("path"));
|
|
13880
13830
|
var lockfile = __toESM(require_proper_lockfile());
|
|
13881
13831
|
|
|
13882
13832
|
// src/tx/errors.ts
|
|
@@ -13925,15 +13875,15 @@ var TransactionManager = class {
|
|
|
13925
13875
|
async run(repoRoot, opts) {
|
|
13926
13876
|
const shouldCommit = opts.commit !== false;
|
|
13927
13877
|
const allowDirty = opts.allowDirty === true;
|
|
13928
|
-
const clefDir =
|
|
13878
|
+
const clefDir = path8.join(repoRoot, CLEF_DIR);
|
|
13929
13879
|
if (!fs11.existsSync(clefDir)) {
|
|
13930
13880
|
fs11.mkdirSync(clefDir, { recursive: true });
|
|
13931
13881
|
}
|
|
13932
|
-
const clefGitignore =
|
|
13882
|
+
const clefGitignore = path8.join(clefDir, ".gitignore");
|
|
13933
13883
|
if (!fs11.existsSync(clefGitignore)) {
|
|
13934
13884
|
fs11.writeFileSync(clefGitignore, "*\n");
|
|
13935
13885
|
}
|
|
13936
|
-
const lockPath =
|
|
13886
|
+
const lockPath = path8.join(clefDir, LOCK_FILE);
|
|
13937
13887
|
if (!fs11.existsSync(lockPath)) {
|
|
13938
13888
|
fs11.writeFileSync(lockPath, "");
|
|
13939
13889
|
}
|
|
@@ -14074,16 +14024,15 @@ var TransactionManager = class {
|
|
|
14074
14024
|
var fs14 = __toESM(require("fs"));
|
|
14075
14025
|
var net = __toESM(require("net"));
|
|
14076
14026
|
var import_crypto = require("crypto");
|
|
14077
|
-
var import_write_file_atomic3 = __toESM(require_lib());
|
|
14078
14027
|
var YAML8 = __toESM(require("yaml"));
|
|
14079
14028
|
|
|
14080
14029
|
// src/sops/resolver.ts
|
|
14081
14030
|
var fs13 = __toESM(require("fs"));
|
|
14082
|
-
var
|
|
14031
|
+
var path10 = __toESM(require("path"));
|
|
14083
14032
|
|
|
14084
14033
|
// src/sops/bundled.ts
|
|
14085
14034
|
var fs12 = __toESM(require("fs"));
|
|
14086
|
-
var
|
|
14035
|
+
var path9 = __toESM(require("path"));
|
|
14087
14036
|
function tryBundled() {
|
|
14088
14037
|
const platform = process.platform;
|
|
14089
14038
|
const arch = process.arch;
|
|
@@ -14095,8 +14044,8 @@ function tryBundled() {
|
|
|
14095
14044
|
const binName = platform === "win32" ? "sops.exe" : "sops";
|
|
14096
14045
|
try {
|
|
14097
14046
|
const packageMain = require.resolve(`${packageName}/package.json`);
|
|
14098
|
-
const packageDir =
|
|
14099
|
-
const binPath =
|
|
14047
|
+
const packageDir = path9.dirname(packageMain);
|
|
14048
|
+
const binPath = path9.join(packageDir, "bin", binName);
|
|
14100
14049
|
return fs12.existsSync(binPath) ? binPath : null;
|
|
14101
14050
|
} catch {
|
|
14102
14051
|
return null;
|
|
@@ -14105,7 +14054,7 @@ function tryBundled() {
|
|
|
14105
14054
|
|
|
14106
14055
|
// src/sops/resolver.ts
|
|
14107
14056
|
function validateSopsPath(candidate) {
|
|
14108
|
-
if (!
|
|
14057
|
+
if (!path10.isAbsolute(candidate)) {
|
|
14109
14058
|
throw new Error(`CLEF_SOPS_PATH must be an absolute path, got '${candidate}'.`);
|
|
14110
14059
|
}
|
|
14111
14060
|
const segments = candidate.split(/[/\\]/);
|
|
@@ -14284,6 +14233,17 @@ function isClefHsmArn(arn) {
|
|
|
14284
14233
|
function formatFromPath(filePath) {
|
|
14285
14234
|
return filePath.endsWith(".json") ? "json" : "yaml";
|
|
14286
14235
|
}
|
|
14236
|
+
async function openInputPipe(content) {
|
|
14237
|
+
if (process.platform === "win32") {
|
|
14238
|
+
const pipe = await openWindowsInputPipe(content);
|
|
14239
|
+
return { inputArg: pipe.inputArg, cleanup: pipe.cleanup };
|
|
14240
|
+
}
|
|
14241
|
+
return { inputArg: "/dev/stdin", cleanup: () => {
|
|
14242
|
+
}, runnerStdin: content };
|
|
14243
|
+
}
|
|
14244
|
+
function nullConfigPath() {
|
|
14245
|
+
return process.platform === "win32" ? "NUL" : "/dev/null";
|
|
14246
|
+
}
|
|
14287
14247
|
function openWindowsInputPipe(content) {
|
|
14288
14248
|
const pipeName = `\\\\.\\pipe\\clef-sops-${(0, import_crypto.randomBytes)(8).toString("hex")}`;
|
|
14289
14249
|
return new Promise((resolve2, reject) => {
|
|
@@ -14331,6 +14291,10 @@ var SopsClient = class {
|
|
|
14331
14291
|
runner;
|
|
14332
14292
|
ageKeyFile;
|
|
14333
14293
|
ageKey;
|
|
14294
|
+
/** {@link EncryptionBackend} identifier. */
|
|
14295
|
+
id = "sops";
|
|
14296
|
+
/** {@link EncryptionBackend} short description (used by `clef doctor`). */
|
|
14297
|
+
description = "SOPS-based encryption via the bundled `sops` binary";
|
|
14334
14298
|
sopsCommand;
|
|
14335
14299
|
keyserviceArgs;
|
|
14336
14300
|
buildSopsEnv() {
|
|
@@ -14344,14 +14308,18 @@ var SopsClient = class {
|
|
|
14344
14308
|
return Object.keys(env).length > 0 ? env : void 0;
|
|
14345
14309
|
}
|
|
14346
14310
|
/**
|
|
14347
|
-
* Decrypt a SOPS-encrypted file
|
|
14311
|
+
* Decrypt a SOPS-encrypted file by path. The only remaining file-path
|
|
14312
|
+
* entry point on this class — kept for the merge driver, which
|
|
14313
|
+
* receives temp filesystem paths from git that don't map onto a
|
|
14314
|
+
* `CellRef`. Production `SecretSource` consumers should call
|
|
14315
|
+
* `source.readCell` instead.
|
|
14348
14316
|
*
|
|
14349
14317
|
* @param filePath - Path to the `.enc.yaml` or `.enc.json` file.
|
|
14350
14318
|
* @returns {@link DecryptedFile} with plaintext values in memory only.
|
|
14351
14319
|
* @throws {@link SopsKeyNotFoundError} If no matching decryption key is available.
|
|
14352
14320
|
* @throws {@link SopsDecryptionError} On any other decryption failure.
|
|
14353
14321
|
*/
|
|
14354
|
-
async
|
|
14322
|
+
async decryptFile(filePath) {
|
|
14355
14323
|
await assertSops(this.runner, this.sopsCommand);
|
|
14356
14324
|
const fmt = formatFromPath(filePath);
|
|
14357
14325
|
const env = this.buildSopsEnv();
|
|
@@ -14387,170 +14355,9 @@ var SopsClient = class {
|
|
|
14387
14355
|
for (const [key, value] of Object.entries(parsed)) {
|
|
14388
14356
|
values[key] = String(value);
|
|
14389
14357
|
}
|
|
14390
|
-
const metadata =
|
|
14358
|
+
const metadata = this.parseMetadataFromFile(filePath);
|
|
14391
14359
|
return { values, metadata };
|
|
14392
14360
|
}
|
|
14393
|
-
/**
|
|
14394
|
-
* Encrypt a key/value map and write it to an encrypted SOPS file.
|
|
14395
|
-
*
|
|
14396
|
-
* @param filePath - Destination path for the encrypted file.
|
|
14397
|
-
* @param values - Flat key/value map to encrypt.
|
|
14398
|
-
* @param manifest - Manifest used to determine the encryption backend and key configuration.
|
|
14399
|
-
* @param environment - Optional environment name. When provided, per-env backend overrides
|
|
14400
|
-
* are resolved from the manifest. When omitted, the global `sops.default_backend` is used.
|
|
14401
|
-
* @throws {@link SopsEncryptionError} On encryption or write failure.
|
|
14402
|
-
*/
|
|
14403
|
-
async encrypt(filePath, values, manifest, environment) {
|
|
14404
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
14405
|
-
const fmt = formatFromPath(filePath);
|
|
14406
|
-
const content = fmt === "json" ? JSON.stringify(values, null, 2) : YAML8.stringify(values);
|
|
14407
|
-
const args = this.buildEncryptArgs(filePath, manifest, environment);
|
|
14408
|
-
const env = this.buildSopsEnv();
|
|
14409
|
-
let inputArg;
|
|
14410
|
-
let pipeCleanup;
|
|
14411
|
-
if (process.platform === "win32") {
|
|
14412
|
-
const pipe = await openWindowsInputPipe(content);
|
|
14413
|
-
inputArg = pipe.inputArg;
|
|
14414
|
-
pipeCleanup = pipe.cleanup;
|
|
14415
|
-
} else {
|
|
14416
|
-
inputArg = "/dev/stdin";
|
|
14417
|
-
}
|
|
14418
|
-
let result;
|
|
14419
|
-
try {
|
|
14420
|
-
const configPath = process.platform === "win32" ? "NUL" : "/dev/null";
|
|
14421
|
-
result = await this.runner.run(
|
|
14422
|
-
this.sopsCommand,
|
|
14423
|
-
[
|
|
14424
|
-
"--config",
|
|
14425
|
-
configPath,
|
|
14426
|
-
"encrypt",
|
|
14427
|
-
...this.keyserviceArgs,
|
|
14428
|
-
...args,
|
|
14429
|
-
"--input-type",
|
|
14430
|
-
fmt,
|
|
14431
|
-
"--output-type",
|
|
14432
|
-
fmt,
|
|
14433
|
-
"--filename-override",
|
|
14434
|
-
filePath,
|
|
14435
|
-
inputArg
|
|
14436
|
-
],
|
|
14437
|
-
{
|
|
14438
|
-
// stdin is still piped on Unix (/dev/stdin reads from it);
|
|
14439
|
-
// on Windows the named pipe server feeds content directly.
|
|
14440
|
-
...process.platform !== "win32" ? { stdin: content } : {},
|
|
14441
|
-
...env ? { env } : {}
|
|
14442
|
-
}
|
|
14443
|
-
);
|
|
14444
|
-
} finally {
|
|
14445
|
-
pipeCleanup?.();
|
|
14446
|
-
}
|
|
14447
|
-
if (result.exitCode !== 0) {
|
|
14448
|
-
throw new SopsEncryptionError(
|
|
14449
|
-
`Failed to encrypt '${filePath}': ${result.stderr.trim()}`,
|
|
14450
|
-
filePath
|
|
14451
|
-
);
|
|
14452
|
-
}
|
|
14453
|
-
try {
|
|
14454
|
-
await (0, import_write_file_atomic3.default)(filePath, result.stdout);
|
|
14455
|
-
} catch (err) {
|
|
14456
|
-
throw new SopsEncryptionError(
|
|
14457
|
-
`Failed to write encrypted data to '${filePath}': ${err.message}`,
|
|
14458
|
-
filePath
|
|
14459
|
-
);
|
|
14460
|
-
}
|
|
14461
|
-
}
|
|
14462
|
-
/**
|
|
14463
|
-
* Rotate encryption by adding a new age recipient key to an existing SOPS file.
|
|
14464
|
-
*
|
|
14465
|
-
* @param filePath - Path to the encrypted file to re-encrypt.
|
|
14466
|
-
* @param newKey - New age public key to add as a recipient.
|
|
14467
|
-
* @throws {@link SopsEncryptionError} On failure.
|
|
14468
|
-
*/
|
|
14469
|
-
async reEncrypt(filePath, newKey) {
|
|
14470
|
-
await this.addRecipient(filePath, newKey);
|
|
14471
|
-
}
|
|
14472
|
-
/**
|
|
14473
|
-
* Add an age recipient to an existing SOPS file.
|
|
14474
|
-
*
|
|
14475
|
-
* @param filePath - Path to the encrypted file.
|
|
14476
|
-
* @param key - age public key to add as a recipient.
|
|
14477
|
-
* @throws {@link SopsEncryptionError} On failure.
|
|
14478
|
-
*/
|
|
14479
|
-
async addRecipient(filePath, key) {
|
|
14480
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
14481
|
-
const env = this.buildSopsEnv();
|
|
14482
|
-
const result = await this.runner.run(
|
|
14483
|
-
this.sopsCommand,
|
|
14484
|
-
["rotate", ...this.keyserviceArgs, "-i", "--add-age", key, filePath],
|
|
14485
|
-
{
|
|
14486
|
-
...env ? { env } : {}
|
|
14487
|
-
}
|
|
14488
|
-
);
|
|
14489
|
-
if (result.exitCode !== 0) {
|
|
14490
|
-
throw new SopsEncryptionError(
|
|
14491
|
-
`Failed to add recipient to '${filePath}': ${result.stderr.trim()}`,
|
|
14492
|
-
filePath
|
|
14493
|
-
);
|
|
14494
|
-
}
|
|
14495
|
-
}
|
|
14496
|
-
/**
|
|
14497
|
-
* Remove an age recipient from an existing SOPS file.
|
|
14498
|
-
*
|
|
14499
|
-
* @param filePath - Path to the encrypted file.
|
|
14500
|
-
* @param key - age public key to remove.
|
|
14501
|
-
* @throws {@link SopsEncryptionError} On failure.
|
|
14502
|
-
*/
|
|
14503
|
-
async removeRecipient(filePath, key) {
|
|
14504
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
14505
|
-
const env = this.buildSopsEnv();
|
|
14506
|
-
const result = await this.runner.run(
|
|
14507
|
-
this.sopsCommand,
|
|
14508
|
-
["rotate", ...this.keyserviceArgs, "-i", "--rm-age", key, filePath],
|
|
14509
|
-
{
|
|
14510
|
-
...env ? { env } : {}
|
|
14511
|
-
}
|
|
14512
|
-
);
|
|
14513
|
-
if (result.exitCode !== 0) {
|
|
14514
|
-
throw new SopsEncryptionError(
|
|
14515
|
-
`Failed to remove recipient from '${filePath}': ${result.stderr.trim()}`,
|
|
14516
|
-
filePath
|
|
14517
|
-
);
|
|
14518
|
-
}
|
|
14519
|
-
}
|
|
14520
|
-
/**
|
|
14521
|
-
* Check whether a file contains valid SOPS encryption metadata.
|
|
14522
|
-
*
|
|
14523
|
-
* @param filePath - Path to the file to check.
|
|
14524
|
-
* @returns `true` if valid SOPS metadata is present; `false` otherwise. Never throws.
|
|
14525
|
-
*/
|
|
14526
|
-
async validateEncryption(filePath) {
|
|
14527
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
14528
|
-
try {
|
|
14529
|
-
await this.getMetadata(filePath);
|
|
14530
|
-
return true;
|
|
14531
|
-
} catch {
|
|
14532
|
-
return false;
|
|
14533
|
-
}
|
|
14534
|
-
}
|
|
14535
|
-
/**
|
|
14536
|
-
* Extract SOPS metadata (backend, recipients, last-modified timestamp) from an encrypted file
|
|
14537
|
-
* without decrypting its values.
|
|
14538
|
-
*
|
|
14539
|
-
* @param filePath - Path to the encrypted file.
|
|
14540
|
-
* @returns {@link SopsMetadata} parsed from the file's `sops:` block.
|
|
14541
|
-
* @throws {@link SopsDecryptionError} If the file cannot be read or parsed.
|
|
14542
|
-
*/
|
|
14543
|
-
async getMetadata(filePath) {
|
|
14544
|
-
await assertSops(this.runner, this.sopsCommand);
|
|
14545
|
-
const env = this.buildSopsEnv();
|
|
14546
|
-
const result = await this.runner.run(this.sopsCommand, ["filestatus", filePath], {
|
|
14547
|
-
...env ? { env } : {}
|
|
14548
|
-
});
|
|
14549
|
-
if (result.exitCode !== 0) {
|
|
14550
|
-
return this.parseMetadataFromFile(filePath);
|
|
14551
|
-
}
|
|
14552
|
-
return this.parseMetadataFromFile(filePath);
|
|
14553
|
-
}
|
|
14554
14361
|
/**
|
|
14555
14362
|
* Determine whether a decrypt failure is caused by a missing/mismatched key (vs. some other
|
|
14556
14363
|
* SOPS error) without relying on stderr message text.
|
|
@@ -14594,20 +14401,30 @@ var SopsClient = class {
|
|
|
14594
14401
|
filePath
|
|
14595
14402
|
);
|
|
14596
14403
|
}
|
|
14404
|
+
return this.parseMetadataFromContent(content, filePath);
|
|
14405
|
+
}
|
|
14406
|
+
/**
|
|
14407
|
+
* Parse SOPS metadata from a string (no IO). Used by both
|
|
14408
|
+
* `parseMetadataFromFile` (after reading from disk) and the blob-shaped
|
|
14409
|
+
* `getMetadataFromBlob` (which receives ciphertext directly from a
|
|
14410
|
+
* BlobStore). The `label` is woven into error messages so callers can
|
|
14411
|
+
* include the file path or cell ref the content came from.
|
|
14412
|
+
*/
|
|
14413
|
+
parseMetadataFromContent(content, label) {
|
|
14597
14414
|
let parsed;
|
|
14598
14415
|
try {
|
|
14599
14416
|
parsed = YAML8.parse(content);
|
|
14600
14417
|
} catch {
|
|
14601
14418
|
throw new SopsDecryptionError(
|
|
14602
|
-
|
|
14603
|
-
|
|
14419
|
+
`${label} is not valid YAML. Cannot extract SOPS metadata.`,
|
|
14420
|
+
label
|
|
14604
14421
|
);
|
|
14605
14422
|
}
|
|
14606
14423
|
const sops = parsed?.sops;
|
|
14607
14424
|
if (!sops) {
|
|
14608
14425
|
throw new SopsDecryptionError(
|
|
14609
|
-
|
|
14610
|
-
|
|
14426
|
+
`${label} does not contain SOPS metadata. It may not be encrypted.`,
|
|
14427
|
+
label
|
|
14611
14428
|
);
|
|
14612
14429
|
}
|
|
14613
14430
|
const backend = this.detectBackend(sops);
|
|
@@ -14670,7 +14487,7 @@ var SopsClient = class {
|
|
|
14670
14487
|
}
|
|
14671
14488
|
}
|
|
14672
14489
|
}
|
|
14673
|
-
buildEncryptArgs(
|
|
14490
|
+
buildEncryptArgs(manifest, environment) {
|
|
14674
14491
|
const args = [];
|
|
14675
14492
|
const config = environment ? resolveBackendConfig(manifest, environment) : {
|
|
14676
14493
|
backend: manifest.sops.default_backend,
|
|
@@ -14718,36 +14535,280 @@ var SopsClient = class {
|
|
|
14718
14535
|
}
|
|
14719
14536
|
return args;
|
|
14720
14537
|
}
|
|
14721
|
-
|
|
14722
|
-
|
|
14723
|
-
//
|
|
14724
|
-
|
|
14725
|
-
|
|
14726
|
-
|
|
14727
|
-
|
|
14728
|
-
|
|
14729
|
-
|
|
14730
|
-
|
|
14731
|
-
|
|
14732
|
-
|
|
14733
|
-
|
|
14734
|
-
|
|
14735
|
-
|
|
14736
|
-
const
|
|
14737
|
-
|
|
14738
|
-
|
|
14739
|
-
|
|
14740
|
-
|
|
14741
|
-
|
|
14742
|
-
|
|
14743
|
-
|
|
14744
|
-
|
|
14745
|
-
|
|
14746
|
-
|
|
14747
|
-
|
|
14748
|
-
|
|
14749
|
-
|
|
14750
|
-
|
|
14538
|
+
// ── Blob-shaped methods ─────────────────────────────────────────────────
|
|
14539
|
+
//
|
|
14540
|
+
// These mirror the file-path methods above but operate on opaque
|
|
14541
|
+
// ciphertext bytes via SOPS' stdin/stdout. They are the substrate-
|
|
14542
|
+
// agnostic primitives used by the `composeSecretSource` factory to
|
|
14543
|
+
// wrap any `BlobStore` (filesystem, postgres, etc.) into a full
|
|
14544
|
+
// `SecretSource`. Plaintext never leaves the SOPS subprocess.
|
|
14545
|
+
/**
|
|
14546
|
+
* {@link EncryptionBackend.decrypt} — decrypt SOPS-encrypted bytes (e.g.
|
|
14547
|
+
* read from a `StorageBackend`) and return plaintext values + metadata.
|
|
14548
|
+
* Plaintext lives only in memory.
|
|
14549
|
+
*/
|
|
14550
|
+
async decrypt(blob, ctx) {
|
|
14551
|
+
await assertSops(this.runner, this.sopsCommand);
|
|
14552
|
+
const env = this.buildSopsEnv();
|
|
14553
|
+
const pipe = await openInputPipe(blob);
|
|
14554
|
+
let result;
|
|
14555
|
+
try {
|
|
14556
|
+
result = await this.runner.run(
|
|
14557
|
+
this.sopsCommand,
|
|
14558
|
+
[
|
|
14559
|
+
"decrypt",
|
|
14560
|
+
...this.keyserviceArgs,
|
|
14561
|
+
"--input-type",
|
|
14562
|
+
ctx.format,
|
|
14563
|
+
"--output-type",
|
|
14564
|
+
ctx.format,
|
|
14565
|
+
pipe.inputArg
|
|
14566
|
+
],
|
|
14567
|
+
{
|
|
14568
|
+
...pipe.runnerStdin !== void 0 ? { stdin: pipe.runnerStdin } : {},
|
|
14569
|
+
...env ? { env } : {}
|
|
14570
|
+
}
|
|
14571
|
+
);
|
|
14572
|
+
} finally {
|
|
14573
|
+
pipe.cleanup();
|
|
14574
|
+
}
|
|
14575
|
+
if (result.exitCode !== 0) {
|
|
14576
|
+
const errorType = await this.classifyDecryptErrorFromContent(blob);
|
|
14577
|
+
if (errorType === "key-not-found") {
|
|
14578
|
+
throw new SopsKeyNotFoundError(`No decryption key found for cell. ${result.stderr.trim()}`);
|
|
14579
|
+
}
|
|
14580
|
+
throw new SopsDecryptionError(`Failed to decrypt cell: ${result.stderr.trim()}`);
|
|
14581
|
+
}
|
|
14582
|
+
let parsed;
|
|
14583
|
+
try {
|
|
14584
|
+
parsed = YAML8.parse(result.stdout) ?? {};
|
|
14585
|
+
} catch {
|
|
14586
|
+
throw new SopsDecryptionError("Decrypted content is not valid YAML.");
|
|
14587
|
+
}
|
|
14588
|
+
const values = {};
|
|
14589
|
+
for (const [key, value] of Object.entries(parsed)) {
|
|
14590
|
+
values[key] = String(value);
|
|
14591
|
+
}
|
|
14592
|
+
const metadata = this.parseMetadataFromContent(blob, "<cell>");
|
|
14593
|
+
return { values, metadata };
|
|
14594
|
+
}
|
|
14595
|
+
/**
|
|
14596
|
+
* {@link EncryptionBackend.encrypt} — encrypt plaintext values into a
|
|
14597
|
+
* SOPS-formatted ciphertext blob. Returns the bytes as a string;
|
|
14598
|
+
* caller (typically a `StorageBackend`) decides where to put them.
|
|
14599
|
+
* Plaintext is piped via stdin only.
|
|
14600
|
+
*/
|
|
14601
|
+
async encrypt(values, ctx) {
|
|
14602
|
+
await assertSops(this.runner, this.sopsCommand);
|
|
14603
|
+
const content = ctx.format === "json" ? JSON.stringify(values, null, 2) : YAML8.stringify(values);
|
|
14604
|
+
const args = this.buildEncryptArgs(ctx.manifest, ctx.environment);
|
|
14605
|
+
const env = this.buildSopsEnv();
|
|
14606
|
+
const pipe = await openInputPipe(content);
|
|
14607
|
+
let result;
|
|
14608
|
+
try {
|
|
14609
|
+
result = await this.runner.run(
|
|
14610
|
+
this.sopsCommand,
|
|
14611
|
+
[
|
|
14612
|
+
"--config",
|
|
14613
|
+
nullConfigPath(),
|
|
14614
|
+
"encrypt",
|
|
14615
|
+
...this.keyserviceArgs,
|
|
14616
|
+
...args,
|
|
14617
|
+
"--input-type",
|
|
14618
|
+
ctx.format,
|
|
14619
|
+
"--output-type",
|
|
14620
|
+
ctx.format,
|
|
14621
|
+
pipe.inputArg
|
|
14622
|
+
],
|
|
14623
|
+
{
|
|
14624
|
+
...pipe.runnerStdin !== void 0 ? { stdin: pipe.runnerStdin } : {},
|
|
14625
|
+
...env ? { env } : {}
|
|
14626
|
+
}
|
|
14627
|
+
);
|
|
14628
|
+
} finally {
|
|
14629
|
+
pipe.cleanup();
|
|
14630
|
+
}
|
|
14631
|
+
if (result.exitCode !== 0) {
|
|
14632
|
+
throw new SopsEncryptionError(`Failed to encrypt cell: ${result.stderr.trim()}`);
|
|
14633
|
+
}
|
|
14634
|
+
return result.stdout;
|
|
14635
|
+
}
|
|
14636
|
+
/**
|
|
14637
|
+
* {@link EncryptionBackend.rotate} — add or remove recipients from an
|
|
14638
|
+
* encrypted SOPS blob via stdin/stdout. Drops the in-place `-i` flag
|
|
14639
|
+
* the deleted file-path-shaped methods used, so SOPS writes the
|
|
14640
|
+
* rotated ciphertext to stdout instead of back to a file. Plaintext
|
|
14641
|
+
* stays inside the SOPS subprocess; no plaintext window exists in
|
|
14642
|
+
* this Node process.
|
|
14643
|
+
*
|
|
14644
|
+
* Single SOPS invocation can both add and remove recipients
|
|
14645
|
+
* simultaneously (matches the CLI flag set).
|
|
14646
|
+
*/
|
|
14647
|
+
async rotate(blob, opts, ctx) {
|
|
14648
|
+
await assertSops(this.runner, this.sopsCommand);
|
|
14649
|
+
const env = this.buildSopsEnv();
|
|
14650
|
+
const pipe = await openInputPipe(blob);
|
|
14651
|
+
const flagArgs = [];
|
|
14652
|
+
if (opts.addAge) flagArgs.push("--add-age", opts.addAge);
|
|
14653
|
+
if (opts.rmAge) flagArgs.push("--rm-age", opts.rmAge);
|
|
14654
|
+
if (opts.addKms) flagArgs.push("--add-kms", opts.addKms);
|
|
14655
|
+
if (opts.rmKms) flagArgs.push("--rm-kms", opts.rmKms);
|
|
14656
|
+
if (opts.addGcpKms) flagArgs.push("--add-gcp-kms", opts.addGcpKms);
|
|
14657
|
+
if (opts.rmGcpKms) flagArgs.push("--rm-gcp-kms", opts.rmGcpKms);
|
|
14658
|
+
if (opts.addAzureKv) flagArgs.push("--add-azure-kv", opts.addAzureKv);
|
|
14659
|
+
if (opts.rmAzureKv) flagArgs.push("--rm-azure-kv", opts.rmAzureKv);
|
|
14660
|
+
if (opts.addPgp) flagArgs.push("--add-pgp", opts.addPgp);
|
|
14661
|
+
if (opts.rmPgp) flagArgs.push("--rm-pgp", opts.rmPgp);
|
|
14662
|
+
let result;
|
|
14663
|
+
try {
|
|
14664
|
+
result = await this.runner.run(
|
|
14665
|
+
this.sopsCommand,
|
|
14666
|
+
[
|
|
14667
|
+
"--config",
|
|
14668
|
+
nullConfigPath(),
|
|
14669
|
+
"rotate",
|
|
14670
|
+
...this.keyserviceArgs,
|
|
14671
|
+
...flagArgs,
|
|
14672
|
+
"--input-type",
|
|
14673
|
+
ctx.format,
|
|
14674
|
+
"--output-type",
|
|
14675
|
+
ctx.format,
|
|
14676
|
+
pipe.inputArg
|
|
14677
|
+
],
|
|
14678
|
+
{
|
|
14679
|
+
...pipe.runnerStdin !== void 0 ? { stdin: pipe.runnerStdin } : {},
|
|
14680
|
+
...env ? { env } : {}
|
|
14681
|
+
}
|
|
14682
|
+
);
|
|
14683
|
+
} finally {
|
|
14684
|
+
pipe.cleanup();
|
|
14685
|
+
}
|
|
14686
|
+
if (result.exitCode !== 0) {
|
|
14687
|
+
throw new SopsEncryptionError(`Failed to rotate cell: ${result.stderr.trim()}`);
|
|
14688
|
+
}
|
|
14689
|
+
return result.stdout;
|
|
14690
|
+
}
|
|
14691
|
+
/**
|
|
14692
|
+
* {@link EncryptionBackend.getMetadata} — extract SOPS metadata from a
|
|
14693
|
+
* ciphertext blob without decrypting. Pure parser, no IO, no
|
|
14694
|
+
* subprocess.
|
|
14695
|
+
*/
|
|
14696
|
+
getMetadata(content) {
|
|
14697
|
+
return this.parseMetadataFromContent(content, "<cell>");
|
|
14698
|
+
}
|
|
14699
|
+
/**
|
|
14700
|
+
* {@link EncryptionBackend.validateEncryption} — whether `content` is a
|
|
14701
|
+
* valid SOPS-encrypted blob (parses + has the `sops:` metadata
|
|
14702
|
+
* block). Never throws.
|
|
14703
|
+
*/
|
|
14704
|
+
validateEncryption(content) {
|
|
14705
|
+
try {
|
|
14706
|
+
this.parseMetadataFromContent(content, "<cell>");
|
|
14707
|
+
return true;
|
|
14708
|
+
} catch {
|
|
14709
|
+
return false;
|
|
14710
|
+
}
|
|
14711
|
+
}
|
|
14712
|
+
/**
|
|
14713
|
+
* Blob-shaped variant of `classifyDecryptError`. Same logic as the
|
|
14714
|
+
* file-path version but reads metadata from the in-memory ciphertext
|
|
14715
|
+
* instead of disk.
|
|
14716
|
+
*/
|
|
14717
|
+
async classifyDecryptErrorFromContent(content) {
|
|
14718
|
+
let metadata;
|
|
14719
|
+
try {
|
|
14720
|
+
metadata = this.parseMetadataFromContent(content, "<cell>");
|
|
14721
|
+
} catch {
|
|
14722
|
+
return "other";
|
|
14723
|
+
}
|
|
14724
|
+
if (metadata.backend !== "age") return "other";
|
|
14725
|
+
if (!this.ageKey && !this.ageKeyFile) return "key-not-found";
|
|
14726
|
+
let keyContent;
|
|
14727
|
+
try {
|
|
14728
|
+
keyContent = this.ageKey ?? fs14.readFileSync(this.ageKeyFile, "utf-8");
|
|
14729
|
+
} catch {
|
|
14730
|
+
return "key-not-found";
|
|
14731
|
+
}
|
|
14732
|
+
const privateKeys = keyContent.split("\n").map((line) => line.trim()).filter((line) => line.startsWith("AGE-SECRET-KEY-"));
|
|
14733
|
+
if (privateKeys.length === 0) return "key-not-found";
|
|
14734
|
+
try {
|
|
14735
|
+
const publicKeys = await Promise.all(privateKeys.map((k) => deriveAgePublicKey(k)));
|
|
14736
|
+
const recipients = new Set(metadata.recipients);
|
|
14737
|
+
return publicKeys.some((pk) => recipients.has(pk)) ? "other" : "key-not-found";
|
|
14738
|
+
} catch {
|
|
14739
|
+
return "other";
|
|
14740
|
+
}
|
|
14741
|
+
}
|
|
14742
|
+
};
|
|
14743
|
+
|
|
14744
|
+
// src/sops/linux-stdin-fifo.ts
|
|
14745
|
+
var os = __toESM(require("os"));
|
|
14746
|
+
var path11 = __toESM(require("path"));
|
|
14747
|
+
var import_child_process = require("child_process");
|
|
14748
|
+
function shouldUseLinuxStdinFifo() {
|
|
14749
|
+
return process.platform === "linux" && !process.env.JEST_WORKER_ID;
|
|
14750
|
+
}
|
|
14751
|
+
function wrapWithLinuxStdinFifo(runner) {
|
|
14752
|
+
if (!shouldUseLinuxStdinFifo()) return runner;
|
|
14753
|
+
return {
|
|
14754
|
+
run: (cmd, args, opts) => {
|
|
14755
|
+
const stdinIdx = args.indexOf("/dev/stdin");
|
|
14756
|
+
if (stdinIdx < 0 || opts?.stdin === void 0) {
|
|
14757
|
+
return runner.run(cmd, args, opts);
|
|
14758
|
+
}
|
|
14759
|
+
const fifoDir = (0, import_child_process.execFileSync)("mktemp", ["-d", path11.join(os.tmpdir(), "clef-fifo-XXXXXX")]).toString().trim();
|
|
14760
|
+
const fifoPath = path11.join(fifoDir, "input");
|
|
14761
|
+
(0, import_child_process.execFileSync)("mkfifo", [fifoPath]);
|
|
14762
|
+
const writer = (0, import_child_process.spawn)("dd", [`of=${fifoPath}`, "status=none"], {
|
|
14763
|
+
stdio: ["pipe", "ignore", "ignore"]
|
|
14764
|
+
});
|
|
14765
|
+
writer.stdin.write(opts.stdin);
|
|
14766
|
+
writer.stdin.end();
|
|
14767
|
+
const patchedArgs = [...args];
|
|
14768
|
+
patchedArgs[stdinIdx] = fifoPath;
|
|
14769
|
+
const { stdin: _stdin, ...restOpts } = opts;
|
|
14770
|
+
return runner.run(cmd, patchedArgs, restOpts).finally(() => {
|
|
14771
|
+
try {
|
|
14772
|
+
writer.kill();
|
|
14773
|
+
} catch {
|
|
14774
|
+
}
|
|
14775
|
+
try {
|
|
14776
|
+
(0, import_child_process.execFileSync)("rm", ["-rf", fifoDir]);
|
|
14777
|
+
} catch {
|
|
14778
|
+
}
|
|
14779
|
+
});
|
|
14780
|
+
}
|
|
14781
|
+
};
|
|
14782
|
+
}
|
|
14783
|
+
|
|
14784
|
+
// src/hsm/bundled.ts
|
|
14785
|
+
var fs15 = __toESM(require("fs"));
|
|
14786
|
+
var path12 = __toESM(require("path"));
|
|
14787
|
+
function tryBundledKeyservice() {
|
|
14788
|
+
const platform = process.platform;
|
|
14789
|
+
const arch = process.arch;
|
|
14790
|
+
const archName = arch === "x64" ? "x64" : arch === "arm64" ? "arm64" : null;
|
|
14791
|
+
if (!archName) return null;
|
|
14792
|
+
const platformName = platform === "darwin" ? "darwin" : platform === "linux" ? "linux" : null;
|
|
14793
|
+
if (!platformName) return null;
|
|
14794
|
+
const packageName = `@clef-sh/keyservice-${platformName}-${archName}`;
|
|
14795
|
+
const binName = "clef-keyservice";
|
|
14796
|
+
try {
|
|
14797
|
+
const packageMain = require.resolve(`${packageName}/package.json`);
|
|
14798
|
+
const packageDir = path12.dirname(packageMain);
|
|
14799
|
+
const binPath = path12.join(packageDir, "bin", binName);
|
|
14800
|
+
return fs15.existsSync(binPath) ? binPath : null;
|
|
14801
|
+
} catch {
|
|
14802
|
+
return null;
|
|
14803
|
+
}
|
|
14804
|
+
}
|
|
14805
|
+
|
|
14806
|
+
// src/hsm/resolver.ts
|
|
14807
|
+
var fs16 = __toESM(require("fs"));
|
|
14808
|
+
var path13 = __toESM(require("path"));
|
|
14809
|
+
function validateKeyservicePath(candidate) {
|
|
14810
|
+
if (!path13.isAbsolute(candidate)) {
|
|
14811
|
+
throw new Error(`CLEF_KEYSERVICE_PATH must be an absolute path, got '${candidate}'.`);
|
|
14751
14812
|
}
|
|
14752
14813
|
const segments = candidate.split(/[/\\]/);
|
|
14753
14814
|
if (segments.includes("..")) {
|
|
@@ -14781,7 +14842,7 @@ function resetKeyserviceResolution() {
|
|
|
14781
14842
|
}
|
|
14782
14843
|
|
|
14783
14844
|
// src/hsm/keyservice.ts
|
|
14784
|
-
var
|
|
14845
|
+
var import_child_process2 = require("child_process");
|
|
14785
14846
|
var readline = __toESM(require("readline"));
|
|
14786
14847
|
var PORT_REGEX = /^PORT=(\d+)$/;
|
|
14787
14848
|
var STARTUP_TIMEOUT_MS = 5e3;
|
|
@@ -14799,7 +14860,7 @@ async function spawnKeyservice(options) {
|
|
|
14799
14860
|
...options.pin ? { CLEF_PKCS11_PIN: options.pin } : {},
|
|
14800
14861
|
...options.pinFile ? { CLEF_PKCS11_PIN_FILE: options.pinFile } : {}
|
|
14801
14862
|
};
|
|
14802
|
-
const child = (0,
|
|
14863
|
+
const child = (0, import_child_process2.spawn)(options.binaryPath, args, {
|
|
14803
14864
|
stdio: ["ignore", "pipe", "pipe"],
|
|
14804
14865
|
env: childEnv
|
|
14805
14866
|
});
|
|
@@ -14875,16 +14936,16 @@ function killGracefully(child) {
|
|
|
14875
14936
|
}
|
|
14876
14937
|
|
|
14877
14938
|
// src/lint/runner.ts
|
|
14878
|
-
var
|
|
14939
|
+
var path14 = __toESM(require("path"));
|
|
14879
14940
|
var LintRunner = class {
|
|
14880
|
-
constructor(matrixManager, schemaValidator,
|
|
14941
|
+
constructor(matrixManager, schemaValidator, source) {
|
|
14881
14942
|
this.matrixManager = matrixManager;
|
|
14882
14943
|
this.schemaValidator = schemaValidator;
|
|
14883
|
-
this.
|
|
14944
|
+
this.source = source;
|
|
14884
14945
|
}
|
|
14885
14946
|
matrixManager;
|
|
14886
14947
|
schemaValidator;
|
|
14887
|
-
|
|
14948
|
+
source;
|
|
14888
14949
|
/**
|
|
14889
14950
|
* Lint the entire matrix: check missing files, schema errors, SOPS integrity,
|
|
14890
14951
|
* single-recipient warnings, and cross-environment key drift.
|
|
@@ -14911,8 +14972,9 @@ var LintRunner = class {
|
|
|
14911
14972
|
fileCount = existingCells.length;
|
|
14912
14973
|
const namespaceKeys = {};
|
|
14913
14974
|
for (const cell of existingCells) {
|
|
14975
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
14914
14976
|
try {
|
|
14915
|
-
const isValid = await this.
|
|
14977
|
+
const isValid = await this.source.validateEncryption(ref);
|
|
14916
14978
|
if (!isValid) {
|
|
14917
14979
|
issues.push({
|
|
14918
14980
|
severity: "error",
|
|
@@ -14933,7 +14995,7 @@ var LintRunner = class {
|
|
|
14933
14995
|
continue;
|
|
14934
14996
|
}
|
|
14935
14997
|
try {
|
|
14936
|
-
const decrypted = await this.
|
|
14998
|
+
const decrypted = await this.source.readCell(ref);
|
|
14937
14999
|
const keys = Object.keys(decrypted.values);
|
|
14938
15000
|
if (!namespaceKeys[cell.namespace]) {
|
|
14939
15001
|
namespaceKeys[cell.namespace] = {};
|
|
@@ -14978,7 +15040,7 @@ var LintRunner = class {
|
|
|
14978
15040
|
}
|
|
14979
15041
|
const ns = manifest.namespaces.find((n) => n.name === cell.namespace);
|
|
14980
15042
|
if (ns?.schema) {
|
|
14981
|
-
const schemaPath =
|
|
15043
|
+
const schemaPath = path14.join(repoRoot, ns.schema);
|
|
14982
15044
|
try {
|
|
14983
15045
|
const schema = this.schemaValidator.loadSchema(schemaPath);
|
|
14984
15046
|
const result = this.schemaValidator.validate(decrypted.values, schema);
|
|
@@ -15021,7 +15083,8 @@ var LintRunner = class {
|
|
|
15021
15083
|
}
|
|
15022
15084
|
}
|
|
15023
15085
|
try {
|
|
15024
|
-
const
|
|
15086
|
+
const meta = await this.source.getPendingMetadata(ref);
|
|
15087
|
+
const pendingKeys = meta.pending.map((p) => p.key);
|
|
15025
15088
|
pendingCount += pendingKeys.length;
|
|
15026
15089
|
for (const pendingKey of pendingKeys) {
|
|
15027
15090
|
issues.push({
|
|
@@ -15074,7 +15137,6 @@ var LintRunner = class {
|
|
|
15074
15137
|
const siIssues = await this.lintServiceIdentities(
|
|
15075
15138
|
manifest.service_identities,
|
|
15076
15139
|
manifest,
|
|
15077
|
-
repoRoot,
|
|
15078
15140
|
existingCells
|
|
15079
15141
|
);
|
|
15080
15142
|
issues.push(...siIssues);
|
|
@@ -15084,18 +15146,27 @@ var LintRunner = class {
|
|
|
15084
15146
|
return { issues, fileCount: fileCount + missingCells.length, pendingCount };
|
|
15085
15147
|
}
|
|
15086
15148
|
/**
|
|
15087
|
-
* Cross-reference
|
|
15149
|
+
* Cross-reference cell metadata against the cipher's plaintext key
|
|
15088
15150
|
* names for each existing cell. Reports orphan rotation records and
|
|
15089
|
-
* dual-state (pending + rotation) inconsistencies. Uses
|
|
15090
|
-
*
|
|
15151
|
+
* dual-state (pending + rotation) inconsistencies. Uses the source's
|
|
15152
|
+
* `listKeys` (no decryption).
|
|
15091
15153
|
*/
|
|
15092
15154
|
async lintMetadataConsistency(cells) {
|
|
15093
15155
|
const issues = [];
|
|
15094
15156
|
for (const cell of cells) {
|
|
15095
|
-
const
|
|
15096
|
-
|
|
15097
|
-
|
|
15098
|
-
|
|
15157
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
15158
|
+
let cipherKeys;
|
|
15159
|
+
try {
|
|
15160
|
+
cipherKeys = new Set(await this.source.listKeys(ref));
|
|
15161
|
+
} catch {
|
|
15162
|
+
continue;
|
|
15163
|
+
}
|
|
15164
|
+
let metadata;
|
|
15165
|
+
try {
|
|
15166
|
+
metadata = await this.source.getPendingMetadata(ref);
|
|
15167
|
+
} catch {
|
|
15168
|
+
continue;
|
|
15169
|
+
}
|
|
15099
15170
|
for (const record of metadata.rotations) {
|
|
15100
15171
|
if (!cipherKeys.has(record.key)) {
|
|
15101
15172
|
issues.push({
|
|
@@ -15126,7 +15197,7 @@ var LintRunner = class {
|
|
|
15126
15197
|
/**
|
|
15127
15198
|
* Lint service identity configurations for drift issues.
|
|
15128
15199
|
*/
|
|
15129
|
-
async lintServiceIdentities(identities, manifest,
|
|
15200
|
+
async lintServiceIdentities(identities, manifest, existingCells) {
|
|
15130
15201
|
const issues = [];
|
|
15131
15202
|
const declaredEnvNames = new Set(manifest.environments.map((e) => e.name));
|
|
15132
15203
|
const declaredNsNames = new Set(manifest.namespaces.map((ns) => ns.name));
|
|
@@ -15167,9 +15238,10 @@ var LintRunner = class {
|
|
|
15167
15238
|
const envConfig = si.environments[cell.environment];
|
|
15168
15239
|
if (!envConfig) continue;
|
|
15169
15240
|
if (!envConfig.recipient) continue;
|
|
15241
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
15170
15242
|
if (si.namespaces.includes(cell.namespace)) {
|
|
15171
15243
|
try {
|
|
15172
|
-
const metadata = await this.
|
|
15244
|
+
const metadata = await this.source.getCellMetadata(ref);
|
|
15173
15245
|
if (!metadata.recipients.includes(envConfig.recipient)) {
|
|
15174
15246
|
issues.push({
|
|
15175
15247
|
severity: "warning",
|
|
@@ -15183,7 +15255,7 @@ var LintRunner = class {
|
|
|
15183
15255
|
}
|
|
15184
15256
|
} else {
|
|
15185
15257
|
try {
|
|
15186
|
-
const metadata = await this.
|
|
15258
|
+
const metadata = await this.source.getCellMetadata(ref);
|
|
15187
15259
|
if (metadata.recipients.includes(envConfig.recipient)) {
|
|
15188
15260
|
issues.push({
|
|
15189
15261
|
severity: "warning",
|
|
@@ -15209,7 +15281,10 @@ var LintRunner = class {
|
|
|
15209
15281
|
async fix(manifest, repoRoot) {
|
|
15210
15282
|
const missingCells = this.matrixManager.detectMissingCells(manifest, repoRoot);
|
|
15211
15283
|
for (const cell of missingCells) {
|
|
15212
|
-
await this.
|
|
15284
|
+
await this.source.scaffoldCell(
|
|
15285
|
+
{ namespace: cell.namespace, environment: cell.environment },
|
|
15286
|
+
manifest
|
|
15287
|
+
);
|
|
15213
15288
|
}
|
|
15214
15289
|
return this.run(manifest, repoRoot);
|
|
15215
15290
|
}
|
|
@@ -15264,15 +15339,12 @@ Use 'clef exec' to inject secrets directly into a process, or 'clef export --for
|
|
|
15264
15339
|
}
|
|
15265
15340
|
};
|
|
15266
15341
|
|
|
15267
|
-
// src/import/index.ts
|
|
15268
|
-
var path17 = __toESM(require("path"));
|
|
15269
|
-
|
|
15270
15342
|
// src/import/parsers.ts
|
|
15271
|
-
var
|
|
15343
|
+
var path15 = __toESM(require("path"));
|
|
15272
15344
|
var YAML9 = __toESM(require("yaml"));
|
|
15273
15345
|
function detectFormat(filePath, content) {
|
|
15274
|
-
const base =
|
|
15275
|
-
const ext =
|
|
15346
|
+
const base = path15.basename(filePath);
|
|
15347
|
+
const ext = path15.extname(filePath).toLowerCase();
|
|
15276
15348
|
if (base === ".env" || base.startsWith(".env.")) {
|
|
15277
15349
|
return "dotenv";
|
|
15278
15350
|
}
|
|
@@ -15422,11 +15494,11 @@ function parse9(content, format, filePath) {
|
|
|
15422
15494
|
|
|
15423
15495
|
// src/import/index.ts
|
|
15424
15496
|
var ImportRunner = class {
|
|
15425
|
-
constructor(
|
|
15426
|
-
this.
|
|
15497
|
+
constructor(source, tx) {
|
|
15498
|
+
this.source = source;
|
|
15427
15499
|
this.tx = tx;
|
|
15428
15500
|
}
|
|
15429
|
-
|
|
15501
|
+
source;
|
|
15430
15502
|
tx;
|
|
15431
15503
|
/**
|
|
15432
15504
|
* Parse a source file and import its key/value pairs into a target `namespace/environment` cell.
|
|
@@ -15440,10 +15512,8 @@ var ImportRunner = class {
|
|
|
15440
15512
|
*/
|
|
15441
15513
|
async import(target, sourcePath, content, manifest, repoRoot, options) {
|
|
15442
15514
|
const [ns, env] = target.split("/");
|
|
15443
|
-
const
|
|
15444
|
-
|
|
15445
|
-
manifest.file_pattern.replace("{namespace}", ns).replace("{environment}", env)
|
|
15446
|
-
);
|
|
15515
|
+
const ref = { namespace: ns, environment: env };
|
|
15516
|
+
const relCellPath = manifest.file_pattern.replace("{namespace}", ns).replace("{environment}", env);
|
|
15447
15517
|
const parsed = parse9(content, options.format ?? "auto", sourcePath ?? "");
|
|
15448
15518
|
let candidates = Object.entries(parsed.pairs);
|
|
15449
15519
|
if (options.prefix) {
|
|
@@ -15461,7 +15531,7 @@ var ImportRunner = class {
|
|
|
15461
15531
|
if (options.dryRun) {
|
|
15462
15532
|
let existingKeys;
|
|
15463
15533
|
try {
|
|
15464
|
-
const decrypted2 = await this.
|
|
15534
|
+
const decrypted2 = await this.source.readCell(ref);
|
|
15465
15535
|
existingKeys = new Set(Object.keys(decrypted2.values));
|
|
15466
15536
|
} catch {
|
|
15467
15537
|
existingKeys = /* @__PURE__ */ new Set();
|
|
@@ -15475,7 +15545,7 @@ var ImportRunner = class {
|
|
|
15475
15545
|
}
|
|
15476
15546
|
return { imported, skipped, failed, warnings, dryRun: true };
|
|
15477
15547
|
}
|
|
15478
|
-
const decrypted = await this.
|
|
15548
|
+
const decrypted = await this.source.readCell(ref);
|
|
15479
15549
|
const newValues = { ...decrypted.values };
|
|
15480
15550
|
const rotatedKeys = [];
|
|
15481
15551
|
for (const [key, value] of candidates) {
|
|
@@ -15492,7 +15562,6 @@ var ImportRunner = class {
|
|
|
15492
15562
|
if (imported.length === 0) {
|
|
15493
15563
|
return { imported, skipped, failed, warnings, dryRun: false };
|
|
15494
15564
|
}
|
|
15495
|
-
const relCellPath = path17.relative(repoRoot, filePath);
|
|
15496
15565
|
const relMetaPath = relCellPath.replace(/\.enc\.(yaml|json)$/, ".clef-meta.yaml");
|
|
15497
15566
|
await this.tx.run(repoRoot, {
|
|
15498
15567
|
description: `clef import ${target}: ${imported.length} key(s)`,
|
|
@@ -15500,9 +15569,9 @@ var ImportRunner = class {
|
|
|
15500
15569
|
// callback are staged and rolled back atomically with the ciphertext.
|
|
15501
15570
|
paths: [relCellPath, relMetaPath],
|
|
15502
15571
|
mutate: async () => {
|
|
15503
|
-
await this.
|
|
15572
|
+
await this.source.writeCell(ref, newValues);
|
|
15504
15573
|
if (options.rotatedBy && rotatedKeys.length > 0) {
|
|
15505
|
-
await recordRotation(
|
|
15574
|
+
await this.source.recordRotation(ref, rotatedKeys, options.rotatedBy);
|
|
15506
15575
|
}
|
|
15507
15576
|
}
|
|
15508
15577
|
});
|
|
@@ -15511,7 +15580,7 @@ var ImportRunner = class {
|
|
|
15511
15580
|
};
|
|
15512
15581
|
|
|
15513
15582
|
// src/recipients/index.ts
|
|
15514
|
-
var
|
|
15583
|
+
var path16 = __toESM(require("path"));
|
|
15515
15584
|
function parseRecipientEntry(entry) {
|
|
15516
15585
|
if (typeof entry === "string") {
|
|
15517
15586
|
return { key: entry };
|
|
@@ -15579,12 +15648,12 @@ function ensureEnvironmentRecipientsArray(doc, envName) {
|
|
|
15579
15648
|
return env.recipients;
|
|
15580
15649
|
}
|
|
15581
15650
|
var RecipientManager = class {
|
|
15582
|
-
constructor(
|
|
15583
|
-
this.
|
|
15651
|
+
constructor(source, matrixManager, tx) {
|
|
15652
|
+
this.source = source;
|
|
15584
15653
|
this.matrixManager = matrixManager;
|
|
15585
15654
|
this.tx = tx;
|
|
15586
15655
|
}
|
|
15587
|
-
|
|
15656
|
+
source;
|
|
15588
15657
|
matrixManager;
|
|
15589
15658
|
tx;
|
|
15590
15659
|
/**
|
|
@@ -15639,7 +15708,7 @@ var RecipientManager = class {
|
|
|
15639
15708
|
const reEncryptedFiles = [];
|
|
15640
15709
|
await this.tx.run(repoRoot, {
|
|
15641
15710
|
description: environment ? `clef recipients add ${keyPreview(normalizedKey)} -e ${environment}` : `clef recipients add ${keyPreview(normalizedKey)}`,
|
|
15642
|
-
paths: [...cells.map((c) =>
|
|
15711
|
+
paths: [...cells.map((c) => path16.relative(repoRoot, c.filePath)), CLEF_MANIFEST_FILENAME],
|
|
15643
15712
|
mutate: async () => {
|
|
15644
15713
|
const doc = readManifestYaml(repoRoot);
|
|
15645
15714
|
const recipients = environment ? ensureEnvironmentRecipientsArray(doc, environment) : ensureRecipientsArray(doc);
|
|
@@ -15650,7 +15719,8 @@ var RecipientManager = class {
|
|
|
15650
15719
|
}
|
|
15651
15720
|
writeManifestYaml(repoRoot, doc);
|
|
15652
15721
|
for (const cell of cells) {
|
|
15653
|
-
|
|
15722
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
15723
|
+
await this.source.rotate(ref, { addAge: normalizedKey });
|
|
15654
15724
|
reEncryptedFiles.push(cell.filePath);
|
|
15655
15725
|
}
|
|
15656
15726
|
}
|
|
@@ -15698,7 +15768,7 @@ var RecipientManager = class {
|
|
|
15698
15768
|
const reEncryptedFiles = [];
|
|
15699
15769
|
await this.tx.run(repoRoot, {
|
|
15700
15770
|
description: environment ? `clef recipients remove ${keyPreview(trimmedKey)} -e ${environment}` : `clef recipients remove ${keyPreview(trimmedKey)}`,
|
|
15701
|
-
paths: [...cells.map((c) =>
|
|
15771
|
+
paths: [...cells.map((c) => path16.relative(repoRoot, c.filePath)), CLEF_MANIFEST_FILENAME],
|
|
15702
15772
|
mutate: async () => {
|
|
15703
15773
|
const doc = readManifestYaml(repoRoot);
|
|
15704
15774
|
const recipients = environment ? ensureEnvironmentRecipientsArray(doc, environment) : ensureRecipientsArray(doc);
|
|
@@ -15706,7 +15776,8 @@ var RecipientManager = class {
|
|
|
15706
15776
|
recipients.splice(idx, 1);
|
|
15707
15777
|
writeManifestYaml(repoRoot, doc);
|
|
15708
15778
|
for (const cell of cells) {
|
|
15709
|
-
|
|
15779
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
15780
|
+
await this.source.rotate(ref, { rmAge: trimmedKey });
|
|
15710
15781
|
reEncryptedFiles.push(cell.filePath);
|
|
15711
15782
|
}
|
|
15712
15783
|
}
|
|
@@ -15728,12 +15799,12 @@ var RecipientManager = class {
|
|
|
15728
15799
|
|
|
15729
15800
|
// src/recipients/requests.ts
|
|
15730
15801
|
var fs17 = __toESM(require("fs"));
|
|
15731
|
-
var
|
|
15802
|
+
var path17 = __toESM(require("path"));
|
|
15732
15803
|
var YAML10 = __toESM(require("yaml"));
|
|
15733
15804
|
var REQUESTS_FILENAME = ".clef-requests.yaml";
|
|
15734
15805
|
var HEADER_COMMENT2 = "# Pending recipient access requests. Approve with: clef recipients approve <label>\n";
|
|
15735
15806
|
function requestsFilePath(repoRoot) {
|
|
15736
|
-
return
|
|
15807
|
+
return path17.join(repoRoot, REQUESTS_FILENAME);
|
|
15737
15808
|
}
|
|
15738
15809
|
function loadRequests(repoRoot) {
|
|
15739
15810
|
const filePath = requestsFilePath(repoRoot);
|
|
@@ -15808,7 +15879,7 @@ function findInList(requests, identifier) {
|
|
|
15808
15879
|
}
|
|
15809
15880
|
|
|
15810
15881
|
// src/drift/detector.ts
|
|
15811
|
-
var
|
|
15882
|
+
var path18 = __toESM(require("path"));
|
|
15812
15883
|
var DriftDetector = class {
|
|
15813
15884
|
parser = new ManifestParser();
|
|
15814
15885
|
matrix = new MatrixManager();
|
|
@@ -15821,8 +15892,8 @@ var DriftDetector = class {
|
|
|
15821
15892
|
* @returns Drift result with any issues found.
|
|
15822
15893
|
*/
|
|
15823
15894
|
detect(localRoot, remoteRoot, namespaceFilter) {
|
|
15824
|
-
const localManifest = this.parser.parse(
|
|
15825
|
-
const remoteManifest = this.parser.parse(
|
|
15895
|
+
const localManifest = this.parser.parse(path18.join(localRoot, CLEF_MANIFEST_FILENAME));
|
|
15896
|
+
const remoteManifest = this.parser.parse(path18.join(remoteRoot, CLEF_MANIFEST_FILENAME));
|
|
15826
15897
|
const localCells = this.matrix.resolveMatrix(localManifest, localRoot);
|
|
15827
15898
|
const remoteCells = this.matrix.resolveMatrix(remoteManifest, remoteRoot);
|
|
15828
15899
|
const localEnvNames = localManifest.environments.map((e) => e.name);
|
|
@@ -15886,7 +15957,7 @@ var DriftDetector = class {
|
|
|
15886
15957
|
};
|
|
15887
15958
|
|
|
15888
15959
|
// src/report/generator.ts
|
|
15889
|
-
var
|
|
15960
|
+
var path19 = __toESM(require("path"));
|
|
15890
15961
|
|
|
15891
15962
|
// src/report/sanitizer.ts
|
|
15892
15963
|
var ReportSanitizer = class {
|
|
@@ -16023,14 +16094,14 @@ var ReportSanitizer = class {
|
|
|
16023
16094
|
|
|
16024
16095
|
// src/report/generator.ts
|
|
16025
16096
|
var ReportGenerator = class {
|
|
16026
|
-
constructor(runner,
|
|
16097
|
+
constructor(runner, source, matrixManager, schemaValidator) {
|
|
16027
16098
|
this.runner = runner;
|
|
16028
|
-
this.
|
|
16099
|
+
this.source = source;
|
|
16029
16100
|
this.matrixManager = matrixManager;
|
|
16030
16101
|
this.schemaValidator = schemaValidator;
|
|
16031
16102
|
}
|
|
16032
16103
|
runner;
|
|
16033
|
-
|
|
16104
|
+
source;
|
|
16034
16105
|
matrixManager;
|
|
16035
16106
|
schemaValidator;
|
|
16036
16107
|
/**
|
|
@@ -16046,7 +16117,7 @@ var ReportGenerator = class {
|
|
|
16046
16117
|
let manifest = null;
|
|
16047
16118
|
try {
|
|
16048
16119
|
const parser = new ManifestParser();
|
|
16049
|
-
manifest = parser.parse(
|
|
16120
|
+
manifest = parser.parse(path19.join(repoRoot, "clef.yaml"));
|
|
16050
16121
|
} catch {
|
|
16051
16122
|
const emptyManifest = {
|
|
16052
16123
|
manifestVersion: 0,
|
|
@@ -16167,16 +16238,17 @@ var ReportGenerator = class {
|
|
|
16167
16238
|
metadata: null
|
|
16168
16239
|
};
|
|
16169
16240
|
}
|
|
16241
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
16170
16242
|
const keyCount = this.readKeyCount(cell.filePath);
|
|
16171
16243
|
let pendingCount = 0;
|
|
16172
16244
|
try {
|
|
16173
|
-
const
|
|
16174
|
-
pendingCount = pending.length;
|
|
16245
|
+
const meta = await this.source.getPendingMetadata(ref);
|
|
16246
|
+
pendingCount = meta.pending.length;
|
|
16175
16247
|
} catch {
|
|
16176
16248
|
}
|
|
16177
16249
|
let metadata = null;
|
|
16178
16250
|
try {
|
|
16179
|
-
const sopsMetadata = await this.
|
|
16251
|
+
const sopsMetadata = await this.source.getCellMetadata(ref);
|
|
16180
16252
|
metadata = {
|
|
16181
16253
|
backend: sopsMetadata.backend,
|
|
16182
16254
|
recipients: sopsMetadata.recipients,
|
|
@@ -16199,7 +16271,7 @@ var ReportGenerator = class {
|
|
|
16199
16271
|
}
|
|
16200
16272
|
async buildPolicy(manifest, repoRoot) {
|
|
16201
16273
|
try {
|
|
16202
|
-
const lintRunner = new LintRunner(this.matrixManager, this.schemaValidator, this.
|
|
16274
|
+
const lintRunner = new LintRunner(this.matrixManager, this.schemaValidator, this.source);
|
|
16203
16275
|
const lintResult = await lintRunner.run(manifest, repoRoot);
|
|
16204
16276
|
return new ReportSanitizer().sanitize(lintResult.issues);
|
|
16205
16277
|
} catch {
|
|
@@ -16516,9 +16588,9 @@ var SopsMergeDriver = class {
|
|
|
16516
16588
|
*/
|
|
16517
16589
|
async mergeFiles(basePath, oursPath, theirsPath) {
|
|
16518
16590
|
const [baseDecrypted, oursDecrypted, theirsDecrypted] = await Promise.all([
|
|
16519
|
-
this.sopsClient.
|
|
16520
|
-
this.sopsClient.
|
|
16521
|
-
this.sopsClient.
|
|
16591
|
+
this.sopsClient.decryptFile(basePath),
|
|
16592
|
+
this.sopsClient.decryptFile(oursPath),
|
|
16593
|
+
this.sopsClient.decryptFile(theirsPath)
|
|
16522
16594
|
]);
|
|
16523
16595
|
return this.merge(baseDecrypted.values, oursDecrypted.values, theirsDecrypted.values);
|
|
16524
16596
|
}
|
|
@@ -16635,22 +16707,26 @@ function mergeMetadataFiles(_basePath, oursPath, theirsPath) {
|
|
|
16635
16707
|
}
|
|
16636
16708
|
|
|
16637
16709
|
// src/service-identity/manager.ts
|
|
16638
|
-
var
|
|
16710
|
+
var path20 = __toESM(require("path"));
|
|
16639
16711
|
var ServiceIdentityManager = class {
|
|
16640
|
-
constructor(
|
|
16641
|
-
this.
|
|
16712
|
+
constructor(source, matrixManager, tx) {
|
|
16713
|
+
this.source = source;
|
|
16642
16714
|
this.matrixManager = matrixManager;
|
|
16643
16715
|
this.tx = tx;
|
|
16644
16716
|
}
|
|
16645
|
-
|
|
16717
|
+
source;
|
|
16646
16718
|
matrixManager;
|
|
16647
16719
|
tx;
|
|
16720
|
+
/** Helper: cell → ref for the source seam. */
|
|
16721
|
+
ref(cell) {
|
|
16722
|
+
return { namespace: cell.namespace, environment: cell.environment };
|
|
16723
|
+
}
|
|
16648
16724
|
/**
|
|
16649
16725
|
* Compute repo-relative paths for a set of cells plus the manifest. Used
|
|
16650
16726
|
* to seed TransactionManager.run's `paths` argument.
|
|
16651
16727
|
*/
|
|
16652
16728
|
txPaths(repoRoot, cells) {
|
|
16653
|
-
return [...cells.map((c) =>
|
|
16729
|
+
return [...cells.map((c) => path20.relative(repoRoot, c.filePath)), CLEF_MANIFEST_FILENAME];
|
|
16654
16730
|
}
|
|
16655
16731
|
/**
|
|
16656
16732
|
* Create a new service identity with per-environment age key pairs or KMS envelope config.
|
|
@@ -16742,7 +16818,7 @@ var ServiceIdentityManager = class {
|
|
|
16742
16818
|
if (!envConfig?.recipient) continue;
|
|
16743
16819
|
if (isKmsEnvelope(envConfig)) continue;
|
|
16744
16820
|
try {
|
|
16745
|
-
await this.
|
|
16821
|
+
await this.source.rotate(this.ref(cell), { rmAge: envConfig.recipient });
|
|
16746
16822
|
} catch {
|
|
16747
16823
|
}
|
|
16748
16824
|
}
|
|
@@ -16792,7 +16868,7 @@ var ServiceIdentityManager = class {
|
|
|
16792
16868
|
const scopedCells = cells.filter((c) => c.environment === envName);
|
|
16793
16869
|
for (const cell of scopedCells) {
|
|
16794
16870
|
try {
|
|
16795
|
-
await this.
|
|
16871
|
+
await this.source.rotate(this.ref(cell), { rmAge: oldConfig.recipient });
|
|
16796
16872
|
} catch {
|
|
16797
16873
|
}
|
|
16798
16874
|
}
|
|
@@ -16819,7 +16895,7 @@ var ServiceIdentityManager = class {
|
|
|
16819
16895
|
if (isKmsEnvelope(envConfig)) continue;
|
|
16820
16896
|
if (!envConfig.recipient) continue;
|
|
16821
16897
|
try {
|
|
16822
|
-
await this.
|
|
16898
|
+
await this.source.rotate(this.ref(cell), { addAge: envConfig.recipient });
|
|
16823
16899
|
} catch (err) {
|
|
16824
16900
|
const message = err instanceof Error ? err.message : String(err);
|
|
16825
16901
|
if (!message.includes("already")) {
|
|
@@ -16867,7 +16943,7 @@ var ServiceIdentityManager = class {
|
|
|
16867
16943
|
if (isKmsEnvelope(envConfig)) continue;
|
|
16868
16944
|
if (!envConfig.recipient) continue;
|
|
16869
16945
|
try {
|
|
16870
|
-
await this.
|
|
16946
|
+
await this.source.rotate(this.ref(cell), { addAge: envConfig.recipient });
|
|
16871
16947
|
affectedFiles.push(cell.filePath);
|
|
16872
16948
|
} catch (err) {
|
|
16873
16949
|
const message = err instanceof Error ? err.message : String(err);
|
|
@@ -16928,7 +17004,7 @@ var ServiceIdentityManager = class {
|
|
|
16928
17004
|
if (isKmsEnvelope(envConfig)) continue;
|
|
16929
17005
|
if (!envConfig.recipient) continue;
|
|
16930
17006
|
try {
|
|
16931
|
-
await this.
|
|
17007
|
+
await this.source.rotate(this.ref(cell), { rmAge: envConfig.recipient });
|
|
16932
17008
|
affectedFiles.push(cell.filePath);
|
|
16933
17009
|
} catch {
|
|
16934
17010
|
}
|
|
@@ -16997,7 +17073,7 @@ var ServiceIdentityManager = class {
|
|
|
16997
17073
|
if (!identity.pack_only && !isKmsEnvelope(envConfig) && envConfig.recipient) {
|
|
16998
17074
|
for (const cell of cells) {
|
|
16999
17075
|
try {
|
|
17000
|
-
await this.
|
|
17076
|
+
await this.source.rotate(this.ref(cell), { addAge: envConfig.recipient });
|
|
17001
17077
|
} catch (err) {
|
|
17002
17078
|
const message = err instanceof Error ? err.message : String(err);
|
|
17003
17079
|
if (!message.includes("already")) {
|
|
@@ -17074,10 +17150,10 @@ var ServiceIdentityManager = class {
|
|
|
17074
17150
|
const scopedCells = cells.filter((c) => c.environment === envName);
|
|
17075
17151
|
for (const cell of scopedCells) {
|
|
17076
17152
|
try {
|
|
17077
|
-
await this.
|
|
17153
|
+
await this.source.rotate(this.ref(cell), { rmAge: oldRecipient });
|
|
17078
17154
|
} catch {
|
|
17079
17155
|
}
|
|
17080
|
-
await this.
|
|
17156
|
+
await this.source.rotate(this.ref(cell), { addAge: newPublicKey });
|
|
17081
17157
|
}
|
|
17082
17158
|
}
|
|
17083
17159
|
}
|
|
@@ -17136,7 +17212,7 @@ var ServiceIdentityManager = class {
|
|
|
17136
17212
|
if (!envConfig.recipient) continue;
|
|
17137
17213
|
if (si.namespaces.includes(cell.namespace)) {
|
|
17138
17214
|
try {
|
|
17139
|
-
const metadata = await this.
|
|
17215
|
+
const metadata = await this.source.getCellMetadata(this.ref(cell));
|
|
17140
17216
|
if (!metadata.recipients.includes(envConfig.recipient)) {
|
|
17141
17217
|
issues.push({
|
|
17142
17218
|
identity: si.name,
|
|
@@ -17151,7 +17227,7 @@ var ServiceIdentityManager = class {
|
|
|
17151
17227
|
}
|
|
17152
17228
|
} else {
|
|
17153
17229
|
try {
|
|
17154
|
-
const metadata = await this.
|
|
17230
|
+
const metadata = await this.source.getCellMetadata(this.ref(cell));
|
|
17155
17231
|
if (metadata.recipients.includes(envConfig.recipient)) {
|
|
17156
17232
|
issues.push({
|
|
17157
17233
|
identity: si.name,
|
|
@@ -17173,15 +17249,15 @@ var ServiceIdentityManager = class {
|
|
|
17173
17249
|
|
|
17174
17250
|
// src/structure/manager.ts
|
|
17175
17251
|
var fs19 = __toESM(require("fs"));
|
|
17176
|
-
var
|
|
17252
|
+
var path21 = __toESM(require("path"));
|
|
17177
17253
|
var StructureManager = class {
|
|
17178
|
-
constructor(matrixManager,
|
|
17254
|
+
constructor(matrixManager, buildSource, tx) {
|
|
17179
17255
|
this.matrixManager = matrixManager;
|
|
17180
|
-
this.
|
|
17256
|
+
this.buildSource = buildSource;
|
|
17181
17257
|
this.tx = tx;
|
|
17182
17258
|
}
|
|
17183
17259
|
matrixManager;
|
|
17184
|
-
|
|
17260
|
+
buildSource;
|
|
17185
17261
|
tx;
|
|
17186
17262
|
// ── add ──────────────────────────────────────────────────────────────────
|
|
17187
17263
|
/**
|
|
@@ -17197,7 +17273,7 @@ var StructureManager = class {
|
|
|
17197
17273
|
this.assertValidIdentifier("namespace", name);
|
|
17198
17274
|
const newCellPaths = manifest.environments.map((env) => ({
|
|
17199
17275
|
environment: env.name,
|
|
17200
|
-
filePath:
|
|
17276
|
+
filePath: path21.join(
|
|
17201
17277
|
repoRoot,
|
|
17202
17278
|
manifest.file_pattern.replace("{namespace}", name).replace("{environment}", env.name)
|
|
17203
17279
|
)
|
|
@@ -17205,7 +17281,7 @@ var StructureManager = class {
|
|
|
17205
17281
|
for (const cell of newCellPaths) {
|
|
17206
17282
|
if (fs19.existsSync(cell.filePath)) {
|
|
17207
17283
|
throw new Error(
|
|
17208
|
-
`Cannot add namespace '${name}': file '${
|
|
17284
|
+
`Cannot add namespace '${name}': file '${path21.relative(repoRoot, cell.filePath)}' already exists.`
|
|
17209
17285
|
);
|
|
17210
17286
|
}
|
|
17211
17287
|
}
|
|
@@ -17224,21 +17300,14 @@ var StructureManager = class {
|
|
|
17224
17300
|
await this.tx.run(repoRoot, {
|
|
17225
17301
|
description: `clef namespace add ${name}`,
|
|
17226
17302
|
paths: [
|
|
17227
|
-
...newCellPaths.map((c) =>
|
|
17303
|
+
...newCellPaths.map((c) => path21.relative(repoRoot, c.filePath)),
|
|
17228
17304
|
CLEF_MANIFEST_FILENAME
|
|
17229
17305
|
],
|
|
17230
17306
|
mutate: async () => {
|
|
17307
|
+
const source = this.buildSource(updatedManifest);
|
|
17231
17308
|
for (const cell of newCellPaths) {
|
|
17232
|
-
|
|
17233
|
-
|
|
17234
|
-
namespace: name,
|
|
17235
|
-
environment: cell.environment,
|
|
17236
|
-
filePath: cell.filePath,
|
|
17237
|
-
exists: false
|
|
17238
|
-
},
|
|
17239
|
-
this.encryption,
|
|
17240
|
-
updatedManifest
|
|
17241
|
-
);
|
|
17309
|
+
const ref = { namespace: name, environment: cell.environment };
|
|
17310
|
+
await source.scaffoldCell(ref, updatedManifest);
|
|
17242
17311
|
}
|
|
17243
17312
|
const doc = readManifestYaml(repoRoot);
|
|
17244
17313
|
const namespaces = doc.namespaces;
|
|
@@ -17269,7 +17338,7 @@ var StructureManager = class {
|
|
|
17269
17338
|
this.assertValidIdentifier("environment", name);
|
|
17270
17339
|
const newCellPaths = manifest.namespaces.map((ns) => ({
|
|
17271
17340
|
namespace: ns.name,
|
|
17272
|
-
filePath:
|
|
17341
|
+
filePath: path21.join(
|
|
17273
17342
|
repoRoot,
|
|
17274
17343
|
manifest.file_pattern.replace("{namespace}", ns.name).replace("{environment}", name)
|
|
17275
17344
|
)
|
|
@@ -17277,7 +17346,7 @@ var StructureManager = class {
|
|
|
17277
17346
|
for (const cell of newCellPaths) {
|
|
17278
17347
|
if (fs19.existsSync(cell.filePath)) {
|
|
17279
17348
|
throw new Error(
|
|
17280
|
-
`Cannot add environment '${name}': file '${
|
|
17349
|
+
`Cannot add environment '${name}': file '${path21.relative(repoRoot, cell.filePath)}' already exists.`
|
|
17281
17350
|
);
|
|
17282
17351
|
}
|
|
17283
17352
|
}
|
|
@@ -17296,21 +17365,14 @@ var StructureManager = class {
|
|
|
17296
17365
|
await this.tx.run(repoRoot, {
|
|
17297
17366
|
description: `clef env add ${name}`,
|
|
17298
17367
|
paths: [
|
|
17299
|
-
...newCellPaths.map((c) =>
|
|
17368
|
+
...newCellPaths.map((c) => path21.relative(repoRoot, c.filePath)),
|
|
17300
17369
|
CLEF_MANIFEST_FILENAME
|
|
17301
17370
|
],
|
|
17302
17371
|
mutate: async () => {
|
|
17372
|
+
const source = this.buildSource(updatedManifest);
|
|
17303
17373
|
for (const cell of newCellPaths) {
|
|
17304
|
-
|
|
17305
|
-
|
|
17306
|
-
namespace: cell.namespace,
|
|
17307
|
-
environment: name,
|
|
17308
|
-
filePath: cell.filePath,
|
|
17309
|
-
exists: false
|
|
17310
|
-
},
|
|
17311
|
-
this.encryption,
|
|
17312
|
-
updatedManifest
|
|
17313
|
-
);
|
|
17374
|
+
const ref = { namespace: cell.namespace, environment: name };
|
|
17375
|
+
await source.scaffoldCell(ref, updatedManifest);
|
|
17314
17376
|
}
|
|
17315
17377
|
const doc = readManifestYaml(repoRoot);
|
|
17316
17378
|
const environments = doc.environments;
|
|
@@ -17453,7 +17515,7 @@ var StructureManager = class {
|
|
|
17453
17515
|
for (const pair of renamePairs) {
|
|
17454
17516
|
if (fs19.existsSync(pair.to)) {
|
|
17455
17517
|
throw new Error(
|
|
17456
|
-
`Rename target '${
|
|
17518
|
+
`Rename target '${path21.relative(repoRoot, pair.to)}' already exists. Move or remove it first.`
|
|
17457
17519
|
);
|
|
17458
17520
|
}
|
|
17459
17521
|
}
|
|
@@ -17496,7 +17558,7 @@ var StructureManager = class {
|
|
|
17496
17558
|
for (const pair of renamePairs) {
|
|
17497
17559
|
if (fs19.existsSync(pair.to)) {
|
|
17498
17560
|
throw new Error(
|
|
17499
|
-
`Rename target '${
|
|
17561
|
+
`Rename target '${path21.relative(repoRoot, pair.to)}' already exists. Move or remove it first.`
|
|
17500
17562
|
);
|
|
17501
17563
|
}
|
|
17502
17564
|
}
|
|
@@ -17546,7 +17608,7 @@ var StructureManager = class {
|
|
|
17546
17608
|
swapAxisInCellPath(repoRoot, manifest, cell, axis, newName) {
|
|
17547
17609
|
const ns = axis === "namespace" ? newName : cell.namespace;
|
|
17548
17610
|
const env = axis === "environment" ? newName : cell.environment;
|
|
17549
|
-
return
|
|
17611
|
+
return path21.join(
|
|
17550
17612
|
repoRoot,
|
|
17551
17613
|
manifest.file_pattern.replace("{namespace}", ns).replace("{environment}", env)
|
|
17552
17614
|
);
|
|
@@ -17558,8 +17620,8 @@ var StructureManager = class {
|
|
|
17558
17620
|
txPaths(repoRoot, renamePairs) {
|
|
17559
17621
|
const paths = /* @__PURE__ */ new Set();
|
|
17560
17622
|
for (const pair of renamePairs) {
|
|
17561
|
-
paths.add(
|
|
17562
|
-
paths.add(
|
|
17623
|
+
paths.add(path21.relative(repoRoot, pair.from));
|
|
17624
|
+
paths.add(path21.relative(repoRoot, pair.to));
|
|
17563
17625
|
}
|
|
17564
17626
|
paths.add(CLEF_MANIFEST_FILENAME);
|
|
17565
17627
|
return [...paths];
|
|
@@ -17570,7 +17632,7 @@ var StructureManager = class {
|
|
|
17570
17632
|
*/
|
|
17571
17633
|
applyRenames(pairs) {
|
|
17572
17634
|
for (const pair of pairs) {
|
|
17573
|
-
const targetDir =
|
|
17635
|
+
const targetDir = path21.dirname(pair.to);
|
|
17574
17636
|
if (!fs19.existsSync(targetDir)) {
|
|
17575
17637
|
fs19.mkdirSync(targetDir, { recursive: true });
|
|
17576
17638
|
}
|
|
@@ -17585,10 +17647,10 @@ var StructureManager = class {
|
|
|
17585
17647
|
deletePaths(repoRoot, cells) {
|
|
17586
17648
|
const paths = /* @__PURE__ */ new Set();
|
|
17587
17649
|
for (const cell of cells) {
|
|
17588
|
-
paths.add(
|
|
17650
|
+
paths.add(path21.relative(repoRoot, cell.filePath));
|
|
17589
17651
|
const meta = cell.filePath.replace(/\.enc\.(yaml|json)$/, ".clef-meta.yaml");
|
|
17590
17652
|
if (fs19.existsSync(meta)) {
|
|
17591
|
-
paths.add(
|
|
17653
|
+
paths.add(path21.relative(repoRoot, meta));
|
|
17592
17654
|
}
|
|
17593
17655
|
}
|
|
17594
17656
|
paths.add(CLEF_MANIFEST_FILENAME);
|
|
@@ -17703,7 +17765,7 @@ function renameKeyPreservingOrder(obj, oldKey, newKey) {
|
|
|
17703
17765
|
}
|
|
17704
17766
|
|
|
17705
17767
|
// src/artifact/resolve.ts
|
|
17706
|
-
async function resolveIdentitySecrets(identityName, environment, manifest, repoRoot,
|
|
17768
|
+
async function resolveIdentitySecrets(identityName, environment, manifest, repoRoot, source, matrixManager) {
|
|
17707
17769
|
const identity = manifest.service_identities?.find((si) => si.name === identityName);
|
|
17708
17770
|
if (!identity) {
|
|
17709
17771
|
throw new Error(`Service identity '${identityName}' not found in manifest.`);
|
|
@@ -17719,7 +17781,10 @@ async function resolveIdentitySecrets(identityName, environment, manifest, repoR
|
|
|
17719
17781
|
(c) => c.exists && identity.namespaces.includes(c.namespace) && c.environment === environment
|
|
17720
17782
|
);
|
|
17721
17783
|
for (const cell of cells) {
|
|
17722
|
-
const decrypted = await
|
|
17784
|
+
const decrypted = await source.readCell({
|
|
17785
|
+
namespace: cell.namespace,
|
|
17786
|
+
environment: cell.environment
|
|
17787
|
+
});
|
|
17723
17788
|
const bucket = allValues[cell.namespace] ??= {};
|
|
17724
17789
|
for (const [key, value] of Object.entries(decrypted.values)) {
|
|
17725
17790
|
if (key in bucket && bucket[key] !== value) {
|
|
@@ -17743,14 +17808,14 @@ var crypto5 = __toESM(require("crypto"));
|
|
|
17743
17808
|
|
|
17744
17809
|
// src/artifact/output.ts
|
|
17745
17810
|
var fs20 = __toESM(require("fs"));
|
|
17746
|
-
var
|
|
17811
|
+
var path22 = __toESM(require("path"));
|
|
17747
17812
|
var FilePackOutput = class {
|
|
17748
17813
|
constructor(outputPath) {
|
|
17749
17814
|
this.outputPath = outputPath;
|
|
17750
17815
|
}
|
|
17751
17816
|
outputPath;
|
|
17752
17817
|
async write(_artifact, json) {
|
|
17753
|
-
const outputDir =
|
|
17818
|
+
const outputDir = path22.dirname(this.outputPath);
|
|
17754
17819
|
if (!fs20.existsSync(outputDir)) {
|
|
17755
17820
|
fs20.mkdirSync(outputDir, { recursive: true });
|
|
17756
17821
|
}
|
|
@@ -17797,17 +17862,6 @@ function buildSigningPayload(artifact) {
|
|
|
17797
17862
|
];
|
|
17798
17863
|
return Buffer.from(fields.join("\n"), "utf-8");
|
|
17799
17864
|
}
|
|
17800
|
-
function generateSigningKeyPair() {
|
|
17801
|
-
const pair = crypto3.generateKeyPairSync("ed25519");
|
|
17802
|
-
return {
|
|
17803
|
-
publicKey: pair.publicKey.export({ type: "spki", format: "der" }).toString(
|
|
17804
|
-
"base64"
|
|
17805
|
-
),
|
|
17806
|
-
privateKey: pair.privateKey.export({ type: "pkcs8", format: "der" }).toString(
|
|
17807
|
-
"base64"
|
|
17808
|
-
)
|
|
17809
|
-
};
|
|
17810
|
-
}
|
|
17811
17865
|
function signEd25519(payload, privateKeyBase64) {
|
|
17812
17866
|
const keyObj = crypto3.createPrivateKey({
|
|
17813
17867
|
key: Buffer.from(privateKeyBase64, "base64"),
|
|
@@ -17843,17 +17897,6 @@ function verifySignature(payload, signatureBase64, publicKeyBase64) {
|
|
|
17843
17897
|
}
|
|
17844
17898
|
throw new Error(`Unsupported key type for signature verification: ${keyType}`);
|
|
17845
17899
|
}
|
|
17846
|
-
function detectAlgorithm(publicKeyBase64) {
|
|
17847
|
-
const keyObj = crypto3.createPublicKey({
|
|
17848
|
-
key: Buffer.from(publicKeyBase64, "base64"),
|
|
17849
|
-
format: "der",
|
|
17850
|
-
type: "spki"
|
|
17851
|
-
});
|
|
17852
|
-
const keyType = keyObj.asymmetricKeyType;
|
|
17853
|
-
if (keyType === "ed25519") return "Ed25519";
|
|
17854
|
-
if (keyType === "ec") return "ECDSA_SHA256";
|
|
17855
|
-
throw new Error(`Unsupported key type: ${keyType}`);
|
|
17856
|
-
}
|
|
17857
17900
|
|
|
17858
17901
|
// src/artifact/hash.ts
|
|
17859
17902
|
var crypto4 = __toESM(require("crypto"));
|
|
@@ -17863,12 +17906,12 @@ function computeCiphertextHash(ciphertext) {
|
|
|
17863
17906
|
|
|
17864
17907
|
// src/artifact/packer.ts
|
|
17865
17908
|
var ArtifactPacker = class {
|
|
17866
|
-
constructor(
|
|
17867
|
-
this.
|
|
17909
|
+
constructor(source, matrixManager, kms) {
|
|
17910
|
+
this.source = source;
|
|
17868
17911
|
this.matrixManager = matrixManager;
|
|
17869
17912
|
this.kms = kms;
|
|
17870
17913
|
}
|
|
17871
|
-
|
|
17914
|
+
source;
|
|
17872
17915
|
matrixManager;
|
|
17873
17916
|
kms;
|
|
17874
17917
|
/**
|
|
@@ -17886,7 +17929,7 @@ var ArtifactPacker = class {
|
|
|
17886
17929
|
config.environment,
|
|
17887
17930
|
manifest,
|
|
17888
17931
|
repoRoot,
|
|
17889
|
-
this.
|
|
17932
|
+
this.source,
|
|
17890
17933
|
this.matrixManager
|
|
17891
17934
|
);
|
|
17892
17935
|
const plaintext = JSON.stringify(resolved.values);
|
|
@@ -18271,11 +18314,7 @@ var JsonEnvelopeBackend = class {
|
|
|
18271
18314
|
}
|
|
18272
18315
|
async pack(req) {
|
|
18273
18316
|
const opts = req.backendOptions;
|
|
18274
|
-
const packer = new ArtifactPacker(
|
|
18275
|
-
req.services.encryption,
|
|
18276
|
-
new MatrixManager(),
|
|
18277
|
-
req.services.kms
|
|
18278
|
-
);
|
|
18317
|
+
const packer = new ArtifactPacker(req.services.source, new MatrixManager(), req.services.kms);
|
|
18279
18318
|
const output = opts.output ?? (opts.outputPath ? new FilePackOutput(opts.outputPath) : void 0);
|
|
18280
18319
|
const result = await packer.pack(
|
|
18281
18320
|
{
|
|
@@ -18304,7 +18343,7 @@ var JsonEnvelopeBackend = class {
|
|
|
18304
18343
|
var VALID_KMS_PROVIDERS = ["aws", "gcp", "azure"];
|
|
18305
18344
|
|
|
18306
18345
|
// src/migration/backend.ts
|
|
18307
|
-
var
|
|
18346
|
+
var path23 = __toESM(require("path"));
|
|
18308
18347
|
var YAML12 = __toESM(require("yaml"));
|
|
18309
18348
|
var BACKEND_KEY_FIELDS = {
|
|
18310
18349
|
age: void 0,
|
|
@@ -18332,23 +18371,24 @@ function metadataMatchesTarget(meta, target) {
|
|
|
18332
18371
|
}
|
|
18333
18372
|
var BackendMigrator = class {
|
|
18334
18373
|
/**
|
|
18335
|
-
* @param
|
|
18374
|
+
* @param buildSource - Factory that builds a `SecretSource` bound to a
|
|
18375
|
+
* given manifest. Called twice during a real migration: once with the
|
|
18376
|
+
* pre-migration manifest (for classification + decrypt) and once with
|
|
18377
|
+
* the post-mutation manifest (for re-encrypt + verify). The factory
|
|
18378
|
+
* pattern is required because the encryption layer of a composed
|
|
18379
|
+
* source is bound to a manifest at construction.
|
|
18336
18380
|
* @param matrixManager - Matrix resolver.
|
|
18337
18381
|
* @param tx - Transaction manager that wraps the migration in a single git commit
|
|
18338
18382
|
* so a partial failure rolls back ALL files + the manifest via `git reset --hard`.
|
|
18339
|
-
* @param targetEncryption - Optional separate backend for encrypt. Use when migrating
|
|
18340
|
-
* from cloud (decrypt via keyservice) to another backend (encrypt via local credentials).
|
|
18341
18383
|
*/
|
|
18342
|
-
constructor(
|
|
18384
|
+
constructor(buildSource, matrixManager, tx) {
|
|
18385
|
+
this.buildSource = buildSource;
|
|
18343
18386
|
this.matrixManager = matrixManager;
|
|
18344
18387
|
this.tx = tx;
|
|
18345
|
-
this.decryptBackend = encryption;
|
|
18346
|
-
this.encryptBackend = targetEncryption ?? encryption;
|
|
18347
18388
|
}
|
|
18389
|
+
buildSource;
|
|
18348
18390
|
matrixManager;
|
|
18349
18391
|
tx;
|
|
18350
|
-
decryptBackend;
|
|
18351
|
-
encryptBackend;
|
|
18352
18392
|
async migrate(manifest, repoRoot, options, onProgress) {
|
|
18353
18393
|
const { target, environment, dryRun, skipVerify } = options;
|
|
18354
18394
|
if (environment) {
|
|
@@ -18368,10 +18408,12 @@ var BackendMigrator = class {
|
|
|
18368
18408
|
warnings: ["No encrypted files found to migrate."]
|
|
18369
18409
|
};
|
|
18370
18410
|
}
|
|
18411
|
+
const sourceBefore = this.buildSource(manifest);
|
|
18371
18412
|
const toMigrate = [];
|
|
18372
18413
|
const skippedFiles = [];
|
|
18373
18414
|
for (const cell of targetCells) {
|
|
18374
|
-
const
|
|
18415
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
18416
|
+
const meta = await sourceBefore.getCellMetadata(ref);
|
|
18375
18417
|
if (metadataMatchesTarget(meta, target)) {
|
|
18376
18418
|
skippedFiles.push(cell.filePath);
|
|
18377
18419
|
onProgress?.({
|
|
@@ -18392,6 +18434,8 @@ var BackendMigrator = class {
|
|
|
18392
18434
|
warnings: ["All files already use the target backend and key. Nothing to migrate."]
|
|
18393
18435
|
};
|
|
18394
18436
|
}
|
|
18437
|
+
const preMigrationWarnings = [];
|
|
18438
|
+
this.checkAgeRecipientsWarning(manifest, target, environment, preMigrationWarnings);
|
|
18395
18439
|
if (dryRun) {
|
|
18396
18440
|
const warnings2 = [];
|
|
18397
18441
|
for (const cell of toMigrate) {
|
|
@@ -18408,7 +18452,7 @@ var BackendMigrator = class {
|
|
|
18408
18452
|
} else {
|
|
18409
18453
|
warnings2.push(`Would update global default_backend \u2192 ${target.backend}`);
|
|
18410
18454
|
}
|
|
18411
|
-
|
|
18455
|
+
warnings2.push(...preMigrationWarnings);
|
|
18412
18456
|
return {
|
|
18413
18457
|
migratedFiles: [],
|
|
18414
18458
|
skippedFiles,
|
|
@@ -18420,11 +18464,12 @@ var BackendMigrator = class {
|
|
|
18420
18464
|
const migratedFiles = [];
|
|
18421
18465
|
let migrationFailed = false;
|
|
18422
18466
|
let migrationError;
|
|
18467
|
+
let sourceAfter;
|
|
18423
18468
|
try {
|
|
18424
18469
|
await this.tx.run(repoRoot, {
|
|
18425
18470
|
description: environment ? `clef migrate-backend ${target.backend}: ${environment}` : `clef migrate-backend ${target.backend}`,
|
|
18426
18471
|
paths: [
|
|
18427
|
-
...toMigrate.map((c) =>
|
|
18472
|
+
...toMigrate.map((c) => path23.relative(repoRoot, c.filePath)),
|
|
18428
18473
|
CLEF_MANIFEST_FILENAME
|
|
18429
18474
|
],
|
|
18430
18475
|
mutate: async () => {
|
|
@@ -18432,19 +18477,16 @@ var BackendMigrator = class {
|
|
|
18432
18477
|
this.updateManifestDoc(doc, target, environment);
|
|
18433
18478
|
writeManifestYaml(repoRoot, doc);
|
|
18434
18479
|
const updatedManifest = YAML12.parse(YAML12.stringify(doc));
|
|
18480
|
+
sourceAfter = this.buildSource(updatedManifest);
|
|
18435
18481
|
for (const cell of toMigrate) {
|
|
18436
18482
|
onProgress?.({
|
|
18437
18483
|
type: "migrate",
|
|
18438
18484
|
file: cell.filePath,
|
|
18439
18485
|
message: `Migrating ${cell.namespace}/${cell.environment}...`
|
|
18440
18486
|
});
|
|
18441
|
-
const
|
|
18442
|
-
await
|
|
18443
|
-
|
|
18444
|
-
decrypted.values,
|
|
18445
|
-
updatedManifest,
|
|
18446
|
-
cell.environment
|
|
18447
|
-
);
|
|
18487
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
18488
|
+
const decrypted = await sourceBefore.readCell(ref);
|
|
18489
|
+
await sourceAfter.writeCell(ref, decrypted.values);
|
|
18448
18490
|
migratedFiles.push(cell.filePath);
|
|
18449
18491
|
}
|
|
18450
18492
|
}
|
|
@@ -18464,12 +18506,17 @@ var BackendMigrator = class {
|
|
|
18464
18506
|
rolledBack: true,
|
|
18465
18507
|
error: migrationError.message,
|
|
18466
18508
|
verifiedFiles: [],
|
|
18467
|
-
warnings
|
|
18509
|
+
// Surface pre-migration warnings even on rollback. The new manifest
|
|
18510
|
+
// validator can reject the write (e.g. per-env recipients vs.
|
|
18511
|
+
// non-age backend), and without these warnings the user only sees
|
|
18512
|
+
// an opaque "rolled back" message — not the actionable hint about
|
|
18513
|
+
// what to clean up first.
|
|
18514
|
+
warnings: ["All changes have been rolled back.", ...preMigrationWarnings]
|
|
18468
18515
|
};
|
|
18469
18516
|
}
|
|
18470
18517
|
const verifiedFiles = [];
|
|
18471
18518
|
const warnings = [];
|
|
18472
|
-
if (!skipVerify) {
|
|
18519
|
+
if (!skipVerify && sourceAfter) {
|
|
18473
18520
|
for (const cell of toMigrate) {
|
|
18474
18521
|
try {
|
|
18475
18522
|
onProgress?.({
|
|
@@ -18477,7 +18524,8 @@ var BackendMigrator = class {
|
|
|
18477
18524
|
file: cell.filePath,
|
|
18478
18525
|
message: `Verifying ${cell.namespace}/${cell.environment}...`
|
|
18479
18526
|
});
|
|
18480
|
-
|
|
18527
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
18528
|
+
await sourceAfter.readCell(ref);
|
|
18481
18529
|
verifiedFiles.push(cell.filePath);
|
|
18482
18530
|
} catch (err) {
|
|
18483
18531
|
const errorMsg = err instanceof Error ? err.message : String(err);
|
|
@@ -18487,7 +18535,7 @@ var BackendMigrator = class {
|
|
|
18487
18535
|
}
|
|
18488
18536
|
}
|
|
18489
18537
|
}
|
|
18490
|
-
|
|
18538
|
+
warnings.push(...preMigrationWarnings);
|
|
18491
18539
|
return { migratedFiles, skippedFiles, rolledBack: false, verifiedFiles, warnings };
|
|
18492
18540
|
}
|
|
18493
18541
|
// ── Private helpers ──────────────────────────────────────────────────
|
|
@@ -18522,16 +18570,16 @@ var BackendMigrator = class {
|
|
|
18522
18570
|
};
|
|
18523
18571
|
|
|
18524
18572
|
// src/reset/manager.ts
|
|
18525
|
-
var
|
|
18573
|
+
var path24 = __toESM(require("path"));
|
|
18526
18574
|
var ResetManager = class {
|
|
18527
|
-
constructor(matrixManager,
|
|
18575
|
+
constructor(matrixManager, buildSource, schemaValidator, tx) {
|
|
18528
18576
|
this.matrixManager = matrixManager;
|
|
18529
|
-
this.
|
|
18577
|
+
this.buildSource = buildSource;
|
|
18530
18578
|
this.schemaValidator = schemaValidator;
|
|
18531
18579
|
this.tx = tx;
|
|
18532
18580
|
}
|
|
18533
18581
|
matrixManager;
|
|
18534
|
-
|
|
18582
|
+
buildSource;
|
|
18535
18583
|
schemaValidator;
|
|
18536
18584
|
tx;
|
|
18537
18585
|
async reset(opts, manifest, repoRoot) {
|
|
@@ -18551,11 +18599,11 @@ var ResetManager = class {
|
|
|
18551
18599
|
txPaths.push(CLEF_MANIFEST_FILENAME);
|
|
18552
18600
|
}
|
|
18553
18601
|
for (const cell of targetCells) {
|
|
18554
|
-
txPaths.push(
|
|
18602
|
+
txPaths.push(path24.relative(repoRoot, cell.filePath));
|
|
18555
18603
|
const cellKeys = keyPlan.get(cell.namespace) ?? [];
|
|
18556
18604
|
if (cellKeys.length > 0) {
|
|
18557
18605
|
txPaths.push(
|
|
18558
|
-
|
|
18606
|
+
path24.relative(repoRoot, cell.filePath.replace(/\.enc\.(yaml|json)$/, ".clef-meta.yaml"))
|
|
18559
18607
|
);
|
|
18560
18608
|
}
|
|
18561
18609
|
}
|
|
@@ -18572,17 +18620,14 @@ var ResetManager = class {
|
|
|
18572
18620
|
writeManifestYaml(repoRoot, doc);
|
|
18573
18621
|
effectiveManifest = withBackendOverride(manifest, affectedEnvs, opts.backend, opts.key);
|
|
18574
18622
|
}
|
|
18623
|
+
const source = this.buildSource(effectiveManifest);
|
|
18575
18624
|
for (const cell of targetCells) {
|
|
18576
18625
|
const keys = keyPlan.get(cell.namespace) ?? [];
|
|
18577
18626
|
const placeholders = this.buildPlaceholders(keys);
|
|
18578
|
-
|
|
18579
|
-
|
|
18580
|
-
placeholders,
|
|
18581
|
-
effectiveManifest,
|
|
18582
|
-
cell.environment
|
|
18583
|
-
);
|
|
18627
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
18628
|
+
await source.writeCell(ref, placeholders);
|
|
18584
18629
|
if (keys.length > 0) {
|
|
18585
|
-
await
|
|
18630
|
+
await source.markPending(ref, keys, "clef reset");
|
|
18586
18631
|
pendingKeysByCell[cell.filePath] = keys;
|
|
18587
18632
|
}
|
|
18588
18633
|
scaffoldedCells.push(cell.filePath);
|
|
@@ -18643,7 +18688,7 @@ var ResetManager = class {
|
|
|
18643
18688
|
for (const namespace of namespaces) {
|
|
18644
18689
|
const nsDef = manifest.namespaces.find((n) => n.name === namespace);
|
|
18645
18690
|
if (nsDef?.schema) {
|
|
18646
|
-
const schema = this.schemaValidator.loadSchema(
|
|
18691
|
+
const schema = this.schemaValidator.loadSchema(path24.join(repoRoot, nsDef.schema));
|
|
18647
18692
|
plan.set(namespace, Object.keys(schema.keys));
|
|
18648
18693
|
continue;
|
|
18649
18694
|
}
|
|
@@ -18722,15 +18767,15 @@ function withBackendOverride(manifest, envNames, backend, key) {
|
|
|
18722
18767
|
}
|
|
18723
18768
|
|
|
18724
18769
|
// src/sync/manager.ts
|
|
18725
|
-
var
|
|
18770
|
+
var path25 = __toESM(require("path"));
|
|
18726
18771
|
var SyncManager = class {
|
|
18727
|
-
constructor(matrixManager,
|
|
18772
|
+
constructor(matrixManager, source, tx) {
|
|
18728
18773
|
this.matrixManager = matrixManager;
|
|
18729
|
-
this.
|
|
18774
|
+
this.source = source;
|
|
18730
18775
|
this.tx = tx;
|
|
18731
18776
|
}
|
|
18732
18777
|
matrixManager;
|
|
18733
|
-
|
|
18778
|
+
source;
|
|
18734
18779
|
tx;
|
|
18735
18780
|
/**
|
|
18736
18781
|
* Compute what sync would do without mutating anything.
|
|
@@ -18747,8 +18792,13 @@ var SyncManager = class {
|
|
|
18747
18792
|
const targetCells = opts.namespace ? existingCells.filter((c) => c.namespace === opts.namespace) : existingCells;
|
|
18748
18793
|
const keysByNsEnv = {};
|
|
18749
18794
|
for (const cell of targetCells) {
|
|
18750
|
-
const
|
|
18751
|
-
|
|
18795
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
18796
|
+
let keys;
|
|
18797
|
+
try {
|
|
18798
|
+
keys = await this.source.listKeys(ref);
|
|
18799
|
+
} catch {
|
|
18800
|
+
continue;
|
|
18801
|
+
}
|
|
18752
18802
|
if (!keysByNsEnv[cell.namespace]) keysByNsEnv[cell.namespace] = {};
|
|
18753
18803
|
keysByNsEnv[cell.namespace][cell.environment] = new Set(keys);
|
|
18754
18804
|
}
|
|
@@ -18794,7 +18844,7 @@ var SyncManager = class {
|
|
|
18794
18844
|
}
|
|
18795
18845
|
const txPaths = [];
|
|
18796
18846
|
for (const cell of syncPlan.cells) {
|
|
18797
|
-
const rel =
|
|
18847
|
+
const rel = path25.relative(repoRoot, cell.filePath);
|
|
18798
18848
|
txPaths.push(rel);
|
|
18799
18849
|
txPaths.push(rel.replace(/\.enc\.(yaml|json)$/, ".clef-meta.yaml"));
|
|
18800
18850
|
}
|
|
@@ -18807,17 +18857,13 @@ var SyncManager = class {
|
|
|
18807
18857
|
paths: txPaths,
|
|
18808
18858
|
mutate: async () => {
|
|
18809
18859
|
for (const cell of syncPlan.cells) {
|
|
18810
|
-
const
|
|
18860
|
+
const ref = { namespace: cell.namespace, environment: cell.environment };
|
|
18861
|
+
const decrypted = await this.source.readCell(ref);
|
|
18811
18862
|
for (const key of cell.missingKeys) {
|
|
18812
18863
|
decrypted.values[key] = generateRandomValue();
|
|
18813
18864
|
}
|
|
18814
|
-
await this.
|
|
18815
|
-
|
|
18816
|
-
decrypted.values,
|
|
18817
|
-
manifest,
|
|
18818
|
-
cell.environment
|
|
18819
|
-
);
|
|
18820
|
-
await markPendingWithRetry(cell.filePath, cell.missingKeys, "clef sync");
|
|
18865
|
+
await this.source.writeCell(ref, decrypted.values);
|
|
18866
|
+
await this.source.markPending(ref, cell.missingKeys, "clef sync");
|
|
18821
18867
|
const cellLabel = `${cell.namespace}/${cell.environment}`;
|
|
18822
18868
|
modifiedCells.push(cellLabel);
|
|
18823
18869
|
scaffoldedKeys[cellLabel] = cell.missingKeys;
|
|
@@ -19077,13 +19123,272 @@ var ComplianceGenerator = class {
|
|
|
19077
19123
|
};
|
|
19078
19124
|
|
|
19079
19125
|
// src/compliance/run.ts
|
|
19080
|
-
var
|
|
19126
|
+
var path27 = __toESM(require("path"));
|
|
19127
|
+
|
|
19128
|
+
// src/source/compose.ts
|
|
19129
|
+
var YAML14 = __toESM(require("yaml"));
|
|
19130
|
+
|
|
19131
|
+
// src/source/default-bulk.ts
|
|
19132
|
+
function defaultBulk(source) {
|
|
19133
|
+
return {
|
|
19134
|
+
async bulkSet(namespace, key, valuesByEnv, _manifest) {
|
|
19135
|
+
for (const [environment, value] of Object.entries(valuesByEnv)) {
|
|
19136
|
+
const cell = { namespace, environment };
|
|
19137
|
+
const existing = await source.cellExists(cell) ? (await source.readCell(cell)).values : {};
|
|
19138
|
+
await source.writeCell(cell, { ...existing, [key]: value });
|
|
19139
|
+
}
|
|
19140
|
+
},
|
|
19141
|
+
async bulkDelete(namespace, key, manifest) {
|
|
19142
|
+
for (const env of manifest.environments) {
|
|
19143
|
+
const cell = { namespace, environment: env.name };
|
|
19144
|
+
if (!await source.cellExists(cell)) continue;
|
|
19145
|
+
const data = await source.readCell(cell);
|
|
19146
|
+
if (!(key in data.values)) continue;
|
|
19147
|
+
const next = { ...data.values };
|
|
19148
|
+
delete next[key];
|
|
19149
|
+
await source.writeCell(cell, next);
|
|
19150
|
+
}
|
|
19151
|
+
},
|
|
19152
|
+
async copyValue(key, from, to, _manifest) {
|
|
19153
|
+
const src = await source.readCell(from);
|
|
19154
|
+
if (!(key in src.values)) {
|
|
19155
|
+
throw new Error(
|
|
19156
|
+
`Cannot copy: key '${key}' not present in ${from.namespace}/${from.environment}`
|
|
19157
|
+
);
|
|
19158
|
+
}
|
|
19159
|
+
const dst = await source.cellExists(to) ? (await source.readCell(to)).values : {};
|
|
19160
|
+
await source.writeCell(to, { ...dst, [key]: src.values[key] });
|
|
19161
|
+
}
|
|
19162
|
+
};
|
|
19163
|
+
}
|
|
19164
|
+
|
|
19165
|
+
// src/source/compose.ts
|
|
19166
|
+
function composeSecretSource(storage, encryption, manifest) {
|
|
19167
|
+
return new ComposedSecretSource(storage, encryption, manifest);
|
|
19168
|
+
}
|
|
19169
|
+
var ComposedSecretSource = class {
|
|
19170
|
+
constructor(storage, encryption, manifest) {
|
|
19171
|
+
this.storage = storage;
|
|
19172
|
+
this.encryption = encryption;
|
|
19173
|
+
this.manifest = manifest;
|
|
19174
|
+
this.id = `${storage.id}+${encryption.id}`;
|
|
19175
|
+
this.description = `${storage.description} / ${encryption.description}`;
|
|
19176
|
+
}
|
|
19177
|
+
storage;
|
|
19178
|
+
encryption;
|
|
19179
|
+
manifest;
|
|
19180
|
+
id;
|
|
19181
|
+
description;
|
|
19182
|
+
context(cell) {
|
|
19183
|
+
return {
|
|
19184
|
+
manifest: this.manifest,
|
|
19185
|
+
environment: cell.environment,
|
|
19186
|
+
format: this.storage.blobFormat(cell)
|
|
19187
|
+
};
|
|
19188
|
+
}
|
|
19189
|
+
// ── Core SecretSource ──────────────────────────────────────────────────
|
|
19190
|
+
async readCell(cell) {
|
|
19191
|
+
const blob = await this.storage.readBlob(cell);
|
|
19192
|
+
return this.encryption.decrypt(blob, this.context(cell));
|
|
19193
|
+
}
|
|
19194
|
+
async writeCell(cell, values) {
|
|
19195
|
+
const blob = await this.encryption.encrypt(values, this.context(cell));
|
|
19196
|
+
await this.storage.writeBlob(cell, blob);
|
|
19197
|
+
}
|
|
19198
|
+
async deleteCell(cell) {
|
|
19199
|
+
await this.storage.deleteBlob(cell);
|
|
19200
|
+
}
|
|
19201
|
+
async cellExists(cell) {
|
|
19202
|
+
return this.storage.blobExists(cell);
|
|
19203
|
+
}
|
|
19204
|
+
/**
|
|
19205
|
+
* List cell keys WITHOUT decrypting. SOPS files store key names in
|
|
19206
|
+
* plaintext at the top level of the YAML/JSON document — we read the
|
|
19207
|
+
* blob and return everything except the `sops:` metadata block.
|
|
19208
|
+
*
|
|
19209
|
+
* NOTE: this is currently SOPS-shaped. A future non-SOPS
|
|
19210
|
+
* `EncryptionBackend` whose ciphertext doesn't expose key names in
|
|
19211
|
+
* the clear would need its own listing strategy — likely a
|
|
19212
|
+
* `listKeys(blob)` method on `EncryptionBackend`. Deferred until a
|
|
19213
|
+
* second backend exists.
|
|
19214
|
+
*/
|
|
19215
|
+
async listKeys(cell) {
|
|
19216
|
+
if (!await this.storage.blobExists(cell)) return [];
|
|
19217
|
+
const blob = await this.storage.readBlob(cell);
|
|
19218
|
+
const parsed = YAML14.parse(blob);
|
|
19219
|
+
if (!parsed || typeof parsed !== "object") return [];
|
|
19220
|
+
return Object.keys(parsed).filter((k) => k !== "sops");
|
|
19221
|
+
}
|
|
19222
|
+
async getCellMetadata(cell) {
|
|
19223
|
+
const blob = await this.storage.readBlob(cell);
|
|
19224
|
+
return this.encryption.getMetadata(blob);
|
|
19225
|
+
}
|
|
19226
|
+
async scaffoldCell(cell, manifest) {
|
|
19227
|
+
if (await this.storage.blobExists(cell)) return;
|
|
19228
|
+
const blob = await this.encryption.encrypt(
|
|
19229
|
+
{},
|
|
19230
|
+
{
|
|
19231
|
+
manifest,
|
|
19232
|
+
environment: cell.environment,
|
|
19233
|
+
format: this.storage.blobFormat(cell)
|
|
19234
|
+
}
|
|
19235
|
+
);
|
|
19236
|
+
await this.storage.writeBlob(cell, blob);
|
|
19237
|
+
}
|
|
19238
|
+
// ── Pending / rotation metadata ────────────────────────────────────────
|
|
19239
|
+
async getPendingMetadata(cell) {
|
|
19240
|
+
return this.storage.readPendingMetadata(cell);
|
|
19241
|
+
}
|
|
19242
|
+
async markPending(cell, keys, setBy) {
|
|
19243
|
+
const meta = await this.storage.readPendingMetadata(cell);
|
|
19244
|
+
const now = /* @__PURE__ */ new Date();
|
|
19245
|
+
for (const key of keys) {
|
|
19246
|
+
if (!meta.pending.find((p) => p.key === key)) {
|
|
19247
|
+
meta.pending.push({ key, since: now, setBy });
|
|
19248
|
+
}
|
|
19249
|
+
}
|
|
19250
|
+
await this.storage.writePendingMetadata(cell, meta);
|
|
19251
|
+
}
|
|
19252
|
+
async markResolved(cell, keys) {
|
|
19253
|
+
const meta = await this.storage.readPendingMetadata(cell);
|
|
19254
|
+
meta.pending = meta.pending.filter((p) => !keys.includes(p.key));
|
|
19255
|
+
await this.storage.writePendingMetadata(cell, meta);
|
|
19256
|
+
}
|
|
19257
|
+
async recordRotation(cell, keys, rotatedBy) {
|
|
19258
|
+
const meta = await this.storage.readPendingMetadata(cell);
|
|
19259
|
+
const now = /* @__PURE__ */ new Date();
|
|
19260
|
+
for (const key of keys) {
|
|
19261
|
+
const existing = meta.rotations.find((r) => r.key === key);
|
|
19262
|
+
if (existing) {
|
|
19263
|
+
existing.lastRotatedAt = now;
|
|
19264
|
+
existing.rotatedBy = rotatedBy;
|
|
19265
|
+
existing.rotationCount += 1;
|
|
19266
|
+
} else {
|
|
19267
|
+
meta.rotations.push({ key, lastRotatedAt: now, rotatedBy, rotationCount: 1 });
|
|
19268
|
+
}
|
|
19269
|
+
}
|
|
19270
|
+
meta.pending = meta.pending.filter((p) => !keys.includes(p.key));
|
|
19271
|
+
await this.storage.writePendingMetadata(cell, meta);
|
|
19272
|
+
}
|
|
19273
|
+
async removeRotation(cell, keys) {
|
|
19274
|
+
const meta = await this.storage.readPendingMetadata(cell);
|
|
19275
|
+
meta.rotations = meta.rotations.filter((r) => !keys.includes(r.key));
|
|
19276
|
+
await this.storage.writePendingMetadata(cell, meta);
|
|
19277
|
+
}
|
|
19278
|
+
// ── Lintable ───────────────────────────────────────────────────────────
|
|
19279
|
+
async validateEncryption(cell) {
|
|
19280
|
+
if (!await this.storage.blobExists(cell)) return false;
|
|
19281
|
+
const blob = await this.storage.readBlob(cell);
|
|
19282
|
+
return this.encryption.validateEncryption(blob);
|
|
19283
|
+
}
|
|
19284
|
+
async checkRecipientDrift(cell, expected) {
|
|
19285
|
+
const blob = await this.storage.readBlob(cell);
|
|
19286
|
+
const meta = this.encryption.getMetadata(blob);
|
|
19287
|
+
const actual = new Set(meta.recipients);
|
|
19288
|
+
const expectedSet = new Set(expected);
|
|
19289
|
+
return {
|
|
19290
|
+
missing: expected.filter((r) => !actual.has(r)),
|
|
19291
|
+
unexpected: meta.recipients.filter((r) => !expectedSet.has(r))
|
|
19292
|
+
};
|
|
19293
|
+
}
|
|
19294
|
+
// ── Rotatable ──────────────────────────────────────────────────────────
|
|
19295
|
+
async rotate(cell, opts) {
|
|
19296
|
+
const blob = await this.storage.readBlob(cell);
|
|
19297
|
+
const rotated = await this.encryption.rotate(blob, opts, this.context(cell));
|
|
19298
|
+
await this.storage.writeBlob(cell, rotated);
|
|
19299
|
+
}
|
|
19300
|
+
// ── Bulk ───────────────────────────────────────────────────────────────
//
// Default looped implementation. A future StorageBackend that supports
// batch operations (e.g. PostgresStorageBackend with row-level UPDATE
// batching) can override these by wrapping `composeSecretSource`'s
// output and replacing just the bulk methods.
//
// NOTE: arrow-function class fields capture `this` at construction time,
// so these stay correct even when passed around detached.
bulkSet = (namespace, key, valuesByEnv, manifest) => defaultBulk(this).bulkSet(namespace, key, valuesByEnv, manifest);
bulkDelete = (namespace, key, manifest) => defaultBulk(this).bulkDelete(namespace, key, manifest);
copyValue = (key, from, to, manifest) => defaultBulk(this).copyValue(key, from, to, manifest);
};
|
|
19310
|
+
|
|
19311
|
+
// src/source/filesystem-storage-backend.ts
|
|
19312
|
+
var fs22 = __toESM(require("fs"));
|
|
19313
|
+
var path26 = __toESM(require("path"));
|
|
19314
|
+
var import_crypto3 = require("crypto");
|
|
19315
|
+
var FilesystemStorageBackend = class {
  /**
   * @param manifest clef manifest; its `file_pattern` maps cells to paths.
   * @param repoRoot repository root that relative paths resolve against.
   */
  constructor(manifest, repoRoot) {
    this.manifest = manifest;
    this.repoRoot = repoRoot;
  }
  manifest;
  repoRoot;
  id = "filesystem";
  description = "Filesystem-backed cell storage (default substrate)";
  /**
   * Resolve a cell reference to its absolute filesystem path. Public —
   * used by substrate-specific trait implementations.
   */
  cellPath(cell) {
    const relativePath = this.manifest.file_pattern.replace("{namespace}", cell.namespace).replace("{environment}", cell.environment);
    return path26.join(this.repoRoot, relativePath);
  }
  /** The repo root, exposed for filesystem-shaped trait implementations. */
  getRepoRoot() {
    return this.repoRoot;
  }
  // Format inferred from the extension: `.json` → "json", anything else "yaml".
  blobFormat(cell) {
    return this.cellPath(cell).endsWith(".json") ? "json" : "yaml";
  }
  // Synchronous read under an async signature to satisfy StorageBackend.
  async readBlob(cell) {
    const filePath = this.cellPath(cell);
    return fs22.readFileSync(filePath, "utf-8");
  }
  /**
   * Atomically replace the cell's blob: write to a unique temp file in the
   * same directory, fsync, then rename over the target — readers never see
   * a partially written blob.
   */
  async writeBlob(cell, blob) {
    const filePath = this.cellPath(cell);
    const dir = path26.dirname(filePath);
    if (!fs22.existsSync(dir)) {
      fs22.mkdirSync(dir, { recursive: true });
    }
    const tmpPath = `${filePath}.${Date.now()}.${(0, import_crypto3.randomBytes)(4).toString("hex")}.tmp`;
    try {
      const handle = fs22.openSync(tmpPath, "w");
      try {
        fs22.writeFileSync(handle, blob);
        // Flush file contents to disk before the rename publishes them.
        fs22.fsyncSync(handle);
      } finally {
        fs22.closeSync(handle);
      }
      fs22.renameSync(tmpPath, filePath);
    } catch (err) {
      // FIX: don't leak orphaned `*.tmp` files when the write, fsync, or
      // rename fails — remove the temp file (best effort) and rethrow.
      try {
        fs22.unlinkSync(tmpPath);
      } catch {
        /* temp file may not exist; nothing more we can do */
      }
      throw err;
    }
  }
  // Remove both the blob and its metadata sidecar, when present.
  async deleteBlob(cell) {
    const filePath = this.cellPath(cell);
    if (fs22.existsSync(filePath)) {
      fs22.unlinkSync(filePath);
    }
    const sidecar = this.sidecarPath(filePath);
    if (fs22.existsSync(sidecar)) {
      fs22.unlinkSync(sidecar);
    }
  }
  async blobExists(cell) {
    return fs22.existsSync(this.cellPath(cell));
  }
  async readPendingMetadata(cell) {
    return loadMetadata(this.cellPath(cell));
  }
  async writePendingMetadata(cell, meta) {
    await saveMetadata(this.cellPath(cell), meta);
  }
  // Sidecar lives next to the blob as `<base>.clef-meta.yaml`, where <base>
  // is the blob filename with any `.enc.yaml` / `.enc.json` suffix stripped.
  sidecarPath(filePath) {
    const dir = path26.dirname(filePath);
    const base = path26.basename(filePath).replace(/\.enc\.(yaml|json)$/, "");
    return path26.join(dir, `${base}.clef-meta.yaml`);
  }
};
|
|
19384
|
+
|
|
19385
|
+
// src/compliance/run.ts
|
|
19081
19386
|
var UNKNOWN = "unknown";
|
|
19082
19387
|
async function runCompliance(opts) {
|
|
19083
19388
|
const start = Date.now();
|
|
19084
19389
|
const repoRoot = opts.repoRoot ?? process.cwd();
|
|
19085
|
-
const manifestPath = opts.manifestPath ??
|
|
19086
|
-
const policyPath = opts.policyPath ??
|
|
19390
|
+
const manifestPath = opts.manifestPath ?? path27.join(repoRoot, "clef.yaml");
|
|
19391
|
+
const policyPath = opts.policyPath ?? path27.join(repoRoot, CLEF_POLICY_FILENAME);
|
|
19087
19392
|
const include = {
|
|
19088
19393
|
scan: opts.include?.scan ?? true,
|
|
19089
19394
|
lint: opts.include?.lint ?? true,
|
|
@@ -19095,6 +19400,11 @@ async function runCompliance(opts) {
|
|
|
19095
19400
|
const sopsClient = new SopsClient(opts.runner, opts.ageKeyFile, opts.ageKey, opts.sopsPath);
|
|
19096
19401
|
const matrixManager = new MatrixManager();
|
|
19097
19402
|
const schemaValidator = new SchemaValidator();
|
|
19403
|
+
const lintSource = composeSecretSource(
|
|
19404
|
+
new FilesystemStorageBackend(manifest, repoRoot),
|
|
19405
|
+
sopsClient,
|
|
19406
|
+
manifest
|
|
19407
|
+
);
|
|
19098
19408
|
const [sha, repo, files, scanResult, lintResult] = await Promise.all([
|
|
19099
19409
|
opts.sha !== void 0 ? Promise.resolve(opts.sha) : detectSha(opts.runner, repoRoot),
|
|
19100
19410
|
opts.repo !== void 0 ? Promise.resolve(opts.repo) : detectRepo(opts.runner, repoRoot),
|
|
@@ -19103,12 +19413,12 @@ async function runCompliance(opts) {
|
|
|
19103
19413
|
repoRoot,
|
|
19104
19414
|
policy,
|
|
19105
19415
|
matrixManager,
|
|
19106
|
-
|
|
19416
|
+
source: lintSource,
|
|
19107
19417
|
filter: opts.filter,
|
|
19108
19418
|
now
|
|
19109
19419
|
}) : Promise.resolve([]),
|
|
19110
19420
|
include.scan ? new ScanRunner(opts.runner).scan(repoRoot, manifest) : Promise.resolve(emptyScan()),
|
|
19111
|
-
include.lint ? new LintRunner(matrixManager, schemaValidator,
|
|
19421
|
+
include.lint ? new LintRunner(matrixManager, schemaValidator, lintSource).run(manifest, repoRoot) : Promise.resolve(emptyLint())
|
|
19112
19422
|
]);
|
|
19113
19423
|
const adjustedLint = downgradeDecryptIssues(lintResult);
|
|
19114
19424
|
const document = new ComplianceGenerator().generate({
|
|
@@ -19128,8 +19438,11 @@ async function evaluateMatrix(args) {
|
|
|
19128
19438
|
const cells = args.matrixManager.resolveMatrix(args.manifest, args.repoRoot).filter((c) => applyFilter(c.namespace, c.environment, args.filter)).filter((c) => c.exists);
|
|
19129
19439
|
return Promise.all(
|
|
19130
19440
|
cells.map(async (cell) => {
|
|
19131
|
-
const metadata = await args.
|
|
19132
|
-
|
|
19441
|
+
const metadata = await args.source.getCellMetadata({
|
|
19442
|
+
namespace: cell.namespace,
|
|
19443
|
+
environment: cell.environment
|
|
19444
|
+
});
|
|
19445
|
+
const relPath = path27.relative(args.repoRoot, cell.filePath).replace(/\\/g, "/");
|
|
19133
19446
|
const keys = readSopsKeyNames(cell.filePath) ?? [];
|
|
19134
19447
|
const rotations = await getRotations(cell.filePath);
|
|
19135
19448
|
return evaluator.evaluateFile(relPath, cell.environment, metadata, keys, rotations, args.now);
|
|
@@ -19187,11 +19500,62 @@ async function detectRepo(runner, repoRoot) {
|
|
|
19187
19500
|
const match = url.match(/[:/]([^/:]+)\/([^/]+?)(?:\.git)?\/?$/);
|
|
19188
19501
|
return match ? `${match[1]}/${match[2]}` : UNKNOWN;
|
|
19189
19502
|
}
|
|
19503
|
+
|
|
19504
|
+
// src/source/guards.ts
// Structural capability probes for SecretSource objects: each guard
// checks that the required trait methods exist as functions.
function isFn(o, name) {
  if (typeof o !== "object" || o === null) return false;
  return typeof o[name] === "function";
}
function isLintable(s) {
  return ["validateEncryption", "checkRecipientDrift"].every((m) => isFn(s, m));
}
function isRotatable(s) {
  return isFn(s, "rotate");
}
function isRecipientManaged(s) {
  return ["listRecipients", "addRecipient", "removeRecipient"].every((m) => isFn(s, m));
}
function isMergeAware(s) {
  return ["mergeCells", "installMergeDriver"].every((m) => isFn(s, m));
}
function isMigratable(s) {
  return isFn(s, "migrateBackend");
}
function isBulk(s) {
  return ["bulkSet", "bulkDelete", "copyValue"].every((m) => isFn(s, m));
}
function isStructural(s) {
  return ["addNamespace", "addEnvironment", "renameNamespace", "renameEnvironment"].every((m) => isFn(s, m));
}
// Snapshot of every optional trait as one boolean each.
function describeCapabilities(s) {
  return {
    lint: isLintable(s),
    rotate: isRotatable(s),
    recipients: isRecipientManaged(s),
    merge: isMergeAware(s),
    migrate: isMigratable(s),
    bulk: isBulk(s),
    structural: isStructural(s)
  };
}
|
|
19540
|
+
|
|
19541
|
+
// src/source/errors.ts
// Raised when a command requires an optional SecretSource trait (rotate,
// merge, bulk, …) that the active source does not implement.
var SourceCapabilityUnsupportedError = class extends ClefError {
  capability;
  sourceId;
  constructor(capability, sourceId) {
    super(
      `'${capability}' is not supported by the '${sourceId}' source.`,
      `Switch to a source that implements ${capability}, or use a different command.`
    );
    this.name = "SourceCapabilityUnsupportedError";
    this.capability = capability;
    this.sourceId = sourceId;
  }
};
|
|
19190
19555
|
// Annotate the CommonJS export names for ESM import in node:
|
|
19191
19556
|
0 && (module.exports = {
|
|
19192
19557
|
ArtifactPacker,
|
|
19193
19558
|
BackendMigrator,
|
|
19194
|
-
BulkOps,
|
|
19195
19559
|
CLEF_MANIFEST_FILENAME,
|
|
19196
19560
|
CLEF_POLICY_FILENAME,
|
|
19197
19561
|
CLEF_REPORT_SCHEMA_VERSION,
|
|
@@ -19205,6 +19569,7 @@ async function detectRepo(runner, repoRoot) {
|
|
|
19205
19569
|
DiffEngine,
|
|
19206
19570
|
DriftDetector,
|
|
19207
19571
|
FilePackOutput,
|
|
19572
|
+
FilesystemStorageBackend,
|
|
19208
19573
|
GitIntegration,
|
|
19209
19574
|
GitOperationError,
|
|
19210
19575
|
ImportRunner,
|
|
@@ -19221,7 +19586,6 @@ async function detectRepo(runner, repoRoot) {
|
|
|
19221
19586
|
PolicyValidationError,
|
|
19222
19587
|
REQUESTS_FILENAME,
|
|
19223
19588
|
REQUIREMENTS,
|
|
19224
|
-
REVEAL_WARNING,
|
|
19225
19589
|
RecipientManager,
|
|
19226
19590
|
ReportGenerator,
|
|
19227
19591
|
ReportSanitizer,
|
|
@@ -19238,6 +19602,7 @@ async function detectRepo(runner, repoRoot) {
|
|
|
19238
19602
|
SopsMergeDriver,
|
|
19239
19603
|
SopsMissingError,
|
|
19240
19604
|
SopsVersionError,
|
|
19605
|
+
SourceCapabilityUnsupportedError,
|
|
19241
19606
|
StructureManager,
|
|
19242
19607
|
SyncManager,
|
|
19243
19608
|
TransactionLockError,
|
|
@@ -19257,11 +19622,11 @@ async function detectRepo(runner, repoRoot) {
|
|
|
19257
19622
|
checkAll,
|
|
19258
19623
|
checkDependency,
|
|
19259
19624
|
collectCIContext,
|
|
19625
|
+
composeSecretSource,
|
|
19260
19626
|
computeCiphertextHash,
|
|
19261
19627
|
deriveAgePublicKey,
|
|
19628
|
+
describeCapabilities,
|
|
19262
19629
|
describeScope,
|
|
19263
|
-
detectAlgorithm,
|
|
19264
|
-
detectFormat,
|
|
19265
19630
|
emptyTemplate,
|
|
19266
19631
|
exampleTemplate,
|
|
19267
19632
|
findRequest,
|
|
@@ -19269,35 +19634,27 @@ async function detectRepo(runner, repoRoot) {
|
|
|
19269
19634
|
formatRevealWarning,
|
|
19270
19635
|
generateAgeIdentity,
|
|
19271
19636
|
generateRandomValue,
|
|
19272
|
-
|
|
19273
|
-
getPendingKeys,
|
|
19274
|
-
getRotations,
|
|
19637
|
+
isBulk,
|
|
19275
19638
|
isClefHsmArn,
|
|
19276
|
-
isHighEntropy,
|
|
19277
19639
|
isKmsEnvelope,
|
|
19640
|
+
isLintable,
|
|
19641
|
+
isMergeAware,
|
|
19642
|
+
isMigratable,
|
|
19278
19643
|
isPackedArtifact,
|
|
19279
|
-
|
|
19644
|
+
isRecipientManaged,
|
|
19645
|
+
isRotatable,
|
|
19646
|
+
isStructural,
|
|
19280
19647
|
keyPreview,
|
|
19281
|
-
loadIgnoreRules,
|
|
19282
|
-
loadMetadata,
|
|
19283
19648
|
loadRequests,
|
|
19284
19649
|
markPending,
|
|
19285
|
-
markPendingWithRetry,
|
|
19286
19650
|
markResolved,
|
|
19287
|
-
matchPatterns,
|
|
19288
|
-
mergeMetadataContents,
|
|
19289
19651
|
mergeMetadataFiles,
|
|
19290
|
-
metadataPath,
|
|
19291
19652
|
parse,
|
|
19292
|
-
parseDotenv,
|
|
19293
|
-
parseIgnoreContent,
|
|
19294
|
-
parseJson,
|
|
19295
19653
|
parseSignerKey,
|
|
19296
19654
|
parseYaml,
|
|
19297
19655
|
pkcs11UriToSyntheticArn,
|
|
19298
19656
|
readManifestYaml,
|
|
19299
19657
|
recordRotation,
|
|
19300
|
-
redactValue,
|
|
19301
19658
|
removeAccessRequest,
|
|
19302
19659
|
removeRotation,
|
|
19303
19660
|
requestsFilePath,
|
|
@@ -19309,22 +19666,18 @@ async function detectRepo(runner, repoRoot) {
|
|
|
19309
19666
|
resolveRecipientsForEnvironment,
|
|
19310
19667
|
resolveSopsPath,
|
|
19311
19668
|
runCompliance,
|
|
19312
|
-
saveMetadata,
|
|
19313
19669
|
saveRequests,
|
|
19314
|
-
|
|
19315
|
-
shannonEntropy,
|
|
19316
|
-
shouldIgnoreFile,
|
|
19317
|
-
shouldIgnoreMatch,
|
|
19318
|
-
signEd25519,
|
|
19319
|
-
signKms,
|
|
19670
|
+
shouldUseLinuxStdinFifo,
|
|
19320
19671
|
spawnKeyservice,
|
|
19321
19672
|
syntheticArnToPkcs11Uri,
|
|
19322
19673
|
tryBundledKeyservice,
|
|
19323
19674
|
upsertRequest,
|
|
19324
19675
|
validateAgePublicKey,
|
|
19676
|
+
validateAwsKmsArn,
|
|
19325
19677
|
validatePackedArtifact,
|
|
19326
19678
|
validateResetScope,
|
|
19327
19679
|
verifySignature,
|
|
19680
|
+
wrapWithLinuxStdinFifo,
|
|
19328
19681
|
writeManifestYaml,
|
|
19329
19682
|
writeManifestYamlRaw,
|
|
19330
19683
|
writeSchema,
|