build-raptor 0.136.0 → 0.138.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/build-raptor.js +161 -152
  2. package/index.js +159 -150
  3. package/package.json +3 -3
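The bulk of the churn in both bundles is a single import migration: namespace imports of fs-extra are replaced by Node's built-in fs module for the pass-through calls (readFile, writeFile, rm, readdir, and friends) plus the fs-extra/esm subpath for the helpers that have no fs equivalent (pathExists, mkdirp, ensureDir, readJSON). The rest is esbuild renumbering the deduplicated identifiers (fs3 becomes fs6, promises becomes promises2, and so on) and, in run(), handing execa the file descriptor of the opened FileHandle and closing the handle directly. A minimal sketch of the import pattern, assuming fs-extra v11 (whose ESM subpath exports only fs-extra's own methods, not the fs pass-throughs); the file name demo.txt is illustrative only, not taken from the diff:

  // Before (0.136.0): one namespace import covered both the fs pass-throughs and the helpers.
  //   import * as fse from "fs-extra";
  //   await fse.writeFile(p, data);   // pass-through of fs.promises.writeFile
  //   await fse.pathExists(p);        // fs-extra helper
  // After (0.138.0): native fs handles the core calls, fs-extra/esm supplies only the helpers.
  import fs from "fs";
  import fse from "fs-extra/esm";
  const p = "demo.txt";                    // hypothetical path, for illustration
  await fs.promises.writeFile(p, "hello"); // core call now goes through fs.promises
  console.log(await fse.pathExists(p));    // fs-extra helper, still available -> true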
package/index.js CHANGED
@@ -2,7 +2,8 @@ var __defProp = Object.defineProperty;
  var __name = (target, value) => __defProp(target, "name", { value, configurable: true });

  // modules/build-raptor-core/dist/src/breakdown.js
- import * as fse5 from "fs-extra";
+ import fs5 from "fs";
+ import fse5 from "fs-extra/esm";

  // modules/misc/dist/src/maps.js
  function hardGet(map, key) {
@@ -121,13 +122,14 @@ function errorLike(err) {
  __name(errorLike, "errorLike");

  // modules/misc/dist/src/directory-scanner.js
- import * as fs from "fs";
- import * as fse2 from "fs-extra";
+ import * as fs2 from "fs";
+ import fse2 from "fs-extra/esm";
  import * as path from "path";

  // modules/misc/dist/src/misc.js
  import * as crypto from "crypto";
- import * as fse from "fs-extra";
+ import fs from "fs";
+ import fse from "fs-extra/esm";
  import jsonStringify from "safe-stable-stringify";
  import * as util from "util";
  function computeObjectHash(input) {
@@ -144,7 +146,7 @@ async function dumpFile(inputPath, output) {
  throw new Error(`Cannot dump non existing file: ${inputPath}`);
  }
  return new Promise((res, rej) => {
- const inputStream = fse.createReadStream(inputPath);
+ const inputStream = fs.createReadStream(inputPath);
  inputStream.on("end", () => {
  res();
  });
@@ -278,9 +280,9 @@ var DirectoryScanner = class _DirectoryScanner {
  readContent(resolvedPath, stat) {
  try {
  if (stat.isSymbolicLink()) {
- return Buffer.from(fs.readlinkSync(resolvedPath));
+ return Buffer.from(fs2.readlinkSync(resolvedPath));
  }
- return fs.readFileSync(resolvedPath);
+ return fs2.readFileSync(resolvedPath);
  } catch (e) {
  throw new Error(`failed to read ${stat.isSymbolicLink() ? "symbolic link" : "file"} at ${resolvedPath}: ${e}`);
  }
@@ -292,7 +294,7 @@ var DirectoryScanner = class _DirectoryScanner {
  // does the sorting).
  async readDirSorted(resolvedPath) {
  try {
- const ret = await fse2.readdir(resolvedPath);
+ const ret = await fs2.promises.readdir(resolvedPath);
  ret.sort();
  return ret;
  } catch (e) {
@@ -301,7 +303,7 @@
  }
  async getStat(resolvedPath) {
  try {
- return fs.lstatSync(resolvedPath);
+ return fs2.lstatSync(resolvedPath);
  } catch (e) {
  throw new Error(`Cannot stat ${resolvedPath}: ${e}`);
  }
@@ -321,8 +323,8 @@ var SinglePublisher = class {
  this.subscribers.push(subscriber);
  }
  async publish(e) {
- const promises3 = this.subscribers.map((s) => s(e));
- await Promise.all(promises3);
+ const promises6 = this.subscribers.map((s) => s(e));
+ await Promise.all(promises6);
  }
  };
  var TypedPublisher = class {
@@ -418,11 +420,12 @@ var Executor = class {
  };

  // modules/misc/dist/src/file-system-storage-client.js
- import * as fse3 from "fs-extra";
+ import fs4 from "fs";
+ import fse3 from "fs-extra/esm";
  import * as path3 from "path";

  // modules/misc/dist/src/clean-directory.js
- import * as fs2 from "fs";
+ import * as fs3 from "fs";
  import * as path2 from "path";
  function cleanDirectory(directoryPath, deletionFactor, triggerCleanupIfByteSizeExceeds) {
  const size = calculateDirectorySize(directoryPath);
@@ -435,9 +438,9 @@ function cleanDirectory(directoryPath, deletionFactor, triggerCleanupIfByteSizeE
  __name(cleanDirectory, "cleanDirectory");
  function calculateDirectorySize(directoryPath) {
  let ret = 0;
- for (const f of fs2.readdirSync(directoryPath)) {
+ for (const f of fs3.readdirSync(directoryPath)) {
  const resolved = path2.join(directoryPath, f);
- const stats = fs2.statSync(resolved);
+ const stats = fs3.statSync(resolved);
  ret += stats.size;
  }
  return ret;
@@ -446,18 +449,18 @@ __name(calculateDirectorySize, "calculateDirectorySize");
  function deleteFiles(directoryPath, deletionFactor) {
  let files;
  try {
- files = fs2.readdirSync(directoryPath);
+ files = fs3.readdirSync(directoryPath);
  } catch (e) {
  throw new Error(`failed when reading ${directoryPath}: ${e}`);
  }
- const mapped = files.map((f) => ({ f, atime: fs2.statSync(path2.join(directoryPath, f)).atime.toISOString() }));
+ const mapped = files.map((f) => ({ f, atime: fs3.statSync(path2.join(directoryPath, f)).atime.toISOString() }));
  const sorted = sortBy(mapped, (at) => at.atime);
  const numFilesToDelete = Math.min(sorted.length, Math.floor(sorted.length * deletionFactor));
  const ret = sorted.slice(0, numFilesToDelete).map((at) => at.f);
  for (const f of ret) {
  const filePath = path2.join(directoryPath, f);
  try {
- fs2.unlinkSync(filePath);
+ fs3.unlinkSync(filePath);
  } catch (e) {
  throw new Error(`cleanup of ${filePath} failed: ${e}`);
  }
@@ -489,26 +492,26 @@ var FilesystemStorageClient = class _FilesystemStorageClient {
  return path3.join(this.dir, `${middle}-${s}`);
  }
  async putObject(key, content) {
- await fse3.writeFile(this.keyToPath(key), content);
+ await fs4.promises.writeFile(this.keyToPath(key), content);
  }
  async putContentAddressable(content) {
  const ret = computeHash(content);
  const p = this.hashToPath("cas", ret);
- await fse3.writeFile(p, content);
+ await fs4.promises.writeFile(p, content);
  return ret;
  }
  async getContentAddressable(hash) {
  const p = this.hashToPath("cas", hash);
- return await fse3.readFile(p);
+ return await fs4.promises.readFile(p);
  }
  async getObject(key, type = "string") {
  const p = this.keyToPath(key);
  try {
  if (type === "string") {
- return await fse3.readFile(p, "utf-8");
+ return await fs4.promises.readFile(p, "utf-8");
  }
  if (type === "buffer") {
- return await fse3.readFile(p);
+ return await fs4.promises.readFile(p);
  }
  shouldNeverHappen(type);
  } catch (e) {
@@ -521,7 +524,7 @@ var FilesystemStorageClient = class _FilesystemStorageClient {
  };

  // modules/misc/dist/src/folderify.js
- import * as fse4 from "fs-extra";
+ import fse4 from "fs-extra/esm";
  import * as Tmp from "tmp-promise";

  // modules/misc/dist/src/graph.js
@@ -865,11 +868,11 @@ __name(intify, "intify");

  // modules/misc/dist/src/promises.js
  import PQueue2 from "p-queue";
- function promises(input) {
+ function promises2(input) {
  const adjusted = new DefaultReifiable(input.map((p) => Promise.resolve(p)));
  return new PromisesImpl(adjusted, (t) => Promise.resolve(t), () => Promise.resolve(true));
  }
- __name(promises, "promises");
+ __name(promises2, "promises");
  var PromisesImpl = class _PromisesImpl {
  static {
  __name(this, "PromisesImpl");
@@ -1125,7 +1128,7 @@ var Breakdown = class {
  if (!outputFile) {
  return [];
  }
- const content = await fse5.readFile(outputFile, "utf8");
+ const content = await fs5.promises.readFile(outputFile, "utf8");
  const trimmed = content.trim();
  return format3 === "string" ? trimmed : trimmed.split("\n");
  }
@@ -1142,7 +1145,7 @@ var Breakdown = class {
  if (!await fse5.pathExists(resolved)) {
  return void 0;
  }
- const content = await fse5.readFile(resolved, "utf8");
+ const content = await fs5.promises.readFile(resolved, "utf8");
  return content.trim().split("\n");
  }
  };
@@ -1199,11 +1202,11 @@ function BuildRunId(input) {
  __name(BuildRunId, "BuildRunId");

  // modules/build-raptor-core/dist/src/engine-bootstrapper.js
- import * as fs9 from "fs";
+ import * as fs14 from "fs";
  import * as JsoncParser from "jsonc-parser";

  // modules/logger/dist/src/logger.js
- import * as fs3 from "fs";
+ import * as fs6 from "fs";
  import { format } from "logform";
  import * as path6 from "path";
  import jsonStringify2 from "safe-stable-stringify";
@@ -1214,9 +1217,9 @@ var criticalityLegend = {
  low: 200
  };
  function createDefaultLogger(logFile, pickiness, logLevel, uiStream) {
- const stat = fs3.statSync(logFile, { throwIfNoEntry: false });
+ const stat = fs6.statSync(logFile, { throwIfNoEntry: false });
  if (stat && stat.size > 0) {
- fs3.rmSync(logFile, { force: true });
+ fs6.rmSync(logFile, { force: true });
  }
  return new FileLogger(logFile, pickiness, logLevel, uiStream);
  }
@@ -1309,13 +1312,13 @@ import { z as z7 } from "zod";

  // modules/build-raptor-core/dist/src/engine.js
  import child_process from "child_process";
- import * as fs5 from "fs";
- import * as fse10 from "fs-extra";
+ import * as fs10 from "fs";
+ import fse8 from "fs-extra/esm";
  import ignore from "ignore";
  import * as path10 from "path";

  // modules/build-raptor-core/dist/src/fingerprint-ledger.js
- import * as fse6 from "fs-extra";
+ import fse6 from "fs-extra/esm";
  import { z as z2 } from "zod";
  var LedgerItem = z2.union([
  z2.object({
@@ -1444,8 +1447,7 @@ var PersistedFingerprintLedger = class {
  var TRUNCATION_THRESHOLD = 200 * 1e3 * 1e3;

  // modules/build-raptor-core/dist/src/fingerprinter.js
- import * as fs4 from "fs";
- import * as fse7 from "fs-extra";
+ import * as fs7 from "fs";
  import * as path7 from "path";

  // modules/build-raptor-core/dist/src/hasher.js
@@ -1549,7 +1551,7 @@ var Fingerprinter = class {
  const respectGitIgnore = this.dirScanner.isValid(pathInRepo, stat);
  const active = this.dirScanner.isValid(pathInRepo, stat);
  if (!stat.isDirectory()) {
- const content = await readFile4(resolved);
+ const content = await readFile(resolved);
  const hasher2 = new Hasher(pathInRepo, this.seed);
  hasher2.update(content);
  return await this.store(hasher2, active, content.toString("utf-8"));
@@ -1577,17 +1579,17 @@ var Fingerprinter = class {
  return { hasher, active };
  }
  };
- async function readFile4(p) {
+ async function readFile(p) {
  try {
- return await fse7.readFile(p);
+ return await fs7.promises.readFile(p);
  } catch (e) {
  throw new Error(`Failed to read ${p}: ${e}`);
  }
  }
- __name(readFile4, "readFile");
+ __name(readFile, "readFile");
  function statPath(p) {
  try {
- return fs4.statSync(p, { throwIfNoEntry: false });
+ return fs7.statSync(p, { throwIfNoEntry: false });
  } catch (e) {
  throw new Error(`Failed to stat ${p}: ${e}`);
  }
@@ -1595,7 +1597,7 @@ function statPath(p) {
  __name(statPath, "statPath");
  async function readDir(p) {
  try {
- return await fse7.readdir(p, { withFileTypes: true });
+ return await fs7.promises.readdir(p, { withFileTypes: true });
  } catch (e) {
  throw new Error(`Failed to read dir ${p}: ${e}`);
  }
@@ -1926,7 +1928,7 @@ var Planner = class {
  };

  // modules/build-raptor-core/dist/src/purger.js
- import * as fse8 from "fs-extra";
+ import fs8 from "fs";
  var Purger = class {
  static {
  __name(this, "Purger");
@@ -1936,10 +1938,10 @@ var Purger = class {
  this.repoRootDir = repoRootDir;
  }
  async removeLocations(outputLocations) {
- await promises(outputLocations).forEach(20, async (p) => {
+ await promises2(outputLocations).forEach(20, async (p) => {
  const resolved = this.repoRootDir.resolve(p);
  this.logger.info(`purging ${resolved}`);
- await fse8.rm(resolved, { recursive: true, force: true });
+ await fs8.promises.rm(resolved, { recursive: true, force: true });
  });
  }
  async purgeOutputsOfTask(task) {
@@ -1958,7 +1960,8 @@ function shouldPurge(loc) {
  __name(shouldPurge, "shouldPurge");

  // modules/build-raptor-core/dist/src/task-executor.js
- import * as fse9 from "fs-extra";
+ import fs9 from "fs";
+ import fse7 from "fs-extra/esm";
  import * as path9 from "path";
  var TaskExecutor = class {
  static {
@@ -2036,7 +2039,7 @@ var SingleTaskExecutor = class {
  }
  async postProcess(status, outputFile, time) {
  if (this.shouldDiagnose) {
- const content = fse9.readFileSync(outputFile, "utf-8");
+ const content = fs9.readFileSync(outputFile, "utf-8");
  this.diagnose(`content of ${outputFile} is ${content}`);
  }
  await this.eventPublisher.publish("executionEnded", {
@@ -2069,9 +2072,9 @@ var SingleTaskExecutor = class {
  }
  async validateOutputs() {
  const t = this.task;
- const missing = await promises(t.outputLocations).filter(async (loc) => {
+ const missing = await promises2(t.outputLocations).filter(async (loc) => {
  const resolved = this.model.rootDir.resolve(loc.pathInRepo);
- const exists = await fse9.pathExists(resolved);
+ const exists = await fse7.pathExists(resolved);
  return !exists;
  }).reify(100);
  if (!missing.length) {
@@ -2226,7 +2229,7 @@ ${formatted}`);
  this.diagnose(`purging outputs`);
  const taskNames = [this.taskName];
  const tasks = taskNames.map((tn) => this.tracker.getTask(tn));
- await promises(tasks).forEach(20, async (task) => {
+ await promises2(tasks).forEach(20, async (task) => {
  await this.purger.purgeOutputsOfTask(task);
  });
  }
@@ -2427,7 +2430,7 @@ var Engine = class {
  }
  async run(buildRunId) {
  this.steps.transmit({ step: "BUILD_RUN_STARTED", buildRunId, commitHash: this.options.commitHash });
- fs5.writeFileSync(path10.join(this.options.buildRaptorDir, "build-run-id"), buildRunId);
+ fs10.writeFileSync(path10.join(this.options.buildRaptorDir, "build-run-id"), buildRunId);
  await this.fingerprintLedger.updateRun(buildRunId);
  await this.repoProtocol.initialize(this.rootDir, this.eventPublisher, this.options.config.outDirName, this.options.config.repoProtocol);
  try {
@@ -2515,8 +2518,8 @@ ${JSON.stringify(taskList, null, 2)}`);
  async loadModel(buildRunId) {
  const gitIgnorePath = this.rootDir.resolve(PathInRepo(".gitignore"));
  const ig = ignore();
- if (await fse10.pathExists(gitIgnorePath)) {
- const gitIgnoreContent = await fse10.readFile(gitIgnorePath, "utf8");
+ if (await fse8.pathExists(gitIgnorePath)) {
+ const gitIgnoreContent = await fs10.promises.readFile(gitIgnorePath, "utf8");
  const lines = gitIgnoreContent.split("\n");
  this.logger.info(`Found a .gitignore file:
  ${JSON.stringify(lines, null, 2)}`);
@@ -2864,7 +2867,7 @@ var Step = z4.discriminatedUnion("step", [
  var StepByStep = Step.array();

  // modules/build-raptor-core/dist/src/step-by-step-transmitter.js
- import * as fs6 from "fs";
+ import * as fs11 from "fs";
  import * as util2 from "util";
  var StepByStepTransmitter = class {
  static {
@@ -2896,7 +2899,7 @@ var StepByStepTransmitter = class {
  return;
  }
  const parsed = StepByStep.parse(this.steps);
- fs6.writeFileSync(this.stepByStepFile, JSON.stringify(parsed));
+ fs11.writeFileSync(this.stepByStepFile, JSON.stringify(parsed));
  this.logger.info(`step by step written to ${this.stepByStepFile}`);
  }
  async dynamicallyLoadProcessor(stepByStepProcessorModuleName, lookFor = "processor") {
@@ -2912,9 +2915,9 @@ var StepByStepTransmitter = class {
  };

  // modules/build-raptor-core/dist/src/task-store.js
- import * as fs8 from "fs";
+ import * as fs13 from "fs";
  import { createWriteStream } from "fs";
- import * as fse11 from "fs-extra";
+ import fse9 from "fs-extra/esm";
  import * as path12 from "path";
  import * as stream from "stream";
  import * as Tmp2 from "tmp-promise";
@@ -2923,7 +2926,7 @@ import * as zlib from "zlib";
  import { z as z6 } from "zod";

  // modules/build-raptor-core/dist/src/tar-stream.js
- import * as fs7 from "fs";
+ import * as fs12 from "fs";
  import * as path11 from "path";
  import { z as z5 } from "zod";
  var Info = z5.object({
@@ -3030,7 +3033,7 @@ var TarStream = class _TarStream {
  const resolved = resolve2(parsedInfo);
  const date = new Date(Number(parsedInfo.mtime));
  try {
- fs7.utimesSync(resolved, date, date);
+ fs12.utimesSync(resolved, date, date);
  } catch (e) {
  logger.error(`utimeSync failure: ${JSON.stringify({ resolved, date, parsedInfo })}`, e);
  throw new Error(`could not update time of ${resolved} to ${date.toISOString()}: ${e}`);
@@ -3059,19 +3062,19 @@ var TarStream = class _TarStream {
  source.copy(contentBuf, 0, offset, contentEndOffset);
  offset = contentEndOffset;
  const resolved = resolve2(parsedInfo);
- fs7.mkdirSync(path11.dirname(resolved), { recursive: true });
+ fs12.mkdirSync(path11.dirname(resolved), { recursive: true });
  if (parsedInfo.isSymlink) {
  symlinks.push({ info: parsedInfo, content: contentBuf });
  } else {
- fs7.writeFileSync(resolved, contentBuf, { mode: parsedInfo.mode });
+ fs12.writeFileSync(resolved, contentBuf, { mode: parsedInfo.mode });
  updateStats(parsedInfo);
  }
  }
  for (const { info, content } of symlinks) {
  const resolved = resolve2(info);
- fs7.mkdirSync(path11.dirname(resolved), { recursive: true });
+ fs12.mkdirSync(path11.dirname(resolved), { recursive: true });
  const linkTarget = content.toString("utf-8");
- fs7.symlinkSync(linkTarget, resolved);
+ fs12.symlinkSync(linkTarget, resolved);
  if (!path11.isAbsolute(linkTarget)) {
  updateStats(info);
  }
@@ -3173,13 +3176,13 @@ var TaskStore = class {
  return { buffer: emptyBuffer(), publicFiles: {} };
  }
  this.trace?.push(`bundling ${JSON.stringify(outputs)}`);
- const pairs = await promises(outputs.filter((o) => o.isPublic)).map(async (o) => {
+ const pairs = await promises2(outputs.filter((o) => o.isPublic)).map(async (o) => {
  const resolved = this.repoRootDir.resolve(o.pathInRepo);
- const stat = fs8.statSync(resolved);
+ const stat = fs13.statSync(resolved);
  if (!stat.isFile()) {
  throw new BuildFailedError(`cannot publish an output location that is not a file: "${o.pathInRepo.val}"`);
  }
- const content = fs8.readFileSync(resolved);
+ const content = fs13.readFileSync(resolved);
  const h = await this.client.putContentAddressable(content);
  return [o.pathInRepo.val, h];
  }).reify(STORAGE_CONCURRENCY);
@@ -3195,7 +3198,7 @@ var TaskStore = class {
  const scanner = new DirectoryScanner(this.repoRootDir.resolve());
  for (const curr of outputs.filter((o) => !o.isPublic)) {
  const o = curr.pathInRepo;
- const exists = await fse11.pathExists(this.repoRootDir.resolve(o));
+ const exists = await fse9.pathExists(this.repoRootDir.resolve(o));
  if (!exists) {
  throw new Error(`Output location <${o}> does not exist (under <${this.repoRootDir}>)`);
  }
@@ -3207,7 +3210,7 @@ var TaskStore = class {
  throw new Error(`Cannot handle non-files in output: ${p} (under ${this.repoRootDir})`);
  }
  const resolved = this.repoRootDir.resolve(PathInRepo(p));
- const { mtime, atime, ctime } = fs8.statSync(resolved);
+ const { mtime, atime, ctime } = fs13.statSync(resolved);
  this.trace?.push(`adding an entry: ${stat.mode.toString(8)} ${p} ${mtime.toISOString()}`);
  if (stat.isSymbolicLink()) {
  const linkTarget = content.toString("utf-8");
@@ -3225,7 +3228,7 @@ var TaskStore = class {
  const gzip = zlib.createGzip();
  const destination = createWriteStream(tempFile.path);
  await pipeline2(source, gzip, destination);
- const gzipped = await fse11.readFile(tempFile.path);
+ const gzipped = await fs13.promises.readFile(tempFile.path);
  this.trace?.push(`gzipped is ${gzipped.length} long`);
  const ret = Buffer.concat([lenBuf, metadataBuf, gzipped]);
  this.trace?.push(`bundling digest of ret is ${computeObjectHash({ data: ret.toString("hex") })}`);
@@ -3239,8 +3242,8 @@ var TaskStore = class {
  const unparsed = JSON.parse(buf.slice(LEN_BUF_SIZE, LEN_BUF_SIZE + metadataLen).toString("utf-8"));
  const metadata = Metadata.parse(unparsed);
  const outputs = metadata.outputs.map((at) => PathInRepo(at));
- const removeOutputDir = /* @__PURE__ */ __name(async (o) => await fse11.rm(this.repoRootDir.resolve(o), { recursive: true, force: true }), "removeOutputDir");
- await promises(outputs).map(async (o) => await removeOutputDir(o)).reify(20);
+ const removeOutputDir = /* @__PURE__ */ __name(async (o) => await fs13.promises.rm(this.repoRootDir.resolve(o), { recursive: true, force: true }), "removeOutputDir");
+ await promises2(outputs).map(async (o) => await removeOutputDir(o)).reify(20);
  const source = buf.slice(LEN_BUF_SIZE + metadataLen);
  const unzipped = await unzip2(source);
  try {
@@ -3248,7 +3251,7 @@ var TaskStore = class {
  } catch (e) {
  throw new Error(`unbundling a buffer (${buf.length} bytes) has failed: ${e}`);
  }
- await promises(Object.keys(metadata.publicFiles)).forEach(STORAGE_CONCURRENCY, async (pir) => {
+ await promises2(Object.keys(metadata.publicFiles)).forEach(STORAGE_CONCURRENCY, async (pir) => {
  const pathInRepo = PathInRepo(pir);
  const resolved = this.repoRootDir.resolve(pathInRepo);
  const hash = metadata.publicFiles[pathInRepo.val];
@@ -3256,7 +3259,7 @@ var TaskStore = class {
  throw new Error(`hash not found for "${pathInRepo}"`);
  }
  const buf2 = await this.client.getContentAddressable(hash);
- fs8.writeFileSync(resolved, buf2);
+ fs13.writeFileSync(resolved, buf2);
  });
  return { files: outputs, publicFiles: metadata.publicFiles };
  }
@@ -3356,7 +3359,7 @@ var EngineBootstrapper = class _EngineBootstrapper {
  }
  resolveConfigFile() {
  const arr = _EngineBootstrapper.CONFIG_FILES.map((at) => PathInRepo(at));
- const existings = arr.flatMap((at) => fs9.existsSync(this.rootDir.resolve(at)) ? [at] : []);
+ const existings = arr.flatMap((at) => fs14.existsSync(this.rootDir.resolve(at)) ? [at] : []);
  if (existings.length > 1) {
  const quoted = existings.map((at) => `"${at}"`);
  throw new Error(`Found competing config files: ${quoted.join(", ")}. To avoid confusion, you must keep just one.`);
@@ -3369,10 +3372,10 @@ var EngineBootstrapper = class _EngineBootstrapper {
  }
  const p = this.rootDir.resolve(pathToConfigFile);
  try {
- if (!fs9.existsSync(p)) {
+ if (!fs14.existsSync(p)) {
  return BuildRaptorConfig.parse({});
  }
- const content = fs9.readFileSync(p, "utf-8");
+ const content = fs14.readFileSync(p, "utf-8");
  const errors = [];
  const parsed = JsoncParser.parse(content, errors, { allowTrailingComma: true, allowEmptyContent: true });
  const e = errors.at(0);
@@ -3467,14 +3470,14 @@ function summarizeTask(t) {
  __name(summarizeTask, "summarizeTask");

  // modules/build-raptor-core/dist/src/find-repo-dir.js
- import fs10 from "fs";
+ import fs15 from "fs";
  import path14 from "path";
  function findRepoDir(dir3) {
  while (true) {
  const pj = path14.join(dir3, "package.json");
- const ex = fs10.existsSync(pj);
+ const ex = fs15.existsSync(pj);
  if (ex) {
- const content = JSON.parse(fs10.readFileSync(pj, "utf-8"));
+ const content = JSON.parse(fs15.readFileSync(pj, "utf-8"));
  const keys = Object.keys(content);
  if (keys.includes("workspaces")) {
  return dir3;
@@ -3490,8 +3493,8 @@ function findRepoDir(dir3) {
  __name(findRepoDir, "findRepoDir");

  // modules/build-raptor/dist/src/build-raptor-cli.js
- import fs12 from "fs";
- import * as fse13 from "fs-extra";
+ import fs17 from "fs";
+ import fse11 from "fs-extra/esm";
  import * as os from "os";
  import * as path16 from "path";

@@ -3641,8 +3644,8 @@ import { hideBin } from "yargs/helpers";
  // modules/yarn-repo-protocol/dist/src/yarn-repo-protocol.js
  import escapeStringRegexp from "escape-string-regexp";
  import execa from "execa";
- import * as fs11 from "fs";
- import * as fse12 from "fs-extra";
+ import fs16 from "fs";
+ import fse10 from "fs-extra/esm";
  import * as path15 from "path";

  // modules/reporter-output/dist/src/reporter-output.js
@@ -3863,23 +3866,23 @@ ${formattedIssues.join("\n")}`);
  async generateSymlinksToPackages(rootDir, units) {
  const nodeModules = PathInRepo("node_modules");
  const nodeModulesLoc = rootDir.resolve(nodeModules);
- await fse12.mkdirp(rootDir.resolve(nodeModules));
+ await fse10.mkdirp(rootDir.resolve(nodeModules));
  for (const u of units) {
  const link = nodeModules.expand(u.id);
  const linkLoc = rootDir.resolve(link);
- const exists = await fse12.pathExists(linkLoc);
+ const exists = await fse10.pathExists(linkLoc);
  if (exists) {
  continue;
  }
  const packageLoc = rootDir.resolve(u.pathInRepo);
  const packageFromNodeModules = path15.relative(nodeModulesLoc, packageLoc);
- await fse12.symlink(packageFromNodeModules, linkLoc);
+ await fs16.promises.symlink(packageFromNodeModules, linkLoc);
  }
  }
  async generateTsConfigFiles(rootDir, units, graph) {
  const rootBase = rootDir.resolve(PathInRepo(this.tsconfigBaseName));
- const rootBaseExists = await fse12.pathExists(rootBase);
- const rootBaseContent = rootBaseExists ? await fse12.readJSON(rootBase) : {};
+ const rootBaseExists = await fse10.pathExists(rootBase);
+ const rootBaseContent = rootBaseExists ? await fse10.readJSON(rootBase) : {};
  const defaultOptions = {
  module: "CommonJS",
  inlineSourceMap: true,
@@ -3897,8 +3900,8 @@ ${formattedIssues.join("\n")}`);
  for (const u of units) {
  const deps = graph.neighborsOf(u.id);
  const localBase = rootDir.resolve(u.pathInRepo.expand(this.tsconfigBaseName));
- const localBaseExists = await fse12.pathExists(localBase);
- const localBaseContent = localBaseExists ? await fse12.readJSON(localBase) : {};
+ const localBaseExists = await fse10.pathExists(localBase);
+ const localBaseContent = localBaseExists ? await fse10.readJSON(localBase) : {};
  const additions = [...localBaseContent.include ?? [], ...rootBaseContent.include ?? []];
  const tsconf = {
  ...localBaseExists ? { extends: `./${this.tsconfigBaseName}` } : rootBaseExists ? { extends: path15.relative(u.pathInRepo.val, this.tsconfigBaseName) } : {},
@@ -3926,15 +3929,15 @@ ${formattedIssues.join("\n")}`);
  }
  const content = JSON.stringify(tsconf, null, 2);
  const p = rootDir.resolve(u.pathInRepo.expand("tsconfig.json"));
- if (await fse12.pathExists(p)) {
- const existing = JSON.stringify(await fse12.readJSON(p, "utf-8"), null, 2);
+ if (await fse10.pathExists(p)) {
+ const existing = JSON.stringify(await fse10.readJSON(p, "utf-8"), null, 2);
  if (existing.trim() === content.trim()) {
  this.logger.info(`skipping generation of tsconfig.json in ${u.id} - no changes`);
  continue;
  }
  }
  this.logger.info(`updating the tsconfig.json file of ${u.id}`);
- await fse12.writeFile(p, content);
+ await fs16.promises.writeFile(p, content);
  }
  }
  async close() {
@@ -3942,9 +3945,15 @@ ${formattedIssues.join("\n")}`);
  async run(cmd, args, dir3, outputFile, additionalEnvVars = {}) {
  const summary = `<${dir3}$ ${cmd} ${args.join(" ")}>`;
  this.logger.info(`Dispatching ${summary}. output: ${outputFile}`);
- const out = await fse12.open(outputFile, "w");
+ const out = await fs16.promises.open(outputFile, "w");
  try {
- const p = await execa(cmd, args, { cwd: dir3, stdout: out, stderr: out, reject: false, env: additionalEnvVars });
+ const p = await execa(cmd, args, {
+ cwd: dir3,
+ stdout: out.fd,
+ stderr: out.fd,
+ reject: false,
+ env: additionalEnvVars
+ });
  this.logger.info(`exitCode of ${cmd} ${args.join(" ")} is ${p.exitCode}`);
  if (p.exitCode === 0) {
  return "OK";
@@ -3954,7 +3963,7 @@ ${formattedIssues.join("\n")}`);
  this.logger.error(`execution of ${summary} failed`, e);
  return "CRASH";
  } finally {
- await fse12.close(out);
+ await out.close();
  }
  }
  // TODO(imaman): this should be retired. custom build tasks should be used instead.
@@ -3968,8 +3977,8 @@ ${formattedIssues.join("\n")}`);
  }
  const tempFile = await getTempFile();
  const ret = await this.run("npm", ["run", this.scriptNames.postBuild], dir3, tempFile);
- const toAppend = await fse12.readFile(tempFile);
- await fse12.appendFile(outputFile, toAppend);
+ const toAppend = await fs16.promises.readFile(tempFile);
+ await fs16.promises.appendFile(outputFile, toAppend);
  return ret;
  }
  async checkBuiltFiles(dir3) {
@@ -3978,7 +3987,7 @@ ${formattedIssues.join("\n")}`);
  const inputDir = path15.join(dir3, codeDir);
  const paths = await DirectoryScanner.listPaths(inputDir, { startingPointMustExist: false });
  for (const p of paths) {
- inputFiles.set(p, fs11.statSync(path15.join(inputDir, p)).mode);
+ inputFiles.set(p, fs16.statSync(path15.join(inputDir, p)).mode);
  }
  const d = path15.join(dir3, `${this.dist()}/${codeDir}`);
  const distFiles = await DirectoryScanner.listPaths(d, { startingPointMustExist: false });
@@ -4004,9 +4013,9 @@ ${formattedIssues.join("\n")}`);
  const resolved = path15.join(d, f);
  if (orig === void 0) {
  this.logger.info(`deleting unmatched dist file: ${f}`);
- fs11.rmSync(resolved);
+ fs16.rmSync(resolved);
  } else {
- fs11.chmodSync(resolved, orig);
+ fs16.chmodSync(resolved, orig);
  }
  }
  }
@@ -4026,7 +4035,7 @@ ${formattedIssues.join("\n")}`);
  throw new Error(`cannot execute ${taskName} when its feature toggle is set to ${ft}`);
  }, "off"),
  dormant: /* @__PURE__ */ __name(async () => {
- fs11.writeFileSync(outputFile, "");
+ fs16.writeFileSync(outputFile, "");
  const ret = "OK";
  return ret;
  }, "dormant"),
@@ -4061,17 +4070,17 @@ ${formattedIssues.join("\n")}`);
  const hasSpecFiles = await this.hasSpecFiles(testsDir);
  if (!hasSpecFiles) {
  this.logger.info(`No *.spec.ts files found in ${testsDir}, skipping test execution`);
- await fs11.promises.writeFile(outputFile, `No test files found in ${testsDir}
+ await fs16.promises.writeFile(outputFile, `No test files found in ${testsDir}
  `);
  const dirInRepo = this.state.rootDir.unresolve(dir3);
  const resolvedSummaryFile = this.state.rootDir.resolve(dirInRepo.expand(this.testRunSummaryFile));
- fs11.writeFileSync(resolvedSummaryFile, JSON.stringify({}));
+ fs16.writeFileSync(resolvedSummaryFile, JSON.stringify({}));
  const jof = path15.join(dir3, JEST_OUTPUT_FILE);
- fs11.writeFileSync(jof, JSON.stringify([]));
+ fs16.writeFileSync(jof, JSON.stringify([]));
  const tempFile2 = await getTempFile();
  const validateResult2 = await this.runValidate(u, dir3, tempFile2);
- const toAppend2 = await fse12.readFile(tempFile2);
- await fse12.appendFile(outputFile, toAppend2);
+ const toAppend2 = await fs16.promises.readFile(tempFile2);
+ await fs16.promises.appendFile(outputFile, toAppend2);
  return validateResult2;
  }
  const tempFile = await getTempFile();
@@ -4080,8 +4089,8 @@ ${formattedIssues.join("\n")}`);
  testCommand ? this.runCustomTest(u.id, dir3, taskName, outputFile) : this.runJest(dir3, taskName, outputFile),
  this.runValidate(u, dir3, tempFile)
  ]);
- const toAppend = await fse12.readFile(tempFile);
- await fse12.appendFile(outputFile, toAppend);
+ const toAppend = await fs16.promises.readFile(tempFile);
+ await fs16.promises.appendFile(outputFile, toAppend);
  return switchOn(testResult, {
  CRASH: /* @__PURE__ */ __name(() => testResult, "CRASH"),
  FAIL: /* @__PURE__ */ __name(() => testResult, "FAIL"),
@@ -4090,22 +4099,22 @@ ${formattedIssues.join("\n")}`);
  }
  if (taskKind === "pack") {
  const ret = await this.pack(u, dir3);
- await fse12.writeFile(outputFile, "");
+ await fs16.promises.writeFile(outputFile, "");
  return ret;
  }
  if (taskKind === "publish-assets") {
  const scriptName = this.scriptNames.prepareAssets;
  const fullPath = path15.join(dir3, PREPARED_ASSETS_DIR);
- await fse12.rm(fullPath, { force: true, recursive: true });
- await fse12.mkdirp(fullPath);
+ await fs16.promises.rm(fullPath, { force: true, recursive: true });
+ await fse10.mkdirp(fullPath);
  const ret = await this.run("npm", ["run", scriptName], dir3, outputFile);
- const exists = await fse12.pathExists(fullPath);
+ const exists = await fse10.pathExists(fullPath);
  if (!exists) {
  throw new BuildFailedError(`Output file ${path15.basename(fullPath)} was not created by the ${scriptName} run script in ${dir3}`);
  }
- const files = await fse12.readdir(fullPath);
+ const files = await fs16.promises.readdir(fullPath);
  await Promise.all(files.map(async (f) => {
- const contentToPublish = await fse12.readFile(path15.join(fullPath, f));
+ const contentToPublish = await fs16.promises.readFile(path15.join(fullPath, f));
  this.logger.info(`unit ${u.id}: publishing asset ${f}`);
  const casAddress = await this.assetPublisher.publishAsset(u, contentToPublish, f);
  this.logger.info(`unit ${u.id}: asset ${f} published to cas ${casAddress}`);
@@ -4122,7 +4131,7 @@ ${formattedIssues.join("\n")}`);
  async runUberBuild(outputFile, taskName) {
  if (this.state.uberBuildPromise) {
  const ret2 = await this.state.uberBuildPromise;
- await fse12.writeFile(outputFile, ``);
+ await fs16.promises.writeFile(outputFile, ``);
  return ret2;
  }
  this.logger.info(`logging uberbuild in ${outputFile} (triggered by ${taskName})`);
@@ -4136,7 +4145,7 @@ ${formattedIssues.join("\n")}`);
  async runJest(dir3, taskName, outputFile) {
  const dirInRepo = this.state.rootDir.unresolve(dir3);
  const resolvedSummaryFile = this.state.rootDir.resolve(dirInRepo.expand(this.testRunSummaryFile));
- fs11.writeFileSync(resolvedSummaryFile, JSON.stringify({}));
+ fs16.writeFileSync(resolvedSummaryFile, JSON.stringify({}));
  const jof = path15.join(dir3, JEST_OUTPUT_FILE);
  const testsToRun = await this.computeTestsToRun(jof);
  const reporterOutputFile = (await Tmp4.file()).path;
@@ -4150,14 +4159,14 @@ ${formattedIssues.join("\n")}`);
  "--reporters",
  "default"
  ], dir3, outputFile, this.state.config.additionalJestEnvVars);
- const readStdout = /* @__PURE__ */ __name(() => fs11.readFileSync(outputFile, "utf-8").trim(), "readStdout");
- const latest = fs11.readFileSync(reporterOutputFile, "utf-8");
+ const readStdout = /* @__PURE__ */ __name(() => fs16.readFileSync(outputFile, "utf-8").trim(), "readStdout");
+ const latest = fs16.readFileSync(reporterOutputFile, "utf-8");
  if (latest.trim().length === 0) {
  const output = readStdout();
  if (output.length) {
  this.logger.print(`<No Jest tests were invoked. Jest output follows below. latest=${JSON.stringify(latest)}>
${output}`);
- fs11.writeFileSync(jof, JSON.stringify(emptyRerunList));
+ fs16.writeFileSync(jof, JSON.stringify(emptyRerunList));
  return "FAIL";
  }
  }
@@ -4197,7 +4206,7 @@ ${output}`);
  }
  });
  const summary = generateTestRunSummary(this.state.rootDir, reporterOutput);
- fs11.writeFileSync(resolvedSummaryFile, JSON.stringify(summary));
+ fs16.writeFileSync(resolvedSummaryFile, JSON.stringify(summary));
  const failingCases = reporterOutput.cases.filter((at) => switchOn(at.status, {
  disabled: /* @__PURE__ */ __name(() => false, "disabled"),
  failed: /* @__PURE__ */ __name(() => true, "failed"),
@@ -4207,7 +4216,7 @@ ${output}`);
  todo: /* @__PURE__ */ __name(() => false, "todo")
  }));
  const rerunList = sortBy(failingCases.map((at) => ({ fileName: at.fileName, testCaseFullName: at.testCaseFullName })), (at) => `${at.fileName} ${at.testCaseFullName}`);
- fs11.writeFileSync(jof, JSON.stringify(RerunList.parse(rerunList)));
+ fs16.writeFileSync(jof, JSON.stringify(RerunList.parse(rerunList)));
  return ret;
  }
  async runCustomTest(unitId, dir3, _taskName, outputFile) {
@@ -4218,7 +4227,7 @@ ${output}`);
  const commandPath = this.state.rootDir.resolve(PathInRepo(testCommand));
  const dirInRepo = this.state.rootDir.unresolve(dir3);
  const resolvedSummaryFile = this.state.rootDir.resolve(dirInRepo.expand(this.testRunSummaryFile));
- fs11.writeFileSync(resolvedSummaryFile, JSON.stringify({}));
+ fs16.writeFileSync(resolvedSummaryFile, JSON.stringify({}));
  const args = [
  dir3,
  // Package directory absolute path
@@ -4229,8 +4238,8 @@ ${output}`);
  ];
  const ret = await this.run(commandPath, args, dir3, outputFile);
  const jof = path15.join(dir3, JEST_OUTPUT_FILE);
- if (!fs11.existsSync(jof)) {
- fs11.writeFileSync(jof, JSON.stringify([]));
+ if (!fs16.existsSync(jof)) {
+ fs16.writeFileSync(jof, JSON.stringify([]));
  }
  return ret;
  }
@@ -4297,23 +4306,23 @@ ${output}`);
  const packDist = path15.join(path15.join(dir3, PACK_DIR), "dist");
  const packDistSrc = path15.join(packDist, this.src);
  const packDistDeps = path15.join(packDist, "deps");
- fs11.mkdirSync(packDistSrc, { recursive: true });
- fs11.cpSync(path15.join(dir3, this.dist("s")), packDistSrc, { recursive: true });
+ fs16.mkdirSync(packDistSrc, { recursive: true });
+ fs16.cpSync(path15.join(dir3, this.dist("s")), packDistSrc, { recursive: true });
  this.logger.info(`updated packagejson is ${JSON.stringify(packageDef)}`);
  const packageJsonPath = path15.join(dir3, PACK_DIR, "package.json");
  const depUnits = this.state.graph.traverseFrom(u.id, { direction: "forward" }).filter((at) => at !== u.id).map((at) => this.unitOf(at));
  for (const at of depUnits) {
  const d = path15.join(packDistDeps, at.id);
- fs11.mkdirSync(d, { recursive: true });
- fs11.cpSync(this.state.rootDir.resolve(at.pathInRepo.expand(this.dist("s"))), d, { recursive: true });
+ fs16.mkdirSync(d, { recursive: true });
+ fs16.cpSync(this.state.rootDir.resolve(at.pathInRepo.expand(this.dist("s"))), d, { recursive: true });
  }
  try {
- fs11.writeFileSync(packageJsonPath, JSON.stringify(packageDef, null, 2));
+ fs16.writeFileSync(packageJsonPath, JSON.stringify(packageDef, null, 2));
  } catch (e) {
  throw new Error(`Failed to write new package definition at ${packageJsonPath}: ${e}`);
  }
  const indexJs = path15.join(dir3, PACK_DIR, this.dist("s"), "index.js");
- const content = fs11.readFileSync(indexJs, "utf-8");
+ const content = fs16.readFileSync(indexJs, "utf-8");
  const preamble = [
  "(() => {",
  " const fs = require(`fs`)",
@@ -4331,7 +4340,7 @@ ${output}`);
  "})()",
  ""
  ].join("\n");
- fs11.writeFileSync(indexJs, preamble + content);
+ fs16.writeFileSync(indexJs, preamble + content);
  return "OK";
  }
  async getYarnInfo(rootDir) {
@@ -4535,10 +4544,10 @@ ${output}`);
  throw new BuildFailedError(`Circular reference detected in build task definition: ${cycle.join(" -> ")}`);
  }
  absPathToIndex.set(fileToRead, absPathToIndex.size);
- if (!fs11.existsSync(fileToRead)) {
+ if (!fs16.existsSync(fileToRead)) {
  throw new BuildFailedError(`Could no find file ${where} while resolving build task "${name}" from ${originatingFrom}`);
  }
- const unparsed = JSON.parse(fs11.readFileSync(fileToRead, "utf-8"));
+ const unparsed = JSON.parse(fs16.readFileSync(fileToRead, "utf-8"));
  const parseResult = BuildTaskRecord.safeParse(unparsed);
  if (!parseResult.success) {
  throw new BuildFailedError(`buildTask object (in ${fileToRead}) is not well formed: ${parseResult.error.message}`);
@@ -4555,12 +4564,12 @@ ${output}`);
  }
  }
  async computeTestsToRun(resolved) {
- const exists = await fse12.pathExists(resolved);
+ const exists = await fse10.pathExists(resolved);
  if (!exists) {
  this.logger.info("jest-output.json does not exist. running everything!");
  return [this.tests];
  }
- const content = await fse12.readFile(resolved, "utf-8");
+ const content = await fs16.promises.readFile(resolved, "utf-8");
  let parsed;
  try {
  parsed = JSON.parse(content);
@@ -4599,9 +4608,9 @@ function computeUnits(yarnInfo) {
  __name(computeUnits, "computeUnits");
  async function readPackages(rootDir, units) {
  const ret = /* @__PURE__ */ new Map();
- await promises(units).forEach(20, async (um) => {
+ await promises2(units).forEach(20, async (um) => {
  const p = rootDir.resolve(um.pathInRepo.expand("package.json"));
- const content = await fse12.readJSON(p);
+ const content = await fse10.readJSON(p);
  ret.set(um.id, content);
  });
  return ret;
@@ -4611,9 +4620,9 @@ async function createOutDirs(rootDir, units, outDirName) {
  if (!outDirName) {
  return;
  }
- await promises(units).forEach(20, async (um) => {
+ await promises2(units).forEach(20, async (um) => {
  const p = rootDir.resolve(um.pathInRepo.expand(outDirName));
- await fse12.ensureDir(p);
+ await fse10.ensureDir(p);
  });
  }
  __name(createOutDirs, "createOutDirs");
@@ -4780,7 +4789,7 @@ async function makeBootstrapper(options) {
  throw new Error(`could not find a repo dir (a directory with a package.json file that has a 'workspace' attribute) in or above ${userDir}`);
  }
  const buildRaptorDir = path16.join(rootDir, ".build-raptor");
- await fse13.ensureDir(buildRaptorDir);
+ await fse11.ensureDir(buildRaptorDir);
  const logFile = path16.join(buildRaptorDir, "main.log");
  const logger = createDefaultLogger(logFile, options.criticality);
  logger.info(`Logger initialized`);
@@ -4799,7 +4808,7 @@ async function makeBootstrapper(options) {
  ${JSON.stringify({ isCi, commitHash, startedAt: new Date(t0).toISOString() }, null, 2)}`);
  }
  const buildRaptorDirTasks = path16.join(buildRaptorDir, "tasks");
- await fse13.rm(buildRaptorDirTasks, { recursive: true, force: true });
+ await fs17.promises.rm(buildRaptorDirTasks, { recursive: true, force: true });
  const storageClient = await storageClientFactory(logger);
  const assetPublisher = new DefaultAssetPublisher(storageClient, logger);
  const repoProtocol = new YarnRepoProtocol(logger, assetPublisher);
@@ -4808,7 +4817,7 @@ ${JSON.stringify({ isCi, commitHash, startedAt: new Date(t0).toISOString() }, nu
  const visualizer = options.taskProgressOutput ? new TaskExecutionVisualizer() : void 0;
  const taskTimings = options.printTiming ? /* @__PURE__ */ new Map() : void 0;
  const allTestsFile = path16.join(buildRaptorDir, "all-tests");
- fs12.writeFileSync(allTestsFile, "");
+ fs17.writeFileSync(allTestsFile, "");
  let atLeastOneTest = false;
  bootstrapper.transmitter.addProcessor((s) => {
  if (s.step === "ASSET_PUBLISHED" || s.step === "BUILD_RUN_STARTED" || s.step === "PUBLIC_FILES" || s.step === "TASK_STORE_GET" || s.step === "TASK_STORE_PUT") {
@@ -4869,9 +4878,9 @@ ${whereIsTheLogMessage}${line}`);
  }
  });
  bootstrapper.subscribable.on("executionEnded", async (arg) => {
- await fse13.ensureDir(buildRaptorDirTasks);
+ await fse11.ensureDir(buildRaptorDirTasks);
  const fileName = path16.join(buildRaptorDirTasks, toReasonableFileName(arg.taskName));
- const stream2 = fse13.createWriteStream(fileName);
+ const stream2 = fs17.createWriteStream(fileName);
  try {
  await dumpFile(arg.outputFile, stream2);
  logger.info(`wrote output of ${arg.taskName} to ${fileName}`);
@@ -4892,7 +4901,7 @@ ${whereIsTheLogMessage}${line}`);

  `);
  }
- fs12.appendFileSync(allTestsFile, fs12.readFileSync(arg.outputFile) + "\n");
+ fs17.appendFileSync(allTestsFile, fs17.readFileSync(arg.outputFile) + "\n");
  logger.info(`output of ${arg.taskName} dumped`);
  });
  bootstrapper.subscribable.on("executionSkipped", (tn) => {
@@ -4976,7 +4985,7 @@ function reportTests(logger, arr, tr, allTasksFile) {
  const duration = at.durationMillis === void 0 ? "" : ` (${at.durationMillis} ms)`;
  const message = `${spaces}${v} ${at.testPath.at(-1)}${duration}`;
  logger.print(message, "high");
- fs12.appendFileSync(allTasksFile, message + "\n");
+ fs17.appendFileSync(allTasksFile, message + "\n");
  prev = k;
  }
  }
@@ -4986,13 +4995,13 @@ function reportTests(logger, arr, tr, allTasksFile) {
  const passing = sorted.filter((at) => isPassing(at.tests));
  for (const at of passing) {
  const message = `\u2705 PASSED ${at.fileName}`;
- fs12.appendFileSync(allTasksFile, message + "\n");
+ fs17.appendFileSync(allTasksFile, message + "\n");
  if (renderPassingTests) {
  logger.print(message, "high");
  }
  }
  for (const at of sorted.filter((at2) => !isPassing(at2.tests))) {
- fs12.appendFileSync(allTasksFile, at.fileName + "\n");
+ fs17.appendFileSync(allTasksFile, at.fileName + "\n");
  logger.print(at.fileName, "high");
  printTests(at.tests);