nx 20.0.0-canary.20240925-6182d20 → 20.0.0-canary.20240928-f221a41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/.eslintrc.json +9 -1
  2. package/package.json +14 -15
  3. package/schemas/nx-schema.json +26 -21
  4. package/src/command-line/graph/graph.js +9 -9
  5. package/src/command-line/import/utils/prepare-source-repo.js +8 -3
  6. package/src/command-line/init/implementation/add-nx-to-nest.js +5 -5
  7. package/src/command-line/init/implementation/react/clean-up-files.js +7 -7
  8. package/src/command-line/init/implementation/react/index.js +19 -12
  9. package/src/command-line/init/implementation/react/rename-js-to-jsx.js +3 -3
  10. package/src/command-line/release/changelog.js +1 -2
  11. package/src/command-line/release/config/version-plans.js +6 -7
  12. package/src/command-line/release/plan.js +6 -5
  13. package/src/command-line/release/release.js +2 -2
  14. package/src/command-line/release/version.js +5 -3
  15. package/src/command-line/reset/reset.js +20 -13
  16. package/src/core/graph/main.js +1 -1
  17. package/src/core/graph/styles.css +1 -1
  18. package/src/core/graph/styles.js +1 -1
  19. package/src/daemon/cache.d.ts +1 -2
  20. package/src/daemon/cache.js +12 -21
  21. package/src/daemon/client/client.js +9 -8
  22. package/src/daemon/server/handle-hash-tasks.js +1 -1
  23. package/src/daemon/tmp-dir.js +6 -7
  24. package/src/executors/run-commands/run-commands.impl.js +15 -22
  25. package/src/generators/tree.d.ts +1 -1
  26. package/src/generators/tree.js +11 -11
  27. package/src/generators/utils/project-configuration.js +2 -1
  28. package/src/hasher/create-task-hasher.js +1 -1
  29. package/src/hasher/hash-task.d.ts +4 -2
  30. package/src/hasher/hash-task.js +6 -9
  31. package/src/hasher/task-hasher.d.ts +2 -6
  32. package/src/hasher/task-hasher.js +6 -32
  33. package/src/native/nx.wasm32-wasi.wasm +0 -0
  34. package/src/plugins/js/index.js +1 -2
  35. package/src/project-graph/nx-deps-cache.js +5 -6
  36. package/src/tasks-runner/cache.js +17 -16
  37. package/src/tasks-runner/forked-process-task-runner.js +16 -5
  38. package/src/tasks-runner/life-cycles/dynamic-run-many-terminal-output-life-cycle.js +5 -0
  39. package/src/tasks-runner/life-cycles/static-run-many-terminal-output-life-cycle.js +7 -0
  40. package/src/tasks-runner/remove-old-cache-records.js +2 -3
  41. package/src/tasks-runner/run-command.js +3 -1
  42. package/src/tasks-runner/task-orchestrator.d.ts +1 -0
  43. package/src/tasks-runner/task-orchestrator.js +6 -2
  44. package/src/tasks-runner/tasks-schedule.d.ts +1 -0
  45. package/src/tasks-runner/tasks-schedule.js +6 -2
  46. package/src/utils/fileutils.d.ts +9 -1
  47. package/src/utils/fileutils.js +29 -12
  48. package/src/utils/ignore.js +2 -2
  49. package/src/utils/package-json.d.ts +1 -0
  50. package/src/utils/package-manager.js +2 -2
  51. package/src/utils/plugins/core-plugins.js +4 -0
  52. package/src/utils/plugins/output.js +1 -1
  53. package/src/hasher/node-task-hasher-impl.d.ts +0 -48
  54. package/src/hasher/node-task-hasher-impl.js +0 -449

package/src/hasher/task-hasher.js
@@ -11,7 +11,6 @@ exports.expandSingleProjectInputs = expandSingleProjectInputs;
 exports.expandNamedInput = expandNamedInput;
 exports.filterUsingGlobPatterns = filterUsingGlobPatterns;
 const file_hasher_1 = require("./file-hasher");
-const node_task_hasher_impl_1 = require("./node-task-hasher-impl");
 const minimatch_1 = require("minimatch");
 const native_task_hasher_impl_1 = require("./native-task-hasher-impl");
 const workspace_root_1 = require("../utils/workspace-root");
@@ -29,42 +28,18 @@ class DaemonBasedTaskHasher {
 }
 exports.DaemonBasedTaskHasher = DaemonBasedTaskHasher;
 class InProcessTaskHasher {
-    constructor(projectFileMap, allWorkspaceFiles, projectGraph, nxJson, externalRustReferences, options) {
-        this.projectFileMap = projectFileMap;
-        this.allWorkspaceFiles = allWorkspaceFiles;
+    constructor(projectGraph, nxJson, externalRustReferences, options) {
         this.projectGraph = projectGraph;
         this.nxJson = nxJson;
         this.externalRustReferences = externalRustReferences;
         this.options = options;
-        this.useNativeTaskHasher = process.env.NX_NATIVE_TASK_HASHER !== 'false';
-        const legacyRuntimeInputs = (this.options && this.options.runtimeCacheInputs
-            ? this.options.runtimeCacheInputs
-            : []).map((r) => ({ runtime: r }));
-        if (process.env.NX_CLOUD_ENCRYPTION_KEY) {
-            legacyRuntimeInputs.push({ env: 'NX_CLOUD_ENCRYPTION_KEY' });
-        }
-        const legacyFilesetInputs = [
-            'nx.json',
-            // ignore files will change the set of inputs to the hasher
-            '.gitignore',
-            '.nxignore',
-        ].map((d) => ({ fileset: `{workspaceRoot}/${d}` }));
-        this.taskHasher = !this.useNativeTaskHasher
-            ? new node_task_hasher_impl_1.NodeTaskHasherImpl(nxJson, legacyRuntimeInputs, legacyFilesetInputs, this.projectFileMap, this.allWorkspaceFiles, this.projectGraph, {
-                selectivelyHashTsConfig: this.options?.selectivelyHashTsConfig ?? false,
-            })
-            : new native_task_hasher_impl_1.NativeTaskHasherImpl(workspace_root_1.workspaceRoot, nxJson, this.projectGraph, this.externalRustReferences, {
-                selectivelyHashTsConfig: this.options?.selectivelyHashTsConfig ?? false,
-            });
+        this.taskHasher = new native_task_hasher_impl_1.NativeTaskHasherImpl(workspace_root_1.workspaceRoot, this.nxJson, this.projectGraph, this.externalRustReferences, {
+            selectivelyHashTsConfig: this.options?.selectivelyHashTsConfig ?? false,
+        });
     }
     async hashTasks(tasks, taskGraph, env) {
-        if (this.useNativeTaskHasher) {
-            const hashes = await this.taskHasher.hashTasks(tasks, taskGraph, env ?? process.env);
-            return tasks.map((task, index) => this.createHashDetails(task, hashes[index]));
-        }
-        else {
-            return await Promise.all(tasks.map((t) => this.hashTask(t, taskGraph, env)));
-        }
+        const hashes = await this.taskHasher.hashTasks(tasks, taskGraph, env ?? process.env);
+        return tasks.map((task, index) => this.createHashDetails(task, hashes[index]));
     }
     async hashTask(task, taskGraph, env) {
         const res = await this.taskHasher.hashTask(task, taskGraph, env ?? process.env);
@@ -98,7 +73,6 @@ class InProcessTaskHasher {
     }
 }
 exports.InProcessTaskHasher = InProcessTaskHasher;
-InProcessTaskHasher.version = '3.0';
 const DEFAULT_INPUTS = [
     {
         fileset: '{projectRoot}/**/*',
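
Note: this file's hunks remove the legacy JS-based NodeTaskHasherImpl along with the NX_NATIVE_TASK_HASHER escape hatch, so InProcessTaskHasher now always delegates to the Rust-backed NativeTaskHasherImpl, and its constructor no longer takes projectFileMap or allWorkspaceFiles. A minimal before/after sketch of the call site, assuming projectGraph, nxJson, externalRustReferences, and options are already in hand (illustrative only, not an official API example):

    const { InProcessTaskHasher } = require('nx/src/hasher/task-hasher');

    // Before: file maps had to be threaded through, and setting
    // NX_NATIVE_TASK_HASHER=false selected the JS implementation.
    // const hasher = new InProcessTaskHasher(projectFileMap, allWorkspaceFiles,
    //     projectGraph, nxJson, externalRustReferences, options);

    // After: the native hasher is the only implementation.
    const hasher = new InProcessTaskHasher(projectGraph, nxJson, externalRustReferences, options);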

package/src/native/nx.wasm32-wasi.wasm
Binary file

package/src/plugins/js/index.js
@@ -2,7 +2,6 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createDependencies = exports.createNodes = exports.name = void 0;
 const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const cache_directory_1 = require("../../utils/cache-directory");
@@ -96,7 +95,7 @@ function lockFileNeedsReprocessing(lockHash) {
     }
 }
 function writeLastProcessedLockfileHash(hash, lockFile) {
-    (0, fs_extra_1.ensureDirSync)((0, path_1.dirname)(lockFileHashFile));
+    (0, fs_1.mkdirSync)((0, path_1.dirname)(lockFileHashFile), { recursive: true });
     (0, fs_1.writeFileSync)(cachedParsedLockFile, JSON.stringify(lockFile, null, 2));
     (0, fs_1.writeFileSync)(lockFileHashFile, hash);
 }

package/src/project-graph/nx-deps-cache.js
@@ -8,8 +8,7 @@ exports.createProjectFileMapCache = createProjectFileMapCache;
 exports.writeCache = writeCache;
 exports.shouldRecomputeWholeGraph = shouldRecomputeWholeGraph;
 exports.extractCachedFileData = extractCachedFileData;
-const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const cache_directory_1 = require("../utils/cache-directory");
@@ -19,8 +18,8 @@ exports.nxProjectGraph = (0, path_1.join)(cache_directory_1.workspaceDataDirecto
 exports.nxFileMap = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'file-map.json');
 function ensureCacheDirectory() {
     try {
-        if (!(0, fs_1.existsSync)(cache_directory_1.workspaceDataDirectory)) {
-            (0, fs_extra_1.ensureDirSync)(cache_directory_1.workspaceDataDirectory);
+        if (!(0, node_fs_1.existsSync)(cache_directory_1.workspaceDataDirectory)) {
+            (0, node_fs_1.mkdirSync)(cache_directory_1.workspaceDataDirectory, { recursive: true });
         }
     }
     catch (e) {
@@ -102,9 +101,9 @@ function writeCache(cache, projectGraph) {
     const tmpFileMapPath = `${exports.nxFileMap}~${unique}`;
     try {
         (0, fileutils_1.writeJsonFile)(tmpProjectGraphPath, projectGraph);
-        (0, fs_extra_1.renameSync)(tmpProjectGraphPath, exports.nxProjectGraph);
+        (0, node_fs_1.renameSync)(tmpProjectGraphPath, exports.nxProjectGraph);
         (0, fileutils_1.writeJsonFile)(tmpFileMapPath, cache);
-        (0, fs_extra_1.renameSync)(tmpFileMapPath, exports.nxFileMap);
+        (0, node_fs_1.renameSync)(tmpFileMapPath, exports.nxFileMap);
         done = true;
     }
     catch (err) {
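
Note: writeCache keeps its existing write-to-temp-then-rename pattern; only the rename implementation moves from fs-extra to node:fs. A minimal sketch of the same atomic-write idea, reusing the writeJsonFile helper from nx/src/utils/fileutils (the tmp-suffix scheme below is illustrative; the real code derives a unique suffix per invocation):

    const { renameSync } = require('node:fs');
    const { writeJsonFile } = require('nx/src/utils/fileutils');

    // rename(2) is atomic on the same filesystem, so readers never
    // observe a half-written cache file.
    function writeCacheFileAtomically(finalPath, data) {
      const tmpPath = `${finalPath}~${process.pid}`; // illustrative unique suffix
      writeJsonFile(tmpPath, data);
      renameSync(tmpPath, finalPath);
    }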

package/src/tasks-runner/cache.js
@@ -3,11 +3,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.Cache = exports.DbCache = void 0;
 exports.getCache = getCache;
 const workspace_root_1 = require("../utils/workspace-root");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const default_tasks_runner_1 = require("./default-tasks-runner");
 const child_process_1 = require("child_process");
+const node_fs_1 = require("node:fs");
+const promises_1 = require("node:fs/promises");
 const cache_directory_1 = require("../utils/cache-directory");
 const node_machine_id_1 = require("node-machine-id");
 const native_1 = require("../native");
@@ -233,13 +234,13 @@ class Cache {
             // might be left overs from partially-completed cache invocations
             await this.remove(tdCommit);
             await this.remove(td);
-            await (0, fs_extra_1.mkdir)(td);
-            await (0, fs_extra_1.writeFile)((0, path_1.join)(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
-            await (0, fs_extra_1.mkdir)((0, path_1.join)(td, 'outputs'));
+            await (0, promises_1.mkdir)(td, { recursive: true });
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
+            await (0, promises_1.mkdir)((0, path_1.join)(td, 'outputs'));
             const expandedOutputs = await this.expandOutputsInWorkspace(outputs);
             await Promise.all(expandedOutputs.map(async (f) => {
                 const src = (0, path_1.join)(this.root, f);
-                if (await (0, fs_extra_1.pathExists)(src)) {
+                if ((0, node_fs_1.existsSync)(src)) {
                     const cached = (0, path_1.join)(td, 'outputs', f);
                     await this.copy(src, cached);
                 }
@@ -248,15 +249,15 @@ class Cache {
             // creating this file is atomic, whereas creating a folder is not.
             // so if the process gets terminated while we are copying stuff into cache,
             // the cache entry won't be used.
-            await (0, fs_extra_1.writeFile)((0, path_1.join)(td, 'code'), code.toString());
-            await (0, fs_extra_1.writeFile)((0, path_1.join)(td, 'source'), await this.currentMachineId());
-            await (0, fs_extra_1.writeFile)(tdCommit, 'true');
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'code'), code.toString());
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'source'), await this.currentMachineId());
+            await (0, promises_1.writeFile)(tdCommit, 'true');
             if (this.options.remoteCache) {
                 await this.options.remoteCache.store(task.hash, this.cachePath);
             }
             if (terminalOutput) {
                 const outputPath = this.temporaryOutputPath(task);
-                await (0, fs_extra_1.writeFile)(outputPath, terminalOutput);
+                await (0, promises_1.writeFile)(outputPath, terminalOutput);
             }
         });
     }
@@ -265,7 +266,7 @@ class Cache {
         const expandedOutputs = await this.expandOutputsInCache(outputs, cachedResult);
         await Promise.all(expandedOutputs.map(async (f) => {
             const cached = (0, path_1.join)(cachedResult.outputsPath, f);
-            if (await (0, fs_extra_1.pathExists)(cached)) {
+            if ((0, node_fs_1.existsSync)(cached)) {
                 const src = (0, path_1.join)(this.root, f);
                 await this.remove(src);
                 await this.copy(cached, src);
@@ -321,11 +322,11 @@ class Cache {
     async getFromLocalDir(task) {
         const tdCommit = (0, path_1.join)(this.cachePath, `${task.hash}.commit`);
         const td = (0, path_1.join)(this.cachePath, task.hash);
-        if (await (0, fs_extra_1.pathExists)(tdCommit)) {
-            const terminalOutput = await (0, fs_extra_1.readFile)((0, path_1.join)(td, 'terminalOutput'), 'utf-8');
+        if ((0, node_fs_1.existsSync)(tdCommit)) {
+            const terminalOutput = await (0, promises_1.readFile)((0, path_1.join)(td, 'terminalOutput'), 'utf-8');
             let code = 0;
             try {
-                code = Number(await (0, fs_extra_1.readFile)((0, path_1.join)(td, 'code'), 'utf-8'));
+                code = Number(await (0, promises_1.readFile)((0, path_1.join)(td, 'code'), 'utf-8'));
             }
             catch { }
             return {
@@ -342,7 +343,7 @@ class Cache {
         const td = (0, path_1.join)(this.cachePath, task.hash);
         let sourceMachineId = null;
         try {
-            sourceMachineId = await (0, fs_extra_1.readFile)((0, path_1.join)(td, 'source'), 'utf-8');
+            sourceMachineId = await (0, promises_1.readFile)((0, path_1.join)(td, 'source'), 'utf-8');
         }
         catch { }
         if (sourceMachineId && sourceMachineId != (await this.currentMachineId())) {
@@ -361,12 +362,12 @@ class Cache {
         }
     }
     createCacheDir() {
-        (0, fs_extra_1.mkdirSync)(cache_directory_1.cacheDir, { recursive: true });
+        (0, node_fs_1.mkdirSync)(cache_directory_1.cacheDir, { recursive: true });
         return cache_directory_1.cacheDir;
     }
     createTerminalOutputsDir() {
         const path = (0, path_1.join)(this.cachePath, 'terminalOutputs');
-        (0, fs_extra_1.mkdirSync)(path, { recursive: true });
+        (0, node_fs_1.mkdirSync)(path, { recursive: true });
         return path;
     }
 }
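
Note: every fs-extra helper this file used has a direct built-in replacement on current Node versions, which is what makes dropping the dependency possible here and in the other files below. A small self-contained sketch of the mapping applied throughout these hunks (the demo function and paths are illustrative):

    const fs = require('node:fs');
    const fsp = require('node:fs/promises');
    const { join } = require('node:path');

    async function demo(dir) {
      // ensureDirSync(dir)    -> mkdirSync with { recursive: true } (no-op if present)
      fs.mkdirSync(dir, { recursive: true });
      // writeFile / readFile  -> the promise variants from node:fs/promises
      await fsp.writeFile(join(dir, 'code'), '0');
      const code = Number(await fsp.readFile(join(dir, 'code'), 'utf-8'));
      // await pathExists(p)   -> existsSync(p); the diff trades an await for a sync check
      const present = fs.existsSync(join(dir, 'code'));
      // remove / removeSync   -> rm/rmSync with { recursive: true, force: true }
      await fsp.rm(dir, { recursive: true, force: true });
      return { code, present };
    }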

package/src/tasks-runner/forked-process-task-runner.js
@@ -4,7 +4,6 @@ exports.ForkedProcessTaskRunner = void 0;
 const fs_1 = require("fs");
 const child_process_1 = require("child_process");
 const chalk = require("chalk");
-const logTransformer = require("strong-log-transformer");
 const output_1 = require("../utils/output");
 const utils_1 = require("./utils");
 const path_1 = require("path");
@@ -208,16 +207,16 @@ class ForkedProcessTaskRunner {
             const prefixText = `${task.target.project}:`;
             p.stdout
                 .pipe(logClearLineToPrefixTransformer(color.bold(prefixText) + ' '))
-                .pipe(logTransformer({ tag: color.bold(prefixText) }))
+                .pipe(addPrefixTransformer(color.bold(prefixText)))
                 .pipe(process.stdout);
             p.stderr
                 .pipe(logClearLineToPrefixTransformer(color(prefixText) + ' '))
-                .pipe(logTransformer({ tag: color(prefixText) }))
+                .pipe(addPrefixTransformer(color(prefixText)))
                 .pipe(process.stderr);
         }
         else {
-            p.stdout.pipe(logTransformer()).pipe(process.stdout);
-            p.stderr.pipe(logTransformer()).pipe(process.stderr);
+            p.stdout.pipe(addPrefixTransformer()).pipe(process.stdout);
+            p.stderr.pipe(addPrefixTransformer()).pipe(process.stderr);
         }
     }
     let outWithErr = [];
@@ -403,3 +402,15 @@ function logClearLineToPrefixTransformer(prefix) {
         },
     });
 }
+function addPrefixTransformer(prefix) {
+    const newLineSeparator = process.platform.startsWith('win') ? '\r\n' : '\n';
+    return new stream_1.Transform({
+        transform(chunk, _encoding, callback) {
+            const list = chunk.toString().split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g);
+            list
+                .filter(Boolean)
+                .forEach((m) => this.push(prefix ? prefix + ' ' + m + newLineSeparator : m + newLineSeparator));
+            callback();
+        },
+    });
+}
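
Note: the strong-log-transformer dependency is replaced by the ~12-line addPrefixTransformer appended at the bottom of this file (stream_1 is the file's existing require("stream") binding). A standalone version of the technique, with a hypothetical prefix in the usage line:

    const { Transform } = require('node:stream');

    function addPrefixTransformer(prefix) {
      const newLineSeparator = process.platform.startsWith('win') ? '\r\n' : '\n';
      return new Transform({
        transform(chunk, _encoding, callback) {
          // Split the chunk on every newline variant and re-emit one line at a
          // time, prepending the prefix when one was given.
          const lines = chunk.toString().split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g);
          lines
            .filter(Boolean)
            .forEach((m) => this.push(prefix ? prefix + ' ' + m + newLineSeparator : m + newLineSeparator));
          callback();
        },
      });
    }

    // Usage mirrors the call sites above:
    process.stdin.pipe(addPrefixTransformer('myproj:')).pipe(process.stdout);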

package/src/tasks-runner/life-cycles/dynamic-run-many-terminal-output-life-cycle.js
@@ -224,6 +224,11 @@ async function createRunManyDynamicOutputRenderer({ projectNames, tasks, args, o
     clearRenderInterval();
     const timeTakenText = (0, pretty_time_1.prettyTime)(process.hrtime(start));
     moveCursorToStartOfPinnedFooter();
+    if (totalTasks === 0) {
+        renderPinnedFooter([output_1.output.applyNxPrefix('gray', 'No tasks were run')]);
+        resolveRenderIsDonePromise();
+        return;
+    }
     if (totalSuccessfulTasks === totalTasks) {
         const text = `Successfully ran ${(0, formatting_utils_1.formatTargetsAndProjects)(projectNames, targets, tasks)}`;
         const taskOverridesRows = [];

package/src/tasks-runner/life-cycles/static-run-many-terminal-output-life-cycle.js
@@ -23,6 +23,9 @@ class StaticRunManyTerminalOutputLifeCycle {
         this.allCompletedTasks = new Map();
     }
     startCommand() {
+        if (this.tasks.length === 0) {
+            return;
+        }
         if (this.projectNames.length <= 0) {
             output_1.output.logSingleLine(`No projects with ${(0, formatting_utils_1.formatTargetsAndProjects)(this.projectNames, this.args.targets, this.tasks)} were run`);
             return;
@@ -45,6 +48,10 @@ class StaticRunManyTerminalOutputLifeCycle {
     }
     endCommand() {
         output_1.output.addNewline();
+        if (this.tasks.length === 0) {
+            output_1.output.logSingleLine(`No tasks were run`);
+            return;
+        }
         if (this.failedTasks.length === 0) {
             output_1.output.addVerticalSeparatorWithoutNewLines('green');
             const bodyLines = this.cachedTasks.length > 0

package/src/tasks-runner/remove-old-cache-records.js
@@ -1,7 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const WEEK_IN_MS = 1000 * 60 * 60 * 24 * 7;
 const folder = process.argv[2];
@@ -34,11 +33,11 @@ function removeOld(records) {
             if (time - s.mtimeMs > WEEK_IN_MS) {
                 if (s.isDirectory()) {
                     try {
-                        (0, fs_extra_1.removeSync)(`${r}.commit`);
+                        (0, fs_1.rmSync)(`${r}.commit`, { recursive: true, force: true });
                     }
                     catch (e) { }
                 }
-                (0, fs_extra_1.removeSync)(r);
+                (0, fs_1.rmSync)(r, { recursive: true, force: true });
             }
         }
         catch (e) { }

package/src/tasks-runner/run-command.js
@@ -363,12 +363,14 @@ function setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles) {
 }
 async function invokeTasksRunner({ tasks, projectGraph, taskGraph, lifeCycle, nxJson, nxArgs, loadDotEnvFiles, initiatingProject, }) {
     setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles);
+    // this needs to be done before we start to run the tasks
+    const taskDetails = (0, hash_task_1.getTaskDetails)();
     const { tasksRunner, runnerOptions } = getRunner(nxArgs, nxJson);
     let hasher = (0, create_task_hasher_1.createTaskHasher)(projectGraph, nxJson, runnerOptions);
     // this is used for two reasons: to fetch all remote cache hits AND
     // to submit everything that is known in advance to Nx Cloud to run in
     // a distributed fashion
-    await (0, hash_task_1.hashTasksThatDoNotDependOnOutputsOfOtherTasks)(hasher, projectGraph, taskGraph, nxJson);
+    await (0, hash_task_1.hashTasksThatDoNotDependOnOutputsOfOtherTasks)(hasher, projectGraph, taskGraph, nxJson, taskDetails);
     const taskResultsLifecycle = new task_results_life_cycle_1.TaskResultsLifeCycle();
     const compositedLifeCycle = new life_cycle_1.CompositeLifeCycle([
         ...constructLifeCycles(lifeCycle),

package/src/tasks-runner/task-orchestrator.d.ts
@@ -15,6 +15,7 @@ export declare class TaskOrchestrator {
     private readonly bail;
     private readonly daemon;
     private readonly outputStyle;
+    private taskDetails;
     private cache;
     private forkedProcessTaskRunner;
     private tasksSchedule;

package/src/tasks-runner/task-orchestrator.js
@@ -27,6 +27,7 @@ class TaskOrchestrator {
         this.bail = bail;
         this.daemon = daemon;
         this.outputStyle = outputStyle;
+        this.taskDetails = (0, hash_task_1.getTaskDetails)();
         this.cache = (0, cache_1.getCache)(this.nxJson, this.options);
         this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
         this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
@@ -93,7 +94,7 @@ class TaskOrchestrator {
         const task = this.taskGraph.tasks[taskId];
         const taskSpecificEnv = (0, task_env_1.getTaskSpecificEnv)(task);
         if (!task.hash) {
-            await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv);
+            await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv, this.taskDetails);
         }
         await this.options.lifeCycle.scheduleTask(task);
         return taskSpecificEnv;
@@ -101,7 +102,7 @@ class TaskOrchestrator {
     async processScheduledBatch(batch) {
         await Promise.all(Object.values(batch.taskGraph.tasks).map(async (task) => {
             if (!task.hash) {
-                await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv);
+                await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv, this.taskDetails);
             }
             await this.options.lifeCycle.scheduleTask(task);
         }));
@@ -321,6 +322,9 @@ class TaskOrchestrator {
         };
     }
     catch (e) {
+        if (process.env.NX_VERBOSE_LOGGING === 'true') {
+            console.error(e);
+        }
         return {
             code: 1,
         };
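
Note: together with the run-command.js hunk above, the pattern here is to resolve task details once up front (the new comment stresses this must happen before any task runs) and thread the same handle into every hashTask call instead of re-resolving it per task. The general shape, assuming the exports visible in these hunks (hash_task_1 resolves to nx/src/hasher/hash-task):

    const { getTaskDetails, hashTask } = require('nx/src/hasher/hash-task');

    const taskDetails = getTaskDetails(); // once, before tasks start
    // later, for each task that has no hash yet:
    // await hashTask(hasher, projectGraph, taskGraph, task, env, taskDetails);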

package/src/tasks-runner/tasks-schedule.d.ts
@@ -19,6 +19,7 @@ export declare class TasksSchedule {
     private completedTasks;
     private scheduleRequestsExecutionChain;
    private estimatedTaskTimings;
+    private projectDependencies;
     constructor(projectGraph: ProjectGraph, taskGraph: TaskGraph, options: DefaultTasksRunnerOptions);
     init(): Promise<void>;
     scheduleNextTasks(): Promise<void>;

package/src/tasks-runner/tasks-schedule.js
@@ -20,12 +20,16 @@ class TasksSchedule {
         this.completedTasks = new Set();
         this.scheduleRequestsExecutionChain = Promise.resolve();
         this.estimatedTaskTimings = {};
+        this.projectDependencies = {};
     }
     async init() {
         if (this.taskHistory) {
             this.estimatedTaskTimings =
                 await this.taskHistory.getEstimatedTaskTimings(Object.values(this.taskGraph.tasks).map((t) => t.target));
         }
+        for (const project of Object.values(this.taskGraph.tasks).map((t) => t.target.project)) {
+            this.projectDependencies[project] ??= (0, project_graph_utils_1.findAllProjectNodeDependencies)(project, this.reverseProjectGraph).length;
+        }
     }
     async scheduleNextTasks() {
         this.scheduleRequestsExecutionChain =
@@ -90,8 +94,8 @@ class TasksSchedule {
         // Most likely tasks with no dependencies such as test
         const project1 = this.taskGraph.tasks[taskId1].target.project;
         const project2 = this.taskGraph.tasks[taskId2].target.project;
-        const project1NodeDependencies = (0, project_graph_utils_1.findAllProjectNodeDependencies)(project1, this.reverseProjectGraph).length;
-        const project2NodeDependencies = (0, project_graph_utils_1.findAllProjectNodeDependencies)(project2, this.reverseProjectGraph).length;
+        const project1NodeDependencies = this.projectDependencies[project1];
+        const project2NodeDependencies = this.projectDependencies[project2];
         const dependenciesDiff = project2NodeDependencies - project1NodeDependencies;
         if (dependenciesDiff !== 0) {
             return dependenciesDiff;
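
Note: the comparator previously called findAllProjectNodeDependencies (a graph traversal) for both projects on every comparison inside a sort; init() now precomputes the count once per project so each comparison is two O(1) lookups. The general shape of the optimization, as a generic sketch:

    // Hoist an expensive per-key computation out of a sort comparator
    // into a memo built once up front.
    function buildDependencyCounts(projects, countDepsFor) {
      const counts = {};
      for (const project of projects) {
        counts[project] ??= countDepsFor(project); // compute once per project
      }
      return counts;
    }

    // The comparator then reduces to:
    //   const diff = counts[project2] - counts[project1];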

package/src/utils/fileutils.d.ts
@@ -1,5 +1,5 @@
 import type { JsonParseOptions, JsonSerializeOptions } from './json';
-import { PathLike } from 'fs';
+import { PathLike } from 'node:fs';
 export interface JsonReadOptions extends JsonParseOptions {
     /**
      * mutable field recording whether JSON ends with new line
@@ -43,6 +43,14 @@ export declare function readYamlFile<T extends object = any>(path: string, optio
  * @param options JSON serialize options
  */
 export declare function writeJsonFile<T extends object = object>(path: string, data: T, options?: JsonWriteOptions): void;
+/**
+ * Serializes the given data to JSON and writes it to a file asynchronously.
+ *
+ * @param path A path to a file.
+ * @param data data which should be serialized to JSON and written to the file
+ * @param options JSON serialize options
+ */
+export declare function writeJsonFileAsync<T extends object = object>(path: string, data: T, options?: JsonWriteOptions): Promise<void>;
 /**
  * Check if a directory exists
  * @param path Path to directory

package/src/utils/fileutils.js
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.readJsonFile = readJsonFile;
 exports.readYamlFile = readYamlFile;
 exports.writeJsonFile = writeJsonFile;
+exports.writeJsonFileAsync = writeJsonFileAsync;
 exports.directoryExists = directoryExists;
 exports.fileExists = fileExists;
 exports.createDirectory = createDirectory;
@@ -10,7 +11,8 @@ exports.isRelativePath = isRelativePath;
 exports.extractFileFromTarball = extractFileFromTarball;
 exports.readFileIfExisting = readFileIfExisting;
 const json_1 = require("./json");
-const fs_1 = require("fs");
+const node_fs_1 = require("node:fs");
+const promises_1 = require("node:fs/promises");
 const path_1 = require("path");
 const tar = require("tar-stream");
 const zlib_1 = require("zlib");
@@ -22,7 +24,7 @@ const zlib_1 = require("zlib");
  * @returns Object the JSON content of the file represents
 */
 function readJsonFile(path, options) {
-    const content = (0, fs_1.readFileSync)(path, 'utf-8');
+    const content = (0, node_fs_1.readFileSync)(path, 'utf-8');
     if (options) {
         options.endsWithNewline = content.charCodeAt(content.length - 1) === 10;
     }
@@ -41,7 +43,7 @@ function readJsonFile(path, options) {
 * @returns
 */
 function readYamlFile(path, options) {
-    const content = (0, fs_1.readFileSync)(path, 'utf-8');
+    const content = (0, node_fs_1.readFileSync)(path, 'utf-8');
     const { load } = require('@zkochan/js-yaml');
     return load(content, { ...options, filename: path });
 }
@@ -53,12 +55,27 @@ function readYamlFile(path, options) {
  * @param options JSON serialize options
 */
 function writeJsonFile(path, data, options) {
-    (0, fs_1.mkdirSync)((0, path_1.dirname)(path), { recursive: true });
+    (0, node_fs_1.mkdirSync)((0, path_1.dirname)(path), { recursive: true });
     const serializedJson = (0, json_1.serializeJson)(data, options);
     const content = options?.appendNewLine
         ? `${serializedJson}\n`
         : serializedJson;
-    (0, fs_1.writeFileSync)(path, content, { encoding: 'utf-8' });
+    (0, node_fs_1.writeFileSync)(path, content, { encoding: 'utf-8' });
+}
+/**
+ * Serializes the given data to JSON and writes it to a file asynchronously.
+ *
+ * @param path A path to a file.
+ * @param data data which should be serialized to JSON and written to the file
+ * @param options JSON serialize options
+ */
+async function writeJsonFileAsync(path, data, options) {
+    await (0, promises_1.mkdir)((0, path_1.dirname)(path), { recursive: true });
+    const serializedJson = (0, json_1.serializeJson)(data, options);
+    const content = options?.appendNewLine
+        ? `${serializedJson}\n`
+        : serializedJson;
+    await (0, promises_1.writeFile)(path, content, { encoding: 'utf-8' });
 }
 /**
  * Check if a directory exists
@@ -66,7 +83,7 @@ function writeJsonFile(path, data, options) {
 */
 function directoryExists(path) {
     try {
-        return (0, fs_1.statSync)(path).isDirectory();
+        return (0, node_fs_1.statSync)(path).isDirectory();
     }
     catch {
         return false;
@@ -78,14 +95,14 @@ function directoryExists(path) {
 */
 function fileExists(path) {
     try {
-        return (0, fs_1.statSync)(path).isFile();
+        return (0, node_fs_1.statSync)(path).isFile();
     }
     catch {
         return false;
     }
 }
 function createDirectory(path) {
-    (0, fs_1.mkdirSync)(path, { recursive: true });
+    (0, node_fs_1.mkdirSync)(path, { recursive: true });
 }
 function isRelativePath(path) {
     return (path === '.' ||
@@ -102,9 +119,9 @@ function isRelativePath(path) {
 */
 async function extractFileFromTarball(tarballPath, file, destinationFilePath) {
     return new Promise((resolve, reject) => {
-        (0, fs_1.mkdirSync)((0, path_1.dirname)(destinationFilePath), { recursive: true });
+        (0, node_fs_1.mkdirSync)((0, path_1.dirname)(destinationFilePath), { recursive: true });
         var tarExtractStream = tar.extract();
-        const destinationFileStream = (0, fs_1.createWriteStream)(destinationFilePath);
+        const destinationFileStream = (0, node_fs_1.createWriteStream)(destinationFilePath);
         let isFileExtracted = false;
         tarExtractStream.on('entry', function (header, stream, next) {
             if (header.name === file) {
@@ -126,9 +143,9 @@ async function extractFileFromTarball(tarballPath, file, destinationFilePath) {
                 reject();
            }
        });
-        (0, fs_1.createReadStream)(tarballPath).pipe((0, zlib_1.createGunzip)()).pipe(tarExtractStream);
+        (0, node_fs_1.createReadStream)(tarballPath).pipe((0, zlib_1.createGunzip)()).pipe(tarExtractStream);
    });
 }
 function readFileIfExisting(path) {
-    return (0, fs_1.existsSync)(path) ? (0, fs_1.readFileSync)(path, 'utf-8') : '';
+    return (0, node_fs_1.existsSync)(path) ? (0, node_fs_1.readFileSync)(path, 'utf-8') : '';
 }
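
Note: writeJsonFileAsync mirrors writeJsonFile exactly (directory creation, serialization, optional trailing newline) but uses node:fs/promises so callers can await it without blocking the event loop. A usage sketch with an illustrative path:

    const { writeJsonFile, writeJsonFileAsync } = require('nx/src/utils/fileutils');

    async function saveConfig() {
      // Sync variant, unchanged:
      writeJsonFile('tmp/config.json', { hello: 'world' }, { appendNewLine: true });
      // New async variant, same semantics:
      await writeJsonFileAsync('tmp/config.json', { hello: 'world' }, { appendNewLine: true });
    }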

package/src/utils/ignore.js
@@ -4,7 +4,7 @@ exports.ALWAYS_IGNORE = void 0;
 exports.getIgnoredGlobs = getIgnoredGlobs;
 exports.getAlwaysIgnore = getAlwaysIgnore;
 exports.getIgnoreObject = getIgnoreObject;
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const ignore_1 = require("ignore");
 const fileutils_1 = require("./fileutils");
 const path_1 = require("./path");
@@ -48,7 +48,7 @@ function getIgnoreObject(root = workspace_root_1.workspaceRoot) {
 function getIgnoredGlobsFromFile(file, root) {
     try {
         const results = [];
-        const contents = (0, fs_extra_1.readFileSync)(file, 'utf-8');
+        const contents = (0, node_fs_1.readFileSync)(file, 'utf-8');
         const lines = contents.split('\n');
         for (const line of lines) {
             const trimmed = line.trim();

package/src/utils/package-json.d.ts
@@ -52,6 +52,7 @@ export interface PackageJson {
         packages: string[];
     };
     publishConfig?: Record<string, string>;
+    files?: string[];
     nx?: NxProjectPackageJsonConfiguration;
     generators?: string;
     schematics?: string;

package/src/utils/package-manager.js
@@ -15,7 +15,7 @@ exports.packageRegistryView = packageRegistryView;
 exports.packageRegistryPack = packageRegistryPack;
 const child_process_1 = require("child_process");
 const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
+const promises_1 = require("node:fs/promises");
 const path_1 = require("path");
 const semver_1 = require("semver");
 const tmp_1 = require("tmp");
@@ -301,7 +301,7 @@ function createTempNpmDirectory() {
     copyPackageManagerConfigurationFiles(workspace_root_1.workspaceRoot, dir);
     const cleanup = async () => {
         try {
-            await (0, fs_extra_1.remove)(dir);
+            await (0, promises_1.rm)(dir, { recursive: true, force: true });
         }
         catch {
             // It's okay if this fails, the OS will clean it up eventually

package/src/utils/plugins/core-plugins.js
@@ -82,6 +82,10 @@ exports.CORE_PLUGINS = [
         name: '@nx/rollup',
         capabilities: 'executors,generators',
     },
+    {
+        name: '@nx/rspack',
+        capabilities: 'executors,generators',
+    },
     {
         name: '@nx/storybook',
         capabilities: 'executors,generators',

package/src/utils/plugins/output.js
@@ -47,7 +47,7 @@ function listAlsoAvailableCorePlugins(installedPlugins) {
     }
 }
 function listPowerpackPlugins() {
-    const powerpackLink = 'https://nx.dev/plugin-registry';
+    const powerpackLink = 'https://nx.dev/plugin-registry#powerpack';
     output_1.output.log({
         title: `Available Powerpack Plugins: ${powerpackLink}`,
     });