nx 19.8.0 → 19.8.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. package/.eslintrc.json +11 -2
  2. package/bin/nx.js +10 -2
  3. package/package.json +12 -13
  4. package/schemas/nx-schema.json +26 -21
  5. package/src/adapter/compat.d.ts +1 -1
  6. package/src/adapter/compat.js +1 -0
  7. package/src/command-line/activate-powerpack/activate-powerpack.js +3 -1
  8. package/src/command-line/add/add.js +4 -2
  9. package/src/command-line/connect/view-logs.js +1 -0
  10. package/src/command-line/exec/exec.js +6 -1
  11. package/src/command-line/format/format.js +3 -1
  12. package/src/command-line/graph/graph.js +10 -9
  13. package/src/command-line/init/implementation/add-nx-to-nest.js +5 -5
  14. package/src/command-line/init/implementation/angular/integrated-workspace.js +4 -1
  15. package/src/command-line/init/implementation/angular/legacy-angular-versions.js +5 -2
  16. package/src/command-line/init/implementation/dot-nx/add-nx-scripts.js +3 -1
  17. package/src/command-line/init/implementation/dot-nx/nxw.js +1 -0
  18. package/src/command-line/init/implementation/react/check-for-uncommitted-changes.js +3 -1
  19. package/src/command-line/init/implementation/react/clean-up-files.js +7 -7
  20. package/src/command-line/init/implementation/react/index.js +36 -17
  21. package/src/command-line/init/implementation/react/rename-js-to-jsx.js +3 -3
  22. package/src/command-line/init/implementation/utils.js +5 -1
  23. package/src/command-line/init/init-v1.js +1 -0
  24. package/src/command-line/init/init-v2.js +2 -1
  25. package/src/command-line/migrate/command-object.js +4 -0
  26. package/src/command-line/migrate/migrate.js +1 -1
  27. package/src/command-line/release/changelog.js +1 -2
  28. package/src/command-line/release/config/version-plans.js +9 -8
  29. package/src/command-line/release/plan.js +6 -5
  30. package/src/command-line/release/release.js +2 -2
  31. package/src/command-line/release/utils/exec-command.js +1 -0
  32. package/src/command-line/release/utils/github.js +1 -0
  33. package/src/command-line/release/utils/launch-editor.js +6 -1
  34. package/src/command-line/release/version.js +6 -3
  35. package/src/command-line/report/report.d.ts +3 -1
  36. package/src/command-line/report/report.js +17 -2
  37. package/src/command-line/reset/reset.js +4 -4
  38. package/src/command-line/run/run.js +1 -0
  39. package/src/command-line/sync/sync.js +5 -4
  40. package/src/command-line/watch/watch.js +1 -0
  41. package/src/config/nx-json.d.ts +4 -0
  42. package/src/daemon/cache.d.ts +1 -2
  43. package/src/daemon/cache.js +12 -21
  44. package/src/daemon/client/client.d.ts +4 -1
  45. package/src/daemon/client/client.js +9 -8
  46. package/src/daemon/client/generate-help-output.js +1 -0
  47. package/src/daemon/server/sync-generators.d.ts +4 -1
  48. package/src/daemon/server/sync-generators.js +33 -15
  49. package/src/daemon/tmp-dir.js +6 -7
  50. package/src/executors/run-commands/run-commands.impl.js +1 -0
  51. package/src/executors/run-script/run-script.impl.js +1 -0
  52. package/src/generators/tree.d.ts +1 -1
  53. package/src/generators/tree.js +11 -11
  54. package/src/native/index.d.ts +1 -1
  55. package/src/native/nx.wasm32-wasi.wasm +0 -0
  56. package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +1 -1
  57. package/src/plugins/js/index.js +1 -2
  58. package/src/project-graph/file-utils.js +1 -0
  59. package/src/project-graph/nx-deps-cache.js +5 -6
  60. package/src/tasks-runner/cache.d.ts +3 -1
  61. package/src/tasks-runner/cache.js +29 -29
  62. package/src/tasks-runner/default-tasks-runner.js +1 -1
  63. package/src/tasks-runner/life-cycles/formatting-utils.d.ts +1 -1
  64. package/src/tasks-runner/life-cycles/formatting-utils.js +27 -15
  65. package/src/tasks-runner/life-cycles/task-history-life-cycle.js +3 -0
  66. package/src/tasks-runner/remove-old-cache-records.js +2 -3
  67. package/src/tasks-runner/task-orchestrator.d.ts +3 -1
  68. package/src/tasks-runner/task-orchestrator.js +3 -2
  69. package/src/tasks-runner/tasks-schedule.js +1 -1
  70. package/src/utils/ab-testing.js +4 -1
  71. package/src/utils/child-process.js +5 -3
  72. package/src/utils/command-line-utils.js +7 -1
  73. package/src/utils/default-base.js +5 -2
  74. package/src/utils/fileutils.d.ts +9 -1
  75. package/src/utils/fileutils.js +29 -12
  76. package/src/utils/git-utils.index-filter.js +2 -1
  77. package/src/utils/git-utils.js +4 -0
  78. package/src/utils/git-utils.tree-filter.js +3 -1
  79. package/src/utils/ignore.js +2 -2
  80. package/src/utils/package-manager.js +2 -2
  81. package/src/utils/powerpack.d.ts +1 -1
  82. package/src/utils/powerpack.js +3 -8
  83. package/src/utils/sync-generators.d.ts +13 -3
  84. package/src/utils/sync-generators.js +99 -25
  85. package/src/utils/task-history.d.ts +2 -2
  86. package/src/utils/task-history.js +4 -1
@@ -72,11 +72,16 @@ function collectAndScheduleSyncGenerators(projectGraph) {
     // a change imply we need to re-run all the generators
     // make sure to schedule all the collected generators
     scheduledGenerators.clear();
-    if (!registeredSyncGenerators.size) {
+    if (!registeredSyncGenerators.globalGenerators.size &&
+        !registeredSyncGenerators.taskGenerators.size) {
         // there are no generators to run
         return;
     }
-    for (const generator of registeredSyncGenerators) {
+    const uniqueSyncGenerators = new Set([
+        ...registeredSyncGenerators.globalGenerators,
+        ...registeredSyncGenerators.taskGenerators,
+    ]);
+    for (const generator of uniqueSyncGenerators) {
         scheduledGenerators.add(generator);
     }
     log('scheduling:', [...scheduledGenerators]);
@@ -102,7 +107,7 @@ function collectAndScheduleSyncGenerators(projectGraph) {
 }
 async function getCachedRegisteredSyncGenerators() {
     log('get registered sync generators');
-    if (!registeredSyncGenerators) {
+    if (!registeredGlobalSyncGenerators && !registeredTaskSyncGenerators) {
         log('no registered sync generators, collecting them');
         const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
         collectAllRegisteredSyncGenerators(projectGraph);
@@ -110,7 +115,10 @@ async function getCachedRegisteredSyncGenerators() {
     else {
         log('registered sync generators already collected, returning them');
     }
-    return [...registeredSyncGenerators];
+    return {
+        globalGenerators: [...registeredGlobalSyncGenerators],
+        taskGenerators: [...registeredTaskSyncGenerators],
+    };
 }
 async function getFromCacheOrRunGenerators(generators) {
     let projects;
@@ -296,23 +304,33 @@ function collectAllRegisteredSyncGenerators(projectGraph) {
     else {
         log('nx.json hash is the same, not collecting global sync generators');
     }
-    const generators = new Set([
-        ...registeredTaskSyncGenerators,
-        ...registeredGlobalSyncGenerators,
-    ]);
     if (!registeredSyncGenerators) {
-        registeredSyncGenerators = generators;
+        registeredSyncGenerators = {
+            globalGenerators: registeredGlobalSyncGenerators,
+            taskGenerators: registeredTaskSyncGenerators,
+        };
         return;
     }
-    for (const generator of registeredSyncGenerators) {
-        if (!generators.has(generator)) {
-            registeredSyncGenerators.delete(generator);
+    for (const generator of registeredSyncGenerators.globalGenerators) {
+        if (!registeredGlobalSyncGenerators.has(generator)) {
+            registeredSyncGenerators.globalGenerators.delete(generator);
             syncGeneratorsCacheResultPromises.delete(generator);
         }
     }
-    for (const generator of generators) {
-        if (!registeredSyncGenerators.has(generator)) {
-            registeredSyncGenerators.add(generator);
+    for (const generator of registeredSyncGenerators.taskGenerators) {
+        if (!registeredTaskSyncGenerators.has(generator)) {
+            registeredSyncGenerators.taskGenerators.delete(generator);
+            syncGeneratorsCacheResultPromises.delete(generator);
+        }
+    }
+    for (const generator of registeredGlobalSyncGenerators) {
+        if (!registeredSyncGenerators.globalGenerators.has(generator)) {
+            registeredSyncGenerators.globalGenerators.add(generator);
+        }
+    }
+    for (const generator of registeredTaskSyncGenerators) {
+        if (!registeredSyncGenerators.taskGenerators.has(generator)) {
+            registeredSyncGenerators.taskGenerators.add(generator);
         }
     }
 }
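The hunks above appear to correspond to src/daemon/server/sync-generators.js (entry 48 in the file list). The single `registeredSyncGenerators` set is replaced by an object that tracks global and task-level sync generators separately, and `getCachedRegisteredSyncGenerators` now returns `{ globalGenerators, taskGenerators }` instead of a flat array. A minimal sketch of the new shape, with placeholder generator names (the real sets are module-level daemon state populated elsewhere):

```js
// Sketch only: illustrates the registry shape implied by the hunks above.
const registeredGlobalSyncGenerators = new Set(['global-gen']);
const registeredTaskSyncGenerators = new Set(['task-gen', 'global-gen']);

const registeredSyncGenerators = {
  globalGenerators: registeredGlobalSyncGenerators,
  taskGenerators: registeredTaskSyncGenerators,
};

// Callers that used to iterate a single set now de-duplicate across both:
const uniqueSyncGenerators = new Set([
  ...registeredSyncGenerators.globalGenerators,
  ...registeredSyncGenerators.taskGenerators,
]);
console.log([...uniqueSyncGenerators]); // ['global-gen', 'task-gen']
```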
@@ -11,8 +11,7 @@ exports.removeSocketDir = removeSocketDir;
  * location within the OS's tmp directory where we write log files for background processes
  * and where we create the actual unix socket/named pipe for the daemon.
  */
-const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const cache_directory_1 = require("../utils/cache-directory");
 const crypto_1 = require("crypto");
@@ -26,15 +25,15 @@ const getDaemonSocketDir = () => (0, path_1.join)(getSocketDir(),
 exports.getDaemonSocketDir = getDaemonSocketDir;
 function writeDaemonLogs(error) {
     const file = (0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'daemon-error.log');
-    (0, fs_1.writeFileSync)(file, error);
+    (0, node_fs_1.writeFileSync)(file, error);
     return file;
 }
 function markDaemonAsDisabled() {
-    (0, fs_1.writeFileSync)((0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'disabled'), 'true');
+    (0, node_fs_1.writeFileSync)((0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'disabled'), 'true');
 }
 function isDaemonDisabled() {
     try {
-        (0, fs_1.statSync)((0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'disabled'));
+        (0, node_fs_1.statSync)((0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'disabled'));
         return true;
     }
     catch (e) {
@@ -56,7 +55,7 @@ function getSocketDir(alreadyUnique = false) {
         const dir = process.env.NX_SOCKET_DIR ??
             process.env.NX_DAEMON_SOCKET_DIR ??
             (alreadyUnique ? tmp_1.tmpdir : socketDirName());
-        (0, fs_extra_1.ensureDirSync)(dir);
+        (0, node_fs_1.mkdirSync)(dir, { recursive: true });
         return dir;
     }
     catch (e) {
@@ -65,7 +64,7 @@ function getSocketDir(alreadyUnique = false) {
 }
 function removeSocketDir() {
     try {
-        (0, fs_extra_1.rmSync)(getSocketDir(), { recursive: true, force: true });
+        (0, node_fs_1.rmSync)(getSocketDir(), { recursive: true, force: true });
     }
     catch (e) { }
 }
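These hunks appear to come from src/daemon/tmp-dir.js (entry 49), and the same fs-extra to node:fs migration shows up in tree.js, nx-deps-cache.js, the task cache, and several other files below. The substitutions follow one consistent mapping; a small runnable sketch of the equivalences, using a throwaway temp directory rather than any path Nx actually uses:

```js
// Sketch of the fs-extra -> node:fs substitutions applied throughout this release.
const { mkdirSync, rmSync, writeFileSync } = require('node:fs');
const { join } = require('node:path');
const { tmpdir } = require('node:os');

const dir = join(tmpdir(), 'fs-extra-migration-example');
mkdirSync(dir, { recursive: true });           // was fs-extra's ensureDirSync(dir)
writeFileSync(join(dir, 'disabled'), 'true');  // writeFileSync: only the import changed
rmSync(dir, { recursive: true, force: true }); // was fs-extra's removeSync / rmSync
```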
@@ -231,6 +231,7 @@ function nodeProcess(commandConfig, cwd, env, readyWhenStatus, streamOutput = tr
         maxBuffer: exports.LARGE_BUFFER,
         env,
         cwd,
+        windowsHide: true,
     });
     childProcesses.add(childProcess);
     childProcess.stdout.on('data', (data) => {
@@ -35,6 +35,7 @@ function nodeProcess(command, cwd, env) {
         stdio: ['inherit', 'inherit', 'inherit'],
         cwd,
         env,
+        windowsHide: true,
     });
 }
 async function ptyProcess(command, cwd, env) {
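The two `nodeProcess` hunks above, plus similar one-line changes in git-utils, ab-testing, the task cache, and the nx-cloud generator further down, add `windowsHide: true` to child-process options so spawned commands do not flash a console window on Windows. A minimal sketch of the option with a placeholder command, not one taken from the package:

```js
const { execSync, spawn } = require('node:child_process');

// windowsHide prevents Windows from opening a visible console window for the
// child process; it has no effect on other platforms.
const out = execSync('git --version', { encoding: 'utf-8', windowsHide: true });

const child = spawn('git', ['--version'], { stdio: 'inherit', windowsHide: true });
child.on('exit', () => console.log(out.trim()));
```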
@@ -1,4 +1,4 @@
-import type { Mode } from 'fs';
+import type { Mode } from 'node:fs';
 /**
  * Options to set when writing a file in the Virtual file system tree.
  */
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.FsTree = void 0;
 exports.flushChanges = flushChanges;
 exports.printChanges = printChanges;
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const logger_1 = require("../utils/logger");
 const output_1 = require("../utils/output");
 const path_1 = require("path");
@@ -228,22 +228,22 @@ class FsTree {
     }
     fsReadDir(dirPath) {
         try {
-            return (0, fs_extra_1.readdirSync)((0, path_1.join)(this.root, dirPath));
+            return (0, node_fs_1.readdirSync)((0, path_1.join)(this.root, dirPath));
         }
         catch {
             return [];
         }
     }
     fsIsFile(filePath) {
-        const stat = (0, fs_extra_1.statSync)((0, path_1.join)(this.root, filePath));
+        const stat = (0, node_fs_1.statSync)((0, path_1.join)(this.root, filePath));
         return stat.isFile();
     }
     fsReadFile(filePath) {
-        return (0, fs_extra_1.readFileSync)((0, path_1.join)(this.root, filePath));
+        return (0, node_fs_1.readFileSync)((0, path_1.join)(this.root, filePath));
     }
     fsExists(filePath) {
         try {
-            const stat = (0, fs_extra_1.statSync)((0, path_1.join)(this.root, filePath));
+            const stat = (0, node_fs_1.statSync)((0, path_1.join)(this.root, filePath));
             return stat.isFile() || stat.isDirectory();
         }
         catch {
@@ -279,18 +279,18 @@ function flushChanges(root, fileChanges) {
     fileChanges.forEach((f) => {
         const fpath = (0, path_1.join)(root, f.path);
         if (f.type === 'CREATE') {
-            (0, fs_extra_1.ensureDirSync)((0, path_1.dirname)(fpath));
-            (0, fs_extra_1.writeFileSync)(fpath, f.content);
+            (0, node_fs_1.mkdirSync)((0, path_1.dirname)(fpath), { recursive: true });
+            (0, node_fs_1.writeFileSync)(fpath, f.content);
             if (f.options?.mode)
-                (0, fs_extra_1.chmodSync)(fpath, f.options.mode);
+                (0, node_fs_1.chmodSync)(fpath, f.options.mode);
         }
         else if (f.type === 'UPDATE') {
-            (0, fs_extra_1.writeFileSync)(fpath, f.content);
+            (0, node_fs_1.writeFileSync)(fpath, f.content);
             if (f.options?.mode)
-                (0, fs_extra_1.chmodSync)(fpath, f.options.mode);
+                (0, node_fs_1.chmodSync)(fpath, f.options.mode);
         }
         else if (f.type === 'DELETE') {
-            (0, fs_extra_1.removeSync)(fpath);
+            (0, node_fs_1.rmSync)(fpath, { recursive: true, force: true });
         }
     });
 }
@@ -28,7 +28,7 @@ export declare class ImportResult {
 
 export declare class NxCache {
   cacheDirectory: string
-  constructor(workspaceRoot: string, cachePath: string, dbConnection: ExternalObject<Connection>)
+  constructor(workspaceRoot: string, cachePath: string, dbConnection: ExternalObject<Connection>, linkTaskDetails?: boolean | undefined | null)
   get(hash: string): CachedResult | null
   put(hash: string, terminalOutput: string, outputs: Array<string>, code: number): void
   applyRemoteCacheResults(hash: string, result: CachedResult): void
Binary file changed: package/src/native/nx.wasm32-wasi.wasm (contents not shown).
@@ -32,7 +32,7 @@ function getRootPackageName(tree) {
 }
 function getNxInitDate() {
     try {
-        const nxInitIso = (0, child_process_1.execSync)('git log --diff-filter=A --follow --format=%aI -- nx.json | tail -1', { stdio: 'pipe' })
+        const nxInitIso = (0, child_process_1.execSync)('git log --diff-filter=A --follow --format=%aI -- nx.json | tail -1', { stdio: 'pipe', windowsHide: true })
             .toString()
             .trim();
         const nxInitDate = new Date(nxInitIso);
@@ -2,7 +2,6 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createDependencies = exports.createNodes = exports.name = void 0;
 const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const cache_directory_1 = require("../../utils/cache-directory");
@@ -96,7 +95,7 @@ function lockFileNeedsReprocessing(lockHash) {
     }
 }
 function writeLastProcessedLockfileHash(hash, lockFile) {
-    (0, fs_extra_1.ensureDirSync)((0, path_1.dirname)(lockFileHashFile));
+    (0, fs_1.mkdirSync)((0, path_1.dirname)(lockFileHashFile), { recursive: true });
     (0, fs_1.writeFileSync)(cachedParsedLockFile, JSON.stringify(lockFile, null, 2));
     (0, fs_1.writeFileSync)(lockFileHashFile, hash);
 }
@@ -91,6 +91,7 @@ function defaultReadFileAtRevision(file, revision) {
         : (0, child_process_1.execSync)(`git show ${revision}:${filePathInGitRepository}`, {
             maxBuffer: exports.TEN_MEGABYTES,
             stdio: ['pipe', 'pipe', 'ignore'],
+            windowsHide: true,
         })
             .toString()
             .trim();
@@ -8,8 +8,7 @@ exports.createProjectFileMapCache = createProjectFileMapCache;
 exports.writeCache = writeCache;
 exports.shouldRecomputeWholeGraph = shouldRecomputeWholeGraph;
 exports.extractCachedFileData = extractCachedFileData;
-const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const cache_directory_1 = require("../utils/cache-directory");
@@ -19,8 +18,8 @@ exports.nxProjectGraph = (0, path_1.join)(cache_directory_1.workspaceDataDirecto
 exports.nxFileMap = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'file-map.json');
 function ensureCacheDirectory() {
     try {
-        if (!(0, fs_1.existsSync)(cache_directory_1.workspaceDataDirectory)) {
-            (0, fs_extra_1.ensureDirSync)(cache_directory_1.workspaceDataDirectory);
+        if (!(0, node_fs_1.existsSync)(cache_directory_1.workspaceDataDirectory)) {
+            (0, node_fs_1.mkdirSync)(cache_directory_1.workspaceDataDirectory, { recursive: true });
         }
     }
     catch (e) {
@@ -102,9 +101,9 @@ function writeCache(cache, projectGraph) {
     const tmpFileMapPath = `${exports.nxFileMap}~${unique}`;
     try {
         (0, fileutils_1.writeJsonFile)(tmpProjectGraphPath, projectGraph);
-        (0, fs_extra_1.renameSync)(tmpProjectGraphPath, exports.nxProjectGraph);
+        (0, node_fs_1.renameSync)(tmpProjectGraphPath, exports.nxProjectGraph);
         (0, fileutils_1.writeJsonFile)(tmpFileMapPath, cache);
-        (0, fs_extra_1.renameSync)(tmpFileMapPath, exports.nxFileMap);
+        (0, node_fs_1.renameSync)(tmpFileMapPath, exports.nxFileMap);
         done = true;
     }
     catch (err) {
@@ -1,5 +1,6 @@
 import { DefaultTasksRunnerOptions, RemoteCache } from './default-tasks-runner';
 import { Task } from '../config/task-graph';
+import { NxJsonConfiguration } from '../config/nx-json';
 export type CachedResult = {
     terminalOutput: string;
     outputsPath: string;
@@ -10,7 +11,7 @@ export type TaskWithCachedResult = {
     task: Task;
     cachedResult: CachedResult;
 };
-export declare function getCache(options: DefaultTasksRunnerOptions): DbCache | Cache;
+export declare function getCache(nxJson: NxJsonConfiguration, options: DefaultTasksRunnerOptions): DbCache | Cache;
 export declare class DbCache {
     private readonly options;
     private cache;
@@ -30,6 +31,7 @@ export declare class DbCache {
     private _getRemoteCache;
     private getPowerpackS3Cache;
     private getPowerpackSharedCache;
+    private getPowerpackCache;
     private resolvePackage;
     private assertCacheIsValid;
 }
@@ -3,11 +3,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.Cache = exports.DbCache = void 0;
 exports.getCache = getCache;
 const workspace_root_1 = require("../utils/workspace-root");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const default_tasks_runner_1 = require("./default-tasks-runner");
 const child_process_1 = require("child_process");
+const node_fs_1 = require("node:fs");
+const promises_1 = require("node:fs/promises");
 const cache_directory_1 = require("../utils/cache-directory");
 const node_machine_id_1 = require("node-machine-id");
 const native_1 = require("../native");
@@ -18,9 +19,9 @@ const update_manager_1 = require("../nx-cloud/update-manager");
 const get_cloud_options_1 = require("../nx-cloud/utilities/get-cloud-options");
 const is_ci_1 = require("../utils/is-ci");
 const output_1 = require("../utils/output");
-function getCache(options) {
+function getCache(nxJson, options) {
     return process.env.NX_DISABLE_DB !== 'true' &&
-        process.env.NX_DB_CACHE === 'true'
+        (nxJson.enableDbCache === true || process.env.NX_DB_CACHE === 'true')
         ? new DbCache({
             // Remove this in Nx 21
             nxCloudRemoteCache: (0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())
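`getCache` now receives the workspace nx.json and consults a new `enableDbCache` flag (see also entry 41, src/config/nx-json.d.ts) alongside the existing `NX_DB_CACHE` environment variable. A sketch of the resulting selection logic; the `nxJson` object below is a placeholder for what `readNxJson()` returns:

```js
// Placeholder config; in Nx this comes from readNxJson().
const nxJson = { enableDbCache: true };

// The DB-backed cache is chosen unless NX_DISABLE_DB=true, and is enabled
// either by nx.json's enableDbCache or by the NX_DB_CACHE env var.
const useDbCache =
  process.env.NX_DISABLE_DB !== 'true' &&
  (nxJson.enableDbCache === true || process.env.NX_DB_CACHE === 'true');

console.log(useDbCache ? 'DbCache (database-backed)' : 'Cache (file-system)');
```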
@@ -114,23 +115,21 @@ class DbCache {
                 null);
         }
     }
-    async getPowerpackS3Cache() {
-        try {
-            const { getRemoteCache } = await Promise.resolve(`${this.resolvePackage('@nx/powerpack-s3-cache')}`).then(s => require(s));
-            return getRemoteCache();
-        }
-        catch {
-            return null;
-        }
+    getPowerpackS3Cache() {
+        return this.getPowerpackCache('@nx/powerpack-s3-cache');
+    }
+    getPowerpackSharedCache() {
+        return this.getPowerpackCache('@nx/powerpack-shared-fs-cache');
     }
-    async getPowerpackSharedCache() {
+    async getPowerpackCache(pkg) {
+        let getRemoteCache = null;
         try {
-            const { getRemoteCache } = await Promise.resolve(`${this.resolvePackage('@nx/powerpack-shared-fs-cache')}`).then(s => require(s));
-            return getRemoteCache();
+            getRemoteCache = (await Promise.resolve(`${this.resolvePackage(pkg)}`).then(s => require(s))).getRemoteCache;
         }
         catch {
             return null;
         }
+        return getRemoteCache();
     }
     resolvePackage(pkg) {
         return require.resolve(pkg, {
@@ -183,6 +182,7 @@ class Cache {
             stdio: 'ignore',
             detached: true,
             shell: false,
+            windowsHide: true,
         });
         p.unref();
     }
@@ -234,13 +234,13 @@ class Cache {
             // might be left overs from partially-completed cache invocations
             await this.remove(tdCommit);
             await this.remove(td);
-            await (0, fs_extra_1.mkdir)(td);
-            await (0, fs_extra_1.writeFile)((0, path_1.join)(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
-            await (0, fs_extra_1.mkdir)((0, path_1.join)(td, 'outputs'));
+            await (0, promises_1.mkdir)(td, { recursive: true });
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
+            await (0, promises_1.mkdir)((0, path_1.join)(td, 'outputs'));
             const expandedOutputs = await this.expandOutputsInWorkspace(outputs);
             await Promise.all(expandedOutputs.map(async (f) => {
                 const src = (0, path_1.join)(this.root, f);
-                if (await (0, fs_extra_1.pathExists)(src)) {
+                if ((0, node_fs_1.existsSync)(src)) {
                     const cached = (0, path_1.join)(td, 'outputs', f);
                     await this.copy(src, cached);
                 }
@@ -249,15 +249,15 @@ class Cache {
             // creating this file is atomic, whereas creating a folder is not.
             // so if the process gets terminated while we are copying stuff into cache,
             // the cache entry won't be used.
-            await (0, fs_extra_1.writeFile)((0, path_1.join)(td, 'code'), code.toString());
-            await (0, fs_extra_1.writeFile)((0, path_1.join)(td, 'source'), await this.currentMachineId());
-            await (0, fs_extra_1.writeFile)(tdCommit, 'true');
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'code'), code.toString());
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'source'), await this.currentMachineId());
+            await (0, promises_1.writeFile)(tdCommit, 'true');
             if (this.options.remoteCache) {
                 await this.options.remoteCache.store(task.hash, this.cachePath);
             }
             if (terminalOutput) {
                 const outputPath = this.temporaryOutputPath(task);
-                await (0, fs_extra_1.writeFile)(outputPath, terminalOutput);
+                await (0, promises_1.writeFile)(outputPath, terminalOutput);
             }
         });
     }
@@ -266,7 +266,7 @@ class Cache {
         const expandedOutputs = await this.expandOutputsInCache(outputs, cachedResult);
         await Promise.all(expandedOutputs.map(async (f) => {
             const cached = (0, path_1.join)(cachedResult.outputsPath, f);
-            if (await (0, fs_extra_1.pathExists)(cached)) {
+            if ((0, node_fs_1.existsSync)(cached)) {
                 const src = (0, path_1.join)(this.root, f);
                 await this.remove(src);
                 await this.copy(cached, src);
@@ -322,11 +322,11 @@ class Cache {
     async getFromLocalDir(task) {
         const tdCommit = (0, path_1.join)(this.cachePath, `${task.hash}.commit`);
         const td = (0, path_1.join)(this.cachePath, task.hash);
-        if (await (0, fs_extra_1.pathExists)(tdCommit)) {
-            const terminalOutput = await (0, fs_extra_1.readFile)((0, path_1.join)(td, 'terminalOutput'), 'utf-8');
+        if ((0, node_fs_1.existsSync)(tdCommit)) {
+            const terminalOutput = await (0, promises_1.readFile)((0, path_1.join)(td, 'terminalOutput'), 'utf-8');
             let code = 0;
             try {
-                code = Number(await (0, fs_extra_1.readFile)((0, path_1.join)(td, 'code'), 'utf-8'));
+                code = Number(await (0, promises_1.readFile)((0, path_1.join)(td, 'code'), 'utf-8'));
             }
             catch { }
             return {
@@ -343,7 +343,7 @@ class Cache {
         const td = (0, path_1.join)(this.cachePath, task.hash);
         let sourceMachineId = null;
         try {
-            sourceMachineId = await (0, fs_extra_1.readFile)((0, path_1.join)(td, 'source'), 'utf-8');
+            sourceMachineId = await (0, promises_1.readFile)((0, path_1.join)(td, 'source'), 'utf-8');
         }
         catch { }
         if (sourceMachineId && sourceMachineId != (await this.currentMachineId())) {
@@ -362,12 +362,12 @@ class Cache {
         }
     }
     createCacheDir() {
-        (0, fs_extra_1.mkdirSync)(cache_directory_1.cacheDir, { recursive: true });
+        (0, node_fs_1.mkdirSync)(cache_directory_1.cacheDir, { recursive: true });
        return cache_directory_1.cacheDir;
     }
     createTerminalOutputsDir() {
         const path = (0, path_1.join)(this.cachePath, 'terminalOutputs');
-        (0, fs_extra_1.mkdirSync)(path, { recursive: true });
+        (0, node_fs_1.mkdirSync)(path, { recursive: true });
         return path;
     }
 }
@@ -56,7 +56,7 @@ const defaultTasksRunner = async (tasks, options, context) => {
 };
 exports.defaultTasksRunner = defaultTasksRunner;
 async function runAllTasks(tasks, options, context) {
-    const orchestrator = new task_orchestrator_1.TaskOrchestrator(context.hasher, context.initiatingProject, context.projectGraph, context.taskGraph, options, context.nxArgs?.nxBail, context.daemon, context.nxArgs?.outputStyle);
+    const orchestrator = new task_orchestrator_1.TaskOrchestrator(context.hasher, context.initiatingProject, context.projectGraph, context.taskGraph, context.nxJson, options, context.nxArgs?.nxBail, context.daemon, context.nxArgs?.outputStyle);
     return orchestrator.run();
 }
 exports.default = exports.defaultTasksRunner;
@@ -1,3 +1,3 @@
 import { Task } from '../../config/task-graph';
 export declare function formatFlags(leftPadding: string, flag: string, value: any): string;
-export declare function formatTargetsAndProjects(projectNames: string[], targets: string[], tasks: Task[]): any;
+export declare function formatTargetsAndProjects(projectNames: string[], targets: string[], tasks: Task[]): string;
@@ -20,24 +20,36 @@ function formatValue(value) {
     }
 }
 function formatTargetsAndProjects(projectNames, targets, tasks) {
-    if (tasks.length === 1)
-        return `target ${targets[0]} for project ${projectNames[0]}`;
-    let text;
-    const project = projectNames.length === 1
-        ? `project ${projectNames[0]}`
-        : `${projectNames.length} projects`;
+    let targetsText = '';
+    let projectsText = '';
+    let dependentTasksText = '';
+    const tasksTargets = new Set();
+    const tasksProjects = new Set();
+    const dependentTasks = new Set();
+    tasks.forEach((task) => {
+        tasksTargets.add(task.target.target);
+        tasksProjects.add(task.target.project);
+        if (!projectNames.includes(task.target.project) ||
+            !targets.includes(task.target.target)) {
+            dependentTasks.add(task);
+        }
+    });
+    targets = targets.filter((t) => tasksTargets.has(t)); // filter out targets that don't exist
+    projectNames = projectNames.filter((p) => tasksProjects.has(p)); // filter out projects that don't exist
     if (targets.length === 1) {
-        text = `target ${output_1.output.bold(targets[0])} for ${project}`;
+        targetsText = `target ${output_1.output.bold(targets[0])}`;
     }
     else {
-        text = `targets ${targets
-            .map((t) => output_1.output.bold(t))
-            .join(', ')} for ${project}`;
+        targetsText = `targets ${targets.map((t) => output_1.output.bold(t)).join(', ')}`;
     }
-    const dependentTasks = tasks.filter((t) => projectNames.indexOf(t.target.project) === -1 ||
-        targets.indexOf(t.target.target) === -1).length;
-    if (dependentTasks > 0) {
-        text += ` and ${output_1.output.bold(dependentTasks)} ${dependentTasks === 1 ? 'task' : 'tasks'} ${projectNames.length === 1 ? 'it depends on' : 'they depend on'}`;
+    if (projectNames.length === 1) {
+        projectsText = `project ${projectNames[0]}`;
     }
-    return text;
+    else {
+        projectsText = `${projectNames.length} projects`;
+    }
+    if (dependentTasks.size > 0) {
+        dependentTasksText = ` and ${output_1.output.bold(dependentTasks.size)} ${dependentTasks.size === 1 ? 'task' : 'tasks'} ${projectNames.length === 1 ? 'it depends on' : 'they depend on'}`;
+    }
+    return `${targetsText} for ${projectsText}${dependentTasksText}`;
 }
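The rewritten `formatTargetsAndProjects` builds its message from the targets and projects that actually have tasks, so stale names are filtered out, and the declared return type tightens from `any` to `string` (see the .d.ts hunk above). A simplified, self-contained re-implementation for illustration only, with hypothetical task data and without the `output.bold` styling used by the real function:

```js
// Simplified sketch of the new message-building logic.
function formatTargetsAndProjects(projectNames, targets, tasks) {
  const tasksTargets = new Set(tasks.map((t) => t.target.target));
  const tasksProjects = new Set(tasks.map((t) => t.target.project));
  const dependentTasks = tasks.filter(
    (t) => !projectNames.includes(t.target.project) || !targets.includes(t.target.target)
  );
  targets = targets.filter((t) => tasksTargets.has(t));
  projectNames = projectNames.filter((p) => tasksProjects.has(p));
  const targetsText =
    targets.length === 1 ? `target ${targets[0]}` : `targets ${targets.join(', ')}`;
  const projectsText =
    projectNames.length === 1 ? `project ${projectNames[0]}` : `${projectNames.length} projects`;
  const dependentTasksText =
    dependentTasks.length > 0
      ? ` and ${dependentTasks.length} ${dependentTasks.length === 1 ? 'task' : 'tasks'} ${
          projectNames.length === 1 ? 'it depends on' : 'they depend on'
        }`
      : '';
  return `${targetsText} for ${projectsText}${dependentTasksText}`;
}

// Hypothetical data:
console.log(
  formatTargetsAndProjects(['my-app'], ['build'], [
    { target: { project: 'my-app', target: 'build' } },
    { target: { project: 'shared-lib', target: 'build' } },
  ])
);
// -> "target build for project my-app and 1 task it depends on"
```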
@@ -31,6 +31,9 @@ class TaskHistoryLifeCycle {
     }
     async endCommand() {
         const entries = Array.from(this.taskRuns);
+        if (!this.taskHistory) {
+            return;
+        }
         await this.taskHistory.recordTaskRuns(entries.map(([_, v]) => v));
         const flakyTasks = await this.taskHistory.getFlakyTasks(entries.map(([hash]) => hash));
         if (flakyTasks.length > 0) {
@@ -1,7 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const WEEK_IN_MS = 1000 * 60 * 60 * 24 * 7;
 const folder = process.argv[2];
@@ -34,11 +33,11 @@ function removeOld(records) {
             if (time - s.mtimeMs > WEEK_IN_MS) {
                 if (s.isDirectory()) {
                     try {
-                        (0, fs_extra_1.removeSync)(`${r}.commit`);
+                        (0, fs_1.rmSync)(`${r}.commit`, { recursive: true, force: true });
                    }
                    catch (e) { }
                }
-                (0, fs_extra_1.removeSync)(r);
+                (0, fs_1.rmSync)(r, { recursive: true, force: true });
            }
        }
        catch (e) { }
@@ -4,11 +4,13 @@ import { TaskStatus } from './tasks-runner';
 import { ProjectGraph } from '../config/project-graph';
 import { TaskGraph } from '../config/task-graph';
 import { DaemonClient } from '../daemon/client/client';
+import { NxJsonConfiguration } from '../config/nx-json';
 export declare class TaskOrchestrator {
     private readonly hasher;
     private readonly initiatingProject;
     private readonly projectGraph;
     private readonly taskGraph;
+    private readonly nxJson;
     private readonly options;
     private readonly bail;
     private readonly daemon;
@@ -24,7 +26,7 @@ export declare class TaskOrchestrator {
     private waitingForTasks;
     private groups;
     private bailed;
-    constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, options: DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient, outputStyle: string);
+    constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, nxJson: NxJsonConfiguration, options: DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient, outputStyle: string);
     run(): Promise<{
         [id: string]: TaskStatus;
     }>;
@@ -17,16 +17,17 @@ const output_1 = require("../utils/output");
 const params_1 = require("../utils/params");
 class TaskOrchestrator {
     // endregion internal state
-    constructor(hasher, initiatingProject, projectGraph, taskGraph, options, bail, daemon, outputStyle) {
+    constructor(hasher, initiatingProject, projectGraph, taskGraph, nxJson, options, bail, daemon, outputStyle) {
         this.hasher = hasher;
         this.initiatingProject = initiatingProject;
         this.projectGraph = projectGraph;
         this.taskGraph = taskGraph;
+        this.nxJson = nxJson;
         this.options = options;
         this.bail = bail;
         this.daemon = daemon;
         this.outputStyle = outputStyle;
-        this.cache = (0, cache_1.getCache)(this.options);
+        this.cache = (0, cache_1.getCache)(this.nxJson, this.options);
         this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
         this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
         // region internal state
@@ -13,7 +13,7 @@ class TasksSchedule {
         this.notScheduledTaskGraph = this.taskGraph;
         this.reverseTaskDeps = (0, utils_1.calculateReverseDeps)(this.taskGraph);
         this.reverseProjectGraph = (0, operators_1.reverse)(this.projectGraph);
-        this.taskHistory = process.env.NX_DISABLE_DB !== 'true' ? (0, task_history_1.getTaskHistory)() : null;
+        this.taskHistory = (0, task_history_1.getTaskHistory)();
         this.scheduledBatches = [];
         this.scheduledTasks = [];
         this.runningTasks = new Set();
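Taken together with the `TaskHistoryLifeCycle.endCommand` guard above, this change moves the "is the DB disabled?" decision out of `TasksSchedule`: `getTaskHistory()` is now called unconditionally and callers null-check the result instead. A small sketch of that defensive pattern, with a hypothetical stand-in for `getTaskHistory` (the real one lives in src/utils/task-history.js and presumably returns null when the DB-backed history is unavailable):

```js
// Hypothetical stand-in, for illustration only.
function getTaskHistory() {
  return process.env.NX_DISABLE_DB === 'true'
    ? null
    : { recordTaskRuns: async (runs) => console.log(`recorded ${runs.length} runs`) };
}

const taskHistory = getTaskHistory();

async function endCommand(entries) {
  if (!taskHistory) {
    return; // same guard as the TaskHistoryLifeCycle hunk above
  }
  await taskHistory.recordTaskRuns(entries);
}

endCommand([{ hash: 'abc123' }]);
```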
@@ -97,7 +97,10 @@ function shouldRecordStats() {
         return true;
     }
     try {
-        const stdout = (0, node_child_process_1.execSync)(pmc.getRegistryUrl, { encoding: 'utf-8' });
+        const stdout = (0, node_child_process_1.execSync)(pmc.getRegistryUrl, {
+            encoding: 'utf-8',
+            windowsHide: true,
+        });
         const url = new URL(stdout.trim());
         // don't record stats when testing locally
         return url.hostname !== 'localhost';