nx 20.0.0-canary.20240924-3e1a879 → 20.0.0-canary.20240926-529ab94

Files changed (81)
  1. package/.eslintrc.json +11 -2
  2. package/bin/nx.js +10 -2
  3. package/package.json +12 -13
  4. package/schemas/nx-schema.json +26 -21
  5. package/src/adapter/compat.d.ts +1 -1
  6. package/src/adapter/compat.js +1 -0
  7. package/src/command-line/activate-powerpack/activate-powerpack.js +3 -1
  8. package/src/command-line/add/add.js +4 -2
  9. package/src/command-line/connect/view-logs.js +1 -0
  10. package/src/command-line/exec/exec.js +6 -1
  11. package/src/command-line/format/format.js +3 -1
  12. package/src/command-line/graph/graph.js +10 -9
  13. package/src/command-line/init/implementation/add-nx-to-nest.js +5 -5
  14. package/src/command-line/init/implementation/angular/integrated-workspace.js +4 -1
  15. package/src/command-line/init/implementation/angular/legacy-angular-versions.js +5 -2
  16. package/src/command-line/init/implementation/dot-nx/add-nx-scripts.js +3 -1
  17. package/src/command-line/init/implementation/dot-nx/nxw.js +1 -0
  18. package/src/command-line/init/implementation/react/check-for-uncommitted-changes.js +3 -1
  19. package/src/command-line/init/implementation/react/clean-up-files.js +7 -7
  20. package/src/command-line/init/implementation/react/index.js +36 -17
  21. package/src/command-line/init/implementation/react/rename-js-to-jsx.js +3 -3
  22. package/src/command-line/init/implementation/utils.js +5 -1
  23. package/src/command-line/init/init-v1.js +1 -0
  24. package/src/command-line/init/init-v2.js +1 -0
  25. package/src/command-line/migrate/command-object.js +4 -0
  26. package/src/command-line/migrate/migrate.js +1 -1
  27. package/src/command-line/release/changelog.js +1 -2
  28. package/src/command-line/release/config/version-plans.js +9 -8
  29. package/src/command-line/release/plan.js +6 -5
  30. package/src/command-line/release/release.js +2 -2
  31. package/src/command-line/release/utils/exec-command.js +1 -0
  32. package/src/command-line/release/utils/github.js +1 -0
  33. package/src/command-line/release/utils/launch-editor.js +6 -1
  34. package/src/command-line/release/version.js +6 -3
  35. package/src/command-line/report/report.d.ts +3 -1
  36. package/src/command-line/report/report.js +17 -2
  37. package/src/command-line/reset/reset.js +4 -4
  38. package/src/command-line/run/run.js +1 -0
  39. package/src/command-line/sync/sync.js +5 -4
  40. package/src/command-line/watch/watch.js +1 -0
  41. package/src/config/nx-json.d.ts +4 -0
  42. package/src/core/graph/main.js +1 -1
  43. package/src/core/graph/styles.css +1 -1
  44. package/src/daemon/cache.d.ts +1 -2
  45. package/src/daemon/cache.js +12 -21
  46. package/src/daemon/client/client.d.ts +4 -1
  47. package/src/daemon/client/client.js +9 -8
  48. package/src/daemon/client/generate-help-output.js +1 -0
  49. package/src/daemon/server/sync-generators.d.ts +4 -1
  50. package/src/daemon/server/sync-generators.js +33 -15
  51. package/src/daemon/tmp-dir.js +6 -7
  52. package/src/executors/run-commands/run-commands.impl.js +1 -0
  53. package/src/executors/run-script/run-script.impl.js +1 -0
  54. package/src/generators/tree.d.ts +1 -1
  55. package/src/generators/tree.js +11 -11
  56. package/src/native/nx.wasm32-wasi.wasm +0 -0
  57. package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +1 -1
  58. package/src/plugins/js/index.js +1 -2
  59. package/src/project-graph/file-utils.js +1 -0
  60. package/src/project-graph/nx-deps-cache.js +5 -6
  61. package/src/tasks-runner/cache.d.ts +3 -1
  62. package/src/tasks-runner/cache.js +29 -29
  63. package/src/tasks-runner/default-tasks-runner.js +1 -1
  64. package/src/tasks-runner/remove-old-cache-records.js +2 -3
  65. package/src/tasks-runner/task-orchestrator.d.ts +3 -1
  66. package/src/tasks-runner/task-orchestrator.js +3 -2
  67. package/src/utils/ab-testing.js +4 -1
  68. package/src/utils/child-process.js +5 -3
  69. package/src/utils/command-line-utils.js +7 -1
  70. package/src/utils/default-base.js +5 -2
  71. package/src/utils/fileutils.d.ts +9 -1
  72. package/src/utils/fileutils.js +29 -12
  73. package/src/utils/git-utils.index-filter.js +2 -1
  74. package/src/utils/git-utils.js +4 -0
  75. package/src/utils/git-utils.tree-filter.js +3 -1
  76. package/src/utils/ignore.js +2 -2
  77. package/src/utils/package-manager.js +2 -2
  78. package/src/utils/powerpack.d.ts +1 -1
  79. package/src/utils/powerpack.js +3 -8
  80. package/src/utils/sync-generators.d.ts +13 -3
  81. package/src/utils/sync-generators.js +99 -25

package/src/tasks-runner/cache.d.ts
@@ -1,5 +1,6 @@
  import { DefaultTasksRunnerOptions, RemoteCache } from './default-tasks-runner';
  import { Task } from '../config/task-graph';
+ import { NxJsonConfiguration } from '../config/nx-json';
  export type CachedResult = {
  terminalOutput: string;
  outputsPath: string;
@@ -10,7 +11,7 @@ export type TaskWithCachedResult = {
  task: Task;
  cachedResult: CachedResult;
  };
- export declare function getCache(options: DefaultTasksRunnerOptions): DbCache | Cache;
+ export declare function getCache(nxJson: NxJsonConfiguration, options: DefaultTasksRunnerOptions): DbCache | Cache;
  export declare class DbCache {
  private readonly options;
  private cache;
@@ -30,6 +31,7 @@ export declare class DbCache {
  private _getRemoteCache;
  private getPowerpackS3Cache;
  private getPowerpackSharedCache;
+ private getPowerpackCache;
  private resolvePackage;
  private assertCacheIsValid;
  }

package/src/tasks-runner/cache.js
@@ -3,11 +3,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.Cache = exports.DbCache = void 0;
  exports.getCache = getCache;
  const workspace_root_1 = require("../utils/workspace-root");
- const fs_extra_1 = require("fs-extra");
  const path_1 = require("path");
  const perf_hooks_1 = require("perf_hooks");
  const default_tasks_runner_1 = require("./default-tasks-runner");
  const child_process_1 = require("child_process");
+ const node_fs_1 = require("node:fs");
+ const promises_1 = require("node:fs/promises");
  const cache_directory_1 = require("../utils/cache-directory");
  const node_machine_id_1 = require("node-machine-id");
  const native_1 = require("../native");
@@ -18,9 +19,9 @@ const update_manager_1 = require("../nx-cloud/update-manager");
  const get_cloud_options_1 = require("../nx-cloud/utilities/get-cloud-options");
  const is_ci_1 = require("../utils/is-ci");
  const output_1 = require("../utils/output");
- function getCache(options) {
+ function getCache(nxJson, options) {
  return process.env.NX_DISABLE_DB !== 'true' &&
- process.env.NX_DB_CACHE === 'true'
+ (nxJson.enableDbCache === true || process.env.NX_DB_CACHE === 'true')
  ? new DbCache({
  // Remove this in Nx 21
  nxCloudRemoteCache: (0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())
@@ -114,23 +115,21 @@ class DbCache {
  null);
  }
  }
- async getPowerpackS3Cache() {
- try {
- const { getRemoteCache } = await Promise.resolve(`${this.resolvePackage('@nx/powerpack-s3-cache')}`).then(s => require(s));
- return getRemoteCache();
- }
- catch {
- return null;
- }
+ getPowerpackS3Cache() {
+ return this.getPowerpackCache('@nx/powerpack-s3-cache');
+ }
+ getPowerpackSharedCache() {
+ return this.getPowerpackCache('@nx/powerpack-shared-fs-cache');
  }
- async getPowerpackSharedCache() {
+ async getPowerpackCache(pkg) {
+ let getRemoteCache = null;
  try {
- const { getRemoteCache } = await Promise.resolve(`${this.resolvePackage('@nx/powerpack-shared-fs-cache')}`).then(s => require(s));
- return getRemoteCache();
+ getRemoteCache = (await Promise.resolve(`${this.resolvePackage(pkg)}`).then(s => require(s))).getRemoteCache;
  }
  catch {
  return null;
  }
+ return getRemoteCache();
  }
  resolvePackage(pkg) {
  return require.resolve(pkg, {
@@ -183,6 +182,7 @@ class Cache {
  stdio: 'ignore',
  detached: true,
  shell: false,
+ windowsHide: true,
  });
  p.unref();
  }
@@ -234,13 +234,13 @@ class Cache {
  // might be left overs from partially-completed cache invocations
  await this.remove(tdCommit);
  await this.remove(td);
- await (0, fs_extra_1.mkdir)(td);
- await (0, fs_extra_1.writeFile)((0, path_1.join)(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
- await (0, fs_extra_1.mkdir)((0, path_1.join)(td, 'outputs'));
+ await (0, promises_1.mkdir)(td, { recursive: true });
+ await (0, promises_1.writeFile)((0, path_1.join)(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
+ await (0, promises_1.mkdir)((0, path_1.join)(td, 'outputs'));
  const expandedOutputs = await this.expandOutputsInWorkspace(outputs);
  await Promise.all(expandedOutputs.map(async (f) => {
  const src = (0, path_1.join)(this.root, f);
- if (await (0, fs_extra_1.pathExists)(src)) {
+ if ((0, node_fs_1.existsSync)(src)) {
  const cached = (0, path_1.join)(td, 'outputs', f);
  await this.copy(src, cached);
  }
@@ -249,15 +249,15 @@
  // creating this file is atomic, whereas creating a folder is not.
  // so if the process gets terminated while we are copying stuff into cache,
  // the cache entry won't be used.
- await (0, fs_extra_1.writeFile)((0, path_1.join)(td, 'code'), code.toString());
- await (0, fs_extra_1.writeFile)((0, path_1.join)(td, 'source'), await this.currentMachineId());
- await (0, fs_extra_1.writeFile)(tdCommit, 'true');
+ await (0, promises_1.writeFile)((0, path_1.join)(td, 'code'), code.toString());
+ await (0, promises_1.writeFile)((0, path_1.join)(td, 'source'), await this.currentMachineId());
+ await (0, promises_1.writeFile)(tdCommit, 'true');
  if (this.options.remoteCache) {
  await this.options.remoteCache.store(task.hash, this.cachePath);
  }
  if (terminalOutput) {
  const outputPath = this.temporaryOutputPath(task);
- await (0, fs_extra_1.writeFile)(outputPath, terminalOutput);
+ await (0, promises_1.writeFile)(outputPath, terminalOutput);
  }
  });
  }
@@ -266,7 +266,7 @@
  const expandedOutputs = await this.expandOutputsInCache(outputs, cachedResult);
  await Promise.all(expandedOutputs.map(async (f) => {
  const cached = (0, path_1.join)(cachedResult.outputsPath, f);
- if (await (0, fs_extra_1.pathExists)(cached)) {
+ if ((0, node_fs_1.existsSync)(cached)) {
  const src = (0, path_1.join)(this.root, f);
  await this.remove(src);
  await this.copy(cached, src);
@@ -322,11 +322,11 @@
  async getFromLocalDir(task) {
  const tdCommit = (0, path_1.join)(this.cachePath, `${task.hash}.commit`);
  const td = (0, path_1.join)(this.cachePath, task.hash);
- if (await (0, fs_extra_1.pathExists)(tdCommit)) {
- const terminalOutput = await (0, fs_extra_1.readFile)((0, path_1.join)(td, 'terminalOutput'), 'utf-8');
+ if ((0, node_fs_1.existsSync)(tdCommit)) {
+ const terminalOutput = await (0, promises_1.readFile)((0, path_1.join)(td, 'terminalOutput'), 'utf-8');
  let code = 0;
  try {
- code = Number(await (0, fs_extra_1.readFile)((0, path_1.join)(td, 'code'), 'utf-8'));
+ code = Number(await (0, promises_1.readFile)((0, path_1.join)(td, 'code'), 'utf-8'));
  }
  catch { }
  return {
@@ -343,7 +343,7 @@
  const td = (0, path_1.join)(this.cachePath, task.hash);
  let sourceMachineId = null;
  try {
- sourceMachineId = await (0, fs_extra_1.readFile)((0, path_1.join)(td, 'source'), 'utf-8');
+ sourceMachineId = await (0, promises_1.readFile)((0, path_1.join)(td, 'source'), 'utf-8');
  }
  catch { }
  if (sourceMachineId && sourceMachineId != (await this.currentMachineId())) {
@@ -362,12 +362,12 @@
  }
  }
  createCacheDir() {
- (0, fs_extra_1.mkdirSync)(cache_directory_1.cacheDir, { recursive: true });
+ (0, node_fs_1.mkdirSync)(cache_directory_1.cacheDir, { recursive: true });
  return cache_directory_1.cacheDir;
  }
  createTerminalOutputsDir() {
  const path = (0, path_1.join)(this.cachePath, 'terminalOutputs');
- (0, fs_extra_1.mkdirSync)(path, { recursive: true });
+ (0, node_fs_1.mkdirSync)(path, { recursive: true });
  return path;
  }
  }
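
Note on the getCache change above: the DB-backed cache can now be switched on from nx.json (the condition reads a top-level enableDbCache flag) as well as via NX_DB_CACHE. A minimal sketch of a call site, assuming the illustrative deep-import paths below (they are not a documented public API):

// nx.json: { "enableDbCache": true }
const { readNxJson } = require('nx/src/config/nx-json');   // illustrative path
const { getCache } = require('nx/src/tasks-runner/cache'); // illustrative path

const runnerOptions = {}; // DefaultTasksRunnerOptions in real usage
// Returns a DbCache when enableDbCache is true (or NX_DB_CACHE === 'true')
// and NX_DISABLE_DB is not 'true'; otherwise the legacy Cache.
const cache = getCache(readNxJson(), runnerOptions);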

package/src/tasks-runner/default-tasks-runner.js
@@ -56,7 +56,7 @@ const defaultTasksRunner = async (tasks, options, context) => {
  };
  exports.defaultTasksRunner = defaultTasksRunner;
  async function runAllTasks(tasks, options, context) {
- const orchestrator = new task_orchestrator_1.TaskOrchestrator(context.hasher, context.initiatingProject, context.projectGraph, context.taskGraph, options, context.nxArgs?.nxBail, context.daemon, context.nxArgs?.outputStyle);
+ const orchestrator = new task_orchestrator_1.TaskOrchestrator(context.hasher, context.initiatingProject, context.projectGraph, context.taskGraph, context.nxJson, options, context.nxArgs?.nxBail, context.daemon, context.nxArgs?.outputStyle);
  return orchestrator.run();
  }
  exports.default = exports.defaultTasksRunner;

package/src/tasks-runner/remove-old-cache-records.js
@@ -1,7 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  const fs_1 = require("fs");
- const fs_extra_1 = require("fs-extra");
  const path_1 = require("path");
  const WEEK_IN_MS = 1000 * 60 * 60 * 24 * 7;
  const folder = process.argv[2];
@@ -34,11 +33,11 @@ function removeOld(records) {
  if (time - s.mtimeMs > WEEK_IN_MS) {
  if (s.isDirectory()) {
  try {
- (0, fs_extra_1.removeSync)(`${r}.commit`);
+ (0, fs_1.rmSync)(`${r}.commit`, { recursive: true, force: true });
  }
  catch (e) { }
  }
- (0, fs_extra_1.removeSync)(r);
+ (0, fs_1.rmSync)(r, { recursive: true, force: true });
  }
  }
  catch (e) { }
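
The fs-extra dependency is dropped here; Node's built-in rmSync with { recursive: true, force: true } matches removeSync's behaviour (recursive delete, no error when the path is missing). Sketch with an illustrative path:

const { rmSync } = require('fs');
// recursive delete that is a no-op when the entry does not exist
rmSync('/tmp/nx-cache-entry.commit', { recursive: true, force: true });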

package/src/tasks-runner/task-orchestrator.d.ts
@@ -4,11 +4,13 @@ import { TaskStatus } from './tasks-runner';
  import { ProjectGraph } from '../config/project-graph';
  import { TaskGraph } from '../config/task-graph';
  import { DaemonClient } from '../daemon/client/client';
+ import { NxJsonConfiguration } from '../config/nx-json';
  export declare class TaskOrchestrator {
  private readonly hasher;
  private readonly initiatingProject;
  private readonly projectGraph;
  private readonly taskGraph;
+ private readonly nxJson;
  private readonly options;
  private readonly bail;
  private readonly daemon;
@@ -24,7 +26,7 @@ export declare class TaskOrchestrator {
  private waitingForTasks;
  private groups;
  private bailed;
- constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, options: DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient, outputStyle: string);
+ constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, nxJson: NxJsonConfiguration, options: DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient, outputStyle: string);
  run(): Promise<{
  [id: string]: TaskStatus;
  }>;

package/src/tasks-runner/task-orchestrator.js
@@ -17,16 +17,17 @@ const output_1 = require("../utils/output");
  const params_1 = require("../utils/params");
  class TaskOrchestrator {
  // endregion internal state
- constructor(hasher, initiatingProject, projectGraph, taskGraph, options, bail, daemon, outputStyle) {
+ constructor(hasher, initiatingProject, projectGraph, taskGraph, nxJson, options, bail, daemon, outputStyle) {
  this.hasher = hasher;
  this.initiatingProject = initiatingProject;
  this.projectGraph = projectGraph;
  this.taskGraph = taskGraph;
+ this.nxJson = nxJson;
  this.options = options;
  this.bail = bail;
  this.daemon = daemon;
  this.outputStyle = outputStyle;
- this.cache = (0, cache_1.getCache)(this.options);
+ this.cache = (0, cache_1.getCache)(this.nxJson, this.options);
  this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
  this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
  // region internal state

package/src/utils/ab-testing.js
@@ -97,7 +97,10 @@ function shouldRecordStats() {
  return true;
  }
  try {
- const stdout = (0, node_child_process_1.execSync)(pmc.getRegistryUrl, { encoding: 'utf-8' });
+ const stdout = (0, node_child_process_1.execSync)(pmc.getRegistryUrl, {
+ encoding: 'utf-8',
+ windowsHide: true,
+ });
  const url = new URL(stdout.trim());
  // don't record stats when testing locally
  return url.hostname !== 'localhost';
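
Many hunks in this diff add windowsHide: true to child_process calls; the option hides the console window a spawned process would otherwise flash on Windows and has no effect on other platforms. A small sketch (the command stands in for pmc.getRegistryUrl):

const { execSync } = require('node:child_process');
const stdout = execSync('npm config get registry', {
  encoding: 'utf-8',
  windowsHide: true, // no extra console window on Windows; ignored elsewhere
});
console.log(stdout.trim());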

package/src/utils/child-process.js
@@ -16,6 +16,7 @@ function runNxSync(cmd, options) {
  else {
  options ??= {};
  options.cwd ??= process.cwd();
+ options.windowsHide ??= true;
  const offsetFromRoot = (0, path_1.relative)(options.cwd, (0, workspace_root_1.workspaceRootInner)(options.cwd, null));
  if (process.platform === 'win32') {
  baseCmd = '.\\' + (0, path_1.join)(`${offsetFromRoot}`, 'nx.bat');
@@ -34,6 +35,7 @@ async function runNxAsync(cmd, options) {
  else {
  options ??= {};
  options.cwd ??= process.cwd();
+ options.windowsHide ??= true;
  const offsetFromRoot = (0, path_1.relative)(options.cwd, (0, workspace_root_1.workspaceRootInner)(options.cwd, null));
  if (process.platform === 'win32') {
  baseCmd = '.\\' + (0, path_1.join)(`${offsetFromRoot}`, 'nx.bat');
@@ -46,13 +48,13 @@ async function runNxAsync(cmd, options) {
  if (options?.silent) {
  delete options.silent;
  }
- await new Promise((resolve, reject) => {
+ return new Promise((resolve, reject) => {
  const child = (0, child_process_1.exec)(`${baseCmd} ${cmd}`, options, (error, stdout, stderr) => {
  if (error) {
- reject(error);
+ reject(stderr || stdout || error.message);
  }
  else {
- resolve(stdout);
+ resolve();
  }
  });
  if (!silent) {
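
Behavioural note on runNxAsync above: it now resolves with no value instead of the captured stdout, and rejects with a string (stderr, then stdout, then error.message) rather than the Error object. A usage sketch, with an illustrative command and import path:

const { runNxAsync } = require('nx/src/utils/child-process'); // illustrative path

async function buildApp() {
  try {
    await runNxAsync('build my-app', { silent: true }); // resolves with undefined
  } catch (message) {
    console.error(message); // rejection value is a string, not an Error
  }
}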

package/src/utils/command-line-utils.js
@@ -222,6 +222,7 @@ function getMergeBase(base, head = 'HEAD') {
  maxBuffer: file_utils_1.TEN_MEGABYTES,
  cwd: workspace_root_1.workspaceRoot,
  stdio: 'pipe',
+ windowsHide: true,
  })
  .toString()
  .trim();
@@ -232,6 +233,7 @@ function getMergeBase(base, head = 'HEAD') {
  maxBuffer: file_utils_1.TEN_MEGABYTES,
  cwd: workspace_root_1.workspaceRoot,
  stdio: 'pipe',
+ windowsHide: true,
  })
  .toString()
  .trim();
@@ -245,7 +247,11 @@ function getFilesUsingBaseAndHead(base, head) {
  return parseGitOutput(`git diff --name-only --no-renames --relative "${base}" "${head}"`);
  }
  function parseGitOutput(command) {
- return (0, child_process_1.execSync)(command, { maxBuffer: file_utils_1.TEN_MEGABYTES, cwd: workspace_root_1.workspaceRoot })
+ return (0, child_process_1.execSync)(command, {
+ maxBuffer: file_utils_1.TEN_MEGABYTES,
+ cwd: workspace_root_1.workspaceRoot,
+ windowsHide: true,
+ })
  .toString('utf-8')
  .split('\n')
  .map((a) => a.trim())

package/src/utils/default-base.js
@@ -5,8 +5,11 @@ const child_process_1 = require("child_process");
  function deduceDefaultBase() {
  const nxDefaultBase = 'main';
  try {
- return ((0, child_process_1.execSync)('git config --get init.defaultBranch').toString().trim() ||
- nxDefaultBase);
+ return ((0, child_process_1.execSync)('git config --get init.defaultBranch', {
+ windowsHide: true,
+ })
+ .toString()
+ .trim() || nxDefaultBase);
  }
  catch {
  return nxDefaultBase;

package/src/utils/fileutils.d.ts
@@ -1,5 +1,5 @@
  import type { JsonParseOptions, JsonSerializeOptions } from './json';
- import { PathLike } from 'fs';
+ import { PathLike } from 'node:fs';
  export interface JsonReadOptions extends JsonParseOptions {
  /**
  * mutable field recording whether JSON ends with new line
@@ -43,6 +43,14 @@ export declare function readYamlFile<T extends object = any>(path: string, optio
  * @param options JSON serialize options
  */
  export declare function writeJsonFile<T extends object = object>(path: string, data: T, options?: JsonWriteOptions): void;
+ /**
+ * Serializes the given data to JSON and writes it to a file asynchronously.
+ *
+ * @param path A path to a file.
+ * @param data data which should be serialized to JSON and written to the file
+ * @param options JSON serialize options
+ */
+ export declare function writeJsonFileAsync<T extends object = object>(path: string, data: T, options?: JsonWriteOptions): Promise<void>;
  /**
  * Check if a directory exists
  * @param path Path to directory

package/src/utils/fileutils.js
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.readJsonFile = readJsonFile;
  exports.readYamlFile = readYamlFile;
  exports.writeJsonFile = writeJsonFile;
+ exports.writeJsonFileAsync = writeJsonFileAsync;
  exports.directoryExists = directoryExists;
  exports.fileExists = fileExists;
  exports.createDirectory = createDirectory;
@@ -10,7 +11,8 @@ exports.isRelativePath = isRelativePath;
  exports.extractFileFromTarball = extractFileFromTarball;
  exports.readFileIfExisting = readFileIfExisting;
  const json_1 = require("./json");
- const fs_1 = require("fs");
+ const node_fs_1 = require("node:fs");
+ const promises_1 = require("node:fs/promises");
  const path_1 = require("path");
  const tar = require("tar-stream");
  const zlib_1 = require("zlib");
@@ -22,7 +24,7 @@ const zlib_1 = require("zlib");
  * @returns Object the JSON content of the file represents
  */
  function readJsonFile(path, options) {
- const content = (0, fs_1.readFileSync)(path, 'utf-8');
+ const content = (0, node_fs_1.readFileSync)(path, 'utf-8');
  if (options) {
  options.endsWithNewline = content.charCodeAt(content.length - 1) === 10;
  }
@@ -41,7 +43,7 @@ function readJsonFile(path, options) {
  * @returns
  */
  function readYamlFile(path, options) {
- const content = (0, fs_1.readFileSync)(path, 'utf-8');
+ const content = (0, node_fs_1.readFileSync)(path, 'utf-8');
  const { load } = require('@zkochan/js-yaml');
  return load(content, { ...options, filename: path });
  }
@@ -53,12 +55,27 @@ function readYamlFile(path, options) {
  * @param options JSON serialize options
  */
  function writeJsonFile(path, data, options) {
- (0, fs_1.mkdirSync)((0, path_1.dirname)(path), { recursive: true });
+ (0, node_fs_1.mkdirSync)((0, path_1.dirname)(path), { recursive: true });
  const serializedJson = (0, json_1.serializeJson)(data, options);
  const content = options?.appendNewLine
  ? `${serializedJson}\n`
  : serializedJson;
- (0, fs_1.writeFileSync)(path, content, { encoding: 'utf-8' });
+ (0, node_fs_1.writeFileSync)(path, content, { encoding: 'utf-8' });
+ }
+ /**
+ * Serializes the given data to JSON and writes it to a file asynchronously.
+ *
+ * @param path A path to a file.
+ * @param data data which should be serialized to JSON and written to the file
+ * @param options JSON serialize options
+ */
+ async function writeJsonFileAsync(path, data, options) {
+ await (0, promises_1.mkdir)((0, path_1.dirname)(path), { recursive: true });
+ const serializedJson = (0, json_1.serializeJson)(data, options);
+ const content = options?.appendNewLine
+ ? `${serializedJson}\n`
+ : serializedJson;
+ await (0, promises_1.writeFile)(path, content, { encoding: 'utf-8' });
  }
  /**
  * Check if a directory exists
@@ -66,7 +83,7 @@ function writeJsonFile(path, data, options) {
  */
  function directoryExists(path) {
  try {
- return (0, fs_1.statSync)(path).isDirectory();
+ return (0, node_fs_1.statSync)(path).isDirectory();
  }
  catch {
  return false;
@@ -78,14 +95,14 @@ function directoryExists(path) {
  */
  function fileExists(path) {
  try {
- return (0, fs_1.statSync)(path).isFile();
+ return (0, node_fs_1.statSync)(path).isFile();
  }
  catch {
  return false;
  }
  }
  function createDirectory(path) {
- (0, fs_1.mkdirSync)(path, { recursive: true });
+ (0, node_fs_1.mkdirSync)(path, { recursive: true });
  }
  function isRelativePath(path) {
  return (path === '.' ||
@@ -102,9 +119,9 @@ function isRelativePath(path) {
  */
  async function extractFileFromTarball(tarballPath, file, destinationFilePath) {
  return new Promise((resolve, reject) => {
- (0, fs_1.mkdirSync)((0, path_1.dirname)(destinationFilePath), { recursive: true });
+ (0, node_fs_1.mkdirSync)((0, path_1.dirname)(destinationFilePath), { recursive: true });
  var tarExtractStream = tar.extract();
- const destinationFileStream = (0, fs_1.createWriteStream)(destinationFilePath);
+ const destinationFileStream = (0, node_fs_1.createWriteStream)(destinationFilePath);
  let isFileExtracted = false;
  tarExtractStream.on('entry', function (header, stream, next) {
  if (header.name === file) {
@@ -126,9 +143,9 @@ async function extractFileFromTarball(tarballPath, file, destinationFilePath) {
  reject();
  }
  });
- (0, fs_1.createReadStream)(tarballPath).pipe((0, zlib_1.createGunzip)()).pipe(tarExtractStream);
+ (0, node_fs_1.createReadStream)(tarballPath).pipe((0, zlib_1.createGunzip)()).pipe(tarExtractStream);
  });
  }
  function readFileIfExisting(path) {
- return (0, fs_1.existsSync)(path) ? (0, fs_1.readFileSync)(path, 'utf-8') : '';
+ return (0, node_fs_1.existsSync)(path) ? (0, node_fs_1.readFileSync)(path, 'utf-8') : '';
  }
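
A usage sketch for the new writeJsonFileAsync (the deep-import path and file name are illustrative):

const { writeJsonFileAsync } = require('nx/src/utils/fileutils'); // illustrative path

async function saveExample() {
  // creates missing parent directories, then writes the serialized JSON;
  // appendNewLine adds a trailing newline, matching the sync variant
  await writeJsonFileAsync('tmp/example.json', { hello: 'world' }, { appendNewLine: true });
}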

package/src/utils/git-utils.index-filter.js
@@ -9,9 +9,10 @@ try {
  const { execSync } = require('child_process');
  // NOTE: Using env vars because Windows PowerShell has its own handling of quotes (") messes up quotes in args, even if escaped.
  const src = process.env.NX_IMPORT_SOURCE;
- execSync('git read-tree --empty', { stdio: 'inherit' });
+ execSync('git read-tree --empty', { stdio: 'inherit', windowsHide: true });
  execSync(`git reset ${process.env.GIT_COMMIT} -- "${src}"`, {
  stdio: 'inherit',
+ windowsHide: true,
  });
  }
  catch (error) {

package/src/utils/git-utils.js
@@ -36,6 +36,7 @@ class GitRepository {
  getGitRootPath(cwd) {
  return (0, child_process_1.execSync)('git rev-parse --show-toplevel', {
  cwd,
+ windowsHide: true,
  })
  .toString()
  .trim();
@@ -176,6 +177,7 @@ function getGithubSlugOrNull() {
  try {
  const gitRemote = (0, child_process_1.execSync)('git remote -v', {
  stdio: 'pipe',
+ windowsHide: true,
  }).toString();
  // If there are no remotes, we default to github
  if (!gitRemote || gitRemote.length === 0) {
@@ -226,6 +228,7 @@ function commitChanges(commitMessage, directory) {
  stdio: 'pipe',
  input: commitMessage,
  cwd: directory,
+ windowsHide: true,
  });
  }
  catch (err) {
@@ -247,6 +250,7 @@ function getLatestCommitSha() {
  return (0, child_process_1.execSync)('git rev-parse HEAD', {
  encoding: 'utf8',
  stdio: 'pipe',
+ windowsHide: true,
  }).trim();
  }
  catch {

package/src/utils/git-utils.tree-filter.js
@@ -14,7 +14,9 @@ try {
  // NOTE: Using env vars because Windows PowerShell has its own handling of quotes (") messes up quotes in args, even if escaped.
  const src = process.env.NX_IMPORT_SOURCE;
  const dest = process.env.NX_IMPORT_DESTINATION;
- const files = execSync(`git ls-files -z ${src}`)
+ const files = execSync(`git ls-files -z ${src}`, {
+ windowsHide: true,
+ })
  .toString()
  .trim()
  .split('\x00')

package/src/utils/ignore.js
@@ -4,7 +4,7 @@ exports.ALWAYS_IGNORE = void 0;
  exports.getIgnoredGlobs = getIgnoredGlobs;
  exports.getAlwaysIgnore = getAlwaysIgnore;
  exports.getIgnoreObject = getIgnoreObject;
- const fs_extra_1 = require("fs-extra");
+ const node_fs_1 = require("node:fs");
  const ignore_1 = require("ignore");
  const fileutils_1 = require("./fileutils");
  const path_1 = require("./path");
@@ -48,7 +48,7 @@ function getIgnoreObject(root = workspace_root_1.workspaceRoot) {
  function getIgnoredGlobsFromFile(file, root) {
  try {
  const results = [];
- const contents = (0, fs_extra_1.readFileSync)(file, 'utf-8');
+ const contents = (0, node_fs_1.readFileSync)(file, 'utf-8');
  const lines = contents.split('\n');
  for (const line of lines) {
  const trimmed = line.trim();

package/src/utils/package-manager.js
@@ -15,7 +15,7 @@ exports.packageRegistryView = packageRegistryView;
  exports.packageRegistryPack = packageRegistryPack;
  const child_process_1 = require("child_process");
  const fs_1 = require("fs");
- const fs_extra_1 = require("fs-extra");
+ const promises_1 = require("node:fs/promises");
  const path_1 = require("path");
  const semver_1 = require("semver");
  const tmp_1 = require("tmp");
@@ -301,7 +301,7 @@ function createTempNpmDirectory() {
  copyPackageManagerConfigurationFiles(workspace_root_1.workspaceRoot, dir);
  const cleanup = async () => {
  try {
- await (0, fs_extra_1.remove)(dir);
+ await (0, promises_1.rm)(dir, { recursive: true, force: true });
  }
  catch {
  // It's okay if this fails, the OS will clean it up eventually

package/src/utils/powerpack.d.ts
@@ -1,5 +1,5 @@
  export declare function printPowerpackLicense(): Promise<void>;
- export declare function getPowerpackLicenseInformation(): Promise<any>;
+ export declare function getPowerpackLicenseInformation(): Promise<import("@nx/powerpack-license").PowerpackLicense>;
  export declare class NxPowerpackNotInstalledError extends Error {
  constructor(e: Error);
  }

package/src/utils/powerpack.js
@@ -9,22 +9,17 @@ const workspace_root_1 = require("./workspace-root");
  async function printPowerpackLicense() {
  try {
  const { organizationName, seatCount, workspaceCount } = await getPowerpackLicenseInformation();
- logger_1.logger.log(`Nx Powerpack Licensed to ${organizationName} for ${seatCount} user${seatCount > 1 ? '' : 's'} in ${workspaceCount} workspace${workspaceCount > 1 ? '' : 's'}`);
+ logger_1.logger.log(`Nx Powerpack Licensed to ${organizationName} for ${seatCount} user${seatCount > 1 ? '' : 's'} in ${workspaceCount === 9999 ? 'an unlimited number of' : workspaceCount} workspace${workspaceCount > 1 ? '' : 's'}`);
  }
  catch { }
  }
  async function getPowerpackLicenseInformation() {
  try {
- const { getPowerpackLicenseInformation } = (await Promise.resolve().then(() => require(
- // @ts-ignore
- '@nx/powerpack-license'
- // TODO(@FrozenPandaz): Provide the right type here.
- )));
- // )) as typeof import('@nx/powerpack-license');
+ const { getPowerpackLicenseInformation } = (await Promise.resolve().then(() => require('@nx/powerpack-license')));
  return getPowerpackLicenseInformation(workspace_root_1.workspaceRoot);
  }
  catch (e) {
- if ('code' in e && e.code === 'ERR_MODULE_NOT_FOUND') {
+ if ('code' in e && e.code === 'MODULE_NOT_FOUND') {
  throw new NxPowerpackNotInstalledError(e);
  }
  throw e;
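
The error-code check changes because this is compiled CommonJS: a failed require() reports code 'MODULE_NOT_FOUND', while 'ERR_MODULE_NOT_FOUND' is what a dynamic import() of an ES module would throw. Sketch:

try {
  require('@nx/powerpack-license');
} catch (e) {
  if (e.code === 'MODULE_NOT_FOUND') {
    // package not installed; surfaced above as NxPowerpackNotInstalledError
  }
}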

package/src/utils/sync-generators.d.ts
@@ -18,6 +18,8 @@ export type SyncGeneratorRunSuccessResult = {
  type SerializableSimpleError = {
  message: string;
  stack: string | undefined;
+ title?: string;
+ bodyLines?: string[];
  };
  export type SyncGeneratorRunErrorResult = {
  generatorName: string;
@@ -36,16 +38,24 @@ type FlushSyncGeneratorChangesFailure = {
  generalFailure?: SerializableSimpleError;
  };
  export type FlushSyncGeneratorChangesResult = FlushSyncGeneratorChangesSuccess | FlushSyncGeneratorChangesFailure;
+ export declare class SyncError extends Error {
+ title: string;
+ bodyLines?: string[];
+ constructor(title: string, bodyLines?: string[]);
+ }
  export declare function getSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorRunResult[]>;
  export declare function flushSyncGeneratorChanges(results: SyncGeneratorRunResult[]): Promise<FlushSyncGeneratorChangesResult>;
- export declare function collectAllRegisteredSyncGenerators(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Promise<string[]>;
+ export declare function collectAllRegisteredSyncGenerators(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Promise<{
+ globalGenerators: string[];
+ taskGenerators: string[];
+ }>;
  export declare function runSyncGenerator(tree: Tree, generatorSpecifier: string, projects: Record<string, ProjectConfiguration>): Promise<SyncGeneratorRunResult>;
  export declare function collectEnabledTaskSyncGeneratorsFromProjectGraph(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Set<string>;
  export declare function collectEnabledTaskSyncGeneratorsFromTaskGraph(taskGraph: TaskGraph, projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Set<string>;
  export declare function collectRegisteredGlobalSyncGenerators(nxJson?: NxJsonConfiguration<string[] | "*">): Set<string>;
  export declare function getSyncGeneratorSuccessResultsMessageLines(results: SyncGeneratorRunResult[]): string[];
- export declare function getFailedSyncGeneratorsFixMessageLines(results: SyncGeneratorRunResult[], verbose: boolean): string[];
- export declare function getFlushFailureMessageLines(result: FlushSyncGeneratorChangesFailure, verbose: boolean): string[];
+ export declare function getFailedSyncGeneratorsFixMessageLines(results: SyncGeneratorRunResult[], verbose: boolean, globalGeneratorSet?: Set<string>): string[];
+ export declare function getFlushFailureMessageLines(result: FlushSyncGeneratorChangesFailure, verbose: boolean, globalGeneratorSet?: Set<string>): string[];
  export declare function processSyncGeneratorResultErrors(results: SyncGeneratorRunResult[]): {
  failedGeneratorsCount: number;
  areAllResultsFailures: boolean;
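
collectAllRegisteredSyncGenerators now returns global and task generators separately instead of a flat string[]; callers destructure the result, and the global set can feed the new optional globalGeneratorSet parameter of the message helpers. A sketch (the import path is illustrative):

const { collectAllRegisteredSyncGenerators } = require('nx/src/utils/sync-generators'); // illustrative path

async function collectGenerators(projectGraph, nxJson) {
  const { globalGenerators, taskGenerators } = await collectAllRegisteredSyncGenerators(projectGraph, nxJson);
  const globalGeneratorSet = new Set(globalGenerators);
  // globalGeneratorSet can be passed as the optional third argument of
  // getFailedSyncGeneratorsFixMessageLines / getFlushFailureMessageLines
  return { globalGeneratorSet, taskGenerators };
}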