nx 20.0.0-beta.3 → 20.0.0-beta.4
This diff shows the published contents of the two package versions as they appear in their respective public registries and is provided for informational purposes only.
- package/package.json +12 -13
- package/src/command-line/import/utils/prepare-source-repo.js +8 -3
- package/src/command-line/reset/reset.js +16 -9
- package/src/generators/utils/project-configuration.js +2 -1
- package/src/hasher/hash-task.d.ts +4 -2
- package/src/hasher/hash-task.js +6 -9
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/tasks-runner/forked-process-task-runner.js +16 -5
- package/src/tasks-runner/run-command.js +3 -1
- package/src/tasks-runner/task-orchestrator.d.ts +1 -0
- package/src/tasks-runner/task-orchestrator.js +6 -2
- package/src/tasks-runner/tasks-schedule.d.ts +1 -0
- package/src/tasks-runner/tasks-schedule.js +6 -2
- package/src/utils/package-json.d.ts +1 -0
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "nx",
-  "version": "20.0.0-beta.3",
+  "version": "20.0.0-beta.4",
   "private": false,
   "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
   "repository": {
@@ -58,7 +58,6 @@
     "open": "^8.4.0",
     "semver": "^7.5.3",
     "string-width": "^4.2.3",
-    "strong-log-transformer": "^2.1.0",
     "tar-stream": "~2.2.0",
     "tmp": "~0.2.1",
     "tsconfig-paths": "^4.1.2",
@@ -67,7 +66,7 @@
     "yargs-parser": "21.1.1",
     "node-machine-id": "1.1.12",
     "ora": "5.3.0",
-    "@nrwl/tao": "20.0.0-beta.3"
+    "@nrwl/tao": "20.0.0-beta.4"
   },
   "peerDependencies": {
     "@swc-node/register": "^1.8.0",
@@ -82,16 +81,16 @@
     }
   },
   "optionalDependencies": {
-    "@nx/nx-darwin-x64": "20.0.0-beta.3",
-    "@nx/nx-darwin-arm64": "20.0.0-beta.3",
-    "@nx/nx-linux-x64-gnu": "20.0.0-beta.3",
-    "@nx/nx-linux-x64-musl": "20.0.0-beta.3",
-    "@nx/nx-win32-x64-msvc": "20.0.0-beta.3",
-    "@nx/nx-linux-arm64-gnu": "20.0.0-beta.3",
-    "@nx/nx-linux-arm64-musl": "20.0.0-beta.3",
-    "@nx/nx-linux-arm-gnueabihf": "20.0.0-beta.3",
-    "@nx/nx-win32-arm64-msvc": "20.0.0-beta.3",
-    "@nx/nx-freebsd-x64": "20.0.0-beta.3"
+    "@nx/nx-darwin-x64": "20.0.0-beta.4",
+    "@nx/nx-darwin-arm64": "20.0.0-beta.4",
+    "@nx/nx-linux-x64-gnu": "20.0.0-beta.4",
+    "@nx/nx-linux-x64-musl": "20.0.0-beta.4",
+    "@nx/nx-win32-x64-msvc": "20.0.0-beta.4",
+    "@nx/nx-linux-arm64-gnu": "20.0.0-beta.4",
+    "@nx/nx-linux-arm64-musl": "20.0.0-beta.4",
+    "@nx/nx-linux-arm-gnueabihf": "20.0.0-beta.4",
+    "@nx/nx-win32-arm64-msvc": "20.0.0-beta.4",
+    "@nx/nx-freebsd-x64": "20.0.0-beta.4"
   },
   "nx-migrations": {
     "migrations": "./migrations.json",
package/src/command-line/import/utils/prepare-source-repo.js
CHANGED
@@ -6,15 +6,20 @@ const path_1 = require("path");
 async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl) {
     const spinner = createSpinner().start(`Fetching ${ref} from ${sourceRemoteUrl}`);
     const relativeSourceDir = (0, path_1.relative)(gitClient.root, (0, path_1.join)(gitClient.root, source));
+    const message = relativeSourceDir.trim()
+        ? `Filtering git history to only include files in ${relativeSourceDir}`
+        : `Filtering git history`;
     if (await gitClient.hasFilterRepoInstalled()) {
-        spinner.start(
+        spinner.start(message);
         await gitClient.filterRepo(relativeSourceDir, relativeDestination);
     }
     else {
-        spinner.start(
+        spinner.start(`${message} (this might take a few minutes -- install git-filter-repo for faster performance)`);
         await gitClient.filterBranch(relativeSourceDir, relativeDestination, tempImportBranch);
     }
-    spinner.succeed(
+    spinner.succeed(relativeSourceDir.trim()
+        ? `Filtered git history to only include files in ${relativeSourceDir}`
+        : `Filtered git history`);
     spinner.succeed(`${sourceRemoteUrl} has been prepared to be imported into this workspace on a temporary branch: ${tempImportBranch} in ${gitClient.root}`);
 }
 function wait(ms) {
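The new spinner text is selected from `relativeSourceDir`, so importing an entire source repository and importing a subdirectory produce different messages. A minimal standalone sketch (not part of the package) of that selection, with hypothetical paths:

// Sketch only: mirrors the message selection in prepareSourceRepo above.
const { relative, join } = require('path');

function filterMessage(root, source) {
    const relativeSourceDir = relative(root, join(root, source));
    return relativeSourceDir.trim()
        ? `Filtering git history to only include files in ${relativeSourceDir}`
        : `Filtering git history`;
}

console.log(filterMessage('/repo', '.'));        // Filtering git history
console.log(filterMessage('/repo', 'packages')); // Filtering git history to only include files in packages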
package/src/command-line/reset/reset.js
CHANGED
@@ -37,16 +37,16 @@ async function resetHandler(args) {
         try {
             await killDaemon();
         }
-        catch {
-            errors.push('Failed to stop the Nx Daemon.');
+        catch (e) {
+            errors.push('Failed to stop the Nx Daemon.', e.toString());
         }
     }
     if (all || args.onlyCache) {
         try {
             await cleanupCacheEntries();
         }
-        catch {
-            errors.push('Failed to clean up the cache directory.');
+        catch (e) {
+            errors.push('Failed to clean up the cache directory.', e.toString());
         }
     }
     if (all || args.onlyWorkspaceData) {
@@ -59,12 +59,17 @@ async function resetHandler(args) {
         try {
             await cleanupWorkspaceData();
         }
-        catch {
-            errors.push('Failed to clean up the workspace data directory.');
+        catch (e) {
+            errors.push('Failed to clean up the workspace data directory.', e.toString());
         }
     }
     if (all || args.onlyCloud) {
-
+        try {
+            await resetCloudClient();
+        }
+        catch (e) {
+            errors.push('Failed to reset the Nx Cloud client.', e.toString());
+        }
     }
     if (errors.length > 0) {
         output_1.output.error({
@@ -79,8 +84,10 @@ async function resetHandler(args) {
         });
     }
 }
-function killDaemon() {
-
+async function killDaemon() {
+    if (client_1.daemonClient.enabled()) {
+        return client_1.daemonClient.stop();
+    }
 }
 async function resetCloudClient() {
     // Remove nx cloud marker files. This helps if the use happens to run `nx-cloud start-ci-run` or
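Each cleanup step now runs in its own try/catch and pushes both a human-readable message and the stringified error, so a failing step no longer hides its cause or aborts the remaining steps. A minimal sketch (not part of the package) of that pattern; `runStep` and the failing step are hypothetical:

// Sketch only: the error-accumulation pattern used by the reset handler above.
async function runStep(errors, label, step) {
    try {
        await step();
    }
    catch (e) {
        errors.push(`Failed to ${label}.`, e.toString());
    }
}

(async () => {
    const errors = [];
    await runStep(errors, 'clean up the cache directory', async () => {
        throw new Error('EACCES: permission denied');
    });
    console.log(errors);
    // [ 'Failed to clean up the cache directory.', 'Error: EACCES: permission denied' ]
})();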
package/src/generators/utils/project-configuration.js
CHANGED
@@ -69,7 +69,8 @@ function updateProjectConfiguration(tree, projectName, projectConfiguration) {
 function updateProjectConfigurationInPackageJson(tree, projectName, projectConfiguration) {
     const packageJsonFile = (0, path_2.joinPathFragments)(projectConfiguration.root, 'package.json');
     const packageJson = (0, json_1.readJson)(tree, packageJsonFile);
-
+    projectConfiguration.name = projectName;
+    if (packageJson.name === projectConfiguration.name) {
         delete projectConfiguration.name;
     }
     if (projectConfiguration.targets &&
package/src/hasher/hash-task.d.ts
CHANGED
@@ -2,5 +2,7 @@ import { Task, TaskGraph } from '../config/task-graph';
 import { TaskHasher } from './task-hasher';
 import { ProjectGraph } from '../config/project-graph';
 import { NxJsonConfiguration } from '../config/nx-json';
-
-export declare function
+import { TaskDetails } from '../native';
+export declare function getTaskDetails(): TaskDetails | null;
+export declare function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher: TaskHasher, projectGraph: ProjectGraph, taskGraph: TaskGraph, nxJson: NxJsonConfiguration, tasksDetails: TaskDetails | null): Promise<void>;
+export declare function hashTask(hasher: TaskHasher, projectGraph: ProjectGraph, taskGraph: TaskGraph, task: Task, env: NodeJS.ProcessEnv, taskDetails: TaskDetails | null): Promise<void>;
package/src/hasher/hash-task.js
CHANGED
@@ -1,5 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getTaskDetails = getTaskDetails;
 exports.hashTasksThatDoNotDependOnOutputsOfOtherTasks = hashTasksThatDoNotDependOnOutputsOfOtherTasks;
 exports.hashTask = hashTask;
 const utils_1 = require("../tasks-runner/utils");
@@ -19,9 +20,8 @@ function getTaskDetails() {
     }
     return taskDetails;
 }
-async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson) {
+async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson, tasksDetails) {
     performance.mark('hashMultipleTasks:start');
-    const taskDetails = getTaskDetails();
     const tasks = Object.values(taskGraph.tasks);
     const tasksWithHashers = await Promise.all(tasks.map(async (task) => {
         const customHasher = (0, utils_1.getCustomHasher)(task, projectGraph);
@@ -42,9 +42,8 @@ async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGrap
         tasksToHash[i].hash = hashes[i].value;
         tasksToHash[i].hashDetails = hashes[i].details;
     }
-
-
-    taskDetails.recordTaskDetails(tasksToHash.map((task) => ({
+    if (tasksDetails?.recordTaskDetails) {
+        tasksDetails.recordTaskDetails(tasksToHash.map((task) => ({
         hash: task.hash,
         project: task.target.project,
         target: task.target.target,
@@ -54,9 +53,8 @@ async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGrap
     performance.mark('hashMultipleTasks:end');
     performance.measure('hashMultipleTasks', 'hashMultipleTasks:start', 'hashMultipleTasks:end');
 }
-async function hashTask(hasher, projectGraph, taskGraph, task, env) {
+async function hashTask(hasher, projectGraph, taskGraph, task, env, taskDetails) {
     performance.mark('hashSingleTask:start');
-    const taskDetails = getTaskDetails();
     const customHasher = (0, utils_1.getCustomHasher)(task, projectGraph);
     const projectsConfigurations = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
     const { value, details } = await (customHasher
@@ -72,8 +70,7 @@ async function hashTask(hasher, projectGraph, taskGraph, task, env) {
         : hasher.hashTask(task, taskGraph, env));
     task.hash = value;
     task.hashDetails = details;
-
-    if (taskDetails) {
+    if (taskDetails?.recordTaskDetails) {
         taskDetails.recordTaskDetails([
             {
                 hash: task.hash,
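Both hashing helpers now receive the TaskDetails instance from their caller instead of resolving it internally, and recording is skipped when it is unavailable (the declaration types it as TaskDetails | null, and the guard is `taskDetails?.recordTaskDetails`). A minimal sketch (not part of the package) of the new calling convention; the require path and surrounding arguments are illustrative only:

// Sketch only: resolve TaskDetails once, then thread it into the hashing helpers,
// mirroring what run-command.js and task-orchestrator.js do below.
const { getTaskDetails, hashTask, hashTasksThatDoNotDependOnOutputsOfOtherTasks } = require('nx/src/hasher/hash-task');

async function hashAll(hasher, projectGraph, taskGraph, nxJson, task, env) {
    const taskDetails = getTaskDetails(); // may be null; helpers then skip recordTaskDetails
    await hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson, taskDetails);
    if (!task.hash) {
        await hashTask(hasher, projectGraph, taskGraph, task, env, taskDetails);
    }
}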
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file
package/src/tasks-runner/forked-process-task-runner.js
CHANGED
@@ -4,7 +4,6 @@ exports.ForkedProcessTaskRunner = void 0;
 const fs_1 = require("fs");
 const child_process_1 = require("child_process");
 const chalk = require("chalk");
-const logTransformer = require("strong-log-transformer");
 const output_1 = require("../utils/output");
 const utils_1 = require("./utils");
 const path_1 = require("path");
@@ -208,16 +207,16 @@ class ForkedProcessTaskRunner {
             const prefixText = `${task.target.project}:`;
             p.stdout
                 .pipe(logClearLineToPrefixTransformer(color.bold(prefixText) + ' '))
-                .pipe(
+                .pipe(addPrefixTransformer(color.bold(prefixText)))
                 .pipe(process.stdout);
             p.stderr
                 .pipe(logClearLineToPrefixTransformer(color(prefixText) + ' '))
-                .pipe(
+                .pipe(addPrefixTransformer(color(prefixText)))
                 .pipe(process.stderr);
         }
         else {
-            p.stdout.pipe(
-            p.stderr.pipe(
+            p.stdout.pipe(addPrefixTransformer()).pipe(process.stdout);
+            p.stderr.pipe(addPrefixTransformer()).pipe(process.stderr);
         }
     }
     let outWithErr = [];
@@ -403,3 +402,15 @@ function logClearLineToPrefixTransformer(prefix) {
         },
     });
 }
+function addPrefixTransformer(prefix) {
+    const newLineSeparator = process.platform.startsWith('win') ? '\r\n' : '\n';
+    return new stream_1.Transform({
+        transform(chunk, _encoding, callback) {
+            const list = chunk.toString().split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g);
+            list
+                .filter(Boolean)
+                .forEach((m) => this.push(prefix ? prefix + ' ' + m + newLineSeparator : m + newLineSeparator));
+            callback();
+        },
+    });
+}
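The strong-log-transformer dependency is replaced by a plain stream.Transform that splits each chunk into lines and re-emits them with an optional prefix. A self-contained sketch of the same function, with a made-up input stream to show the effect (the sample text and prefix are illustrative):

// Sketch only: standalone version of addPrefixTransformer above, plus a usage demo.
const { Transform, Readable } = require('stream');

function addPrefixTransformer(prefix) {
    const newLineSeparator = process.platform.startsWith('win') ? '\r\n' : '\n';
    return new Transform({
        transform(chunk, _encoding, callback) {
            const lines = chunk.toString().split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g);
            lines
                .filter(Boolean)
                .forEach((line) => this.push(prefix ? prefix + ' ' + line + newLineSeparator : line + newLineSeparator));
            callback();
        },
    });
}

// Prefix every line of a child-process-like stream with the project name.
Readable.from(['compiling...\nBuild succeeded.\n'])
    .pipe(addPrefixTransformer('myapp:'))
    .pipe(process.stdout);
// myapp: compiling...
// myapp: Build succeeded.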
package/src/tasks-runner/run-command.js
CHANGED
@@ -363,12 +363,14 @@ function setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles) {
 }
 async function invokeTasksRunner({ tasks, projectGraph, taskGraph, lifeCycle, nxJson, nxArgs, loadDotEnvFiles, initiatingProject, }) {
     setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles);
+    // this needs to be done before we start to run the tasks
+    const taskDetails = (0, hash_task_1.getTaskDetails)();
     const { tasksRunner, runnerOptions } = getRunner(nxArgs, nxJson);
     let hasher = (0, create_task_hasher_1.createTaskHasher)(projectGraph, nxJson, runnerOptions);
     // this is used for two reasons: to fetch all remote cache hits AND
     // to submit everything that is known in advance to Nx Cloud to run in
     // a distributed fashion
-    await (0, hash_task_1.hashTasksThatDoNotDependOnOutputsOfOtherTasks)(hasher, projectGraph, taskGraph, nxJson);
+    await (0, hash_task_1.hashTasksThatDoNotDependOnOutputsOfOtherTasks)(hasher, projectGraph, taskGraph, nxJson, taskDetails);
     const taskResultsLifecycle = new task_results_life_cycle_1.TaskResultsLifeCycle();
     const compositedLifeCycle = new life_cycle_1.CompositeLifeCycle([
         ...constructLifeCycles(lifeCycle),
package/src/tasks-runner/task-orchestrator.js
CHANGED
@@ -27,6 +27,7 @@ class TaskOrchestrator {
         this.bail = bail;
         this.daemon = daemon;
        this.outputStyle = outputStyle;
+        this.taskDetails = (0, hash_task_1.getTaskDetails)();
         this.cache = (0, cache_1.getCache)(this.nxJson, this.options);
         this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
         this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
@@ -93,7 +94,7 @@ class TaskOrchestrator {
         const task = this.taskGraph.tasks[taskId];
         const taskSpecificEnv = (0, task_env_1.getTaskSpecificEnv)(task);
         if (!task.hash) {
-            await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv);
+            await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv, this.taskDetails);
         }
         await this.options.lifeCycle.scheduleTask(task);
         return taskSpecificEnv;
@@ -101,7 +102,7 @@ class TaskOrchestrator {
     async processScheduledBatch(batch) {
         await Promise.all(Object.values(batch.taskGraph.tasks).map(async (task) => {
             if (!task.hash) {
-                await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv);
+                await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv, this.taskDetails);
             }
             await this.options.lifeCycle.scheduleTask(task);
         }));
@@ -321,6 +322,9 @@ class TaskOrchestrator {
             };
         }
         catch (e) {
+            if (process.env.NX_VERBOSE_LOGGING === 'true') {
+                console.error(e);
+            }
             return {
                 code: 1,
             };
package/src/tasks-runner/tasks-schedule.d.ts
CHANGED
@@ -19,6 +19,7 @@ export declare class TasksSchedule {
     private completedTasks;
     private scheduleRequestsExecutionChain;
     private estimatedTaskTimings;
+    private projectDependencies;
     constructor(projectGraph: ProjectGraph, taskGraph: TaskGraph, options: DefaultTasksRunnerOptions);
     init(): Promise<void>;
     scheduleNextTasks(): Promise<void>;
package/src/tasks-runner/tasks-schedule.js
CHANGED
@@ -20,12 +20,16 @@ class TasksSchedule {
         this.completedTasks = new Set();
         this.scheduleRequestsExecutionChain = Promise.resolve();
         this.estimatedTaskTimings = {};
+        this.projectDependencies = {};
     }
     async init() {
         if (this.taskHistory) {
             this.estimatedTaskTimings =
                 await this.taskHistory.getEstimatedTaskTimings(Object.values(this.taskGraph.tasks).map((t) => t.target));
         }
+        for (const project of Object.values(this.taskGraph.tasks).map((t) => t.target.project)) {
+            this.projectDependencies[project] ??= (0, project_graph_utils_1.findAllProjectNodeDependencies)(project, this.reverseProjectGraph).length;
+        }
     }
     async scheduleNextTasks() {
         this.scheduleRequestsExecutionChain =
@@ -90,8 +94,8 @@ class TasksSchedule {
         // Most likely tasks with no dependencies such as test
         const project1 = this.taskGraph.tasks[taskId1].target.project;
         const project2 = this.taskGraph.tasks[taskId2].target.project;
-        const project1NodeDependencies =
-        const project2NodeDependencies =
+        const project1NodeDependencies = this.projectDependencies[project1];
+        const project2NodeDependencies = this.projectDependencies[project2];
         const dependenciesDiff = project2NodeDependencies - project1NodeDependencies;
         if (dependenciesDiff !== 0) {
             return dependenciesDiff;