nx 20.0.0-beta.3 → 20.0.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +12 -14
- package/src/command-line/import/utils/prepare-source-repo.js +8 -3
- package/src/command-line/reset/reset.js +16 -9
- package/src/daemon/server/handle-hash-tasks.js +1 -1
- package/src/executors/run-commands/run-commands.impl.js +15 -22
- package/src/generators/utils/project-configuration.js +2 -1
- package/src/hasher/create-task-hasher.js +1 -1
- package/src/hasher/hash-task.d.ts +4 -2
- package/src/hasher/hash-task.js +6 -9
- package/src/hasher/task-hasher.d.ts +2 -6
- package/src/hasher/task-hasher.js +6 -32
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/tasks-runner/forked-process-task-runner.js +16 -5
- package/src/tasks-runner/run-command.js +3 -1
- package/src/tasks-runner/task-orchestrator.d.ts +1 -0
- package/src/tasks-runner/task-orchestrator.js +6 -2
- package/src/tasks-runner/tasks-schedule.d.ts +1 -0
- package/src/tasks-runner/tasks-schedule.js +6 -2
- package/src/utils/package-json.d.ts +1 -0
- package/src/utils/plugins/output.js +1 -1
- package/src/hasher/node-task-hasher-impl.d.ts +0 -48
- package/src/hasher/node-task-hasher-impl.js +0 -449
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "nx",
-  "version": "20.0.0-beta.3",
+  "version": "20.0.0-beta.5",
   "private": false,
   "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
   "repository": {
@@ -58,7 +58,6 @@
     "open": "^8.4.0",
     "semver": "^7.5.3",
     "string-width": "^4.2.3",
-    "strong-log-transformer": "^2.1.0",
     "tar-stream": "~2.2.0",
     "tmp": "~0.2.1",
     "tsconfig-paths": "^4.1.2",
@@ -66,8 +65,7 @@
     "yargs": "^17.6.2",
     "yargs-parser": "21.1.1",
     "node-machine-id": "1.1.12",
-    "ora": "5.3.0",
-    "@nrwl/tao": "20.0.0-beta.3"
+    "ora": "5.3.0"
   },
   "peerDependencies": {
     "@swc-node/register": "^1.8.0",
@@ -82,16 +80,16 @@
     }
   },
   "optionalDependencies": {
-    "@nx/nx-darwin-x64": "20.0.0-beta.3",
-    "@nx/nx-darwin-arm64": "20.0.0-beta.3",
-    "@nx/nx-linux-x64-gnu": "20.0.0-beta.3",
-    "@nx/nx-linux-x64-musl": "20.0.0-beta.3",
-    "@nx/nx-win32-x64-msvc": "20.0.0-beta.3",
-    "@nx/nx-linux-arm64-gnu": "20.0.0-beta.3",
-    "@nx/nx-linux-arm64-musl": "20.0.0-beta.3",
-    "@nx/nx-linux-arm-gnueabihf": "20.0.0-beta.3",
-    "@nx/nx-win32-arm64-msvc": "20.0.0-beta.3",
-    "@nx/nx-freebsd-x64": "20.0.0-beta.3"
+    "@nx/nx-darwin-x64": "20.0.0-beta.5",
+    "@nx/nx-darwin-arm64": "20.0.0-beta.5",
+    "@nx/nx-linux-x64-gnu": "20.0.0-beta.5",
+    "@nx/nx-linux-x64-musl": "20.0.0-beta.5",
+    "@nx/nx-win32-x64-msvc": "20.0.0-beta.5",
+    "@nx/nx-linux-arm64-gnu": "20.0.0-beta.5",
+    "@nx/nx-linux-arm64-musl": "20.0.0-beta.5",
+    "@nx/nx-linux-arm-gnueabihf": "20.0.0-beta.5",
+    "@nx/nx-win32-arm64-msvc": "20.0.0-beta.5",
+    "@nx/nx-freebsd-x64": "20.0.0-beta.5"
   },
   "nx-migrations": {
     "migrations": "./migrations.json",
package/src/command-line/import/utils/prepare-source-repo.js
CHANGED
@@ -6,15 +6,20 @@ const path_1 = require("path");
 async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl) {
     const spinner = createSpinner().start(`Fetching ${ref} from ${sourceRemoteUrl}`);
     const relativeSourceDir = (0, path_1.relative)(gitClient.root, (0, path_1.join)(gitClient.root, source));
+    const message = relativeSourceDir.trim()
+        ? `Filtering git history to only include files in ${relativeSourceDir}`
+        : `Filtering git history`;
     if (await gitClient.hasFilterRepoInstalled()) {
-        spinner.start(…
+        spinner.start(message);
         await gitClient.filterRepo(relativeSourceDir, relativeDestination);
     }
     else {
-        spinner.start(…
+        spinner.start(`${message} (this might take a few minutes -- install git-filter-repo for faster performance)`);
        await gitClient.filterBranch(relativeSourceDir, relativeDestination, tempImportBranch);
     }
-    spinner.succeed(…
+    spinner.succeed(relativeSourceDir.trim()
+        ? `Filtered git history to only include files in ${relativeSourceDir}`
+        : `Filtered git history`);
     spinner.succeed(`${sourceRemoteUrl} has been prepared to be imported into this workspace on a temporary branch: ${tempImportBranch} in ${gitClient.root}`);
 }
 function wait(ms) {
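Note: the new `message` ternary covers importing from the repository root, where `relativeSourceDir` is empty and the old text would have interpolated an empty path. A minimal standalone sketch of the selection logic (not part of the package):

    const filterMessage = (relativeSourceDir) =>
        relativeSourceDir.trim()
            ? `Filtering git history to only include files in ${relativeSourceDir}`
            : `Filtering git history`;
    filterMessage('packages/my-lib'); // "Filtering git history to only include files in packages/my-lib"
    filterMessage('');                // "Filtering git history"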
package/src/command-line/reset/reset.js
CHANGED
@@ -37,16 +37,16 @@ async function resetHandler(args) {
         try {
             await killDaemon();
         }
-        catch {
-            errors.push('Failed to stop the Nx Daemon.');
+        catch (e) {
+            errors.push('Failed to stop the Nx Daemon.', e.toString());
         }
     }
     if (all || args.onlyCache) {
         try {
             await cleanupCacheEntries();
         }
-        catch {
-            errors.push('Failed to clean up the cache directory.');
+        catch (e) {
+            errors.push('Failed to clean up the cache directory.', e.toString());
         }
     }
     if (all || args.onlyWorkspaceData) {
@@ -59,12 +59,17 @@ async function resetHandler(args) {
         try {
             await cleanupWorkspaceData();
         }
-        catch {
-            errors.push('Failed to clean up the workspace data directory.');
+        catch (e) {
+            errors.push('Failed to clean up the workspace data directory.', e.toString());
         }
     }
     if (all || args.onlyCloud) {
-        await resetCloudClient();
+        try {
+            await resetCloudClient();
+        }
+        catch (e) {
+            errors.push('Failed to reset the Nx Cloud client.', e.toString());
+        }
     }
     if (errors.length > 0) {
         output_1.output.error({
@@ -79,8 +84,10 @@ async function resetHandler(args) {
         });
     }
 }
-function killDaemon() {
-    return client_1.daemonClient.stop();
+async function killDaemon() {
+    if (client_1.daemonClient.enabled()) {
+        return client_1.daemonClient.stop();
+    }
 }
 async function resetCloudClient() {
     // Remove nx cloud marker files. This helps if the use happens to run `nx-cloud start-ci-run` or
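Note: each cleanup step now records the stringified error next to its message, and the Nx Cloud client reset gets its own try/catch, so one failing step neither aborts the remaining steps nor hides its cause. The aggregation pattern, sketched standalone (the `output.error` call stands in for nx's output utility; the title is illustrative):

    const errors = [];
    try {
        await cleanupCacheEntries();
    }
    catch (e) {
        // keep going; everything is reported together at the end
        errors.push('Failed to clean up the cache directory.', e.toString());
    }
    if (errors.length > 0) {
        output.error({ title: 'Failed to reset the workspace.', bodyLines: errors });
    }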
package/src/daemon/server/handle-hash-tasks.js
CHANGED
@@ -25,7 +25,7 @@ async function handleHashTasks(payload) {
     const nxJson = (0, configuration_1.readNxJson)();
     if (projectGraph !== storedProjectGraph) {
         storedProjectGraph = projectGraph;
-        storedHasher = new task_hasher_1.InProcessTaskHasher(…
+        storedHasher = new task_hasher_1.InProcessTaskHasher(projectGraph, nxJson, rustReferences, payload.runnerOptions);
     }
     const response = JSON.stringify(await storedHasher.hashTasks(payload.tasks, payload.taskGraph, payload.env));
     return {
package/src/executors/run-commands/run-commands.impl.js
CHANGED
@@ -16,22 +16,11 @@ let pseudoTerminal;
 const childProcesses = new Set();
 function loadEnvVarsFile(path, env = {}) {
     (0, task_env_1.unloadDotEnvFile)(path, env);
-    const result = (0, task_env_1.loadAndExpandDotEnvFile)(path, env…
+    const result = (0, task_env_1.loadAndExpandDotEnvFile)(path, env);
     if (result.error) {
         throw result.error;
     }
 }
-function loadEnvVars(path, env = {}) {
-    if (path) {
-        loadEnvVarsFile(path, env);
-    }
-    else {
-        try {
-            loadEnvVarsFile('.env', env);
-        }
-        catch { }
-    }
-}
 const propKeys = [
     'command',
     'commands',
@@ -292,20 +281,24 @@ function calculateCwd(cwd, context) {
         return cwd;
     return path.join(context.root, cwd);
 }
-…
-…
-…
+/**
+ * Env variables are processed in the following order:
+ * - env option from executor options
+ * - env file from envFile option if provided
+ * - local env variables
+ */
+function processEnv(color, cwd, envOptionFromExecutor, envFile) {
+    let localEnv = (0, npm_run_path_1.env)({ cwd: cwd ?? process.cwd() });
+    localEnv = {
         ...process.env,
         ...localEnv,
     };
-…
-…
-    loadEnvVars(envFile, res);
+    if (process.env.NX_LOAD_DOT_ENV_FILES !== 'false' && envFile) {
+        loadEnvVarsFile(envFile, localEnv);
     }
-…
-…
-    …
-    ...env,
+    let res = {
+        ...localEnv,
+        ...envOptionFromExecutor,
     };
     // need to override PATH to make sure we are using the local node_modules
     if (localEnv.PATH)
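Note: the refactor folds the old `loadEnvVars` fallback into one guarded call: the env file is loaded only when `envFile` is set and `NX_LOAD_DOT_ENV_FILES` is not 'false', and the executor's `env` option is spread last so it wins. A sketch of the net precedence after the code above (names mirror the new function):

    // Lowest to highest:
    // 1. process.env                 (ambient environment)
    // 2. npm-run-path additions     (local node_modules/.bin prepended to PATH)
    // 3. values loaded from envFile (merged into localEnv when the guard passes)
    // 4. envOptionFromExecutor      (the executor's `env` option always wins)
    const res = { ...localEnv, ...envOptionFromExecutor };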
package/src/generators/utils/project-configuration.js
CHANGED
@@ -69,7 +69,8 @@ function updateProjectConfiguration(tree, projectName, projectConfiguration) {
 function updateProjectConfigurationInPackageJson(tree, projectName, projectConfiguration) {
     const packageJsonFile = (0, path_2.joinPathFragments)(projectConfiguration.root, 'package.json');
     const packageJson = (0, json_1.readJson)(tree, packageJsonFile);
-    …
+    projectConfiguration.name = projectName;
+    if (packageJson.name === projectConfiguration.name) {
         delete projectConfiguration.name;
     }
     if (projectConfiguration.targets &&
package/src/hasher/create-task-hasher.js
CHANGED
@@ -10,6 +10,6 @@ function createTaskHasher(projectGraph, nxJson, runnerOptions) {
     }
     else {
         const { fileMap, allWorkspaceFiles, rustReferences } = (0, build_project_graph_1.getFileMap)();
-        return new task_hasher_1.InProcessTaskHasher(…
+        return new task_hasher_1.InProcessTaskHasher(projectGraph, nxJson, rustReferences, runnerOptions);
     }
 }
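Note: both this call site and the daemon one above now use the slimmer four-argument constructor declared in task-hasher.d.ts below; the project file map and workspace file list are no longer threaded through. A sketch of the new construction (assuming the same imports this file already uses):

    // The hasher now needs only the graph, nx.json, rust references, and options.
    const { rustReferences } = getFileMap();
    const hasher = new InProcessTaskHasher(projectGraph, nxJson, rustReferences, runnerOptions);
    const hashes = await hasher.hashTasks(tasks, taskGraph, process.env); // Promise<Hash[]>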
package/src/hasher/hash-task.d.ts
CHANGED
@@ -2,5 +2,7 @@ import { Task, TaskGraph } from '../config/task-graph';
 import { TaskHasher } from './task-hasher';
 import { ProjectGraph } from '../config/project-graph';
 import { NxJsonConfiguration } from '../config/nx-json';
-export declare function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher: TaskHasher, projectGraph: ProjectGraph, taskGraph: TaskGraph, nxJson: NxJsonConfiguration): Promise<void>;
-export declare function hashTask(hasher: TaskHasher, projectGraph: ProjectGraph, taskGraph: TaskGraph, task: Task, env: NodeJS.ProcessEnv): Promise<void>;
+import { TaskDetails } from '../native';
+export declare function getTaskDetails(): TaskDetails | null;
+export declare function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher: TaskHasher, projectGraph: ProjectGraph, taskGraph: TaskGraph, nxJson: NxJsonConfiguration, tasksDetails: TaskDetails | null): Promise<void>;
+export declare function hashTask(hasher: TaskHasher, projectGraph: ProjectGraph, taskGraph: TaskGraph, task: Task, env: NodeJS.ProcessEnv, taskDetails: TaskDetails | null): Promise<void>;
package/src/hasher/hash-task.js
CHANGED
@@ -1,5 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getTaskDetails = getTaskDetails;
 exports.hashTasksThatDoNotDependOnOutputsOfOtherTasks = hashTasksThatDoNotDependOnOutputsOfOtherTasks;
 exports.hashTask = hashTask;
 const utils_1 = require("../tasks-runner/utils");
@@ -19,9 +20,8 @@ function getTaskDetails() {
     }
     return taskDetails;
 }
-async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson) {
+async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson, tasksDetails) {
     performance.mark('hashMultipleTasks:start');
-    const taskDetails = getTaskDetails();
     const tasks = Object.values(taskGraph.tasks);
     const tasksWithHashers = await Promise.all(tasks.map(async (task) => {
         const customHasher = (0, utils_1.getCustomHasher)(task, projectGraph);
@@ -42,9 +42,8 @@ async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGrap
         tasksToHash[i].hash = hashes[i].value;
         tasksToHash[i].hashDetails = hashes[i].details;
     }
-    …
-    …
-        taskDetails.recordTaskDetails(tasksToHash.map((task) => ({
+    if (tasksDetails?.recordTaskDetails) {
+        tasksDetails.recordTaskDetails(tasksToHash.map((task) => ({
             hash: task.hash,
             project: task.target.project,
             target: task.target.target,
@@ -54,9 +53,8 @@ async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGrap
     performance.mark('hashMultipleTasks:end');
     performance.measure('hashMultipleTasks', 'hashMultipleTasks:start', 'hashMultipleTasks:end');
 }
-async function hashTask(hasher, projectGraph, taskGraph, task, env) {
+async function hashTask(hasher, projectGraph, taskGraph, task, env, taskDetails) {
     performance.mark('hashSingleTask:start');
-    const taskDetails = getTaskDetails();
     const customHasher = (0, utils_1.getCustomHasher)(task, projectGraph);
     const projectsConfigurations = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
     const { value, details } = await (customHasher
@@ -72,8 +70,7 @@ async function hashTask(hasher, projectGraph, taskGraph, task, env) {
         : hasher.hashTask(task, taskGraph, env));
     task.hash = value;
     task.hashDetails = details;
-    …
-    if (taskDetails) {
+    if (taskDetails?.recordTaskDetails) {
         taskDetails.recordTaskDetails([
             {
                 hash: task.hash,
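Note: `getTaskDetails()` is now resolved once by callers and threaded through as a parameter instead of being re-read inside each hashing call, and the `taskDetails?.recordTaskDetails` guard tolerates a null store. Mirroring the call sites in run-command.js and task-orchestrator.js below:

    // Callers resolve task details once, then pass them through (may be null):
    const taskDetails = getTaskDetails();
    await hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson, taskDetails);
    await hashTask(hasher, projectGraph, taskGraph, task, process.env, taskDetails);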
package/src/hasher/task-hasher.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { FileData, ProjectFileMap, ProjectGraph, ProjectGraphProjectNode } from '../config/project-graph';
+import { FileData, ProjectGraph, ProjectGraphProjectNode } from '../config/project-graph';
 import { NxJsonConfiguration } from '../config/nx-json';
 import { Task, TaskGraph } from '../config/task-graph';
 import { DaemonClient } from '../daemon/client/client';
@@ -66,16 +66,12 @@ export declare class DaemonBasedTaskHasher implements TaskHasher {
     hashTask(task: Task, taskGraph?: TaskGraph, env?: NodeJS.ProcessEnv): Promise<Hash>;
 }
 export declare class InProcessTaskHasher implements TaskHasher {
-    private readonly projectFileMap;
-    private readonly allWorkspaceFiles;
     private readonly projectGraph;
     private readonly nxJson;
     private readonly externalRustReferences;
     private readonly options;
-    static version: string;
     private taskHasher;
-    …
-    constructor(projectFileMap: ProjectFileMap, allWorkspaceFiles: FileData[], projectGraph: ProjectGraph, nxJson: NxJsonConfiguration, externalRustReferences: NxWorkspaceFilesExternals | null, options: any);
+    constructor(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration, externalRustReferences: NxWorkspaceFilesExternals | null, options: any);
     hashTasks(tasks: Task[], taskGraph?: TaskGraph, env?: NodeJS.ProcessEnv): Promise<Hash[]>;
     hashTask(task: Task, taskGraph?: TaskGraph, env?: NodeJS.ProcessEnv): Promise<Hash>;
     private createHashDetails;
package/src/hasher/task-hasher.js
CHANGED
@@ -11,7 +11,6 @@ exports.expandSingleProjectInputs = expandSingleProjectInputs;
 exports.expandNamedInput = expandNamedInput;
 exports.filterUsingGlobPatterns = filterUsingGlobPatterns;
 const file_hasher_1 = require("./file-hasher");
-const node_task_hasher_impl_1 = require("./node-task-hasher-impl");
 const minimatch_1 = require("minimatch");
 const native_task_hasher_impl_1 = require("./native-task-hasher-impl");
 const workspace_root_1 = require("../utils/workspace-root");
@@ -29,42 +28,18 @@ class DaemonBasedTaskHasher {
 }
 exports.DaemonBasedTaskHasher = DaemonBasedTaskHasher;
 class InProcessTaskHasher {
-    constructor(projectFileMap, allWorkspaceFiles, projectGraph, nxJson, externalRustReferences, options) {
-        this.projectFileMap = projectFileMap;
-        this.allWorkspaceFiles = allWorkspaceFiles;
+    constructor(projectGraph, nxJson, externalRustReferences, options) {
         this.projectGraph = projectGraph;
         this.nxJson = nxJson;
        this.externalRustReferences = externalRustReferences;
         this.options = options;
-        this.…
-        …
-        …
-            : []).map((r) => ({ runtime: r }));
-        if (process.env.NX_CLOUD_ENCRYPTION_KEY) {
-            legacyRuntimeInputs.push({ env: 'NX_CLOUD_ENCRYPTION_KEY' });
-        }
-        const legacyFilesetInputs = [
-            'nx.json',
-            // ignore files will change the set of inputs to the hasher
-            '.gitignore',
-            '.nxignore',
-        ].map((d) => ({ fileset: `{workspaceRoot}/${d}` }));
-        this.taskHasher = !this.useNativeTaskHasher
-            ? new node_task_hasher_impl_1.NodeTaskHasherImpl(nxJson, legacyRuntimeInputs, legacyFilesetInputs, this.projectFileMap, this.allWorkspaceFiles, this.projectGraph, {
-                selectivelyHashTsConfig: this.options?.selectivelyHashTsConfig ?? false,
-            })
-            : new native_task_hasher_impl_1.NativeTaskHasherImpl(workspace_root_1.workspaceRoot, nxJson, this.projectGraph, this.externalRustReferences, {
-                selectivelyHashTsConfig: this.options?.selectivelyHashTsConfig ?? false,
-            });
+        this.taskHasher = new native_task_hasher_impl_1.NativeTaskHasherImpl(workspace_root_1.workspaceRoot, this.nxJson, this.projectGraph, this.externalRustReferences, {
+            selectivelyHashTsConfig: this.options?.selectivelyHashTsConfig ?? false,
+        });
     }
     async hashTasks(tasks, taskGraph, env) {
-        …
-        …
-            return tasks.map((task, index) => this.createHashDetails(task, hashes[index]));
-        }
-        else {
-            return await Promise.all(tasks.map((t) => this.hashTask(t, taskGraph, env)));
-        }
+        const hashes = await this.taskHasher.hashTasks(tasks, taskGraph, env ?? process.env);
+        return tasks.map((task, index) => this.createHashDetails(task, hashes[index]));
     }
     async hashTask(task, taskGraph, env) {
         const res = await this.taskHasher.hashTask(task, taskGraph, env ?? process.env);
@@ -98,7 +73,6 @@ class InProcessTaskHasher {
     }
 }
 exports.InProcessTaskHasher = InProcessTaskHasher;
-InProcessTaskHasher.version = '3.0';
 const DEFAULT_INPUTS = [
     {
         fileset: '{projectRoot}/**/*',
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file
package/src/tasks-runner/forked-process-task-runner.js
CHANGED
@@ -4,7 +4,6 @@ exports.ForkedProcessTaskRunner = void 0;
 const fs_1 = require("fs");
 const child_process_1 = require("child_process");
 const chalk = require("chalk");
-const logTransformer = require("strong-log-transformer");
 const output_1 = require("../utils/output");
 const utils_1 = require("./utils");
 const path_1 = require("path");
@@ -208,16 +207,16 @@ class ForkedProcessTaskRunner {
             const prefixText = `${task.target.project}:`;
             p.stdout
                 .pipe(logClearLineToPrefixTransformer(color.bold(prefixText) + ' '))
-                .pipe(…
+                .pipe(addPrefixTransformer(color.bold(prefixText)))
                 .pipe(process.stdout);
             p.stderr
                 .pipe(logClearLineToPrefixTransformer(color(prefixText) + ' '))
-                .pipe(…
+                .pipe(addPrefixTransformer(color(prefixText)))
                 .pipe(process.stderr);
         }
         else {
-            p.stdout.pipe(…
-            p.stderr.pipe(…
+            p.stdout.pipe(addPrefixTransformer()).pipe(process.stdout);
+            p.stderr.pipe(addPrefixTransformer()).pipe(process.stderr);
         }
     }
     let outWithErr = [];
@@ -403,3 +402,15 @@ function logClearLineToPrefixTransformer(prefix) {
         },
     });
 }
+function addPrefixTransformer(prefix) {
+    const newLineSeparator = process.platform.startsWith('win') ? '\r\n' : '\n';
+    return new stream_1.Transform({
+        transform(chunk, _encoding, callback) {
+            const list = chunk.toString().split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g);
+            list
+                .filter(Boolean)
+                .forEach((m) => this.push(prefix ? prefix + ' ' + m + newLineSeparator : m + newLineSeparator));
+            callback();
+        },
+    });
+}
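Note: with `strong-log-transformer` dropped from dependencies (see package.json above), line-prefixing is handled by this inline `stream.Transform`: each chunk is split on newline variants, empty segments are filtered out, and every line is re-emitted with the prefix. A standalone usage sketch (assumes the function above; `myproj:` is an illustrative prefix):

    // Each line written to stdin comes back out as "myproj: <line>".
    process.stdin
        .pipe(addPrefixTransformer('myproj:'))
        .pipe(process.stdout);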
package/src/tasks-runner/run-command.js
CHANGED
@@ -363,12 +363,14 @@ function setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles) {
 }
 async function invokeTasksRunner({ tasks, projectGraph, taskGraph, lifeCycle, nxJson, nxArgs, loadDotEnvFiles, initiatingProject, }) {
     setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles);
+    // this needs to be done before we start to run the tasks
+    const taskDetails = (0, hash_task_1.getTaskDetails)();
     const { tasksRunner, runnerOptions } = getRunner(nxArgs, nxJson);
     let hasher = (0, create_task_hasher_1.createTaskHasher)(projectGraph, nxJson, runnerOptions);
     // this is used for two reasons: to fetch all remote cache hits AND
     // to submit everything that is known in advance to Nx Cloud to run in
     // a distributed fashion
-    await (0, hash_task_1.hashTasksThatDoNotDependOnOutputsOfOtherTasks)(hasher, projectGraph, taskGraph, nxJson);
+    await (0, hash_task_1.hashTasksThatDoNotDependOnOutputsOfOtherTasks)(hasher, projectGraph, taskGraph, nxJson, taskDetails);
     const taskResultsLifecycle = new task_results_life_cycle_1.TaskResultsLifeCycle();
     const compositedLifeCycle = new life_cycle_1.CompositeLifeCycle([
         ...constructLifeCycles(lifeCycle),
package/src/tasks-runner/task-orchestrator.js
CHANGED
@@ -27,6 +27,7 @@ class TaskOrchestrator {
         this.bail = bail;
         this.daemon = daemon;
         this.outputStyle = outputStyle;
+        this.taskDetails = (0, hash_task_1.getTaskDetails)();
         this.cache = (0, cache_1.getCache)(this.nxJson, this.options);
         this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
         this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
@@ -93,7 +94,7 @@ class TaskOrchestrator {
         const task = this.taskGraph.tasks[taskId];
         const taskSpecificEnv = (0, task_env_1.getTaskSpecificEnv)(task);
         if (!task.hash) {
-            await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv);
+            await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv, this.taskDetails);
         }
         await this.options.lifeCycle.scheduleTask(task);
         return taskSpecificEnv;
@@ -101,7 +102,7 @@ class TaskOrchestrator {
     async processScheduledBatch(batch) {
         await Promise.all(Object.values(batch.taskGraph.tasks).map(async (task) => {
             if (!task.hash) {
-                await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv);
+                await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv, this.taskDetails);
             }
             await this.options.lifeCycle.scheduleTask(task);
         }));
@@ -321,6 +322,9 @@ class TaskOrchestrator {
         };
     }
     catch (e) {
+        if (process.env.NX_VERBOSE_LOGGING === 'true') {
+            console.error(e);
+        }
         return {
             code: 1,
         };
package/src/tasks-runner/tasks-schedule.d.ts
CHANGED
@@ -19,6 +19,7 @@ export declare class TasksSchedule {
     private completedTasks;
     private scheduleRequestsExecutionChain;
     private estimatedTaskTimings;
+    private projectDependencies;
     constructor(projectGraph: ProjectGraph, taskGraph: TaskGraph, options: DefaultTasksRunnerOptions);
     init(): Promise<void>;
     scheduleNextTasks(): Promise<void>;
package/src/tasks-runner/tasks-schedule.js
CHANGED
@@ -20,12 +20,16 @@ class TasksSchedule {
         this.completedTasks = new Set();
         this.scheduleRequestsExecutionChain = Promise.resolve();
         this.estimatedTaskTimings = {};
+        this.projectDependencies = {};
     }
     async init() {
         if (this.taskHistory) {
             this.estimatedTaskTimings =
                 await this.taskHistory.getEstimatedTaskTimings(Object.values(this.taskGraph.tasks).map((t) => t.target));
         }
+        for (const project of Object.values(this.taskGraph.tasks).map((t) => t.target.project)) {
+            this.projectDependencies[project] ??= (0, project_graph_utils_1.findAllProjectNodeDependencies)(project, this.reverseProjectGraph).length;
+        }
     }
     async scheduleNextTasks() {
         this.scheduleRequestsExecutionChain =
@@ -90,8 +94,8 @@ class TasksSchedule {
         // Most likely tasks with no dependencies such as test
         const project1 = this.taskGraph.tasks[taskId1].target.project;
         const project2 = this.taskGraph.tasks[taskId2].target.project;
-        const project1NodeDependencies = …
-        const project2NodeDependencies = …
+        const project1NodeDependencies = this.projectDependencies[project1];
+        const project2NodeDependencies = this.projectDependencies[project2];
         const dependenciesDiff = project2NodeDependencies - project1NodeDependencies;
         if (dependenciesDiff !== 0) {
             return dependenciesDiff;
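Note: `findAllProjectNodeDependencies` is now invoked once per project during `init()` and memoized in `projectDependencies` (the `??=` skips projects already counted), so the comparator does two object lookups per comparison instead of re-traversing the graph each time. A sketch of the comparator's new shape (assumes `projectDependencies` holds the counts precomputed against the reverse project graph, as in `init()` above):

    // O(1) lookups; tasks whose projects have more reverse-graph dependencies sort first.
    const byDependencyCount = (taskId1, taskId2) => {
        const p1 = taskGraph.tasks[taskId1].target.project;
        const p2 = taskGraph.tasks[taskId2].target.project;
        return projectDependencies[p2] - projectDependencies[p1];
    };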
package/src/utils/plugins/output.js
CHANGED
@@ -47,7 +47,7 @@ function listAlsoAvailableCorePlugins(installedPlugins) {
     }
 }
 function listPowerpackPlugins() {
-    const powerpackLink = 'https://nx.dev/plugin-registry';
+    const powerpackLink = 'https://nx.dev/plugin-registry#powerpack';
     output_1.output.log({
         title: `Available Powerpack Plugins: ${powerpackLink}`,
     });
package/src/hasher/node-task-hasher-impl.d.ts
DELETED
@@ -1,48 +0,0 @@
-import { NxJsonConfiguration } from '../config/nx-json';
-import { FileData, ProjectFileMap, ProjectGraph } from '../config/project-graph';
-import { Task, TaskGraph } from '../config/task-graph';
-import { PartialHash, TaskHasherImpl } from './task-hasher';
-export declare class NodeTaskHasherImpl implements TaskHasherImpl {
-    private readonly nxJson;
-    private readonly legacyRuntimeInputs;
-    private readonly legacyFilesetInputs;
-    private readonly projectFileMap;
-    private readonly allWorkspaceFiles;
-    private readonly projectGraph;
-    private readonly options;
-    private filesetHashes;
-    private runtimeHashes;
-    private externalDependencyHashes;
-    private allExternalDependenciesHash;
-    private projectRootMappings;
-    constructor(nxJson: NxJsonConfiguration, legacyRuntimeInputs: {
-        runtime: string;
-    }[], legacyFilesetInputs: {
-        fileset: string;
-    }[], projectFileMap: ProjectFileMap, allWorkspaceFiles: FileData[], projectGraph: ProjectGraph, options: {
-        selectivelyHashTsConfig: boolean;
-    });
-    hashTasks(tasks: Task[], taskGraph: TaskGraph, env: NodeJS.ProcessEnv): Promise<PartialHash[]>;
-    hashTask(task: Task, taskGraph: TaskGraph, env: NodeJS.ProcessEnv, visited?: string[]): Promise<PartialHash>;
-    private hashNamedInputForDependencies;
-    private hashSelfAndDepsInputs;
-    private combinePartialHashes;
-    private hashDepsInputs;
-    private hashDepsOutputs;
-    private hashDepOuputs;
-    private hashFiles;
-    private getExternalDependencyHash;
-    private hashSingleExternalDependency;
-    private hashExternalDependency;
-    private hashTarget;
-    private findExternalDependencyNodeName;
-    private hashSingleProjectInputs;
-    private hashProjectInputs;
-    private hashRootFilesets;
-    private hashProjectConfig;
-    private hashTsConfig;
-    private hashProjectFileset;
-    private hashRuntime;
-    private hashEnv;
-    private calculateExternalDependencyHashes;
-}
package/src/hasher/node-task-hasher-impl.js
DELETED
@@ -1,449 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.NodeTaskHasherImpl = void 0;
-const find_project_for_path_1 = require("../project-graph/utils/find-project-for-path");
-const file_hasher_1 = require("./file-hasher");
-const utils_1 = require("../tasks-runner/utils");
-const workspace_root_1 = require("../utils/workspace-root");
-const minimatch_1 = require("minimatch");
-const path_1 = require("path");
-const native_1 = require("../native");
-const project_graph_utils_1 = require("../utils/project-graph-utils");
-const find_matching_projects_1 = require("../utils/find-matching-projects");
-const child_process_1 = require("child_process");
-const task_hasher_1 = require("./task-hasher");
-const hasher_1 = require("../plugins/js/hasher/hasher");
-class NodeTaskHasherImpl {
-    constructor(nxJson, legacyRuntimeInputs, legacyFilesetInputs, projectFileMap, allWorkspaceFiles, projectGraph, options) {
-        this.nxJson = nxJson;
-        this.legacyRuntimeInputs = legacyRuntimeInputs;
-        this.legacyFilesetInputs = legacyFilesetInputs;
-        this.projectFileMap = projectFileMap;
-        this.allWorkspaceFiles = allWorkspaceFiles;
-        this.projectGraph = projectGraph;
-        this.options = options;
-        this.filesetHashes = {};
-        this.runtimeHashes = {};
-        this.externalDependencyHashes = new Map();
-        this.projectRootMappings = (0, find_project_for_path_1.createProjectRootMappings)(this.projectGraph.nodes);
-        // External Dependencies are all calculated up front in a deterministic order
-        this.calculateExternalDependencyHashes();
-    }
-    hashTasks(tasks, taskGraph, env) {
-        return Promise.all(tasks.map((t) => this.hashTask(t, taskGraph, env, [])));
-    }
-    async hashTask(task, taskGraph, env, visited = []) {
-        return Promise.resolve().then(async () => {
-            const { selfInputs, depsInputs, depsOutputs, projectInputs } = (0, task_hasher_1.getInputs)(task, this.projectGraph, this.nxJson);
-            const selfAndInputs = await this.hashSelfAndDepsInputs(task.target.project, task, selfInputs, depsInputs, depsOutputs, projectInputs, taskGraph, env, visited);
-            const target = this.hashTarget(task.target.project, task.target.target, selfInputs);
-            if (target) {
-                return this.combinePartialHashes([selfAndInputs, target]);
-            }
-            return selfAndInputs;
-        });
-    }
-    async hashNamedInputForDependencies(projectName, task, namedInput, taskGraph, env, visited) {
-        const projectNode = this.projectGraph.nodes[projectName];
-        const namedInputs = {
-            default: [{ fileset: '{projectRoot}/**/*' }],
-            ...this.nxJson.namedInputs,
-            ...projectNode.data.namedInputs,
-        };
-        const expandedInputs = (0, task_hasher_1.expandNamedInput)(namedInput, namedInputs);
-        const selfInputs = expandedInputs.filter(task_hasher_1.isSelfInput);
-        const depsOutputs = expandedInputs.filter(task_hasher_1.isDepsOutput);
-        const depsInputs = [{ input: namedInput, dependencies: true }]; // true is boolean by default
-        return this.hashSelfAndDepsInputs(projectName, task, selfInputs, depsInputs, depsOutputs, [], taskGraph, env, visited);
-    }
-    async hashSelfAndDepsInputs(projectName, task, selfInputs, depsInputs, depsOutputs, projectInputs, taskGraph, env, visited) {
-        const projectGraphDeps = this.projectGraph.dependencies[projectName] ?? [];
-        // we don't want random order of dependencies to change the hash
-        projectGraphDeps.sort((a, b) => a.target.localeCompare(b.target));
-        const self = await this.hashSingleProjectInputs(projectName, selfInputs, env);
-        const deps = await this.hashDepsInputs(task, depsInputs, projectGraphDeps, taskGraph, env, visited);
-        const depsOut = await this.hashDepsOutputs(task, depsOutputs, taskGraph);
-        const projects = await this.hashProjectInputs(projectInputs, env);
-        return this.combinePartialHashes([
-            ...self,
-            ...deps,
-            ...projects,
-            ...depsOut,
-        ]);
-    }
-    combinePartialHashes(partialHashes) {
-        if (partialHashes.length === 1) {
-            return partialHashes[0];
-        }
-        const details = {};
-        const hashValues = [];
-        for (const partial of partialHashes) {
-            hashValues.push(partial.value);
-            Object.assign(details, partial.details);
-        }
-        const value = (0, file_hasher_1.hashArray)(hashValues);
-        return { value, details };
-    }
-    async hashDepsInputs(task, inputs, projectGraphDeps, taskGraph, env, visited) {
-        return (await Promise.all(inputs.map(async (input) => {
-            return await Promise.all(projectGraphDeps.map(async (d) => {
-                if (visited.indexOf(d.target) > -1) {
-                    return null;
-                }
-                else {
-                    visited.push(d.target);
-                    if (this.projectGraph.nodes[d.target]) {
-                        return await this.hashNamedInputForDependencies(d.target, task, input.input || 'default', taskGraph, env, visited);
-                    }
-                    else {
-                        return this.getExternalDependencyHash(d.target);
-                    }
-                }
-            }));
-        })))
-            .flat()
-            .filter((r) => !!r);
-    }
-    async hashDepsOutputs(task, depsOutputs, taskGraph) {
-        if (depsOutputs.length === 0) {
-            return [];
-        }
-        const result = [];
-        for (const { dependentTasksOutputFiles, transitive } of depsOutputs) {
-            result.push(...(await this.hashDepOuputs(task, dependentTasksOutputFiles, taskGraph, transitive)));
-        }
-        return result;
-    }
-    async hashDepOuputs(task, dependentTasksOutputFiles, taskGraph, transitive) {
-        // task has no dependencies
-        if (!taskGraph.dependencies[task.id]) {
-            return [];
-        }
-        const partialHashes = [];
-        for (const d of taskGraph.dependencies[task.id]) {
-            const childTask = taskGraph.tasks[d];
-            const outputs = (0, utils_1.getOutputsForTargetAndConfiguration)(childTask.target, childTask.overrides, this.projectGraph.nodes[childTask.target.project]);
-            const { getFilesForOutputs } = require('../native');
-            const outputFiles = getFilesForOutputs(workspace_root_1.workspaceRoot, outputs);
-            const filteredFiles = outputFiles.filter((p) => p === dependentTasksOutputFiles ||
-                (0, minimatch_1.minimatch)(p, dependentTasksOutputFiles, { dot: true }));
-            const hashDetails = {};
-            const hashes = [];
-            for (const [file, hash] of this.hashFiles(filteredFiles.map((p) => (0, path_1.join)(workspace_root_1.workspaceRoot, p)))) {
-                hashes.push(hash);
-            }
-            let hash = (0, file_hasher_1.hashArray)(hashes);
-            partialHashes.push({
-                value: hash,
-                details: {
-                    [`${dependentTasksOutputFiles}:${outputs.join(',')}`]: hash,
-                },
-            });
-            if (transitive) {
-                partialHashes.push(...(await this.hashDepOuputs(childTask, dependentTasksOutputFiles, taskGraph, transitive)));
-            }
-        }
-        return partialHashes;
-    }
-    hashFiles(files) {
-        const r = new Map();
-        for (let f of files) {
-            r.set(f, (0, native_1.hashFile)(f));
-        }
-        return r;
-    }
-    getExternalDependencyHash(externalNodeName) {
-        const combinedHash = this.combinePartialHashes(this.externalDependencyHashes.get(externalNodeName));
-        // Set the combined hash into the hashes so it's not recalculated next time
-        this.externalDependencyHashes.set(externalNodeName, [combinedHash]);
-        return combinedHash;
-    }
-    hashSingleExternalDependency(externalNodeName) {
-        const node = this.projectGraph.externalNodes[externalNodeName];
-        if (node.data.hash) {
-            // we already know the hash of this dependency
-            return {
-                value: node.data.hash,
-                details: {
-                    [externalNodeName]: node.data.hash,
-                },
-            };
-        }
-        else {
-            // we take version as a hash
-            return {
-                value: node.data.version,
-                details: {
-                    [externalNodeName]: node.data.version,
-                },
-            };
-        }
-    }
-    hashExternalDependency(externalNodeName) {
-        const partialHashes = new Set();
-        partialHashes.add(this.hashSingleExternalDependency(externalNodeName));
-        const deps = (0, project_graph_utils_1.findAllProjectNodeDependencies)(externalNodeName, this.projectGraph, true);
-        for (const dep of deps) {
-            partialHashes.add(this.hashSingleExternalDependency(dep));
-        }
-        return Array.from(partialHashes);
-    }
-    hashTarget(projectName, targetName, selfInputs) {
-        const projectNode = this.projectGraph.nodes[projectName];
-        const target = projectNode.data.targets[targetName];
-        if (!target) {
-            return;
-        }
-        let hash;
-        // we can only vouch for @nx packages's executor dependencies
-        // if it's "run commands" or third-party we skip traversing since we have no info what this command depends on
-        if (target.executor.startsWith(`@nrwl/`) ||
-            target.executor.startsWith(`@nx/`)) {
-            const executorPackage = target.executor.split(':')[0];
-            const executorNodeName = this.findExternalDependencyNodeName(executorPackage);
-            // This is either a local plugin or a non-existent executor
-            if (!executorNodeName) {
-                // TODO: This should not return null if it is a local plugin's executor
-                return null;
-            }
-            return this.getExternalDependencyHash(executorNodeName);
-        }
-        else {
-            // use command external dependencies if available to construct the hash
-            const partialHashes = [];
-            let hasCommandExternalDependencies = false;
-            for (const input of selfInputs) {
-                if (input['externalDependencies']) {
-                    // if we have externalDependencies with empty array we still want to override the default hash
-                    hasCommandExternalDependencies = true;
-                    const externalDependencies = input['externalDependencies'];
-                    for (let dep of externalDependencies) {
-                        dep = this.findExternalDependencyNodeName(dep);
-                        if (!dep) {
-                            throw new Error(`The externalDependency "${dep}" for "${projectName}:${targetName}" could not be found`);
-                        }
-                        partialHashes.push(this.getExternalDependencyHash(dep));
-                    }
-                }
-            }
-            if (hasCommandExternalDependencies) {
-                return this.combinePartialHashes(partialHashes);
-            }
-            else {
-                // cache the hash of the entire external dependencies tree
-                if (this.allExternalDependenciesHash) {
-                    return this.allExternalDependenciesHash;
-                }
-                else {
-                    hash = (0, file_hasher_1.hashObject)(this.projectGraph.externalNodes);
-                    this.allExternalDependenciesHash = {
-                        value: hash,
-                        details: {
-                            AllExternalDependencies: hash,
-                        },
-                    };
-                    return this.allExternalDependenciesHash;
-                }
-            }
-        }
-    }
-    findExternalDependencyNodeName(packageName) {
-        if (this.projectGraph.externalNodes[packageName]) {
-            return packageName;
-        }
-        if (this.projectGraph.externalNodes[`npm:${packageName}`]) {
-            return `npm:${packageName}`;
-        }
-        for (const node of Object.values(this.projectGraph.externalNodes)) {
-            if (node.data.packageName === packageName) {
-                return node.name;
-            }
-        }
-        // not found
-        return null;
-    }
-    async hashSingleProjectInputs(projectName, inputs, env) {
-        const filesets = (0, task_hasher_1.extractPatternsFromFileSets)(inputs);
-        const projectFilesets = [];
-        const workspaceFilesets = [];
-        let invalidFilesetNoPrefix = null;
-        let invalidFilesetWorkspaceRootNegative = null;
-        for (let f of filesets) {
-            if (f.startsWith('{projectRoot}/') || f.startsWith('!{projectRoot}/')) {
-                projectFilesets.push(f);
-            }
-            else if (f.startsWith('{workspaceRoot}/') ||
-                f.startsWith('!{workspaceRoot}/')) {
-                workspaceFilesets.push(f);
-            }
-            else {
-                invalidFilesetNoPrefix = f;
-            }
-        }
-        if (invalidFilesetNoPrefix) {
-            throw new Error([
-                `"${invalidFilesetNoPrefix}" is an invalid fileset.`,
-                'All filesets have to start with either {workspaceRoot} or {projectRoot}.',
-                'For instance: "!{projectRoot}/**/*.spec.ts" or "{workspaceRoot}/package.json".',
-                `If "${invalidFilesetNoPrefix}" is a named input, make sure it is defined in, for instance, nx.json.`,
-            ].join('\n'));
-        }
-        if (invalidFilesetWorkspaceRootNegative) {
-            throw new Error([
-                `"${invalidFilesetWorkspaceRootNegative}" is an invalid fileset.`,
-                'It is not possible to negative filesets starting with {workspaceRoot}.',
-            ].join('\n'));
-        }
-        const notFilesets = inputs.filter((r) => !r['fileset']);
-        return Promise.all([
-            this.hashProjectFileset(projectName, projectFilesets),
-            this.hashProjectConfig(projectName),
-            this.hashTsConfig(projectName),
-            ...(workspaceFilesets.length
-                ? [this.hashRootFilesets(workspaceFilesets)]
-                : []),
-            this.hashRootFilesets(this.legacyFilesetInputs.map((r) => r.fileset)),
-            ...[...notFilesets, ...this.legacyRuntimeInputs].map((r) => r['runtime']
-                ? this.hashRuntime(env, r['runtime'])
-                : this.hashEnv(env, r['env'])),
-        ]);
-    }
-    async hashProjectInputs(projectInputs, env) {
-        const partialHashes = [];
-        for (const input of projectInputs) {
-            const projects = (0, find_matching_projects_1.findMatchingProjects)(input.projects, this.projectGraph.nodes);
-            for (const project of projects) {
-                const namedInputs = (0, task_hasher_1.getNamedInputs)(this.nxJson, this.projectGraph.nodes[project]);
-                const expandedInput = (0, task_hasher_1.expandSingleProjectInputs)([{ input: input.input }], namedInputs);
-                partialHashes.push(this.hashSingleProjectInputs(project, expandedInput, env));
-            }
-        }
-        return Promise.all(partialHashes).then((hashes) => hashes.flat());
-    }
-    async hashRootFilesets(filesets) {
-        const mapKey = `workspace:[${filesets.join(',')}]`;
-        if (!this.filesetHashes[mapKey]) {
-            this.filesetHashes[mapKey] = new Promise(async (res) => {
-                const parts = [];
-                const negativePatterns = [];
-                const positivePatterns = [];
-                for (const fileset of filesets) {
-                    if (fileset.startsWith('!')) {
-                        negativePatterns.push(fileset.substring(17));
-                    }
-                    else {
-                        positivePatterns.push(fileset.substring(16));
-                    }
-                }
-                for (const fileset of positivePatterns) {
-                    const withoutWorkspaceRoot = fileset;
-                    // Used to shortcut minimatch if not necessary
-                    const matchingFile = this.allWorkspaceFiles.find((t) => t.file === withoutWorkspaceRoot);
-                    // shortcut because there is a direct match
-                    if (matchingFile) {
-                        if (!negativePatterns.some((p) => (0, minimatch_1.minimatch)(matchingFile.file, p))) {
-                            parts.push(matchingFile.hash);
-                        }
-                        // No direct match, check if pattern matched
-                    }
-                    else {
-                        this.allWorkspaceFiles
-                            .filter((f) => (0, minimatch_1.minimatch)(f.file, withoutWorkspaceRoot) &&
-                            !negativePatterns.some((p) => (0, minimatch_1.minimatch)(f.file, p)))
-                            .forEach((f) => {
-                            parts.push(f.hash);
-                        });
-                    }
-                }
-                const value = (0, file_hasher_1.hashArray)(parts);
-                res({
-                    value,
-                    details: { [mapKey]: value },
-                });
-            });
-        }
-        return this.filesetHashes[mapKey];
-    }
-    hashProjectConfig(projectName) {
-        const p = this.projectGraph.nodes[projectName];
-        const projectConfig = (0, file_hasher_1.hashArray)([
-            JSON.stringify({ ...p.data, files: undefined }),
-        ]);
-        return {
-            value: projectConfig,
-            details: {
-                [`${projectName}:ProjectConfiguration`]: projectConfig,
-            },
-        };
-    }
-    hashTsConfig(projectName) {
-        const p = this.projectGraph.nodes[projectName];
-        const tsConfig = (0, file_hasher_1.hashArray)([
-            (0, hasher_1.hashTsConfig)(p, this.projectRootMappings, this.options),
-        ]);
-        return {
-            value: tsConfig,
-            details: {
-                [`${projectName}:TsConfig`]: tsConfig,
-            },
-        };
-    }
-    async hashProjectFileset(projectName, filesetPatterns) {
-        const mapKey = `${projectName}:${filesetPatterns.join(',')}`;
-        if (!this.filesetHashes[mapKey]) {
-            this.filesetHashes[mapKey] = new Promise(async (res) => {
-                const p = this.projectGraph.nodes[projectName];
-                const filteredFiles = (0, task_hasher_1.filterUsingGlobPatterns)(p.data.root, this.projectFileMap[projectName] || [], filesetPatterns);
-                const files = [];
-                for (const { file, hash } of filteredFiles) {
-                    files.push(file, hash);
-                }
-                const value = (0, file_hasher_1.hashArray)(files);
-                res({
-                    value,
-                    details: { [mapKey]: value },
-                });
-            });
-        }
-        return this.filesetHashes[mapKey];
-    }
-    async hashRuntime(env, runtime) {
-        const env_key = JSON.stringify(env);
-        const mapKey = `runtime:${runtime}-${env_key}`;
-        if (!this.runtimeHashes[mapKey]) {
-            this.runtimeHashes[mapKey] = new Promise((res, rej) => {
-                (0, child_process_1.exec)(runtime, {
-                    windowsHide: true,
-                    cwd: workspace_root_1.workspaceRoot,
-                    env,
-                }, (err, stdout, stderr) => {
-                    if (err) {
-                        rej(new Error(`Nx failed to execute {runtime: '${runtime}'}. ${err}.`));
-                    }
-                    else {
-                        const value = (0, file_hasher_1.hashArray)([`${stdout}${stderr}`.trim()]);
-                        res({
-                            details: { [`runtime:${runtime}`]: value },
-                            value,
-                        });
-                    }
-                });
-            });
-        }
-        return this.runtimeHashes[mapKey];
-    }
-    async hashEnv(env, envVarName) {
-        const value = (0, file_hasher_1.hashArray)([env[envVarName] ?? '']);
-        return {
-            details: { [`env:${envVarName}`]: value },
-            value,
-        };
-    }
-    calculateExternalDependencyHashes() {
-        const keys = Object.keys(this.projectGraph.externalNodes);
-        for (const externalNodeName of keys) {
-            this.externalDependencyHashes.set(externalNodeName, this.hashExternalDependency(externalNodeName));
-        }
-    }
-}
-exports.NodeTaskHasherImpl = NodeTaskHasherImpl;