@nrwl/nx-cloud 15.3.2 → 15.3.3-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/core/api/error-reporter.api.js +36 -1
- package/lib/core/api/run-group.api.js +73 -1
- package/lib/core/commands/upload-and-show-run-details.js +2 -1
- package/lib/core/commands/upload-and-show-run-details.js.map +1 -1
- package/lib/core/runners/cloud-enabled/cloud-enabled-life-cycle.js +93 -1
- package/lib/core/runners/cloud-enabled/cloud-enabled.runner.js +320 -1
- package/lib/core/runners/cloud-enabled/cloud-remote-cache.js +133 -1
- package/lib/core/runners/cloud-enabled/cloud-run.api.js +168 -1
- package/lib/core/runners/cloud-enabled/id-generator.js +16 -1
- package/lib/core/runners/distributed-agent/distributed-agent.api.js +93 -1
- package/lib/core/runners/distributed-agent/distributed-agent.impl.js +157 -1
- package/lib/core/runners/distributed-agent/execute-tasks.js +114 -1
- package/lib/core/runners/distributed-agent/invoke-tasks-using-nx-imperative-api.js +58 -1
- package/lib/core/runners/distributed-agent/invoke-tasks-using-run-many.js +97 -1
- package/lib/core/runners/distributed-execution/distributed-execution.api.js +152 -1
- package/lib/core/runners/distributed-execution/distributed-execution.runner.js +118 -1
- package/lib/core/runners/distributed-execution/process-task.js +45 -1
- package/lib/core/runners/distributed-execution/process-tasks.js +67 -1
- package/lib/core/runners/distributed-execution/split-task-graph-into-stages.js +37 -1
- package/lib/core/runners/distributed-execution/task-graph-creator.js +100 -1
- package/lib/utilities/environment.js +8 -2
- package/lib/utilities/environment.js.map +1 -1
- package/lib/utilities/is-private-cloud.d.ts +1 -0
- package/lib/utilities/is-private-cloud.js +30 -0
- package/lib/utilities/is-private-cloud.js.map +1 -0
- package/package.json +1 -1
|
@@ -1 +1,157 @@
|
|
|
1
|
-
|
|
1
|
+
"use strict";
// TypeScript-emitted downlevel helper for async/await: drives a generator
// function step-by-step, adopting each yielded value into a Promise and
// resolving/rejecting the outer Promise when the generator completes.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
// CommonJS export slot; the real function is assigned further down the file.
exports.startAgent = void 0;
|
|
13
|
+
const fs_1 = require("fs");
|
|
14
|
+
const stripJsonComments = require("strip-json-comments");
|
|
15
|
+
const yargsParser = require("yargs-parser");
|
|
16
|
+
const environment_1 = require("../../../utilities/environment");
|
|
17
|
+
const metric_logger_1 = require("../../../utilities/metric-logger");
|
|
18
|
+
const print_cacheable_targets_error_1 = require("../../error/print-cacheable-targets-error");
|
|
19
|
+
const print_invalid_runner_error_1 = require("../../error/print-invalid-runner-error");
|
|
20
|
+
const print_run_group_error_1 = require("../../error/print-run-group-error");
|
|
21
|
+
const distributed_agent_api_1 = require("./distributed-agent.api");
|
|
22
|
+
const execute_tasks_1 = require("./execute-tasks");
|
|
23
|
+
const dte_artifact_storage_1 = require("../../../utilities/dte-artifact-storage");
|
|
24
|
+
const file_storage_1 = require("../../file-storage/file-storage");
|
|
25
|
+
const e2e_encryption_1 = require("../../file-storage/e2e-encryption");
|
|
26
|
+
const error_reporter_api_1 = require("../../api/error-reporter.api");
|
|
27
|
+
const invoke_tasks_using_run_many_1 = require("./invoke-tasks-using-run-many");
|
|
28
|
+
const invoke_tasks_using_nx_imperative_api_1 = require("./invoke-tasks-using-nx-imperative-api");
|
|
29
|
+
const { output, initTasksRunner, workspaceRoot, } = require('../../../utilities/nx-imports');
|
|
30
|
+
// Parse CLI arguments once at module load. `targets` is declared as an
// array option so repeated --targets flags accumulate into a list.
const args = yargsParser(process.argv, {
    array: ['targets'],
    default: {},
});
// just in case someone passes the args with commas
// cf. https://github.com/yargs/yargs/issues/846
if (args.targets && args.targets.length === 1) {
    args.targets = args.targets[0].split(',').map((x) => x.trim());
}
|
|
39
|
+
/**
 * Entry point for a distributed-task-execution agent.
 *
 * Reads CI/run-group context from the environment, validates the workspace's
 * nx.json runner configuration and the requested targets, registers the agent
 * with the Nx Cloud API, sets up lockfile/signal handling, and then enters
 * the task-execution loop (`executeTasks`). Exits the process with code 1 on
 * configuration errors. Returns the promise from `executeTasks`.
 */
function startAgent() {
    return __awaiter(this, void 0, void 0, function* () {
        const branch = (0, environment_1.getBranch)();
        const runGroup = (0, environment_1.getRunGroup)();
        const ciExecutionId = (0, environment_1.getCIExecutionId)();
        const ciExecutionEnv = (0, environment_1.getCIExecutionEnv)();
        // Without an identifiable run group the agent cannot join an execution.
        if (!(0, print_run_group_error_1.canDetectRunGroup)(runGroup, ciExecutionId)) {
            (0, print_run_group_error_1.printRunGroupError)();
            process.exit(1);
        }
        if (args.targets && args.targets.length) {
            output.note({
                title: `Starting an agent for running Nx target(s) [${args.targets.join(', ')}]`,
            });
        }
        else {
            output.note({
                title: 'Starting an agent for running Nx tasks',
            });
        }
        // nx.json may contain comments, so strip them before JSON.parse.
        const defaultRunnerConfig = JSON.parse(stripJsonComments((0, fs_1.readFileSync)(`${workspaceRoot}/nx.json`).toString())).tasksRunnerOptions.default;
        if (defaultRunnerConfig.runner !== '@nrwl/nx-cloud') {
            (0, print_invalid_runner_error_1.printInvalidRunnerError)();
            return process.exit(1);
        }
        const options = defaultRunnerConfig.options;
        // Every requested target must be listed in cacheableOperations;
        // otherwise report the offending targets and abort.
        if (args.targets &&
            args.targets.some((target) => { var _a; return !((_a = options.cacheableOperations) === null || _a === void 0 ? void 0 : _a.includes(target)); })) {
            const wrongTargets = args.targets.filter((target) => { var _a; return !((_a = options.cacheableOperations) === null || _a === void 0 ? void 0 : _a.includes(target)); });
            (0, print_cacheable_targets_error_1.printCacheableTargetsError)(wrongTargets);
            return process.exit(1);
        }
        const agentName = getAgentName();
        const api = new distributed_agent_api_1.DistributedAgentApi(options, branch, runGroup, ciExecutionId, ciExecutionEnv, agentName);
        // Writes the agent lockfile and installs exit/SIGTERM/SIGINT handlers.
        createAgentLockfileAndSetUpListeners(api, options, agentName);
        // ENCRYPTION_KEY from the environment takes precedence over nx.json.
        const encryption = new e2e_encryption_1.E2EEncryption(environment_1.ENCRYPTION_KEY || options.encryptionKey);
        const errorReporter = new error_reporter_api_1.ErrorReporterApi(options);
        const dteArtifactStorage = new dte_artifact_storage_1.DteArtifactStorage(new file_storage_1.FileStorage(encryption, errorReporter, options, 'dte-agent'), (0, environment_1.getNxCacheDirectory)(options));
        // Prefer Nx's imperative tasks-runner API when the installed Nx version
        // exposes it; otherwise shell out via `nx run-many`.
        const invokeTasks = initTasksRunner
            ? yield (0, invoke_tasks_using_nx_imperative_api_1.invokeTasksUsingNxImperativeApi)(options)
            : yield (0, invoke_tasks_using_run_many_1.invokeTasksUsingRunMany)(options);
        return (0, execute_tasks_1.executeTasks)(agentName, api, dteArtifactStorage, invokeTasks, args.targets)
            .then((res) => __awaiter(this, void 0, void 0, function* () {
            // Flush collected metrics on success, then pass the result through.
            yield (0, metric_logger_1.submitRunMetrics)(options);
            return res;
        }))
            .catch((e) => __awaiter(this, void 0, void 0, function* () {
            // Report the failure to the run group before rethrowing.
            yield api.completeRunGroupWithError(`Critical Error in Agent: "${e.message}"`);
            throw e;
        }));
    });
}
exports.startAgent = startAgent;
|
|
92
|
+
/**
 * Resolve the display name for this agent.
 *
 * Precedence: an explicit NX_AGENT_NAME environment variable, then the
 * CircleCI stage or job name (when running on CircleCI), and finally a
 * random "Agent NNNNN" fallback.
 *
 * @returns {string} the agent name
 */
function getAgentName() {
    const env = process.env;
    if (env.NX_AGENT_NAME !== undefined) {
        return env.NX_AGENT_NAME;
    }
    const onCircleCi = env.CIRCLECI !== undefined;
    if (onCircleCi && env.CIRCLE_STAGE) {
        return env.CIRCLE_STAGE;
    }
    if (onCircleCi && env.CIRCLE_JOB) {
        return env.CIRCLE_JOB;
    }
    return `Agent ${Math.floor(Math.random() * 100000)}`;
}
|
|
106
|
+
/**
 * Create this agent's lockfile in the Nx cache directory and install process
 * handlers that clean it up on exit or termination signals.
 *
 * Lockfiles live under `<nx-cache>/lockfiles/<agentName>.lock`. Pre-existing
 * lockfiles from other agents trigger a warning (they may be stale); a
 * lockfile with this agent's own name is treated as a fatal duplicate-ID
 * error and exits with code 1.
 */
function createAgentLockfileAndSetUpListeners(api, options, agentName) {
    const cacheDirectory = (0, environment_1.getNxCacheDirectory)(options);
    const lockFileDirectory = `${cacheDirectory}/lockfiles`;
    const lockFilePath = `${lockFileDirectory}/${agentName}.lock`;
    if (!(0, fs_1.existsSync)(lockFileDirectory)) {
        (0, fs_1.mkdirSync)(lockFileDirectory, { recursive: true });
    }
    // Check for other agents' lockfiles and warn if exist
    const lockFiles = (0, fs_1.readdirSync)(lockFileDirectory);
    if (lockFiles.length) {
        // Check to make sure the current agent name is not in use (only 1/100000 ^ 2 chance of this)
        if (lockFiles.includes(`${agentName}.lock`)) {
            output.error({
                title: 'Duplicate Agent ID Detected',
                bodyLines: [
                    'We have detected another agent with this ID running in this workspace. This should not happen.',
                    '',
                    'End all currently running agents, run "npx nx-cloud clean-up-agents", and try again.',
                ],
            });
            process.exit(1);
        }
        output.warn({
            title: 'Other Nx Cloud Agents Detected',
            bodyLines: [
                'We have detected other agents running in this workspace. This can cause unexpected behavior.',
                '',
                'This can also be a false positive caused by agents that did not shut down correctly.',
                'If you believe this is the case, run "npx nx-cloud clean-up-agents".',
            ],
        });
    }
    // Claim our slot: the lockfile's existence (not its content) is the marker.
    (0, fs_1.writeFileSync)(lockFilePath, '');
    process.on('exit', (code) => {
        cleanupAgentLockfile(lockFilePath, code);
    });
    // On termination signals, report the failure to the run group before
    // removing the lockfile and exiting with code 1.
    process.on('SIGTERM', () => __awaiter(this, void 0, void 0, function* () {
        yield api.completeRunGroupWithError('Agent was terminated via SIGTERM');
        cleanupAgentLockfile(lockFilePath, 1);
    }));
    process.on('SIGINT', () => __awaiter(this, void 0, void 0, function* () {
        yield api.completeRunGroupWithError('Agent was terminated via SIGINT');
        cleanupAgentLockfile(lockFilePath, 1);
    }));
}
|
|
151
|
+
/**
 * Remove this agent's lockfile (if it still exists) and terminate the
 * process with the given exit code.
 *
 * Fix: the previous implementation only called `process.exit(code)` inside
 * the `existsSync` branch, so a SIGTERM/SIGINT handler invoking this after
 * the lockfile had already been removed would leave the process running.
 * Exiting unconditionally terminates in all cases; for the 'exit'-event
 * caller this is harmless since the process is already shutting down with
 * the same code.
 *
 * @param {string} lockFilePath - path to this agent's lockfile
 * @param {number} code - exit code to terminate with
 */
function cleanupAgentLockfile(lockFilePath, code) {
    if ((0, fs_1.existsSync)(lockFilePath)) {
        (0, fs_1.unlinkSync)(lockFilePath);
    }
    process.exit(code);
}
|
|
157
|
+
//# sourceMappingURL=distributed-agent.impl.js.map
|
|
@@ -1 +1,114 @@
|
|
|
1
|
-
|
|
1
|
+
"use strict";
// TypeScript-emitted downlevel helper for async/await: drives a generator
// function step-by-step, adopting each yielded value into a Promise and
// resolving/rejecting the outer Promise when the generator completes.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
// CommonJS export slot; the real function is assigned further down the file.
exports.executeTasks = void 0;
|
|
13
|
+
const create_unchanged_value_timeout_1 = require("../../../utilities/create-unchanged-value-timeout");
|
|
14
|
+
const environment_1 = require("../../../utilities/environment");
|
|
15
|
+
const waiter_1 = require("../../../utilities/waiter");
|
|
16
|
+
const { output } = require('../../../utilities/nx-imports');
|
|
17
|
+
/**
 * Main agent polling loop: repeatedly asks the Nx Cloud API for tasks,
 * downloads artifacts for tasks other agents completed, runs the assigned
 * tasks via `invokeTasks`, and reports completions back on the next poll.
 *
 * Returns (resolves) when the API signals the run group is complete or there
 * are no further tasks; exits the process on a critical API error.
 *
 * @param agentName - display name used in log output
 * @param api - DistributedAgentApi instance for polling/reporting
 * @param dteArtifactStorage - storage used to retrieve and extract artifacts
 * @param invokeTasks - (executionId, tasks, maxParallel) => {completedTasks, completedStatusCode}
 * @param targets - optional list of targets this agent is restricted to
 */
function executeTasks(agentName, api, dteArtifactStorage, invokeTasks, targets) {
    return __awaiter(this, void 0, void 0, function* () {
        let completedStatusCode = 0;
        let apiResponse = null;
        // Watchdog: aborts if the API keeps returning the same task list for
        // longer than NO_MESSAGES_TIMEOUT.
        const failIfSameTasksAfterTimeout = (0, create_unchanged_value_timeout_1.createUnchangedValueTimeout)({
            title: `No new messages received after ${environment_1.NO_MESSAGES_TIMEOUT / 1000} seconds`,
            timeout: environment_1.NO_MESSAGES_TIMEOUT,
        });
        const waiter = new waiter_1.Waiter();
        let completedTasks = [];
        const startTime = new Date();
        let executedAnyTasks = false;
        // Set of taskIds already handled (artifact downloaded or executed here),
        // keyed by taskId; prevents re-downloading artifacts on later polls.
        const processedTasks = {};
        while (true) {
            if (environment_1.VERBOSE_LOGGING) {
                output.note({
                    title: `${agentName} fetching tasks...`,
                });
            }
            // Poll: also reports the previous batch's completions/status code.
            apiResponse = yield api.tasks(apiResponse ? apiResponse.executionId : null, completedStatusCode, completedTasks, targets);
            if (environment_1.VERBOSE_LOGGING) {
                output.note({
                    title: `${agentName} received an API Response`,
                    bodyLines: [
                        `completed: ${apiResponse.completed}`,
                        `status: ${apiResponse.status}`,
                        `retryDuring: ${apiResponse.retryDuring}`,
                        `executionId: ${apiResponse.executionId}`,
                        `number of tasks: ${apiResponse.tasks.length}`,
                        `error: ${apiResponse.criticalErrorMessage}`,
                        `maxParallel: ${apiResponse.maxParallel}`,
                    ],
                });
            }
            if (apiResponse.criticalErrorMessage) {
                output.error({
                    title: 'Distributed Execution Terminated',
                    bodyLines: ['Error:', apiResponse.criticalErrorMessage],
                });
                process.exit(0);
            }
            // run group is completed but it might be a rerun
            // we will try several times before going further and
            // completed the response
            // we only do it if we haven't executed any tasks
            // NOTE(review): the `>` comparison means this keeps retrying once the
            // elapsed time EXCEEDS retryDuring, which reads inverted relative to
            // the comment above ("retry during" a window) — confirm intent.
            if ((apiResponse === null || apiResponse === void 0 ? void 0 : apiResponse.retryDuring) &&
                (apiResponse === null || apiResponse === void 0 ? void 0 : apiResponse.retryDuring) !== 0 &&
                !executedAnyTasks &&
                new Date().getTime() - startTime.getTime() > apiResponse.retryDuring) {
                yield waiter.wait();
                continue;
            }
            if ((apiResponse === null || apiResponse === void 0 ? void 0 : apiResponse.status) !== undefined) {
                if (apiResponse.status === 'RUN_GROUP_COMPLETED' ||
                    apiResponse.status === 'NO_FURTHER_TASKS_TO_RUN') {
                    return;
                }
            }
            else if (apiResponse.completed) {
                return;
            }
            // if status is present that use the status instead of completed, otherwise use completed
            failIfSameTasksAfterTimeout(apiResponse.tasks.map((t) => t.taskId).join(''));
            // No executionId means nothing is assigned yet: back off (with the
            // waiter's delay), clear the previous batch's report, and re-poll.
            if (!apiResponse.executionId) {
                if (environment_1.VERBOSE_LOGGING) {
                    output.note({
                        title: `${agentName} waiting...`,
                    });
                }
                yield waiter.wait();
                completedStatusCode = 0;
                completedTasks = [];
                continue;
            }
            waiter.reset();
            executedAnyTasks = true;
            // Pull artifacts for tasks completed elsewhere so local tasks that
            // depend on their outputs can run.
            if (apiResponse.completedTasks) {
                for (const t of apiResponse.completedTasks) {
                    if (processedTasks[t.taskId])
                        continue;
                    output.note({
                        title: `${agentName} downloading artifacts for ${t.taskId} Hash: ${t.hash} Url: ${t.url}`,
                    });
                    yield dteArtifactStorage.retrieveAndExtract(t.hash, t.url);
                    processedTasks[t.taskId] = true;
                }
            }
            // Run this agent's assigned batch; results are reported on the next poll.
            const r = yield invokeTasks(apiResponse.executionId, apiResponse.tasks, apiResponse.maxParallel);
            for (const t of r.completedTasks) {
                processedTasks[t.taskId] = true;
            }
            completedStatusCode = r.completedStatusCode;
            completedTasks = r.completedTasks;
        }
    });
}
exports.executeTasks = executeTasks;
|
|
114
|
+
//# sourceMappingURL=execute-tasks.js.map
|
|
@@ -1 +1,58 @@
|
|
|
1
|
-
|
|
1
|
+
"use strict";
// TypeScript-emitted downlevel helper for async/await: drives a generator
// function step-by-step, adopting each yielded value into a Promise and
// resolving/rejecting the outer Promise when the generator completes.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
// CommonJS export slot; the real function is assigned further down the file.
exports.invokeTasksUsingNxImperativeApi = void 0;
|
|
13
|
+
const { initTasksRunner } = require('../../../utilities/nx-imports');
|
|
14
|
+
const parser = require("yargs-parser");
|
|
15
|
+
const serializer_overrides_1 = require("../../../utilities/serializer-overrides");
|
|
16
|
+
/**
 * Build a task-invoker backed by Nx's imperative tasks-runner API.
 *
 * Initializes the runner once, then returns an async function
 * (executionId, tasksToExecute, parallel) that converts the API's task
 * descriptors into Nx task objects, sets the env vars the distributed
 * execution protocol relies on, invokes the runner, and maps the result
 * back to {completedTasks: [{taskId, hash}], completedStatusCode}.
 */
function invokeTasksUsingNxImperativeApi(options) {
    return __awaiter(this, void 0, void 0, function* () {
        const tasksRunner = yield initTasksRunner(options);
        return (executionId, tasksToExecute, parallel) => __awaiter(this, void 0, void 0, function* () {
            const tasks = tasksToExecute.map((t) => {
                // Re-parse the serialized param string into an overrides object;
                // camel-case expansion is disabled so flag names pass through as-is.
                const params = parser(t.params, {
                    configuration: {
                        'camel-case-expansion': false,
                        'dot-notation': true,
                    },
                });
                const unparsed = (0, serializer_overrides_1.unparse)(params);
                // Drop yargs' empty positional-args array to keep overrides clean.
                if (params._.length == 0) {
                    delete params._;
                }
                return {
                    id: t.taskId,
                    target: {
                        project: t.projectName,
                        target: t.target,
                        configuration: t.configuration,
                    },
                    overrides: Object.assign(Object.assign({}, params), { __overrides_unparsed__: unparsed }),
                };
            });
            process.env.NX_CACHE_FAILURES = 'true'; // this is only requires because of how we do uploads
            process.env.NX_CLOUD_DISTRIBUTED_EXECUTION_ID = executionId;
            process.env.NX_STREAM_OUTPUT = 'true';
            process.env.NX_PREFIX_OUTPUT = 'true';
            const r = yield tasksRunner.invoke({ tasks, parallel });
            const completedTasks = Object.values(r.taskGraph.tasks);
            return {
                completedTasks: completedTasks.map((t) => ({
                    taskId: t.id,
                    hash: t.hash,
                })),
                completedStatusCode: r.status,
            };
        });
    });
}
exports.invokeTasksUsingNxImperativeApi = invokeTasksUsingNxImperativeApi;
|
|
58
|
+
//# sourceMappingURL=invoke-tasks-using-nx-imperative-api.js.map
|
|
@@ -1 +1,97 @@
|
|
|
1
|
-
|
|
1
|
+
"use strict";
// TypeScript-emitted downlevel helper for async/await: drives a generator
// function step-by-step, adopting each yielded value into a Promise and
// resolving/rejecting the outer Promise when the generator completes.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
// CommonJS export slot; the real function is assigned further down the file.
exports.invokeTasksUsingRunMany = void 0;
|
|
13
|
+
const environment_1 = require("../../../utilities/environment");
|
|
14
|
+
const child_process_1 = require("child_process");
|
|
15
|
+
const fs_1 = require("fs");
|
|
16
|
+
const { output } = require('../../../utilities/nx-imports');
|
|
17
|
+
/**
 * Build a task-invoker that shells out to `npx nx run-many` (fallback path
 * for Nx versions without the imperative tasks-runner API).
 *
 * Returns an async function (executionId, tasksToExecute, parallel) that
 * groups the tasks per (target, configuration), runs one `run-many` command
 * per group with the distributed-execution env vars set, and collects the
 * completed task hashes that Nx recorded into the cache directory.
 */
function invokeTasksUsingRunMany(options) {
    return __awaiter(this, void 0, void 0, function* () {
        const readCompleted = completedTasksReader(options);
        return function invokeTasksUsingRunMany(executionId, tasksToExecute, parallel) {
            return __awaiter(this, void 0, void 0, function* () {
                let completedStatusCode = 0;
                const completedTasks = [];
                for (const g of groupByTarget(tasksToExecute)) {
                    const config = g.configuration
                        ? `--configuration=${g.configuration}`
                        : ``;
                    const parallelStr = parallel > 1 ? ` --parallel --max-parallel=${parallel}` : ``;
                    // TODO use pnpx or yarn when needed
                    const command = `npx nx run-many --target=${g.target} ${config} --projects=${g.projects.join(',')} ${g.params}${parallelStr}`;
                    if (environment_1.VERBOSE_LOGGING) {
                        output.note({
                            title: `Executing: '${command}'`,
                        });
                    }
                    try {
                        // Inherit stdout/stderr so task output streams to the agent log.
                        (0, child_process_1.execSync)(command, {
                            stdio: ['ignore', 'inherit', 'inherit'],
                            env: Object.assign(Object.assign({}, process.env), { NX_CACHE_FAILURES: 'true', NX_CLOUD_DISTRIBUTED_EXECUTION_ID: executionId, NX_STREAM_OUTPUT: 'true', NX_PREFIX_OUTPUT: 'true' }),
                        });
                        completedTasks.push(...readCompleted(executionId));
                    }
                    catch (e) {
                        // An internal-error status means the DTE machinery itself failed:
                        // propagate. Any other non-zero exit is a normal task failure —
                        // record the hashes that were completed and mark the batch failed.
                        if (e.status === environment_1.DISTRIBUTED_TASK_EXECUTION_INTERNAL_ERROR_STATUS_CODE) {
                            throw e;
                        }
                        else {
                            completedStatusCode = 1;
                            completedTasks.push(...readCompleted(executionId));
                        }
                    }
                }
                return { completedStatusCode, completedTasks };
            });
        };
    });
}
exports.invokeTasksUsingRunMany = invokeTasksUsingRunMany;
|
|
59
|
+
/**
 * Partition tasks into run-many groups: one group per distinct
 * (target, configuration) pair, in first-seen order.
 *
 * Each group accumulates the project names of its tasks; `params` is taken
 * from the first task seen for that group.
 *
 * @param {Array} tasks - task descriptors with target/configuration/projectName/params
 * @returns {Array} groups of {target, projects, params, configuration}
 */
function groupByTarget(tasks) {
    const groups = [];
    const sameGroup = (g, t) =>
        g.target === t.target && g.configuration === t.configuration;
    for (const task of tasks) {
        const group = groups.find((g) => sameGroup(g, task));
        if (group) {
            group.projects.push(task.projectName);
            continue;
        }
        groups.push({
            target: task.target,
            projects: [task.projectName],
            params: task.params,
            configuration: task.configuration,
        });
    }
    return groups;
}
|
|
77
|
+
/**
 * Build a reader that loads the task hashes Nx recorded for a distributed
 * execution from `<nx-cache>/tasks-hashes-<executionId>`.
 *
 * The file is deleted immediately after a successful read so a later
 * crashed command cannot pick up stale hashes. Throws when the file is
 * missing, unparseable, or records no tasks.
 *
 * @param options - runner options used to resolve the cache directory
 * @returns {(distributedExecutionId: string) => Array} reader closure
 */
function completedTasksReader(options) {
    const cacheDirectory = (0, environment_1.getNxCacheDirectory)(options);
    return (distributedExecutionId) => {
        const errorMessage = `Command execution failed (distributed task execution: ${distributedExecutionId}). Tasks hashes haven\'t been recorded.`;
        const taskHashesFile = `${cacheDirectory}/tasks-hashes-${distributedExecutionId}`;
        let completedTasks;
        try {
            const raw = (0, fs_1.readFileSync)(taskHashesFile).toString();
            completedTasks = JSON.parse(raw);
            // remove it such that if the next command crashes we don't read an obsolete file
            (0, fs_1.unlinkSync)(taskHashesFile);
        }
        catch (_e) {
            throw new Error(errorMessage);
        }
        if (completedTasks.length === 0) {
            throw new Error(errorMessage);
        }
        return completedTasks;
    };
}
|
|
97
|
+
//# sourceMappingURL=invoke-tasks-using-run-many.js.map
|