nx 19.8.1 → 19.8.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.json +9 -1
- package/package.json +12 -13
- package/src/command-line/graph/graph.js +9 -9
- package/src/command-line/import/utils/prepare-source-repo.js +8 -3
- package/src/command-line/init/implementation/add-nx-to-nest.js +5 -5
- package/src/command-line/init/implementation/react/clean-up-files.js +7 -7
- package/src/command-line/init/implementation/react/index.js +19 -12
- package/src/command-line/init/implementation/react/rename-js-to-jsx.js +3 -3
- package/src/command-line/release/changelog.js +1 -2
- package/src/command-line/release/config/version-plans.js +6 -7
- package/src/command-line/release/plan.js +6 -5
- package/src/command-line/release/release.js +2 -2
- package/src/command-line/reset/reset.js +20 -13
- package/src/core/graph/main.js +1 -1
- package/src/daemon/cache.d.ts +1 -2
- package/src/daemon/cache.js +12 -21
- package/src/daemon/client/client.js +9 -8
- package/src/daemon/tmp-dir.js +6 -7
- package/src/executors/run-commands/run-commands.impl.js +15 -22
- package/src/generators/tree.d.ts +1 -1
- package/src/generators/tree.js +11 -11
- package/src/hasher/hash-task.d.ts +4 -2
- package/src/hasher/hash-task.js +6 -9
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/plugins/js/index.js +1 -2
- package/src/project-graph/nx-deps-cache.js +5 -6
- package/src/tasks-runner/cache.js +17 -16
- package/src/tasks-runner/life-cycles/dynamic-run-many-terminal-output-life-cycle.js +5 -0
- package/src/tasks-runner/life-cycles/static-run-many-terminal-output-life-cycle.js +7 -0
- package/src/tasks-runner/remove-old-cache-records.js +2 -3
- package/src/tasks-runner/run-command.js +3 -1
- package/src/tasks-runner/task-orchestrator.d.ts +1 -0
- package/src/tasks-runner/task-orchestrator.js +6 -2
- package/src/tasks-runner/tasks-schedule.d.ts +1 -0
- package/src/tasks-runner/tasks-schedule.js +6 -2
- package/src/utils/fileutils.d.ts +9 -1
- package/src/utils/fileutils.js +29 -12
- package/src/utils/ignore.js +2 -2
- package/src/utils/package-manager.js +2 -2
- package/src/utils/plugins/output.js +1 -1
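Most of the churn in this range is a single refactor: replacing the `fs-extra` dependency with built-in `node:fs` and `node:fs/promises` calls. Several removed call sites below are truncated, so the exact fs-extra names are inferred rather than shown; a minimal sketch of the substitutions, assuming the usual fs-extra helpers:

```js
// Sketch of the fs-extra -> node:fs substitutions applied across this diff;
// the left-hand fs-extra names are inferred, since the removed lines are truncated.
const { mkdirSync, rmSync, writeFileSync, existsSync, statSync } = require('node:fs');

// fs-extra ensureDirSync(dir): a recursive mkdir is a no-op when dir already exists
mkdirSync('/tmp/nx-example/nested/dir', { recursive: true });

// fs-extra removeSync(path): rmSync with force ignores missing paths
rmSync('/tmp/nx-example/nested/dir', { recursive: true, force: true });

// fs-extra pathExistsSync(p): existsSync, or statSync when the stat is needed anyway
if (existsSync('/tmp/nx-example')) {
  console.log(statSync('/tmp/nx-example').isDirectory());
}

// writeFileSync has the same shape in both libraries
writeFileSync('/tmp/nx-example/marker', 'true');
```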
package/src/daemon/cache.d.ts
CHANGED
```diff
@@ -2,9 +2,8 @@ export interface DaemonProcessJson {
     processId: number;
 }
 export declare const serverProcessJsonPath: string;
-export declare function readDaemonProcessJsonCache():
+export declare function readDaemonProcessJsonCache(): DaemonProcessJson | null;
 export declare function deleteDaemonJsonProcessCache(): void;
 export declare function writeDaemonJsonProcessCache(daemonJson: DaemonProcessJson): Promise<void>;
 export declare function waitForDaemonToExitAndCleanupProcessJson(): Promise<void>;
-export declare function safelyCleanUpExistingProcess(): Promise<void>;
 export declare function getDaemonProcessIdSync(): number | null;
```
package/src/daemon/cache.js
CHANGED
```diff
@@ -5,31 +5,33 @@ exports.readDaemonProcessJsonCache = readDaemonProcessJsonCache;
 exports.deleteDaemonJsonProcessCache = deleteDaemonJsonProcessCache;
 exports.writeDaemonJsonProcessCache = writeDaemonJsonProcessCache;
 exports.waitForDaemonToExitAndCleanupProcessJson = waitForDaemonToExitAndCleanupProcessJson;
-exports.safelyCleanUpExistingProcess = safelyCleanUpExistingProcess;
 exports.getDaemonProcessIdSync = getDaemonProcessIdSync;
-const
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const tmp_dir_1 = require("./tmp-dir");
+const fileutils_1 = require("../utils/fileutils");
 exports.serverProcessJsonPath = (0, path_1.join)(tmp_dir_1.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'server-process.json');
-
-    if (!(0,
+function readDaemonProcessJsonCache() {
+    if (!(0, node_fs_1.existsSync)(exports.serverProcessJsonPath)) {
         return null;
     }
-    return
+    return (0, fileutils_1.readJsonFile)(exports.serverProcessJsonPath);
 }
 function deleteDaemonJsonProcessCache() {
     try {
         if (getDaemonProcessIdSync() === process.pid) {
-            (0,
+            (0, node_fs_1.unlinkSync)(exports.serverProcessJsonPath);
         }
     }
     catch { }
 }
 async function writeDaemonJsonProcessCache(daemonJson) {
-    await (0,
+    await (0, fileutils_1.writeJsonFileAsync)(exports.serverProcessJsonPath, daemonJson, {
+        appendNewLine: true,
+    });
 }
 async function waitForDaemonToExitAndCleanupProcessJson() {
-    const daemonProcessJson =
+    const daemonProcessJson = readDaemonProcessJsonCache();
     if (daemonProcessJson && daemonProcessJson.processId) {
         await new Promise((resolve, reject) => {
             let count = 0;
@@ -51,24 +53,13 @@ async function waitForDaemonToExitAndCleanupProcessJson() {
         deleteDaemonJsonProcessCache();
     }
 }
-async function safelyCleanUpExistingProcess() {
-    const daemonProcessJson = await readDaemonProcessJsonCache();
-    if (daemonProcessJson && daemonProcessJson.processId) {
-        try {
-            process.kill(daemonProcessJson.processId);
-            // we wait for the process to actually shut down before returning
-            await waitForDaemonToExitAndCleanupProcessJson();
-        }
-        catch { }
-    }
-}
 // Must be sync for the help output use case
 function getDaemonProcessIdSync() {
-    if (!(0,
+    if (!(0, node_fs_1.existsSync)(exports.serverProcessJsonPath)) {
         return null;
     }
     try {
-        const daemonProcessJson = (0,
+        const daemonProcessJson = (0, fileutils_1.readJsonFile)(exports.serverProcessJsonPath);
         return daemonProcessJson.processId;
     }
     catch {
```
|
package/src/daemon/client/client.js
CHANGED
```diff
@@ -4,9 +4,8 @@ exports.daemonClient = exports.DaemonClient = void 0;
 exports.isDaemonEnabled = isDaemonEnabled;
 const workspace_root_1 = require("../../utils/workspace-root");
 const child_process_1 = require("child_process");
-const
+const node_fs_1 = require("node:fs");
 const promises_1 = require("fs/promises");
-const fs_extra_1 = require("fs-extra");
 const net_1 = require("net");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
@@ -409,8 +408,10 @@ class DaemonClient {
         }
     }
     async startInBackground() {
-        (0,
-        (0,
+        (0, node_fs_1.mkdirSync)(tmp_dir_1.DAEMON_DIR_FOR_CURRENT_WORKSPACE, { recursive: true });
+        if (!(0, node_fs_1.existsSync)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE)) {
+            (0, node_fs_1.writeFileSync)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE, '');
+        }
         this._out = await (0, promises_1.open)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE, 'a');
         this._err = await (0, promises_1.open)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE, 'a');
         const backgroundProcess = (0, child_process_1.spawn)(process.execPath, [(0, path_1.join)(__dirname, `../server/start.js`)], {
@@ -454,7 +455,7 @@ class DaemonClient {
         catch (err) {
             output_1.output.error({
                 title: err?.message ||
-                    'Something unexpected went wrong when stopping the server',
+                    'Something unexpected went wrong when stopping the daemon server',
             });
         }
         (0, tmp_dir_1.removeSocketDir)();
@@ -467,12 +468,12 @@ function isDaemonEnabled() {
 }
 function isDocker() {
     try {
-        (0,
+        (0, node_fs_1.statSync)('/.dockerenv');
         return true;
     }
     catch {
         try {
-            return (0,
+            return (0, node_fs_1.readFileSync)('/proc/self/cgroup', 'utf8')?.includes('docker');
         }
         catch { }
         return false;
@@ -483,7 +484,7 @@ function nxJsonIsNotPresent() {
 }
 function daemonProcessException(message) {
     try {
-        let log = (0,
+        let log = (0, node_fs_1.readFileSync)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE).toString().split('\n');
         if (log.length > 20) {
             log = log.slice(log.length - 20);
         }
```
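The `isDocker()` change is a drop-in `node:fs` version of a common detection pattern: stat the `/.dockerenv` marker file, then fall back to scanning the process's cgroup table. Extracted as a standalone sketch:

```js
const { statSync, readFileSync } = require('node:fs');

// Returns true when the current process appears to run inside Docker.
function isDocker() {
  try {
    // Docker creates /.dockerenv at the container root; statSync throws if it is absent.
    statSync('/.dockerenv');
    return true;
  }
  catch {
    try {
      // Fallback: some runtimes expose "docker" in the process's cgroup table.
      return readFileSync('/proc/self/cgroup', 'utf8')?.includes('docker');
    }
    catch { }
    return false;
  }
}
```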
package/src/daemon/tmp-dir.js
CHANGED
```diff
@@ -11,8 +11,7 @@ exports.removeSocketDir = removeSocketDir;
  * location within the OS's tmp directory where we write log files for background processes
  * and where we create the actual unix socket/named pipe for the daemon.
  */
-const
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const cache_directory_1 = require("../utils/cache-directory");
 const crypto_1 = require("crypto");
@@ -26,15 +25,15 @@ const getDaemonSocketDir = () => (0, path_1.join)(getSocketDir(),
 exports.getDaemonSocketDir = getDaemonSocketDir;
 function writeDaemonLogs(error) {
     const file = (0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'daemon-error.log');
-    (0,
+    (0, node_fs_1.writeFileSync)(file, error);
     return file;
 }
 function markDaemonAsDisabled() {
-    (0,
+    (0, node_fs_1.writeFileSync)((0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'disabled'), 'true');
 }
 function isDaemonDisabled() {
     try {
-        (0,
+        (0, node_fs_1.statSync)((0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'disabled'));
         return true;
     }
     catch (e) {
@@ -56,7 +55,7 @@ function getSocketDir(alreadyUnique = false) {
         const dir = process.env.NX_SOCKET_DIR ??
             process.env.NX_DAEMON_SOCKET_DIR ??
             (alreadyUnique ? tmp_1.tmpdir : socketDirName());
-        (0,
+        (0, node_fs_1.mkdirSync)(dir, { recursive: true });
         return dir;
     }
     catch (e) {
@@ -65,7 +64,7 @@ function getSocketDir(alreadyUnique = false) {
 }
 function removeSocketDir() {
     try {
-        (0,
+        (0, node_fs_1.rmSync)(getSocketDir(), { recursive: true, force: true });
     }
     catch (e) { }
 }
```
package/src/executors/run-commands/run-commands.impl.js
CHANGED
```diff
@@ -16,22 +16,11 @@ let pseudoTerminal;
 const childProcesses = new Set();
 function loadEnvVarsFile(path, env = {}) {
     (0, task_env_1.unloadDotEnvFile)(path, env);
-    const result = (0, task_env_1.loadAndExpandDotEnvFile)(path, env
+    const result = (0, task_env_1.loadAndExpandDotEnvFile)(path, env);
     if (result.error) {
         throw result.error;
     }
 }
-function loadEnvVars(path, env = {}) {
-    if (path) {
-        loadEnvVarsFile(path, env);
-    }
-    else {
-        try {
-            loadEnvVarsFile('.env', env);
-        }
-        catch { }
-    }
-}
 const propKeys = [
     'command',
     'commands',
@@ -292,20 +281,24 @@ function calculateCwd(cwd, context) {
         return cwd;
     return path.join(context.root, cwd);
 }
-
-
-
+/**
+ * Env variables are processed in the following order:
+ * - env option from executor options
+ * - env file from envFile option if provided
+ * - local env variables
+ */
+function processEnv(color, cwd, envOptionFromExecutor, envFile) {
+    let localEnv = (0, npm_run_path_1.env)({ cwd: cwd ?? process.cwd() });
+    localEnv = {
         ...process.env,
         ...localEnv,
     };
-
-
-    loadEnvVars(envFile, res);
+    if (process.env.NX_LOAD_DOT_ENV_FILES !== 'false' && envFile) {
+        loadEnvVarsFile(envFile, localEnv);
     }
-
-
-    ...
-    ...env,
+    let res = {
+        ...localEnv,
+        ...envOptionFromExecutor,
     };
     // need to override PATH to make sure we are using the local node_modules
     if (localEnv.PATH)
```
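The new doc comment on `processEnv` spells out the precedence; a reduced sketch of the same spread-based layering (npm-run-path lookup and dotenv parsing omitted, and `layerEnv` is a hypothetical name):

```js
// Reduced sketch of processEnv's layering: each later spread overrides the
// earlier ones. The real function also runs npm-run-path and dotenv expansion.
function layerEnv(ambientEnv, envFileVars, executorEnvOption) {
  let localEnv = { ...ambientEnv };      // process.env plus npm-run-path's PATH entries
  Object.assign(localEnv, envFileVars);  // vars that loadEnvVarsFile expands into localEnv
  return {
    ...localEnv,
    ...executorEnvOption,                // the executor's `env` option always wins
  };
}

console.log(layerEnv({ A: '1', B: '1' }, { B: '2', C: '2' }, { C: '3' }));
// -> { A: '1', B: '2', C: '3' }
```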
package/src/generators/tree.d.ts
CHANGED
package/src/generators/tree.js
CHANGED
```diff
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.FsTree = void 0;
 exports.flushChanges = flushChanges;
 exports.printChanges = printChanges;
-const
+const node_fs_1 = require("node:fs");
 const logger_1 = require("../utils/logger");
 const output_1 = require("../utils/output");
 const path_1 = require("path");
@@ -228,22 +228,22 @@ class FsTree {
     }
     fsReadDir(dirPath) {
         try {
-            return (0,
+            return (0, node_fs_1.readdirSync)((0, path_1.join)(this.root, dirPath));
         }
         catch {
             return [];
         }
     }
     fsIsFile(filePath) {
-        const stat = (0,
+        const stat = (0, node_fs_1.statSync)((0, path_1.join)(this.root, filePath));
         return stat.isFile();
     }
     fsReadFile(filePath) {
-        return (0,
+        return (0, node_fs_1.readFileSync)((0, path_1.join)(this.root, filePath));
     }
     fsExists(filePath) {
         try {
-            const stat = (0,
+            const stat = (0, node_fs_1.statSync)((0, path_1.join)(this.root, filePath));
             return stat.isFile() || stat.isDirectory();
         }
         catch {
@@ -279,18 +279,18 @@ function flushChanges(root, fileChanges) {
     fileChanges.forEach((f) => {
         const fpath = (0, path_1.join)(root, f.path);
         if (f.type === 'CREATE') {
-            (0,
-            (0,
+            (0, node_fs_1.mkdirSync)((0, path_1.dirname)(fpath), { recursive: true });
+            (0, node_fs_1.writeFileSync)(fpath, f.content);
             if (f.options?.mode)
-                (0,
+                (0, node_fs_1.chmodSync)(fpath, f.options.mode);
         }
         else if (f.type === 'UPDATE') {
-            (0,
+            (0, node_fs_1.writeFileSync)(fpath, f.content);
             if (f.options?.mode)
-                (0,
+                (0, node_fs_1.chmodSync)(fpath, f.options.mode);
         }
         else if (f.type === 'DELETE') {
-            (0,
+            (0, node_fs_1.rmSync)(fpath, { recursive: true, force: true });
         }
     });
 }
```
package/src/hasher/hash-task.d.ts
CHANGED
```diff
@@ -2,5 +2,7 @@ import { Task, TaskGraph } from '../config/task-graph';
 import { TaskHasher } from './task-hasher';
 import { ProjectGraph } from '../config/project-graph';
 import { NxJsonConfiguration } from '../config/nx-json';
-
-export declare function
+import { TaskDetails } from '../native';
+export declare function getTaskDetails(): TaskDetails | null;
+export declare function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher: TaskHasher, projectGraph: ProjectGraph, taskGraph: TaskGraph, nxJson: NxJsonConfiguration, tasksDetails: TaskDetails | null): Promise<void>;
+export declare function hashTask(hasher: TaskHasher, projectGraph: ProjectGraph, taskGraph: TaskGraph, task: Task, env: NodeJS.ProcessEnv, taskDetails: TaskDetails | null): Promise<void>;
```
package/src/hasher/hash-task.js
CHANGED
```diff
@@ -1,5 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.getTaskDetails = getTaskDetails;
 exports.hashTasksThatDoNotDependOnOutputsOfOtherTasks = hashTasksThatDoNotDependOnOutputsOfOtherTasks;
 exports.hashTask = hashTask;
 const utils_1 = require("../tasks-runner/utils");
@@ -19,9 +20,8 @@ function getTaskDetails() {
     }
     return taskDetails;
 }
-async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson) {
+async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson, tasksDetails) {
     performance.mark('hashMultipleTasks:start');
-    const taskDetails = getTaskDetails();
     const tasks = Object.values(taskGraph.tasks);
     const tasksWithHashers = await Promise.all(tasks.map(async (task) => {
         const customHasher = (0, utils_1.getCustomHasher)(task, projectGraph);
@@ -42,9 +42,8 @@ async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGrap
         tasksToHash[i].hash = hashes[i].value;
         tasksToHash[i].hashDetails = hashes[i].details;
     }
-
-
-    taskDetails.recordTaskDetails(tasksToHash.map((task) => ({
+    if (tasksDetails?.recordTaskDetails) {
+        tasksDetails.recordTaskDetails(tasksToHash.map((task) => ({
             hash: task.hash,
             project: task.target.project,
             target: task.target.target,
@@ -54,9 +53,8 @@ async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGrap
     performance.mark('hashMultipleTasks:end');
     performance.measure('hashMultipleTasks', 'hashMultipleTasks:start', 'hashMultipleTasks:end');
 }
-async function hashTask(hasher, projectGraph, taskGraph, task, env) {
+async function hashTask(hasher, projectGraph, taskGraph, task, env, taskDetails) {
     performance.mark('hashSingleTask:start');
-    const taskDetails = getTaskDetails();
     const customHasher = (0, utils_1.getCustomHasher)(task, projectGraph);
     const projectsConfigurations = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
     const { value, details } = await (customHasher
@@ -72,8 +70,7 @@ async function hashTask(hasher, projectGraph, taskGraph, task, env) {
         : hasher.hashTask(task, taskGraph, env));
     task.hash = value;
     task.hashDetails = details;
-
-    if (taskDetails) {
+    if (taskDetails?.recordTaskDetails) {
         taskDetails.recordTaskDetails([
             {
                 hash: task.hash,
```
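Both hashing entry points now take the `TaskDetails` handle as a parameter instead of resolving it internally; callers (see `run-command.js` and `task-orchestrator.js` below) resolve it once up front. A sketch of the new calling convention, where `hashAllTasks` is a hypothetical wrapper and the imported signatures come from the `.d.ts` hunk above:

```js
// Sketch of the new calling convention: resolve TaskDetails once up front and
// thread it through each hashing call instead of re-resolving per task.
const { getTaskDetails, hashTask } = require('./hash-task');

async function hashAllTasks(hasher, projectGraph, taskGraph, env) {
  // getTaskDetails() can return null per the updated declaration; both hashing
  // functions guard with taskDetails?.recordTaskDetails before recording.
  const taskDetails = getTaskDetails();
  for (const task of Object.values(taskGraph.tasks)) {
    await hashTask(hasher, projectGraph, taskGraph, task, env, taskDetails);
  }
}
```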
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file
package/src/plugins/js/index.js
CHANGED
```diff
@@ -2,7 +2,6 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createDependencies = exports.createNodes = exports.name = void 0;
 const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const cache_directory_1 = require("../../utils/cache-directory");
@@ -96,7 +95,7 @@ function lockFileNeedsReprocessing(lockHash) {
     }
 }
 function writeLastProcessedLockfileHash(hash, lockFile) {
-    (0,
+    (0, fs_1.mkdirSync)((0, path_1.dirname)(lockFileHashFile), { recursive: true });
     (0, fs_1.writeFileSync)(cachedParsedLockFile, JSON.stringify(lockFile, null, 2));
     (0, fs_1.writeFileSync)(lockFileHashFile, hash);
 }
```
package/src/project-graph/nx-deps-cache.js
CHANGED
```diff
@@ -8,8 +8,7 @@ exports.createProjectFileMapCache = createProjectFileMapCache;
 exports.writeCache = writeCache;
 exports.shouldRecomputeWholeGraph = shouldRecomputeWholeGraph;
 exports.extractCachedFileData = extractCachedFileData;
-const
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const cache_directory_1 = require("../utils/cache-directory");
@@ -19,8 +18,8 @@ exports.nxProjectGraph = (0, path_1.join)(cache_directory_1.workspaceDataDirecto
 exports.nxFileMap = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'file-map.json');
 function ensureCacheDirectory() {
     try {
-        if (!(0,
-            (0,
+        if (!(0, node_fs_1.existsSync)(cache_directory_1.workspaceDataDirectory)) {
+            (0, node_fs_1.mkdirSync)(cache_directory_1.workspaceDataDirectory, { recursive: true });
         }
     }
     catch (e) {
@@ -102,9 +101,9 @@ function writeCache(cache, projectGraph) {
     const tmpFileMapPath = `${exports.nxFileMap}~${unique}`;
     try {
         (0, fileutils_1.writeJsonFile)(tmpProjectGraphPath, projectGraph);
-        (0,
+        (0, node_fs_1.renameSync)(tmpProjectGraphPath, exports.nxProjectGraph);
         (0, fileutils_1.writeJsonFile)(tmpFileMapPath, cache);
-        (0,
+        (0, node_fs_1.renameSync)(tmpFileMapPath, exports.nxFileMap);
         done = true;
     }
     catch (err) {
```
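`writeCache` keeps its temp-file-then-rename strategy, now via `node:fs.renameSync`: a rename within one filesystem is atomic, so a concurrent reader sees either the old or the new JSON, never a torn write. The pattern in isolation (the helper name is hypothetical):

```js
// Isolated sketch of the write-then-rename pattern used by writeCache above.
const { writeFileSync, renameSync } = require('node:fs');

function writeJsonAtomically(finalPath, data, unique = process.pid) {
  // Unique suffix so concurrent writers never share a temp file.
  const tmpPath = `${finalPath}~${unique}`;
  writeFileSync(tmpPath, JSON.stringify(data, null, 2));
  // rename() on the same filesystem is atomic: readers observe either the
  // old file or the new one, never a partially written JSON document.
  renameSync(tmpPath, finalPath);
}
```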
package/src/tasks-runner/cache.js
CHANGED
```diff
@@ -3,11 +3,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.Cache = exports.DbCache = void 0;
 exports.getCache = getCache;
 const workspace_root_1 = require("../utils/workspace-root");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const default_tasks_runner_1 = require("./default-tasks-runner");
 const child_process_1 = require("child_process");
+const node_fs_1 = require("node:fs");
+const promises_1 = require("node:fs/promises");
 const cache_directory_1 = require("../utils/cache-directory");
 const node_machine_id_1 = require("node-machine-id");
 const native_1 = require("../native");
@@ -233,13 +234,13 @@ class Cache {
             // might be left overs from partially-completed cache invocations
             await this.remove(tdCommit);
             await this.remove(td);
-            await (0,
-            await (0,
-            await (0,
+            await (0, promises_1.mkdir)(td, { recursive: true });
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
+            await (0, promises_1.mkdir)((0, path_1.join)(td, 'outputs'));
             const expandedOutputs = await this.expandOutputsInWorkspace(outputs);
             await Promise.all(expandedOutputs.map(async (f) => {
                 const src = (0, path_1.join)(this.root, f);
-                if (
+                if ((0, node_fs_1.existsSync)(src)) {
                     const cached = (0, path_1.join)(td, 'outputs', f);
                     await this.copy(src, cached);
                 }
@@ -248,15 +249,15 @@ class Cache {
             // creating this file is atomic, whereas creating a folder is not.
             // so if the process gets terminated while we are copying stuff into cache,
             // the cache entry won't be used.
-            await (0,
-            await (0,
-            await (0,
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'code'), code.toString());
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'source'), await this.currentMachineId());
+            await (0, promises_1.writeFile)(tdCommit, 'true');
             if (this.options.remoteCache) {
                 await this.options.remoteCache.store(task.hash, this.cachePath);
             }
             if (terminalOutput) {
                 const outputPath = this.temporaryOutputPath(task);
-                await (0,
+                await (0, promises_1.writeFile)(outputPath, terminalOutput);
             }
         });
     }
@@ -265,7 +266,7 @@ class Cache {
         const expandedOutputs = await this.expandOutputsInCache(outputs, cachedResult);
         await Promise.all(expandedOutputs.map(async (f) => {
             const cached = (0, path_1.join)(cachedResult.outputsPath, f);
-            if (
+            if ((0, node_fs_1.existsSync)(cached)) {
                 const src = (0, path_1.join)(this.root, f);
                 await this.remove(src);
                 await this.copy(cached, src);
@@ -321,11 +322,11 @@ class Cache {
     async getFromLocalDir(task) {
         const tdCommit = (0, path_1.join)(this.cachePath, `${task.hash}.commit`);
         const td = (0, path_1.join)(this.cachePath, task.hash);
-        if (
-            const terminalOutput = await (0,
+        if ((0, node_fs_1.existsSync)(tdCommit)) {
+            const terminalOutput = await (0, promises_1.readFile)((0, path_1.join)(td, 'terminalOutput'), 'utf-8');
             let code = 0;
             try {
-                code = Number(await (0,
+                code = Number(await (0, promises_1.readFile)((0, path_1.join)(td, 'code'), 'utf-8'));
             }
             catch { }
             return {
@@ -342,7 +343,7 @@ class Cache {
         const td = (0, path_1.join)(this.cachePath, task.hash);
         let sourceMachineId = null;
         try {
-            sourceMachineId = await (0,
+            sourceMachineId = await (0, promises_1.readFile)((0, path_1.join)(td, 'source'), 'utf-8');
         }
         catch { }
         if (sourceMachineId && sourceMachineId != (await this.currentMachineId())) {
@@ -361,12 +362,12 @@
         }
     }
     createCacheDir() {
-        (0,
+        (0, node_fs_1.mkdirSync)(cache_directory_1.cacheDir, { recursive: true });
        return cache_directory_1.cacheDir;
     }
     createTerminalOutputsDir() {
         const path = (0, path_1.join)(this.cachePath, 'terminalOutputs');
-        (0,
+        (0, node_fs_1.mkdirSync)(path, { recursive: true });
         return path;
     }
 }
```
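The `Cache` class's `.commit` marker protocol survives the refactor intact: creating a single file is atomic where populating a directory is not, so the marker is written last and `getFromLocalDir` refuses any directory without it. A condensed sketch of both sides (`putEntry`/`getEntry` are hypothetical names standing in for the `Cache` methods):

```js
// Condensed sketch of the commit-marker protocol preserved by this refactor.
const { existsSync } = require('node:fs');
const { mkdir, writeFile, readFile } = require('node:fs/promises');
const { join } = require('path');

async function putEntry(cachePath, hash, terminalOutput, code) {
  const td = join(cachePath, hash);
  await mkdir(td, { recursive: true });
  await writeFile(join(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
  await writeFile(join(td, 'code'), code.toString());
  // Written last: creating this file is atomic, so a crash during the copies
  // above leaves a directory that readers will simply ignore.
  await writeFile(join(cachePath, `${hash}.commit`), 'true');
}

async function getEntry(cachePath, hash) {
  if (!existsSync(join(cachePath, `${hash}.commit`))) {
    return null; // entry was never fully committed
  }
  const td = join(cachePath, hash);
  return {
    terminalOutput: await readFile(join(td, 'terminalOutput'), 'utf-8'),
    code: Number(await readFile(join(td, 'code'), 'utf-8')),
  };
}
```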
package/src/tasks-runner/life-cycles/dynamic-run-many-terminal-output-life-cycle.js
CHANGED
```diff
@@ -224,6 +224,11 @@ async function createRunManyDynamicOutputRenderer({ projectNames, tasks, args, o
     clearRenderInterval();
     const timeTakenText = (0, pretty_time_1.prettyTime)(process.hrtime(start));
     moveCursorToStartOfPinnedFooter();
+    if (totalTasks === 0) {
+        renderPinnedFooter([output_1.output.applyNxPrefix('gray', 'No tasks were run')]);
+        resolveRenderIsDonePromise();
+        return;
+    }
     if (totalSuccessfulTasks === totalTasks) {
         const text = `Successfully ran ${(0, formatting_utils_1.formatTargetsAndProjects)(projectNames, targets, tasks)}`;
         const taskOverridesRows = [];
```
package/src/tasks-runner/life-cycles/static-run-many-terminal-output-life-cycle.js
CHANGED
```diff
@@ -23,6 +23,9 @@ class StaticRunManyTerminalOutputLifeCycle {
         this.allCompletedTasks = new Map();
     }
     startCommand() {
+        if (this.tasks.length === 0) {
+            return;
+        }
         if (this.projectNames.length <= 0) {
             output_1.output.logSingleLine(`No projects with ${(0, formatting_utils_1.formatTargetsAndProjects)(this.projectNames, this.args.targets, this.tasks)} were run`);
             return;
@@ -45,6 +48,10 @@ class StaticRunManyTerminalOutputLifeCycle {
     }
     endCommand() {
         output_1.output.addNewline();
+        if (this.tasks.length === 0) {
+            output_1.output.logSingleLine(`No tasks were run`);
+            return;
+        }
         if (this.failedTasks.length === 0) {
             output_1.output.addVerticalSeparatorWithoutNewLines('green');
             const bodyLines = this.cachedTasks.length > 0
```
package/src/tasks-runner/remove-old-cache-records.js
CHANGED
```diff
@@ -1,7 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const WEEK_IN_MS = 1000 * 60 * 60 * 24 * 7;
 const folder = process.argv[2];
@@ -34,11 +33,11 @@ function removeOld(records) {
         if (time - s.mtimeMs > WEEK_IN_MS) {
             if (s.isDirectory()) {
                 try {
-                    (0,
+                    (0, fs_1.rmSync)(`${r}.commit`, { recursive: true, force: true });
                 }
                 catch (e) { }
             }
-            (0,
+            (0, fs_1.rmSync)(r, { recursive: true, force: true });
         }
     }
     catch (e) { }
```
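`remove-old-cache-records.js` keeps its one-week retention logic, just with `fs.rmSync` instead of fs-extra. The core age check, as an isolated sketch (`removeIfStale` is a hypothetical name):

```js
// Isolated sketch of the retention check driving removeOld above.
const { statSync, rmSync } = require('fs');

const WEEK_IN_MS = 1000 * 60 * 60 * 24 * 7;

function removeIfStale(recordPath, now = Date.now()) {
  const s = statSync(recordPath);
  if (now - s.mtimeMs > WEEK_IN_MS) {
    if (s.isDirectory()) {
      // Directory records have a sibling "<path>.commit" marker; force: true
      // makes the call a no-op when the marker was never written.
      rmSync(`${recordPath}.commit`, { recursive: true, force: true });
    }
    rmSync(recordPath, { recursive: true, force: true });
  }
}
```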
package/src/tasks-runner/run-command.js
CHANGED
```diff
@@ -363,12 +363,14 @@ function setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles) {
 }
 async function invokeTasksRunner({ tasks, projectGraph, taskGraph, lifeCycle, nxJson, nxArgs, loadDotEnvFiles, initiatingProject, }) {
     setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles);
+    // this needs to be done before we start to run the tasks
+    const taskDetails = (0, hash_task_1.getTaskDetails)();
     const { tasksRunner, runnerOptions } = getRunner(nxArgs, nxJson);
     let hasher = (0, create_task_hasher_1.createTaskHasher)(projectGraph, nxJson, runnerOptions);
     // this is used for two reasons: to fetch all remote cache hits AND
     // to submit everything that is known in advance to Nx Cloud to run in
     // a distributed fashion
-    await (0, hash_task_1.hashTasksThatDoNotDependOnOutputsOfOtherTasks)(hasher, projectGraph, taskGraph, nxJson);
+    await (0, hash_task_1.hashTasksThatDoNotDependOnOutputsOfOtherTasks)(hasher, projectGraph, taskGraph, nxJson, taskDetails);
     const taskResultsLifecycle = new task_results_life_cycle_1.TaskResultsLifeCycle();
     const compositedLifeCycle = new life_cycle_1.CompositeLifeCycle([
         ...constructLifeCycles(lifeCycle),
```
package/src/tasks-runner/task-orchestrator.js
CHANGED
```diff
@@ -27,6 +27,7 @@ class TaskOrchestrator {
         this.bail = bail;
         this.daemon = daemon;
         this.outputStyle = outputStyle;
+        this.taskDetails = (0, hash_task_1.getTaskDetails)();
         this.cache = (0, cache_1.getCache)(this.nxJson, this.options);
         this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
         this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
@@ -93,7 +94,7 @@ class TaskOrchestrator {
         const task = this.taskGraph.tasks[taskId];
         const taskSpecificEnv = (0, task_env_1.getTaskSpecificEnv)(task);
         if (!task.hash) {
-            await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv);
+            await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv, this.taskDetails);
         }
         await this.options.lifeCycle.scheduleTask(task);
         return taskSpecificEnv;
@@ -101,7 +102,7 @@ class TaskOrchestrator {
     async processScheduledBatch(batch) {
         await Promise.all(Object.values(batch.taskGraph.tasks).map(async (task) => {
             if (!task.hash) {
-                await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv);
+                await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv, this.taskDetails);
             }
             await this.options.lifeCycle.scheduleTask(task);
         }));
@@ -321,6 +322,9 @@ class TaskOrchestrator {
             };
         }
         catch (e) {
+            if (process.env.NX_VERBOSE_LOGGING === 'true') {
+                console.error(e);
+            }
             return {
                 code: 1,
             };
```