nx 19.6.0-beta.1 → 19.6.0-beta.2
- package/bin/post-install.js +8 -0
- package/package.json +12 -12
- package/schemas/nx-schema.json +25 -0
- package/schemas/project-schema.json +7 -0
- package/src/adapter/compat.d.ts +1 -1
- package/src/adapter/compat.js +1 -0
- package/src/command-line/nx-commands.js +3 -0
- package/src/command-line/sync/command-object.d.ts +6 -0
- package/src/command-line/sync/command-object.js +25 -0
- package/src/command-line/sync/sync.d.ts +6 -0
- package/src/command-line/sync/sync.js +30 -0
- package/src/config/nx-json.d.ts +23 -0
- package/src/config/workspace-json-project-json.d.ts +5 -0
- package/src/daemon/client/client.d.ts +5 -0
- package/src/daemon/client/client.js +33 -0
- package/src/daemon/message-types/flush-sync-generator-changes-to-disk.d.ts +6 -0
- package/src/daemon/message-types/flush-sync-generator-changes-to-disk.js +11 -0
- package/src/daemon/message-types/get-registered-sync-generators.d.ts +5 -0
- package/src/daemon/message-types/get-registered-sync-generators.js +11 -0
- package/src/daemon/message-types/get-sync-generator-changes.d.ts +6 -0
- package/src/daemon/message-types/get-sync-generator-changes.js +11 -0
- package/src/daemon/message-types/update-workspace-context.d.ts +8 -0
- package/src/daemon/message-types/update-workspace-context.js +11 -0
- package/src/daemon/server/handle-flush-sync-generator-changes-to-disk.d.ts +2 -0
- package/src/daemon/server/handle-flush-sync-generator-changes-to-disk.js +11 -0
- package/src/daemon/server/handle-get-registered-sync-generators.d.ts +2 -0
- package/src/daemon/server/handle-get-registered-sync-generators.js +11 -0
- package/src/daemon/server/handle-get-sync-generator-changes.d.ts +2 -0
- package/src/daemon/server/handle-get-sync-generator-changes.js +17 -0
- package/src/daemon/server/handle-update-workspace-context.d.ts +2 -0
- package/src/daemon/server/handle-update-workspace-context.js +11 -0
- package/src/daemon/server/project-graph-incremental-recomputation.d.ts +1 -0
- package/src/daemon/server/project-graph-incremental-recomputation.js +19 -2
- package/src/daemon/server/server.js +25 -0
- package/src/daemon/server/sync-generators.d.ts +6 -0
- package/src/daemon/server/sync-generators.js +202 -0
- package/src/daemon/socket-utils.js +18 -5
- package/src/daemon/tmp-dir.js +2 -1
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +1 -1
- package/src/tasks-runner/run-command.d.ts +1 -1
- package/src/tasks-runner/run-command.js +120 -2
- package/src/utils/plugins/output.js +1 -1
- package/src/utils/sync-generators.d.ts +22 -0
- package/src/utils/sync-generators.js +161 -0
- package/src/utils/workspace-context.d.ts +1 -0
- package/src/utils/workspace-context.js +16 -0
- package/src/daemon/message-types/update-context-files.d.ts +0 -7
- package/src/daemon/message-types/update-context-files.js +0 -11
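
The additions above center on a new "sync generators" capability: generators that keep generated workspace files up to date before tasks run, coordinated through the daemon and a new `nx sync` command. The code in this diff reads `sync.applyChanges` and `sync.globalGenerators` from `nx.json`, reads a `syncGenerators` array on targets, and gates the behavior behind `NX_ENABLE_SYNC_GENERATORS=true`; the exact schema additions live in the updated `nx-schema.json` and `project-schema.json`, which are not expanded in this diff. A hedged sketch of the configuration shape implied by that code (option names come from the diff, values are illustrative):

// Illustrative only: option names are the ones read by the code in this diff; values are made up.
const nxJsonSyncOptions = {
  sync: {
    applyChanges: true, // read as nxJson.sync?.applyChanges in run-command.js
    globalGenerators: ['my-plugin:my-sync-generator'], // read as nxJson.sync?.globalGenerators
  },
};

const projectTargets = {
  build: {
    syncGenerators: ['my-plugin:my-sync-generator'], // read as target.syncGenerators when collecting tasks
  },
};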
package/src/daemon/server/sync-generators.js
ADDED
@@ -0,0 +1,202 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
+exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
+exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
+exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+const nx_json_1 = require("../../config/nx-json");
+const tree_1 = require("../../generators/tree");
+const file_hasher_1 = require("../../hasher/file-hasher");
+const project_graph_1 = require("../../project-graph/project-graph");
+const sync_generators_1 = require("../../utils/sync-generators");
+const workspace_root_1 = require("../../utils/workspace-root");
+const logger_1 = require("./logger");
+const project_graph_incremental_recomputation_1 = require("./project-graph-incremental-recomputation");
+const syncGeneratorsCacheResultPromises = new Map();
+let registeredTaskSyncGenerators = new Set();
+let registeredGlobalSyncGenerators = new Set();
+const scheduledGenerators = new Set();
+let waitPeriod = 100;
+let registeredSyncGenerators;
+let scheduledTimeoutId;
+let storedProjectGraphHash;
+let storedNxJsonHash;
+const log = (...messageParts) => {
+    logger_1.serverLogger.log('[SYNC]:', ...messageParts);
+};
+// TODO(leo): check conflicts and reuse the Tree where possible
+async function getCachedSyncGeneratorChanges(generators) {
+    try {
+        log('get sync generators changes on demand', generators);
+        // this is invoked imperatively, so we clear any scheduled run
+        if (scheduledTimeoutId) {
+            log('clearing scheduled run');
+            clearTimeout(scheduledTimeoutId);
+            scheduledTimeoutId = undefined;
+        }
+        // reset the wait time
+        waitPeriod = 100;
+        let projects;
+        let errored = false;
+        const getProjectsConfigurations = async () => {
+            if (projects || errored) {
+                return projects;
+            }
+            const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+            projects = projectGraph
+                ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+                : null;
+            errored = error !== undefined;
+            return projects;
+        };
+        return (await Promise.all(generators.map(async (generator) => {
+            if (scheduledGenerators.has(generator) ||
+                !syncGeneratorsCacheResultPromises.has(generator)) {
+                // it's scheduled to run (there are pending changes to process) or
+                // it's not scheduled and there's no cached result, so run it
+                const projects = await getProjectsConfigurations();
+                if (projects) {
+                    log(generator, 'already scheduled or not cached, running it now');
+                    runGenerator(generator, projects);
+                }
+                else {
+                    log(generator, 'already scheduled or not cached, project graph errored');
+                    /**
+                     * This should never happen. This is invoked imperatively, and by
+                     * the time it is invoked, the project graph would have already
+                     * been requested. If it errored, it would have been reported and
+                     * this wouldn't have been invoked. We handle it just in case.
+                     *
+                     * Since the project graph would be reported by the relevant
+                     * handlers separately, we just ignore the error, don't cache
+                     * any result and return an empty result, the next time this is
+                     * invoked the process will repeat until it eventually recovers
+                     * when the project graph is fixed.
+                     */
+                    return Promise.resolve({ changes: [], generatorName: generator });
+                }
+            }
+            else {
+                log(generator, 'not scheduled and has cached result, returning cached result');
+            }
+            return syncGeneratorsCacheResultPromises.get(generator);
+        }))).flat();
+    }
+    catch (e) {
+        console.error(e);
+        syncGeneratorsCacheResultPromises.clear();
+        return [];
+    }
+}
+async function flushSyncGeneratorChangesToDisk(generators) {
+    log('flush sync generators changes', generators);
+    const results = await getCachedSyncGeneratorChanges(generators);
+    for (const generator of generators) {
+        syncGeneratorsCacheResultPromises.delete(generator);
+    }
+    await (0, sync_generators_1.flushSyncGeneratorChanges)(results);
+}
+function collectAndScheduleSyncGenerators(projectGraph) {
+    if (!projectGraph) {
+        // If the project graph is not available, we can't collect and schedule
+        // sync generators. The project graph error will be reported separately.
+        return;
+    }
+    log('collect registered sync generators');
+    collectAllRegisteredSyncGenerators(projectGraph);
+    // a change imply we need to re-run all the generators
+    // make sure to schedule all the collected generators
+    scheduledGenerators.clear();
+    for (const generator of registeredSyncGenerators) {
+        scheduledGenerators.add(generator);
+    }
+    log('scheduling:', [...scheduledGenerators]);
+    if (scheduledTimeoutId) {
+        // we have a scheduled run already, so we don't need to do anything
+        return;
+    }
+    scheduledTimeoutId = setTimeout(async () => {
+        scheduledTimeoutId = undefined;
+        if (waitPeriod < 4000) {
+            waitPeriod = waitPeriod * 2;
+        }
+        if (scheduledGenerators.size === 0) {
+            // no generators to run
+            return;
+        }
+        const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
+        for (const generator of scheduledGenerators) {
+            runGenerator(generator, projects);
+        }
+        await Promise.all(syncGeneratorsCacheResultPromises.values());
+    }, waitPeriod);
+}
+async function getCachedRegisteredSyncGenerators() {
+    log('get registered sync generators');
+    if (!registeredSyncGenerators) {
+        log('no registered sync generators, collecting them');
+        const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+        collectAllRegisteredSyncGenerators(projectGraph);
+    }
+    else {
+        log('registered sync generators already collected, returning them');
+    }
+    return [...registeredSyncGenerators];
+}
+function collectAllRegisteredSyncGenerators(projectGraph) {
+    const projectGraphHash = hashProjectGraph(projectGraph);
+    if (storedProjectGraphHash !== projectGraphHash) {
+        storedProjectGraphHash = projectGraphHash;
+        registeredTaskSyncGenerators =
+            (0, sync_generators_1.collectRegisteredTaskSyncGenerators)(projectGraph);
+    }
+    else {
+        log('project graph hash is the same, not collecting task sync generators');
+    }
+    const nxJson = (0, nx_json_1.readNxJson)();
+    const nxJsonHash = (0, file_hasher_1.hashArray)(nxJson.sync?.globalGenerators?.sort() ?? []);
+    if (storedNxJsonHash !== nxJsonHash) {
+        storedNxJsonHash = nxJsonHash;
+        registeredGlobalSyncGenerators =
+            (0, sync_generators_1.collectRegisteredGlobalSyncGenerators)(nxJson);
+    }
+    else {
+        log('nx.json hash is the same, not collecting global sync generators');
+    }
+    const generators = new Set([
+        ...registeredTaskSyncGenerators,
+        ...registeredGlobalSyncGenerators,
+    ]);
+    if (!registeredSyncGenerators) {
+        registeredSyncGenerators = generators;
+        return;
+    }
+    for (const generator of registeredSyncGenerators) {
+        if (!generators.has(generator)) {
+            registeredSyncGenerators.delete(generator);
+            syncGeneratorsCacheResultPromises.delete(generator);
+        }
+    }
+    for (const generator of generators) {
+        if (!registeredSyncGenerators.has(generator)) {
+            registeredSyncGenerators.add(generator);
+        }
+    }
+}
+function runGenerator(generator, projects) {
+    log('running scheduled generator', generator);
+    // remove it from the scheduled set
+    scheduledGenerators.delete(generator);
+    const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+    // run the generator and cache the result
+    syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+        log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
+        return result;
+    }));
+}
+function hashProjectGraph(projectGraph) {
+    const stringifiedProjects = Object.entries(projectGraph.nodes)
+        .sort(([projectNameA], [projectNameB]) => projectNameA.localeCompare(projectNameB))
+        .map(([projectName, projectConfig]) => `${projectName}:${JSON.stringify(projectConfig)}`);
+    return (0, file_hasher_1.hashArray)(stringifiedProjects);
+}
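
The daemon file above caches generator results and debounces re-runs: workspace changes schedule all registered generators behind a single timer whose delay doubles from 100 ms up to a 4 s ceiling, while an on-demand request clears the timer and resets the delay. A minimal standalone sketch of that pattern, assumed simplification rather than the nx implementation itself:

// Sketch of the debounce-with-backoff scheduling used above (illustrative, not the package code).
let waitPeriod = 100;
let timer: ReturnType<typeof setTimeout> | undefined;
const scheduled = new Set<string>();

function schedule(generators: string[], run: (generator: string) => void) {
  generators.forEach((g) => scheduled.add(g));
  if (timer) {
    return; // a run is already pending
  }
  timer = setTimeout(() => {
    timer = undefined;
    if (waitPeriod < 4000) {
      waitPeriod *= 2; // back off while changes keep arriving
    }
    for (const generator of scheduled) {
      run(generator);
    }
    scheduled.clear();
  }, waitPeriod);
}

function runNow(generators: string[], run: (generator: string) => void) {
  if (timer) {
    clearTimeout(timer); // an imperative request cancels any scheduled run
    timer = undefined;
  }
  waitPeriod = 100; // and resets the back-off
  generators.forEach(run);
}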
package/src/daemon/socket-utils.js
CHANGED
@@ -15,20 +15,33 @@ exports.isWindows = (0, os_1.platform)() === 'win32';
  * See https://nodejs.org/dist/latest-v14.x/docs/api/net.html#net_identifying_paths_for_ipc_connections for a full breakdown
  * of OS differences between Unix domain sockets and named pipes.
  */
-const getFullOsSocketPath = () =>
-
-
+const getFullOsSocketPath = () => {
+    const path = (0, path_1.resolve)((0, tmp_dir_1.getDaemonSocketDir)());
+    assertValidSocketPath(path);
+    return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
+};
 exports.getFullOsSocketPath = getFullOsSocketPath;
 const getForkedProcessOsSocketPath = (id) => {
     let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(), 'fp' + id + '.sock'));
-
+    assertValidSocketPath(path);
+    return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
 };
 exports.getForkedProcessOsSocketPath = getForkedProcessOsSocketPath;
 const getPluginOsSocketPath = (id) => {
     let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(true), 'plugin' + id + '.sock'));
-
+    assertValidSocketPath(path);
+    return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
 };
 exports.getPluginOsSocketPath = getPluginOsSocketPath;
+function assertValidSocketPath(path) {
+    if (path.length > 95) {
+        throw new Error([
+            'Attempted to open socket that exceeds the maximum socket length.',
+            '',
+            `Set NX_SOCKET_DIR to a shorter path (e.g. ${exports.isWindows ? '%TMP%/nx-tmp' : '/tmp/nx-tmp'}) to avoid this issue.`,
+        ].join('\n'));
+    }
+}
 function killSocketOrPath() {
     try {
         (0, fs_1.unlinkSync)((0, exports.getFullOsSocketPath)());
package/src/daemon/tmp-dir.js
CHANGED
@@ -53,7 +53,8 @@ function socketDirName() {
  */
 function getSocketDir(alreadyUnique = false) {
     try {
-        const dir = process.env.
+        const dir = process.env.NX_SOCKET_DIR ??
+            process.env.NX_DAEMON_SOCKET_DIR ??
             (alreadyUnique ? tmp_1.tmpdir : socketDirName());
         (0, fs_extra_1.ensureDirSync)(dir);
         return dir;
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file
package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js
CHANGED
@@ -94,7 +94,7 @@ function addNxCloudOptionsToNxJson(tree, token, directory = '') {
         });
     }
 }
-function addNxCloudIdToNxJson(tree, nxCloudId, directory =
+function addNxCloudIdToNxJson(tree, nxCloudId, directory = '') {
     const nxJsonPath = (0, path_1.join)(directory, 'nx.json');
     if (tree.exists(nxJsonPath)) {
         (0, json_1.updateJson)(tree, (0, path_1.join)(directory, 'nx.json'), (nxJson) => {
package/src/tasks-runner/run-command.d.ts
CHANGED
@@ -5,7 +5,7 @@ import { TargetDependencyConfig } from '../config/workspace-json-project-json';
 import { NxArgs } from '../utils/command-line-utils';
 import { LifeCycle } from './life-cycle';
 import { TasksRunner } from './tasks-runner';
-export declare function runCommand(projectsToRun: ProjectGraphProjectNode[],
+export declare function runCommand(projectsToRun: ProjectGraphProjectNode[], currentProjectGraph: ProjectGraph, { nxJson }: {
     nxJson: NxJsonConfiguration;
 }, nxArgs: NxArgs, overrides: any, initiatingProject: string | null, extraTargetDependencies: Record<string, (TargetDependencyConfig | string)[]>, extraOptions: {
     excludeTaskDependencies: boolean;
package/src/tasks-runner/run-command.js
CHANGED
@@ -4,16 +4,20 @@ exports.runCommand = runCommand;
 exports.invokeTasksRunner = invokeTasksRunner;
 exports.getRunner = getRunner;
 exports.getRunnerOptions = getRunnerOptions;
+const enquirer_1 = require("enquirer");
+const ora = require("ora");
 const path_1 = require("path");
 const nx_json_1 = require("../config/nx-json");
 const client_1 = require("../daemon/client/client");
 const create_task_hasher_1 = require("../hasher/create-task-hasher");
 const hash_task_1 = require("../hasher/hash-task");
+const project_graph_1 = require("../project-graph/project-graph");
 const fileutils_1 = require("../utils/fileutils");
 const is_ci_1 = require("../utils/is-ci");
 const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
 const output_1 = require("../utils/output");
 const params_1 = require("../utils/params");
+const sync_generators_1 = require("../utils/sync-generators");
 const workspace_root_1 = require("../utils/workspace-root");
 const create_task_graph_1 = require("./create-task-graph");
 const life_cycle_1 = require("./life-cycle");
@@ -27,6 +31,7 @@ const task_profiling_life_cycle_1 = require("./life-cycles/task-profiling-life-c
 const task_timings_life_cycle_1 = require("./life-cycles/task-timings-life-cycle");
 const task_graph_utils_1 = require("./task-graph-utils");
 const utils_1 = require("./utils");
+const chalk = require("chalk");
 async function getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides) {
     const { runnerOptions } = getRunner(nxArgs, nxJson);
     const isRunOne = initiatingProject != null;
@@ -90,10 +95,10 @@ function createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies,
     }
     return taskGraph;
 }
-async function runCommand(projectsToRun,
+async function runCommand(projectsToRun, currentProjectGraph, { nxJson }, nxArgs, overrides, initiatingProject, extraTargetDependencies, extraOptions) {
     const status = await (0, params_1.handleErrors)(process.env.NX_VERBOSE_LOGGING === 'true', async () => {
         const projectNames = projectsToRun.map((t) => t.name);
-        const taskGraph =
+        const { projectGraph, taskGraph } = await ensureWorkspaceIsInSyncAndGetGraphs(currentProjectGraph, nxJson, projectNames, nxArgs, overrides, extraTargetDependencies, extraOptions);
         const tasks = Object.values(taskGraph.tasks);
         const { lifeCycle, renderIsDone } = await getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides);
         const status = await invokeTasksRunner({
@@ -111,6 +116,119 @@ async function runCommand(projectsToRun, projectGraph, { nxJson }, nxArgs, overr
     });
     return status;
 }
+async function ensureWorkspaceIsInSyncAndGetGraphs(projectGraph, nxJson, projectNames, nxArgs, overrides, extraTargetDependencies, extraOptions) {
+    let taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
+    if (process.env.NX_ENABLE_SYNC_GENERATORS !== 'true') {
+        return { projectGraph, taskGraph };
+    }
+    // collect unique syncGenerators from the tasks
+    const uniqueSyncGenerators = new Set();
+    for (const { target } of Object.values(taskGraph.tasks)) {
+        const { syncGenerators } = projectGraph.nodes[target.project].data.targets[target.target];
+        if (!syncGenerators) {
+            continue;
+        }
+        for (const generator of syncGenerators) {
+            uniqueSyncGenerators.add(generator);
+        }
+    }
+    if (!uniqueSyncGenerators.size) {
+        // There are no sync generators registered in the tasks to run
+        return { projectGraph, taskGraph };
+    }
+    const syncGenerators = Array.from(uniqueSyncGenerators);
+    const results = await (0, sync_generators_1.getSyncGeneratorChanges)(syncGenerators);
+    if (!results.length) {
+        // There are no changes to sync, workspace is up to date
+        return { projectGraph, taskGraph };
+    }
+    const outOfSyncTitle = 'The workspace is out of sync';
+    const resultBodyLines = (0, sync_generators_1.syncGeneratorResultsToMessageLines)(results);
+    const fixMessage = 'You can manually run `nx sync` to update your workspace or you can set `sync.applyChanges` to `true` in your `nx.json` to apply the changes automatically when running tasks.';
+    const willErrorOnCiMessage = 'Please note that this will be an error on CI.';
+    if ((0, is_ci_1.isCI)() || !process.stdout.isTTY) {
+        // If the user is running in CI or is running in a non-TTY environment we
+        // throw an error to stop the execution of the tasks.
+        throw new Error(`${outOfSyncTitle}\n${resultBodyLines.join('\n')}\n${fixMessage}`);
+    }
+    if (nxJson.sync?.applyChanges === false) {
+        // If the user has set `sync.applyChanges` to `false` in their `nx.json`
+        // we don't prompt the them and just log a warning informing them that
+        // the workspace is out of sync and they have it set to not apply changes
+        // automatically.
+        output_1.output.warn({
+            title: outOfSyncTitle,
+            bodyLines: [
+                ...resultBodyLines,
+                'Your workspace is set to not apply changes automatically (`sync.applyChanges` is set to `false` in your `nx.json`).',
+                willErrorOnCiMessage,
+                fixMessage,
+            ],
+        });
+        return { projectGraph, taskGraph };
+    }
+    output_1.output.warn({
+        title: outOfSyncTitle,
+        bodyLines: [
+            ...resultBodyLines,
+            nxJson.sync?.applyChanges === true
+                ? 'Proceeding to sync the changes automatically (`sync.applyChanges` is set to `true` in your `nx.json`).'
+                : willErrorOnCiMessage,
+        ],
+    });
+    const applyChanges = nxJson.sync?.applyChanges === true ||
+        (await promptForApplyingSyncGeneratorChanges());
+    if (applyChanges) {
+        const spinner = ora('Syncing the workspace...');
+        spinner.start();
+        // Flush sync generator changes to disk
+        await (0, sync_generators_1.flushSyncGeneratorChanges)(results);
+        // Re-create project graph and task graph
+        projectGraph = await (0, project_graph_1.createProjectGraphAsync)();
+        taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
+        if (nxJson.sync?.applyChanges === true) {
+            spinner.succeed(`The workspace was synced successfully!
+
+Please make sure to commit the changes to your repository or this will error on CI.`);
+        }
+        else {
+            // The user was prompted and we already logged a message about erroring on CI
+            // so here we just tell them to commit the changes.
+            spinner.succeed(`The workspace was synced successfully!
+
+Please make sure to commit the changes to your repository.`);
+        }
+    }
+    else {
+        output_1.output.warn({
+            title: 'Syncing the workspace was skipped',
+            bodyLines: [
+                'This could lead to unexpected results or errors when running tasks.',
+                fixMessage,
+            ],
+        });
+    }
+    return { projectGraph, taskGraph };
+}
+async function promptForApplyingSyncGeneratorChanges() {
+    const promptConfig = {
+        name: 'applyChanges',
+        type: 'select',
+        message: 'Would you like to sync the changes to get your worskpace up to date?',
+        choices: [
+            {
+                name: 'yes',
+                message: 'Yes, sync the changes and run the tasks',
+            },
+            {
+                name: 'no',
+                message: 'No, run the tasks without syncing the changes',
+            },
+        ],
+        footer: () => chalk.dim('\nYou can skip this prompt by setting the `sync.applyChanges` option in your `nx.json`.'),
+    };
+    return await (0, enquirer_1.prompt)([promptConfig]).then(({ applyChanges }) => applyChanges === 'yes');
+}
 function setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles) {
     if (nxArgs.outputStyle == 'stream' ||
         process.env.NX_BATCH_MODE === 'true' ||
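
Taken together, the `run-command.js` changes gate the feature behind `NX_ENABLE_SYNC_GENERATORS=true`: before tasks run, the sync generators registered on the tasks' targets are executed, and if they report changes the run throws in CI or non-TTY environments, warns and continues when `sync.applyChanges` is `false` in `nx.json`, applies the changes and rebuilds the project and task graphs when it is `true`, and otherwise prompts interactively. The new `nx sync` command added in this release (see `src/command-line/sync` in the file list) is the manual path referenced by the out-of-sync message.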
package/src/utils/plugins/output.js
CHANGED
@@ -27,7 +27,7 @@ function listPlugins(plugins, title) {
         if (p.projectInference) {
             capabilities.push('project-inference');
         }
-        bodyLines.push(`${chalk.bold(p.name)} (${capabilities.join()})`);
+        bodyLines.push(`${chalk.bold(p.name)} ${capabilities.length >= 1 ? `(${capabilities.join()})` : ''}`);
     }
     output_1.output.log({
         title: title,
package/src/utils/sync-generators.d.ts
ADDED
@@ -0,0 +1,22 @@
+import type { GeneratorCallback } from '../config/misc-interfaces';
+import type { ProjectGraph } from '../config/project-graph';
+import type { ProjectConfiguration } from '../config/workspace-json-project-json';
+import { FsTree, type FileChange, type Tree } from '../generators/tree';
+export type SyncGeneratorResult = void | {
+    callback?: GeneratorCallback;
+    outOfSyncMessage?: string;
+};
+export type SyncGenerator = (tree: Tree) => SyncGeneratorResult | Promise<SyncGeneratorResult>;
+export type SyncGeneratorChangesResult = {
+    changes: FileChange[];
+    generatorName: string;
+    callback?: GeneratorCallback;
+    outOfSyncMessage?: string;
+};
+export declare function getSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorChangesResult[]>;
+export declare function flushSyncGeneratorChanges(results: SyncGeneratorChangesResult[]): Promise<void>;
+export declare function collectAllRegisteredSyncGenerators(projectGraph: ProjectGraph): Promise<string[]>;
+export declare function runSyncGenerator(tree: FsTree, generatorSpecifier: string, projects: Record<string, ProjectConfiguration>): Promise<SyncGeneratorChangesResult>;
+export declare function collectRegisteredTaskSyncGenerators(projectGraph: ProjectGraph): Set<string>;
+export declare function collectRegisteredGlobalSyncGenerators(nxJson?: import("../config/nx-json").NxJsonConfiguration<string[] | "*">): Set<string>;
+export declare function syncGeneratorResultsToMessageLines(results: SyncGeneratorChangesResult[]): string[];
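
Based solely on the `SyncGenerator` and `SyncGeneratorResult` types declared above, a sync generator is a function from a `Tree` to an optional result carrying a callback and an out-of-sync message. A hedged sketch of what an implementation could look like (the generator name, file it touches, and deep `nx/src/...` import paths below are illustrative; the paths mirror the compiled files in this diff and may not be the intended public entry points):

import type { Tree } from 'nx/src/generators/tree';
import type { SyncGenerator } from 'nx/src/utils/sync-generators';

// Hypothetical generator: makes sure a README.md exists at the workspace root.
const ensureReadmeSyncGenerator: SyncGenerator = async (tree: Tree) => {
  if (!tree.exists('README.md')) {
    tree.write('README.md', '# My workspace\n');
    return {
      outOfSyncMessage: 'README.md is missing from the workspace root.',
    };
  }
  // Returning void signals that the workspace is already in sync for this generator.
};

export default ensureReadmeSyncGenerator;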
package/src/utils/sync-generators.js
ADDED
@@ -0,0 +1,161 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getSyncGeneratorChanges = getSyncGeneratorChanges;
+exports.flushSyncGeneratorChanges = flushSyncGeneratorChanges;
+exports.collectAllRegisteredSyncGenerators = collectAllRegisteredSyncGenerators;
+exports.runSyncGenerator = runSyncGenerator;
+exports.collectRegisteredTaskSyncGenerators = collectRegisteredTaskSyncGenerators;
+exports.collectRegisteredGlobalSyncGenerators = collectRegisteredGlobalSyncGenerators;
+exports.syncGeneratorResultsToMessageLines = syncGeneratorResultsToMessageLines;
+const perf_hooks_1 = require("perf_hooks");
+const generate_1 = require("../command-line/generate/generate");
+const generator_utils_1 = require("../command-line/generate/generator-utils");
+const nx_json_1 = require("../config/nx-json");
+const client_1 = require("../daemon/client/client");
+const is_on_daemon_1 = require("../daemon/is-on-daemon");
+const tree_1 = require("../generators/tree");
+const project_graph_1 = require("../project-graph/project-graph");
+const workspace_context_1 = require("./workspace-context");
+const workspace_root_1 = require("./workspace-root");
+const chalk = require("chalk");
+async function getSyncGeneratorChanges(generators) {
+    perf_hooks_1.performance.mark('get-sync-generators-changes:start');
+    let results;
+    if (!client_1.daemonClient.enabled()) {
+        results = await runSyncGenerators(generators);
+    }
+    else {
+        results = await client_1.daemonClient.getSyncGeneratorChanges(generators);
+    }
+    perf_hooks_1.performance.mark('get-sync-generators-changes:end');
+    perf_hooks_1.performance.measure('get-sync-generators-changes', 'get-sync-generators-changes:start', 'get-sync-generators-changes:end');
+    return results.filter((r) => r.changes.length > 0);
+}
+async function flushSyncGeneratorChanges(results) {
+    if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
+        await flushSyncGeneratorChangesToDisk(results);
+    }
+    else {
+        await client_1.daemonClient.flushSyncGeneratorChangesToDisk(results.map((r) => r.generatorName));
+    }
+}
+async function collectAllRegisteredSyncGenerators(projectGraph) {
+    if (!client_1.daemonClient.enabled()) {
+        return [
+            ...collectRegisteredTaskSyncGenerators(projectGraph),
+            ...collectRegisteredGlobalSyncGenerators(),
+        ];
+    }
+    return await client_1.daemonClient.getRegisteredSyncGenerators();
+}
+async function runSyncGenerator(tree, generatorSpecifier, projects) {
+    perf_hooks_1.performance.mark(`run-sync-generator:${generatorSpecifier}:start`);
+    const { collection, generator } = (0, generate_1.parseGeneratorString)(generatorSpecifier);
+    const { implementationFactory } = (0, generator_utils_1.getGeneratorInformation)(collection, generator, workspace_root_1.workspaceRoot, projects);
+    const implementation = implementationFactory();
+    const result = await implementation(tree);
+    let callback;
+    let outOfSyncMessage;
+    if (result && typeof result === 'object') {
+        callback = result.callback;
+        outOfSyncMessage = result.outOfSyncMessage;
+    }
+    perf_hooks_1.performance.mark(`run-sync-generator:${generatorSpecifier}:end`);
+    perf_hooks_1.performance.measure(`run-sync-generator:${generatorSpecifier}`, `run-sync-generator:${generatorSpecifier}:start`, `run-sync-generator:${generatorSpecifier}:end`);
+    return {
+        changes: tree.listChanges(),
+        generatorName: generatorSpecifier,
+        callback,
+        outOfSyncMessage,
+    };
+}
+function collectRegisteredTaskSyncGenerators(projectGraph) {
+    const taskSyncGenerators = new Set();
+    for (const { data: { targets }, } of Object.values(projectGraph.nodes)) {
+        if (!targets) {
+            continue;
+        }
+        for (const target of Object.values(targets)) {
+            if (!target.syncGenerators) {
+                continue;
+            }
+            for (const generator of target.syncGenerators) {
+                taskSyncGenerators.add(generator);
+            }
+        }
+    }
+    return taskSyncGenerators;
+}
+function collectRegisteredGlobalSyncGenerators(nxJson = (0, nx_json_1.readNxJson)()) {
+    const globalSyncGenerators = new Set();
+    if (!nxJson.sync?.globalGenerators?.length) {
+        return globalSyncGenerators;
+    }
+    for (const generator of nxJson.sync.globalGenerators) {
+        globalSyncGenerators.add(generator);
+    }
+    return globalSyncGenerators;
+}
+function syncGeneratorResultsToMessageLines(results) {
+    const messageLines = [];
+    for (const result of results) {
+        messageLines.push(`The ${chalk.bold(result.generatorName)} sync generator identified ${chalk.bold(result.changes.length)} file${result.changes.length === 1 ? '' : 's'} in the workspace that ${result.changes.length === 1 ? 'is' : 'are'} out of sync${result.outOfSyncMessage ? ':' : '.'}`);
+        if (result.outOfSyncMessage) {
+            messageLines.push(result.outOfSyncMessage);
+        }
+        messageLines.push('');
+    }
+    return messageLines;
+}
+async function runSyncGenerators(generators) {
+    const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, 'running sync generators');
+    const projectGraph = await (0, project_graph_1.createProjectGraphAsync)();
+    const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
+    const results = [];
+    for (const generator of generators) {
+        const result = await runSyncGenerator(tree, generator, projects);
+        results.push(result);
+    }
+    return results;
+}
+async function flushSyncGeneratorChangesToDisk(results) {
+    perf_hooks_1.performance.mark('flush-sync-generator-changes-to-disk:start');
+    const { changes, createdFiles, updatedFiles, deletedFiles, callbacks } = processSyncGeneratorResults(results);
+    // Write changes to disk
+    (0, tree_1.flushChanges)(workspace_root_1.workspaceRoot, changes);
+    // Run the callbacks
+    if (callbacks.length) {
+        for (const callback of callbacks) {
+            await callback();
+        }
+    }
+    // Update the context files
+    await (0, workspace_context_1.updateContextWithChangedFiles)(createdFiles, updatedFiles, deletedFiles);
+    perf_hooks_1.performance.mark('flush-sync-generator-changes-to-disk:end');
+    perf_hooks_1.performance.measure('flush sync generator changes to disk', 'flush-sync-generator-changes-to-disk:start', 'flush-sync-generator-changes-to-disk:end');
+}
+function processSyncGeneratorResults(results) {
+    const changes = [];
+    const createdFiles = [];
+    const updatedFiles = [];
+    const deletedFiles = [];
+    const callbacks = [];
+    for (const result of results) {
+        if (result.callback) {
+            callbacks.push(result.callback);
+        }
+        for (const change of result.changes) {
+            changes.push(change);
+            if (change.type === 'CREATE') {
+                createdFiles.push(change.path);
+            }
+            else if (change.type === 'UPDATE') {
+                updatedFiles.push(change.path);
+            }
+            else if (change.type === 'DELETE') {
+                deletedFiles.push(change.path);
+            }
+        }
+    }
+    return { changes, createdFiles, updatedFiles, deletedFiles, callbacks };
+}
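
The helpers above split responsibilities between the daemon and the current process: `getSyncGeneratorChanges` collects pending file changes (via the daemon when it is enabled, otherwise by running the generators in-process), and `flushSyncGeneratorChanges` writes them to disk, runs generator callbacks, and updates the workspace file context. An illustrative caller, again assuming the deep compiled paths shown in this diff rather than a documented public entry point:

import {
  getSyncGeneratorChanges,
  flushSyncGeneratorChanges,
  syncGeneratorResultsToMessageLines,
} from 'nx/src/utils/sync-generators';

async function syncWorkspace(generators: string[]) {
  // Ask for pending changes from the given sync generators.
  const results = await getSyncGeneratorChanges(generators);
  if (!results.length) {
    return; // nothing to do, the workspace is in sync
  }
  console.log(syncGeneratorResultsToMessageLines(results).join('\n'));
  // Write the collected changes to disk and run any generator callbacks.
  await flushSyncGeneratorChanges(results);
}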
package/src/utils/workspace-context.d.ts
CHANGED
@@ -11,6 +11,7 @@ export declare function getNxWorkspaceFilesFromContext(workspaceRoot: string, pr
 export declare function globWithWorkspaceContextSync(workspaceRoot: string, globs: string[], exclude?: string[]): string[];
 export declare function globWithWorkspaceContext(workspaceRoot: string, globs: string[], exclude?: string[]): Promise<string[]>;
 export declare function hashWithWorkspaceContext(workspaceRoot: string, globs: string[], exclude?: string[]): Promise<string>;
+export declare function updateContextWithChangedFiles(createdFiles: string[], updatedFiles: string[], deletedFiles: string[]): Promise<void>;
 export declare function updateFilesInContext(updatedFiles: string[], deletedFiles: string[]): Record<string, string>;
 export declare function getAllFileDataInContext(workspaceRoot: string): Promise<import("../native").FileData[]>;
 export declare function getFilesInDirectoryUsingContext(workspaceRoot: string, dir: string): Promise<string[]>;