nx 19.6.0-beta.1 → 19.6.0-beta.3
- package/bin/post-install.js +8 -0
- package/package.json +12 -12
- package/schemas/nx-schema.json +55 -4
- package/schemas/project-schema.json +7 -0
- package/src/adapter/compat.d.ts +1 -1
- package/src/adapter/compat.js +1 -0
- package/src/command-line/nx-commands.js +3 -0
- package/src/command-line/release/changelog.js +9 -9
- package/src/command-line/release/command-object.d.ts +12 -3
- package/src/command-line/release/command-object.js +16 -1
- package/src/command-line/release/config/config.js +4 -2
- package/src/command-line/release/config/filter-release-groups.d.ts +2 -2
- package/src/command-line/release/config/filter-release-groups.js +1 -1
- package/src/command-line/release/config/version-plans.d.ts +1 -1
- package/src/command-line/release/config/version-plans.js +12 -12
- package/src/command-line/release/plan-check.d.ts +4 -0
- package/src/command-line/release/plan-check.js +225 -0
- package/src/command-line/release/plan.js +1 -1
- package/src/command-line/release/release.js +3 -3
- package/src/command-line/release/version.js +1 -1
- package/src/command-line/sync/command-object.d.ts +6 -0
- package/src/command-line/sync/command-object.js +25 -0
- package/src/command-line/sync/sync.d.ts +6 -0
- package/src/command-line/sync/sync.js +30 -0
- package/src/config/nx-json.d.ts +32 -2
- package/src/config/workspace-json-project-json.d.ts +5 -0
- package/src/daemon/client/client.d.ts +5 -0
- package/src/daemon/client/client.js +33 -0
- package/src/daemon/message-types/flush-sync-generator-changes-to-disk.d.ts +6 -0
- package/src/daemon/message-types/flush-sync-generator-changes-to-disk.js +11 -0
- package/src/daemon/message-types/get-registered-sync-generators.d.ts +5 -0
- package/src/daemon/message-types/get-registered-sync-generators.js +11 -0
- package/src/daemon/message-types/get-sync-generator-changes.d.ts +6 -0
- package/src/daemon/message-types/get-sync-generator-changes.js +11 -0
- package/src/daemon/message-types/update-workspace-context.d.ts +8 -0
- package/src/daemon/message-types/update-workspace-context.js +11 -0
- package/src/daemon/server/handle-flush-sync-generator-changes-to-disk.d.ts +2 -0
- package/src/daemon/server/handle-flush-sync-generator-changes-to-disk.js +11 -0
- package/src/daemon/server/handle-get-registered-sync-generators.d.ts +2 -0
- package/src/daemon/server/handle-get-registered-sync-generators.js +11 -0
- package/src/daemon/server/handle-get-sync-generator-changes.d.ts +2 -0
- package/src/daemon/server/handle-get-sync-generator-changes.js +17 -0
- package/src/daemon/server/handle-update-workspace-context.d.ts +2 -0
- package/src/daemon/server/handle-update-workspace-context.js +11 -0
- package/src/daemon/server/project-graph-incremental-recomputation.d.ts +1 -0
- package/src/daemon/server/project-graph-incremental-recomputation.js +19 -2
- package/src/daemon/server/server.js +25 -0
- package/src/daemon/server/sync-generators.d.ts +6 -0
- package/src/daemon/server/sync-generators.js +202 -0
- package/src/daemon/socket-utils.js +18 -5
- package/src/daemon/tmp-dir.js +2 -1
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +1 -1
- package/src/tasks-runner/run-command.d.ts +1 -1
- package/src/tasks-runner/run-command.js +120 -2
- package/src/utils/command-line-utils.js +1 -1
- package/src/utils/plugins/output.js +1 -1
- package/src/utils/sync-generators.d.ts +22 -0
- package/src/utils/sync-generators.js +161 -0
- package/src/utils/workspace-context.d.ts +1 -0
- package/src/utils/workspace-context.js +16 -0
- package/src/daemon/message-types/update-context-files.d.ts +0 -7
- package/src/daemon/message-types/update-context-files.js +0 -11
package/src/daemon/server/project-graph-incremental-recomputation.js
CHANGED
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.currentProjectGraph = exports.currentProjectFileMapCache = exports.fileMapWithFiles = void 0;
 exports.getCachedSerializedProjectGraphPromise = getCachedSerializedProjectGraphPromise;
 exports.addUpdatedAndDeletedFiles = addUpdatedAndDeletedFiles;
+exports.registerProjectGraphRecomputationListener = registerProjectGraphRecomputationListener;
 const perf_hooks_1 = require("perf_hooks");
 const nx_json_1 = require("../../config/nx-json");
 const file_hasher_1 = require("../../hasher/file-hasher");
@@ -20,14 +21,17 @@ const error_types_1 = require("../../project-graph/error-types");
 let cachedSerializedProjectGraphPromise;
 const collectedUpdatedFiles = new Set();
 const collectedDeletedFiles = new Set();
+const projectGraphRecomputationListeners = new Set();
 let storedWorkspaceConfigHash;
 let waitPeriod = 100;
 let scheduledTimeoutId;
 let knownExternalNodes = {};
 async function getCachedSerializedProjectGraphPromise() {
     try {
+        let wasScheduled = false;
         // recomputing it now on demand. we can ignore the scheduled timeout
         if (scheduledTimeoutId) {
+            wasScheduled = true;
             clearTimeout(scheduledTimeoutId);
             scheduledTimeoutId = undefined;
         }
@@ -45,7 +49,11 @@ async function getCachedSerializedProjectGraphPromise() {
             cachedSerializedProjectGraphPromise =
                 processFilesAndCreateAndSerializeProjectGraph(plugins);
         }
-        return await cachedSerializedProjectGraphPromise;
+        const result = await cachedSerializedProjectGraphPromise;
+        if (wasScheduled) {
+            notifyProjectGraphRecomputationListeners(result.projectGraph);
+        }
+        return result;
     }
     catch (e) {
         return {
@@ -83,13 +91,17 @@ function addUpdatedAndDeletedFiles(createdFiles, updatedFiles, deletedFiles) {
             }
             cachedSerializedProjectGraphPromise =
                 processFilesAndCreateAndSerializeProjectGraph(await (0, plugins_1.getPlugins)());
-            await cachedSerializedProjectGraphPromise;
+            const { projectGraph } = await cachedSerializedProjectGraphPromise;
             if (createdFiles.length > 0) {
                 (0, file_watcher_sockets_1.notifyFileWatcherSockets)(createdFiles, null, null);
             }
+            notifyProjectGraphRecomputationListeners(projectGraph);
         }, waitPeriod);
     }
 }
+function registerProjectGraphRecomputationListener(listener) {
+    projectGraphRecomputationListeners.add(listener);
+}
 function computeWorkspaceConfigHash(projectsConfigurations) {
     const projectConfigurationStrings = Object.entries(projectsConfigurations)
         .sort(([projectNameA], [projectNameB]) => projectNameA.localeCompare(projectNameB))
@@ -281,3 +293,8 @@ async function resetInternalStateIfNxDepsMissing() {
         await resetInternalState();
     }
 }
+function notifyProjectGraphRecomputationListeners(projectGraph) {
+    for (const listener of projectGraphRecomputationListeners) {
+        listener(projectGraph);
+    }
+}
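Note on the hunk above: listeners registered via registerProjectGraphRecomputationListener are notified after the debounced recomputation in addUpdatedAndDeletedFiles, and after an on-demand recomputation that pre-empted a scheduled one (the wasScheduled flag). As a rough TypeScript sketch of how a daemon-internal consumer could hook into this — the logging listener below is hypothetical; the consumer actually wired up in this release is collectAndScheduleSyncGenerators, registered in server.js further down:

import type { ProjectGraph } from '../../config/project-graph';
// Internal daemon module from the hunk above; the relative import path assumes in-repo usage.
import { registerProjectGraphRecomputationListener } from './project-graph-incremental-recomputation';

registerProjectGraphRecomputationListener((projectGraph: ProjectGraph) => {
  // Hypothetical listener: runs whenever the daemon finishes recomputing the project graph.
  console.log(`project graph recomputed: ${Object.keys(projectGraph.nodes).length} projects`);
});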
package/src/daemon/server/server.js
CHANGED
@@ -42,6 +42,15 @@ const handle_get_task_history_1 = require("./handle-get-task-history");
 const handle_write_task_runs_to_history_1 = require("./handle-write-task-runs-to-history");
 const force_shutdown_1 = require("../message-types/force-shutdown");
 const handle_force_shutdown_1 = require("./handle-force-shutdown");
+const get_sync_generator_changes_1 = require("../message-types/get-sync-generator-changes");
+const handle_get_sync_generator_changes_1 = require("./handle-get-sync-generator-changes");
+const sync_generators_1 = require("./sync-generators");
+const get_registered_sync_generators_1 = require("../message-types/get-registered-sync-generators");
+const handle_get_registered_sync_generators_1 = require("./handle-get-registered-sync-generators");
+const update_workspace_context_1 = require("../message-types/update-workspace-context");
+const handle_update_workspace_context_1 = require("./handle-update-workspace-context");
+const flush_sync_generator_changes_to_disk_1 = require("../message-types/flush-sync-generator-changes-to-disk");
+const handle_flush_sync_generator_changes_to_disk_1 = require("./handle-flush-sync-generator-changes-to-disk");
 let performanceObserver;
 let workspaceWatcherError;
 let outputsWatcherError;
@@ -140,6 +149,18 @@ async function handleMessage(socket, data) {
    else if ((0, force_shutdown_1.isHandleForceShutdownMessage)(payload)) {
        await handleResult(socket, 'FORCE_SHUTDOWN', () => (0, handle_force_shutdown_1.handleForceShutdown)(server));
    }
+    else if ((0, get_sync_generator_changes_1.isHandleGetSyncGeneratorChangesMessage)(payload)) {
+        await handleResult(socket, get_sync_generator_changes_1.GET_SYNC_GENERATOR_CHANGES, () => (0, handle_get_sync_generator_changes_1.handleGetSyncGeneratorChanges)(payload.generators));
+    }
+    else if ((0, flush_sync_generator_changes_to_disk_1.isHandleFlushSyncGeneratorChangesToDiskMessage)(payload)) {
+        await handleResult(socket, flush_sync_generator_changes_to_disk_1.FLUSH_SYNC_GENERATOR_CHANGES_TO_DISK, () => (0, handle_flush_sync_generator_changes_to_disk_1.handleFlushSyncGeneratorChangesToDisk)(payload.generators));
+    }
+    else if ((0, get_registered_sync_generators_1.isHandleGetRegisteredSyncGeneratorsMessage)(payload)) {
+        await handleResult(socket, get_registered_sync_generators_1.GET_REGISTERED_SYNC_GENERATORS, () => (0, handle_get_registered_sync_generators_1.handleGetRegisteredSyncGenerators)());
+    }
+    else if ((0, update_workspace_context_1.isHandleUpdateWorkspaceContextMessage)(payload)) {
+        await handleResult(socket, update_workspace_context_1.UPDATE_WORKSPACE_CONTEXT, () => (0, handle_update_workspace_context_1.handleUpdateWorkspaceContext)(payload.createdFiles, payload.updatedFiles, payload.deletedFiles));
+    }
    else {
        await (0, shutdown_utils_1.respondWithErrorAndExit)(socket, `Invalid payload from the client`, new Error(`Unsupported payload sent to daemon server: ${unparsedPayload}`));
    }
@@ -339,6 +360,10 @@ async function startServer() {
             if (!(0, shutdown_utils_1.getOutputWatcherInstance)()) {
                 (0, shutdown_utils_1.storeOutputWatcherInstance)(await (0, watcher_1.watchOutputFiles)(handleOutputsChanges));
             }
+            // listen for project graph recomputation events to collect and schedule sync generators
+            (0, project_graph_incremental_recomputation_1.registerProjectGraphRecomputationListener)(sync_generators_1.collectAndScheduleSyncGenerators);
+            // trigger an initial project graph recomputation
+            (0, project_graph_incremental_recomputation_1.addUpdatedAndDeletedFiles)([], [], []);
             return resolve(server);
         }
         catch (err) {
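The four new message branches above mirror the new files under package/src/daemon/message-types/ listed at the top. Judging from how handleMessage destructures each payload, the message shapes are roughly the following; the type names and the `type` discriminant are illustrative assumptions, only the field names are taken from the handler calls:

// Approximate payload shapes (sketch only; see the message-types files for the real declarations).
type GetSyncGeneratorChangesMessage = {
  type: 'GET_SYNC_GENERATOR_CHANGES';
  generators: string[]; // sync generator specifiers to run
};
type FlushSyncGeneratorChangesToDiskMessage = {
  type: 'FLUSH_SYNC_GENERATOR_CHANGES_TO_DISK';
  generators: string[];
};
type GetRegisteredSyncGeneratorsMessage = {
  type: 'GET_REGISTERED_SYNC_GENERATORS';
};
type UpdateWorkspaceContextMessage = {
  type: 'UPDATE_WORKSPACE_CONTEXT';
  createdFiles: string[];
  updatedFiles: string[];
  deletedFiles: string[];
};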
package/src/daemon/server/sync-generators.d.ts
ADDED
@@ -0,0 +1,6 @@
+import type { ProjectGraph } from '../../config/project-graph';
+import { type SyncGeneratorChangesResult } from '../../utils/sync-generators';
+export declare function getCachedSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorChangesResult[]>;
+export declare function flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;
+export declare function collectAndScheduleSyncGenerators(projectGraph: ProjectGraph): void;
+export declare function getCachedRegisteredSyncGenerators(): Promise<string[]>;
package/src/daemon/server/sync-generators.js
ADDED
@@ -0,0 +1,202 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
+exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
+exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
+exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+const nx_json_1 = require("../../config/nx-json");
+const tree_1 = require("../../generators/tree");
+const file_hasher_1 = require("../../hasher/file-hasher");
+const project_graph_1 = require("../../project-graph/project-graph");
+const sync_generators_1 = require("../../utils/sync-generators");
+const workspace_root_1 = require("../../utils/workspace-root");
+const logger_1 = require("./logger");
+const project_graph_incremental_recomputation_1 = require("./project-graph-incremental-recomputation");
+const syncGeneratorsCacheResultPromises = new Map();
+let registeredTaskSyncGenerators = new Set();
+let registeredGlobalSyncGenerators = new Set();
+const scheduledGenerators = new Set();
+let waitPeriod = 100;
+let registeredSyncGenerators;
+let scheduledTimeoutId;
+let storedProjectGraphHash;
+let storedNxJsonHash;
+const log = (...messageParts) => {
+    logger_1.serverLogger.log('[SYNC]:', ...messageParts);
+};
+// TODO(leo): check conflicts and reuse the Tree where possible
+async function getCachedSyncGeneratorChanges(generators) {
+    try {
+        log('get sync generators changes on demand', generators);
+        // this is invoked imperatively, so we clear any scheduled run
+        if (scheduledTimeoutId) {
+            log('clearing scheduled run');
+            clearTimeout(scheduledTimeoutId);
+            scheduledTimeoutId = undefined;
+        }
+        // reset the wait time
+        waitPeriod = 100;
+        let projects;
+        let errored = false;
+        const getProjectsConfigurations = async () => {
+            if (projects || errored) {
+                return projects;
+            }
+            const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+            projects = projectGraph
+                ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+                : null;
+            errored = error !== undefined;
+            return projects;
+        };
+        return (await Promise.all(generators.map(async (generator) => {
+            if (scheduledGenerators.has(generator) ||
+                !syncGeneratorsCacheResultPromises.has(generator)) {
+                // it's scheduled to run (there are pending changes to process) or
+                // it's not scheduled and there's no cached result, so run it
+                const projects = await getProjectsConfigurations();
+                if (projects) {
+                    log(generator, 'already scheduled or not cached, running it now');
+                    runGenerator(generator, projects);
+                }
+                else {
+                    log(generator, 'already scheduled or not cached, project graph errored');
+                    /**
+                     * This should never happen. This is invoked imperatively, and by
+                     * the time it is invoked, the project graph would have already
+                     * been requested. If it errored, it would have been reported and
+                     * this wouldn't have been invoked. We handle it just in case.
+                     *
+                     * Since the project graph would be reported by the relevant
+                     * handlers separately, we just ignore the error, don't cache
+                     * any result and return an empty result, the next time this is
+                     * invoked the process will repeat until it eventually recovers
+                     * when the project graph is fixed.
+                     */
+                    return Promise.resolve({ changes: [], generatorName: generator });
+                }
+            }
+            else {
+                log(generator, 'not scheduled and has cached result, returning cached result');
+            }
+            return syncGeneratorsCacheResultPromises.get(generator);
+        }))).flat();
+    }
+    catch (e) {
+        console.error(e);
+        syncGeneratorsCacheResultPromises.clear();
+        return [];
+    }
+}
+async function flushSyncGeneratorChangesToDisk(generators) {
+    log('flush sync generators changes', generators);
+    const results = await getCachedSyncGeneratorChanges(generators);
+    for (const generator of generators) {
+        syncGeneratorsCacheResultPromises.delete(generator);
+    }
+    await (0, sync_generators_1.flushSyncGeneratorChanges)(results);
+}
+function collectAndScheduleSyncGenerators(projectGraph) {
+    if (!projectGraph) {
+        // If the project graph is not available, we can't collect and schedule
+        // sync generators. The project graph error will be reported separately.
+        return;
+    }
+    log('collect registered sync generators');
+    collectAllRegisteredSyncGenerators(projectGraph);
+    // a change imply we need to re-run all the generators
+    // make sure to schedule all the collected generators
+    scheduledGenerators.clear();
+    for (const generator of registeredSyncGenerators) {
+        scheduledGenerators.add(generator);
+    }
+    log('scheduling:', [...scheduledGenerators]);
+    if (scheduledTimeoutId) {
+        // we have a scheduled run already, so we don't need to do anything
+        return;
+    }
+    scheduledTimeoutId = setTimeout(async () => {
+        scheduledTimeoutId = undefined;
+        if (waitPeriod < 4000) {
+            waitPeriod = waitPeriod * 2;
+        }
+        if (scheduledGenerators.size === 0) {
+            // no generators to run
+            return;
+        }
+        const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
+        for (const generator of scheduledGenerators) {
+            runGenerator(generator, projects);
+        }
+        await Promise.all(syncGeneratorsCacheResultPromises.values());
+    }, waitPeriod);
+}
+async function getCachedRegisteredSyncGenerators() {
+    log('get registered sync generators');
+    if (!registeredSyncGenerators) {
+        log('no registered sync generators, collecting them');
+        const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+        collectAllRegisteredSyncGenerators(projectGraph);
+    }
+    else {
+        log('registered sync generators already collected, returning them');
+    }
+    return [...registeredSyncGenerators];
+}
+function collectAllRegisteredSyncGenerators(projectGraph) {
+    const projectGraphHash = hashProjectGraph(projectGraph);
+    if (storedProjectGraphHash !== projectGraphHash) {
+        storedProjectGraphHash = projectGraphHash;
+        registeredTaskSyncGenerators =
+            (0, sync_generators_1.collectRegisteredTaskSyncGenerators)(projectGraph);
+    }
+    else {
+        log('project graph hash is the same, not collecting task sync generators');
+    }
+    const nxJson = (0, nx_json_1.readNxJson)();
+    const nxJsonHash = (0, file_hasher_1.hashArray)(nxJson.sync?.globalGenerators?.sort() ?? []);
+    if (storedNxJsonHash !== nxJsonHash) {
+        storedNxJsonHash = nxJsonHash;
+        registeredGlobalSyncGenerators =
+            (0, sync_generators_1.collectRegisteredGlobalSyncGenerators)(nxJson);
+    }
+    else {
+        log('nx.json hash is the same, not collecting global sync generators');
+    }
+    const generators = new Set([
+        ...registeredTaskSyncGenerators,
+        ...registeredGlobalSyncGenerators,
+    ]);
+    if (!registeredSyncGenerators) {
+        registeredSyncGenerators = generators;
+        return;
+    }
+    for (const generator of registeredSyncGenerators) {
+        if (!generators.has(generator)) {
+            registeredSyncGenerators.delete(generator);
+            syncGeneratorsCacheResultPromises.delete(generator);
+        }
+    }
+    for (const generator of generators) {
+        if (!registeredSyncGenerators.has(generator)) {
+            registeredSyncGenerators.add(generator);
+        }
+    }
+}
+function runGenerator(generator, projects) {
+    log('running scheduled generator', generator);
+    // remove it from the scheduled set
+    scheduledGenerators.delete(generator);
+    const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+    // run the generator and cache the result
+    syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+        log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
+        return result;
+    }));
+}
+function hashProjectGraph(projectGraph) {
+    const stringifiedProjects = Object.entries(projectGraph.nodes)
+        .sort(([projectNameA], [projectNameB]) => projectNameA.localeCompare(projectNameB))
+        .map(([projectName, projectConfig]) => `${projectName}:${JSON.stringify(projectConfig)}`);
+    return (0, file_hasher_1.hashArray)(stringifiedProjects);
+}
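The scheduler above debounces generator runs: collectAndScheduleSyncGenerators reschedules everything after each project graph recomputation, the wait period doubles (capped at 4000ms) while changes keep arriving, and getCachedSyncGeneratorChanges cancels the pending run and resets the wait to 100ms when results are requested on demand. A stripped-down sketch of that pattern with generic names (not the nx API):

// Minimal sketch of the debounce-with-backoff pattern used above; names are generic.
let waitPeriod = 100;
let scheduledTimeoutId: NodeJS.Timeout | undefined;

function schedule(run: () => Promise<void>): void {
  if (scheduledTimeoutId) {
    return; // a run is already pending; it will pick up the latest state
  }
  scheduledTimeoutId = setTimeout(async () => {
    scheduledTimeoutId = undefined;
    if (waitPeriod < 4000) {
      waitPeriod *= 2; // back off while changes keep arriving
    }
    await run();
  }, waitPeriod);
}

function runNow(run: () => Promise<void>): Promise<void> {
  // an on-demand request cancels the pending run and resets the backoff
  if (scheduledTimeoutId) {
    clearTimeout(scheduledTimeoutId);
    scheduledTimeoutId = undefined;
  }
  waitPeriod = 100;
  return run();
}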
package/src/daemon/socket-utils.js
CHANGED
@@ -15,20 +15,33 @@ exports.isWindows = (0, os_1.platform)() === 'win32';
  * See https://nodejs.org/dist/latest-v14.x/docs/api/net.html#net_identifying_paths_for_ipc_connections for a full breakdown
  * of OS differences between Unix domain sockets and named pipes.
  */
-const getFullOsSocketPath = () =>
-
-
+const getFullOsSocketPath = () => {
+    const path = (0, path_1.resolve)((0, tmp_dir_1.getDaemonSocketDir)());
+    assertValidSocketPath(path);
+    return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
+};
 exports.getFullOsSocketPath = getFullOsSocketPath;
 const getForkedProcessOsSocketPath = (id) => {
     let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(), 'fp' + id + '.sock'));
-
+    assertValidSocketPath(path);
+    return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
 };
 exports.getForkedProcessOsSocketPath = getForkedProcessOsSocketPath;
 const getPluginOsSocketPath = (id) => {
     let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(true), 'plugin' + id + '.sock'));
-
+    assertValidSocketPath(path);
+    return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
 };
 exports.getPluginOsSocketPath = getPluginOsSocketPath;
+function assertValidSocketPath(path) {
+    if (path.length > 95) {
+        throw new Error([
+            'Attempted to open socket that exceeds the maximum socket length.',
+            '',
+            `Set NX_SOCKET_DIR to a shorter path (e.g. ${exports.isWindows ? '%TMP%/nx-tmp' : '/tmp/nx-tmp'}) to avoid this issue.`,
+        ].join('\n'));
+    }
+}
 function killSocketOrPath() {
     try {
         (0, fs_1.unlinkSync)((0, exports.getFullOsSocketPath)());
package/src/daemon/tmp-dir.js
CHANGED
@@ -53,7 +53,8 @@ function socketDirName() {
  */
 function getSocketDir(alreadyUnique = false) {
     try {
-        const dir = process.env.NX_DAEMON_SOCKET_DIR ??
+        const dir = process.env.NX_SOCKET_DIR ??
+            process.env.NX_DAEMON_SOCKET_DIR ??
             (alreadyUnique ? tmp_1.tmpdir : socketDirName());
         (0, fs_extra_1.ensureDirSync)(dir);
         return dir;
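Taken together, the socket-utils.js and tmp-dir.js hunks above mean the daemon socket directory now resolves from NX_SOCKET_DIR first, then the pre-existing NX_DAEMON_SOCKET_DIR, then the default temp location, and any resolved socket path longer than 95 characters is rejected with an actionable error. A minimal TypeScript sketch of that behavior, with stand-in names for the internal helpers:

import { tmpdir } from 'os';
import { join, resolve } from 'path';

// defaultSocketDirName() stands in for nx's internal socketDirName()/tmpdir fallback.
function resolveSocketDir(defaultSocketDirName: () => string): string {
  return (
    process.env.NX_SOCKET_DIR ??
    process.env.NX_DAEMON_SOCKET_DIR ??
    defaultSocketDirName()
  );
}

function assertValidSocketPath(socketPath: string): void {
  // Unix domain socket paths have a small OS-level limit (~104-108 bytes);
  // the diff above uses 95 characters as its threshold.
  if (socketPath.length > 95) {
    throw new Error('Socket path too long; set NX_SOCKET_DIR to a shorter path (e.g. /tmp/nx-tmp).');
  }
}

// Example usage with a hypothetical socket file name:
const socketPath = resolve(join(resolveSocketDir(() => join(tmpdir(), 'nx-daemon')), 'd.sock'));
assertValidSocketPath(socketPath);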
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file (contents not shown)
package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js
CHANGED
@@ -94,7 +94,7 @@ function addNxCloudOptionsToNxJson(tree, token, directory = '') {
         });
     }
 }
-function addNxCloudIdToNxJson(tree, nxCloudId, directory =
+function addNxCloudIdToNxJson(tree, nxCloudId, directory = '') {
     const nxJsonPath = (0, path_1.join)(directory, 'nx.json');
     if (tree.exists(nxJsonPath)) {
         (0, json_1.updateJson)(tree, (0, path_1.join)(directory, 'nx.json'), (nxJson) => {
package/src/tasks-runner/run-command.d.ts
CHANGED
@@ -5,7 +5,7 @@ import { TargetDependencyConfig } from '../config/workspace-json-project-json';
 import { NxArgs } from '../utils/command-line-utils';
 import { LifeCycle } from './life-cycle';
 import { TasksRunner } from './tasks-runner';
-export declare function runCommand(projectsToRun: ProjectGraphProjectNode[],
+export declare function runCommand(projectsToRun: ProjectGraphProjectNode[], currentProjectGraph: ProjectGraph, { nxJson }: {
     nxJson: NxJsonConfiguration;
 }, nxArgs: NxArgs, overrides: any, initiatingProject: string | null, extraTargetDependencies: Record<string, (TargetDependencyConfig | string)[]>, extraOptions: {
     excludeTaskDependencies: boolean;
package/src/tasks-runner/run-command.js
CHANGED
@@ -4,16 +4,20 @@ exports.runCommand = runCommand;
 exports.invokeTasksRunner = invokeTasksRunner;
 exports.getRunner = getRunner;
 exports.getRunnerOptions = getRunnerOptions;
+const enquirer_1 = require("enquirer");
+const ora = require("ora");
 const path_1 = require("path");
 const nx_json_1 = require("../config/nx-json");
 const client_1 = require("../daemon/client/client");
 const create_task_hasher_1 = require("../hasher/create-task-hasher");
 const hash_task_1 = require("../hasher/hash-task");
+const project_graph_1 = require("../project-graph/project-graph");
 const fileutils_1 = require("../utils/fileutils");
 const is_ci_1 = require("../utils/is-ci");
 const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
 const output_1 = require("../utils/output");
 const params_1 = require("../utils/params");
+const sync_generators_1 = require("../utils/sync-generators");
 const workspace_root_1 = require("../utils/workspace-root");
 const create_task_graph_1 = require("./create-task-graph");
 const life_cycle_1 = require("./life-cycle");
@@ -27,6 +31,7 @@ const task_profiling_life_cycle_1 = require("./life-cycles/task-profiling-life-c
 const task_timings_life_cycle_1 = require("./life-cycles/task-timings-life-cycle");
 const task_graph_utils_1 = require("./task-graph-utils");
 const utils_1 = require("./utils");
+const chalk = require("chalk");
 async function getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides) {
     const { runnerOptions } = getRunner(nxArgs, nxJson);
     const isRunOne = initiatingProject != null;
@@ -90,10 +95,10 @@ function createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies,
     }
     return taskGraph;
 }
-async function runCommand(projectsToRun,
+async function runCommand(projectsToRun, currentProjectGraph, { nxJson }, nxArgs, overrides, initiatingProject, extraTargetDependencies, extraOptions) {
     const status = await (0, params_1.handleErrors)(process.env.NX_VERBOSE_LOGGING === 'true', async () => {
         const projectNames = projectsToRun.map((t) => t.name);
-        const taskGraph =
+        const { projectGraph, taskGraph } = await ensureWorkspaceIsInSyncAndGetGraphs(currentProjectGraph, nxJson, projectNames, nxArgs, overrides, extraTargetDependencies, extraOptions);
         const tasks = Object.values(taskGraph.tasks);
         const { lifeCycle, renderIsDone } = await getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides);
         const status = await invokeTasksRunner({
@@ -111,6 +116,119 @@ async function runCommand(projectsToRun, projectGraph, { nxJson }, nxArgs, overr
     });
     return status;
 }
+async function ensureWorkspaceIsInSyncAndGetGraphs(projectGraph, nxJson, projectNames, nxArgs, overrides, extraTargetDependencies, extraOptions) {
+    let taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
+    if (process.env.NX_ENABLE_SYNC_GENERATORS !== 'true') {
+        return { projectGraph, taskGraph };
+    }
+    // collect unique syncGenerators from the tasks
+    const uniqueSyncGenerators = new Set();
+    for (const { target } of Object.values(taskGraph.tasks)) {
+        const { syncGenerators } = projectGraph.nodes[target.project].data.targets[target.target];
+        if (!syncGenerators) {
+            continue;
+        }
+        for (const generator of syncGenerators) {
+            uniqueSyncGenerators.add(generator);
+        }
+    }
+    if (!uniqueSyncGenerators.size) {
+        // There are no sync generators registered in the tasks to run
+        return { projectGraph, taskGraph };
+    }
+    const syncGenerators = Array.from(uniqueSyncGenerators);
+    const results = await (0, sync_generators_1.getSyncGeneratorChanges)(syncGenerators);
+    if (!results.length) {
+        // There are no changes to sync, workspace is up to date
+        return { projectGraph, taskGraph };
+    }
+    const outOfSyncTitle = 'The workspace is out of sync';
+    const resultBodyLines = (0, sync_generators_1.syncGeneratorResultsToMessageLines)(results);
+    const fixMessage = 'You can manually run `nx sync` to update your workspace or you can set `sync.applyChanges` to `true` in your `nx.json` to apply the changes automatically when running tasks.';
+    const willErrorOnCiMessage = 'Please note that this will be an error on CI.';
+    if ((0, is_ci_1.isCI)() || !process.stdout.isTTY) {
+        // If the user is running in CI or is running in a non-TTY environment we
+        // throw an error to stop the execution of the tasks.
+        throw new Error(`${outOfSyncTitle}\n${resultBodyLines.join('\n')}\n${fixMessage}`);
+    }
+    if (nxJson.sync?.applyChanges === false) {
+        // If the user has set `sync.applyChanges` to `false` in their `nx.json`
+        // we don't prompt the them and just log a warning informing them that
+        // the workspace is out of sync and they have it set to not apply changes
+        // automatically.
+        output_1.output.warn({
+            title: outOfSyncTitle,
+            bodyLines: [
+                ...resultBodyLines,
+                'Your workspace is set to not apply changes automatically (`sync.applyChanges` is set to `false` in your `nx.json`).',
+                willErrorOnCiMessage,
+                fixMessage,
+            ],
+        });
+        return { projectGraph, taskGraph };
+    }
+    output_1.output.warn({
+        title: outOfSyncTitle,
+        bodyLines: [
+            ...resultBodyLines,
+            nxJson.sync?.applyChanges === true
+                ? 'Proceeding to sync the changes automatically (`sync.applyChanges` is set to `true` in your `nx.json`).'
+                : willErrorOnCiMessage,
+        ],
+    });
+    const applyChanges = nxJson.sync?.applyChanges === true ||
+        (await promptForApplyingSyncGeneratorChanges());
+    if (applyChanges) {
+        const spinner = ora('Syncing the workspace...');
+        spinner.start();
+        // Flush sync generator changes to disk
+        await (0, sync_generators_1.flushSyncGeneratorChanges)(results);
+        // Re-create project graph and task graph
+        projectGraph = await (0, project_graph_1.createProjectGraphAsync)();
+        taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
+        if (nxJson.sync?.applyChanges === true) {
+            spinner.succeed(`The workspace was synced successfully!
+
+Please make sure to commit the changes to your repository or this will error on CI.`);
+        }
+        else {
+            // The user was prompted and we already logged a message about erroring on CI
+            // so here we just tell them to commit the changes.
+            spinner.succeed(`The workspace was synced successfully!
+
+Please make sure to commit the changes to your repository.`);
+        }
+    }
+    else {
+        output_1.output.warn({
+            title: 'Syncing the workspace was skipped',
+            bodyLines: [
+                'This could lead to unexpected results or errors when running tasks.',
+                fixMessage,
+            ],
+        });
+    }
+    return { projectGraph, taskGraph };
+}
+async function promptForApplyingSyncGeneratorChanges() {
+    const promptConfig = {
+        name: 'applyChanges',
+        type: 'select',
+        message: 'Would you like to sync the changes to get your worskpace up to date?',
+        choices: [
+            {
+                name: 'yes',
+                message: 'Yes, sync the changes and run the tasks',
+            },
+            {
+                name: 'no',
+                message: 'No, run the tasks without syncing the changes',
+            },
+        ],
+        footer: () => chalk.dim('\nYou can skip this prompt by setting the `sync.applyChanges` option in your `nx.json`.'),
+    };
+    return await (0, enquirer_1.prompt)([promptConfig]).then(({ applyChanges }) => applyChanges === 'yes');
+}
 function setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles) {
     if (nxArgs.outputStyle == 'stream' ||
         process.env.NX_BATCH_MODE === 'true' ||
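The new ensureWorkspaceIsInSyncAndGetGraphs flow above is gated behind NX_ENABLE_SYNC_GENERATORS=true and driven by the new `sync` options in nx.json (see the nx-schema.json entry in the file list): `applyChanges: true` applies pending sync generator changes without prompting, `false` downgrades the prompt to a warning (still an error on CI or in non-TTY environments), and leaving it unset prompts interactively. A hedged sketch of the shape those options take, inferred from how the code reads `nxJson.sync` (not the published type):

// Shape inferred from nxJson.sync?.applyChanges and nxJson.sync?.globalGenerators above.
type NxSyncOptions = {
  applyChanges?: boolean; // auto-apply sync generator changes before running tasks
  globalGenerators?: string[]; // sync generators registered globally rather than on a task target
};

// Illustrative nx.json fragment; the generator specifier is a placeholder.
const syncConfig: NxSyncOptions = {
  applyChanges: true,
  globalGenerators: ['@my-org/my-plugin:my-sync-generator'],
};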
package/src/utils/command-line-utils.js
CHANGED
@@ -70,7 +70,7 @@ function splitArgsIntoNxArgsAndOverrides(args, mode, options = { printWarnings:
            ],
        });
    }
-    // Allow setting base and head via environment variables (lower priority
+    // Allow setting base and head via environment variables (lower priority than direct command arguments)
    if (!nxArgs.base && process.env.NX_BASE) {
        nxArgs.base = process.env.NX_BASE;
        if (options.printWarnings) {
package/src/utils/plugins/output.js
CHANGED
@@ -27,7 +27,7 @@ function listPlugins(plugins, title) {
         if (p.projectInference) {
             capabilities.push('project-inference');
         }
-        bodyLines.push(`${chalk.bold(p.name)} (${capabilities.join()})`);
+        bodyLines.push(`${chalk.bold(p.name)} ${capabilities.length >= 1 ? `(${capabilities.join()})` : ''}`);
     }
     output_1.output.log({
         title: title,
package/src/utils/sync-generators.d.ts
ADDED
@@ -0,0 +1,22 @@
+import type { GeneratorCallback } from '../config/misc-interfaces';
+import type { ProjectGraph } from '../config/project-graph';
+import type { ProjectConfiguration } from '../config/workspace-json-project-json';
+import { FsTree, type FileChange, type Tree } from '../generators/tree';
+export type SyncGeneratorResult = void | {
+    callback?: GeneratorCallback;
+    outOfSyncMessage?: string;
+};
+export type SyncGenerator = (tree: Tree) => SyncGeneratorResult | Promise<SyncGeneratorResult>;
+export type SyncGeneratorChangesResult = {
+    changes: FileChange[];
+    generatorName: string;
+    callback?: GeneratorCallback;
+    outOfSyncMessage?: string;
+};
+export declare function getSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorChangesResult[]>;
+export declare function flushSyncGeneratorChanges(results: SyncGeneratorChangesResult[]): Promise<void>;
+export declare function collectAllRegisteredSyncGenerators(projectGraph: ProjectGraph): Promise<string[]>;
+export declare function runSyncGenerator(tree: FsTree, generatorSpecifier: string, projects: Record<string, ProjectConfiguration>): Promise<SyncGeneratorChangesResult>;
+export declare function collectRegisteredTaskSyncGenerators(projectGraph: ProjectGraph): Set<string>;
+export declare function collectRegisteredGlobalSyncGenerators(nxJson?: import("../config/nx-json").NxJsonConfiguration<string[] | "*">): Set<string>;
+export declare function syncGeneratorResultsToMessageLines(results: SyncGeneratorChangesResult[]): string[];