nx 19.6.0-beta.0 → 19.6.0-beta.2

Files changed (84)
  1. package/bin/post-install.js +8 -0
  2. package/package.json +12 -12
  3. package/release/changelog-renderer/index.js +16 -1
  4. package/schemas/nx-schema.json +25 -0
  5. package/schemas/project-schema.json +7 -0
  6. package/src/adapter/compat.d.ts +1 -1
  7. package/src/adapter/compat.js +1 -0
  8. package/src/command-line/init/init-v2.js +1 -1
  9. package/src/command-line/nx-commands.js +3 -0
  10. package/src/command-line/release/changelog.js +80 -42
  11. package/src/command-line/release/config/version-plans.d.ts +5 -0
  12. package/src/command-line/release/config/version-plans.js +9 -5
  13. package/src/command-line/release/plan.js +25 -45
  14. package/src/command-line/release/utils/generate-version-plan-content.js +2 -3
  15. package/src/command-line/release/version.d.ts +5 -0
  16. package/src/command-line/sync/command-object.d.ts +6 -0
  17. package/src/command-line/sync/command-object.js +25 -0
  18. package/src/command-line/sync/sync.d.ts +6 -0
  19. package/src/command-line/sync/sync.js +30 -0
  20. package/src/config/nx-json.d.ts +23 -0
  21. package/src/config/workspace-json-project-json.d.ts +5 -0
  22. package/src/core/graph/main.js +1 -1
  23. package/src/core/graph/styles.css +1 -1
  24. package/src/daemon/cache.d.ts +1 -0
  25. package/src/daemon/cache.js +25 -18
  26. package/src/daemon/client/client.d.ts +5 -0
  27. package/src/daemon/client/client.js +42 -1
  28. package/src/daemon/message-types/flush-sync-generator-changes-to-disk.d.ts +6 -0
  29. package/src/daemon/message-types/flush-sync-generator-changes-to-disk.js +11 -0
  30. package/src/daemon/message-types/force-shutdown.d.ts +5 -0
  31. package/src/daemon/message-types/force-shutdown.js +11 -0
  32. package/src/daemon/message-types/get-registered-sync-generators.d.ts +5 -0
  33. package/src/daemon/message-types/get-registered-sync-generators.js +11 -0
  34. package/src/daemon/message-types/get-sync-generator-changes.d.ts +6 -0
  35. package/src/daemon/message-types/get-sync-generator-changes.js +11 -0
  36. package/src/daemon/message-types/update-workspace-context.d.ts +8 -0
  37. package/src/daemon/message-types/update-workspace-context.js +11 -0
  38. package/src/daemon/server/handle-flush-sync-generator-changes-to-disk.d.ts +2 -0
  39. package/src/daemon/server/handle-flush-sync-generator-changes-to-disk.js +11 -0
  40. package/src/daemon/server/handle-force-shutdown.d.ts +5 -0
  41. package/src/daemon/server/handle-force-shutdown.js +18 -0
  42. package/src/daemon/server/handle-get-registered-sync-generators.d.ts +2 -0
  43. package/src/daemon/server/handle-get-registered-sync-generators.js +11 -0
  44. package/src/daemon/server/handle-get-sync-generator-changes.d.ts +2 -0
  45. package/src/daemon/server/handle-get-sync-generator-changes.js +17 -0
  46. package/src/daemon/server/handle-request-shutdown.js +2 -0
  47. package/src/daemon/server/handle-update-workspace-context.d.ts +2 -0
  48. package/src/daemon/server/handle-update-workspace-context.js +11 -0
  49. package/src/daemon/server/project-graph-incremental-recomputation.d.ts +1 -0
  50. package/src/daemon/server/project-graph-incremental-recomputation.js +19 -2
  51. package/src/daemon/server/server.d.ts +1 -0
  52. package/src/daemon/server/server.js +39 -0
  53. package/src/daemon/server/shutdown-utils.d.ts +2 -1
  54. package/src/daemon/server/shutdown-utils.js +11 -4
  55. package/src/daemon/server/sync-generators.d.ts +6 -0
  56. package/src/daemon/server/sync-generators.js +202 -0
  57. package/src/daemon/server/watcher.js +3 -0
  58. package/src/daemon/socket-utils.js +18 -5
  59. package/src/daemon/tmp-dir.js +2 -1
  60. package/src/native/nx.wasm32-wasi.wasm +0 -0
  61. package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +1 -1
  62. package/src/nx-cloud/models/onboarding-status.d.ts +1 -0
  63. package/src/nx-cloud/models/onboarding-status.js +2 -0
  64. package/src/nx-cloud/utilities/is-workspace-claimed.d.ts +1 -0
  65. package/src/nx-cloud/utilities/is-workspace-claimed.js +24 -0
  66. package/src/nx-cloud/utilities/onboarding.d.ts +5 -0
  67. package/src/nx-cloud/utilities/onboarding.js +28 -0
  68. package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js +7 -2
  69. package/src/project-graph/plugins/internal-api.js +16 -5
  70. package/src/project-graph/plugins/isolation/messaging.d.ts +5 -1
  71. package/src/project-graph/plugins/isolation/messaging.js +1 -0
  72. package/src/project-graph/plugins/isolation/plugin-pool.js +4 -6
  73. package/src/project-graph/plugins/isolation/plugin-worker.js +15 -0
  74. package/src/project-graph/utils/project-configuration-utils.js +5 -2
  75. package/src/tasks-runner/run-command.d.ts +1 -1
  76. package/src/tasks-runner/run-command.js +120 -2
  77. package/src/utils/package-manager.js +12 -3
  78. package/src/utils/plugins/output.js +1 -1
  79. package/src/utils/sync-generators.d.ts +22 -0
  80. package/src/utils/sync-generators.js +161 -0
  81. package/src/utils/workspace-context.d.ts +1 -0
  82. package/src/utils/workspace-context.js +16 -0
  83. package/src/daemon/message-types/update-context-files.d.ts +0 -7
  84. package/src/daemon/message-types/update-context-files.js +0 -11

package/src/daemon/server/server.js
@@ -1,5 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
+ exports.openSockets = void 0;
  exports.handleResult = handleResult;
  exports.startServer = startServer;
  const fs_1 = require("fs");
@@ -39,13 +40,26 @@ const handle_hash_glob_1 = require("./handle-hash-glob");
  const task_history_1 = require("../message-types/task-history");
  const handle_get_task_history_1 = require("./handle-get-task-history");
  const handle_write_task_runs_to_history_1 = require("./handle-write-task-runs-to-history");
+ const force_shutdown_1 = require("../message-types/force-shutdown");
+ const handle_force_shutdown_1 = require("./handle-force-shutdown");
+ const get_sync_generator_changes_1 = require("../message-types/get-sync-generator-changes");
+ const handle_get_sync_generator_changes_1 = require("./handle-get-sync-generator-changes");
+ const sync_generators_1 = require("./sync-generators");
+ const get_registered_sync_generators_1 = require("../message-types/get-registered-sync-generators");
+ const handle_get_registered_sync_generators_1 = require("./handle-get-registered-sync-generators");
+ const update_workspace_context_1 = require("../message-types/update-workspace-context");
+ const handle_update_workspace_context_1 = require("./handle-update-workspace-context");
+ const flush_sync_generator_changes_to_disk_1 = require("../message-types/flush-sync-generator-changes-to-disk");
+ const handle_flush_sync_generator_changes_to_disk_1 = require("./handle-flush-sync-generator-changes-to-disk");
  let performanceObserver;
  let workspaceWatcherError;
  let outputsWatcherError;
  global.NX_DAEMON = true;
  let numberOfOpenConnections = 0;
+ exports.openSockets = new Set();
  const server = (0, net_1.createServer)(async (socket) => {
  numberOfOpenConnections += 1;
+ exports.openSockets.add(socket);
  logger_1.serverLogger.log(`Established a connection. Number of open connections: ${numberOfOpenConnections}`);
  (0, shutdown_utils_1.resetInactivityTimeout)(handleInactivityTimeout);
  if (!performanceObserver) {
@@ -64,6 +78,7 @@ const server = (0, net_1.createServer)(async (socket) => {
  });
  socket.on('close', () => {
  numberOfOpenConnections -= 1;
+ exports.openSockets.delete(socket);
  logger_1.serverLogger.log(`Closed a connection. Number of open connections: ${numberOfOpenConnections}`);
  (0, file_watcher_sockets_1.removeRegisteredFileWatcherSocket)(socket);
  });
@@ -131,6 +146,21 @@ async function handleMessage(socket, data) {
  else if ((0, task_history_1.isHandleWriteTaskRunsToHistoryMessage)(payload)) {
  await handleResult(socket, 'WRITE_TASK_RUNS_TO_HISTORY', () => (0, handle_write_task_runs_to_history_1.handleWriteTaskRunsToHistory)(payload.taskRuns));
  }
+ else if ((0, force_shutdown_1.isHandleForceShutdownMessage)(payload)) {
+ await handleResult(socket, 'FORCE_SHUTDOWN', () => (0, handle_force_shutdown_1.handleForceShutdown)(server));
+ }
+ else if ((0, get_sync_generator_changes_1.isHandleGetSyncGeneratorChangesMessage)(payload)) {
+ await handleResult(socket, get_sync_generator_changes_1.GET_SYNC_GENERATOR_CHANGES, () => (0, handle_get_sync_generator_changes_1.handleGetSyncGeneratorChanges)(payload.generators));
+ }
+ else if ((0, flush_sync_generator_changes_to_disk_1.isHandleFlushSyncGeneratorChangesToDiskMessage)(payload)) {
+ await handleResult(socket, flush_sync_generator_changes_to_disk_1.FLUSH_SYNC_GENERATOR_CHANGES_TO_DISK, () => (0, handle_flush_sync_generator_changes_to_disk_1.handleFlushSyncGeneratorChangesToDisk)(payload.generators));
+ }
+ else if ((0, get_registered_sync_generators_1.isHandleGetRegisteredSyncGeneratorsMessage)(payload)) {
+ await handleResult(socket, get_registered_sync_generators_1.GET_REGISTERED_SYNC_GENERATORS, () => (0, handle_get_registered_sync_generators_1.handleGetRegisteredSyncGenerators)());
+ }
+ else if ((0, update_workspace_context_1.isHandleUpdateWorkspaceContextMessage)(payload)) {
+ await handleResult(socket, update_workspace_context_1.UPDATE_WORKSPACE_CONTEXT, () => (0, handle_update_workspace_context_1.handleUpdateWorkspaceContext)(payload.createdFiles, payload.updatedFiles, payload.deletedFiles));
+ }
  else {
  await (0, shutdown_utils_1.respondWithErrorAndExit)(socket, `Invalid payload from the client`, new Error(`Unsupported payload sent to daemon server: ${unparsedPayload}`));
  }
@@ -157,6 +187,7 @@ function handleInactivityTimeout() {
  (0, shutdown_utils_1.handleServerProcessTermination)({
  server,
  reason: `${shutdown_utils_1.SERVER_INACTIVITY_TIMEOUT_MS}ms of inactivity`,
+ sockets: exports.openSockets,
  });
  }
  }
@@ -165,14 +196,17 @@ function registerProcessTerminationListeners() {
  .on('SIGINT', () => (0, shutdown_utils_1.handleServerProcessTermination)({
  server,
  reason: 'received process SIGINT',
+ sockets: exports.openSockets,
  }))
  .on('SIGTERM', () => (0, shutdown_utils_1.handleServerProcessTermination)({
  server,
  reason: 'received process SIGTERM',
+ sockets: exports.openSockets,
  }))
  .on('SIGHUP', () => (0, shutdown_utils_1.handleServerProcessTermination)({
  server,
  reason: 'received process SIGHUP',
+ sockets: exports.openSockets,
  }));
  }
  let existingLockHash;
@@ -235,6 +269,7 @@ const handleWorkspaceChanges = async (err, changeEvents) => {
  await (0, shutdown_utils_1.handleServerProcessTermination)({
  server,
  reason: outdatedReason,
+ sockets: exports.openSockets,
  });
  return;
  }
@@ -325,6 +360,10 @@ async function startServer() {
  if (!(0, shutdown_utils_1.getOutputWatcherInstance)()) {
  (0, shutdown_utils_1.storeOutputWatcherInstance)(await (0, watcher_1.watchOutputFiles)(handleOutputsChanges));
  }
+ // listen for project graph recomputation events to collect and schedule sync generators
+ (0, project_graph_incremental_recomputation_1.registerProjectGraphRecomputationListener)(sync_generators_1.collectAndScheduleSyncGenerators);
+ // trigger an initial project graph recomputation
+ (0, project_graph_incremental_recomputation_1.addUpdatedAndDeletedFiles)([], [], []);
  return resolve(server);
  }
  catch (err) {

package/src/daemon/server/shutdown-utils.d.ts
@@ -8,8 +8,9 @@ export declare function getOutputWatcherInstance(): Watcher;
  interface HandleServerProcessTerminationParams {
  server: Server;
  reason: string;
+ sockets: Iterable<Socket>;
  }
- export declare function handleServerProcessTermination({ server, reason, }: HandleServerProcessTerminationParams): Promise<void>;
+ export declare function handleServerProcessTermination({ server, reason, sockets, }: HandleServerProcessTerminationParams): Promise<void>;
  export declare function resetInactivityTimeout(cb: () => void): void;
  export declare function respondToClient(socket: Socket, response: string, description: string): Promise<unknown>;
  export declare function respondWithErrorAndExit(socket: Socket, description: string, error: Error): Promise<void>;

package/src/daemon/server/shutdown-utils.js
@@ -30,11 +30,16 @@ function storeOutputWatcherInstance(instance) {
  function getOutputWatcherInstance() {
  return outputWatcherInstance;
  }
- async function handleServerProcessTermination({ server, reason, }) {
+ async function handleServerProcessTermination({ server, reason, sockets, }) {
  try {
- server.close();
- (0, cache_1.deleteDaemonJsonProcessCache)();
- (0, plugins_1.cleanupPlugins)();
+ await new Promise((res) => {
+ server.close(() => {
+ res(null);
+ });
+ for (const socket of sockets) {
+ socket.destroy();
+ }
+ });
  if (watcherInstance) {
  await watcherInstance.stop();
  logger_1.serverLogger.watcherLog(`Stopping the watcher for ${workspace_root_1.workspaceRoot} (sources)`);
@@ -43,6 +48,8 @@ async function handleServerProcessTermination({ server, reason, }) {
  await outputWatcherInstance.stop();
  logger_1.serverLogger.watcherLog(`Stopping the watcher for ${workspace_root_1.workspaceRoot} (outputs)`);
  }
+ (0, cache_1.deleteDaemonJsonProcessCache)();
+ (0, plugins_1.cleanupPlugins)();
  logger_1.serverLogger.log(`Server stopped because: "${reason}"`);
  }
  finally {
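
The reworked shutdown above relies on a Node.js detail: net.Server.close() only stops new connections, and its callback fires only once every existing socket has closed, which is why the daemon now tracks open sockets and destroys them explicitly. A minimal standalone sketch of that pattern (illustrative only, not Nx code):

    const net = require('net');

    const openSockets = new Set();
    const server = net.createServer((socket) => {
      openSockets.add(socket);
      socket.on('close', () => openSockets.delete(socket));
    });

    function shutdown() {
      return new Promise((resolve) => {
        // Fires only after all existing connections are gone...
        server.close(() => resolve());
        // ...so destroy the tracked sockets to let close() complete promptly.
        for (const socket of openSockets) {
          socket.destroy();
        }
      });
    }

    server.listen(0, async () => {
      await shutdown();
      console.log('server fully closed');
    });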

package/src/daemon/server/sync-generators.d.ts (new file)
@@ -0,0 +1,6 @@
+ import type { ProjectGraph } from '../../config/project-graph';
+ import { type SyncGeneratorChangesResult } from '../../utils/sync-generators';
+ export declare function getCachedSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorChangesResult[]>;
+ export declare function flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;
+ export declare function collectAndScheduleSyncGenerators(projectGraph: ProjectGraph): void;
+ export declare function getCachedRegisteredSyncGenerators(): Promise<string[]>;

package/src/daemon/server/sync-generators.js (new file)
@@ -0,0 +1,202 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
+ exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
+ exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
+ exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+ const nx_json_1 = require("../../config/nx-json");
+ const tree_1 = require("../../generators/tree");
+ const file_hasher_1 = require("../../hasher/file-hasher");
+ const project_graph_1 = require("../../project-graph/project-graph");
+ const sync_generators_1 = require("../../utils/sync-generators");
+ const workspace_root_1 = require("../../utils/workspace-root");
+ const logger_1 = require("./logger");
+ const project_graph_incremental_recomputation_1 = require("./project-graph-incremental-recomputation");
+ const syncGeneratorsCacheResultPromises = new Map();
+ let registeredTaskSyncGenerators = new Set();
+ let registeredGlobalSyncGenerators = new Set();
+ const scheduledGenerators = new Set();
+ let waitPeriod = 100;
+ let registeredSyncGenerators;
+ let scheduledTimeoutId;
+ let storedProjectGraphHash;
+ let storedNxJsonHash;
+ const log = (...messageParts) => {
+ logger_1.serverLogger.log('[SYNC]:', ...messageParts);
+ };
+ // TODO(leo): check conflicts and reuse the Tree where possible
+ async function getCachedSyncGeneratorChanges(generators) {
+ try {
+ log('get sync generators changes on demand', generators);
+ // this is invoked imperatively, so we clear any scheduled run
+ if (scheduledTimeoutId) {
+ log('clearing scheduled run');
+ clearTimeout(scheduledTimeoutId);
+ scheduledTimeoutId = undefined;
+ }
+ // reset the wait time
+ waitPeriod = 100;
+ let projects;
+ let errored = false;
+ const getProjectsConfigurations = async () => {
+ if (projects || errored) {
+ return projects;
+ }
+ const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+ projects = projectGraph
+ ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+ : null;
+ errored = error !== undefined;
+ return projects;
+ };
+ return (await Promise.all(generators.map(async (generator) => {
+ if (scheduledGenerators.has(generator) ||
+ !syncGeneratorsCacheResultPromises.has(generator)) {
+ // it's scheduled to run (there are pending changes to process) or
+ // it's not scheduled and there's no cached result, so run it
+ const projects = await getProjectsConfigurations();
+ if (projects) {
+ log(generator, 'already scheduled or not cached, running it now');
+ runGenerator(generator, projects);
+ }
+ else {
+ log(generator, 'already scheduled or not cached, project graph errored');
+ /**
+ * This should never happen. This is invoked imperatively, and by
+ * the time it is invoked, the project graph would have already
+ * been requested. If it errored, it would have been reported and
+ * this wouldn't have been invoked. We handle it just in case.
+ *
+ * Since the project graph would be reported by the relevant
+ * handlers separately, we just ignore the error, don't cache
+ * any result and return an empty result, the next time this is
+ * invoked the process will repeat until it eventually recovers
+ * when the project graph is fixed.
+ */
+ return Promise.resolve({ changes: [], generatorName: generator });
+ }
+ }
+ else {
+ log(generator, 'not scheduled and has cached result, returning cached result');
+ }
+ return syncGeneratorsCacheResultPromises.get(generator);
+ }))).flat();
+ }
+ catch (e) {
+ console.error(e);
+ syncGeneratorsCacheResultPromises.clear();
+ return [];
+ }
+ }
+ async function flushSyncGeneratorChangesToDisk(generators) {
+ log('flush sync generators changes', generators);
+ const results = await getCachedSyncGeneratorChanges(generators);
+ for (const generator of generators) {
+ syncGeneratorsCacheResultPromises.delete(generator);
+ }
+ await (0, sync_generators_1.flushSyncGeneratorChanges)(results);
+ }
+ function collectAndScheduleSyncGenerators(projectGraph) {
+ if (!projectGraph) {
+ // If the project graph is not available, we can't collect and schedule
+ // sync generators. The project graph error will be reported separately.
+ return;
+ }
+ log('collect registered sync generators');
+ collectAllRegisteredSyncGenerators(projectGraph);
+ // a change imply we need to re-run all the generators
+ // make sure to schedule all the collected generators
+ scheduledGenerators.clear();
+ for (const generator of registeredSyncGenerators) {
+ scheduledGenerators.add(generator);
+ }
+ log('scheduling:', [...scheduledGenerators]);
+ if (scheduledTimeoutId) {
+ // we have a scheduled run already, so we don't need to do anything
+ return;
+ }
+ scheduledTimeoutId = setTimeout(async () => {
+ scheduledTimeoutId = undefined;
+ if (waitPeriod < 4000) {
+ waitPeriod = waitPeriod * 2;
+ }
+ if (scheduledGenerators.size === 0) {
+ // no generators to run
+ return;
+ }
+ const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
+ for (const generator of scheduledGenerators) {
+ runGenerator(generator, projects);
+ }
+ await Promise.all(syncGeneratorsCacheResultPromises.values());
+ }, waitPeriod);
+ }
+ async function getCachedRegisteredSyncGenerators() {
+ log('get registered sync generators');
+ if (!registeredSyncGenerators) {
+ log('no registered sync generators, collecting them');
+ const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+ collectAllRegisteredSyncGenerators(projectGraph);
+ }
+ else {
+ log('registered sync generators already collected, returning them');
+ }
+ return [...registeredSyncGenerators];
+ }
+ function collectAllRegisteredSyncGenerators(projectGraph) {
+ const projectGraphHash = hashProjectGraph(projectGraph);
+ if (storedProjectGraphHash !== projectGraphHash) {
+ storedProjectGraphHash = projectGraphHash;
+ registeredTaskSyncGenerators =
+ (0, sync_generators_1.collectRegisteredTaskSyncGenerators)(projectGraph);
+ }
+ else {
+ log('project graph hash is the same, not collecting task sync generators');
+ }
+ const nxJson = (0, nx_json_1.readNxJson)();
+ const nxJsonHash = (0, file_hasher_1.hashArray)(nxJson.sync?.globalGenerators?.sort() ?? []);
+ if (storedNxJsonHash !== nxJsonHash) {
+ storedNxJsonHash = nxJsonHash;
+ registeredGlobalSyncGenerators =
+ (0, sync_generators_1.collectRegisteredGlobalSyncGenerators)(nxJson);
+ }
+ else {
+ log('nx.json hash is the same, not collecting global sync generators');
+ }
+ const generators = new Set([
+ ...registeredTaskSyncGenerators,
+ ...registeredGlobalSyncGenerators,
+ ]);
+ if (!registeredSyncGenerators) {
+ registeredSyncGenerators = generators;
+ return;
+ }
+ for (const generator of registeredSyncGenerators) {
+ if (!generators.has(generator)) {
+ registeredSyncGenerators.delete(generator);
+ syncGeneratorsCacheResultPromises.delete(generator);
+ }
+ }
+ for (const generator of generators) {
+ if (!registeredSyncGenerators.has(generator)) {
+ registeredSyncGenerators.add(generator);
+ }
+ }
+ }
+ function runGenerator(generator, projects) {
+ log('running scheduled generator', generator);
+ // remove it from the scheduled set
+ scheduledGenerators.delete(generator);
+ const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+ // run the generator and cache the result
+ syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+ log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
+ return result;
+ }));
+ }
+ function hashProjectGraph(projectGraph) {
+ const stringifiedProjects = Object.entries(projectGraph.nodes)
+ .sort(([projectNameA], [projectNameB]) => projectNameA.localeCompare(projectNameB))
+ .map(([projectName, projectConfig]) => `${projectName}:${JSON.stringify(projectConfig)}`);
+ return (0, file_hasher_1.hashArray)(stringifiedProjects);
+ }
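
collectAllRegisteredSyncGenerators() above merges task-level sync generators collected from the project graph with global ones read from nxJson.sync?.globalGenerators (the corresponding schema additions live in package/schemas/nx-schema.json, not shown here). A hypothetical nx.json fragment expressed as a JavaScript object, with the generator name invented for illustration:

    // Hypothetical configuration; only the field path comes from the code above.
    const nxJsonFragment = {
      sync: {
        globalGenerators: ['my-plugin:my-sync-generator'],
      },
    };

    // Mirrors the hash input the daemon uses to detect configuration changes.
    const hashInput = nxJsonFragment.sync?.globalGenerators?.sort() ?? [];
    console.log(hashInput); // [ 'my-plugin:my-sync-generator' ]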

package/src/daemon/server/watcher.js
@@ -10,6 +10,7 @@ const shutdown_utils_1 = require("./shutdown-utils");
  const path_2 = require("../../utils/path");
  const ignore_1 = require("../../utils/ignore");
  const cache_1 = require("../cache");
+ const server_1 = require("./server");
  const ALWAYS_IGNORE = [
  ...(0, ignore_1.getAlwaysIgnore)(workspace_root_1.workspaceRoot),
  (0, socket_utils_1.getFullOsSocketPath)(),
@@ -28,6 +29,7 @@ async function watchWorkspace(server, cb) {
  (0, shutdown_utils_1.handleServerProcessTermination)({
  server,
  reason: 'this process is no longer the current daemon (native)',
+ sockets: server_1.openSockets,
  });
  }
  if (event.path.endsWith('.gitignore') || event.path === '.nxignore') {
@@ -35,6 +37,7 @@ async function watchWorkspace(server, cb) {
  (0, shutdown_utils_1.handleServerProcessTermination)({
  server,
  reason: 'Stopping the daemon the set of ignored files changed (native)',
+ sockets: server_1.openSockets,
  });
  }
  }

package/src/daemon/socket-utils.js
@@ -15,20 +15,33 @@ exports.isWindows = (0, os_1.platform)() === 'win32';
  * See https://nodejs.org/dist/latest-v14.x/docs/api/net.html#net_identifying_paths_for_ipc_connections for a full breakdown
  * of OS differences between Unix domain sockets and named pipes.
  */
- const getFullOsSocketPath = () => exports.isWindows
- ? '\\\\.\\pipe\\nx\\' + (0, path_1.resolve)((0, tmp_dir_1.getDaemonSocketDir)())
- : (0, path_1.resolve)((0, tmp_dir_1.getDaemonSocketDir)());
+ const getFullOsSocketPath = () => {
+ const path = (0, path_1.resolve)((0, tmp_dir_1.getDaemonSocketDir)());
+ assertValidSocketPath(path);
+ return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
+ };
  exports.getFullOsSocketPath = getFullOsSocketPath;
  const getForkedProcessOsSocketPath = (id) => {
  let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(), 'fp' + id + '.sock'));
- return exports.isWindows ? '\\\\.\\pipe\\nx\\' + (0, path_1.resolve)(path) : (0, path_1.resolve)(path);
+ assertValidSocketPath(path);
+ return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
  };
  exports.getForkedProcessOsSocketPath = getForkedProcessOsSocketPath;
  const getPluginOsSocketPath = (id) => {
  let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(true), 'plugin' + id + '.sock'));
- return exports.isWindows ? '\\\\.\\pipe\\nx\\' + (0, path_1.resolve)(path) : (0, path_1.resolve)(path);
+ assertValidSocketPath(path);
+ return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
  };
  exports.getPluginOsSocketPath = getPluginOsSocketPath;
+ function assertValidSocketPath(path) {
+ if (path.length > 95) {
+ throw new Error([
+ 'Attempted to open socket that exceeds the maximum socket length.',
+ '',
+ `Set NX_SOCKET_DIR to a shorter path (e.g. ${exports.isWindows ? '%TMP%/nx-tmp' : '/tmp/nx-tmp'}) to avoid this issue.`,
+ ].join('\n'));
+ }
+ }
  function killSocketOrPath() {
  try {
  (0, fs_1.unlinkSync)((0, exports.getFullOsSocketPath)());

package/src/daemon/tmp-dir.js
@@ -53,7 +53,8 @@ function socketDirName() {
  */
  function getSocketDir(alreadyUnique = false) {
  try {
- const dir = process.env.NX_DAEMON_SOCKET_DIR ??
+ const dir = process.env.NX_SOCKET_DIR ??
+ process.env.NX_DAEMON_SOCKET_DIR ??
  (alreadyUnique ? tmp_1.tmpdir : socketDirName());
  (0, fs_extra_1.ensureDirSync)(dir);
  return dir;
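
getSocketDir() now prefers the new NX_SOCKET_DIR variable over the older NX_DAEMON_SOCKET_DIR, which pairs with the 95-character path assertion added in socket-utils.js above. An illustrative way to relocate the socket directory when invoking Nx programmatically (the target name and directory are made up; only the variable name comes from this diff):

    const { execSync } = require('child_process');

    // Run an Nx command with the daemon socket placed under a short path.
    execSync('npx nx run my-app:build', {
      stdio: 'inherit',
      env: { ...process.env, NX_SOCKET_DIR: '/tmp/nx-tmp' },
    });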

package/src/native/nx.wasm32-wasi.wasm
Binary file (contents not shown)

package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js
@@ -94,7 +94,7 @@ function addNxCloudOptionsToNxJson(tree, token, directory = '') {
  });
  }
  }
- function addNxCloudIdToNxJson(tree, nxCloudId, directory = tree.root) {
+ function addNxCloudIdToNxJson(tree, nxCloudId, directory = '') {
  const nxJsonPath = (0, path_1.join)(directory, 'nx.json');
  if (tree.exists(nxJsonPath)) {
  (0, json_1.updateJson)(tree, (0, path_1.join)(directory, 'nx.json'), (nxJson) => {

package/src/nx-cloud/models/onboarding-status.d.ts (new file)
@@ -0,0 +1 @@
+ export type NxCloudOnBoardingStatus = 'claimed' | 'unclaimed' | 'not-configured';

package/src/nx-cloud/models/onboarding-status.js (new file)
@@ -0,0 +1,2 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });

package/src/nx-cloud/utilities/is-workspace-claimed.d.ts (new file)
@@ -0,0 +1 @@
+ export declare function isWorkspaceClaimed(nxCloudAccessToken: any): Promise<any>;

package/src/nx-cloud/utilities/is-workspace-claimed.js (new file)
@@ -0,0 +1,24 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.isWorkspaceClaimed = isWorkspaceClaimed;
+ const get_cloud_options_1 = require("./get-cloud-options");
+ async function isWorkspaceClaimed(nxCloudAccessToken) {
+ if (!nxCloudAccessToken)
+ return false;
+ const apiUrl = (0, get_cloud_options_1.getCloudUrl)();
+ try {
+ const response = await require('axios').post(`${apiUrl}/nx-cloud/is-workspace-claimed`, {
+ nxCloudAccessToken,
+ });
+ if (response.data.message) {
+ return false;
+ }
+ else {
+ return response.data;
+ }
+ }
+ catch (e) {
+ // We want to handle cases the if the request fails for any reason
+ return false;
+ }
+ }
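
A possible way to call the new helper from a Node script; the deep import path is an assumption (the file ships at src/nx-cloud/utilities/is-workspace-claimed.js inside the nx package), while the behavior comes from the code above: it resolves to false when the token is missing or the request fails.

    // Assumed deep import into the published nx package; adjust if the package
    // exports map does not expose this path.
    const { isWorkspaceClaimed } = require('nx/src/nx-cloud/utilities/is-workspace-claimed');

    isWorkspaceClaimed(process.env.NX_CLOUD_ACCESS_TOKEN).then((claimed) => {
      console.log('workspace claimed?', claimed);
    });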

package/src/nx-cloud/utilities/onboarding.d.ts (new file)
@@ -0,0 +1,5 @@
+ import type { Tree } from '../../generators/tree';
+ import { NxCloudOnBoardingStatus } from '../models/onboarding-status';
+ export declare function createNxCloudOnboardingURLForWelcomeApp(tree: Tree, token?: string): Promise<NxCloudOnBoardingStatus>;
+ export declare function getNxCloudAppOnBoardingUrl(token: string): Promise<string>;
+ export declare function readNxCloudToken(tree: Tree): any;

package/src/nx-cloud/utilities/onboarding.js (new file)
@@ -0,0 +1,28 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.createNxCloudOnboardingURLForWelcomeApp = createNxCloudOnboardingURLForWelcomeApp;
+ exports.getNxCloudAppOnBoardingUrl = getNxCloudAppOnBoardingUrl;
+ exports.readNxCloudToken = readNxCloudToken;
+ const devkit_exports_1 = require("../../devkit-exports");
+ const is_workspace_claimed_1 = require("./is-workspace-claimed");
+ const url_shorten_1 = require("./url-shorten");
+ const run_command_1 = require("../../tasks-runner/run-command");
+ async function createNxCloudOnboardingURLForWelcomeApp(tree, token) {
+ token = token || readNxCloudToken(tree);
+ if (!token) {
+ return 'not-configured';
+ }
+ return (await (0, is_workspace_claimed_1.isWorkspaceClaimed)(token)) ? 'claimed' : 'unclaimed';
+ }
+ async function getNxCloudAppOnBoardingUrl(token) {
+ if (!token) {
+ return null;
+ }
+ const onboardingUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)('nx-welcome-app', token);
+ return onboardingUrl;
+ }
+ function readNxCloudToken(tree) {
+ const nxJson = (0, devkit_exports_1.readNxJson)(tree);
+ const { accessToken } = (0, run_command_1.getRunnerOptions)('default', nxJson, {}, true);
+ return accessToken;
+ }

package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js
@@ -141,9 +141,14 @@ class TargetProjectLocator {
  }
  const version = (0, semver_1.clean)(externalPackageJson.version);
  const npmProjectKey = `npm:${externalPackageJson.name}@${version}`;
- const matchingExternalNode = this.npmProjects[npmProjectKey];
+ let matchingExternalNode = this.npmProjects[npmProjectKey];
  if (!matchingExternalNode) {
- return null;
+ // check if it's a package alias, where the resolved package key is used as the version
+ const aliasNpmProjectKey = `npm:${packageName}@${npmProjectKey}`;
+ matchingExternalNode = this.npmProjects[aliasNpmProjectKey];
+ if (!matchingExternalNode) {
+ return null;
+ }
  }
  this.npmResolutionCache.set(npmImportForProject, matchingExternalNode.name);
  return matchingExternalNode.name;
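
The new fallback covers npm package aliases, where package.json maps an alias name to another package and the alias node's version field holds the resolved package key. A sketch of the two lookup keys the locator now tries, using an invented alias:

    // Invented example: package.json contains "lodash-alias": "npm:lodash@4.17.21".
    const packageName = 'lodash-alias'; // the name used in imports
    const externalPackageJson = { name: 'lodash', version: '4.17.21' };

    // First lookup (unchanged): the resolved package's own key.
    const npmProjectKey = `npm:${externalPackageJson.name}@${externalPackageJson.version}`;
    // Fallback added here: the alias key, where the resolved key acts as the version.
    const aliasNpmProjectKey = `npm:${packageName}@${npmProjectKey}`;

    console.log(npmProjectKey);      // npm:lodash@4.17.21
    console.log(aliasNpmProjectKey); // npm:lodash-alias@npm:lodash@4.17.21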

package/src/project-graph/plugins/internal-api.js
@@ -13,7 +13,6 @@ const loader_1 = require("./loader");
  const utils_1 = require("./utils");
  const error_types_1 = require("../error-types");
  const native_1 = require("../../native");
- const os_1 = require("os");
  class LoadedNxPlugin {
  constructor(plugin, pluginDefinition) {
  this.name = plugin.name;
@@ -72,12 +71,24 @@ exports.LoadedNxPlugin = LoadedNxPlugin;
  // Allows loaded plugins to not be reloaded when
  // referenced multiple times.
  exports.nxPluginCache = new Map();
+ function isIsolationEnabled() {
+ // Explicitly enabled, regardless of further conditions
+ if (process.env.NX_ISOLATE_PLUGINS === 'true') {
+ return true;
+ }
+ if (
+ // Explicitly disabled
+ process.env.NX_ISOLATE_PLUGINS === 'false' ||
+ // Isolation is disabled on WASM builds currently.
+ native_1.IS_WASM) {
+ return false;
+ }
+ // Default value
+ return true;
+ }
  async function loadNxPlugins(plugins, root = workspace_root_1.workspaceRoot) {
  performance.mark('loadNxPlugins:start');
- const loadingMethod = process.env.NX_ISOLATE_PLUGINS === 'true' ||
- (!native_1.IS_WASM &&
- (0, os_1.platform)() !== 'win32' &&
- process.env.NX_ISOLATE_PLUGINS !== 'false')
+ const loadingMethod = isIsolationEnabled()
  ? isolation_1.loadNxPluginInIsolation
  : loader_1.loadNxPlugin;
  plugins = await normalizePlugins(plugins, root);

package/src/project-graph/plugins/isolation/messaging.d.ts
@@ -106,7 +106,11 @@ export interface PluginWorkerProcessProjectGraphResult {
  tx: string;
  };
  }
- export type PluginWorkerMessage = PluginWorkerLoadMessage | PluginWorkerCreateNodesMessage | PluginCreateDependenciesMessage | PluginWorkerProcessProjectGraphMessage | PluginCreateMetadataMessage;
+ export interface PluginWorkerShutdownMessage {
+ type: 'shutdown';
+ payload: {};
+ }
+ export type PluginWorkerMessage = PluginWorkerLoadMessage | PluginWorkerShutdownMessage | PluginWorkerCreateNodesMessage | PluginCreateDependenciesMessage | PluginWorkerProcessProjectGraphMessage | PluginCreateMetadataMessage;
  export type PluginWorkerResult = PluginWorkerLoadResult | PluginWorkerCreateNodesResult | PluginCreateDependenciesResult | PluginWorkerProcessProjectGraphResult | PluginCreateMetadataResult;
  export declare function isPluginWorkerMessage(message: Serializable): message is PluginWorkerMessage;
  export declare function isPluginWorkerResult(message: Serializable): message is PluginWorkerResult;

package/src/project-graph/plugins/isolation/messaging.js
@@ -14,6 +14,7 @@ function isPluginWorkerMessage(message) {
  'createDependencies',
  'processProjectGraph',
  'createMetadata',
+ 'shutdown',
  ].includes(message.type));
  }
  function isPluginWorkerResult(message) {

package/src/project-graph/plugins/isolation/plugin-pool.js
@@ -29,8 +29,8 @@ async function loadRemoteNxPlugin(plugin, root) {
  const exitHandler = createWorkerExitHandler(worker, pendingPromises);
  const cleanupFunction = () => {
  worker.off('exit', exitHandler);
+ shutdownPluginWorker(socket);
  socket.destroy();
- shutdownPluginWorker(worker);
  nxPluginWorkerCache.delete(cacheKey);
  };
  cleanupFunctions.add(cleanupFunction);
@@ -55,11 +55,8 @@ async function loadRemoteNxPlugin(plugin, root) {
  nxPluginWorkerCache.set(cacheKey, pluginPromise);
  return [pluginPromise, cleanupFunction];
  }
- function shutdownPluginWorker(worker) {
- // Clears the plugin cache so no refs to the workers are held
- internal_api_1.nxPluginCache.clear();
- // logger.verbose(`[plugin-pool] starting worker shutdown`);
- worker.kill('SIGINT');
+ function shutdownPluginWorker(socket) {
+ (0, messaging_1.sendMessageOverSocket)(socket, { type: 'shutdown', payload: {} });
  }
  /**
  * Creates a message handler for the given worker.
@@ -200,6 +197,7 @@ function createWorkerExitHandler(worker, pendingPromises) {
  }
  let cleanedUp = false;
  const exitHandler = () => {
+ internal_api_1.nxPluginCache.clear();
  for (const fn of cleanupFunctions) {
  fn();
  }

package/src/project-graph/plugins/isolation/plugin-worker.js
@@ -49,6 +49,21 @@ const server = (0, net_1.createServer)((socket) => {
  };
  }
  },
+ shutdown: async () => {
+ // Stops accepting new connections, but existing connections are
+ // not closed immediately.
+ server.close(() => {
+ try {
+ (0, fs_1.unlinkSync)(socketPath);
+ }
+ catch (e) { }
+ process.exit(0);
+ });
+ // Closes existing connection.
+ socket.end();
+ // Destroys the socket once it's fully closed.
+ socket.destroySoon();
+ },
  createNodes: async ({ configFiles, context, tx }) => {
  try {
  const result = await plugin.createNodes[1](configFiles, context);