nx 21.3.0-canary.20250710-13551c9 → 21.3.0-canary.20250712-18e5d95

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/package.json +12 -12
  2. package/src/command-line/format/command-object.js +12 -6
  3. package/src/command-line/yargs-utils/shared-options.d.ts +1 -0
  4. package/src/command-line/yargs-utils/shared-options.js +5 -0
  5. package/src/core/graph/main.js +1 -1
  6. package/src/core/graph/styles.js +1 -1
  7. package/src/daemon/client/client.js +7 -15
  8. package/src/daemon/client/daemon-socket-messenger.js +2 -9
  9. package/src/daemon/server/file-watching/file-watcher-sockets.js +1 -1
  10. package/src/daemon/server/handle-context-file-data.js +1 -1
  11. package/src/daemon/server/handle-flush-sync-generator-changes-to-disk.js +1 -1
  12. package/src/daemon/server/handle-get-files-in-directory.js +1 -1
  13. package/src/daemon/server/handle-get-registered-sync-generators.js +1 -1
  14. package/src/daemon/server/handle-get-sync-generator-changes.js +1 -1
  15. package/src/daemon/server/handle-glob.js +2 -2
  16. package/src/daemon/server/handle-hash-tasks.d.ts +1 -1
  17. package/src/daemon/server/handle-hash-tasks.js +1 -1
  18. package/src/daemon/server/handle-nx-workspace-files.js +1 -1
  19. package/src/daemon/server/handle-outputs-tracking.js +1 -1
  20. package/src/daemon/server/handle-task-history.d.ts +2 -2
  21. package/src/daemon/server/handle-task-history.js +2 -2
  22. package/src/daemon/server/handle-tasks-execution-hooks.d.ts +1 -1
  23. package/src/daemon/server/handle-tasks-execution-hooks.js +1 -1
  24. package/src/daemon/server/server.d.ts +2 -2
  25. package/src/daemon/server/server.js +28 -49
  26. package/src/daemon/server/shutdown-utils.js +1 -2
  27. package/src/native/index.d.ts +2 -1
  28. package/src/native/nx.wasm32-wasi.wasm +0 -0
  29. package/src/project-graph/plugins/isolation/messaging.js +1 -2
  30. package/src/project-graph/plugins/isolation/plugin-pool.js +5 -19
  31. package/src/project-graph/plugins/loaded-nx-plugin.js +0 -2
  32. package/src/tasks-runner/is-tui-enabled.js +8 -0
  33. package/src/tasks-runner/pseudo-ipc.js +4 -4
  34. package/src/tasks-runner/run-command.js +1 -1
  35. package/src/utils/command-line-utils.d.ts +1 -0
  36. package/src/utils/consume-messages-from-socket.d.ts +0 -2
  37. package/src/utils/consume-messages-from-socket.js +3 -18
  38. package/src/utils/project-graph-utils.d.ts +6 -1
  39. package/src/utils/project-graph-utils.js +11 -6
@@ -1 +1 @@
- "use strict";(self.webpackChunk=self.webpackChunk||[]).push([[869],{5873:()=>{}},s=>{var e;e=5873,s(s.s=e)}]);
+ "use strict";(self.webpackChunk=self.webpackChunk||[]).push([[869],{7910:()=>{}},s=>{var e;e=7910,s(s.s=e)}]);
@@ -32,8 +32,6 @@ const update_workspace_context_1 = require("../message-types/update-workspace-co
  const flush_sync_generator_changes_to_disk_1 = require("../message-types/flush-sync-generator-changes-to-disk");
  const delayed_spinner_1 = require("../../utils/delayed-spinner");
  const run_tasks_execution_hooks_1 = require("../message-types/run-tasks-execution-hooks");
- const node_v8_1 = require("node:v8");
- const consume_messages_from_socket_1 = require("../../utils/consume-messages-from-socket");
  const DAEMON_ENV_SETTINGS = {
  NX_PROJECT_GLOB_CACHE: 'false',
  NX_CACHE_PROJECTS_CONFIG: 'false',
@@ -148,9 +146,7 @@ class DaemonClient {
  return this.sendToDaemonViaQueue({
  type: 'HASH_TASKS',
  runnerOptions,
- env: process.env.NX_USE_V8_SERIALIZER !== 'false'
- ? structuredClone(process.env)
- : env,
+ env,
  tasks,
  taskGraph,
  });
@@ -171,9 +167,7 @@ class DaemonClient {
  await this.queue.sendToQueue(() => {
  messenger = new daemon_socket_messenger_1.DaemonSocketMessenger((0, net_1.connect)((0, socket_utils_1.getFullOsSocketPath)())).listen((message) => {
  try {
- const parsedMessage = (0, consume_messages_from_socket_1.isJsonMessage)(message)
- ? JSON.parse(message)
- : (0, node_v8_1.deserialize)(Buffer.from(message, 'binary'));
+ const parsedMessage = JSON.parse(message);
  callback(null, parsedMessage);
  }
  catch (e) {
@@ -422,17 +416,15 @@ class DaemonClient {
  }
  handleMessage(serializedResult) {
  try {
- perf_hooks_1.performance.mark('result-parse-start');
- const parsedResult = (0, consume_messages_from_socket_1.isJsonMessage)(serializedResult)
- ? JSON.parse(serializedResult)
- : (0, node_v8_1.deserialize)(Buffer.from(serializedResult, 'binary'));
- perf_hooks_1.performance.mark('result-parse-end');
- perf_hooks_1.performance.measure('deserialize daemon response', 'result-parse-start', 'result-parse-end');
+ perf_hooks_1.performance.mark('json-parse-start');
+ const parsedResult = JSON.parse(serializedResult);
+ perf_hooks_1.performance.mark('json-parse-end');
+ perf_hooks_1.performance.measure('deserialize daemon response', 'json-parse-start', 'json-parse-end');
  if (parsedResult.error) {
  this.currentReject(parsedResult.error);
  }
  else {
- perf_hooks_1.performance.measure('total for sendMessageToDaemon()', 'sendMessageToDaemon-start', 'result-parse-end');
+ perf_hooks_1.performance.measure('total for sendMessageToDaemon()', 'sendMessageToDaemon-start', 'json-parse-end');
  return this.currentResolve(parsedResult);
  }
  }
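Note: with the V8 path removed, the client always JSON.parses the daemon's response and times that parse with Node's perf_hooks. A minimal sketch of the same pattern in isolation (the function name is illustrative; only the mark/measure labels come from the diff):

    // Sketch: timing the JSON parse of a daemon response with perf_hooks.
    import { performance } from 'node:perf_hooks';

    function parseDaemonResponse(serialized: string): any {
      performance.mark('json-parse-start');
      const parsed = JSON.parse(serialized);
      performance.mark('json-parse-end');
      performance.measure('deserialize daemon response', 'json-parse-start', 'json-parse-end');
      return parsed;
    }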
@@ -1,22 +1,15 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.DaemonSocketMessenger = void 0;
- const v8_1 = require("v8");
  const consume_messages_from_socket_1 = require("../../utils/consume-messages-from-socket");
  class DaemonSocketMessenger {
  constructor(socket) {
  this.socket = socket;
  }
  async sendMessage(messageToDaemon) {
- if (process.env.NX_USE_V8_SERIALIZER !== 'false') {
- const serialized = (0, v8_1.serialize)(messageToDaemon);
- this.socket.write(serialized.toString('binary'));
- }
- else {
- this.socket.write(JSON.stringify(messageToDaemon));
- }
+ this.socket.write(JSON.stringify(messageToDaemon));
  // send EOT to indicate that the message has been fully written
- this.socket.write(consume_messages_from_socket_1.MESSAGE_END_SEQ);
+ this.socket.write(String.fromCodePoint(4));
  }
  listen(onData, onClose = () => { }, onError = (err) => { }) {
  this.socket.on('data', (0, consume_messages_from_socket_1.consumeMessagesFromSocket)(async (message) => {
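Note: this file (and the pseudo-ipc and shutdown-utils hunks below) now writes the EOT character inline instead of importing MESSAGE_END_SEQ, but the framing itself is unchanged: each JSON payload is terminated by a single 0x04 byte and the receiving side splits the stream on it. A minimal sketch of such a consumer, as an illustration of the convention rather than the actual consumeMessagesFromSocket implementation:

    // Sketch (assumed framing): messages are JSON text terminated by EOT (0x04).
    const EOT = String.fromCodePoint(4);

    function makeMessageConsumer(onMessage: (msg: string) => void) {
      let buffer = '';
      return (chunk: Buffer | string) => {
        buffer += chunk.toString();
        let idx: number;
        // A complete message is everything up to the next EOT byte.
        while ((idx = buffer.indexOf(EOT)) !== -1) {
          onMessage(buffer.slice(0, idx));
          buffer = buffer.slice(idx + 1);
        }
      };
    }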
@@ -59,7 +59,7 @@ function notifyFileWatcherSockets(createdFiles, updatedFiles, deletedFiles) {
  changedProjects,
  changedFiles,
  }),
- }), 'json');
+ }));
  }
  }));
  });
@@ -6,7 +6,7 @@ const workspace_root_1 = require("../../utils/workspace-root");
  async function handleContextFileData() {
  const files = await (0, workspace_context_1.getAllFileDataInContext)(workspace_root_1.workspaceRoot);
  return {
- response: files,
+ response: JSON.stringify(files),
  description: 'handleContextFileData',
  };
  }
@@ -5,7 +5,7 @@ const sync_generators_1 = require("./sync-generators");
  async function handleFlushSyncGeneratorChangesToDisk(generators) {
  const result = await (0, sync_generators_1.flushSyncGeneratorChangesToDisk)(generators);
  return {
- response: result,
+ response: JSON.stringify(result),
  description: 'handleFlushSyncGeneratorChangesToDisk',
  };
  }
@@ -6,7 +6,7 @@ const workspace_root_1 = require("../../utils/workspace-root");
  async function handleGetFilesInDirectory(dir) {
  const files = await (0, workspace_context_1.getFilesInDirectoryUsingContext)(workspace_root_1.workspaceRoot, dir);
  return {
- response: files,
+ response: JSON.stringify(files),
  description: 'handleNxWorkspaceFiles',
  };
  }
@@ -5,7 +5,7 @@ const sync_generators_1 = require("./sync-generators");
  async function handleGetRegisteredSyncGenerators() {
  const syncGenerators = await (0, sync_generators_1.getCachedRegisteredSyncGenerators)();
  return {
- response: syncGenerators,
+ response: JSON.stringify(syncGenerators),
  description: 'handleGetSyncGeneratorChanges',
  };
  }
@@ -13,7 +13,7 @@ async function handleGetSyncGeneratorChanges(generators) {
  outOfSyncMessage: change.outOfSyncMessage,
  });
  return {
- response: result,
+ response: JSON.stringify(result),
  description: 'handleGetSyncGeneratorChanges',
  };
  }
@@ -7,14 +7,14 @@ const workspace_context_1 = require("../../utils/workspace-context");
  async function handleGlob(globs, exclude) {
  const files = await (0, workspace_context_1.globWithWorkspaceContext)(workspace_root_1.workspaceRoot, globs, exclude);
  return {
- response: files,
+ response: JSON.stringify(files),
  description: 'handleGlob',
  };
  }
  async function handleMultiGlob(globs, exclude) {
  const files = await (0, workspace_context_1.multiGlobWithWorkspaceContext)(workspace_root_1.workspaceRoot, globs, exclude);
  return {
- response: files,
+ response: JSON.stringify(files),
  description: 'handleMultiGlob',
  };
  }
@@ -5,6 +5,6 @@ export declare function handleHashTasks(payload: {
  tasks: Task[];
  taskGraph: TaskGraph;
  }): Promise<{
- response: import("../../hasher/task-hasher").Hash[];
+ response: string;
  description: string;
  }>;
@@ -27,7 +27,7 @@ async function handleHashTasks(payload) {
  storedProjectGraph = projectGraph;
  storedHasher = new task_hasher_1.InProcessTaskHasher(projectGraph, nxJson, rustReferences, payload.runnerOptions);
  }
- const response = await storedHasher.hashTasks(payload.tasks, payload.taskGraph, payload.env);
+ const response = JSON.stringify(await storedHasher.hashTasks(payload.tasks, payload.taskGraph, payload.env));
  return {
  response,
  description: 'handleHashTasks',
@@ -6,7 +6,7 @@ const workspace_root_1 = require("../../utils/workspace-root");
  async function handleNxWorkspaceFiles(projectRootMap) {
  const files = await (0, workspace_context_1.getNxWorkspaceFilesFromContext)(workspace_root_1.workspaceRoot, projectRootMap);
  return {
- response: files,
+ response: JSON.stringify(files),
  description: 'handleNxWorkspaceFiles',
  };
  }
@@ -22,7 +22,7 @@ async function handleOutputsHashesMatch(payload) {
  try {
  const res = await (0, outputs_tracking_1.outputsHashesMatch)(payload.data.outputs, payload.data.hash);
  return {
- response: res,
+ response: JSON.stringify(res),
  description: 'outputsHashesMatch',
  };
  }
@@ -4,10 +4,10 @@ export declare function handleRecordTaskRuns(taskRuns: TaskRun[]): Promise<{
  description: string;
  }>;
  export declare function handleGetFlakyTasks(hashes: string[]): Promise<{
- response: string[];
+ response: string;
  description: string;
  }>;
  export declare function handleGetEstimatedTaskTimings(targets: TaskTarget[]): Promise<{
- response: Record<string, number>;
+ response: string;
  description: string;
  }>;
@@ -16,7 +16,7 @@ async function handleGetFlakyTasks(hashes) {
  const taskHistory = (0, task_history_1.getTaskHistory)();
  const history = await taskHistory.getFlakyTasks(hashes);
  return {
- response: history,
+ response: JSON.stringify(history),
  description: 'handleGetFlakyTasks',
  };
  }
@@ -24,7 +24,7 @@ async function handleGetEstimatedTaskTimings(targets) {
  const taskHistory = (0, task_history_1.getTaskHistory)();
  const history = await taskHistory.getEstimatedTaskTimings(targets);
  return {
- response: history,
+ response: JSON.stringify(history),
  description: 'handleGetEstimatedTaskTimings',
  };
  }
@@ -1,6 +1,6 @@
  import type { PostTasksExecutionContext, PreTasksExecutionContext } from '../../project-graph/plugins/public-api';
  export declare function handleRunPreTasksExecution(context: PreTasksExecutionContext): Promise<{
- response: NodeJS.ProcessEnv[];
+ response: string;
  description: string;
  error?: undefined;
  } | {
@@ -7,7 +7,7 @@ async function handleRunPreTasksExecution(context) {
  try {
  const envs = await (0, tasks_execution_hooks_1.runPreTasksExecution)(context);
  return {
- response: envs,
+ response: JSON.stringify(envs),
  description: 'handleRunPreTasksExecution',
  };
  }
@@ -2,8 +2,8 @@ import { Server, Socket } from 'net';
  export type HandlerResult = {
  description: string;
  error?: any;
- response?: string | object | boolean;
+ response?: string;
  };
  export declare const openSockets: Set<Socket>;
- export declare function handleResult(socket: Socket, type: string, hrFn: () => Promise<HandlerResult>, mode: 'json' | 'v8'): Promise<void>;
+ export declare function handleResult(socket: Socket, type: string, hrFn: () => Promise<HandlerResult>): Promise<void>;
  export declare function startServer(): Promise<Server>;
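Note: together with the handler hunks above, HandlerResult.response is now always a pre-serialized string: each handler JSON.stringifys its own payload, so handleResult no longer needs a 'json' | 'v8' mode parameter. A sketch of the narrowed contract (handleExample is a hypothetical handler, not part of the package):

    type HandlerResult = {
      description: string;
      error?: any;
      response?: string; // always a string now, never an object or boolean
    };

    async function handleExample(): Promise<HandlerResult> {
      const data = { ok: true }; // whatever the handler computes
      return {
        response: JSON.stringify(data), // serialized by the handler itself
        description: 'handleExample',
      };
    }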
@@ -52,7 +52,6 @@ const flush_sync_generator_changes_to_disk_1 = require("../message-types/flush-s
  const handle_flush_sync_generator_changes_to_disk_1 = require("./handle-flush-sync-generator-changes-to-disk");
  const run_tasks_execution_hooks_1 = require("../message-types/run-tasks-execution-hooks");
  const handle_tasks_execution_hooks_1 = require("./handle-tasks-execution-hooks");
- const v8_1 = require("v8");
  let performanceObserver;
  let workspaceWatcherError;
  let outputsWatcherError;
@@ -97,101 +96,92 @@ async function handleMessage(socket, data) {
  (0, shutdown_utils_1.resetInactivityTimeout)(handleInactivityTimeout);
  const unparsedPayload = data;
  let payload;
- let mode = 'json';
  try {
- // JSON Message
- if ((0, consume_messages_from_socket_1.isJsonMessage)(unparsedPayload)) {
- payload = JSON.parse(unparsedPayload);
- }
- else {
- // V8 Serialized Message
- payload = (0, v8_1.deserialize)(Buffer.from(unparsedPayload, 'binary'));
- mode = 'v8';
- }
+ payload = JSON.parse(unparsedPayload);
  }
  catch (e) {
  await (0, shutdown_utils_1.respondWithErrorAndExit)(socket, `Invalid payload from the client`, new Error(`Unsupported payload sent to daemon server: ${unparsedPayload}`));
  }
  if (payload.type === 'PING') {
- await handleResult(socket, 'PING', () => Promise.resolve({ response: true, description: 'ping' }), mode);
+ await handleResult(socket, 'PING', () => Promise.resolve({ response: JSON.stringify(true), description: 'ping' }));
  }
  else if (payload.type === 'REQUEST_PROJECT_GRAPH') {
- await handleResult(socket, 'REQUEST_PROJECT_GRAPH', () => (0, handle_request_project_graph_1.handleRequestProjectGraph)(), mode);
+ await handleResult(socket, 'REQUEST_PROJECT_GRAPH', () => (0, handle_request_project_graph_1.handleRequestProjectGraph)());
  }
  else if (payload.type === 'HASH_TASKS') {
- await handleResult(socket, 'HASH_TASKS', () => (0, handle_hash_tasks_1.handleHashTasks)(payload), mode);
+ await handleResult(socket, 'HASH_TASKS', () => (0, handle_hash_tasks_1.handleHashTasks)(payload));
  }
  else if (payload.type === 'PROCESS_IN_BACKGROUND') {
- await handleResult(socket, 'PROCESS_IN_BACKGROUND', () => (0, handle_process_in_background_1.handleProcessInBackground)(payload), mode);
+ await handleResult(socket, 'PROCESS_IN_BACKGROUND', () => (0, handle_process_in_background_1.handleProcessInBackground)(payload));
  }
  else if (payload.type === 'RECORD_OUTPUTS_HASH') {
- await handleResult(socket, 'RECORD_OUTPUTS_HASH', () => (0, handle_outputs_tracking_1.handleRecordOutputsHash)(payload), mode);
+ await handleResult(socket, 'RECORD_OUTPUTS_HASH', () => (0, handle_outputs_tracking_1.handleRecordOutputsHash)(payload));
  }
  else if (payload.type === 'OUTPUTS_HASHES_MATCH') {
- await handleResult(socket, 'OUTPUTS_HASHES_MATCH', () => (0, handle_outputs_tracking_1.handleOutputsHashesMatch)(payload), mode);
+ await handleResult(socket, 'OUTPUTS_HASHES_MATCH', () => (0, handle_outputs_tracking_1.handleOutputsHashesMatch)(payload));
  }
  else if (payload.type === 'REQUEST_SHUTDOWN') {
- await handleResult(socket, 'REQUEST_SHUTDOWN', () => (0, handle_request_shutdown_1.handleRequestShutdown)(server, numberOfOpenConnections), mode);
+ await handleResult(socket, 'REQUEST_SHUTDOWN', () => (0, handle_request_shutdown_1.handleRequestShutdown)(server, numberOfOpenConnections));
  }
  else if (payload.type === 'REGISTER_FILE_WATCHER') {
  file_watcher_sockets_1.registeredFileWatcherSockets.push({ socket, config: payload.config });
  }
  else if ((0, glob_1.isHandleGlobMessage)(payload)) {
- await handleResult(socket, glob_1.GLOB, () => (0, handle_glob_1.handleGlob)(payload.globs, payload.exclude), mode);
+ await handleResult(socket, glob_1.GLOB, () => (0, handle_glob_1.handleGlob)(payload.globs, payload.exclude));
  }
  else if ((0, glob_1.isHandleMultiGlobMessage)(payload)) {
- await handleResult(socket, glob_1.MULTI_GLOB, () => (0, handle_glob_1.handleMultiGlob)(payload.globs, payload.exclude), mode);
+ await handleResult(socket, glob_1.MULTI_GLOB, () => (0, handle_glob_1.handleMultiGlob)(payload.globs, payload.exclude));
  }
  else if ((0, get_nx_workspace_files_1.isHandleNxWorkspaceFilesMessage)(payload)) {
- await handleResult(socket, get_nx_workspace_files_1.GET_NX_WORKSPACE_FILES, () => (0, handle_nx_workspace_files_1.handleNxWorkspaceFiles)(payload.projectRootMap), mode);
+ await handleResult(socket, get_nx_workspace_files_1.GET_NX_WORKSPACE_FILES, () => (0, handle_nx_workspace_files_1.handleNxWorkspaceFiles)(payload.projectRootMap));
  }
  else if ((0, get_files_in_directory_1.isHandleGetFilesInDirectoryMessage)(payload)) {
- await handleResult(socket, get_files_in_directory_1.GET_FILES_IN_DIRECTORY, () => (0, handle_get_files_in_directory_1.handleGetFilesInDirectory)(payload.dir), mode);
+ await handleResult(socket, get_files_in_directory_1.GET_FILES_IN_DIRECTORY, () => (0, handle_get_files_in_directory_1.handleGetFilesInDirectory)(payload.dir));
  }
  else if ((0, get_context_file_data_1.isHandleContextFileDataMessage)(payload)) {
- await handleResult(socket, get_context_file_data_1.GET_CONTEXT_FILE_DATA, () => (0, handle_context_file_data_1.handleContextFileData)(), mode);
+ await handleResult(socket, get_context_file_data_1.GET_CONTEXT_FILE_DATA, () => (0, handle_context_file_data_1.handleContextFileData)());
  }
  else if ((0, hash_glob_1.isHandleHashGlobMessage)(payload)) {
- await handleResult(socket, hash_glob_1.HASH_GLOB, () => (0, handle_hash_glob_1.handleHashGlob)(payload.globs, payload.exclude), mode);
+ await handleResult(socket, hash_glob_1.HASH_GLOB, () => (0, handle_hash_glob_1.handleHashGlob)(payload.globs, payload.exclude));
  }
  else if ((0, hash_glob_1.isHandleHashMultiGlobMessage)(payload)) {
- await handleResult(socket, hash_glob_1.HASH_GLOB, () => (0, handle_hash_glob_1.handleHashMultiGlob)(payload.globGroups), mode);
+ await handleResult(socket, hash_glob_1.HASH_GLOB, () => (0, handle_hash_glob_1.handleHashMultiGlob)(payload.globGroups));
  }
  else if ((0, task_history_1.isHandleGetFlakyTasksMessage)(payload)) {
- await handleResult(socket, task_history_1.GET_FLAKY_TASKS, () => (0, handle_task_history_1.handleGetFlakyTasks)(payload.hashes), mode);
+ await handleResult(socket, task_history_1.GET_FLAKY_TASKS, () => (0, handle_task_history_1.handleGetFlakyTasks)(payload.hashes));
  }
  else if ((0, task_history_1.isHandleGetEstimatedTaskTimings)(payload)) {
- await handleResult(socket, task_history_1.GET_ESTIMATED_TASK_TIMINGS, () => (0, handle_task_history_1.handleGetEstimatedTaskTimings)(payload.targets), mode);
+ await handleResult(socket, task_history_1.GET_ESTIMATED_TASK_TIMINGS, () => (0, handle_task_history_1.handleGetEstimatedTaskTimings)(payload.targets));
  }
  else if ((0, task_history_1.isHandleWriteTaskRunsToHistoryMessage)(payload)) {
- await handleResult(socket, task_history_1.RECORD_TASK_RUNS, () => (0, handle_task_history_1.handleRecordTaskRuns)(payload.taskRuns), mode);
+ await handleResult(socket, task_history_1.RECORD_TASK_RUNS, () => (0, handle_task_history_1.handleRecordTaskRuns)(payload.taskRuns));
  }
  else if ((0, force_shutdown_1.isHandleForceShutdownMessage)(payload)) {
- await handleResult(socket, 'FORCE_SHUTDOWN', () => (0, handle_force_shutdown_1.handleForceShutdown)(server), mode);
+ await handleResult(socket, 'FORCE_SHUTDOWN', () => (0, handle_force_shutdown_1.handleForceShutdown)(server));
  }
  else if ((0, get_sync_generator_changes_1.isHandleGetSyncGeneratorChangesMessage)(payload)) {
- await handleResult(socket, get_sync_generator_changes_1.GET_SYNC_GENERATOR_CHANGES, () => (0, handle_get_sync_generator_changes_1.handleGetSyncGeneratorChanges)(payload.generators), mode);
+ await handleResult(socket, get_sync_generator_changes_1.GET_SYNC_GENERATOR_CHANGES, () => (0, handle_get_sync_generator_changes_1.handleGetSyncGeneratorChanges)(payload.generators));
  }
  else if ((0, flush_sync_generator_changes_to_disk_1.isHandleFlushSyncGeneratorChangesToDiskMessage)(payload)) {
- await handleResult(socket, flush_sync_generator_changes_to_disk_1.FLUSH_SYNC_GENERATOR_CHANGES_TO_DISK, () => (0, handle_flush_sync_generator_changes_to_disk_1.handleFlushSyncGeneratorChangesToDisk)(payload.generators), mode);
+ await handleResult(socket, flush_sync_generator_changes_to_disk_1.FLUSH_SYNC_GENERATOR_CHANGES_TO_DISK, () => (0, handle_flush_sync_generator_changes_to_disk_1.handleFlushSyncGeneratorChangesToDisk)(payload.generators));
  }
  else if ((0, get_registered_sync_generators_1.isHandleGetRegisteredSyncGeneratorsMessage)(payload)) {
- await handleResult(socket, get_registered_sync_generators_1.GET_REGISTERED_SYNC_GENERATORS, () => (0, handle_get_registered_sync_generators_1.handleGetRegisteredSyncGenerators)(), mode);
+ await handleResult(socket, get_registered_sync_generators_1.GET_REGISTERED_SYNC_GENERATORS, () => (0, handle_get_registered_sync_generators_1.handleGetRegisteredSyncGenerators)());
  }
  else if ((0, update_workspace_context_1.isHandleUpdateWorkspaceContextMessage)(payload)) {
- await handleResult(socket, update_workspace_context_1.UPDATE_WORKSPACE_CONTEXT, () => (0, handle_update_workspace_context_1.handleUpdateWorkspaceContext)(payload.createdFiles, payload.updatedFiles, payload.deletedFiles), mode);
+ await handleResult(socket, update_workspace_context_1.UPDATE_WORKSPACE_CONTEXT, () => (0, handle_update_workspace_context_1.handleUpdateWorkspaceContext)(payload.createdFiles, payload.updatedFiles, payload.deletedFiles));
  }
  else if ((0, run_tasks_execution_hooks_1.isHandlePreTasksExecutionMessage)(payload)) {
- await handleResult(socket, run_tasks_execution_hooks_1.PRE_TASKS_EXECUTION, () => (0, handle_tasks_execution_hooks_1.handleRunPreTasksExecution)(payload.context), mode);
+ await handleResult(socket, run_tasks_execution_hooks_1.PRE_TASKS_EXECUTION, () => (0, handle_tasks_execution_hooks_1.handleRunPreTasksExecution)(payload.context));
  }
  else if ((0, run_tasks_execution_hooks_1.isHandlePostTasksExecutionMessage)(payload)) {
- await handleResult(socket, run_tasks_execution_hooks_1.POST_TASKS_EXECUTION, () => (0, handle_tasks_execution_hooks_1.handleRunPostTasksExecution)(payload.context), mode);
+ await handleResult(socket, run_tasks_execution_hooks_1.POST_TASKS_EXECUTION, () => (0, handle_tasks_execution_hooks_1.handleRunPostTasksExecution)(payload.context));
  }
  else {
  await (0, shutdown_utils_1.respondWithErrorAndExit)(socket, `Invalid payload from the client`, new Error(`Unsupported payload sent to daemon server: ${unparsedPayload}`));
  }
  }
- async function handleResult(socket, type, hrFn, mode) {
+ async function handleResult(socket, type, hrFn) {
  let hr;
  const startMark = new Date();
  try {
@@ -205,13 +195,10 @@ async function handleResult(socket, type, hrFn, mode) {
  await (0, shutdown_utils_1.respondWithErrorAndExit)(socket, hr.description, hr.error);
  }
  else {
- const response = typeof hr.response === 'string'
- ? hr.response
- : serializeUnserializedResult(hr.response, mode);
- await (0, shutdown_utils_1.respondToClient)(socket, response, hr.description);
+ await (0, shutdown_utils_1.respondToClient)(socket, hr.response, hr.description);
  }
  const endMark = new Date();
- logger_1.serverLogger.log(`Handled ${mode} message ${type}. Handling time: ${doneHandlingMark.getTime() - startMark.getTime()}. Response time: ${endMark.getTime() - doneHandlingMark.getTime()}.`);
+ logger_1.serverLogger.log(`Handled ${type}. Handling time: ${doneHandlingMark.getTime() - startMark.getTime()}. Response time: ${endMark.getTime() - doneHandlingMark.getTime()}.`);
  }
  function handleInactivityTimeout() {
  if ((0, file_watcher_sockets_1.hasRegisteredFileWatcherSockets)()) {
@@ -421,11 +408,3 @@ async function startServer() {
  }
  });
  }
- function serializeUnserializedResult(response, mode) {
- if (mode === 'json') {
- return JSON.stringify(response);
- }
- else {
- return (0, v8_1.serialize)(response).toString('binary');
- }
- }
@@ -16,7 +16,6 @@ const cache_1 = require("../cache");
  const error_types_1 = require("../../project-graph/error-types");
  const db_connection_1 = require("../../utils/db-connection");
  const get_plugins_1 = require("../../project-graph/plugins/get-plugins");
- const consume_messages_from_socket_1 = require("../../utils/consume-messages-from-socket");
  exports.SERVER_INACTIVITY_TIMEOUT_MS = 10800000; // 10800000 ms = 3 hours
  let watcherInstance;
  function storeWatcherInstance(instance) {
@@ -71,7 +70,7 @@ function respondToClient(socket, response, description) {
  if (description) {
  logger_1.serverLogger.requestLog(`Responding to the client.`, description);
  }
- socket.write(response + consume_messages_from_socket_1.MESSAGE_END_SEQ, (err) => {
+ socket.write(`${response}${String.fromCodePoint(4)}`, (err) => {
  if (err) {
  console.error(err);
  }
@@ -8,7 +8,7 @@ export declare class ExternalObject<T> {
  }
  }
  export declare class AppLifeCycle {
- constructor(tasks: Array<Task>, initiatingTasks: Array<string>, runMode: RunMode, pinnedTasks: Array<string>, tuiCliArgs: TuiCliArgs, tuiConfig: TuiConfig, titleText: string, workspaceRoot: string)
+ constructor(tasks: Array<Task>, initiatingTasks: Array<string>, runMode: RunMode, pinnedTasks: Array<string>, tuiCliArgs: TuiCliArgs, tuiConfig: TuiConfig, titleText: string, workspaceRoot: string, taskGraph: TaskGraph)
  startCommand(threadCount?: number | undefined | null): void
  scheduleTask(task: Task): void
  startTasks(tasks: Array<Task>, metadata: object): void
@@ -322,6 +322,7 @@ export interface TaskGraph {
  roots: Array<string>
  tasks: Record<string, Task>
  dependencies: Record<string, Array<string>>
+ continuousDependencies: Record<string, Array<string>>
  }

  export interface TaskResult {
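Note: the native TaskGraph now carries continuousDependencies alongside dependencies, and (per the AppLifeCycle declaration above and the run-command.js hunk at the end of this diff) the task graph is passed to the TUI as a new ninth constructor argument. A sketch of a value matching the updated interface, with made-up task ids:

    // Sketch only: task ids are hypothetical.
    const taskGraph: TaskGraph = {
      roots: ['app:build'],
      tasks: {}, // Record<string, Task>, omitted here
      dependencies: { 'app:build': ['lib:build'] },
      // long-running (continuous) prerequisites are tracked separately from ordinary dependencies
      continuousDependencies: { 'app:e2e': ['api:serve'] },
    };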
Binary file
@@ -4,7 +4,6 @@ exports.isPluginWorkerMessage = isPluginWorkerMessage;
  exports.isPluginWorkerResult = isPluginWorkerResult;
  exports.consumeMessage = consumeMessage;
  exports.sendMessageOverSocket = sendMessageOverSocket;
- const consume_messages_from_socket_1 = require("../../../utils/consume-messages-from-socket");
  function isPluginWorkerMessage(message) {
  return (typeof message === 'object' &&
  'type' in message &&
@@ -47,5 +46,5 @@ async function consumeMessage(socket, raw, handlers) {
  }
  }
  function sendMessageOverSocket(socket, message) {
- socket.write(JSON.stringify(message) + consume_messages_from_socket_1.MESSAGE_END_SEQ);
+ socket.write(JSON.stringify(message) + String.fromCodePoint(4));
  }
@@ -166,7 +166,7 @@ function createWorkerHandler(worker, pending, onload, onloadError, socket) {
  }
  },
  createDependenciesResult: ({ tx, ...result }) => {
- const { resolver, rejector } = getPendingPromise(tx, pending);
+ const { resolver, rejector } = pending.get(tx);
  if (result.success) {
  resolver(result.dependencies);
  }
@@ -175,7 +175,7 @@ function createWorkerHandler(worker, pending, onload, onloadError, socket) {
  }
  },
  createNodesResult: ({ tx, ...result }) => {
- const { resolver, rejector } = getPendingPromise(tx, pending);
+ const { resolver, rejector } = pending.get(tx);
  if (result.success) {
  resolver(result.result);
  }
@@ -184,7 +184,7 @@ function createWorkerHandler(worker, pending, onload, onloadError, socket) {
  }
  },
  createMetadataResult: ({ tx, ...result }) => {
- const { resolver, rejector } = getPendingPromise(tx, pending);
+ const { resolver, rejector } = pending.get(tx);
  if (result.success) {
  resolver(result.metadata);
  }
@@ -193,7 +193,7 @@ function createWorkerHandler(worker, pending, onload, onloadError, socket) {
  }
  },
  preTasksExecutionResult: ({ tx, ...result }) => {
- const { resolver, rejector } = getPendingPromise(tx, pending);
+ const { resolver, rejector } = pending.get(tx);
  if (result.success) {
  resolver(result.mutations);
  }
@@ -202,7 +202,7 @@ function createWorkerHandler(worker, pending, onload, onloadError, socket) {
  }
  },
  postTasksExecutionResult: ({ tx, ...result }) => {
- const { resolver, rejector } = getPendingPromise(tx, pending);
+ const { resolver, rejector } = pending.get(tx);
  if (result.success) {
  resolver();
  }
@@ -220,20 +220,6 @@ function createWorkerExitHandler(worker, pendingPromises) {
  }
  };
  }
- function getPendingPromise(tx, pending) {
- const pendingPromise = pending.get(tx);
- if (!pendingPromise) {
- throw new Error(`No pending promise found for transaction "${tx}". This may indicate a bug in the plugin pool. Currently pending promises:\n` +
- Array.from(pending.keys())
- .map((t) => ` - ${t}`)
- .join('\n'));
- }
- const { rejector, resolver } = pendingPromise;
- return {
- rejector,
- resolver,
- };
- }
  function registerPendingPromise(tx, pending, callback, context) {
  let resolver, rejector, timeout;
  const promise = new Promise((res, rej) => {
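Note: tx keys a map of pending promises that the plugin pool registers when it sends a request to the plugin worker; each *Result handler above settles the matching entry, now via a direct pending.get(tx). A minimal sketch of that pattern, not the actual plugin-pool code:

    type PendingPromise = { resolver: (value?: unknown) => void; rejector: (err: unknown) => void };
    const pending = new Map<string, PendingPromise>();

    function registerPending(tx: string): Promise<unknown> {
      return new Promise((resolve, reject) => {
        pending.set(tx, { resolver: resolve, rejector: reject });
      });
    }

    function settle(tx: string, result: unknown): void {
      // A missing tx now throws on this lookup/destructuring rather than producing
      // the descriptive error that the removed getPendingPromise helper used to build.
      const { resolver } = pending.get(tx)!;
      resolver(result);
      pending.delete(tx);
    }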
@@ -64,8 +64,6 @@ class LoadedNxPlugin {
  });
  }
  await plugin.preTasksExecution(this.options, context);
- // This doesn't revert env changes, as the proxy still updates
- // originalEnv, rather it removes the proxy.
  process.env = originalEnv;
  return updates;
  };
@@ -4,6 +4,7 @@ exports.isTuiEnabled = isTuiEnabled;
  exports.shouldUseTui = shouldUseTui;
  const native_1 = require("../native");
  const is_ci_1 = require("../utils/is-ci");
+ const logger_1 = require("../utils/logger");
  let tuiEnabled = undefined;
  /**
  * @returns If tui is enabled
@@ -25,6 +26,13 @@ function shouldUseTui(nxJson, nxArgs, skipCapabilityCheck = process.env.NX_TUI_S
  // If the current terminal/environment is not capable of displaying the TUI, we don't run it
  const isWindows = process.platform === 'win32';
  const isCapable = skipCapabilityCheck || (process.stderr.isTTY && isUnicodeSupported());
+ if (typeof nxArgs.tui === 'boolean') {
+ if (nxArgs.tui && !isCapable) {
+ logger_1.logger.warn('Nx Terminal UI was not enabled as it is not supported in this environment.');
+ return false;
+ }
+ return nxArgs.tui;
+ }
  if (!isCapable) {
  return false;
  }
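Note: the added block gives an explicit boolean nxArgs.tui (e.g. set from a --tui command-line flag) the final say, except that an incapable terminal still forces the TUI off and logs a warning. Roughly, as a simplified sketch (resolveTuiFlag is illustrative, not an nx export):

    function resolveTuiFlag(explicit: boolean | undefined, isCapable: boolean): boolean | undefined {
      if (typeof explicit === 'boolean') {
        if (explicit && !isCapable) {
          console.warn('Nx Terminal UI was not enabled as it is not supported in this environment.');
          return false; // requested, but the terminal cannot render it
        }
        return explicit; // explicit opt-in/opt-out wins otherwise
      }
      return undefined; // fall through to the existing capability/config checks
    }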
@@ -77,13 +77,13 @@ class PseudoIPCServer {
  this.sockets.forEach((socket) => {
  socket.write(JSON.stringify({ type: 'TO_CHILDREN_FROM_PARENT', message }));
  // send EOT to indicate that the message has been fully written
- socket.write(consume_messages_from_socket_1.MESSAGE_END_SEQ);
+ socket.write(String.fromCodePoint(4));
  });
  }
  sendMessageToChild(id, message) {
  this.sockets.forEach((socket) => {
  socket.write(JSON.stringify({ type: 'TO_CHILDREN_FROM_PARENT', id, message }));
- socket.write(consume_messages_from_socket_1.MESSAGE_END_SEQ);
+ socket.write(String.fromCodePoint(4));
  });
  }
  onMessageFromChildren(onMessage, onClose = () => { }, onError = (err) => { }) {
@@ -107,7 +107,7 @@ class PseudoIPCClient {
  sendMessageToParent(message) {
  this.socket.write(JSON.stringify({ type: 'TO_PARENT_FROM_CHILDREN', message }));
  // send EOT to indicate that the message has been fully written
- this.socket.write(consume_messages_from_socket_1.MESSAGE_END_SEQ);
+ this.socket.write(String.fromCodePoint(4));
  }
  notifyChildIsReady(id) {
  this.socket.write(JSON.stringify({
@@ -115,7 +115,7 @@ class PseudoIPCClient {
  message: id,
  }));
  // send EOT to indicate that the message has been fully written
- this.socket.write(consume_messages_from_socket_1.MESSAGE_END_SEQ);
+ this.socket.write(String.fromCodePoint(4));
  }
  onMessageFromParent(forkId, onMessage, onClose = () => { }, onError = (err) => { }) {
  this.socket.on('data', (0, consume_messages_from_socket_1.consumeMessagesFromSocket)(async (rawMessage) => {
@@ -128,7 +128,7 @@ async function getTerminalOutputLifeCycle(initiatingProject, initiatingTasks, pr
  const lifeCycles = [tsLifeCycle];
  // Only run the TUI if there are tasks to run
  if (tasks.length > 0) {
- appLifeCycle = new AppLifeCycle(tasks, initiatingTasks.map((t) => t.id), isRunOne ? 0 /* RunMode.RunOne */ : 1 /* RunMode.RunMany */, pinnedTasks, nxArgs ?? {}, nxJson.tui ?? {}, titleText, workspace_root_1.workspaceRoot);
+ appLifeCycle = new AppLifeCycle(tasks, initiatingTasks.map((t) => t.id), isRunOne ? 0 /* RunMode.RunOne */ : 1 /* RunMode.RunMany */, pinnedTasks, nxArgs ?? {}, nxJson.tui ?? {}, titleText, workspace_root_1.workspaceRoot, taskGraph);
  lifeCycles.unshift(appLifeCycle);
  /**
  * Patch stdout.write and stderr.write methods to pass Nx Cloud client logs to the TUI via the lifecycle