nx 19.4.0-canary.20240621-472459d → 19.4.0-canary.20240626-3a2e8d4

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (34)
  1. package/package.json +12 -12
  2. package/release/changelog-renderer/index.d.ts +6 -0
  3. package/release/changelog-renderer/index.js +1 -1
  4. package/src/command-line/init/implementation/dot-nx/add-nx-scripts.js +5 -6
  5. package/src/command-line/release/changelog.js +6 -1
  6. package/src/command-line/release/config/config.js +3 -0
  7. package/src/core/graph/main.js +1 -1
  8. package/src/daemon/client/client.d.ts +5 -0
  9. package/src/daemon/client/client.js +14 -0
  10. package/src/daemon/message-types/task-history.d.ts +13 -0
  11. package/src/daemon/message-types/task-history.js +19 -0
  12. package/src/daemon/server/handle-get-task-history.d.ts +4 -0
  13. package/src/daemon/server/handle-get-task-history.js +12 -0
  14. package/src/daemon/server/handle-write-task-runs-to-history.d.ts +5 -0
  15. package/src/daemon/server/handle-write-task-runs-to-history.js +12 -0
  16. package/src/daemon/server/plugins.js +12 -2
  17. package/src/daemon/server/server.js +9 -0
  18. package/src/project-graph/plugins/isolation/index.d.ts +1 -1
  19. package/src/project-graph/plugins/isolation/index.js +11 -4
  20. package/src/project-graph/plugins/isolation/plugin-pool.d.ts +1 -1
  21. package/src/project-graph/plugins/isolation/plugin-pool.js +10 -4
  22. package/src/project-graph/project-graph.js +7 -1
  23. package/src/project-graph/utils/project-configuration-utils.js +1 -1
  24. package/src/tasks-runner/default-tasks-runner.js +2 -2
  25. package/src/tasks-runner/life-cycle.d.ts +10 -10
  26. package/src/tasks-runner/life-cycle.js +10 -10
  27. package/src/tasks-runner/life-cycles/task-history-life-cycle.d.ts +9 -0
  28. package/src/tasks-runner/life-cycles/task-history-life-cycle.js +54 -0
  29. package/src/tasks-runner/run-command.js +6 -0
  30. package/src/tasks-runner/task-orchestrator.js +4 -4
  31. package/src/utils/serialize-target.d.ts +1 -0
  32. package/src/utils/serialize-target.js +7 -0
  33. package/src/utils/task-history.d.ts +8 -0
  34. package/src/utils/task-history.js +97 -0
@@ -6,6 +6,7 @@ import { Hash } from '../../hasher/task-hasher';
 import { Task, TaskGraph } from '../../config/task-graph';
 import { ConfigurationSourceMaps } from '../../project-graph/utils/project-configuration-utils';
 import { NxWorkspaceFiles } from '../../native';
+import { TaskRun } from '../../utils/task-history';
 export type UnregisterCallback = () => void;
 export type ChangedFile = {
     path: string;
@@ -51,6 +52,10 @@ export declare class DaemonClient {
     getWorkspaceFiles(projectRootMap: Record<string, string>): Promise<NxWorkspaceFiles>;
     getFilesInDirectory(dir: string): Promise<string[]>;
     hashGlob(globs: string[], exclude?: string[]): Promise<string>;
+    getTaskHistoryForHashes(hashes: string[]): Promise<{
+        [hash: string]: TaskRun[];
+    }>;
+    writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void>;
     isServerAvailable(): Promise<boolean>;
     private sendToDaemonViaQueue;
     private setUpConnection;
@@ -221,6 +221,20 @@ class DaemonClient {
         };
         return this.sendToDaemonViaQueue(message);
     }
+    getTaskHistoryForHashes(hashes) {
+        const message = {
+            type: 'GET_TASK_HISTORY_FOR_HASHES',
+            hashes,
+        };
+        return this.sendToDaemonViaQueue(message);
+    }
+    writeTaskRunsToHistory(taskRuns) {
+        const message = {
+            type: 'WRITE_TASK_RUNS_TO_HISTORY',
+            taskRuns,
+        };
+        return this.sendMessageToDaemon(message);
+    }
     async isServerAvailable() {
         return new Promise((resolve) => {
             try {
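
Editor's note: the two new client methods follow the existing daemon request pattern of building a typed message and sending it to the daemon. Below is a minimal usage sketch, not part of this diff; the import paths are the package's internal file paths (not necessarily public API), and the surrounding helper function is hypothetical.

    // Sketch only: exercises the DaemonClient methods added above.
    import { daemonClient } from 'nx/src/daemon/client/client';
    import { TaskRun } from 'nx/src/utils/task-history';

    // Hypothetical helper: report how many recorded runs exist per task hash.
    async function checkPreviousRuns(hashes: string[]): Promise<void> {
      // Returns a map of task hash -> all recorded runs for that hash.
      const history: { [hash: string]: TaskRun[] } =
        await daemonClient.getTaskHistoryForHashes(hashes);
      for (const [hash, runs] of Object.entries(history)) {
        console.log(`${hash}: ${runs.length} recorded run(s)`);
      }
    }
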
@@ -0,0 +1,13 @@
+import { TaskRun } from '../../utils/task-history';
+export declare const GET_TASK_HISTORY_FOR_HASHES: "GET_TASK_HISTORY_FOR_HASHES";
+export type HandleGetTaskHistoryForHashesMessage = {
+    type: typeof GET_TASK_HISTORY_FOR_HASHES;
+    hashes: string[];
+};
+export declare function isHandleGetTaskHistoryForHashesMessage(message: unknown): message is HandleGetTaskHistoryForHashesMessage;
+export declare const WRITE_TASK_RUNS_TO_HISTORY: "WRITE_TASK_RUNS_TO_HISTORY";
+export type HandleWriteTaskRunsToHistoryMessage = {
+    type: typeof WRITE_TASK_RUNS_TO_HISTORY;
+    taskRuns: TaskRun[];
+};
+export declare function isHandleWriteTaskRunsToHistoryMessage(message: unknown): message is HandleWriteTaskRunsToHistoryMessage;
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isHandleWriteTaskRunsToHistoryMessage = exports.WRITE_TASK_RUNS_TO_HISTORY = exports.isHandleGetTaskHistoryForHashesMessage = exports.GET_TASK_HISTORY_FOR_HASHES = void 0;
+exports.GET_TASK_HISTORY_FOR_HASHES = 'GET_TASK_HISTORY_FOR_HASHES';
+function isHandleGetTaskHistoryForHashesMessage(message) {
+    return (typeof message === 'object' &&
+        message !== null &&
+        'type' in message &&
+        message['type'] === exports.GET_TASK_HISTORY_FOR_HASHES);
+}
+exports.isHandleGetTaskHistoryForHashesMessage = isHandleGetTaskHistoryForHashesMessage;
+exports.WRITE_TASK_RUNS_TO_HISTORY = 'WRITE_TASK_RUNS_TO_HISTORY';
+function isHandleWriteTaskRunsToHistoryMessage(message) {
+    return (typeof message === 'object' &&
+        message !== null &&
+        'type' in message &&
+        message['type'] === exports.WRITE_TASK_RUNS_TO_HISTORY);
+}
+exports.isHandleWriteTaskRunsToHistoryMessage = isHandleWriteTaskRunsToHistoryMessage;
@@ -0,0 +1,4 @@
+export declare function handleGetTaskHistoryForHashes(hashes: string[]): Promise<{
+    response: string;
+    description: string;
+}>;
@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.handleGetTaskHistoryForHashes = void 0;
+const task_history_1 = require("../../utils/task-history");
+async function handleGetTaskHistoryForHashes(hashes) {
+    const history = await (0, task_history_1.getHistoryForHashes)(hashes);
+    return {
+        response: JSON.stringify(history),
+        description: 'handleGetTaskHistoryForHashes',
+    };
+}
+exports.handleGetTaskHistoryForHashes = handleGetTaskHistoryForHashes;
@@ -0,0 +1,5 @@
+import { TaskRun } from '../../utils/task-history';
+export declare function handleWriteTaskRunsToHistory(taskRuns: TaskRun[]): Promise<{
+    response: string;
+    description: string;
+}>;
@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.handleWriteTaskRunsToHistory = void 0;
+const task_history_1 = require("../../utils/task-history");
+async function handleWriteTaskRunsToHistory(taskRuns) {
+    await (0, task_history_1.writeTaskRunsToHistory)(taskRuns);
+    return {
+        response: 'true',
+        description: 'handleWriteTaskRunsToHistory',
+    };
+}
+exports.handleWriteTaskRunsToHistory = handleWriteTaskRunsToHistory;
@@ -1,16 +1,26 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.cleanupPlugins = exports.getPlugins = void 0;
+const file_hasher_1 = require("../../hasher/file-hasher");
 const nx_json_1 = require("../../config/nx-json");
 const internal_api_1 = require("../../project-graph/plugins/internal-api");
 const workspace_root_1 = require("../../utils/workspace-root");
+let currentPluginsConfigurationHash;
 let loadedPlugins;
 let cleanup;
 async function getPlugins() {
-    if (loadedPlugins) {
+    const pluginsConfiguration = (0, nx_json_1.readNxJson)().plugins ?? [];
+    const pluginsConfigurationHash = (0, file_hasher_1.hashObject)(pluginsConfiguration);
+    // If the plugins configuration has not changed, reuse the current plugins
+    if (loadedPlugins &&
+        pluginsConfigurationHash === currentPluginsConfigurationHash) {
         return loadedPlugins;
     }
-    const pluginsConfiguration = (0, nx_json_1.readNxJson)().plugins ?? [];
+    // Cleanup current plugins before loading new ones
+    if (cleanup) {
+        cleanup();
+    }
+    currentPluginsConfigurationHash = pluginsConfigurationHash;
     const [result, cleanupFn] = await (0, internal_api_1.loadNxPlugins)(pluginsConfiguration, workspace_root_1.workspaceRoot);
     cleanup = cleanupFn;
     loadedPlugins = result;
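
Editor's note: the change above makes the daemon reuse loaded plugins only while the hash of the nx.json `plugins` entry stays the same, and run the previous cleanup before reloading when it changes. The generic shape of that pattern is sketched below; this is not the nx implementation (nx uses its own hashObject helper), and all names here are invented for illustration.

    // Memoize-by-config-hash sketch: reuse a cached value while its
    // configuration hash is unchanged, dispose the old one otherwise.
    import { createHash } from 'crypto';

    let cachedValue: unknown | undefined;
    let cachedConfigHash: string | undefined;
    let cleanupPrevious: (() => void) | undefined;

    async function getOrReload<T>(
      config: object,
      load: () => Promise<[T, () => void]>
    ): Promise<T> {
      const configHash = createHash('sha256')
        .update(JSON.stringify(config))
        .digest('hex');
      if (cachedValue !== undefined && configHash === cachedConfigHash) {
        return cachedValue as T;
      }
      cleanupPrevious?.(); // dispose whatever was loaded for the old configuration
      const [value, cleanup] = await load();
      cachedValue = value;
      cachedConfigHash = configHash;
      cleanupPrevious = cleanup;
      return value;
    }
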
@@ -35,6 +35,9 @@ const get_files_in_directory_1 = require("../message-types/get-files-in-director
 const handle_get_files_in_directory_1 = require("./handle-get-files-in-directory");
 const hash_glob_1 = require("../message-types/hash-glob");
 const handle_hash_glob_1 = require("./handle-hash-glob");
+const task_history_1 = require("../message-types/task-history");
+const handle_get_task_history_1 = require("./handle-get-task-history");
+const handle_write_task_runs_to_history_1 = require("./handle-write-task-runs-to-history");
 let performanceObserver;
 let workspaceWatcherError;
 let outputsWatcherError;
@@ -121,6 +124,12 @@ async function handleMessage(socket, data) {
     else if ((0, hash_glob_1.isHandleHashGlobMessage)(payload)) {
         await handleResult(socket, hash_glob_1.HASH_GLOB, () => (0, handle_hash_glob_1.handleHashGlob)(payload.globs, payload.exclude));
     }
+    else if ((0, task_history_1.isHandleGetTaskHistoryForHashesMessage)(payload)) {
+        await handleResult(socket, 'GET_TASK_HISTORY_FOR_HASHES', () => (0, handle_get_task_history_1.handleGetTaskHistoryForHashes)(payload.hashes));
+    }
+    else if ((0, task_history_1.isHandleWriteTaskRunsToHistoryMessage)(payload)) {
+        await handleResult(socket, 'WRITE_TASK_RUNS_TO_HISTORY', () => (0, handle_write_task_runs_to_history_1.handleWriteTaskRunsToHistory)(payload.taskRuns));
+    }
     else {
         await (0, shutdown_utils_1.respondWithErrorAndExit)(socket, `Invalid payload from the client`, new Error(`Unsupported payload sent to daemon server: ${unparsedPayload}`));
     }
@@ -1,3 +1,3 @@
 import { PluginConfiguration } from '../../../config/nx-json';
 import { LoadedNxPlugin } from '../internal-api';
-export declare function loadNxPluginInIsolation(plugin: PluginConfiguration, root?: string): [Promise<LoadedNxPlugin>, () => void];
+export declare function loadNxPluginInIsolation(plugin: PluginConfiguration, root?: string): readonly [Promise<LoadedNxPlugin>, () => void];
@@ -10,11 +10,18 @@ const remotePluginCache = new Map();
 function loadNxPluginInIsolation(plugin, root = workspace_root_1.workspaceRoot) {
     const cacheKey = JSON.stringify(plugin);
     if (remotePluginCache.has(cacheKey)) {
-        return [remotePluginCache.get(cacheKey), () => { }];
+        return remotePluginCache.get(cacheKey);
     }
-    const loadingPlugin = (0, plugin_pool_1.loadRemoteNxPlugin)(plugin, root);
-    remotePluginCache.set(cacheKey, loadingPlugin);
+    const [loadingPlugin, cleanup] = (0, plugin_pool_1.loadRemoteNxPlugin)(plugin, root);
     // We clean up plugin workers when Nx process completes.
-    return [loadingPlugin, () => { }];
+    const val = [
+        loadingPlugin,
+        () => {
+            cleanup();
+            remotePluginCache.delete(cacheKey);
+        },
+    ];
+    remotePluginCache.set(cacheKey, val);
+    return val;
 }
 exports.loadNxPluginInIsolation = loadNxPluginInIsolation;
@@ -1,3 +1,3 @@
 import { PluginConfiguration } from '../../../config/nx-json';
 import { LoadedNxPlugin } from '../internal-api';
-export declare function loadRemoteNxPlugin(plugin: PluginConfiguration, root: string): Promise<LoadedNxPlugin>;
+export declare function loadRemoteNxPlugin(plugin: PluginConfiguration, root: string): [Promise<LoadedNxPlugin>, () => void];
@@ -42,10 +42,16 @@ function loadRemoteNxPlugin(plugin, root) {
         shutdownPluginWorker(worker);
     };
     cleanupFunctions.add(cleanupFunction);
-    return new Promise((res, rej) => {
-        worker.on('message', createWorkerHandler(worker, pendingPromises, res, rej));
-        worker.on('exit', exitHandler);
-    });
+    return [
+        new Promise((res, rej) => {
+            worker.on('message', createWorkerHandler(worker, pendingPromises, res, rej));
+            worker.on('exit', exitHandler);
+        }),
+        () => {
+            cleanupFunction();
+            cleanupFunctions.delete(cleanupFunction);
+        },
+    ];
 }
 exports.loadRemoteNxPlugin = loadRemoteNxPlugin;
 function shutdownPluginWorker(worker) {
@@ -110,7 +110,13 @@ async function buildProjectGraphAndSourceMapsWithoutDaemon() {
         }
     }
     finally {
-        cleanup();
+        // When plugins are isolated we don't clean them up during
+        // a single run of the CLI. They are cleaned up when the CLI
+        // process exits. Cleaning them here could cause issues if pending
+        // promises are not resolved.
+        if (process.env.NX_ISOLATE_PLUGINS !== 'true') {
+            cleanup();
+        }
     }
     const { projectGraph, projectFileMapCache } = projectGraphResult;
     perf_hooks_1.performance.mark('build-project-graph-using-project-file-map:end');
@@ -293,7 +293,7 @@ function mergeCreateNodesResults(results, errors) {
     const externalNodes = {};
     const configurationSourceMaps = {};
     for (const result of results.flat()) {
-        const [file, pluginName, nodes] = result;
+        const [pluginName, file, nodes] = result;
         const { projects: projectNodes, externalNodes: pluginExternalNodes } = nodes;
         const sourceInfo = [file, pluginName];
         if (result[symbols_1.OVERRIDE_SOURCE_FILE]) {
@@ -13,12 +13,12 @@ const defaultTasksRunner = async (tasks, options, context) => {
         options['parallel'] === '') {
         options['parallel'] = Number(options['maxParallel'] || 3);
     }
-    options.lifeCycle.startCommand();
+    await options.lifeCycle.startCommand();
     try {
         return await runAllTasks(tasks, options, context);
     }
     finally {
-        options.lifeCycle.endCommand();
+        await options.lifeCycle.endCommand();
     }
 };
 exports.defaultTasksRunner = defaultTasksRunner;
@@ -10,9 +10,9 @@ export interface TaskMetadata {
     groupId: number;
 }
 export interface LifeCycle {
-    startCommand?(): void;
-    endCommand?(): void;
-    scheduleTask?(task: Task): void;
+    startCommand?(): void | Promise<void>;
+    endCommand?(): void | Promise<void>;
+    scheduleTask?(task: Task): void | Promise<void>;
     /**
      * @deprecated use startTasks
      *
@@ -25,19 +25,19 @@ export interface LifeCycle {
      * endTask won't be supported after Nx 14 is released.
      */
     endTask?(task: Task, code: number): void;
-    startTasks?(task: Task[], metadata: TaskMetadata): void;
-    endTasks?(taskResults: TaskResult[], metadata: TaskMetadata): void;
+    startTasks?(task: Task[], metadata: TaskMetadata): void | Promise<void>;
+    endTasks?(taskResults: TaskResult[], metadata: TaskMetadata): void | Promise<void>;
     printTaskTerminalOutput?(task: Task, status: TaskStatus, output: string): void;
 }
 export declare class CompositeLifeCycle implements LifeCycle {
     private readonly lifeCycles;
     constructor(lifeCycles: LifeCycle[]);
-    startCommand(): void;
-    endCommand(): void;
-    scheduleTask(task: Task): void;
+    startCommand(): Promise<void>;
+    endCommand(): Promise<void>;
+    scheduleTask(task: Task): Promise<void>;
     startTask(task: Task): void;
     endTask(task: Task, code: number): void;
-    startTasks(tasks: Task[], metadata: TaskMetadata): void;
-    endTasks(taskResults: TaskResult[], metadata: TaskMetadata): void;
+    startTasks(tasks: Task[], metadata: TaskMetadata): Promise<void>;
+    endTasks(taskResults: TaskResult[], metadata: TaskMetadata): Promise<void>;
     printTaskTerminalOutput(task: Task, status: TaskStatus, output: string): void;
 }
@@ -5,24 +5,24 @@ class CompositeLifeCycle {
     constructor(lifeCycles) {
         this.lifeCycles = lifeCycles;
     }
-    startCommand() {
+    async startCommand() {
         for (let l of this.lifeCycles) {
             if (l.startCommand) {
-                l.startCommand();
+                await l.startCommand();
             }
         }
     }
-    endCommand() {
+    async endCommand() {
         for (let l of this.lifeCycles) {
             if (l.endCommand) {
-                l.endCommand();
+                await l.endCommand();
             }
         }
     }
-    scheduleTask(task) {
+    async scheduleTask(task) {
         for (let l of this.lifeCycles) {
             if (l.scheduleTask) {
-                l.scheduleTask(task);
+                await l.scheduleTask(task);
             }
         }
     }
@@ -40,20 +40,20 @@ class CompositeLifeCycle {
             }
         }
     }
-    startTasks(tasks, metadata) {
+    async startTasks(tasks, metadata) {
         for (let l of this.lifeCycles) {
             if (l.startTasks) {
-                l.startTasks(tasks, metadata);
+                await l.startTasks(tasks, metadata);
             }
             else if (l.startTask) {
                 tasks.forEach((t) => l.startTask(t));
             }
         }
     }
-    endTasks(taskResults, metadata) {
+    async endTasks(taskResults, metadata) {
         for (let l of this.lifeCycles) {
             if (l.endTasks) {
-                l.endTasks(taskResults, metadata);
+                await l.endTasks(taskResults, metadata);
             }
             else if (l.endTask) {
                 taskResults.forEach((t) => l.endTask(t.task, t.code));
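
Editor's note: with the interface change above, the LifeCycle hooks may return promises and CompositeLifeCycle awaits each delegate in turn, so life cycles can now do asynchronous work at command boundaries. A minimal custom life cycle taking advantage of this is sketched below; it is not part of this diff, the deep import path is the package's internal file path, and the class and output file name are invented.

    // Sketch: a custom async life cycle, assuming the LifeCycle/TaskResult
    // types declared in life-cycle.d.ts above.
    import { writeFile } from 'fs/promises';
    import type { LifeCycle, TaskResult } from 'nx/src/tasks-runner/life-cycle';

    export class AuditLogLifeCycle implements LifeCycle {
      private results: TaskResult[] = [];

      async endTasks(taskResults: TaskResult[]): Promise<void> {
        this.results.push(...taskResults);
      }

      async endCommand(): Promise<void> {
        // Because endCommand may now return a promise, the runner waits
        // for this write before the command finishes.
        await writeFile('audit-log.json', JSON.stringify(this.results, null, 2));
      }
    }
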
@@ -0,0 +1,9 @@
+import { Task } from '../../config/task-graph';
+import { LifeCycle, TaskResult } from '../life-cycle';
+export declare class TaskHistoryLifeCycle implements LifeCycle {
+    private startTimings;
+    private taskRuns;
+    startTasks(tasks: Task[]): void;
+    endTasks(taskResults: TaskResult[]): Promise<void>;
+    endCommand(): Promise<void>;
+}
@@ -0,0 +1,54 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TaskHistoryLifeCycle = void 0;
+const serialize_target_1 = require("../../utils/serialize-target");
+const output_1 = require("../../utils/output");
+const task_history_1 = require("../../utils/task-history");
+class TaskHistoryLifeCycle {
+    constructor() {
+        this.startTimings = {};
+        this.taskRuns = [];
+    }
+    startTasks(tasks) {
+        for (let task of tasks) {
+            this.startTimings[task.id] = new Date().getTime();
+        }
+    }
+    async endTasks(taskResults) {
+        const taskRuns = taskResults.map((taskResult) => ({
+            project: taskResult.task.target.project,
+            target: taskResult.task.target.target,
+            configuration: taskResult.task.target.configuration,
+            hash: taskResult.task.hash,
+            code: taskResult.code.toString(),
+            status: taskResult.status,
+            start: (taskResult.task.startTime ?? this.startTimings[taskResult.task.id]).toString(),
+            end: (taskResult.task.endTime ?? new Date().getTime()).toString(),
+        }));
+        this.taskRuns.push(...taskRuns);
+    }
+    async endCommand() {
+        await (0, task_history_1.writeTaskRunsToHistory)(this.taskRuns);
+        const history = await (0, task_history_1.getHistoryForHashes)(this.taskRuns.map((t) => t.hash));
+        const flakyTasks = [];
+        // check if any hash has different exit codes => flaky
+        for (let hash in history) {
+            if (history[hash].length > 1 &&
+                history[hash].some((run) => run.code !== history[hash][0].code)) {
+                flakyTasks.push((0, serialize_target_1.serializeTarget)(history[hash][0].project, history[hash][0].target, history[hash][0].configuration));
+            }
+        }
+        if (flakyTasks.length > 0) {
+            output_1.output.warn({
+                title: `Nx detected ${flakyTasks.length === 1 ? 'a flaky task' : ' flaky tasks'}`,
+                bodyLines: [
+                    ,
+                    ...flakyTasks.map((t) => ` ${t}`),
+                    '',
+                    `Flaky tasks can disrupt your CI pipeline. Automatically retry them with Nx Cloud. Learn more at https://nx.dev/ci/features/flaky-tasks`,
+                ],
+            });
+        }
+    }
+}
+exports.TaskHistoryLifeCycle = TaskHistoryLifeCycle;
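
Editor's note: the flaky-task check above treats a task as flaky when multiple recorded runs share the same task hash (same inputs) but have different exit codes. A worked example of that condition, with invented history entries, is shown below; it is illustration only, not part of this diff.

    // Two runs share hash "abc123" but differ in exit code, so my-app:e2e is
    // flagged as flaky; "def456" ran twice with the same code and is not.
    const history = {
      abc123: [
        { project: 'my-app', target: 'e2e', code: '0' },
        { project: 'my-app', target: 'e2e', code: '1' },
      ],
      def456: [
        { project: 'my-lib', target: 'build', code: '0' },
        { project: 'my-lib', target: 'build', code: '0' },
      ],
    };
    const flaky = Object.values(history)
      .filter((runs) => runs.length > 1 && runs.some((r) => r.code !== runs[0].code))
      .map((runs) => `${runs[0].project}:${runs[0].target}`);
    console.log(flaky); // ['my-app:e2e']
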
@@ -14,6 +14,7 @@ const dynamic_run_many_terminal_output_life_cycle_1 = require("./life-cycles/dyn
 const task_profiling_life_cycle_1 = require("./life-cycles/task-profiling-life-cycle");
 const is_ci_1 = require("../utils/is-ci");
 const dynamic_run_one_terminal_output_life_cycle_1 = require("./life-cycles/dynamic-run-one-terminal-output-life-cycle");
+const nx_json_1 = require("../config/nx-json");
 const create_task_graph_1 = require("./create-task-graph");
 const task_graph_utils_1 = require("./task-graph-utils");
 const params_1 = require("../utils/params");
@@ -21,6 +22,8 @@ const hash_task_1 = require("../hasher/hash-task");
 const client_1 = require("../daemon/client/client");
 const store_run_information_life_cycle_1 = require("./life-cycles/store-run-information-life-cycle");
 const create_task_hasher_1 = require("../hasher/create-task-hasher");
+const task_history_life_cycle_1 = require("./life-cycles/task-history-life-cycle");
+const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
 async function getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides) {
     const { runnerOptions } = getRunner(nxArgs, nxJson);
     const isRunOne = initiatingProject != null;
@@ -203,6 +206,9 @@ function constructLifeCycles(lifeCycle) {
     if (process.env.NX_PROFILE) {
         lifeCycles.push(new task_profiling_life_cycle_1.TaskProfilingLifeCycle(process.env.NX_PROFILE));
     }
+    if (!(0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())) {
+        lifeCycles.push(new task_history_life_cycle_1.TaskHistoryLifeCycle());
+    }
     return lifeCycles;
 }
 function mergeTargetDependencies(defaults, deps) {
@@ -88,7 +88,7 @@ class TaskOrchestrator {
         if (!task.hash) {
             await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv);
         }
-        this.options.lifeCycle.scheduleTask(task);
+        await this.options.lifeCycle.scheduleTask(task);
         return taskSpecificEnv;
     }
     async processScheduledBatch(batch) {
@@ -96,7 +96,7 @@ class TaskOrchestrator {
             if (!task.hash) {
                 await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv);
             }
-            this.options.lifeCycle.scheduleTask(task);
+            await this.options.lifeCycle.scheduleTask(task);
         }));
     }
     processAllScheduledTasks() {
@@ -312,7 +312,7 @@ class TaskOrchestrator {
     // endregion Single Task
     // region Lifecycle
     async preRunSteps(tasks, metadata) {
-        this.options.lifeCycle.startTasks(tasks, metadata);
+        await this.options.lifeCycle.startTasks(tasks, metadata);
     }
     async postRunSteps(tasks, results, doNotSkipCache, { groupId }) {
         for (const task of tasks) {
@@ -342,7 +342,7 @@ class TaskOrchestrator {
             perf_hooks_1.performance.mark('cache-results-end');
             perf_hooks_1.performance.measure('cache-results', 'cache-results-start', 'cache-results-end');
         }
-        this.options.lifeCycle.endTasks(results.map((result) => {
+        await this.options.lifeCycle.endTasks(results.map((result) => {
             const code = result.status === 'success' ||
                 result.status === 'local-cache' ||
                 result.status === 'local-cache-kept-existing' ||
@@ -0,0 +1 @@
+export declare function serializeTarget(project: any, target: any, configuration: any): string;
@@ -0,0 +1,7 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.serializeTarget = void 0;
+function serializeTarget(project, target, configuration) {
+    return [project, target, configuration].filter((part) => !!part).join(':');
+}
+exports.serializeTarget = serializeTarget;
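
Editor's note: serializeTarget simply joins the non-empty parts with colons. A usage example follows (the import path is the package's internal file path):

    import { serializeTarget } from 'nx/src/utils/serialize-target';

    serializeTarget('my-app', 'build', 'production'); // 'my-app:build:production'
    serializeTarget('my-app', 'build', undefined);    // 'my-app:build' (empty parts are filtered out)
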
@@ -0,0 +1,8 @@
+declare const taskRunKeys: readonly ["project", "target", "configuration", "hash", "code", "status", "start", "end"];
+export type TaskRun = Record<(typeof taskRunKeys)[number], string>;
+export declare function getHistoryForHashes(hashes: string[]): Promise<{
+    [hash: string]: TaskRun[];
+}>;
+export declare function writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void>;
+export declare const taskHistoryFile: string;
+export {};
@@ -0,0 +1,97 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.taskHistoryFile = exports.writeTaskRunsToHistory = exports.getHistoryForHashes = void 0;
+const fs_1 = require("fs");
+const path_1 = require("path");
+const client_1 = require("../daemon/client/client");
+const is_on_daemon_1 = require("../daemon/is-on-daemon");
+const cache_directory_1 = require("./cache-directory");
+const taskRunKeys = [
+    'project',
+    'target',
+    'configuration',
+    'hash',
+    'code',
+    'status',
+    'start',
+    'end',
+];
+let taskHistory = undefined;
+let taskHashToIndicesMap = new Map();
+async function getHistoryForHashes(hashes) {
+    if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
+        if (taskHistory === undefined) {
+            loadTaskHistoryFromDisk();
+        }
+        const result = {};
+        for (let hash of hashes) {
+            const indices = taskHashToIndicesMap.get(hash);
+            if (!indices) {
+                result[hash] = [];
+            }
+            else {
+                result[hash] = indices.map((index) => taskHistory[index]);
+            }
+        }
+        return result;
+    }
+    return await client_1.daemonClient.getTaskHistoryForHashes(hashes);
+}
+exports.getHistoryForHashes = getHistoryForHashes;
+async function writeTaskRunsToHistory(taskRuns) {
+    if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
+        if (taskHistory === undefined) {
+            loadTaskHistoryFromDisk();
+        }
+        const serializedLines = [];
+        for (let taskRun of taskRuns) {
+            const serializedLine = taskRunKeys.map((key) => taskRun[key]).join(',');
+            serializedLines.push(serializedLine);
+            recordTaskRunInMemory(taskRun);
+        }
+        if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
+            (0, fs_1.writeFileSync)(exports.taskHistoryFile, `${taskRunKeys.join(',')}\n`);
+        }
+        (0, fs_1.appendFileSync)(exports.taskHistoryFile, serializedLines.join('\n') + '\n');
+    }
+    else {
+        await client_1.daemonClient.writeTaskRunsToHistory(taskRuns);
+    }
+}
+exports.writeTaskRunsToHistory = writeTaskRunsToHistory;
+exports.taskHistoryFile = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'task-history.csv');
+function loadTaskHistoryFromDisk() {
+    taskHashToIndicesMap.clear();
+    taskHistory = [];
+    if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
+        return;
+    }
+    const fileContent = (0, fs_1.readFileSync)(exports.taskHistoryFile, 'utf8');
+    if (!fileContent) {
+        return;
+    }
+    const lines = fileContent.split('\n');
+    // if there are no lines or just the header, return
+    if (lines.length <= 1) {
+        return;
+    }
+    const contentLines = lines.slice(1).filter((l) => l.trim() !== '');
+    // read the values from csv format where each header is a key and the value is the value
+    for (let line of contentLines) {
+        const values = line.trim().split(',');
+        const run = {};
+        taskRunKeys.forEach((header, index) => {
+            run[header] = values[index];
+        });
+        recordTaskRunInMemory(run);
+    }
+}
+function recordTaskRunInMemory(taskRun) {
+    const index = taskHistory.push(taskRun) - 1;
+    if (taskHashToIndicesMap.has(taskRun.hash)) {
+        taskHashToIndicesMap.get(taskRun.hash).push(index);
+    }
+    else {
+        taskHashToIndicesMap.set(taskRun.hash, [index]);
+    }
+}
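
Editor's note: writeTaskRunsToHistory appends to a plain CSV file (task-history.csv in the workspace data directory). The first line is a header built from taskRunKeys, and each subsequent line holds one task run in the same column order. Illustrative sample contents follow; the hashes and timestamps are invented:

    project,target,configuration,hash,code,status,start,end
    my-app,build,production,3f9a1c...,0,success,1719400000000,1719400004500
    my-app,e2e,,b61c7d...,1,failure,1719400005000,1719400061000
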