nx 19.7.0-canary.20240821-2065033 → 19.7.0-canary.20240822-d6a0cfb
- package/package.json +12 -12
- package/src/command-line/add/add.js +1 -5
- package/src/command-line/add/command-object.js +1 -5
- package/src/command-line/affected/affected.js +0 -3
- package/src/command-line/exec/exec.js +0 -3
- package/src/command-line/generate/command-object.js +2 -5
- package/src/command-line/generate/generate.js +4 -8
- package/src/command-line/import/command-object.js +1 -1
- package/src/command-line/migrate/command-object.js +3 -2
- package/src/command-line/migrate/migrate.js +0 -3
- package/src/command-line/release/changelog.js +0 -3
- package/src/command-line/release/command-object.js +1 -5
- package/src/command-line/release/plan-check.js +0 -3
- package/src/command-line/release/plan.js +0 -3
- package/src/command-line/release/publish.js +0 -6
- package/src/command-line/release/release.js +0 -3
- package/src/command-line/release/version.js +0 -3
- package/src/command-line/repair/command-object.js +2 -4
- package/src/command-line/repair/repair.js +2 -6
- package/src/command-line/run/run-one.js +0 -3
- package/src/command-line/show/command-object.js +2 -2
- package/src/command-line/sync/command-object.js +3 -8
- package/src/command-line/sync/sync.js +1 -5
- package/src/command-line/watch/command-object.js +1 -1
- package/src/command-line/watch/watch.js +0 -3
- package/src/command-line/yargs-utils/shared-options.js +3 -3
- package/src/daemon/client/client.d.ts +3 -6
- package/src/daemon/client/client.js +5 -4
- package/src/daemon/message-types/task-history.d.ts +9 -9
- package/src/daemon/message-types/task-history.js +7 -7
- package/src/daemon/server/handle-task-history.d.ts +9 -0
- package/src/daemon/server/handle-task-history.js +28 -0
- package/src/daemon/server/server.js +4 -5
- package/src/hasher/hash-task.js +36 -1
- package/src/native/assert-supported-platform.js +1 -1
- package/src/native/index.d.ts +45 -4
- package/src/native/native-bindings.js +4 -0
- package/src/native/nx.wasi-browser.js +50 -36
- package/src/native/nx.wasi.cjs +48 -36
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/nx-cloud/update-manager.d.ts +2 -0
- package/src/nx-cloud/utilities/url-shorten.js +1 -1
- package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js +11 -6
- package/src/tasks-runner/cache.d.ts +21 -2
- package/src/tasks-runner/cache.js +118 -26
- package/src/tasks-runner/default-tasks-runner.d.ts +6 -0
- package/src/tasks-runner/default-tasks-runner.js +34 -1
- package/src/tasks-runner/life-cycles/task-history-life-cycle-old.d.ts +9 -0
- package/src/tasks-runner/life-cycles/task-history-life-cycle-old.js +54 -0
- package/src/tasks-runner/life-cycles/task-history-life-cycle.d.ts +1 -0
- package/src/tasks-runner/life-cycles/task-history-life-cycle.js +19 -21
- package/src/tasks-runner/run-command.js +3 -1
- package/src/tasks-runner/task-orchestrator.js +10 -1
- package/src/utils/cache-directory.d.ts +1 -0
- package/src/utils/cache-directory.js +7 -3
- package/src/utils/db-connection.d.ts +2 -0
- package/src/utils/db-connection.js +11 -0
- package/src/utils/legacy-task-history.d.ts +8 -0
- package/src/utils/legacy-task-history.js +87 -0
- package/src/utils/logger.js +1 -1
- package/src/utils/task-history.d.ts +6 -8
- package/src/utils/task-history.js +16 -88
- package/src/utils/workspace-context.js +1 -1
- package/src/daemon/server/handle-get-task-history.d.ts +0 -4
- package/src/daemon/server/handle-get-task-history.js +0 -11
- package/src/daemon/server/handle-write-task-runs-to-history.d.ts +0 -5
- package/src/daemon/server/handle-write-task-runs-to-history.js +0 -11
package/src/tasks-runner/cache.js

```diff
@@ -1,13 +1,103 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.Cache = void 0;
+exports.Cache = exports.DbCache = void 0;
 const workspace_root_1 = require("../utils/workspace-root");
 const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
+const default_tasks_runner_1 = require("./default-tasks-runner");
 const child_process_1 = require("child_process");
 const cache_directory_1 = require("../utils/cache-directory");
 const node_machine_id_1 = require("node-machine-id");
+const native_1 = require("../native");
+const db_connection_1 = require("../utils/db-connection");
+const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
+const nx_json_1 = require("../config/nx-json");
+const update_manager_1 = require("../nx-cloud/update-manager");
+const get_cloud_options_1 = require("../nx-cloud/utilities/get-cloud-options");
+class DbCache {
+    async setup() {
+        this.remoteCache = await this.getRemoteCache();
+    }
+    constructor(options) {
+        this.options = options;
+        this.cache = new native_1.NxCache(workspace_root_1.workspaceRoot, cache_directory_1.cacheDir, (0, db_connection_1.getDbConnection)());
+    }
+    async get(task) {
+        const res = this.cache.get(task.hash);
+        if (res) {
+            return {
+                ...res,
+                remote: false,
+            };
+        }
+        await this.setup();
+        if (this.remoteCache) {
+            // didn't find it locally but we have a remote cache
+            // attempt remote cache
+            const res = await this.remoteCache.retrieve(task.hash, this.cache.cacheDirectory);
+            if (res) {
+                this.cache.applyRemoteCacheResults(task.hash, res);
+                return {
+                    ...res,
+                    remote: true,
+                };
+            }
+            else {
+                return null;
+            }
+        }
+        else {
+            return null;
+        }
+    }
+    async put(task, terminalOutput, outputs, code) {
+        return tryAndRetry(async () => {
+            this.cache.put(task.hash, terminalOutput, outputs, code);
+            await this.setup();
+            if (this.remoteCache) {
+                await this.remoteCache.store(task.hash, this.cache.cacheDirectory, terminalOutput, code);
+            }
+        });
+    }
+    copyFilesFromCache(_, cachedResult, outputs) {
+        return tryAndRetry(async () => this.cache.copyFilesFromCache(cachedResult, outputs));
+    }
+    removeOldCacheRecords() {
+        return this.cache.removeOldCacheRecords();
+    }
+    temporaryOutputPath(task) {
+        return this.cache.getTaskOutputsPath(task.hash);
+    }
+    async getRemoteCache() {
+        if (this.remoteCachePromise) {
+            return this.remoteCachePromise;
+        }
+        this.remoteCachePromise = this._getRemoteCache();
+        return this.remoteCachePromise;
+    }
+    async _getRemoteCache() {
+        const nxJson = (0, nx_json_1.readNxJson)();
+        if ((0, nx_cloud_utils_1.isNxCloudUsed)(nxJson)) {
+            const options = (0, get_cloud_options_1.getCloudOptions)();
+            const { nxCloudClient } = await (0, update_manager_1.verifyOrUpdateNxCloudClient)(options);
+            if (nxCloudClient.remoteCache) {
+                return nxCloudClient.remoteCache;
+            }
+            else {
+                // old nx cloud instance
+                return await default_tasks_runner_1.RemoteCacheV2.fromCacheV1(this.options.nxCloudRemoteCache);
+            }
+        }
+        else {
+            return null;
+        }
+    }
+}
+exports.DbCache = DbCache;
+/**
+ * @deprecated Use the {@link DbCache} class instead. This will be removed in Nx 21.
+ */
 class Cache {
     constructor(options) {
         this.options = options;
@@ -44,7 +134,7 @@ class Cache {
             this._currentMachineId = await (0, node_machine_id_1.machineId)();
         }
         catch (e) {
-            if (process.env.NX_VERBOSE_LOGGING
+            if (process.env.NX_VERBOSE_LOGGING === 'true') {
                 console.log(`Unable to get machineId. Error: ${e.message}`);
             }
             this._currentMachineId = '';
@@ -71,7 +161,10 @@ class Cache {
         }
     }
     async put(task, terminalOutput, outputs, code) {
-        return
+        return tryAndRetry(async () => {
+            /**
+             * This is the directory with the cached artifacts
+             */
             const td = (0, path_1.join)(this.cachePath, task.hash);
             const tdCommit = (0, path_1.join)(this.cachePath, `${task.hash}.commit`);
             // might be left overs from partially-completed cache invocations
@@ -105,7 +198,7 @@ class Cache {
         });
     }
     async copyFilesFromCache(hash, cachedResult, outputs) {
-        return
+        return tryAndRetry(async () => {
             const expandedOutputs = await this.expandOutputsInCache(outputs, cachedResult);
             await Promise.all(expandedOutputs.map(async (f) => {
                 const cached = (0, path_1.join)(cachedResult.outputsPath, f);
@@ -213,27 +306,26 @@ class Cache {
         (0, fs_extra_1.mkdirSync)(path, { recursive: true });
         return path;
     }
-    tryAndRetry(fn) {
-        let attempts = 0;
-        const baseTimeout = 5;
-        // Generate a random number between 2 and 4 to raise to the power of attempts
-        const baseExponent = Math.random() * 2 + 2;
-        const _try = async () => {
-            try {
-                attempts++;
-                return await fn();
-            }
-            catch (e) {
-                // Max time is 5 * 4^3 = 20480ms
-                if (attempts === 6) {
-                    // After enough attempts, throw the error
-                    throw e;
-                }
-                await new Promise((res) => setTimeout(res, baseExponent ** attempts));
-                return await _try();
-            }
-        };
-        return _try();
-    }
 }
 exports.Cache = Cache;
+function tryAndRetry(fn) {
+    let attempts = 0;
+    // Generate a random number between 2 and 4 to raise to the power of attempts
+    const baseExponent = Math.random() * 2 + 2;
+    const _try = async () => {
+        try {
+            attempts++;
+            return await fn();
+        }
+        catch (e) {
+            // Max time is 5 * 4^3 = 20480ms
+            if (attempts === 6) {
+                // After enough attempts, throw the error
+                throw e;
+            }
+            await new Promise((res) => setTimeout(res, baseExponent ** attempts));
+            return await _try();
+        }
+    };
+    return _try();
+}
```
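The `tryAndRetry` helper above (now a module-level function rather than a `Cache` method) retries up to six attempts; between attempts it waits `baseExponent ** attempts` milliseconds, with `baseExponent` drawn once per call from the range [2, 4). A small worked example of the resulting delay schedule; the value 3 below is just a representative draw, not part of the source:

```ts
// Delay schedule behind tryAndRetry: five waits happen between the six
// attempts, and the wait before retry N is baseExponent ** N milliseconds.
const baseExponent = 3; // representative value from the [2, 4) range
const delaysMs = [1, 2, 3, 4, 5].map((attempt) => baseExponent ** attempt);
console.log(delaysMs); // [3, 9, 27, 81, 243] — sub-second waits even at the upper bound (4 ** 5 = 1024ms)
```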
package/src/tasks-runner/default-tasks-runner.d.ts

```diff
@@ -1,9 +1,15 @@
 import { TasksRunner } from './tasks-runner';
 import { LifeCycle } from './life-cycle';
+import { CachedResult } from '../native';
 export interface RemoteCache {
     retrieve: (hash: string, cacheDirectory: string) => Promise<boolean>;
     store: (hash: string, cacheDirectory: string) => Promise<boolean>;
 }
+export declare abstract class RemoteCacheV2 {
+    static fromCacheV1(cache: RemoteCache): Promise<RemoteCacheV2>;
+    abstract retrieve(hash: string, cacheDirectory: string): Promise<CachedResult | null>;
+    abstract store(hash: string, cacheDirectory: string, terminalOutput: string, code: number): Promise<boolean>;
+}
 export interface DefaultTasksRunnerOptions {
     parallel?: number;
     cacheableOperations?: string[];
```
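The new `RemoteCacheV2` contract resolves to a cached result (outputs path, terminal output, exit code) instead of the plain boolean that `RemoteCache` returns. A hedged sketch of a custom implementation against the shape declared above; the `HttpRemoteCache` name and its internals are hypothetical, only the method signatures come from the `.d.ts`:

```ts
// Local stand-in mirroring the fields the compiled adapter below returns;
// the real CachedResult type lives in the package's native bindings.
interface CachedResult {
  code: number;
  terminalOutput: string;
  outputsPath: string;
}

// Hypothetical v2 remote cache: retrieve() resolves to a CachedResult (or
// null on a miss), store() uploads the artifacts for a hash and reports success.
class HttpRemoteCache {
  async retrieve(hash: string, cacheDirectory: string): Promise<CachedResult | null> {
    // download artifacts for `hash` into `cacheDirectory` here (omitted)
    return null; // treat everything as a cache miss in this sketch
  }
  async store(
    hash: string,
    cacheDirectory: string,
    terminalOutput: string,
    code: number
  ): Promise<boolean> {
    // upload `${cacheDirectory}/${hash}`, the terminal output and exit code (omitted)
    return true;
  }
}
```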
package/src/tasks-runner/default-tasks-runner.js

```diff
@@ -1,7 +1,40 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.defaultTasksRunner = void 0;
+exports.defaultTasksRunner = exports.RemoteCacheV2 = void 0;
 const task_orchestrator_1 = require("./task-orchestrator");
+const cache_directory_1 = require("../utils/cache-directory");
+const promises_1 = require("fs/promises");
+const path_1 = require("path");
+class RemoteCacheV2 {
+    static async fromCacheV1(cache) {
+        await (0, promises_1.mkdir)((0, path_1.join)(cache_directory_1.cacheDir, 'terminalOutputs'), { recursive: true });
+        return {
+            retrieve: async (hash, cacheDirectory) => {
+                const res = await cache.retrieve(hash, cacheDirectory);
+                if (res) {
+                    const [terminalOutput, oldTerminalOutput, code] = await Promise.all([
+                        (0, promises_1.readFile)((0, path_1.join)(cacheDirectory, hash, 'terminalOutputs'), 'utf-8').catch(() => null),
+                        (0, promises_1.readFile)((0, path_1.join)(cache_directory_1.cacheDir, 'terminalOutputs', hash), 'utf-8').catch(() => null),
+                        (0, promises_1.readFile)((0, path_1.join)(cacheDirectory, hash, 'code'), 'utf-8').then((s) => +s),
+                    ]);
+                    return {
+                        outputsPath: cacheDirectory,
+                        terminalOutput: terminalOutput ?? oldTerminalOutput,
+                        code,
+                    };
+                }
+                else {
+                    return null;
+                }
+            },
+            store: async (hash, cacheDirectory, __, code) => {
+                await (0, promises_1.writeFile)((0, path_1.join)(cacheDirectory, hash, 'code'), code.toString());
+                return cache.store(hash, cacheDirectory);
+            },
+        };
+    }
+}
+exports.RemoteCacheV2 = RemoteCacheV2;
 const defaultTasksRunner = async (tasks, options, context) => {
     if (options['parallel'] === 'false' ||
         options['parallel'] === false) {
```
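`RemoteCacheV2.fromCacheV1` wraps a boolean-based v1 remote cache so that the exit code (and, where available, the terminal output) rides along as files inside the cache directory. A small usage sketch under assumptions: the deep import path and the `v1Cache` object are illustrative, only `fromCacheV1` and the v1 `retrieve`/`store` signatures come from the diff.

```ts
// Assumed deep import path; the class is exported from
// src/tasks-runner/default-tasks-runner in this package.
import { RemoteCacheV2 } from 'nx/src/tasks-runner/default-tasks-runner';

// Hypothetical v1 remote cache with the boolean-based contract.
const v1Cache = {
  retrieve: async (hash: string, cacheDirectory: string) => false, // always miss
  store: async (hash: string, cacheDirectory: string) => true,
};

async function main() {
  // The adapter persists the exit code under <cacheDirectory>/<hash>/code and
  // falls back to the shared terminalOutputs folder for older entries.
  const v2Cache = await RemoteCacheV2.fromCacheV1(v1Cache);
  const result = await v2Cache.retrieve('some-task-hash', '/tmp/nx-cache');
  console.log(result); // null on a miss, { outputsPath, terminalOutput, code } on a hit
}

main();
```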
package/src/tasks-runner/life-cycles/task-history-life-cycle-old.d.ts

```diff
@@ -0,0 +1,9 @@
+import { Task } from '../../config/task-graph';
+import { LifeCycle, TaskResult } from '../life-cycle';
+export declare class LegacyTaskHistoryLifeCycle implements LifeCycle {
+    private startTimings;
+    private taskRuns;
+    startTasks(tasks: Task[]): void;
+    endTasks(taskResults: TaskResult[]): Promise<void>;
+    endCommand(): Promise<void>;
+}
```
package/src/tasks-runner/life-cycles/task-history-life-cycle-old.js

```diff
@@ -0,0 +1,54 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LegacyTaskHistoryLifeCycle = void 0;
+const serialize_target_1 = require("../../utils/serialize-target");
+const output_1 = require("../../utils/output");
+const legacy_task_history_1 = require("../../utils/legacy-task-history");
+class LegacyTaskHistoryLifeCycle {
+    constructor() {
+        this.startTimings = {};
+        this.taskRuns = [];
+    }
+    startTasks(tasks) {
+        for (let task of tasks) {
+            this.startTimings[task.id] = new Date().getTime();
+        }
+    }
+    async endTasks(taskResults) {
+        const taskRuns = taskResults.map((taskResult) => ({
+            project: taskResult.task.target.project,
+            target: taskResult.task.target.target,
+            configuration: taskResult.task.target.configuration,
+            hash: taskResult.task.hash,
+            code: taskResult.code.toString(),
+            status: taskResult.status,
+            start: (taskResult.task.startTime ?? this.startTimings[taskResult.task.id]).toString(),
+            end: (taskResult.task.endTime ?? new Date().getTime()).toString(),
+        }));
+        this.taskRuns.push(...taskRuns);
+    }
+    async endCommand() {
+        await (0, legacy_task_history_1.writeTaskRunsToHistory)(this.taskRuns);
+        const history = await (0, legacy_task_history_1.getHistoryForHashes)(this.taskRuns.map((t) => t.hash));
+        const flakyTasks = [];
+        // check if any hash has different exit codes => flaky
+        for (let hash in history) {
+            if (history[hash].length > 1 &&
+                history[hash].some((run) => run.code !== history[hash][0].code)) {
+                flakyTasks.push((0, serialize_target_1.serializeTarget)(history[hash][0].project, history[hash][0].target, history[hash][0].configuration));
+            }
+        }
+        if (flakyTasks.length > 0) {
+            output_1.output.warn({
+                title: `Nx detected ${flakyTasks.length === 1 ? 'a flaky task' : ' flaky tasks'}`,
+                bodyLines: [
+                    ,
+                    ...flakyTasks.map((t) => ` ${t}`),
+                    '',
+                    `Flaky tasks can disrupt your CI pipeline. Automatically retry them with Nx Cloud. Learn more at https://nx.dev/ci/features/flaky-tasks`,
+                ],
+            });
+        }
+    }
+}
+exports.LegacyTaskHistoryLifeCycle = LegacyTaskHistoryLifeCycle;
```
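The flakiness rule this legacy life cycle applies is simple to state in isolation: a hash counts as flaky when it has more than one recorded run and the exit codes disagree. A tiny worked example (the hashes and codes below are made up):

```ts
// Worked example of the "same hash, different exit codes => flaky" check.
type Run = { code: string };
const history: Record<string, Run[]> = {
  aaa111: [{ code: '0' }, { code: '0' }], // stable: all runs exited 0
  bbb222: [{ code: '1' }, { code: '0' }], // flaky: exit codes differ
};

const flakyHashes = Object.keys(history).filter(
  (hash) =>
    history[hash].length > 1 &&
    history[hash].some((run) => run.code !== history[hash][0].code)
);

console.log(flakyHashes); // ['bbb222']
```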
package/src/tasks-runner/life-cycles/task-history-life-cycle.d.ts

```diff
@@ -3,6 +3,7 @@ import { LifeCycle, TaskResult } from '../life-cycle';
 export declare class TaskHistoryLifeCycle implements LifeCycle {
     private startTimings;
     private taskRuns;
+    private taskHistory;
     startTasks(tasks: Task[]): void;
     endTasks(taskResults: TaskResult[]): Promise<void>;
     endCommand(): Promise<void>;
```
package/src/tasks-runner/life-cycles/task-history-life-cycle.js

```diff
@@ -7,7 +7,8 @@ const task_history_1 = require("../../utils/task-history");
 class TaskHistoryLifeCycle {
     constructor() {
         this.startTimings = {};
-        this.taskRuns =
+        this.taskRuns = new Map();
+        this.taskHistory = new task_history_1.TaskHistory();
     }
     startTasks(tasks) {
         for (let task of tasks) {
@@ -15,35 +16,32 @@ class TaskHistoryLifeCycle {
         }
     }
     async endTasks(taskResults) {
-
-
-            target: taskResult.task.target.target,
-            configuration: taskResult.task.target.configuration,
+        taskResults
+            .map((taskResult) => ({
             hash: taskResult.task.hash,
-
+            target: taskResult.task.target,
+            code: taskResult.code,
             status: taskResult.status,
-            start:
-            end:
-        }))
-
+            start: taskResult.task.startTime ?? this.startTimings[taskResult.task.id],
+            end: taskResult.task.endTime ?? Date.now(),
+        }))
+            .forEach((taskRun) => {
+            this.taskRuns.set(taskRun.hash, taskRun);
+        });
     }
     async endCommand() {
-
-
-        const flakyTasks = [];
-        // check if any hash has different exit codes => flaky
-        for (let hash in history) {
-            if (history[hash].length > 1 &&
-                history[hash].some((run) => run.code !== history[hash][0].code)) {
-                flakyTasks.push((0, serialize_target_1.serializeTarget)(history[hash][0].project, history[hash][0].target, history[hash][0].configuration));
-            }
-        }
+        const entries = Array.from(this.taskRuns);
+        await this.taskHistory.recordTaskRuns(entries.map(([_, v]) => v));
+        const flakyTasks = await this.taskHistory.getFlakyTasks(entries.map(([hash]) => hash));
         if (flakyTasks.length > 0) {
             output_1.output.warn({
                 title: `Nx detected ${flakyTasks.length === 1 ? 'a flaky task' : ' flaky tasks'}`,
                 bodyLines: [
                     ,
-                    ...flakyTasks.map((
+                    ...flakyTasks.map((hash) => {
+                        const taskRun = this.taskRuns.get(hash);
+                        return ` ${(0, serialize_target_1.serializeTarget)(taskRun.target.project, taskRun.target.target, taskRun.target.configuration)}`;
+                    }),
                     '',
                     `Flaky tasks can disrupt your CI pipeline. Automatically retry them with Nx Cloud. Learn more at https://nx.dev/ci/features/flaky-tasks`,
                 ],
```
package/src/tasks-runner/run-command.js

```diff
@@ -27,11 +27,13 @@ const static_run_many_terminal_output_life_cycle_1 = require("./life-cycles/stat
 const static_run_one_terminal_output_life_cycle_1 = require("./life-cycles/static-run-one-terminal-output-life-cycle");
 const store_run_information_life_cycle_1 = require("./life-cycles/store-run-information-life-cycle");
 const task_history_life_cycle_1 = require("./life-cycles/task-history-life-cycle");
+const task_history_life_cycle_old_1 = require("./life-cycles/task-history-life-cycle-old");
 const task_profiling_life_cycle_1 = require("./life-cycles/task-profiling-life-cycle");
 const task_timings_life_cycle_1 = require("./life-cycles/task-timings-life-cycle");
 const task_graph_utils_1 = require("./task-graph-utils");
 const utils_1 = require("./utils");
 const chalk = require("chalk");
+const native_1 = require("../native");
 async function getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides) {
     const { runnerOptions } = getRunner(nxArgs, nxJson);
     const isRunOne = initiatingProject != null;
@@ -333,7 +335,7 @@ function constructLifeCycles(lifeCycle) {
         lifeCycles.push(new task_profiling_life_cycle_1.TaskProfilingLifeCycle(process.env.NX_PROFILE));
     }
     if (!(0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())) {
-        lifeCycles.push(new task_history_life_cycle_1.TaskHistoryLifeCycle());
+        lifeCycles.push(!native_1.IS_WASM ? new task_history_life_cycle_1.TaskHistoryLifeCycle() : new task_history_life_cycle_old_1.LegacyTaskHistoryLifeCycle());
     }
     return lifeCycles;
 }
```
package/src/tasks-runner/task-orchestrator.js

```diff
@@ -15,6 +15,8 @@ const task_env_1 = require("./task-env");
 const workspace_root_1 = require("../utils/workspace-root");
 const output_1 = require("../utils/output");
 const params_1 = require("../utils/params");
+const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
+const nx_json_1 = require("../config/nx-json");
 class TaskOrchestrator {
     // endregion internal state
     constructor(hasher, initiatingProject, projectGraph, taskGraph, options, bail, daemon) {
@@ -25,7 +27,14 @@ class TaskOrchestrator {
         this.options = options;
         this.bail = bail;
         this.daemon = daemon;
-        this.cache =
+        this.cache = process.env.NX_DB_CACHE === 'true'
+            ? new cache_1.DbCache({
+                // Remove this in Nx 21
+                nxCloudRemoteCache: (0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())
+                    ? this.options.remoteCache
+                    : null,
+            })
+            : new cache_1.Cache(this.options);
         this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
         this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
         // region internal state
```
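The orchestrator only switches to the database-backed `DbCache` when the `NX_DB_CACHE` environment variable is exactly the string `'true'`; otherwise the existing file-system `Cache` keeps being used. A minimal sketch of the same selection, with placeholder classes standing in for the real cache implementations:

```ts
// Placeholder implementations standing in for cache_1.DbCache / cache_1.Cache.
class DbCacheStub {}
class FsCacheStub {}

// Mirrors the opt-in check in the orchestrator: the value must be exactly 'true'.
const cache =
  process.env.NX_DB_CACHE === 'true' ? new DbCacheStub() : new FsCacheStub();

console.log(cache.constructor.name);
// Running with NX_DB_CACHE=true in the environment selects DbCacheStub.
```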
package/src/utils/cache-directory.js

```diff
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.workspaceDataDirectory = exports.cacheDir = void 0;
 exports.cacheDirectoryForWorkspace = cacheDirectoryForWorkspace;
+exports.workspaceDataDirectoryForWorkspace = workspaceDataDirectoryForWorkspace;
 const fs_1 = require("fs");
 const path_1 = require("path");
 const fileutils_1 = require("./fileutils");
@@ -62,6 +63,9 @@ exports.cacheDir = cacheDirectory(workspace_root_1.workspaceRoot, readCacheDirec
 function cacheDirectoryForWorkspace(workspaceRoot) {
     return cacheDirectory(workspaceRoot, readCacheDirectoryProperty(workspaceRoot));
 }
-exports.workspaceDataDirectory =
-
-
+exports.workspaceDataDirectory = workspaceDataDirectoryForWorkspace(workspace_root_1.workspaceRoot);
+function workspaceDataDirectoryForWorkspace(workspaceRoot) {
+    return absolutePath(workspaceRoot, process.env.NX_WORKSPACE_DATA_DIRECTORY ??
+        process.env.NX_PROJECT_GRAPH_CACHE_DIRECTORY ??
+        defaultWorkspaceDataDirectory(workspaceRoot));
+}
```
package/src/utils/db-connection.js

```diff
@@ -0,0 +1,11 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getDbConnection = getDbConnection;
+const native_1 = require("../native");
+const cache_directory_1 = require("./cache-directory");
+const package_json_1 = require("../../package.json");
+let dbConnection;
+function getDbConnection(directory = cache_directory_1.workspaceDataDirectory) {
+    dbConnection ??= (0, native_1.connectToNxDb)(directory, package_json_1.version);
+    return dbConnection;
+}
```
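Note that `getDbConnection` memoizes a single connection in module scope with `??=`, so the `directory` argument only matters on the very first call; later calls return the already-opened connection even if they pass a different directory. A small self-contained sketch of that memoization pattern; `openDb` is a hypothetical stand-in for the native `connectToNxDb`:

```ts
// Hypothetical stand-in for the native connectToNxDb(directory, version) call.
function openDb(directory: string, version: string): { directory: string } {
  return { directory };
}

let dbConnection: { directory: string } | undefined;

function getDbConnection(directory = '/default/workspace-data'): { directory: string } {
  // `??=` only assigns when dbConnection is still undefined,
  // so subsequent calls reuse the first connection.
  dbConnection ??= openDb(directory, '19.7.0');
  return dbConnection;
}

console.log(getDbConnection('/first').directory);  // '/first'
console.log(getDbConnection('/second').directory); // still '/first'
```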
package/src/utils/legacy-task-history.d.ts

```diff
@@ -0,0 +1,8 @@
+declare const taskRunKeys: readonly ["project", "target", "configuration", "hash", "code", "status", "start", "end"];
+export type TaskRun = Record<(typeof taskRunKeys)[number], string>;
+export declare function getHistoryForHashes(hashes: string[]): Promise<{
+    [hash: string]: TaskRun[];
+}>;
+export declare function writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void>;
+export declare const taskHistoryFile: string;
+export {};
```
package/src/utils/legacy-task-history.js

```diff
@@ -0,0 +1,87 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.taskHistoryFile = void 0;
+exports.getHistoryForHashes = getHistoryForHashes;
+exports.writeTaskRunsToHistory = writeTaskRunsToHistory;
+const fs_1 = require("fs");
+const path_1 = require("path");
+const cache_directory_1 = require("./cache-directory");
+const taskRunKeys = [
+    'project',
+    'target',
+    'configuration',
+    'hash',
+    'code',
+    'status',
+    'start',
+    'end',
+];
+let taskHistory = undefined;
+let taskHashToIndicesMap = new Map();
+async function getHistoryForHashes(hashes) {
+    if (taskHistory === undefined) {
+        loadTaskHistoryFromDisk();
+    }
+    const result = {};
+    for (let hash of hashes) {
+        const indices = taskHashToIndicesMap.get(hash);
+        if (!indices) {
+            result[hash] = [];
+        }
+        else {
+            result[hash] = indices.map((index) => taskHistory[index]);
+        }
+    }
+    return result;
+}
+async function writeTaskRunsToHistory(taskRuns) {
+    if (taskHistory === undefined) {
+        loadTaskHistoryFromDisk();
+    }
+    const serializedLines = [];
+    for (let taskRun of taskRuns) {
+        const serializedLine = taskRunKeys.map((key) => taskRun[key]).join(',');
+        serializedLines.push(serializedLine);
+        recordTaskRunInMemory(taskRun);
+    }
+    if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
+        (0, fs_1.writeFileSync)(exports.taskHistoryFile, `${taskRunKeys.join(',')}\n`);
+    }
+    (0, fs_1.appendFileSync)(exports.taskHistoryFile, serializedLines.join('\n') + '\n');
+}
+exports.taskHistoryFile = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'task-history.csv');
+function loadTaskHistoryFromDisk() {
+    taskHashToIndicesMap.clear();
+    taskHistory = [];
+    if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
+        return;
+    }
+    const fileContent = (0, fs_1.readFileSync)(exports.taskHistoryFile, 'utf8');
+    if (!fileContent) {
+        return;
+    }
+    const lines = fileContent.split('\n');
+    // if there are no lines or just the header, return
+    if (lines.length <= 1) {
+        return;
+    }
+    const contentLines = lines.slice(1).filter((l) => l.trim() !== '');
+    // read the values from csv format where each header is a key and the value is the value
+    for (let line of contentLines) {
+        const values = line.trim().split(',');
+        const run = {};
+        taskRunKeys.forEach((header, index) => {
+            run[header] = values[index];
+        });
+        recordTaskRunInMemory(run);
+    }
+}
+function recordTaskRunInMemory(taskRun) {
+    const index = taskHistory.push(taskRun) - 1;
+    if (taskHashToIndicesMap.has(taskRun.hash)) {
+        taskHashToIndicesMap.get(taskRun.hash).push(index);
+    }
+    else {
+        taskHashToIndicesMap.set(taskRun.hash, [index]);
+    }
+}
```
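The legacy history retained here is a plain CSV (`task-history.csv` in the workspace data directory): the first line is the `taskRunKeys` header, and each subsequent line is one task run serialized in that key order. A hypothetical two-run example of what the file contents look like; the project names, hashes, and timestamps are made up:

```ts
// Hypothetical contents of task-history.csv as written by writeTaskRunsToHistory:
// header = taskRunKeys joined by commas, then one comma-separated line per run.
const exampleTaskHistoryCsv = [
  'project,target,configuration,hash,code,status,start,end',
  'my-app,build,production,abc123,0,success,1724300000000,1724300004000',
  'my-app,test,,def456,1,failure,1724300010000,1724300012000',
].join('\n');

console.log(exampleTaskHistoryCsv);
```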
package/src/utils/task-history.d.ts
CHANGED
```diff
@@ -1,8 +1,6 @@
-
-export
-
-
-
-
-export declare const taskHistoryFile: string;
-export {};
+import { NxTaskHistory, TaskRun } from '../native';
+export declare class TaskHistory {
+    taskHistory: NxTaskHistory;
+    getFlakyTasks(hashes: string[]): Promise<string[]>;
+    recordTaskRuns(taskRuns: TaskRun[]): Promise<void>;
+}
```