nx 19.4.0-beta.0 → 19.4.0-beta.2
This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- package/package.json +14 -14
- package/release/changelog-renderer/index.d.ts +6 -0
- package/release/changelog-renderer/index.js +1 -1
- package/src/command-line/connect/connect-to-nx-cloud.d.ts +1 -1
- package/src/command-line/connect/connect-to-nx-cloud.js +16 -25
- package/src/command-line/graph/graph.d.ts +1 -0
- package/src/command-line/graph/graph.js +12 -1
- package/src/command-line/init/implementation/dot-nx/add-nx-scripts.js +5 -6
- package/src/command-line/release/changelog.js +6 -1
- package/src/command-line/release/config/config.js +3 -0
- package/src/command-line/run/command-object.js +2 -1
- package/src/config/workspace-json-project-json.d.ts +2 -0
- package/src/core/graph/main.js +1 -1
- package/src/core/graph/styles.css +1 -1
- package/src/daemon/client/client.d.ts +5 -0
- package/src/daemon/client/client.js +14 -0
- package/src/daemon/message-types/task-history.d.ts +13 -0
- package/src/daemon/message-types/task-history.js +19 -0
- package/src/daemon/server/handle-get-task-history.d.ts +4 -0
- package/src/daemon/server/handle-get-task-history.js +12 -0
- package/src/daemon/server/handle-write-task-runs-to-history.d.ts +5 -0
- package/src/daemon/server/handle-write-task-runs-to-history.js +12 -0
- package/src/daemon/server/plugins.js +12 -2
- package/src/daemon/server/server.js +9 -0
- package/src/daemon/socket-utils.d.ts +1 -0
- package/src/daemon/socket-utils.js +6 -1
- package/src/executors/run-commands/run-commands.impl.js +29 -20
- package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.d.ts +1 -0
- package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +68 -33
- package/src/nx-cloud/generators/connect-to-nx-cloud/schema.json +5 -0
- package/src/nx-cloud/utilities/url-shorten.d.ts +2 -1
- package/src/nx-cloud/utilities/url-shorten.js +47 -11
- package/src/plugins/package-json-workspaces/create-nodes.js +12 -7
- package/src/plugins/project-json/build-nodes/package-json-next-to-project-json.js +10 -2
- package/src/plugins/target-defaults/target-defaults-plugin.d.ts +5 -0
- package/src/project-graph/plugins/internal-api.js +1 -1
- package/src/project-graph/plugins/isolation/index.d.ts +1 -1
- package/src/project-graph/plugins/isolation/index.js +8 -13
- package/src/project-graph/plugins/isolation/messaging.d.ts +6 -3
- package/src/project-graph/plugins/isolation/messaging.js +9 -3
- package/src/project-graph/plugins/isolation/plugin-pool.d.ts +1 -1
- package/src/project-graph/plugins/isolation/plugin-pool.js +123 -43
- package/src/project-graph/plugins/isolation/plugin-worker.js +128 -107
- package/src/project-graph/project-graph.js +7 -1
- package/src/project-graph/utils/normalize-project-nodes.d.ts +1 -5
- package/src/project-graph/utils/normalize-project-nodes.js +2 -17
- package/src/project-graph/utils/project-configuration-utils.js +14 -3
- package/src/project-graph/utils/retrieve-workspace-files.d.ts +3 -3
- package/src/tasks-runner/default-tasks-runner.js +2 -2
- package/src/tasks-runner/life-cycle.d.ts +10 -10
- package/src/tasks-runner/life-cycle.js +10 -10
- package/src/tasks-runner/life-cycles/task-history-life-cycle.d.ts +9 -0
- package/src/tasks-runner/life-cycles/task-history-life-cycle.js +54 -0
- package/src/tasks-runner/run-command.js +6 -0
- package/src/tasks-runner/task-env.d.ts +13 -0
- package/src/tasks-runner/task-env.js +41 -26
- package/src/tasks-runner/task-orchestrator.js +4 -4
- package/src/utils/git-utils.d.ts +1 -1
- package/src/utils/git-utils.js +13 -2
- package/src/utils/nx-cloud-utils.d.ts +1 -1
- package/src/utils/nx-cloud-utils.js +1 -1
- package/src/utils/package-json.d.ts +3 -0
- package/src/utils/package-json.js +15 -3
- package/src/utils/serialize-target.d.ts +1 -0
- package/src/utils/serialize-target.js +7 -0
- package/src/utils/task-history.d.ts +8 -0
- package/src/utils/task-history.js +97 -0
package/src/tasks-runner/task-env.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getEnvVariablesForTask = exports.getTaskSpecificEnv = exports.getEnvVariablesForBatchProcess = void 0;
+exports.unloadDotEnvFile = exports.loadAndExpandDotEnvFile = exports.getEnvVariablesForTask = exports.getTaskSpecificEnv = exports.getEnvVariablesForBatchProcess = void 0;
 const dotenv_1 = require("dotenv");
 const dotenv_expand_1 = require("dotenv-expand");
 const workspace_root_1 = require("../utils/workspace-root");
@@ -75,9 +75,42 @@ function getNxEnvVariablesForTask(task, forceColor, skipNxCache, captureStderr,
         ...env,
     };
 }
-
+/**
+ * This function loads a .env file and expands the variables in it.
+ * It is going to override existing environmentVariables.
+ * @param filename
+ * @param environmentVariables
+ */
+function loadAndExpandDotEnvFile(filename, environmentVariables, override = false) {
+    const myEnv = (0, dotenv_1.config)({
+        path: filename,
+        processEnv: environmentVariables,
+        override,
+    });
+    return (0, dotenv_expand_1.expand)({
+        ...myEnv,
+        processEnv: environmentVariables,
+    });
+}
+exports.loadAndExpandDotEnvFile = loadAndExpandDotEnvFile;
+/**
+ * This function unloads a .env file and removes the variables in it from the environmentVariables.
+ * @param filename
+ * @param environmentVariables
+ */
+function unloadDotEnvFile(filename, environmentVariables, override = false) {
+    const parsedDotEnvFile = {};
+    loadAndExpandDotEnvFile(filename, parsedDotEnvFile, override);
+    Object.keys(parsedDotEnvFile).forEach((envVarKey) => {
+        if (environmentVariables[envVarKey] === parsedDotEnvFile[envVarKey]) {
+            delete environmentVariables[envVarKey];
+        }
+    });
+}
+exports.unloadDotEnvFile = unloadDotEnvFile;
+function getEnvFilesForTask(task) {
     // Collect dot env files that may pertain to a task
-
+    return [
         // Load DotEnv Files for a configuration in the project root
         ...(task.target.configuration
             ? [
@@ -122,35 +155,17 @@ function loadDotEnvFilesForTask(task, environmentVariables) {
        `.env.local`,
        `.env`,
    ];
+}
+function loadDotEnvFilesForTask(task, environmentVariables) {
+    const dotEnvFiles = getEnvFilesForTask(task);
     for (const file of dotEnvFiles) {
-
-            path: file,
-            processEnv: environmentVariables,
-            // Do not override existing env variables as we load
-            override: false,
-        });
-        environmentVariables = {
-            ...(0, dotenv_expand_1.expand)({
-                ...myEnv,
-                ignoreProcessEnv: true, // Do not override existing env variables as we load
-            }).parsed,
-            ...environmentVariables,
-        };
+        loadAndExpandDotEnvFile(file, environmentVariables);
     }
     return environmentVariables;
 }
 function unloadDotEnvFiles(environmentVariables) {
-    const unloadDotEnvFile = (filename) => {
-        let parsedDotEnvFile = {};
-        (0, dotenv_1.config)({ path: filename, processEnv: parsedDotEnvFile });
-        Object.keys(parsedDotEnvFile).forEach((envVarKey) => {
-            if (environmentVariables[envVarKey] === parsedDotEnvFile[envVarKey]) {
-                delete environmentVariables[envVarKey];
-            }
-        });
-    };
     for (const file of ['.env', '.local.env', '.env.local']) {
-        unloadDotEnvFile(file);
+        unloadDotEnvFile(file, environmentVariables);
     }
     return environmentVariables;
 }
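For context, here is a minimal sketch of how the two new exported helpers compose. The deep import path and the surrounding setup are assumptions for illustration, not part of the diff; both helpers mutate the object passed to them.

```ts
// Sketch only: exercises loadAndExpandDotEnvFile/unloadDotEnvFile from the diff above.
// The deep import path is an assumption made for this example.
import {
  loadAndExpandDotEnvFile,
  unloadDotEnvFile,
} from 'nx/src/tasks-runner/task-env';

const env: Record<string, string> = {};

// Load .env.local into `env`, expanding ${VAR} references via dotenv-expand;
// keys already present in `env` win unless the third argument (override) is true.
loadAndExpandDotEnvFile('.env.local', env);

// Later, remove only the keys whose values still match what that file contributed.
unloadDotEnvFile('.env.local', env);
```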
package/src/tasks-runner/task-orchestrator.js
CHANGED
@@ -88,7 +88,7 @@ class TaskOrchestrator {
         if (!task.hash) {
             await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, taskSpecificEnv);
         }
-        this.options.lifeCycle.scheduleTask(task);
+        await this.options.lifeCycle.scheduleTask(task);
         return taskSpecificEnv;
     }
     async processScheduledBatch(batch) {
@@ -96,7 +96,7 @@ class TaskOrchestrator {
             if (!task.hash) {
                 await (0, hash_task_1.hashTask)(this.hasher, this.projectGraph, this.taskGraph, task, this.batchEnv);
             }
-            this.options.lifeCycle.scheduleTask(task);
+            await this.options.lifeCycle.scheduleTask(task);
         }));
     }
     processAllScheduledTasks() {
@@ -312,7 +312,7 @@ class TaskOrchestrator {
     // endregion Single Task
     // region Lifecycle
     async preRunSteps(tasks, metadata) {
-        this.options.lifeCycle.startTasks(tasks, metadata);
+        await this.options.lifeCycle.startTasks(tasks, metadata);
     }
     async postRunSteps(tasks, results, doNotSkipCache, { groupId }) {
         for (const task of tasks) {
@@ -342,7 +342,7 @@ class TaskOrchestrator {
            perf_hooks_1.performance.mark('cache-results-end');
            perf_hooks_1.performance.measure('cache-results', 'cache-results-start', 'cache-results-end');
        }
-        this.options.lifeCycle.endTasks(results.map((result) => {
+        await this.options.lifeCycle.endTasks(results.map((result) => {
            const code = result.status === 'success' ||
                result.status === 'local-cache' ||
                result.status === 'local-cache-kept-existing' ||
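The orchestrator now awaits scheduleTask, startTasks, and endTasks, so life cycle hooks may perform asynchronous work (such as the new task-history life cycle in this release). A hedged sketch of what an async hook could look like; the class name, hook signature details, and helper below are assumptions for illustration only.

```ts
// Illustration only: a life cycle whose hooks return promises, which the
// orchestrator now awaits per the diff above. The exact interface shape is assumed.
class PersistingLifeCycle {
  async scheduleTask(task: { id: string }): Promise<void> {
    // Record that the task was scheduled before it starts running.
    await appendToSomeLog(`scheduled ${task.id}`);
  }
}

// Hypothetical helper standing in for any asynchronous side effect.
async function appendToSomeLog(line: string): Promise<void> {
  console.log(line);
}
```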
package/src/utils/git-utils.d.ts
CHANGED
@@ -1,4 +1,4 @@
 export declare function getGithubSlugOrNull(): string | null;
 export declare function extractUserAndRepoFromGitHubUrl(gitRemotes: string): string | null;
-export declare function commitChanges(commitMessage: string): string | null;
+export declare function commitChanges(commitMessage: string, directory?: string): string | null;
 export declare function getLatestCommitSha(): string | null;
package/src/utils/git-utils.js
CHANGED
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getLatestCommitSha = exports.commitChanges = exports.extractUserAndRepoFromGitHubUrl = exports.getGithubSlugOrNull = void 0;
 const child_process_1 = require("child_process");
+const devkit_exports_1 = require("../devkit-exports");
 function getGithubSlugOrNull() {
     try {
         const gitRemote = (0, child_process_1.execSync)('git remote -v').toString();
@@ -38,17 +39,27 @@ function parseGitHubUrl(url) {
     }
     return null;
 }
-function commitChanges(commitMessage) {
+function commitChanges(commitMessage, directory) {
     try {
         (0, child_process_1.execSync)('git add -A', { encoding: 'utf8', stdio: 'pipe' });
         (0, child_process_1.execSync)('git commit --no-verify -F -', {
             encoding: 'utf8',
             stdio: 'pipe',
             input: commitMessage,
+            cwd: directory,
         });
     }
     catch (err) {
-
+        if (directory) {
+            // We don't want to throw during create-nx-workspace
+            // because maybe there was an error when setting up git
+            // initially.
+            devkit_exports_1.logger.verbose(`Git may not be set up correctly for this new workspace.
+        ${err}`);
+        }
+        else {
+            throw new Error(`Error committing changes:\n${err.stderr}`);
+        }
     }
     return getLatestCommitSha();
 }
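commitChanges now takes an optional directory; when one is given (the create-nx-workspace path), a git failure is logged verbosely instead of thrown. A usage sketch; the import path and the workspace path are assumptions for illustration.

```ts
// Sketch: committing inside a freshly generated workspace. With `directory` set,
// a git failure is downgraded to a verbose log rather than an exception (per the diff above).
import { commitChanges } from 'nx/src/utils/git-utils';

const sha = commitChanges('chore: initial commit', '/tmp/my-new-workspace');
if (sha) {
  console.log(`Committed as ${sha}`);
}
```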
package/src/utils/nx-cloud-utils.d.ts
CHANGED
@@ -1,4 +1,4 @@
 import { NxJsonConfiguration } from '../config/nx-json';
-export declare function isNxCloudUsed(nxJson: NxJsonConfiguration):
+export declare function isNxCloudUsed(nxJson: NxJsonConfiguration): boolean;
 export declare function getNxCloudUrl(nxJson: NxJsonConfiguration): string;
 export declare function getNxCloudToken(nxJson: NxJsonConfiguration): string;
package/src/utils/nx-cloud-utils.js
CHANGED
@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getNxCloudToken = exports.getNxCloudUrl = exports.isNxCloudUsed = void 0;
 function isNxCloudUsed(nxJson) {
-    return (process.env.NX_CLOUD_ACCESS_TOKEN ||
+    return (!!process.env.NX_CLOUD_ACCESS_TOKEN ||
         !!nxJson.nxCloudAccessToken ||
         !!Object.values(nxJson.tasksRunnerOptions ?? {}).find((r) => r.runner == '@nrwl/nx-cloud' || r.runner == 'nx-cloud'));
 }
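The added `!!` matters because the declaration above now promises a boolean: without coercion, the `||` chain could evaluate to the raw access-token string. A small illustration with an invented value:

```ts
const token: string | undefined = 'abc123'; // stand-in for NX_CLOUD_ACCESS_TOKEN
const withoutCoercion = token || false;     // 'abc123' — a string leaks through
const withCoercion = !!token || false;      // true — always a boolean
```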
package/src/utils/package-json.d.ts
CHANGED
@@ -63,6 +63,8 @@ export interface PackageJson {
     'nx-migrations'?: string | NxMigrationsConfiguration;
     'ng-update'?: string | NxMigrationsConfiguration;
     packageManager?: string;
+    description?: string;
+    keywords?: string[];
 }
 export declare function normalizePackageGroup(packageGroup: PackageGroup): ArrayPackageGroup;
 export declare function readNxMigrateConfig(json: Partial<PackageJson>): NxMigrationsConfiguration & {
@@ -70,6 +72,7 @@ export declare function readNxMigrateConfig(json: Partial<PackageJson>): NxMigra
 };
 export declare function buildTargetFromScript(script: string, scripts: Record<string, string>, packageManagerCommand: PackageManagerCommands): TargetConfiguration;
 export declare function getMetadataFromPackageJson(packageJson: PackageJson): ProjectMetadata;
+export declare function getTagsFromPackageJson(packageJson: PackageJson): string[];
 export declare function readTargetsFromPackageJson(packageJson: PackageJson): Record<string, TargetConfiguration<any>>;
 /**
  * Uses `require.resolve` to read the package.json for a module.
package/src/utils/package-json.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.readModulePackageJson = exports.readModulePackageJsonWithoutFallbacks = exports.readTargetsFromPackageJson = exports.getMetadataFromPackageJson = exports.buildTargetFromScript = exports.readNxMigrateConfig = exports.normalizePackageGroup = void 0;
+exports.readModulePackageJson = exports.readModulePackageJsonWithoutFallbacks = exports.readTargetsFromPackageJson = exports.getTagsFromPackageJson = exports.getMetadataFromPackageJson = exports.buildTargetFromScript = exports.readNxMigrateConfig = exports.normalizePackageGroup = void 0;
 const fs_1 = require("fs");
 const path_1 = require("path");
 const project_configuration_utils_1 = require("../project-graph/utils/project-configuration-utils");
@@ -54,15 +54,27 @@ function buildTargetFromScript(script, scripts = {}, packageManagerCommand) {
 exports.buildTargetFromScript = buildTargetFromScript;
 let packageManagerCommand;
 function getMetadataFromPackageJson(packageJson) {
-    const { scripts, nx } = packageJson ?? {};
+    const { scripts, nx, description } = packageJson ?? {};
     const includedScripts = nx?.includedScripts || Object.keys(scripts ?? {});
     return {
         targetGroups: {
-            'NPM Scripts': includedScripts,
+            ...(includedScripts.length ? { 'NPM Scripts': includedScripts } : {}),
         },
+        description,
     };
 }
 exports.getMetadataFromPackageJson = getMetadataFromPackageJson;
+function getTagsFromPackageJson(packageJson) {
+    const tags = packageJson.private ? ['npm:private'] : ['npm:public'];
+    if (packageJson.keywords?.length) {
+        tags.push(...packageJson.keywords.map((k) => `npm:${k}`));
+    }
+    if (packageJson?.nx?.tags?.length) {
+        tags.push(...packageJson?.nx.tags);
+    }
+    return tags;
+}
+exports.getTagsFromPackageJson = getTagsFromPackageJson;
 function readTargetsFromPackageJson(packageJson) {
     const { scripts, nx, private: isPrivate } = packageJson ?? {};
     const res = {};
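A worked example of what the new getTagsFromPackageJson produces; the sample manifest is invented, and the expected output simply follows the mapping in the function above.

```ts
// Invented input; the expected result follows getTagsFromPackageJson as shown in the diff.
const pkg = {
  name: 'my-lib',
  private: true,
  keywords: ['cli', 'tooling'],
  nx: { tags: ['scope:shared'] },
};

// Expected: ['npm:private', 'npm:cli', 'npm:tooling', 'scope:shared']
// (a non-private package would start with 'npm:public' instead)
```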
package/src/utils/serialize-target.d.ts
ADDED
@@ -0,0 +1 @@
+export declare function serializeTarget(project: any, target: any, configuration: any): string;
package/src/utils/serialize-target.js
ADDED
@@ -0,0 +1,7 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.serializeTarget = void 0;
+function serializeTarget(project, target, configuration) {
+    return [project, target, configuration].filter((part) => !!part).join(':');
+}
+exports.serializeTarget = serializeTarget;
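serializeTarget just joins the defined parts with colons; a quick illustration, assuming the function from the new module above is in scope:

```ts
// Assuming serializeTarget from the module above is in scope:
serializeTarget('my-app', 'build', 'production'); // => 'my-app:build:production'
serializeTarget('my-app', 'build', undefined);    // => 'my-app:build' (empty parts are dropped)
```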
package/src/utils/task-history.d.ts
ADDED
@@ -0,0 +1,8 @@
+declare const taskRunKeys: readonly ["project", "target", "configuration", "hash", "code", "status", "start", "end"];
+export type TaskRun = Record<(typeof taskRunKeys)[number], string>;
+export declare function getHistoryForHashes(hashes: string[]): Promise<{
+    [hash: string]: TaskRun[];
+}>;
+export declare function writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void>;
+export declare const taskHistoryFile: string;
+export {};
package/src/utils/task-history.js
ADDED
@@ -0,0 +1,97 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.taskHistoryFile = exports.writeTaskRunsToHistory = exports.getHistoryForHashes = void 0;
+const fs_1 = require("fs");
+const path_1 = require("path");
+const client_1 = require("../daemon/client/client");
+const is_on_daemon_1 = require("../daemon/is-on-daemon");
+const cache_directory_1 = require("./cache-directory");
+const taskRunKeys = [
+    'project',
+    'target',
+    'configuration',
+    'hash',
+    'code',
+    'status',
+    'start',
+    'end',
+];
+let taskHistory = undefined;
+let taskHashToIndicesMap = new Map();
+async function getHistoryForHashes(hashes) {
+    if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
+        if (taskHistory === undefined) {
+            loadTaskHistoryFromDisk();
+        }
+        const result = {};
+        for (let hash of hashes) {
+            const indices = taskHashToIndicesMap.get(hash);
+            if (!indices) {
+                result[hash] = [];
+            }
+            else {
+                result[hash] = indices.map((index) => taskHistory[index]);
+            }
+        }
+        return result;
+    }
+    return await client_1.daemonClient.getTaskHistoryForHashes(hashes);
+}
+exports.getHistoryForHashes = getHistoryForHashes;
+async function writeTaskRunsToHistory(taskRuns) {
+    if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
+        if (taskHistory === undefined) {
+            loadTaskHistoryFromDisk();
+        }
+        const serializedLines = [];
+        for (let taskRun of taskRuns) {
+            const serializedLine = taskRunKeys.map((key) => taskRun[key]).join(',');
+            serializedLines.push(serializedLine);
+            recordTaskRunInMemory(taskRun);
+        }
+        if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
+            (0, fs_1.writeFileSync)(exports.taskHistoryFile, `${taskRunKeys.join(',')}\n`);
+        }
+        (0, fs_1.appendFileSync)(exports.taskHistoryFile, serializedLines.join('\n') + '\n');
+    }
+    else {
+        await client_1.daemonClient.writeTaskRunsToHistory(taskRuns);
+    }
+}
+exports.writeTaskRunsToHistory = writeTaskRunsToHistory;
+exports.taskHistoryFile = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'task-history.csv');
+function loadTaskHistoryFromDisk() {
+    taskHashToIndicesMap.clear();
+    taskHistory = [];
+    if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
+        return;
+    }
+    const fileContent = (0, fs_1.readFileSync)(exports.taskHistoryFile, 'utf8');
+    if (!fileContent) {
+        return;
+    }
+    const lines = fileContent.split('\n');
+    // if there are no lines or just the header, return
+    if (lines.length <= 1) {
+        return;
+    }
+    const contentLines = lines.slice(1).filter((l) => l.trim() !== '');
+    // read the values from csv format where each header is a key and the value is the value
+    for (let line of contentLines) {
+        const values = line.trim().split(',');
+        const run = {};
+        taskRunKeys.forEach((header, index) => {
+            run[header] = values[index];
+        });
+        recordTaskRunInMemory(run);
+    }
+}
+function recordTaskRunInMemory(taskRun) {
+    const index = taskHistory.push(taskRun) - 1;
+    if (taskHashToIndicesMap.has(taskRun.hash)) {
+        taskHashToIndicesMap.get(taskRun.hash).push(index);
+    }
+    else {
+        taskHashToIndicesMap.set(taskRun.hash, [index]);
+    }
+}
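The history file is a plain CSV whose header row comes from taskRunKeys, with one row appended per task run. A sketch of writing one run and reading it back; the values and the deep import path are invented for illustration, and all fields are strings per the TaskRun type above.

```ts
// Sketch only: the import path is an assumption; field values are invented.
import {
  writeTaskRunsToHistory,
  getHistoryForHashes,
} from 'nx/src/utils/task-history';

async function example() {
  await writeTaskRunsToHistory([
    {
      project: 'my-app',
      target: 'build',
      configuration: 'production',
      hash: 'abc123',
      code: '0',
      status: 'success',
      start: '1718000000000',
      end: '1718000004200',
    },
  ]);

  // Resolves to { abc123: [ ...runs recorded for that hash, including the one above ] }
  const history = await getHistoryForHashes(['abc123']);
  console.log(history);
}
```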