nx 20.4.0 → 20.5.0-beta.0
This diff shows the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
- package/package.json +11 -11
- package/src/daemon/server/project-graph-incremental-recomputation.js +1 -1
- package/src/devkit-exports.d.ts +1 -1
- package/src/devkit-exports.js +2 -1
- package/src/native/index.d.ts +9 -0
- package/src/native/native-bindings.js +1 -0
- package/src/native/nx.wasi-browser.js +11 -8
- package/src/native/nx.wasi.cjs +11 -8
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/native/tests/__fixtures__/file-lock.fixture.js +20 -0
- package/src/project-graph/error-types.d.ts +6 -2
- package/src/project-graph/error-types.js +7 -1
- package/src/project-graph/nx-deps-cache.d.ts +6 -2
- package/src/project-graph/nx-deps-cache.js +59 -8
- package/src/project-graph/project-graph.d.ts +3 -1
- package/src/project-graph/project-graph.js +71 -13
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "nx",
-"version": "20.4.0",
+"version": "20.5.0-beta.0",
 "private": false,
 "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
 "repository": {
@@ -82,16 +82,16 @@
 }
 },
 "optionalDependencies": {
-"@nx/nx-darwin-arm64": "20.4.0",
-"@nx/nx-darwin-x64": "20.4.0",
-"@nx/nx-freebsd-x64": "20.4.0",
-"@nx/nx-linux-arm-gnueabihf": "20.4.0",
-"@nx/nx-linux-arm64-gnu": "20.4.0",
-"@nx/nx-linux-arm64-musl": "20.4.0",
-"@nx/nx-linux-x64-gnu": "20.4.0",
-"@nx/nx-linux-x64-musl": "20.4.0",
-"@nx/nx-win32-arm64-msvc": "20.4.0",
-"@nx/nx-win32-x64-msvc": "20.4.0"
+"@nx/nx-darwin-arm64": "20.5.0-beta.0",
+"@nx/nx-darwin-x64": "20.5.0-beta.0",
+"@nx/nx-freebsd-x64": "20.5.0-beta.0",
+"@nx/nx-linux-arm-gnueabihf": "20.5.0-beta.0",
+"@nx/nx-linux-arm64-gnu": "20.5.0-beta.0",
+"@nx/nx-linux-arm64-musl": "20.5.0-beta.0",
+"@nx/nx-linux-x64-gnu": "20.5.0-beta.0",
+"@nx/nx-linux-x64-musl": "20.5.0-beta.0",
+"@nx/nx-win32-arm64-msvc": "20.5.0-beta.0",
+"@nx/nx-win32-x64-msvc": "20.5.0-beta.0"
 },
 "nx-migrations": {
 "migrations": "./migrations.json",
package/src/daemon/server/project-graph-incremental-recomputation.js
CHANGED
@@ -189,6 +189,7 @@ async function processFilesAndCreateAndSerializeProjectGraph(plugins) {
 };
 }
 }
+(0, nx_deps_cache_1.writeCache)(g.projectFileMapCache, g.projectGraph, projectConfigurationsResult.sourceMaps, errors);
 if (errors.length > 0) {
 return {
 error: new error_types_1.DaemonProjectGraphError(errors, g.projectGraph, projectConfigurationsResult.sourceMaps),
@@ -202,7 +203,6 @@ async function processFilesAndCreateAndSerializeProjectGraph(plugins) {
 };
 }
 else {
-(0, nx_deps_cache_1.writeCache)(g.projectFileMapCache, g.projectGraph);
 return g;
 }
 }
package/src/devkit-exports.d.ts
CHANGED
@@ -16,7 +16,7 @@ export type { WorkspaceJsonConfiguration, ProjectsConfigurations, TargetDependen
 export type { Generator, GeneratorCallback, PromiseExecutor, AsyncIteratorExecutor, Executor, ExecutorContext, TaskGraphExecutor, GeneratorsJson, ExecutorsJson, MigrationsJson, CustomHasher, HasherContext, } from './config/misc-interfaces';
 export { workspaceLayout } from './config/configuration';
 export type { NxPlugin, NxPluginV2, CreateNodes, CreateNodesFunction, CreateNodesResult, CreateNodesContext, CreateNodesContextV2, CreateNodesFunctionV2, CreateNodesResultV2, CreateNodesV2, CreateDependencies, CreateDependenciesContext, CreateMetadata, CreateMetadataContext, ProjectsMetadata, PreTasksExecution, PreTasksExecutionContext, PostTasksExecution, PostTasksExecutionContext, } from './project-graph/plugins';
-export { AggregateCreateNodesError } from './project-graph/error-types';
+export { AggregateCreateNodesError, StaleProjectGraphCacheError, } from './project-graph/error-types';
 export { createNodesFromFiles } from './project-graph/plugins';
 /**
 * @category Tasks
package/src/devkit-exports.js
CHANGED
@@ -4,11 +4,12 @@
 * Try hard to not add to this API to reduce the surface area we need to maintain.
 */
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isDaemonEnabled = exports.createProjectFileMapUsingProjectGraph = exports.cacheDir = exports.hashArray = exports.defaultTasksRunner = exports.getOutputsForTargetAndConfiguration = exports.readProjectsConfigurationFromProjectGraph = exports.readCachedProjectGraph = exports.createProjectGraphAsync = exports.reverse = exports.workspaceRoot = exports.normalizePath = exports.joinPathFragments = exports.stripIndents = exports.writeJsonFile = exports.readJsonFile = exports.stripJsonComments = exports.serializeJson = exports.parseJson = exports.updateJson = exports.writeJson = exports.readJson = exports.validateDependency = exports.DependencyType = exports.updateNxJson = exports.readNxJson = exports.globAsync = exports.glob = exports.getProjects = exports.updateProjectConfiguration = exports.removeProjectConfiguration = exports.readProjectConfiguration = exports.addProjectConfiguration = exports.runExecutor = exports.isWorkspacesEnabled = exports.getPackageManagerVersion = exports.detectPackageManager = exports.getPackageManagerCommand = exports.output = exports.logger = exports.createNodesFromFiles = exports.AggregateCreateNodesError = exports.workspaceLayout = void 0;
+exports.isDaemonEnabled = exports.createProjectFileMapUsingProjectGraph = exports.cacheDir = exports.hashArray = exports.defaultTasksRunner = exports.getOutputsForTargetAndConfiguration = exports.readProjectsConfigurationFromProjectGraph = exports.readCachedProjectGraph = exports.createProjectGraphAsync = exports.reverse = exports.workspaceRoot = exports.normalizePath = exports.joinPathFragments = exports.stripIndents = exports.writeJsonFile = exports.readJsonFile = exports.stripJsonComments = exports.serializeJson = exports.parseJson = exports.updateJson = exports.writeJson = exports.readJson = exports.validateDependency = exports.DependencyType = exports.updateNxJson = exports.readNxJson = exports.globAsync = exports.glob = exports.getProjects = exports.updateProjectConfiguration = exports.removeProjectConfiguration = exports.readProjectConfiguration = exports.addProjectConfiguration = exports.runExecutor = exports.isWorkspacesEnabled = exports.getPackageManagerVersion = exports.detectPackageManager = exports.getPackageManagerCommand = exports.output = exports.logger = exports.createNodesFromFiles = exports.StaleProjectGraphCacheError = exports.AggregateCreateNodesError = exports.workspaceLayout = void 0;
 var configuration_1 = require("./config/configuration");
 Object.defineProperty(exports, "workspaceLayout", { enumerable: true, get: function () { return configuration_1.workspaceLayout; } });
 var error_types_1 = require("./project-graph/error-types");
 Object.defineProperty(exports, "AggregateCreateNodesError", { enumerable: true, get: function () { return error_types_1.AggregateCreateNodesError; } });
+Object.defineProperty(exports, "StaleProjectGraphCacheError", { enumerable: true, get: function () { return error_types_1.StaleProjectGraphCacheError; } });
 var plugins_1 = require("./project-graph/plugins");
 Object.defineProperty(exports, "createNodesFromFiles", { enumerable: true, get: function () { return plugins_1.createNodesFromFiles; } });
 /**
package/src/native/index.d.ts
CHANGED
@@ -13,6 +13,15 @@ export declare class ChildProcess {
 onOutput(callback: (message: string) => void): void
 }

+export declare class FileLock {
+locked: boolean
+constructor(lockFilePath: string)
+unlock(): void
+check(): boolean
+wait(): Promise<void>
+lock(): void
+}
+
 export declare class HashPlanner {
 constructor(nxJson: NxJson, projectGraph: ExternalObject<ProjectGraph>)
 getPlans(taskIds: Array<string>, taskGraph: TaskGraph): Record<string, string[]>
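The FileLock declaration above is the native primitive behind the graph-construction locking added in project-graph.js further down. A minimal usage sketch, assuming the binding is loaded from the package's internal native entry point (the import path and lock-file location are illustrative, not a documented public API):

// Illustrative sketch only: FileLock is an internal nx binding.
import { FileLock } from 'nx/src/native'; // assumed internal path
import { join } from 'path';
import { tmpdir } from 'os';

async function withFileLock(work: () => Promise<void>): Promise<void> {
  const lock = new FileLock(join(tmpdir(), 'example', 'demo.lock'));
  if (lock.locked) {
    // Another process currently holds the lock; wait for it to be released.
    await lock.wait();
  }
  lock.lock();
  try {
    await work();
  } finally {
    lock.unlock();
  }
}

Note that wait() followed by lock() can race with other waiters; the project-graph.js change below re-checks check() in a loop before taking the lock for exactly that reason.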
package/src/native/native-bindings.js
CHANGED
@@ -362,6 +362,7 @@ if (!nativeBinding) {
 }

 module.exports.ChildProcess = nativeBinding.ChildProcess
+module.exports.FileLock = nativeBinding.FileLock
 module.exports.HashPlanner = nativeBinding.HashPlanner
 module.exports.ImportResult = nativeBinding.ImportResult
 module.exports.NxCache = nativeBinding.NxCache
package/src/native/nx.wasi-browser.js
CHANGED
@@ -85,15 +85,18 @@ function __napi_rs_initialize_modules(__napiInstance) {
 __napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_36']?.()
 __napiInstance.exports['__napi_register__DepsOutputsInput_struct_37']?.()
 __napiInstance.exports['__napi_register__NxJson_struct_38']?.()
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
+__napiInstance.exports['__napi_register__FileLock_struct_39']?.()
+__napiInstance.exports['__napi_register__FileLock_impl_41']?.()
+__napiInstance.exports['__napi_register__WorkspaceContext_struct_42']?.()
+__napiInstance.exports['__napi_register__WorkspaceContext_impl_51']?.()
+__napiInstance.exports['__napi_register__WorkspaceErrors_52']?.()
+__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_53']?.()
+__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_54']?.()
+__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_55']?.()
+__napiInstance.exports['__napi_register__FileMap_struct_56']?.()
+__napiInstance.exports['__napi_register____test_only_transfer_file_map_57']?.()
 }
+export const FileLock = __napiModule.exports.FileLock
 export const HashPlanner = __napiModule.exports.HashPlanner
 export const ImportResult = __napiModule.exports.ImportResult
 export const TaskHasher = __napiModule.exports.TaskHasher
package/src/native/nx.wasi.cjs
CHANGED
@@ -116,15 +116,18 @@ function __napi_rs_initialize_modules(__napiInstance) {
 __napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_36']?.()
 __napiInstance.exports['__napi_register__DepsOutputsInput_struct_37']?.()
 __napiInstance.exports['__napi_register__NxJson_struct_38']?.()
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
-__napiInstance.exports['
+__napiInstance.exports['__napi_register__FileLock_struct_39']?.()
+__napiInstance.exports['__napi_register__FileLock_impl_41']?.()
+__napiInstance.exports['__napi_register__WorkspaceContext_struct_42']?.()
+__napiInstance.exports['__napi_register__WorkspaceContext_impl_51']?.()
+__napiInstance.exports['__napi_register__WorkspaceErrors_52']?.()
+__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_53']?.()
+__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_54']?.()
+__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_55']?.()
+__napiInstance.exports['__napi_register__FileMap_struct_56']?.()
+__napiInstance.exports['__napi_register____test_only_transfer_file_map_57']?.()
 }
+module.exports.FileLock = __napiModule.exports.FileLock
 module.exports.HashPlanner = __napiModule.exports.HashPlanner
 module.exports.ImportResult = __napiModule.exports.ImportResult
 module.exports.TaskHasher = __napiModule.exports.TaskHasher
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file
package/src/native/tests/__fixtures__/file-lock.fixture.js
ADDED
@@ -0,0 +1,20 @@
+const { FileLock } = require('../../native-bindings.js');
+const ora = require('ora');
+const tmp = require('os').tmpdir();
+
+(async () => {
+const lock = new FileLock(
+require('path').join(tmp, 'nx-unit-tests', 'file-lock-fixture')
+);
+if (lock.locked) {
+const s = ora('Waiting for lock').start();
+await lock.wait();
+s.stop();
+console.log('waited for lock');
+} else {
+await lock.lock();
+await new Promise((resolve) => setTimeout(resolve, 5000));
+console.log('ran with lock');
+await lock.unlock();
+}
+})();
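The fixture takes the lock in the first process that runs it and makes any later process wait, so a test can assert on the "ran with lock" / "waited for lock" output. A hypothetical way to exercise it manually (the fixture path and the delay below are assumptions, not part of the published tests):

// Hypothetical harness, not part of the published tests.
import { spawn } from 'child_process';

const fixture =
  'node_modules/nx/src/native/tests/__fixtures__/file-lock.fixture.js'; // assumed install path
const run = () => spawn(process.execPath, [fixture], { stdio: 'inherit' });

const first = run();    // acquires the lock and holds it for ~5 seconds
setTimeout(run, 500);   // second run sees lock.locked === true and waits
first.on('exit', () => console.log('first process released the lock'));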
package/src/project-graph/error-types.d.ts
CHANGED
@@ -2,10 +2,14 @@ import { ConfigurationResult, ConfigurationSourceMaps } from './utils/project-co
 import { ProjectConfiguration } from '../config/workspace-json-project-json';
 import { ProjectGraph } from '../config/project-graph';
 import { CreateNodesFunctionV2 } from './plugins/public-api';
+export type ProjectGraphErrorTypes = AggregateCreateNodesError | MergeNodesError | CreateMetadataError | ProjectsWithNoNameError | MultipleProjectsWithSameNameError | ProcessDependenciesError | WorkspaceValidityError;
+export declare class StaleProjectGraphCacheError extends Error {
+constructor();
+}
 export declare class ProjectGraphError extends Error {
 #private;
 private readonly errors;
-constructor(errors: Array<
+constructor(errors: Array<ProjectGraphErrorTypes>, partialProjectGraph: ProjectGraph, partialSourceMaps: ConfigurationSourceMaps | null);
 /**
 * The daemon cannot throw errors which contain methods as they are not serializable.
 *
@@ -19,7 +23,7 @@ export declare class ProjectGraphError extends Error {
 */
 getPartialProjectGraph(): ProjectGraph;
 getPartialSourcemaps(): ConfigurationSourceMaps;
-getErrors():
+getErrors(): ProjectGraphErrorTypes[];
 }
 export declare class MultipleProjectsWithSameNameError extends Error {
 conflicts: Map<string, string[]>;
package/src/project-graph/error-types.js
CHANGED
@@ -1,7 +1,7 @@
 "use strict";
 var _ProjectGraphError_partialProjectGraph, _ProjectGraphError_partialSourceMaps;
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.LoadPluginError = exports.DaemonProjectGraphError = exports.AggregateProjectGraphError = exports.WorkspaceValidityError = exports.ProcessDependenciesError = exports.CreateMetadataError = exports.MergeNodesError = exports.AggregateCreateNodesError = exports.ProjectConfigurationsError = exports.ProjectWithNoNameError = exports.ProjectsWithNoNameError = exports.ProjectWithExistingNameError = exports.MultipleProjectsWithSameNameError = exports.ProjectGraphError = void 0;
+exports.LoadPluginError = exports.DaemonProjectGraphError = exports.AggregateProjectGraphError = exports.WorkspaceValidityError = exports.ProcessDependenciesError = exports.CreateMetadataError = exports.MergeNodesError = exports.AggregateCreateNodesError = exports.ProjectConfigurationsError = exports.ProjectWithNoNameError = exports.ProjectsWithNoNameError = exports.ProjectWithExistingNameError = exports.MultipleProjectsWithSameNameError = exports.ProjectGraphError = exports.StaleProjectGraphCacheError = void 0;
 exports.isProjectWithExistingNameError = isProjectWithExistingNameError;
 exports.isMultipleProjectsWithSameNameError = isMultipleProjectsWithSameNameError;
 exports.isProjectsWithNoNameError = isProjectsWithNoNameError;
@@ -14,6 +14,12 @@ exports.isCreateMetadataError = isCreateMetadataError;
 exports.isAggregateCreateNodesError = isAggregateCreateNodesError;
 exports.isMergeNodesError = isMergeNodesError;
 const tslib_1 = require("tslib");
+class StaleProjectGraphCacheError extends Error {
+constructor() {
+super('The project graph cache was stale. Ensure that it has been recently created before using `readCachedProjectGraph`.');
+}
+}
+exports.StaleProjectGraphCacheError = StaleProjectGraphCacheError;
 class ProjectGraphError extends Error {
 constructor(errors, partialProjectGraph, partialSourceMaps) {
 const messageFragments = ['Failed to process project graph.'];
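StaleProjectGraphCacheError is thrown when a caller asks for a cached graph computed at or after a given timestamp and the cache on disk is older. A hedged sketch of the intended consumption, using the readCachedProjectGraph signature from project-graph.d.ts below (the import path is the internal devkit-exports module from this diff, which @nx/devkit re-exports):

import {
  readCachedProjectGraph,
  StaleProjectGraphCacheError,
} from 'nx/src/devkit-exports';

const startedAt = Date.now();
try {
  // Only accept a cache entry whose computedAt is >= startedAt.
  const graph = readCachedProjectGraph(startedAt);
  console.log(`cached graph has ${Object.keys(graph.nodes).length} projects`);
} catch (e) {
  if (e instanceof StaleProjectGraphCacheError) {
    // The cache predates startedAt; recompute the graph instead.
  } else {
    throw e;
  }
}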
package/src/project-graph/nx-deps-cache.d.ts
CHANGED
@@ -1,6 +1,8 @@
 import { NxJsonConfiguration } from '../config/nx-json';
 import { FileData, FileMap, ProjectGraph } from '../config/project-graph';
 import { ProjectConfiguration } from '../config/workspace-json-project-json';
+import { ConfigurationSourceMaps } from './utils/project-configuration-utils';
+import { ProjectGraphErrorTypes } from './error-types';
 export interface FileMapCache {
 version: string;
 nxVersion: string;
@@ -11,9 +13,11 @@ export interface FileMapCache {
 }
 export declare const nxProjectGraph: string;
 export declare const nxFileMap: string;
+export declare const nxSourceMaps: string;
 export declare function ensureCacheDirectory(): void;
 export declare function readFileMapCache(): null | FileMapCache;
-export declare function readProjectGraphCache(): null | ProjectGraph;
+export declare function readProjectGraphCache(minimumComputedAt?: number): null | ProjectGraph;
+export declare function readSourceMapsCache(): null | ConfigurationSourceMaps;
 export declare function createProjectFileMapCache(nxJson: NxJsonConfiguration<'*' | string[]>, packageJsonDeps: Record<string, string>, fileMap: FileMap, tsConfig: {
 compilerOptions?: {
 paths?: {
@@ -21,7 +25,7 @@ export declare function createProjectFileMapCache(nxJson: NxJsonConfiguration<'*
 };
 };
 }): FileMapCache;
-export declare function writeCache(cache: FileMapCache, projectGraph: ProjectGraph): void;
+export declare function writeCache(cache: FileMapCache, projectGraph: ProjectGraph, sourceMaps: ConfigurationSourceMaps, errors: ProjectGraphErrorTypes[]): void;
 export declare function shouldRecomputeWholeGraph(cache: FileMapCache, packageJsonDeps: Record<string, string>, projects: Record<string, ProjectConfiguration>, nxJson: NxJsonConfiguration, tsConfig: {
 compilerOptions: {
 paths: {
package/src/project-graph/nx-deps-cache.js
CHANGED
@@ -1,9 +1,10 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.nxFileMap = exports.nxProjectGraph = void 0;
+exports.nxSourceMaps = exports.nxFileMap = exports.nxProjectGraph = void 0;
 exports.ensureCacheDirectory = ensureCacheDirectory;
 exports.readFileMapCache = readFileMapCache;
 exports.readProjectGraphCache = readProjectGraphCache;
+exports.readSourceMapsCache = readSourceMapsCache;
 exports.createProjectFileMapCache = createProjectFileMapCache;
 exports.writeCache = writeCache;
 exports.shouldRecomputeWholeGraph = shouldRecomputeWholeGraph;
@@ -14,8 +15,10 @@ const perf_hooks_1 = require("perf_hooks");
 const cache_directory_1 = require("../utils/cache-directory");
 const fileutils_1 = require("../utils/fileutils");
 const versions_1 = require("../utils/versions");
+const error_types_1 = require("./error-types");
 exports.nxProjectGraph = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'project-graph.json');
 exports.nxFileMap = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'file-map.json');
+exports.nxSourceMaps = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'source-maps.json');
 function ensureCacheDirectory() {
 try {
 if (!(0, node_fs_1.existsSync)(cache_directory_1.workspaceDataDirectory)) {
@@ -55,21 +58,62 @@ function readFileMapCache() {
 perf_hooks_1.performance.measure('read cache', 'read cache:start', 'read cache:end');
 return data ?? null;
 }
-function readProjectGraphCache() {
+function readProjectGraphCache(minimumComputedAt) {
 perf_hooks_1.performance.mark('read project-graph:start');
 ensureCacheDirectory();
-let data = null;
 try {
 if ((0, fileutils_1.fileExists)(exports.nxProjectGraph)) {
-
+const { computedAt, errors, ...projectGraphCache } = (0, fileutils_1.readJsonFile)(exports.nxProjectGraph);
+if (minimumComputedAt &&
+(!computedAt || computedAt < minimumComputedAt)) {
+throw new error_types_1.StaleProjectGraphCacheError();
+}
+if (errors && errors.length > 0) {
+if (!minimumComputedAt) {
+// If you didn't pass minimum computed at, we do not know if
+// the errors on the cached graph would be relevant to what you
+// are running. Prior to adding error handling here, the graph
+// would not have been written to the cache. As such, this matches
+// existing behavior of the public API.
+return null;
+}
+throw new error_types_1.ProjectGraphError(errors, projectGraphCache, readSourceMapsCache());
+}
+return projectGraphCache;
+}
+else {
+return null;
 }
 }
 catch (error) {
+if (error instanceof error_types_1.StaleProjectGraphCacheError ||
+error instanceof error_types_1.ProjectGraphError) {
+throw error;
+}
 console.log(`Error reading '${exports.nxProjectGraph}'. Continue the process without the cache.`);
 console.log(error);
+return null;
+}
+finally {
+perf_hooks_1.performance.mark('read project-graph:end');
+perf_hooks_1.performance.measure('read cache', 'read project-graph:start', 'read project-graph:end');
+}
+}
+function readSourceMapsCache() {
+perf_hooks_1.performance.mark('read source-maps:start');
+ensureCacheDirectory();
+let data = null;
+try {
+if ((0, fileutils_1.fileExists)(exports.nxSourceMaps)) {
+data = (0, fileutils_1.readJsonFile)(exports.nxSourceMaps);
+}
+}
+catch (error) {
+console.log(`Error reading '${exports.nxSourceMaps}'. Continue the process without the cache.`);
+console.log(error);
 }
-perf_hooks_1.performance.mark('read
-perf_hooks_1.performance.measure('read cache', 'read
+perf_hooks_1.performance.mark('read source-maps:end');
+perf_hooks_1.performance.measure('read cache', 'read source-maps:start', 'read source-maps:end');
 return data ?? null;
 }
 function createProjectFileMapCache(nxJson, packageJsonDeps, fileMap, tsConfig) {
@@ -85,7 +129,7 @@ function createProjectFileMapCache(nxJson, packageJsonDeps, fileMap, tsConfig) {
 };
 return newValue;
 }
-function writeCache(cache, projectGraph) {
+function writeCache(cache, projectGraph, sourceMaps, errors) {
 perf_hooks_1.performance.mark('write cache:start');
 let retry = 1;
 let done = false;
@@ -98,11 +142,18 @@ function writeCache(cache, projectGraph) {
 const unique = (Math.random().toString(16) + '0000000').slice(2, 10);
 const tmpProjectGraphPath = `${exports.nxProjectGraph}~${unique}`;
 const tmpFileMapPath = `${exports.nxFileMap}~${unique}`;
+const tmpSourceMapPath = `${exports.nxSourceMaps}~${unique}`;
 try {
-(0, fileutils_1.writeJsonFile)(tmpProjectGraphPath,
+(0, fileutils_1.writeJsonFile)(tmpProjectGraphPath, {
+...projectGraph,
+errors,
+computedAt: Date.now(),
+});
 (0, node_fs_1.renameSync)(tmpProjectGraphPath, exports.nxProjectGraph);
 (0, fileutils_1.writeJsonFile)(tmpFileMapPath, cache);
 (0, node_fs_1.renameSync)(tmpFileMapPath, exports.nxFileMap);
+(0, fileutils_1.writeJsonFile)(tmpSourceMapPath, sourceMaps);
+(0, node_fs_1.renameSync)(tmpSourceMapPath, exports.nxSourceMaps);
 done = true;
 }
 catch (err) {
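Taken together, writeCache now persists the graph with two extra top-level fields and writes source maps to their own file, and readProjectGraphCache strips those fields back off on read. A rough sketch of the resulting on-disk shapes under the workspace data directory (field names come from the diff; the type import paths are assumptions):

import type { ProjectGraph } from 'nx/src/config/project-graph';                                       // assumed deep-import path
import type { ConfigurationSourceMaps } from 'nx/src/project-graph/utils/project-configuration-utils'; // assumed deep-import path

// project-graph.json: the serialized graph plus bookkeeping added by writeCache
type CachedProjectGraphFile = ProjectGraph & {
  computedAt: number;   // Date.now() at write time; compared against minimumComputedAt on read
  errors: unknown[];    // ProjectGraphErrorTypes[]; a non-empty list makes
                        // readProjectGraphCache(minimumComputedAt) throw ProjectGraphError
};

// source-maps.json: the ConfigurationSourceMaps object passed to writeCache, written verbatim
type CachedSourceMapsFile = ConfigurationSourceMaps;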
package/src/project-graph/project-graph.d.ts
CHANGED
@@ -2,9 +2,11 @@ import { ProjectGraph } from '../config/project-graph';
 import { ProjectConfiguration, ProjectsConfigurations } from '../config/workspace-json-project-json';
 /**
 * Synchronously reads the latest cached copy of the workspace's ProjectGraph.
+*
+* @param {number} [minimumComputedAt] - The minimum timestamp that the cached ProjectGraph must have been computed at.
 * @throws {Error} if there is no cached ProjectGraph to read from
 */
-export declare function readCachedProjectGraph(): ProjectGraph;
+export declare function readCachedProjectGraph(minimumComputedAt?: number): ProjectGraph;
 export declare function readCachedProjectConfiguration(projectName: string): ProjectConfiguration;
 /**
 * Get the {@link ProjectsConfigurations} from the {@link ProjectGraph}
package/src/project-graph/project-graph.js
CHANGED
@@ -21,12 +21,18 @@ const nx_deps_cache_1 = require("./nx-deps-cache");
 const retrieve_workspace_files_1 = require("./utils/retrieve-workspace-files");
 const get_plugins_1 = require("./plugins/get-plugins");
 const logger_1 = require("../utils/logger");
+const native_1 = require("../native");
+const path_1 = require("path");
+const cache_directory_1 = require("../utils/cache-directory");
+const delayed_spinner_1 = require("../utils/delayed-spinner");
 /**
 * Synchronously reads the latest cached copy of the workspace's ProjectGraph.
+*
+* @param {number} [minimumComputedAt] - The minimum timestamp that the cached ProjectGraph must have been computed at.
 * @throws {Error} if there is no cached ProjectGraph to read from
 */
-function readCachedProjectGraph() {
-const projectGraphCache = (0, nx_deps_cache_1.readProjectGraphCache)();
+function readCachedProjectGraph(minimumComputedAt) {
+const projectGraphCache = (0, nx_deps_cache_1.readProjectGraphCache)(minimumComputedAt);
 if (!projectGraphCache) {
 const angularSpecificError = (0, fileutils_1.fileExists)(`${workspace_root_1.workspaceRoot}/angular.json`)
 ? (0, strip_indents_1.stripIndents) `
@@ -120,13 +126,13 @@ async function buildProjectGraphAndSourceMapsWithoutDaemon() {
 ...(projectConfigurationsError?.errors ?? []),
 ...(projectGraphError?.errors ?? []),
 ];
+if (cacheEnabled) {
+(0, nx_deps_cache_1.writeCache)(projectFileMapCache, projectGraph, sourceMaps, errors);
+}
 if (errors.length > 0) {
 throw new error_types_1.ProjectGraphError(errors, projectGraph, sourceMaps);
 }
 else {
-if (cacheEnabled) {
-(0, nx_deps_cache_1.writeCache)(projectFileMapCache, projectGraph);
-}
 return { projectGraph, sourceMaps };
 }
 }
@@ -159,6 +165,16 @@ function handleProjectGraphError(opts, e) {
 throw e;
 }
 }
+async function readCachedGraphAndHydrateFileMap(minimumComputedAt) {
+const graph = readCachedProjectGraph(minimumComputedAt);
+const projectRootMap = Object.fromEntries(Object.entries(graph.nodes).map(([project, { data }]) => [
+data.root,
+project,
+]));
+const { allWorkspaceFiles, fileMap, rustReferences } = await (0, retrieve_workspace_files_1.retrieveWorkspaceFiles)(workspace_root_1.workspaceRoot, projectRootMap);
+(0, build_project_graph_1.hydrateFileMap)(fileMap, allWorkspaceFiles, rustReferences);
+return graph;
+}
 /**
 * Computes and returns a ProjectGraph.
 *
@@ -186,17 +202,14 @@ async function createProjectGraphAsync(opts = {
 }) {
 if (process.env.NX_FORCE_REUSE_CACHED_GRAPH === 'true') {
 try {
-const graph = readCachedProjectGraph();
-const projectRootMap = Object.fromEntries(Object.entries(graph.nodes).map(([project, { data }]) => [
-data.root,
-project,
-]));
-const { allWorkspaceFiles, fileMap, rustReferences } = await (0, retrieve_workspace_files_1.retrieveWorkspaceFiles)(workspace_root_1.workspaceRoot, projectRootMap);
-(0, build_project_graph_1.hydrateFileMap)(fileMap, allWorkspaceFiles, rustReferences);
-return graph;
 // If no cached graph is found, we will fall through to the normal flow
+const graph = await readCachedGraphAndHydrateFileMap();
+return graph;
 }
 catch (e) {
+if (e instanceof error_types_1.ProjectGraphError) {
+throw e;
+}
 logger_1.logger.verbose('Unable to use cached project graph', e);
 }
 }
@@ -209,6 +222,48 @@ async function createProjectGraphAndSourceMapsAsync(opts = {
 }) {
 perf_hooks_1.performance.mark('create-project-graph-async:start');
 if (!client_1.daemonClient.enabled()) {
+const lock = !native_1.IS_WASM
+? new native_1.FileLock((0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'project-graph.lock'))
+: null;
+let locked = lock?.locked;
+while (locked) {
+logger_1.logger.verbose('Waiting for graph construction in another process to complete');
+const spinner = new delayed_spinner_1.DelayedSpinner('Waiting for graph construction in another process to complete');
+const start = Date.now();
+await lock.wait();
+spinner.cleanup();
+// Note: This will currently throw if any of the caches are missing...
+// It would be nice if one of the processes that was waiting for the lock
+// could pick up the slack and build the graph if it's missing, but
+// we wouldn't want either of the below to happen:
+// - All of the waiting processes to build the graph
+// - Even one of the processes building the graph on a legitimate error
+try {
+// Ensuring that computedAt was after this process started
+// waiting for the graph to complete, means that the graph
+// was computed by the process was already working.
+const graph = await readCachedGraphAndHydrateFileMap(start);
+const sourceMaps = (0, nx_deps_cache_1.readSourceMapsCache)();
+if (!sourceMaps) {
+throw new Error('The project graph was computed in another process, but the source maps are missing.');
+}
+return {
+projectGraph: graph,
+sourceMaps,
+};
+}
+catch (e) {
+// If the error is that the cached graph is stale after unlock,
+// the process that was working on the graph must have been canceled,
+// so we will fall through to the normal flow to ensure
+// its created by one of the processes that was waiting
+if (!(e instanceof error_types_1.StaleProjectGraphCacheError)) {
+throw e;
+}
+}
+locked = lock.check();
+}
+lock?.lock();
 try {
 const res = await buildProjectGraphAndSourceMapsWithoutDaemon();
 perf_hooks_1.performance.measure('create-project-graph-async >> retrieve-project-configurations', 'retrieve-project-configurations:start', 'retrieve-project-configurations:end');
@@ -221,6 +276,9 @@ async function createProjectGraphAndSourceMapsAsync(opts = {
 catch (e) {
 handleProjectGraphError(opts, e);
 }
+finally {
+lock.unlock();
+}
 }
 else {
 try {
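The non-daemon branch above reduces to a wait-and-recheck loop: a process that finds the lock held waits, accepts only a graph computed after it started waiting, and otherwise tries to become the builder itself. A condensed sketch of that pattern using the FileLock API from index.d.ts (the readFresh/rebuild callbacks are placeholders, not nx functions):

import { FileLock } from 'nx/src/native'; // internal binding; path assumed

async function buildOrReuse<T>(
  lockFilePath: string,
  readFresh: (since: number) => Promise<T>, // e.g. read a cache written after `since`
  rebuild: () => Promise<T>
): Promise<T> {
  const lock = new FileLock(lockFilePath);
  let locked = lock.locked;
  while (locked) {
    const start = Date.now();
    await lock.wait();                // another process is building; wait for it to finish
    try {
      return await readFresh(start);  // accept only results newer than when we began waiting
    } catch {
      // nx only swallows StaleProjectGraphCacheError here (the builder was likely cancelled)
    }
    locked = lock.check();            // lock may have been taken again; otherwise build it ourselves
  }
  lock.lock();
  try {
    return await rebuild();
  } finally {
    lock.unlock();
  }
}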