nx 19.6.4 → 19.6.6
- package/package.json +12 -12
- package/release/changelog-renderer/index.d.ts +1 -1
- package/release/changelog-renderer/index.js +46 -11
- package/schemas/nx-schema.json +5 -0
- package/src/command-line/import/command-object.js +4 -0
- package/src/command-line/import/import.d.ts +4 -0
- package/src/command-line/import/import.js +147 -12
- package/src/command-line/import/utils/prepare-source-repo.d.ts +1 -1
- package/src/command-line/import/utils/prepare-source-repo.js +31 -85
- package/src/command-line/release/changelog.js +53 -12
- package/src/command-line/release/command-object.d.ts +1 -0
- package/src/command-line/release/command-object.js +6 -1
- package/src/command-line/release/config/version-plans.d.ts +14 -1
- package/src/command-line/release/config/version-plans.js +33 -1
- package/src/command-line/release/plan-check.js +8 -61
- package/src/command-line/release/plan.js +131 -37
- package/src/command-line/release/release.js +1 -1
- package/src/command-line/release/utils/get-touched-projects-for-group.d.ts +7 -0
- package/src/command-line/release/utils/get-touched-projects-for-group.js +78 -0
- package/src/command-line/release/utils/git.d.ts +1 -1
- package/src/command-line/release/utils/git.js +46 -19
- package/src/command-line/release/version.js +1 -1
- package/src/command-line/yargs-utils/shared-options.d.ts +2 -1
- package/src/command-line/yargs-utils/shared-options.js +5 -0
- package/src/core/graph/main.js +1 -1
- package/src/daemon/server/sync-generators.d.ts +4 -0
- package/src/daemon/server/sync-generators.js +172 -52
- package/src/migrations/update-15-0-0/prefix-outputs.js +3 -18
- package/src/native/index.d.ts +4 -0
- package/src/native/native-bindings.js +2 -0
- package/src/native/nx.wasi-browser.js +42 -36
- package/src/native/nx.wasi.cjs +40 -36
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/project-graph/plugins/isolation/plugin-pool.js +1 -1
- package/src/tasks-runner/run-command.js +4 -1
- package/src/tasks-runner/utils.d.ts +1 -8
- package/src/tasks-runner/utils.js +9 -12
- package/src/utils/command-line-utils.d.ts +1 -0
- package/src/utils/git-utils.d.ts +7 -10
- package/src/utils/git-utils.js +61 -44
- package/src/utils/sync-generators.d.ts +2 -2
- package/src/utils/squash.d.ts +0 -1
- package/src/utils/squash.js +0 -12
package/src/daemon/server/sync-generators.d.ts
CHANGED
@@ -4,3 +4,7 @@ export declare function getCachedSyncGeneratorChanges(generators: string[]): Pro
 export declare function flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;
 export declare function collectAndScheduleSyncGenerators(projectGraph: ProjectGraph): void;
 export declare function getCachedRegisteredSyncGenerators(): Promise<string[]>;
+/**
+ * @internal
+ */
+export declare function _getConflictingGeneratorGroups(results: SyncGeneratorChangesResult[]): string[][];
package/src/daemon/server/sync-generators.js
CHANGED
@@ -4,6 +4,7 @@ exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
 exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
 exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
 exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+exports._getConflictingGeneratorGroups = _getConflictingGeneratorGroups;
 const nx_json_1 = require("../../config/nx-json");
 const tree_1 = require("../../generators/tree");
 const file_hasher_1 = require("../../hasher/file-hasher");
@@ -24,7 +25,6 @@ let storedNxJsonHash;
 const log = (...messageParts) => {
     logger_1.serverLogger.log('[SYNC]:', ...messageParts);
 };
-// TODO(leo): check conflicts and reuse the Tree where possible
 async function getCachedSyncGeneratorChanges(generators) {
     try {
         log('get sync generators changes on demand', generators);
@@ -36,51 +36,15 @@ async function getCachedSyncGeneratorChanges(generators) {
         }
         // reset the wait time
         waitPeriod = 100;
-        let projects;
-        let errored = false;
-        const getProjectsConfigurations = async () => {
-            if (projects || errored) {
-                return projects;
-            }
-            const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
-            projects = projectGraph
-                ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
-                : null;
-            errored = error !== undefined;
-            return projects;
-        };
-        return (await Promise.all(generators.map(async (generator) => {
-            if (scheduledGenerators.has(generator) ||
-                !syncGeneratorsCacheResultPromises.has(generator)) {
-                // it's scheduled to run (there are pending changes to process) or
-                // it's not scheduled and there's no cached result, so run it
-                const projects = await getProjectsConfigurations();
-                if (projects) {
-                    log(generator, 'already scheduled or not cached, running it now');
-                    runGenerator(generator, projects);
-                }
-                else {
-                    log(generator, 'already scheduled or not cached, project graph errored');
-                    /**
-                     * This should never happen. This is invoked imperatively, and by
-                     * the time it is invoked, the project graph would have already
-                     * been requested. If it errored, it would have been reported and
-                     * this wouldn't have been invoked. We handle it just in case.
-                     *
-                     * Since the project graph would be reported by the relevant
-                     * handlers separately, we just ignore the error, don't cache
-                     * any result and return an empty result, the next time this is
-                     * invoked the process will repeat until it eventually recovers
-                     * when the project graph is fixed.
-                     */
-                    return Promise.resolve({ changes: [], generatorName: generator });
-                }
-            }
-            else {
-                log(generator, 'not scheduled and has cached result, returning cached result');
-            }
-            return syncGeneratorsCacheResultPromises.get(generator);
-        }))).flat();
+        const results = await getFromCacheOrRunGenerators(generators);
+        const conflicts = _getConflictingGeneratorGroups(results);
+        if (!conflicts.length) {
+            // there are no conflicts
+            return results;
+        }
+        // there are conflicts, so we need to re-run the conflicting generators
+        // using the same tree
+        return await processConflictingGenerators(conflicts, results);
     }
     catch (e) {
         console.error(e);
@@ -126,7 +90,7 @@ function collectAndScheduleSyncGenerators(projectGraph) {
         }
         const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
         for (const generator of scheduledGenerators) {
-            runGenerator(generator, projects);
+            syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
         }
         await Promise.all(syncGeneratorsCacheResultPromises.values());
     }, waitPeriod);
@@ -143,6 +107,163 @@ async function getCachedRegisteredSyncGenerators() {
     }
     return [...registeredSyncGenerators];
 }
+async function getFromCacheOrRunGenerators(generators) {
+    let projects;
+    let errored = false;
+    const getProjectsConfigurations = async () => {
+        if (projects || errored) {
+            return projects;
+        }
+        const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+        projects = projectGraph
+            ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+            : null;
+        errored = error !== undefined;
+        return projects;
+    };
+    return (await Promise.all(generators.map(async (generator) => {
+        if (scheduledGenerators.has(generator) ||
+            !syncGeneratorsCacheResultPromises.has(generator)) {
+            // it's scheduled to run (there are pending changes to process) or
+            // it's not scheduled and there's no cached result, so run it
+            const projects = await getProjectsConfigurations();
+            if (projects) {
+                log(generator, 'already scheduled or not cached, running it now');
+                syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
+            }
+            else {
+                log(generator, 'already scheduled or not cached, project graph errored');
+                /**
+                 * This should never happen. This is invoked imperatively, and by
+                 * the time it is invoked, the project graph would have already
+                 * been requested. If it errored, it would have been reported and
+                 * this wouldn't have been invoked. We handle it just in case.
+                 *
+                 * Since the project graph would be reported by the relevant
+                 * handlers separately, we just ignore the error, don't cache
+                 * any result and return an empty result, the next time this is
+                 * invoked the process will repeat until it eventually recovers
+                 * when the project graph is fixed.
+                 */
+                return Promise.resolve({ changes: [], generatorName: generator });
+            }
+        }
+        else {
+            log(generator, 'not scheduled and has cached result, returning cached result');
+        }
+        return syncGeneratorsCacheResultPromises.get(generator);
+    }))).flat();
+}
+async function runConflictingGenerators(tree, generators) {
+    const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+    const projects = projectGraph
+        ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+        : null;
+    if (!projects) {
+        /**
+         * This should never happen. This is invoked imperatively, and by
+         * the time it is invoked, the project graph would have already
+         * been requested. If it errored, it would have been reported and
+         * this wouldn't have been invoked. We handle it just in case.
+         *
+         * Since the project graph would be reported by the relevant
+         * handlers separately, we just ignore the error.
+         */
+        return generators.map((generator) => ({
+            changes: [],
+            generatorName: generator,
+        }));
+    }
+    // we need to run conflicting generators sequentially because they use the same tree
+    const results = [];
+    for (const generator of generators) {
+        log(generator, 'running it now');
+        results.push(await runGenerator(generator, projects, tree));
+    }
+    return results;
+}
+async function processConflictingGenerators(conflicts, initialResults) {
+    const conflictRunResults = (await Promise.all(conflicts.map((generators) => {
+        const [firstGenerator, ...generatorsToRun] = generators;
+        // it must exists because the conflicts were identified from the initial results
+        const firstGeneratorResult = initialResults.find((r) => r.generatorName === firstGenerator);
+        const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generators ${generators.join(',')}`);
+        // pre-apply the changes from the first generator to avoid running it
+        for (const change of firstGeneratorResult.changes) {
+            if (change.type === 'CREATE' || change.type === 'UPDATE') {
+                tree.write(change.path, change.content, change.options);
+            }
+            else if (change.type === 'DELETE') {
+                tree.delete(change.path);
+            }
+        }
+        /**
+         * We don't cache the results of conflicting generators because they
+         * use the same tree, so some files might contain results from multiple
+         * generators and we don't have guarantees that the same combination of
+         * generators will run together.
+         */
+        return runConflictingGenerators(tree, generatorsToRun);
+    }))).flat();
+    /**
+     * The order of the results from the re-run generators is important because
+     * the last result from a group of conflicting generators will contain the
+     * changes from the previous conflicting generators. So, instead of replacing
+     * in-place the initial results, we first add the results from the re-run
+     * generators, and then add the initial results that were not from a
+     * conflicting generator.
+     */
+    const results = [...conflictRunResults];
+    for (const result of initialResults) {
+        if (conflictRunResults.every((r) => r.generatorName !== result.generatorName)) {
+            // this result is not from a conflicting generator, so we add it to the
+            // results
+            results.push(result);
+        }
+    }
+    return results;
+}
+/**
+ * @internal
+ */
+function _getConflictingGeneratorGroups(results) {
+    const changedFileToGeneratorMap = new Map();
+    for (const result of results) {
+        for (const change of result.changes) {
+            if (!changedFileToGeneratorMap.has(change.path)) {
+                changedFileToGeneratorMap.set(change.path, new Set());
+            }
+            changedFileToGeneratorMap.get(change.path).add(result.generatorName);
+        }
+    }
+    const conflicts = [];
+    for (const generatorSet of changedFileToGeneratorMap.values()) {
+        if (generatorSet.size === 1) {
+            // no conflicts
+            continue;
+        }
+        if (conflicts.length === 0) {
+            // there are no conflicts yet, so we just add the first group
+            conflicts.push(new Set(generatorSet));
+            continue;
+        }
+        // identify if any of the current generator sets intersect with any of the
+        // existing conflict groups
+        const generatorsArray = Array.from(generatorSet);
+        const existingConflictGroup = conflicts.find((group) => generatorsArray.some((generator) => group.has(generator)));
+        if (existingConflictGroup) {
+            // there's an intersecting group, so we merge the two
+            for (const generator of generatorsArray) {
+                existingConflictGroup.add(generator);
+            }
+        }
+        else {
+            // there's no intersecting group, so we create a new one
+            conflicts.push(new Set(generatorsArray));
+        }
+    }
+    return conflicts.map((group) => Array.from(group));
+}
 function collectAllRegisteredSyncGenerators(projectGraph) {
     const projectGraphHash = hashProjectGraph(projectGraph);
     if (storedProjectGraphHash !== projectGraphHash) {
@@ -183,16 +304,15 @@ function collectAllRegisteredSyncGenerators(projectGraph) {
         }
     }
 }
-function runGenerator(generator, projects) {
+function runGenerator(generator, projects, tree) {
    log('running scheduled generator', generator);
    // remove it from the scheduled set
    scheduledGenerators.delete(generator);
-
-
-    syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+    tree ??= new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+    return (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
         log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
         return result;
-    })
+    });
 }
 function hashProjectGraph(projectGraph) {
     const stringifiedProjects = Object.entries(projectGraph.nodes)
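The grouping behavior of the new _getConflictingGeneratorGroups helper above can be summarized with a standalone sketch (illustration only, not code shipped in the package): generators whose change sets touch at least one common file end up in the same group, and groups that share a generator are merged.

// Illustration only: mirrors the grouping logic added in sync-generators.js above.
function getConflictingGeneratorGroups(results) {
    const fileToGenerators = new Map();
    for (const { generatorName, changes } of results) {
        for (const { path } of changes) {
            if (!fileToGenerators.has(path)) {
                fileToGenerators.set(path, new Set());
            }
            fileToGenerators.get(path).add(generatorName);
        }
    }
    const groups = [];
    for (const generators of fileToGenerators.values()) {
        if (generators.size === 1) {
            continue; // a single writer of a file is not a conflict
        }
        const existing = groups.find((group) => [...generators].some((g) => group.has(g)));
        if (existing) {
            for (const g of generators) {
                existing.add(g);
            }
        }
        else {
            groups.push(new Set(generators));
        }
    }
    return groups.map((group) => [...group]);
}

console.log(getConflictingGeneratorGroups([
    { generatorName: 'a', changes: [{ path: 'tsconfig.base.json' }] },
    { generatorName: 'b', changes: [{ path: 'tsconfig.base.json' }, { path: 'nx.json' }] },
    { generatorName: 'c', changes: [{ path: 'nx.json' }] },
    { generatorName: 'd', changes: [{ path: 'package.json' }] },
]));
// -> [ [ 'a', 'b', 'c' ] ] — 'd' is alone on its file, so it is never re-run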
package/src/migrations/update-15-0-0/prefix-outputs.js
CHANGED
@@ -18,12 +18,7 @@ async function default_1(tree) {
             if (!target.outputs) {
                 continue;
             }
-            try {
-                (0, utils_1.validateOutputs)(target.outputs);
-            }
-            catch (e) {
-                target.outputs = (0, utils_1.transformLegacyOutputs)(project.root, e);
-            }
+            target.outputs = (0, utils_1.transformLegacyOutputs)(project.root, target.outputs);
         }
         try {
             (0, project_configuration_1.updateProjectConfiguration)(tree, projectName, project);
@@ -33,12 +28,7 @@ async function default_1(tree) {
             (0, json_1.updateJson)(tree, (0, path_1.join)(project.root, 'package.json'), (json) => {
                 for (const target of Object.values(json.nx?.targets ?? {})) {
                     if (target.outputs) {
-                        try {
-                            (0, utils_1.validateOutputs)(target.outputs);
-                        }
-                        catch (e) {
-                            target.outputs = (0, utils_1.transformLegacyOutputs)(project.root, e);
-                        }
+                        target.outputs = (0, utils_1.transformLegacyOutputs)(project.root, target.outputs);
                     }
                 }
                 return json;
@@ -51,12 +41,7 @@ async function default_1(tree) {
         if (!target.outputs) {
             continue;
         }
-        try {
-            (0, utils_1.validateOutputs)(target.outputs);
-        }
-        catch (e) {
-            target.outputs = (0, utils_1.transformLegacyOutputs)('{projectRoot}', e);
-        }
+        target.outputs = (0, utils_1.transformLegacyOutputs)('{projectRoot}', target.outputs);
     }
     (0, nx_json_1.updateNxJson)(tree, nxJson);
 }
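The three migration hunks above swap the old validate-and-catch flow for a direct call to the reworked transformLegacyOutputs. A rough sketch of the rewrite this implies for a target's outputs, assuming the helper keeps its historical behavior of prefixing un-tokenized paths with {workspaceRoot} (the actual rules now live in the native getTransformableOutputs and in tasks-runner/utils.js further below):

// Illustration only; the real transformation is delegated to the native bindings.
const before = ['dist/apps/my-app', '{options.outputFile}', '!dist/apps/my-app/tmp'];
const after = before.map((output) => {
    const negated = output.startsWith('!');
    const path = negated ? output.slice(1) : output;
    if (/^{[\s\S]+}/.test(path)) {
        return output; // already uses an interpolation token, left untouched
    }
    return `${negated ? '!' : ''}{workspaceRoot}/${path}`;
});
console.log(after);
// assumed result: ['{workspaceRoot}/dist/apps/my-app', '{options.outputFile}', '!{workspaceRoot}/dist/apps/my-app/tmp']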
package/src/native/index.d.ts
CHANGED
@@ -124,6 +124,8 @@ export declare export function getBinaryTarget(): string
  */
 export declare export function getFilesForOutputs(directory: string, entries: Array<string>): Array<string>
 
+export declare export function getTransformableOutputs(outputs: Array<string>): Array<string>
+
 export declare export function hashArray(input: Array<string>): string
 
 export interface HashDetails {
@@ -222,6 +224,8 @@ export interface UpdatedWorkspaceFiles {
   externalReferences: NxWorkspaceFilesExternals
 }
 
+export declare export function validateOutputs(outputs: Array<string>): void
+
 export interface WatchEvent {
   path: string
   type: EventType
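Both new declarations are re-exported by the JS bindings in the next hunks. A hedged usage sketch follows; the deep require path and the exact throwing/return behavior are assumptions based on how tasks-runner/utils.js consumes these functions later in this diff:

// Sketch only — not from the package. Assumes nx/src/native is requireable and that
// validateOutputs throws on outputs lacking an interpolation token, while
// getTransformableOutputs returns the legacy entries that can still be rewritten.
const { validateOutputs, getTransformableOutputs } = require('nx/src/native');

validateOutputs(['{workspaceRoot}/dist/apps/my-app']); // expected: no throw
console.log(getTransformableOutputs(['dist/apps/my-app', '{projectRoot}/out']));
// expected (assumed): ['dist/apps/my-app']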
package/src/native/native-bindings.js
CHANGED
@@ -374,10 +374,12 @@ module.exports.expandOutputs = nativeBinding.expandOutputs
 module.exports.findImports = nativeBinding.findImports
 module.exports.getBinaryTarget = nativeBinding.getBinaryTarget
 module.exports.getFilesForOutputs = nativeBinding.getFilesForOutputs
+module.exports.getTransformableOutputs = nativeBinding.getTransformableOutputs
 module.exports.hashArray = nativeBinding.hashArray
 module.exports.hashFile = nativeBinding.hashFile
 module.exports.IS_WASM = nativeBinding.IS_WASM
 module.exports.remove = nativeBinding.remove
 module.exports.testOnlyTransferFileMap = nativeBinding.testOnlyTransferFileMap
 module.exports.transferProjectGraph = nativeBinding.transferProjectGraph
+module.exports.validateOutputs = nativeBinding.validateOutputs
 module.exports.WorkspaceErrors = nativeBinding.WorkspaceErrors
package/src/native/nx.wasi-browser.js
CHANGED
@@ -14,7 +14,7 @@ const __wasi = new __WASI({
 const __emnapiContext = __emnapiGetDefaultContext()
 
 const __sharedMemory = new WebAssembly.Memory({
-  initial:
+  initial: 1024,
   maximum: 32768,
   shared: true,
 })
@@ -55,41 +55,44 @@ function __napi_rs_initialize_modules(__napiInstance) {
   __napiInstance.exports['__napi_register__get_files_for_outputs_1']?.()
   __napiInstance.exports['__napi_register__remove_2']?.()
   __napiInstance.exports['__napi_register__copy_3']?.()
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
+  __napiInstance.exports['__napi_register__validate_outputs_4']?.()
+  __napiInstance.exports['__napi_register__get_transformable_outputs_5']?.()
+  __napiInstance.exports['__napi_register__hash_array_6']?.()
+  __napiInstance.exports['__napi_register__hash_file_7']?.()
+  __napiInstance.exports['__napi_register__IS_WASM_8']?.()
+  __napiInstance.exports['__napi_register__get_binary_target_9']?.()
+  __napiInstance.exports['__napi_register__ImportResult_struct_10']?.()
+  __napiInstance.exports['__napi_register__find_imports_11']?.()
+  __napiInstance.exports['__napi_register__transfer_project_graph_12']?.()
+  __napiInstance.exports['__napi_register__ExternalNode_struct_13']?.()
+  __napiInstance.exports['__napi_register__Target_struct_14']?.()
+  __napiInstance.exports['__napi_register__Project_struct_15']?.()
+  __napiInstance.exports['__napi_register__ProjectGraph_struct_16']?.()
+  __napiInstance.exports['__napi_register__HashPlanner_struct_17']?.()
+  __napiInstance.exports['__napi_register__HashPlanner_impl_21']?.()
+  __napiInstance.exports['__napi_register__HashDetails_struct_22']?.()
+  __napiInstance.exports['__napi_register__HasherOptions_struct_23']?.()
+  __napiInstance.exports['__napi_register__TaskHasher_struct_24']?.()
+  __napiInstance.exports['__napi_register__TaskHasher_impl_27']?.()
+  __napiInstance.exports['__napi_register__Task_struct_28']?.()
+  __napiInstance.exports['__napi_register__TaskTarget_struct_29']?.()
+  __napiInstance.exports['__napi_register__TaskGraph_struct_30']?.()
+  __napiInstance.exports['__napi_register__FileData_struct_31']?.()
+  __napiInstance.exports['__napi_register__InputsInput_struct_32']?.()
+  __napiInstance.exports['__napi_register__FileSetInput_struct_33']?.()
+  __napiInstance.exports['__napi_register__RuntimeInput_struct_34']?.()
+  __napiInstance.exports['__napi_register__EnvironmentInput_struct_35']?.()
+  __napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_36']?.()
+  __napiInstance.exports['__napi_register__DepsOutputsInput_struct_37']?.()
+  __napiInstance.exports['__napi_register__NxJson_struct_38']?.()
+  __napiInstance.exports['__napi_register__WorkspaceContext_struct_39']?.()
+  __napiInstance.exports['__napi_register__WorkspaceContext_impl_48']?.()
+  __napiInstance.exports['__napi_register__WorkspaceErrors_49']?.()
+  __napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_50']?.()
+  __napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_51']?.()
+  __napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_52']?.()
+  __napiInstance.exports['__napi_register__FileMap_struct_53']?.()
+  __napiInstance.exports['__napi_register____test_only_transfer_file_map_54']?.()
 }
 export const HashPlanner = __napiModule.exports.HashPlanner
 export const ImportResult = __napiModule.exports.ImportResult
@@ -98,11 +101,14 @@ export const WorkspaceContext = __napiModule.exports.WorkspaceContext
 export const copy = __napiModule.exports.copy
 export const expandOutputs = __napiModule.exports.expandOutputs
 export const findImports = __napiModule.exports.findImports
+export const getBinaryTarget = __napiModule.exports.getBinaryTarget
 export const getFilesForOutputs = __napiModule.exports.getFilesForOutputs
+export const getTransformableOutputs = __napiModule.exports.getTransformableOutputs
 export const hashArray = __napiModule.exports.hashArray
 export const hashFile = __napiModule.exports.hashFile
 export const IS_WASM = __napiModule.exports.IS_WASM
 export const remove = __napiModule.exports.remove
 export const testOnlyTransferFileMap = __napiModule.exports.testOnlyTransferFileMap
 export const transferProjectGraph = __napiModule.exports.transferProjectGraph
+export const validateOutputs = __napiModule.exports.validateOutputs
 export const WorkspaceErrors = __napiModule.exports.WorkspaceErrors
package/src/native/nx.wasi.cjs
CHANGED
@@ -86,42 +86,44 @@ function __napi_rs_initialize_modules(__napiInstance) {
   __napiInstance.exports['__napi_register__get_files_for_outputs_1']?.()
   __napiInstance.exports['__napi_register__remove_2']?.()
   __napiInstance.exports['__napi_register__copy_3']?.()
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
-  __napiInstance.exports['
+  __napiInstance.exports['__napi_register__validate_outputs_4']?.()
+  __napiInstance.exports['__napi_register__get_transformable_outputs_5']?.()
+  __napiInstance.exports['__napi_register__hash_array_6']?.()
+  __napiInstance.exports['__napi_register__hash_file_7']?.()
+  __napiInstance.exports['__napi_register__IS_WASM_8']?.()
+  __napiInstance.exports['__napi_register__get_binary_target_9']?.()
+  __napiInstance.exports['__napi_register__ImportResult_struct_10']?.()
+  __napiInstance.exports['__napi_register__find_imports_11']?.()
+  __napiInstance.exports['__napi_register__transfer_project_graph_12']?.()
+  __napiInstance.exports['__napi_register__ExternalNode_struct_13']?.()
+  __napiInstance.exports['__napi_register__Target_struct_14']?.()
+  __napiInstance.exports['__napi_register__Project_struct_15']?.()
+  __napiInstance.exports['__napi_register__ProjectGraph_struct_16']?.()
+  __napiInstance.exports['__napi_register__HashPlanner_struct_17']?.()
+  __napiInstance.exports['__napi_register__HashPlanner_impl_21']?.()
+  __napiInstance.exports['__napi_register__HashDetails_struct_22']?.()
+  __napiInstance.exports['__napi_register__HasherOptions_struct_23']?.()
+  __napiInstance.exports['__napi_register__TaskHasher_struct_24']?.()
+  __napiInstance.exports['__napi_register__TaskHasher_impl_27']?.()
+  __napiInstance.exports['__napi_register__Task_struct_28']?.()
+  __napiInstance.exports['__napi_register__TaskTarget_struct_29']?.()
+  __napiInstance.exports['__napi_register__TaskGraph_struct_30']?.()
+  __napiInstance.exports['__napi_register__FileData_struct_31']?.()
+  __napiInstance.exports['__napi_register__InputsInput_struct_32']?.()
+  __napiInstance.exports['__napi_register__FileSetInput_struct_33']?.()
+  __napiInstance.exports['__napi_register__RuntimeInput_struct_34']?.()
+  __napiInstance.exports['__napi_register__EnvironmentInput_struct_35']?.()
+  __napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_36']?.()
+  __napiInstance.exports['__napi_register__DepsOutputsInput_struct_37']?.()
+  __napiInstance.exports['__napi_register__NxJson_struct_38']?.()
+  __napiInstance.exports['__napi_register__WorkspaceContext_struct_39']?.()
+  __napiInstance.exports['__napi_register__WorkspaceContext_impl_48']?.()
+  __napiInstance.exports['__napi_register__WorkspaceErrors_49']?.()
+  __napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_50']?.()
+  __napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_51']?.()
+  __napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_52']?.()
+  __napiInstance.exports['__napi_register__FileMap_struct_53']?.()
+  __napiInstance.exports['__napi_register____test_only_transfer_file_map_54']?.()
 }
 module.exports.HashPlanner = __napiModule.exports.HashPlanner
 module.exports.ImportResult = __napiModule.exports.ImportResult
@@ -132,10 +134,12 @@ module.exports.expandOutputs = __napiModule.exports.expandOutputs
 module.exports.findImports = __napiModule.exports.findImports
 module.exports.getBinaryTarget = __napiModule.exports.getBinaryTarget
 module.exports.getFilesForOutputs = __napiModule.exports.getFilesForOutputs
+module.exports.getTransformableOutputs = __napiModule.exports.getTransformableOutputs
 module.exports.hashArray = __napiModule.exports.hashArray
 module.exports.hashFile = __napiModule.exports.hashFile
 module.exports.IS_WASM = __napiModule.exports.IS_WASM
 module.exports.remove = __napiModule.exports.remove
 module.exports.testOnlyTransferFileMap = __napiModule.exports.testOnlyTransferFileMap
 module.exports.transferProjectGraph = __napiModule.exports.transferProjectGraph
+module.exports.validateOutputs = __napiModule.exports.validateOutputs
 module.exports.WorkspaceErrors = __napiModule.exports.WorkspaceErrors
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file
package/src/project-graph/plugins/isolation/plugin-pool.js
CHANGED
@@ -273,7 +273,7 @@ async function startPluginWorker() {
                 socket,
             });
         }
-        else if (attempts >
+        else if (attempts > 10000) {
             // daemon fails to start, the process probably exited
             // we print the logs and exit the client
             reject('Failed to start plugin worker.');
package/src/tasks-runner/run-command.js
CHANGED
@@ -118,6 +118,9 @@ async function runCommand(projectsToRun, currentProjectGraph, { nxJson }, nxArgs
 }
 async function ensureWorkspaceIsInSyncAndGetGraphs(projectGraph, nxJson, projectNames, nxArgs, overrides, extraTargetDependencies, extraOptions) {
     let taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
+    if (nxArgs.skipSync) {
+        return { projectGraph, taskGraph };
+    }
     // collect unique syncGenerators from the tasks
     const uniqueSyncGenerators = new Set();
     for (const { target } of Object.values(taskGraph.tasks)) {
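The early return above is gated on a new nxArgs.skipSync value; together with the shared-options.js change listed at the top of this diff, that points to a new shared CLI option. A hypothetical sketch of how such a yargs option is typically declared (the actual name, description, and default in shared-options.js may differ):

// Hypothetical sketch only — the real definition lives in
// package/src/command-line/yargs-utils/shared-options.js and may be worded differently.
function withSkipSyncOption(yargs) {
    return yargs.option('skipSync', {
        type: 'boolean',
        description: 'Skip running sync generators before running tasks.',
    });
}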
@@ -141,7 +144,7 @@ async function ensureWorkspaceIsInSyncAndGetGraphs(projectGraph, nxJson, project
     }
     const outOfSyncTitle = 'The workspace is out of sync';
     const resultBodyLines = [...(0, sync_generators_1.syncGeneratorResultsToMessageLines)(results), ''];
-    const fixMessage = 'You can manually run `nx sync` to update your workspace or you can set `sync.applyChanges` to `true` in your `nx.json` to apply the changes automatically when running tasks.';
+    const fixMessage = 'You can manually run `nx sync` to update your workspace or you can set `sync.applyChanges` to `true` in your `nx.json` to apply the changes automatically when running tasks in interactive environments.';
     const willErrorOnCiMessage = 'Please note that this will be an error on CI.';
     if ((0, is_ci_1.isCI)() || !process.stdout.isTTY) {
         // If the user is running in CI or is running in a non-TTY environment we
package/src/tasks-runner/utils.d.ts
CHANGED
@@ -21,14 +21,8 @@ export declare function getOutputs(p: Record<string, ProjectGraphProjectNode>, t
 export declare function normalizeTargetDependencyWithStringProjects(dependencyConfig: TargetDependencyConfig): Omit<TargetDependencyConfig, 'projects'> & {
     projects: string[];
 };
-declare class InvalidOutputsError extends Error {
-    outputs: string[];
-    invalidOutputs: Set<string>;
-    constructor(outputs: string[], invalidOutputs: Set<string>);
-    private static createMessage;
-}
 export declare function validateOutputs(outputs: string[]): void;
-export declare function transformLegacyOutputs(projectRoot: string,
+export declare function transformLegacyOutputs(projectRoot: string, outputs: string[]): string[];
 /**
  * @deprecated Pass the target and overrides instead. This will be removed in v20.
  */
@@ -61,4 +55,3 @@ export declare function isCacheableTask(task: Task, options: {
     cacheableTargets?: string[] | null;
 }): boolean;
 export declare function unparse(options: Object): string[];
-export {};
package/src/tasks-runner/utils.js
CHANGED
@@ -37,6 +37,7 @@ const project_graph_1 = require("../project-graph/project-graph");
 const find_matching_projects_1 = require("../utils/find-matching-projects");
 const minimatch_1 = require("minimatch");
 const globs_1 = require("../utils/globs");
+const native_1 = require("../native");
 function getDependencyConfigs({ project, target }, extraTargetDependencies, projectGraph, allTargetNames) {
     const dependencyConfigs = (projectGraph.nodes[project].data?.targets[target]?.dependsOn ??
     // This is passed into `run-command` from programmatic invocations
@@ -177,19 +178,15 @@ function assertOutputsAreValidType(outputs) {
 }
 function validateOutputs(outputs) {
     assertOutputsAreValidType(outputs);
-
-    for (const output of outputs) {
-        if (!/^!?{[\s\S]+}/.test(output)) {
-            invalidOutputs.add(output);
-        }
-    }
-    if (invalidOutputs.size > 0) {
-        throw new InvalidOutputsError(outputs, invalidOutputs);
-    }
+    (0, native_1.validateOutputs)(outputs);
 }
-function transformLegacyOutputs(projectRoot,
-
-
+function transformLegacyOutputs(projectRoot, outputs) {
+    const transformableOutputs = new Set((0, native_1.getTransformableOutputs)(outputs));
+    if (transformableOutputs.size === 0) {
+        return outputs;
+    }
+    return outputs.map((output) => {
+        if (!transformableOutputs.has(output)) {
             return output;
         }
         let [isNegated, outputPath] = output.startsWith('!')
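The final hunk is cut off in this view; for orientation, a hedged sketch of how callers now use the reworked helpers (the signatures come from the utils.d.ts hunk above, the concrete return values are assumptions):

// Sketch only — mirrors the new signatures; results shown are assumed, not verified.
const { validateOutputs, transformLegacyOutputs } = require('nx/src/tasks-runner/utils');

// New flow: pass the raw outputs array and get a normalized array back,
// instead of catching an InvalidOutputsError and passing the error object in.
const outputs = ['dist/apps/my-app', '{options.outputPath}'];
const normalized = transformLegacyOutputs('apps/my-app', outputs);
console.log(normalized); // assumed: ['{workspaceRoot}/dist/apps/my-app', '{options.outputPath}']

validateOutputs(normalized); // assumed to throw only if an entry still lacks an interpolation token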