nx 19.6.4 → 19.6.5
- package/package.json +12 -12
- package/release/changelog-renderer/index.d.ts +1 -1
- package/release/changelog-renderer/index.js +46 -11
- package/schemas/nx-schema.json +5 -0
- package/src/command-line/import/command-object.js +4 -0
- package/src/command-line/import/import.d.ts +4 -0
- package/src/command-line/import/import.js +147 -12
- package/src/command-line/import/utils/prepare-source-repo.d.ts +1 -1
- package/src/command-line/import/utils/prepare-source-repo.js +31 -85
- package/src/command-line/release/changelog.js +52 -11
- package/src/command-line/release/command-object.d.ts +1 -0
- package/src/command-line/release/command-object.js +6 -1
- package/src/command-line/release/config/version-plans.d.ts +14 -1
- package/src/command-line/release/config/version-plans.js +33 -1
- package/src/command-line/release/plan-check.js +8 -61
- package/src/command-line/release/plan.js +131 -37
- package/src/command-line/release/release.js +1 -1
- package/src/command-line/release/utils/get-touched-projects-for-group.d.ts +7 -0
- package/src/command-line/release/utils/get-touched-projects-for-group.js +78 -0
- package/src/command-line/release/utils/git.d.ts +1 -1
- package/src/command-line/release/utils/git.js +45 -18
- package/src/command-line/release/version.js +1 -1
- package/src/daemon/server/sync-generators.d.ts +4 -0
- package/src/daemon/server/sync-generators.js +172 -52
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/project-graph/plugins/isolation/plugin-pool.js +1 -1
- package/src/utils/git-utils.d.ts +7 -10
- package/src/utils/git-utils.js +61 -44
- package/src/utils/sync-generators.d.ts +2 -2
- package/src/utils/squash.d.ts +0 -1
- package/src/utils/squash.js +0 -12
package/src/command-line/release/utils/get-touched-projects-for-group.js
ADDED
@@ -0,0 +1,78 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createGetTouchedProjectsForGroup = createGetTouchedProjectsForGroup;
+const workspace_projects_1 = require("../../../project-graph/affected/locators/workspace-projects");
+const file_utils_1 = require("../../../project-graph/file-utils");
+const ignore_1 = require("../../../utils/ignore");
+const output_1 = require("../../../utils/output");
+const config_1 = require("../config/config");
+/**
+ * Create a function that returns the touched projects for a given release group. Only relevant when version plans are enabled.
+ */
+function createGetTouchedProjectsForGroup(nxArgs, projectGraph, changedFiles, fileData) {
+    /**
+     * Create a minimal subset of touched projects based on the configured ignore patterns, we only need
+     * to recompute when the ignorePatternsForPlanCheck differs between release groups.
+     */
+    const serializedIgnorePatternsToTouchedProjects = new Map();
+    return async function getTouchedProjectsForGroup(releaseGroup,
+    // We don't access releaseGroups.projects directly, because we need to take the --projects filter into account
+    releaseGroupFilteredProjectNames, hasProjectsFilter) {
+        // The current release group doesn't leverage version plans
+        if (!releaseGroup.versionPlans) {
+            return [];
+        }
+        // Exclude patterns from .nxignore, .gitignore and explicit version plan config
+        let serializedIgnorePatterns = '[]';
+        const ignore = (0, ignore_1.getIgnoreObject)();
+        if (typeof releaseGroup.versionPlans !== 'boolean' &&
+            Array.isArray(releaseGroup.versionPlans.ignorePatternsForPlanCheck) &&
+            releaseGroup.versionPlans.ignorePatternsForPlanCheck.length) {
+            output_1.output.note({
+                title: `Applying configured ignore patterns to changed files${releaseGroup.name !== config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
+                    ? ` for release group "${releaseGroup.name}"`
+                    : ''}`,
+                bodyLines: [
+                    ...releaseGroup.versionPlans.ignorePatternsForPlanCheck.map((pattern) => ` - ${pattern}`),
+                ],
+            });
+            ignore.add(releaseGroup.versionPlans.ignorePatternsForPlanCheck);
+            serializedIgnorePatterns = JSON.stringify(releaseGroup.versionPlans.ignorePatternsForPlanCheck);
+        }
+        let touchedProjects = {};
+        if (serializedIgnorePatternsToTouchedProjects.has(serializedIgnorePatterns)) {
+            touchedProjects = serializedIgnorePatternsToTouchedProjects.get(serializedIgnorePatterns);
+        }
+        else {
+            // We only care about directly touched projects, not implicitly affected ones etc
+            const touchedProjectsArr = await (0, workspace_projects_1.getTouchedProjects)((0, file_utils_1.calculateFileChanges)(changedFiles, fileData, nxArgs, undefined, ignore), projectGraph.nodes);
+            touchedProjects = touchedProjectsArr.reduce((acc, project) => ({ ...acc, [project]: true }), {});
+            serializedIgnorePatternsToTouchedProjects.set(serializedIgnorePatterns, touchedProjects);
+        }
+        const touchedProjectsUnderReleaseGroup = releaseGroupFilteredProjectNames.filter((project) => touchedProjects[project]);
+        if (touchedProjectsUnderReleaseGroup.length) {
+            output_1.output.log({
+                title: `Touched projects${hasProjectsFilter ? ` (after --projects filter applied)` : ''} based on changed files${releaseGroup.name !== config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
+                    ? ` under release group "${releaseGroup.name}"`
+                    : ''}`,
+                bodyLines: [
+                    ...touchedProjectsUnderReleaseGroup.map((project) => ` - ${project}`),
+                    '',
+                    'NOTE: You can adjust your "versionPlans.ignorePatternsForPlanCheck" config to stop certain files from resulting in projects being classed as touched for the purposes of this command.',
+                ],
+            });
+        }
+        else {
+            output_1.output.log({
+                title: `No touched projects${hasProjectsFilter ? ` (after --projects filter applied)` : ''} found based on changed files${typeof releaseGroup.versionPlans !== 'boolean' &&
+                    Array.isArray(releaseGroup.versionPlans.ignorePatternsForPlanCheck) &&
+                    releaseGroup.versionPlans.ignorePatternsForPlanCheck.length
+                    ? ' combined with configured ignore patterns'
+                    : ''}${releaseGroup.name !== config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
+                    ? ` under release group "${releaseGroup.name}"`
+                    : ''}`,
+            });
+        }
+        return touchedProjectsUnderReleaseGroup;
+    };
+}
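The new helper is a factory: it captures the parsed arguments, project graph, changed files and file data once, and returns an async function that the release plan commands call per release group. A minimal consumption sketch follows; the internal require path, the shape of the release group object and the argument names are assumptions based on the hunk above, not something this diff guarantees:

```js
// Sketch only: wiring up the factory added in this release. In real usage this is done
// internally by `nx release plan` / `nx release plan:check` (see plan.js and plan-check.js).
const {
  createGetTouchedProjectsForGroup,
} = require('nx/src/command-line/release/utils/get-touched-projects-for-group');

async function reportTouchedProjects(nxArgs, projectGraph, changedFiles, fileData) {
  // Create the factory once so its internal Map can cache results per serialized ignore patterns.
  const getTouchedProjectsForGroup = createGetTouchedProjectsForGroup(nxArgs, projectGraph, changedFiles, fileData);
  // Hypothetical release group with version plans enabled and the new ignorePatternsForPlanCheck option.
  const releaseGroup = {
    name: 'my-group',
    versionPlans: { ignorePatternsForPlanCheck: ['**/*.spec.ts', '**/*.md'] },
  };
  // The second argument is expected to already reflect any --projects filter.
  return await getTouchedProjectsForGroup(releaseGroup, ['pkg-a', 'pkg-b'], false);
}
```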
package/src/command-line/release/utils/git.d.ts
CHANGED
@@ -62,6 +62,6 @@ export declare function parseConventionalCommitsMessage(message: string): {
     description: string;
     breaking: boolean;
 } | null;
-export declare function parseGitCommit(commit: RawGitCommit): GitCommit | null;
+export declare function parseGitCommit(commit: RawGitCommit, isVersionPlanCommit?: boolean): GitCommit | null;
 export declare function getCommitHash(ref: string): Promise<string>;
 export declare function getFirstGitCommit(): Promise<string>;
package/src/command-line/release/utils/git.js
CHANGED
@@ -281,6 +281,30 @@ function parseConventionalCommitsMessage(message) {
         breaking: Boolean(match.groups.breaking),
     };
 }
+function extractReferencesFromCommitMessage(message, shortHash) {
+    const references = [];
+    for (const m of message.matchAll(PullRequestRE)) {
+        references.push({ type: 'pull-request', value: m[1] });
+    }
+    for (const m of message.matchAll(IssueRE)) {
+        if (!references.some((i) => i.value === m[1])) {
+            references.push({ type: 'issue', value: m[1] });
+        }
+    }
+    references.push({ value: shortHash, type: 'hash' });
+    return references;
+}
+function getAllAuthorsForCommit(commit) {
+    const authors = [commit.author];
+    // Additional authors can be specified in the commit body (depending on the VCS provider)
+    for (const match of commit.body.matchAll(CoAuthoredByRegex)) {
+        authors.push({
+            name: (match.groups.name || '').trim(),
+            email: (match.groups.email || '').trim(),
+        });
+    }
+    return authors;
+}
 // https://www.conventionalcommits.org/en/v1.0.0/
 // https://regex101.com/r/FSfNvA/1
 const ConventionalCommitRegex = /(?<type>[a-z]+)(\((?<scope>.+)\))?(?<breaking>!)?: (?<description>.+)/i;
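The extraction logic itself is unchanged by this release; it is simply hoisted into the two helpers above so the new version-plan path can reuse it. For illustration, the reference extraction scans pull-request and issue patterns and always appends the short hash last; the helpers are module-private, so this standalone sketch re-declares the two regexes shown in this file:

```js
// Standalone illustration of the reference extraction behaviour; the regexes are copied
// from the hunks above, the sample message and hash are made up.
const PullRequestRE = /\([ a-z]*(#\d+)\s*\)/gm;
const IssueRE = /(#\d+)/gm;

function extractReferencesFromCommitMessage(message, shortHash) {
  const references = [];
  for (const m of message.matchAll(PullRequestRE)) {
    references.push({ type: 'pull-request', value: m[1] });
  }
  for (const m of message.matchAll(IssueRE)) {
    if (!references.some((i) => i.value === m[1])) {
      references.push({ type: 'issue', value: m[1] });
    }
  }
  references.push({ value: shortHash, type: 'hash' });
  return references;
}

console.log(extractReferencesFromCommitMessage('fix(core): handle empty graph (#123), closes #124', 'abc1234'));
// -> [ { type: 'pull-request', value: '#123' },
//      { type: 'issue', value: '#124' },
//      { value: 'abc1234', type: 'hash' } ]
```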
@@ -289,7 +313,25 @@ const PullRequestRE = /\([ a-z]*(#\d+)\s*\)/gm;
 const IssueRE = /(#\d+)/gm;
 const ChangedFileRegex = /(A|M|D|R\d*|C\d*)\t([^\t\n]*)\t?(.*)?/gm;
 const RevertHashRE = /This reverts commit (?<hash>[\da-f]{40})./gm;
-function parseGitCommit(commit) {
+function parseGitCommit(commit, isVersionPlanCommit = false) {
+    // For version plans, we do not require conventional commits and therefore cannot extract data based on that format
+    if (isVersionPlanCommit) {
+        return {
+            ...commit,
+            description: commit.message,
+            type: '',
+            scope: '',
+            references: extractReferencesFromCommitMessage(commit.message, commit.shortHash),
+            // The commit message is not the source of truth for a breaking (major) change in version plans, so the value is not relevant
+            // TODO(v20): Make the current GitCommit interface more clearly tied to conventional commits
+            isBreaking: false,
+            authors: getAllAuthorsForCommit(commit),
+            // Not applicable to version plans
+            affectedFiles: [],
+            // Not applicable, a version plan cannot have been added in a commit that also reverts another commit
+            revertedHashes: [],
+        };
+    }
     const parsedMessage = parseConventionalCommitsMessage(commit.message);
     if (!parsedMessage) {
         return null;
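With the new flag set, parseGitCommit never rejects a commit for failing the conventional-commit format, which is what allows version plan commits to carry free-form messages. A rough sketch of the effect, assuming the internal module path and a RawGitCommit-shaped object (both assumptions, not part of this diff):

```js
// Sketch: parsing a version plan commit whose message is not a conventional commit.
const { parseGitCommit } = require('nx/src/command-line/release/utils/git');

const rawCommit = {
  message: 'Add version plan for the upcoming minor release (#789)',
  body: '',
  shortHash: 'abc1234',
  author: { name: 'Jane Doe', email: 'jane@example.com' },
};

// Without the second argument this message would be rejected (no `type(scope): description` prefix).
const commit = parseGitCommit(rawCommit, true);
// commit.description === rawCommit.message, commit.type and commit.scope are empty strings,
// commit.isBreaking is false, affectedFiles and revertedHashes are empty arrays, and
// commit.references contains the PR reference (#789) plus the short hash.
```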
@@ -298,16 +340,7 @@ function parseGitCommit(commit) {
     const isBreaking = parsedMessage.breaking || commit.body.includes('BREAKING CHANGE:');
     let description = parsedMessage.description;
     // Extract references from message
-    const references = [];
-    for (const m of description.matchAll(PullRequestRE)) {
-        references.push({ type: 'pull-request', value: m[1] });
-    }
-    for (const m of description.matchAll(IssueRE)) {
-        if (!references.some((i) => i.value === m[1])) {
-            references.push({ type: 'issue', value: m[1] });
-        }
-    }
-    references.push({ value: commit.shortHash, type: 'hash' });
+    const references = extractReferencesFromCommitMessage(description, commit.shortHash);
     // Remove references and normalize
     description = description.replace(PullRequestRE, '').trim();
     let type = parsedMessage.type;
@@ -322,13 +355,7 @@ function parseGitCommit(commit) {
         description = commit.message;
     }
     // Find all authors
-    const authors = [commit.author];
-    for (const match of commit.body.matchAll(CoAuthoredByRegex)) {
-        authors.push({
-            name: (match.groups.name || '').trim(),
-            email: (match.groups.email || '').trim(),
-        });
-    }
+    const authors = getAllAuthorsForCommit(commit);
     // Extract file changes from commit body
     const affectedFiles = Array.from(commit.body.matchAll(ChangedFileRegex)).reduce((prev, [fullLine, changeType, file1, file2]) =>
     // file2 only exists for some change types, such as renames
package/src/command-line/release/version.js
CHANGED
@@ -82,7 +82,7 @@ function createAPI(overrideReleaseConfig) {
         }
         if (!args.specifier) {
            const rawVersionPlans = await (0, version_plans_1.readRawVersionPlans)();
-            (0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
+            await (0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes), args.verbose);
         }
         else {
             if (args.verbose && releaseGroups.some((g) => !!g.versionPlans)) {
package/src/daemon/server/sync-generators.d.ts
CHANGED
@@ -4,3 +4,7 @@ export declare function getCachedSyncGeneratorChanges(generators: string[]): Pro
 export declare function flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;
 export declare function collectAndScheduleSyncGenerators(projectGraph: ProjectGraph): void;
 export declare function getCachedRegisteredSyncGenerators(): Promise<string[]>;
+/**
+ * @internal
+ */
+export declare function _getConflictingGeneratorGroups(results: SyncGeneratorChangesResult[]): string[][];
package/src/daemon/server/sync-generators.js
CHANGED
@@ -4,6 +4,7 @@ exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
 exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
 exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
 exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+exports._getConflictingGeneratorGroups = _getConflictingGeneratorGroups;
 const nx_json_1 = require("../../config/nx-json");
 const tree_1 = require("../../generators/tree");
 const file_hasher_1 = require("../../hasher/file-hasher");
@@ -24,7 +25,6 @@ let storedNxJsonHash;
 const log = (...messageParts) => {
     logger_1.serverLogger.log('[SYNC]:', ...messageParts);
 };
-// TODO(leo): check conflicts and reuse the Tree where possible
 async function getCachedSyncGeneratorChanges(generators) {
     try {
         log('get sync generators changes on demand', generators);
@@ -36,51 +36,15 @@ async function getCachedSyncGeneratorChanges(generators) {
         }
         // reset the wait time
         waitPeriod = 100;
-        let projects;
-        let errored = false;
-        const getProjectsConfigurations = async () => {
-            if (projects || errored) {
-                return projects;
-            }
-            const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
-            projects = projectGraph
-                ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
-                : null;
-            errored = error !== undefined;
-            return projects;
-        };
-        return (await Promise.all(generators.map(async (generator) => {
-            if (scheduledGenerators.has(generator) ||
-                !syncGeneratorsCacheResultPromises.has(generator)) {
-                // it's scheduled to run (there are pending changes to process) or
-                // it's not scheduled and there's no cached result, so run it
-                const projects = await getProjectsConfigurations();
-                if (projects) {
-                    log(generator, 'already scheduled or not cached, running it now');
-                    runGenerator(generator, projects);
-                }
-                else {
-                    log(generator, 'already scheduled or not cached, project graph errored');
-                    /**
-                     * This should never happen. This is invoked imperatively, and by
-                     * the time it is invoked, the project graph would have already
-                     * been requested. If it errored, it would have been reported and
-                     * this wouldn't have been invoked. We handle it just in case.
-                     *
-                     * Since the project graph would be reported by the relevant
-                     * handlers separately, we just ignore the error, don't cache
-                     * any result and return an empty result, the next time this is
-                     * invoked the process will repeat until it eventually recovers
-                     * when the project graph is fixed.
-                     */
-                    return Promise.resolve({ changes: [], generatorName: generator });
-                }
-            }
-            else {
-                log(generator, 'not scheduled and has cached result, returning cached result');
-            }
-            return syncGeneratorsCacheResultPromises.get(generator);
-        }))).flat();
+        const results = await getFromCacheOrRunGenerators(generators);
+        const conflicts = _getConflictingGeneratorGroups(results);
+        if (!conflicts.length) {
+            // there are no conflicts
+            return results;
+        }
+        // there are conflicts, so we need to re-run the conflicting generators
+        // using the same tree
+        return await processConflictingGenerators(conflicts, results);
     }
     catch (e) {
         console.error(e);
@@ -126,7 +90,7 @@ function collectAndScheduleSyncGenerators(projectGraph) {
         }
         const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
         for (const generator of scheduledGenerators) {
-            runGenerator(generator, projects);
+            syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
         }
         await Promise.all(syncGeneratorsCacheResultPromises.values());
     }, waitPeriod);
@@ -143,6 +107,163 @@ async function getCachedRegisteredSyncGenerators() {
     }
     return [...registeredSyncGenerators];
 }
+async function getFromCacheOrRunGenerators(generators) {
+    let projects;
+    let errored = false;
+    const getProjectsConfigurations = async () => {
+        if (projects || errored) {
+            return projects;
+        }
+        const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+        projects = projectGraph
+            ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+            : null;
+        errored = error !== undefined;
+        return projects;
+    };
+    return (await Promise.all(generators.map(async (generator) => {
+        if (scheduledGenerators.has(generator) ||
+            !syncGeneratorsCacheResultPromises.has(generator)) {
+            // it's scheduled to run (there are pending changes to process) or
+            // it's not scheduled and there's no cached result, so run it
+            const projects = await getProjectsConfigurations();
+            if (projects) {
+                log(generator, 'already scheduled or not cached, running it now');
+                syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
+            }
+            else {
+                log(generator, 'already scheduled or not cached, project graph errored');
+                /**
+                 * This should never happen. This is invoked imperatively, and by
+                 * the time it is invoked, the project graph would have already
+                 * been requested. If it errored, it would have been reported and
+                 * this wouldn't have been invoked. We handle it just in case.
+                 *
+                 * Since the project graph would be reported by the relevant
+                 * handlers separately, we just ignore the error, don't cache
+                 * any result and return an empty result, the next time this is
+                 * invoked the process will repeat until it eventually recovers
+                 * when the project graph is fixed.
+                 */
+                return Promise.resolve({ changes: [], generatorName: generator });
+            }
+        }
+        else {
+            log(generator, 'not scheduled and has cached result, returning cached result');
+        }
+        return syncGeneratorsCacheResultPromises.get(generator);
+    }))).flat();
+}
+async function runConflictingGenerators(tree, generators) {
+    const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+    const projects = projectGraph
+        ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+        : null;
+    if (!projects) {
+        /**
+         * This should never happen. This is invoked imperatively, and by
+         * the time it is invoked, the project graph would have already
+         * been requested. If it errored, it would have been reported and
+         * this wouldn't have been invoked. We handle it just in case.
+         *
+         * Since the project graph would be reported by the relevant
+         * handlers separately, we just ignore the error.
+         */
+        return generators.map((generator) => ({
+            changes: [],
+            generatorName: generator,
+        }));
+    }
+    // we need to run conflicting generators sequentially because they use the same tree
+    const results = [];
+    for (const generator of generators) {
+        log(generator, 'running it now');
+        results.push(await runGenerator(generator, projects, tree));
+    }
+    return results;
+}
+async function processConflictingGenerators(conflicts, initialResults) {
+    const conflictRunResults = (await Promise.all(conflicts.map((generators) => {
+        const [firstGenerator, ...generatorsToRun] = generators;
+        // it must exists because the conflicts were identified from the initial results
+        const firstGeneratorResult = initialResults.find((r) => r.generatorName === firstGenerator);
+        const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generators ${generators.join(',')}`);
+        // pre-apply the changes from the first generator to avoid running it
+        for (const change of firstGeneratorResult.changes) {
+            if (change.type === 'CREATE' || change.type === 'UPDATE') {
+                tree.write(change.path, change.content, change.options);
+            }
+            else if (change.type === 'DELETE') {
+                tree.delete(change.path);
+            }
+        }
+        /**
+         * We don't cache the results of conflicting generators because they
+         * use the same tree, so some files might contain results from multiple
+         * generators and we don't have guarantees that the same combination of
+         * generators will run together.
+         */
+        return runConflictingGenerators(tree, generatorsToRun);
+    }))).flat();
+    /**
+     * The order of the results from the re-run generators is important because
+     * the last result from a group of conflicting generators will contain the
+     * changes from the previous conflicting generators. So, instead of replacing
+     * in-place the initial results, we first add the results from the re-run
+     * generators, and then add the initial results that were not from a
+     * conflicting generator.
+     */
+    const results = [...conflictRunResults];
+    for (const result of initialResults) {
+        if (conflictRunResults.every((r) => r.generatorName !== result.generatorName)) {
+            // this result is not from a conflicting generator, so we add it to the
+            // results
+            results.push(result);
+        }
+    }
+    return results;
+}
+/**
+ * @internal
+ */
+function _getConflictingGeneratorGroups(results) {
+    const changedFileToGeneratorMap = new Map();
+    for (const result of results) {
+        for (const change of result.changes) {
+            if (!changedFileToGeneratorMap.has(change.path)) {
+                changedFileToGeneratorMap.set(change.path, new Set());
+            }
+            changedFileToGeneratorMap.get(change.path).add(result.generatorName);
+        }
+    }
+    const conflicts = [];
+    for (const generatorSet of changedFileToGeneratorMap.values()) {
+        if (generatorSet.size === 1) {
+            // no conflicts
+            continue;
+        }
+        if (conflicts.length === 0) {
+            // there are no conflicts yet, so we just add the first group
+            conflicts.push(new Set(generatorSet));
+            continue;
+        }
+        // identify if any of the current generator sets intersect with any of the
+        // existing conflict groups
+        const generatorsArray = Array.from(generatorSet);
+        const existingConflictGroup = conflicts.find((group) => generatorsArray.some((generator) => group.has(generator)));
+        if (existingConflictGroup) {
+            // there's an intersecting group, so we merge the two
+            for (const generator of generatorsArray) {
+                existingConflictGroup.add(generator);
+            }
+        }
+        else {
+            // there's no intersecting group, so we create a new one
+            conflicts.push(new Set(generatorsArray));
+        }
+    }
+    return conflicts.map((group) => Array.from(group));
+}
 function collectAllRegisteredSyncGenerators(projectGraph) {
     const projectGraphHash = hashProjectGraph(projectGraph);
     if (storedProjectGraphHash !== projectGraphHash) {
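Because _getConflictingGeneratorGroups is exported (marked @internal, primarily for tests), its grouping semantics are easy to illustrate: generators end up in the same group whenever any of their written files overlap, and overlapping groups are merged transitively. A sketch with made-up generator names and minimally shaped results; the require path is the internal daemon module shown above and is only used here for illustration:

```js
// Illustration of the conflict-grouping semantics using results shaped like
// { generatorName, changes: [{ path }] }. All names and paths are made up.
const { _getConflictingGeneratorGroups } = require('nx/src/daemon/server/sync-generators');

const results = [
  { generatorName: '@org/plugin-a:sync', changes: [{ path: 'package.json' }, { path: 'apps/app1/project.json' }] },
  { generatorName: '@org/plugin-b:sync', changes: [{ path: 'package.json' }] },
  { generatorName: '@org/plugin-c:sync', changes: [{ path: 'libs/lib1/README.md' }] },
  { generatorName: '@org/plugin-d:sync', changes: [{ path: 'apps/app1/project.json' }] },
];

console.log(_getConflictingGeneratorGroups(results));
// -> [ [ '@org/plugin-a:sync', '@org/plugin-b:sync', '@org/plugin-d:sync' ] ]
// a, b and d are grouped because their written files overlap; c writes a file nobody
// else touches, so it is not part of any conflict group.
```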
@@ -183,16 +304,15 @@
         }
     }
 }
-function runGenerator(generator, projects) {
+function runGenerator(generator, projects, tree) {
     log('running scheduled generator', generator);
     // remove it from the scheduled set
     scheduledGenerators.delete(generator);
-
-
-    syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+    tree ??= new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+    return (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
        log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
        return result;
-    })
+    });
 }
 function hashProjectGraph(projectGraph) {
     const stringifiedProjects = Object.entries(projectGraph.nodes)
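runGenerator can now receive an existing FsTree, which is how the conflict handling above replays one generator's changes before running the next generator against the same in-memory tree. A rough illustration of that tree-reuse idea, using the same FsTree constructor as the code above; module paths, file paths and contents are made up, and it assumes it is executed inside an Nx workspace:

```js
// Illustration only: reusing one FsTree across steps so later writes see earlier ones.
const { FsTree } = require('nx/src/generators/tree');
const { workspaceRoot } = require('nx/src/utils/workspace-root');

const tree = new FsTree(workspaceRoot, false, 'running sync generators a,b');
// Pretend generator "a" produced this change and pre-apply it...
tree.write('apps/app1/project.json', JSON.stringify({ name: 'app1' }, null, 2));
// ...so a subsequent generator running against the same tree would see it.
console.log(tree.listChanges().map((c) => `${c.type} ${c.path}`));
```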
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file
package/src/project-graph/plugins/isolation/plugin-pool.js
CHANGED
@@ -273,7 +273,7 @@ async function startPluginWorker() {
                 socket,
             });
         }
-        else if (attempts >
+        else if (attempts > 10000) {
             // daemon fails to start, the process probably exited
             // we print the logs and exit the client
             reject('Failed to start plugin worker.');
package/src/utils/git-utils.d.ts
CHANGED
@@ -1,6 +1,6 @@
-
-export declare function cloneFromUpstream(url: string, destination: string, { originName }?: {
+export declare function cloneFromUpstream(url: string, destination: string, { originName, depth }?: {
     originName: string;
+    depth?: number;
 }): Promise<GitRepository>;
 export declare class GitRepository {
     private directory;
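The new optional depth lets callers create shallow clones through the same helper; a usage sketch, assuming the internal module path and treating the URL, destination and remote name as placeholders:

```js
// Sketch: shallow clone via the updated helper. URL, destination and remote name are placeholders.
const { cloneFromUpstream } = require('nx/src/utils/git-utils');

async function shallowClone() {
  const repo = await cloneFromUpstream('https://github.com/org/source-repo.git', '/tmp/source-repo', {
    originName: 'upstream',
    depth: 1, // new in this release; omitting it presumably keeps the previous full-history clone
  });
  console.log(await repo.listBranches());
}

shallowClone().catch((err) => {
  console.error(err);
  process.exit(1);
});
```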
@@ -8,12 +8,10 @@ export declare class GitRepository {
     constructor(directory: string);
     getGitRootPath(cwd: string): string;
     addFetchRemote(remoteName: string, branch: string): Promise<string>;
-    private execAsync;
     showStat(): Promise<string>;
     listBranches(): Promise<string[]>;
     getGitFiles(path: string): Promise<string[]>;
     reset(ref: string): Promise<string>;
-    squashLastTwoCommits(): Promise<string>;
     mergeUnrelatedHistories(ref: string, message: string): Promise<string>;
     fetch(remote: string, ref?: string): Promise<string>;
     checkout(branch: string, opts: {
@@ -25,14 +23,13 @@
     commit(message: string): Promise<string>;
     amendCommit(): Promise<string>;
     deleteGitRemote(name: string): Promise<string>;
-    deleteBranch(branch: string): Promise<string>;
     addGitRemote(name: string, url: string): Promise<string>;
+    hasFilterRepoInstalled(): Promise<boolean>;
+    filterRepo(subdirectory: string): Promise<string>;
+    filterBranch(subdirectory: string, branchName: string): Promise<string>;
+    private execAsync;
+    private quotePath;
 }
-/**
- * This is used by the squash editor script to update the rebase file.
- */
-export declare function updateRebaseFile(contents: string): string;
-export declare function fetchGitRemote(name: string, branch: string, execOptions: ExecSyncOptions): string | Buffer;
 /**
  * This is currently duplicated in Nx Console. Please let @MaxKless know if you make changes here.
  */