nx 19.6.4 → 19.6.5
- package/package.json +12 -12
- package/release/changelog-renderer/index.d.ts +1 -1
- package/release/changelog-renderer/index.js +46 -11
- package/schemas/nx-schema.json +5 -0
- package/src/command-line/import/command-object.js +4 -0
- package/src/command-line/import/import.d.ts +4 -0
- package/src/command-line/import/import.js +147 -12
- package/src/command-line/import/utils/prepare-source-repo.d.ts +1 -1
- package/src/command-line/import/utils/prepare-source-repo.js +31 -85
- package/src/command-line/release/changelog.js +52 -11
- package/src/command-line/release/command-object.d.ts +1 -0
- package/src/command-line/release/command-object.js +6 -1
- package/src/command-line/release/config/version-plans.d.ts +14 -1
- package/src/command-line/release/config/version-plans.js +33 -1
- package/src/command-line/release/plan-check.js +8 -61
- package/src/command-line/release/plan.js +131 -37
- package/src/command-line/release/release.js +1 -1
- package/src/command-line/release/utils/get-touched-projects-for-group.d.ts +7 -0
- package/src/command-line/release/utils/get-touched-projects-for-group.js +78 -0
- package/src/command-line/release/utils/git.d.ts +1 -1
- package/src/command-line/release/utils/git.js +45 -18
- package/src/command-line/release/version.js +1 -1
- package/src/daemon/server/sync-generators.d.ts +4 -0
- package/src/daemon/server/sync-generators.js +172 -52
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/project-graph/plugins/isolation/plugin-pool.js +1 -1
- package/src/utils/git-utils.d.ts +7 -10
- package/src/utils/git-utils.js +61 -44
- package/src/utils/sync-generators.d.ts +2 -2
- package/src/utils/squash.d.ts +0 -1
- package/src/utils/squash.js +0 -12
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "nx",
-  "version": "19.6.4",
+  "version": "19.6.5",
   "private": false,
   "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
   "repository": {
@@ -71,7 +71,7 @@
     "yargs-parser": "21.1.1",
     "node-machine-id": "1.1.12",
     "ora": "5.3.0",
-    "@nrwl/tao": "19.6.4"
+    "@nrwl/tao": "19.6.5"
   },
   "peerDependencies": {
     "@swc-node/register": "^1.8.0",
@@ -86,16 +86,16 @@
     }
   },
   "optionalDependencies": {
-    "@nx/nx-darwin-x64": "19.6.4",
-    "@nx/nx-darwin-arm64": "19.6.4",
-    "@nx/nx-linux-x64-gnu": "19.6.4",
-    "@nx/nx-linux-x64-musl": "19.6.4",
-    "@nx/nx-win32-x64-msvc": "19.6.4",
-    "@nx/nx-linux-arm64-gnu": "19.6.4",
-    "@nx/nx-linux-arm64-musl": "19.6.4",
-    "@nx/nx-linux-arm-gnueabihf": "19.6.4",
-    "@nx/nx-win32-arm64-msvc": "19.6.4",
-    "@nx/nx-freebsd-x64": "19.6.4"
+    "@nx/nx-darwin-x64": "19.6.5",
+    "@nx/nx-darwin-arm64": "19.6.5",
+    "@nx/nx-linux-x64-gnu": "19.6.5",
+    "@nx/nx-linux-x64-musl": "19.6.5",
+    "@nx/nx-win32-x64-msvc": "19.6.5",
+    "@nx/nx-linux-arm64-gnu": "19.6.5",
+    "@nx/nx-linux-arm64-musl": "19.6.5",
+    "@nx/nx-linux-arm-gnueabihf": "19.6.5",
+    "@nx/nx-win32-arm64-msvc": "19.6.5",
+    "@nx/nx-freebsd-x64": "19.6.5"
   },
   "nx-migrations": {
     "migrations": "./migrations.json",
package/release/changelog-renderer/index.d.ts
CHANGED
@@ -41,7 +41,7 @@ export type ChangelogRenderer = (config: {
     changelogRenderOptions: DefaultChangelogRenderOptions;
     dependencyBumps?: DependencyBump[];
     repoSlug?: RepoSlug;
-    conventionalCommitsConfig: NxReleaseConfig['conventionalCommits'];
+    conventionalCommitsConfig: NxReleaseConfig['conventionalCommits'] | null;
 }) => Promise<string> | string;
 /**
  * The specific options available to the default implementation of the ChangelogRenderer that nx exports
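Note: with this change, a custom changelog renderer can now receive conventionalCommitsConfig as null; Nx passes null when the changelog is generated from version plans rather than conventional commits. Below is a minimal sketch of a custom renderer that guards against that case. Only the property names visible in this diff are confirmed; the require-free shape and the rawVersion property are assumptions for illustration.

// my-changelog-renderer.js (illustrative sketch, not part of the package)
const customRenderer = async ({ changes, releaseVersion, conventionalCommitsConfig }) => {
  // When generating from version plans, there is no conventional-commits config.
  const isVersionPlans = !conventionalCommitsConfig;
  const lines = [`## ${releaseVersion.rawVersion}`, ''];
  for (const change of changes) {
    // Fall back to the raw change type when no config is available.
    const title = isVersionPlans
      ? change.type
      : conventionalCommitsConfig.types[change.type]?.changelog?.title ?? change.type;
    lines.push(`- ${title}: ${change.description}`);
  }
  return lines.join('\n');
};
module.exports = customRenderer;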
package/release/changelog-renderer/index.js
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 const semver_1 = require("semver");
+const conventional_commits_1 = require("../../src/command-line/release/config/conventional-commits");
 const github_1 = require("../../src/command-line/release/utils/github");
 // axios types and values don't seem to match
 const _axios = require("axios");
@@ -10,9 +11,7 @@ const axios = _axios;
  * from the given commits and other metadata.
  */
 const defaultChangelogRenderer = async ({ projectGraph, changes, releaseVersion, project, entryWhenNoChanges, changelogRenderOptions, dependencyBumps, repoSlug, conventionalCommitsConfig, }) => {
-    const changeTypes = conventionalCommitsConfig.types;
     const markdownLines = [];
-    const breakingChanges = [];
     // If the current range of changes contains both a commit and its revert, we strip them both from the final list. Changes from version plans are unaffected, as they have no hashes.
     for (const change of changes) {
         if (change.type === 'revert' && change.revertedHashes) {
@@ -26,10 +25,38 @@ const defaultChangelogRenderer = async ({ projectGraph, changes, releaseVersion,
         }
     }
     let relevantChanges = changes;
+    const breakingChanges = [];
+    // For now to keep the interface of the changelog renderer non-breaking for v19 releases we have a somewhat indirect check for whether or not we are generating a changelog for version plans
+    const isVersionPlans = !conventionalCommitsConfig;
+    // Only applicable for version plans
+    const additionalChangesForAuthorsSection = [];
+    // Provide a default configuration for version plans to allow most of the subsequent logic to work in the same way it would for conventional commits
+    // NOTE: The one exception is breaking/major changes, where we do not follow the same structure and instead only show the changes once
+    if (isVersionPlans) {
+        conventionalCommitsConfig = {
+            types: {
+                feat: conventional_commits_1.DEFAULT_CONVENTIONAL_COMMITS_CONFIG.types.feat,
+                fix: conventional_commits_1.DEFAULT_CONVENTIONAL_COMMITS_CONFIG.types.fix,
+            },
+        };
+        // Trim down "relevant changes" to only include non-breaking ones so that we can render them differently under version plans,
+        // but keep track of the changes for the purposes of the authors section
+        // TODO(v20): Clean this abstraction up as part of the larger overall refactor of changelog rendering
+        for (let i = 0; i < relevantChanges.length; i++) {
+            if (relevantChanges[i].isBreaking) {
+                const change = relevantChanges[i];
+                additionalChangesForAuthorsSection.push(change);
+                const line = formatChange(change, changelogRenderOptions, isVersionPlans, repoSlug);
+                breakingChanges.push(line);
+                relevantChanges.splice(i, 1);
+            }
+        }
+    }
+    const changeTypes = conventionalCommitsConfig.types;
     // workspace root level changelog
     if (project === null) {
         // No changes for the workspace
-        if (relevantChanges.length === 0) {
+        if (relevantChanges.length === 0 && breakingChanges.length === 0) {
            if (dependencyBumps?.length) {
                applyAdditionalDependencyBumps({
                    markdownLines,
@@ -62,7 +89,7 @@ const defaultChangelogRenderer = async ({ projectGraph, changes, releaseVersion,
         for (const scope of scopesSortedAlphabetically) {
             const changes = changesGroupedByScope[scope];
             for (const change of changes) {
-                const line = formatChange(change, changelogRenderOptions, repoSlug);
+                const line = formatChange(change, changelogRenderOptions, isVersionPlans, repoSlug);
                 markdownLines.push(line);
                 if (change.isBreaking) {
                     const breakingChangeExplanation = extractBreakingChangeExplanation(change.body);
@@ -79,7 +106,7 @@ const defaultChangelogRenderer = async ({ projectGraph, changes, releaseVersion,
     relevantChanges = relevantChanges.filter((c) => c.affectedProjects &&
         (c.affectedProjects === '*' || c.affectedProjects.includes(project)));
     // Generating for a named project, but that project has no relevant changes in the current set of commits, exit early
-    if (relevantChanges.length === 0) {
+    if (relevantChanges.length === 0 && breakingChanges.length === 0) {
         if (dependencyBumps?.length) {
             applyAdditionalDependencyBumps({
                 markdownLines,
@@ -105,7 +132,7 @@ const defaultChangelogRenderer = async ({ projectGraph, changes, releaseVersion,
         markdownLines.push('', `### ${changeTypes[type].changelog.title}`, '');
         const changesInChronologicalOrder = group.reverse();
         for (const change of changesInChronologicalOrder) {
-            const line = formatChange(change, changelogRenderOptions, repoSlug);
+            const line = formatChange(change, changelogRenderOptions, isVersionPlans, repoSlug);
             markdownLines.push(line + '\n');
             if (change.isBreaking) {
                 const breakingChangeExplanation = extractBreakingChangeExplanation(change.body);
@@ -117,7 +144,7 @@ const defaultChangelogRenderer = async ({ projectGraph, changes, releaseVersion,
         }
     }
     if (breakingChanges.length > 0) {
-        markdownLines.push('', '
+        markdownLines.push('', '### ⚠️ Breaking Changes', '', ...breakingChanges);
     }
     if (dependencyBumps?.length) {
         applyAdditionalDependencyBumps({
@@ -129,7 +156,10 @@ const defaultChangelogRenderer = async ({ projectGraph, changes, releaseVersion,
     }
     if (changelogRenderOptions.authors) {
         const _authors = new Map();
-        for (const change of
+        for (const change of [
+            ...relevantChanges,
+            ...additionalChangesForAuthorsSection,
+        ]) {
             if (!change.author) {
                 continue;
             }
@@ -215,7 +245,7 @@ function groupBy(items, key) {
     }
     return groups;
 }
-function formatChange(change, changelogRenderOptions, repoSlug) {
+function formatChange(change, changelogRenderOptions, isVersionPlans, repoSlug) {
     let description = change.description;
     let extraLines = [];
     let extraLinesStr = '';
@@ -228,9 +258,14 @@ function formatChange(change, changelogRenderOptions, repoSlug) {
             .map((l) => `${indentation}${l}`)
             .join('\n');
     }
+    /**
+     * In version plans changelogs:
+     * - don't repeat the breaking change icon
+     * - don't render the scope
+     */
     let changeLine = '- ' +
-        (change.isBreaking ? '⚠️ ' : '') +
-        (change.scope ? `**${change.scope.trim()}:** ` : '') +
+        (!isVersionPlans && change.isBreaking ? '⚠️ ' : '') +
+        (!isVersionPlans && change.scope ? `**${change.scope.trim()}:** ` : '') +
         description;
     if (repoSlug && changelogRenderOptions.commitReferences) {
         changeLine += (0, github_1.formatReferences)(change.githubReferences, repoSlug);
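A quick illustration of what the new isVersionPlans flag does to rendered entries. This is a standalone sketch of the formatChange logic shown above, not the actual export, and the sample change is made up:

// sketch-format-change.js
function sketchFormatChange(change, isVersionPlans) {
  return ('- ' +
    (!isVersionPlans && change.isBreaking ? '⚠️ ' : '') +
    (!isVersionPlans && change.scope ? `**${change.scope.trim()}:** ` : '') +
    change.description);
}
// Conventional commits keep the breaking-change icon and the scope prefix:
//   sketchFormatChange({ isBreaking: true, scope: 'core', description: 'drop Node 16 support' }, false)
//   => '- ⚠️ **core:** drop Node 16 support'
// Version plan entries render only the description; breaking changes are listed once in their own section:
//   sketchFormatChange({ isBreaking: true, scope: 'core', description: 'drop Node 16 support' }, true)
//   => '- drop Node 16 support'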
package/schemas/nx-schema.json
CHANGED
package/src/command-line/import/command-object.js
CHANGED
@@ -23,6 +23,10 @@ exports.yargsImportCommand = {
         .option('ref', {
         type: 'string',
         description: 'The branch from the source repository to import',
+    })
+        .option('depth', {
+        type: 'number',
+        description: 'The depth to clone the source repository (limit this for faster git clone)',
     })
         .option('interactive', {
         type: 'boolean',
package/src/command-line/import/import.d.ts
CHANGED
@@ -15,6 +15,10 @@ export interface ImportOptions {
      * The directory in the destination repo to import into
      */
     destination: string;
+    /**
+     * The depth to clone the source repository (limit this for faster clone times)
+     */
+    depth: number;
     verbose: boolean;
     interactive: boolean;
 }
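The new depth option is exposed on the CLI as --depth (for example, nx import <url> <destination> --depth 1 clones only the most recent commit of the source repository). A hypothetical programmatic invocation of the handler with the fields from ImportOptions; the require path and all values below are illustrative, not part of the package:

// run-import.js (hypothetical usage sketch)
const { importHandler } = require('nx/src/command-line/import/import');

importHandler({
  sourceRemoteUrl: 'https://github.com/acme/legacy-app.git', // hypothetical source repo
  source: 'apps/legacy-app',                                  // directory in the source repo
  destination: 'apps/legacy-app',                             // directory in this workspace
  ref: 'main',
  depth: 1,            // shallow clone: limit history for a faster git clone
  verbose: false,
  interactive: false,
}).catch((err) => {
  console.error(err);
  process.exitCode = 1;
});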
package/src/command-line/import/import.js
CHANGED
@@ -2,6 +2,10 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.importHandler = importHandler;
 const path_1 = require("path");
+const minimatch_1 = require("minimatch");
+const node_fs_1 = require("node:fs");
+const chalk = require("chalk");
+const js_yaml_1 = require("@zkochan/js-yaml");
 const git_utils_1 = require("../../utils/git-utils");
 const promises_1 = require("node:fs/promises");
 const tmp_1 = require("tmp");
@@ -18,6 +22,7 @@ const command_line_utils_1 = require("../../utils/command-line-utils");
 const prepare_source_repo_1 = require("./utils/prepare-source-repo");
 const merge_remote_source_1 = require("./utils/merge-remote-source");
 const needs_install_1 = require("./utils/needs-install");
+const file_utils_1 = require("../../project-graph/file-utils");
 const importRemoteName = '__tmp_nx_import__';
 async function importHandler(options) {
     let { sourceRemoteUrl, ref, source, destination } = options;
@@ -54,7 +59,7 @@ async function importHandler(options) {
         // It's a remote url
     }
     const sourceRepoPath = (0, path_1.join)(tempImportDirectory, 'repo');
-    const spinner = createSpinner(`Cloning ${sourceRemoteUrl} into a temporary directory: ${sourceRepoPath}`).start();
+    const spinner = createSpinner(`Cloning ${sourceRemoteUrl} into a temporary directory: ${sourceRepoPath} (Use --depth to limit commit history and speed up clone times)`).start();
     try {
         await (0, promises_1.rm)(tempImportDirectory, { recursive: true });
     }
@@ -64,6 +69,7 @@ async function importHandler(options) {
     try {
         sourceGitClient = await (0, git_utils_1.cloneFromUpstream)(sourceRemoteUrl, sourceRepoPath, {
             originName: importRemoteName,
+            depth: options.depth,
         });
     }
     catch (e) {
@@ -72,6 +78,8 @@ async function importHandler(options) {
         throw new Error(errorMessage);
     }
     spinner.succeed(`Cloned into ${sourceRepoPath}`);
+    // Detecting the package manager before preparing the source repo for import.
+    const sourcePackageManager = (0, package_manager_1.detectPackageManager)(sourceGitClient.root);
     if (!ref) {
         const branchChoices = await sourceGitClient.listBranches();
         ref = (await (0, enquirer_1.prompt)([
@@ -104,24 +112,34 @@ async function importHandler(options) {
             name: 'destination',
             message: 'Where in this workspace should the code be imported into?',
             required: true,
+            initial: source ? source : undefined,
         },
     ])).destination;
     }
     const absSource = (0, path_1.join)(sourceRepoPath, source);
     const absDestination = (0, path_1.join)(process.cwd(), destination);
+    const destinationGitClient = new git_utils_1.GitRepository(process.cwd());
+    await assertDestinationEmpty(destinationGitClient, absDestination);
+    const tempImportBranch = getTempImportBranch(ref);
+    await sourceGitClient.addFetchRemote(importRemoteName, ref);
+    await sourceGitClient.fetch(importRemoteName, ref);
+    spinner.succeed(`Fetched ${ref} from ${sourceRemoteUrl}`);
+    spinner.start(`Checking out a temporary branch, ${tempImportBranch} based on ${ref}`);
+    await sourceGitClient.checkout(tempImportBranch, {
+        new: true,
+        base: `${importRemoteName}/${ref}`,
+    });
+    spinner.succeed(`Created a ${tempImportBranch} branch based on ${ref}`);
     try {
         await (0, promises_1.stat)(absSource);
     }
     catch (e) {
         throw new Error(`The source directory ${source} does not exist in ${sourceRemoteUrl}. Please double check to make sure it exists.`);
     }
-    const destinationGitClient = new git_utils_1.GitRepository(process.cwd());
-    await assertDestinationEmpty(destinationGitClient, absDestination);
-    const tempImportBranch = getTempImportBranch(ref);
     const packageManager = (0, package_manager_1.detectPackageManager)(workspace_root_1.workspaceRoot);
     const originalPackageWorkspaces = await (0, needs_install_1.getPackagesInPackageManagerWorkspace)(packageManager);
     const relativeDestination = (0, path_1.relative)(destinationGitClient.root, absDestination);
-    await (0, prepare_source_repo_1.prepareSourceRepo)(sourceGitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl
+    await (0, prepare_source_repo_1.prepareSourceRepo)(sourceGitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl);
     await createTemporaryRemote(destinationGitClient, (0, path_1.join)(sourceRepoPath, '.git'), importRemoteName);
     await (0, merge_remote_source_1.mergeRemoteSource)(destinationGitClient, sourceRemoteUrl, tempImportBranch, destination, importRemoteName, ref);
     spinner.start('Cleaning up temporary files and remotes');
@@ -132,19 +150,69 @@ async function importHandler(options) {
     const nxJson = (0, nx_json_1.readNxJson)(workspace_root_1.workspaceRoot);
     (0, workspace_context_1.resetWorkspaceContext)();
     const { plugins, updatePackageScripts } = await (0, init_v2_1.detectPlugins)(nxJson, options.interactive);
+    if (packageManager !== sourcePackageManager) {
+        output_1.output.warn({
+            title: `Mismatched package managers`,
+            bodyLines: [
+                `The source repository is using a different package manager (${sourcePackageManager}) than this workspace (${packageManager}).`,
+                `This could lead to install issues due to discrepancies in "package.json" features.`,
+            ],
+        });
+    }
+    // If install fails, we should continue since the errors could be resolved later.
+    let installFailed = false;
     if (plugins.length > 0) {
-
-
-
+        try {
+            output_1.output.log({ title: 'Installing Plugins' });
+            (0, init_v2_1.installPlugins)(workspace_root_1.workspaceRoot, plugins, pmc, updatePackageScripts);
+            await destinationGitClient.amendCommit();
+        }
+        catch (e) {
+            installFailed = true;
+            output_1.output.error({
+                title: `Install failed: ${e.message || 'Unknown error'}`,
+                bodyLines: [e.stack],
+            });
+        }
     }
     else if (await (0, needs_install_1.needsInstall)(packageManager, originalPackageWorkspaces)) {
+        try {
+            output_1.output.log({
+                title: 'Installing dependencies for imported code',
+            });
+            (0, utils_1.runInstall)(workspace_root_1.workspaceRoot, (0, package_manager_1.getPackageManagerCommand)(packageManager));
+            await destinationGitClient.amendCommit();
+        }
+        catch (e) {
+            installFailed = true;
+            output_1.output.error({
+                title: `Install failed: ${e.message || 'Unknown error'}`,
+                bodyLines: [e.stack],
+            });
+        }
+    }
+    console.log(await destinationGitClient.showStat());
+    if (installFailed) {
+        const pmc = (0, package_manager_1.getPackageManagerCommand)(packageManager);
+        output_1.output.warn({
+            title: `The import was successful, but the install failed`,
+            bodyLines: [
+                `You may need to run "${pmc.install}" manually to resolve the issue. The error is logged above.`,
+            ],
+        });
+    }
+    await warnOnMissingWorkspacesEntry(packageManager, pmc, relativeDestination);
+    // When only a subdirectory is imported, there might be devDependencies in the root package.json file
+    // that needs to be ported over as well.
+    if (ref) {
         output_1.output.log({
-            title:
+            title: `Check root dependencies`,
+            bodyLines: [
+                `"dependencies" and "devDependencies" are not imported from the source repository (${sourceRemoteUrl}).`,
+                `You may need to add some of those dependencies to this workspace in order to run tasks successfully.`,
+            ],
         });
-        (0, utils_1.runInstall)(workspace_root_1.workspaceRoot, (0, package_manager_1.getPackageManagerCommand)(packageManager));
-        await destinationGitClient.amendCommit();
     }
-    console.log(await destinationGitClient.showStat());
     output_1.output.log({
         title: `Merging these changes into ${(0, command_line_utils_1.getBaseRef)(nxJson)}`,
         bodyLines: [
@@ -171,3 +239,70 @@ async function createTemporaryRemote(destinationGitClient, sourceRemoteUrl, remoteName) {
     await destinationGitClient.addGitRemote(remoteName, sourceRemoteUrl);
     await destinationGitClient.fetch(remoteName);
 }
+// If the user imports a project that isn't in NPM/Yarn/PNPM workspaces, then its dependencies
+// will not be installed. We should warn users and provide instructions on how to fix this.
+async function warnOnMissingWorkspacesEntry(pm, pmc, pkgPath) {
+    if (!(0, package_manager_1.isWorkspacesEnabled)(pm, workspace_root_1.workspaceRoot)) {
+        output_1.output.warn({
+            title: `Missing workspaces in package.json`,
+            bodyLines: pm === 'npm'
+                ? [
+                    `We recommend enabling NPM workspaces to install dependencies for the imported project.`,
+                    `Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
+                    `See: https://docs.npmjs.com/cli/using-npm/workspaces`,
+                ]
+                : pm === 'yarn'
+                    ? [
+                        `We recommend enabling Yarn workspaces to install dependencies for the imported project.`,
+                        `Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
+                        `See: https://yarnpkg.com/features/workspaces`,
+                    ]
+                    : pm === 'bun'
+                        ? [
+                            `We recommend enabling Bun workspaces to install dependencies for the imported project.`,
+                            `Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
+                            `See: https://bun.sh/docs/install/workspaces`,
+                        ]
+                        : [
+                            `We recommend enabling PNPM workspaces to install dependencies for the imported project.`,
+                            `Add the following entry to to pnpm-workspace.yaml and run "${pmc.install}":`,
+                            chalk.bold(`packages:\n - '${pkgPath}'`),
+                            `See: https://pnpm.io/workspaces`,
+                        ],
+        });
+    }
+    else {
+        // Check if the new package is included in existing workspaces entries. If not, warn the user.
+        let workspaces = null;
+        if (pm === 'npm' || pm === 'yarn' || pm === 'bun') {
+            const packageJson = (0, file_utils_1.readPackageJson)();
+            workspaces = packageJson.workspaces;
+        }
+        else if (pm === 'pnpm') {
+            const yamlPath = (0, path_1.join)(workspace_root_1.workspaceRoot, 'pnpm-workspace.yaml');
+            if ((0, node_fs_1.existsSync)(yamlPath)) {
+                const yamlContent = await node_fs_1.promises.readFile(yamlPath, 'utf-8');
+                const yaml = (0, js_yaml_1.load)(yamlContent);
+                workspaces = yaml.packages;
+            }
+        }
+        if (workspaces) {
+            const isPkgIncluded = workspaces.some((w) => (0, minimatch_1.minimatch)(pkgPath, w));
+            if (!isPkgIncluded) {
+                const pkgsDir = (0, path_1.dirname)(pkgPath);
+                output_1.output.warn({
+                    title: `Project missing in workspaces`,
+                    bodyLines: pm === 'npm' || pm === 'yarn' || pm === 'bun'
+                        ? [
+                            `The imported project (${pkgPath}) is missing the "workspaces" field in package.json.`,
+                            `Add "${pkgsDir}/*" to workspaces run "${pmc.install}".`,
+                        ]
+                        : [
+                            `The imported project (${pkgPath}) is missing the "packages" field in pnpm-workspaces.yaml.`,
+                            `Add "${pkgsDir}/*" to packages run "${pmc.install}".`,
+                        ],
+                });
+            }
+        }
+    }
+}
package/src/command-line/import/utils/prepare-source-repo.d.ts
CHANGED
@@ -1,2 +1,2 @@
 import { GitRepository } from '../../../utils/git-utils';
-export declare function prepareSourceRepo(gitClient: GitRepository, ref: string, source: string, relativeDestination: string, tempImportBranch: string, sourceRemoteUrl: string
+export declare function prepareSourceRepo(gitClient: GitRepository, ref: string, source: string, relativeDestination: string, tempImportBranch: string, sourceRemoteUrl: string): Promise<void>;
package/src/command-line/import/utils/prepare-source-repo.js
CHANGED
@@ -4,99 +4,45 @@ exports.prepareSourceRepo = prepareSourceRepo;
 const createSpinner = require("ora");
 const path_1 = require("path");
 const promises_1 = require("node:fs/promises");
-async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl
+async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl) {
     const spinner = createSpinner().start(`Fetching ${ref} from ${sourceRemoteUrl}`);
-    await gitClient.addFetchRemote(originName, ref);
-    await gitClient.fetch(originName, ref);
-    spinner.succeed(`Fetched ${ref} from ${sourceRemoteUrl}`);
-    spinner.start(`Checking out a temporary branch, ${tempImportBranch} based on ${ref}`);
-    await gitClient.checkout(tempImportBranch, {
-        new: true,
-        base: `${originName}/${ref}`,
-    });
-    spinner.succeed(`Created a ${tempImportBranch} branch based on ${ref}`);
     const relativeSourceDir = (0, path_1.relative)(gitClient.root, (0, path_1.join)(gitClient.root, source));
-
-
-
-
-    try {
-        await (0, promises_1.rm)(destinationInSource, {
-            recursive: true,
-        });
-    }
-    catch { }
-    await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
-    const gitignores = new Set();
-    for (const file of files) {
-        if ((0, path_1.basename)(file) === '.gitignore') {
-            gitignores.add(file);
-            continue;
-        }
-        spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
-        const newPath = (0, path_1.join)(destinationInSource, file);
-        await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
-        try {
-            await gitClient.move(file, newPath);
-        }
-        catch {
-            await wait(100);
-            await gitClient.move(file, newPath);
-        }
-    }
-    await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
-    for (const gitignore of gitignores) {
-        await gitClient.move(gitignore, (0, path_1.join)(destinationInSource, gitignore));
+    if (relativeSourceDir !== '') {
+        if (await gitClient.hasFilterRepoInstalled()) {
+            spinner.start(`Filtering git history to only include files in ${relativeSourceDir}`);
+            await gitClient.filterRepo(relativeSourceDir);
         }
-
-
-    await
+        else {
+            spinner.start(`Filtering git history to only include files in ${relativeSourceDir} (this might take a few minutes -- install git-filter-repo for faster performance)`);
+            await gitClient.filterBranch(relativeSourceDir, tempImportBranch);
         }
+        spinner.succeed(`Filtered git history to only include files in ${relativeSourceDir}`);
     }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            continue;
-        }
-        spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
-        if (!(0, path_1.relative)(source, file).startsWith('..')) {
-            if (needsMove) {
-                const newPath = (0, path_1.join)(destinationInSource, file);
-                await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
-                try {
-                    await gitClient.move(file, newPath);
-                }
-                catch {
-                    await wait(100);
-                    await gitClient.move(file, newPath);
-                }
-            }
-        }
-        else {
-            await (0, promises_1.rm)((0, path_1.join)(gitClient.root, file), {
-                recursive: true,
-            });
-        }
+    const destinationInSource = (0, path_1.join)(gitClient.root, relativeDestination);
+    spinner.start(`Moving files and git history to ${destinationInSource}`);
+    // The result of filter-branch will contain only the files in the subdirectory at its root.
+    const files = await gitClient.getGitFiles('.');
+    try {
+        await (0, promises_1.rm)(destinationInSource, {
+            recursive: true,
+        });
+    }
+    catch { }
+    await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
+    for (const file of files) {
+        spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
+        const newPath = (0, path_1.join)(destinationInSource, file);
+        await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
+        try {
+            await gitClient.move(file, newPath);
         }
-
-
-        await gitClient.
+        catch {
+            await wait(100);
+            await gitClient.move(file, newPath);
         }
     }
+    await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
+    await gitClient.amendCommit();
     spinner.succeed(`${sourceRemoteUrl} has been prepared to be imported into this workspace on a temporary branch: ${tempImportBranch} in ${gitClient.root}`);
 }
 function wait(ms) {