nx 19.7.0-canary.20240828-13170da → 19.7.0-canary.20240830-83a387a
- package/package.json +12 -12
- package/schemas/nx-schema.json +7 -0
- package/src/command-line/connect/command-object.js +10 -3
- package/src/command-line/connect/connect-to-nx-cloud.d.ts +3 -1
- package/src/command-line/connect/connect-to-nx-cloud.js +7 -4
- package/src/command-line/import/command-object.js +4 -0
- package/src/command-line/import/import.d.ts +4 -0
- package/src/command-line/import/import.js +147 -12
- package/src/command-line/import/utils/prepare-source-repo.d.ts +1 -1
- package/src/command-line/import/utils/prepare-source-repo.js +31 -85
- package/src/command-line/sync/sync.js +12 -1
- package/src/command-line/yargs-utils/shared-options.js +1 -9
- package/src/config/nx-json.d.ts +5 -1
- package/src/daemon/server/sync-generators.d.ts +4 -0
- package/src/daemon/server/sync-generators.js +183 -55
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.d.ts +2 -1
- package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +16 -50
- package/src/nx-cloud/generators/connect-to-nx-cloud/schema.json +4 -0
- package/src/nx-cloud/update-manager.d.ts +1 -1
- package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js +8 -1
- package/src/tasks-runner/cache.js +2 -2
- package/src/tasks-runner/run-command.js +1 -10
- package/src/utils/git-utils.d.ts +7 -10
- package/src/utils/git-utils.js +61 -44
- package/src/utils/sync-generators.d.ts +8 -5
- package/src/utils/sync-generators.js +27 -5
- package/src/utils/squash.d.ts +0 -1
- package/src/utils/squash.js +0 -12
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "nx",
-"version": "19.7.0-canary.20240828-13170da",
+"version": "19.7.0-canary.20240830-83a387a",
 "private": false,
 "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
 "repository": {
@@ -71,7 +71,7 @@
 "yargs-parser": "21.1.1",
 "node-machine-id": "1.1.12",
 "ora": "5.3.0",
-"@nrwl/tao": "19.7.0-canary.20240828-13170da"
+"@nrwl/tao": "19.7.0-canary.20240830-83a387a"
 },
 "peerDependencies": {
 "@swc-node/register": "^1.8.0",
@@ -86,16 +86,16 @@
 }
 },
 "optionalDependencies": {
-"@nx/nx-darwin-x64": "19.7.0-canary.20240828-13170da",
-"@nx/nx-darwin-arm64": "19.7.0-canary.20240828-13170da",
-"@nx/nx-linux-x64-gnu": "19.7.0-canary.20240828-13170da",
-"@nx/nx-linux-x64-musl": "19.7.0-canary.20240828-13170da",
-"@nx/nx-win32-x64-msvc": "19.7.0-canary.20240828-13170da",
-"@nx/nx-linux-arm64-gnu": "19.7.0-canary.20240828-13170da",
-"@nx/nx-linux-arm64-musl": "19.7.0-canary.20240828-13170da",
-"@nx/nx-linux-arm-gnueabihf": "19.7.0-canary.20240828-13170da",
-"@nx/nx-win32-arm64-msvc": "19.7.0-canary.20240828-13170da",
-"@nx/nx-freebsd-x64": "19.7.0-canary.20240828-13170da"
+"@nx/nx-darwin-x64": "19.7.0-canary.20240830-83a387a",
+"@nx/nx-darwin-arm64": "19.7.0-canary.20240830-83a387a",
+"@nx/nx-linux-x64-gnu": "19.7.0-canary.20240830-83a387a",
+"@nx/nx-linux-x64-musl": "19.7.0-canary.20240830-83a387a",
+"@nx/nx-win32-x64-msvc": "19.7.0-canary.20240830-83a387a",
+"@nx/nx-linux-arm64-gnu": "19.7.0-canary.20240830-83a387a",
+"@nx/nx-linux-arm64-musl": "19.7.0-canary.20240830-83a387a",
+"@nx/nx-linux-arm-gnueabihf": "19.7.0-canary.20240830-83a387a",
+"@nx/nx-win32-arm64-msvc": "19.7.0-canary.20240830-83a387a",
+"@nx/nx-freebsd-x64": "19.7.0-canary.20240830-83a387a"
 },
 "nx-migrations": {
 "migrations": "./migrations.json",
package/schemas/nx-schema.json
CHANGED
@@ -282,6 +282,13 @@
 "applyChanges": {
 "type": "boolean",
 "description": "Whether to automatically apply sync generator changes when running tasks. If not set, the user will be prompted. If set to `true`, the user will not be prompted and the changes will be applied. If set to `false`, the user will not be prompted and the changes will not be applied."
+},
+"disabledTaskSyncGenerators": {
+"type": "array",
+"items": {
+"type": "string"
+},
+"description": "List of registered task sync generators to disable."
 }
 },
 "additionalProperties": false
package/src/command-line/connect/command-object.js
CHANGED
@@ -3,13 +3,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.yargsViewLogsCommand = exports.yargsConnectCommand = void 0;
 const documentation_1 = require("../yargs-utils/documentation");
 const versions_1 = require("../../utils/versions");
+const shared_options_1 = require("../yargs-utils/shared-options");
 exports.yargsConnectCommand = {
 command: 'connect',
 aliases: ['connect-to-nx-cloud'],
 describe: `Connect workspace to Nx Cloud`,
-builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(yargs, 'connect-to-nx-cloud'),
-handler: async () => {
-await (await Promise.resolve().then(() => require('./connect-to-nx-cloud'))).connectToNxCloudCommand();
+builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(withConnectOptions(yargs), 'connect-to-nx-cloud'),
+handler: async (args) => {
+await (await Promise.resolve().then(() => require('./connect-to-nx-cloud'))).connectToNxCloudCommand(args);
 await (await Promise.resolve().then(() => require('../../utils/ab-testing'))).recordStat({
 command: 'connect',
 nxVersion: versions_1.nxVersion,
@@ -18,6 +19,12 @@ exports.yargsConnectCommand = {
 process.exit(0);
 },
 };
+function withConnectOptions(yargs) {
+return (0, shared_options_1.withVerbose)(yargs).option('generateToken', {
+type: 'boolean',
+description: 'Explicitly asks for a token to be created, do not override existing tokens from Nx Cloud',
+});
+}
 exports.yargsViewLogsCommand = {
 command: 'view-logs',
 describe: 'Enables you to view and interact with the logs via the advanced analytic UI from Nx Cloud to help you debug your issue. To do this, Nx needs to connect your workspace to Nx Cloud and upload the most recent run details. Only the metrics are uploaded, not the artefacts.',
package/src/command-line/connect/connect-to-nx-cloud.d.ts
CHANGED
@@ -5,6 +5,8 @@ import { MessageKey } from '../../utils/ab-testing';
 export declare function onlyDefaultRunnerIsUsed(nxJson: NxJsonConfiguration): boolean;
 export declare function connectToNxCloudIfExplicitlyAsked(opts: NxArgs): Promise<void>;
 export declare function connectWorkspaceToCloud(options: ConnectToNxCloudOptions, directory?: string): Promise<string>;
-export declare function connectToNxCloudCommand(
+export declare function connectToNxCloudCommand(options: {
+generateToken?: boolean;
+}, command?: string): Promise<boolean>;
 export declare function connectExistingRepoToNxCloudPrompt(command?: string, key?: MessageKey): Promise<boolean>;
 export declare function connectToNxCloudWithPrompt(command: string): Promise<void>;
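The hunk above changes `connectToNxCloudCommand` to accept an options object ahead of the optional command name. A minimal sketch of calling the new signature, assuming the module can be loaded from its path inside the published package (a deep import, not a documented public entry point):

```ts
// Sketch only: exercises the updated signature from connect-to-nx-cloud.d.ts above.
// The deep import path mirrors the file location in this diff and is an assumption.
import { connectToNxCloudCommand } from 'nx/src/command-line/connect/connect-to-nx-cloud';

async function connectWithFreshToken(): Promise<void> {
  // Per the new --generateToken flag, `generateToken: true` explicitly asks for a token
  // to be created instead of reusing an existing one from Nx Cloud.
  const connected = await connectToNxCloudCommand({ generateToken: true }, 'connect');
  console.log(connected ? 'Connected to Nx Cloud' : 'Workspace already had Nx Cloud set up');
}
```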
package/src/command-line/connect/connect-to-nx-cloud.js
CHANGED
@@ -54,7 +54,7 @@ async function connectWorkspaceToCloud(options, directory = workspace_root_1.wor
 (0, tree_1.flushChanges)(directory, tree.listChanges());
 return accessToken;
 }
-async function connectToNxCloudCommand(command) {
+async function connectToNxCloudCommand(options, command) {
 const nxJson = (0, configuration_1.readNxJson)();
 const installationSource = process.env.NX_CONSOLE
 ? 'nx-console'
@@ -66,7 +66,7 @@ async function connectToNxCloudCommand(command) {
 if (!token) {
 throw new Error(`Unable to authenticate. If you are connecting to Nx Cloud locally, set Nx Cloud ID in nx.json. If you are connecting in a CI context, either define accessToken in nx.json or set the NX_CLOUD_ACCESS_TOKEN env variable.`);
 }
-const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)(installationSource, token);
+const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)(installationSource, token, options?.generateToken !== true);
 output_1.output.log({
 title: '✔ This workspace already has Nx Cloud set up',
 bodyLines: [
@@ -78,9 +78,10 @@ async function connectToNxCloudCommand(command) {
 return false;
 }
 const token = await connectWorkspaceToCloud({
+generateToken: options?.generateToken,
 installationSource: command ?? installationSource,
 });
-const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)('nx-connect', token);
+const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)('nx-connect', token, options?.generateToken !== true);
 try {
 const cloudConnectSpinner = ora(`Opening Nx Cloud ${connectCloudUrl} in your browser to connect your workspace.`).start();
 await sleep(2000);
@@ -115,7 +116,9 @@ async function connectExistingRepoToNxCloudPrompt(command = 'init', key = 'setup
 }
 async function connectToNxCloudWithPrompt(command) {
 const setNxCloud = await nxCloudPrompt('setupNxCloud');
-const useCloud = setNxCloud === 'yes'
+const useCloud = setNxCloud === 'yes'
+? await connectToNxCloudCommand({ generateToken: false }, command)
+: false;
 await (0, ab_testing_1.recordStat)({
 command,
 nxVersion: versions_1.nxVersion,
package/src/command-line/import/command-object.js
CHANGED
@@ -23,6 +23,10 @@ exports.yargsImportCommand = {
 .option('ref', {
 type: 'string',
 description: 'The branch from the source repository to import',
+})
+.option('depth', {
+type: 'number',
+description: 'The depth to clone the source repository (limit this for faster git clone)',
 })
 .option('interactive', {
 type: 'boolean',
package/src/command-line/import/import.d.ts
CHANGED
@@ -15,6 +15,10 @@ export interface ImportOptions {
 * The directory in the destination repo to import into
 */
 destination: string;
+/**
+* The depth to clone the source repository (limit this for faster clone times)
+*/
+depth: number;
 verbose: boolean;
 interactive: boolean;
 }
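The `ImportOptions` interface above gains a `depth` field. For orientation, an options object with the fields visible in this diff might look like the sketch below; all values are placeholders, and any interface fields not shown in these hunks are omitted:

```ts
// Sketch only: field names are taken from the ImportOptions hunk above and from
// `let { sourceRemoteUrl, ref, source, destination } = options;` in import.js below.
const importOptions = {
  sourceRemoteUrl: 'https://github.com/example-org/example-repo.git', // placeholder
  ref: 'main',
  source: 'packages/example-lib', // directory in the source repo (placeholder)
  destination: 'libs/example-lib', // directory in this workspace (placeholder)
  depth: 1, // new option: limit clone depth for a faster git clone
  verbose: false,
  interactive: true,
};
```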
package/src/command-line/import/import.js
CHANGED
@@ -2,6 +2,10 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.importHandler = importHandler;
 const path_1 = require("path");
+const minimatch_1 = require("minimatch");
+const node_fs_1 = require("node:fs");
+const chalk = require("chalk");
+const js_yaml_1 = require("@zkochan/js-yaml");
 const git_utils_1 = require("../../utils/git-utils");
 const promises_1 = require("node:fs/promises");
 const tmp_1 = require("tmp");
@@ -18,6 +22,7 @@ const command_line_utils_1 = require("../../utils/command-line-utils");
 const prepare_source_repo_1 = require("./utils/prepare-source-repo");
 const merge_remote_source_1 = require("./utils/merge-remote-source");
 const needs_install_1 = require("./utils/needs-install");
+const file_utils_1 = require("../../project-graph/file-utils");
 const importRemoteName = '__tmp_nx_import__';
 async function importHandler(options) {
 let { sourceRemoteUrl, ref, source, destination } = options;
@@ -54,7 +59,7 @@ async function importHandler(options) {
 // It's a remote url
 }
 const sourceRepoPath = (0, path_1.join)(tempImportDirectory, 'repo');
-const spinner = createSpinner(`Cloning ${sourceRemoteUrl} into a temporary directory: ${sourceRepoPath}`).start();
+const spinner = createSpinner(`Cloning ${sourceRemoteUrl} into a temporary directory: ${sourceRepoPath} (Use --depth to limit commit history and speed up clone times)`).start();
 try {
 await (0, promises_1.rm)(tempImportDirectory, { recursive: true });
 }
@@ -64,6 +69,7 @@ async function importHandler(options) {
 try {
 sourceGitClient = await (0, git_utils_1.cloneFromUpstream)(sourceRemoteUrl, sourceRepoPath, {
 originName: importRemoteName,
+depth: options.depth,
 });
 }
 catch (e) {
@@ -72,6 +78,8 @@ async function importHandler(options) {
 throw new Error(errorMessage);
 }
 spinner.succeed(`Cloned into ${sourceRepoPath}`);
+// Detecting the package manager before preparing the source repo for import.
+const sourcePackageManager = (0, package_manager_1.detectPackageManager)(sourceGitClient.root);
 if (!ref) {
 const branchChoices = await sourceGitClient.listBranches();
 ref = (await (0, enquirer_1.prompt)([
@@ -104,24 +112,34 @@ async function importHandler(options) {
 name: 'destination',
 message: 'Where in this workspace should the code be imported into?',
 required: true,
+initial: source ? source : undefined,
 },
 ])).destination;
 }
 const absSource = (0, path_1.join)(sourceRepoPath, source);
 const absDestination = (0, path_1.join)(process.cwd(), destination);
+const destinationGitClient = new git_utils_1.GitRepository(process.cwd());
+await assertDestinationEmpty(destinationGitClient, absDestination);
+const tempImportBranch = getTempImportBranch(ref);
+await sourceGitClient.addFetchRemote(importRemoteName, ref);
+await sourceGitClient.fetch(importRemoteName, ref);
+spinner.succeed(`Fetched ${ref} from ${sourceRemoteUrl}`);
+spinner.start(`Checking out a temporary branch, ${tempImportBranch} based on ${ref}`);
+await sourceGitClient.checkout(tempImportBranch, {
+new: true,
+base: `${importRemoteName}/${ref}`,
+});
+spinner.succeed(`Created a ${tempImportBranch} branch based on ${ref}`);
 try {
 await (0, promises_1.stat)(absSource);
 }
 catch (e) {
 throw new Error(`The source directory ${source} does not exist in ${sourceRemoteUrl}. Please double check to make sure it exists.`);
 }
-const destinationGitClient = new git_utils_1.GitRepository(process.cwd());
-await assertDestinationEmpty(destinationGitClient, absDestination);
-const tempImportBranch = getTempImportBranch(ref);
 const packageManager = (0, package_manager_1.detectPackageManager)(workspace_root_1.workspaceRoot);
 const originalPackageWorkspaces = await (0, needs_install_1.getPackagesInPackageManagerWorkspace)(packageManager);
 const relativeDestination = (0, path_1.relative)(destinationGitClient.root, absDestination);
-await (0, prepare_source_repo_1.prepareSourceRepo)(sourceGitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl
+await (0, prepare_source_repo_1.prepareSourceRepo)(sourceGitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl);
 await createTemporaryRemote(destinationGitClient, (0, path_1.join)(sourceRepoPath, '.git'), importRemoteName);
 await (0, merge_remote_source_1.mergeRemoteSource)(destinationGitClient, sourceRemoteUrl, tempImportBranch, destination, importRemoteName, ref);
 spinner.start('Cleaning up temporary files and remotes');
@@ -132,19 +150,69 @@ async function importHandler(options) {
 const nxJson = (0, nx_json_1.readNxJson)(workspace_root_1.workspaceRoot);
 (0, workspace_context_1.resetWorkspaceContext)();
 const { plugins, updatePackageScripts } = await (0, init_v2_1.detectPlugins)(nxJson, options.interactive);
+if (packageManager !== sourcePackageManager) {
+output_1.output.warn({
+title: `Mismatched package managers`,
+bodyLines: [
+`The source repository is using a different package manager (${sourcePackageManager}) than this workspace (${packageManager}).`,
+`This could lead to install issues due to discrepancies in "package.json" features.`,
+],
+});
+}
+// If install fails, we should continue since the errors could be resolved later.
+let installFailed = false;
 if (plugins.length > 0) {
-
-
-
+try {
+output_1.output.log({ title: 'Installing Plugins' });
+(0, init_v2_1.installPlugins)(workspace_root_1.workspaceRoot, plugins, pmc, updatePackageScripts);
+await destinationGitClient.amendCommit();
+}
+catch (e) {
+installFailed = true;
+output_1.output.error({
+title: `Install failed: ${e.message || 'Unknown error'}`,
+bodyLines: [e.stack],
+});
+}
 }
 else if (await (0, needs_install_1.needsInstall)(packageManager, originalPackageWorkspaces)) {
+try {
+output_1.output.log({
+title: 'Installing dependencies for imported code',
+});
+(0, utils_1.runInstall)(workspace_root_1.workspaceRoot, (0, package_manager_1.getPackageManagerCommand)(packageManager));
+await destinationGitClient.amendCommit();
+}
+catch (e) {
+installFailed = true;
+output_1.output.error({
+title: `Install failed: ${e.message || 'Unknown error'}`,
+bodyLines: [e.stack],
+});
+}
+}
+console.log(await destinationGitClient.showStat());
+if (installFailed) {
+const pmc = (0, package_manager_1.getPackageManagerCommand)(packageManager);
+output_1.output.warn({
+title: `The import was successful, but the install failed`,
+bodyLines: [
+`You may need to run "${pmc.install}" manually to resolve the issue. The error is logged above.`,
+],
+});
+}
+await warnOnMissingWorkspacesEntry(packageManager, pmc, relativeDestination);
+// When only a subdirectory is imported, there might be devDependencies in the root package.json file
+// that needs to be ported over as well.
+if (ref) {
 output_1.output.log({
-title:
+title: `Check root dependencies`,
+bodyLines: [
+`"dependencies" and "devDependencies" are not imported from the source repository (${sourceRemoteUrl}).`,
+`You may need to add some of those dependencies to this workspace in order to run tasks successfully.`,
+],
 });
-(0, utils_1.runInstall)(workspace_root_1.workspaceRoot, (0, package_manager_1.getPackageManagerCommand)(packageManager));
-await destinationGitClient.amendCommit();
 }
-console.log(await destinationGitClient.showStat());
 output_1.output.log({
 title: `Merging these changes into ${(0, command_line_utils_1.getBaseRef)(nxJson)}`,
 bodyLines: [
@@ -171,3 +239,70 @@ async function createTemporaryRemote(destinationGitClient, sourceRemoteUrl, remo
 await destinationGitClient.addGitRemote(remoteName, sourceRemoteUrl);
 await destinationGitClient.fetch(remoteName);
 }
+// If the user imports a project that isn't in NPM/Yarn/PNPM workspaces, then its dependencies
+// will not be installed. We should warn users and provide instructions on how to fix this.
+async function warnOnMissingWorkspacesEntry(pm, pmc, pkgPath) {
+if (!(0, package_manager_1.isWorkspacesEnabled)(pm, workspace_root_1.workspaceRoot)) {
+output_1.output.warn({
+title: `Missing workspaces in package.json`,
+bodyLines: pm === 'npm'
+? [
+`We recommend enabling NPM workspaces to install dependencies for the imported project.`,
+`Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
+`See: https://docs.npmjs.com/cli/using-npm/workspaces`,
+]
+: pm === 'yarn'
+? [
+`We recommend enabling Yarn workspaces to install dependencies for the imported project.`,
+`Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
+`See: https://yarnpkg.com/features/workspaces`,
+]
+: pm === 'bun'
+? [
+`We recommend enabling Bun workspaces to install dependencies for the imported project.`,
+`Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
+`See: https://bun.sh/docs/install/workspaces`,
+]
+: [
+`We recommend enabling PNPM workspaces to install dependencies for the imported project.`,
+`Add the following entry to to pnpm-workspace.yaml and run "${pmc.install}":`,
+chalk.bold(`packages:\n - '${pkgPath}'`),
+`See: https://pnpm.io/workspaces`,
+],
+});
+}
+else {
+// Check if the new package is included in existing workspaces entries. If not, warn the user.
+let workspaces = null;
+if (pm === 'npm' || pm === 'yarn' || pm === 'bun') {
+const packageJson = (0, file_utils_1.readPackageJson)();
+workspaces = packageJson.workspaces;
+}
+else if (pm === 'pnpm') {
+const yamlPath = (0, path_1.join)(workspace_root_1.workspaceRoot, 'pnpm-workspace.yaml');
+if ((0, node_fs_1.existsSync)(yamlPath)) {
+const yamlContent = await node_fs_1.promises.readFile(yamlPath, 'utf-8');
+const yaml = (0, js_yaml_1.load)(yamlContent);
+workspaces = yaml.packages;
+}
+}
+if (workspaces) {
+const isPkgIncluded = workspaces.some((w) => (0, minimatch_1.minimatch)(pkgPath, w));
+if (!isPkgIncluded) {
+const pkgsDir = (0, path_1.dirname)(pkgPath);
+output_1.output.warn({
+title: `Project missing in workspaces`,
+bodyLines: pm === 'npm' || pm === 'yarn' || pm === 'bun'
+? [
+`The imported project (${pkgPath}) is missing the "workspaces" field in package.json.`,
+`Add "${pkgsDir}/*" to workspaces run "${pmc.install}".`,
+]
+: [
+`The imported project (${pkgPath}) is missing the "packages" field in pnpm-workspaces.yaml.`,
+`Add "${pkgsDir}/*" to packages run "${pmc.install}".`,
+],
+});
+}
+}
+}
+}
package/src/command-line/import/utils/prepare-source-repo.d.ts
CHANGED
@@ -1,2 +1,2 @@
 import { GitRepository } from '../../../utils/git-utils';
-export declare function prepareSourceRepo(gitClient: GitRepository, ref: string, source: string, relativeDestination: string, tempImportBranch: string, sourceRemoteUrl: string
+export declare function prepareSourceRepo(gitClient: GitRepository, ref: string, source: string, relativeDestination: string, tempImportBranch: string, sourceRemoteUrl: string): Promise<void>;
package/src/command-line/import/utils/prepare-source-repo.js
CHANGED
@@ -4,99 +4,45 @@ exports.prepareSourceRepo = prepareSourceRepo;
 const createSpinner = require("ora");
 const path_1 = require("path");
 const promises_1 = require("node:fs/promises");
-async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl
+async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl) {
 const spinner = createSpinner().start(`Fetching ${ref} from ${sourceRemoteUrl}`);
-await gitClient.addFetchRemote(originName, ref);
-await gitClient.fetch(originName, ref);
-spinner.succeed(`Fetched ${ref} from ${sourceRemoteUrl}`);
-spinner.start(`Checking out a temporary branch, ${tempImportBranch} based on ${ref}`);
-await gitClient.checkout(tempImportBranch, {
-new: true,
-base: `${originName}/${ref}`,
-});
-spinner.succeed(`Created a ${tempImportBranch} branch based on ${ref}`);
 const relativeSourceDir = (0, path_1.relative)(gitClient.root, (0, path_1.join)(gitClient.root, source));
-
-
-
-
-try {
-await (0, promises_1.rm)(destinationInSource, {
-recursive: true,
-});
-}
-catch { }
-await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
-const gitignores = new Set();
-for (const file of files) {
-if ((0, path_1.basename)(file) === '.gitignore') {
-gitignores.add(file);
-continue;
-}
-spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
-const newPath = (0, path_1.join)(destinationInSource, file);
-await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
-try {
-await gitClient.move(file, newPath);
-}
-catch {
-await wait(100);
-await gitClient.move(file, newPath);
-}
-}
-await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
-for (const gitignore of gitignores) {
-await gitClient.move(gitignore, (0, path_1.join)(destinationInSource, gitignore));
+if (relativeSourceDir !== '') {
+if (await gitClient.hasFilterRepoInstalled()) {
+spinner.start(`Filtering git history to only include files in ${relativeSourceDir}`);
+await gitClient.filterRepo(relativeSourceDir);
 }
-
-
-await
+else {
+spinner.start(`Filtering git history to only include files in ${relativeSourceDir} (this might take a few minutes -- install git-filter-repo for faster performance)`);
+await gitClient.filterBranch(relativeSourceDir, tempImportBranch);
 }
+spinner.succeed(`Filtered git history to only include files in ${relativeSourceDir}`);
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-continue;
-}
-spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
-if (!(0, path_1.relative)(source, file).startsWith('..')) {
-if (needsMove) {
-const newPath = (0, path_1.join)(destinationInSource, file);
-await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
-try {
-await gitClient.move(file, newPath);
-}
-catch {
-await wait(100);
-await gitClient.move(file, newPath);
-}
-}
-}
-else {
-await (0, promises_1.rm)((0, path_1.join)(gitClient.root, file), {
-recursive: true,
-});
-}
+const destinationInSource = (0, path_1.join)(gitClient.root, relativeDestination);
+spinner.start(`Moving files and git history to ${destinationInSource}`);
+// The result of filter-branch will contain only the files in the subdirectory at its root.
+const files = await gitClient.getGitFiles('.');
+try {
+await (0, promises_1.rm)(destinationInSource, {
+recursive: true,
+});
+}
+catch { }
+await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
+for (const file of files) {
+spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
+const newPath = (0, path_1.join)(destinationInSource, file);
+await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
+try {
+await gitClient.move(file, newPath);
 }
-
-
-await gitClient.
+catch {
+await wait(100);
+await gitClient.move(file, newPath);
 }
 }
+await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
+await gitClient.amendCommit();
 spinner.succeed(`${sourceRemoteUrl} has been prepared to be imported into this workspace on a temporary branch: ${tempImportBranch} in ${gitClient.root}`);
 }
 function wait(ms) {
package/src/command-line/sync/sync.js
CHANGED
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.syncHandler = syncHandler;
 const ora = require("ora");
+const nx_json_1 = require("../../config/nx-json");
 const project_graph_1 = require("../../project-graph/project-graph");
 const output_1 = require("../../utils/output");
 const params_1 = require("../../utils/params");
@@ -10,7 +11,17 @@ const chalk = require("chalk");
 function syncHandler(options) {
 return (0, params_1.handleErrors)(options.verbose, async () => {
 const projectGraph = await (0, project_graph_1.createProjectGraphAsync)();
-const
+const nxJson = (0, nx_json_1.readNxJson)();
+const syncGenerators = await (0, sync_generators_1.collectAllRegisteredSyncGenerators)(projectGraph, nxJson);
+if (!syncGenerators.length) {
+output_1.output.success({
+title: options.check
+? 'The workspace is up to date'
+: 'The workspace is already up to date',
+bodyLines: ['There are no sync generators to run.'],
+});
+return 0;
+}
 const results = await (0, sync_generators_1.getSyncGeneratorChanges)(syncGenerators);
 if (!results.length) {
 output_1.output.success({
package/src/command-line/yargs-utils/shared-options.js
CHANGED
@@ -209,15 +209,7 @@ function withOutputStyleOption(yargs, choices = [
 'stream-without-prefixes',
 ]) {
 return yargs.option('output-style', {
-describe: `Defines how Nx emits outputs tasks logs
-
-| option | description |
-| --- | --- |
-| dynamic | use dynamic output life cycle, previous content is overwritten or modified as new outputs are added, display minimal logs by default, always show errors. This output format is recommended on your local development environments. |
-| static | uses static output life cycle, no previous content is rewritten or modified as new outputs are added. This output format is recommened for CI environments. |
-| stream | nx by default logs output to an internal output stream, enable this option to stream logs to stdout / stderr |
-| stream-without-prefixes | nx prefixes the project name the target is running on, use this option remove the project name prefix from output |
-`,
+describe: `Defines how Nx emits outputs tasks logs. **dynamic**: use dynamic output life cycle, previous content is overwritten or modified as new outputs are added, display minimal logs by default, always show errors. This output format is recommended on your local development environments. **static**: uses static output life cycle, no previous content is rewritten or modified as new outputs are added. This output format is recommened for CI environments. **stream**: nx by default logs output to an internal output stream, enable this option to stream logs to stdout / stderr. **stream-without-prefixes**: nx prefixes the project name the target is running on, use this option remove the project name prefix from output.`,
 type: 'string',
 choices,
 });
package/src/config/nx-json.d.ts
CHANGED
@@ -286,11 +286,15 @@ export interface NxSyncConfiguration {
 };
 /**
 * Whether to automatically apply sync generator changes when running tasks.
-* If not set, the user will be prompted.
+* If not set, the user will be prompted in interactive mode.
 * If set to `true`, the user will not be prompted and the changes will be applied.
 * If set to `false`, the user will not be prompted and the changes will not be applied.
 */
 applyChanges?: boolean;
+/**
+* List of registered task sync generators to disable.
+*/
+disabledTaskSyncGenerators?: string[];
 }
 /**
 * Nx.json configuration
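As a usage sketch of the updated `NxSyncConfiguration`, the `sync` block of nx.json could be expressed with the new field like this; the generator name is a made-up placeholder, not a real registered sync generator:

```ts
// Sketch only: NxSyncConfiguration is exported from the file shown in the hunk above.
import type { NxSyncConfiguration } from 'nx/src/config/nx-json';

const sync: NxSyncConfiguration = {
  // Apply sync generator changes without prompting when running tasks.
  applyChanges: true,
  // Disable specific registered task sync generators by name (placeholder name).
  disabledTaskSyncGenerators: ['example-plugin:example-sync-generator'],
};
```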
package/src/daemon/server/sync-generators.d.ts
CHANGED
@@ -4,3 +4,7 @@ export declare function getCachedSyncGeneratorChanges(generators: string[]): Pro
 export declare function flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;
 export declare function collectAndScheduleSyncGenerators(projectGraph: ProjectGraph): void;
 export declare function getCachedRegisteredSyncGenerators(): Promise<string[]>;
+/**
+* @internal
+*/
+export declare function _getConflictingGeneratorGroups(results: SyncGeneratorChangesResult[]): string[][];