nx 19.7.0-canary.20240829-0ef6892 → 19.7.0-canary.20240830-83a387a
- package/package.json +12 -12
- package/src/command-line/connect/command-object.js +10 -3
- package/src/command-line/connect/connect-to-nx-cloud.d.ts +3 -1
- package/src/command-line/connect/connect-to-nx-cloud.js +7 -4
- package/src/command-line/import/command-object.js +4 -0
- package/src/command-line/import/import.d.ts +4 -0
- package/src/command-line/import/import.js +147 -12
- package/src/command-line/import/utils/prepare-source-repo.d.ts +1 -1
- package/src/command-line/import/utils/prepare-source-repo.js +31 -85
- package/src/command-line/yargs-utils/shared-options.js +1 -9
- package/src/daemon/server/sync-generators.d.ts +4 -0
- package/src/daemon/server/sync-generators.js +172 -52
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.d.ts +2 -1
- package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +16 -14
- package/src/nx-cloud/generators/connect-to-nx-cloud/schema.json +4 -0
- package/src/utils/git-utils.d.ts +7 -10
- package/src/utils/git-utils.js +61 -44
- package/src/utils/sync-generators.d.ts +2 -2
- package/src/utils/squash.d.ts +0 -1
- package/src/utils/squash.js +0 -12
package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
   "name": "nx",
-  "version": "19.7.0-canary.20240829-0ef6892",
+  "version": "19.7.0-canary.20240830-83a387a",
   "private": false,
   "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
   "repository": {
@@ -71,7 +71,7 @@
     "yargs-parser": "21.1.1",
     "node-machine-id": "1.1.12",
     "ora": "5.3.0",
-    "@nrwl/tao": "19.7.0-canary.20240829-0ef6892"
+    "@nrwl/tao": "19.7.0-canary.20240830-83a387a"
   },
   "peerDependencies": {
     "@swc-node/register": "^1.8.0",
@@ -86,16 +86,16 @@
     }
   },
   "optionalDependencies": {
-    "@nx/nx-darwin-x64": "19.7.0-canary.20240829-0ef6892",
-    "@nx/nx-darwin-arm64": "19.7.0-canary.20240829-0ef6892",
-    "@nx/nx-linux-x64-gnu": "19.7.0-canary.20240829-0ef6892",
-    "@nx/nx-linux-x64-musl": "19.7.0-canary.20240829-0ef6892",
-    "@nx/nx-win32-x64-msvc": "19.7.0-canary.20240829-0ef6892",
-    "@nx/nx-linux-arm64-gnu": "19.7.0-canary.20240829-0ef6892",
-    "@nx/nx-linux-arm64-musl": "19.7.0-canary.20240829-0ef6892",
-    "@nx/nx-linux-arm-gnueabihf": "19.7.0-canary.20240829-0ef6892",
-    "@nx/nx-win32-arm64-msvc": "19.7.0-canary.20240829-0ef6892",
-    "@nx/nx-freebsd-x64": "19.7.0-canary.20240829-0ef6892"
+    "@nx/nx-darwin-x64": "19.7.0-canary.20240830-83a387a",
+    "@nx/nx-darwin-arm64": "19.7.0-canary.20240830-83a387a",
+    "@nx/nx-linux-x64-gnu": "19.7.0-canary.20240830-83a387a",
+    "@nx/nx-linux-x64-musl": "19.7.0-canary.20240830-83a387a",
+    "@nx/nx-win32-x64-msvc": "19.7.0-canary.20240830-83a387a",
+    "@nx/nx-linux-arm64-gnu": "19.7.0-canary.20240830-83a387a",
+    "@nx/nx-linux-arm64-musl": "19.7.0-canary.20240830-83a387a",
+    "@nx/nx-linux-arm-gnueabihf": "19.7.0-canary.20240830-83a387a",
+    "@nx/nx-win32-arm64-msvc": "19.7.0-canary.20240830-83a387a",
+    "@nx/nx-freebsd-x64": "19.7.0-canary.20240830-83a387a"
   },
   "nx-migrations": {
     "migrations": "./migrations.json",
```
package/src/command-line/connect/command-object.js
CHANGED
```diff
@@ -3,13 +3,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.yargsViewLogsCommand = exports.yargsConnectCommand = void 0;
 const documentation_1 = require("../yargs-utils/documentation");
 const versions_1 = require("../../utils/versions");
+const shared_options_1 = require("../yargs-utils/shared-options");
 exports.yargsConnectCommand = {
     command: 'connect',
     aliases: ['connect-to-nx-cloud'],
     describe: `Connect workspace to Nx Cloud`,
-    builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(yargs, 'connect-to-nx-cloud'),
-    handler: async () => {
-        await (await Promise.resolve().then(() => require('./connect-to-nx-cloud'))).connectToNxCloudCommand();
+    builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(withConnectOptions(yargs), 'connect-to-nx-cloud'),
+    handler: async (args) => {
+        await (await Promise.resolve().then(() => require('./connect-to-nx-cloud'))).connectToNxCloudCommand(args);
         await (await Promise.resolve().then(() => require('../../utils/ab-testing'))).recordStat({
             command: 'connect',
             nxVersion: versions_1.nxVersion,
@@ -18,6 +19,12 @@ exports.yargsConnectCommand = {
         process.exit(0);
     },
 };
+function withConnectOptions(yargs) {
+    return (0, shared_options_1.withVerbose)(yargs).option('generateToken', {
+        type: 'boolean',
+        description: 'Explicitly asks for a token to be created, do not override existing tokens from Nx Cloud',
+    });
+}
 exports.yargsViewLogsCommand = {
     command: 'view-logs',
     describe: 'Enables you to view and interact with the logs via the advanced analytic UI from Nx Cloud to help you debug your issue. To do this, Nx needs to connect your workspace to Nx Cloud and upload the most recent run details. Only the metrics are uploaded, not the artefacts.',
```
package/src/command-line/connect/connect-to-nx-cloud.d.ts
CHANGED
```diff
@@ -5,6 +5,8 @@ import { MessageKey } from '../../utils/ab-testing';
 export declare function onlyDefaultRunnerIsUsed(nxJson: NxJsonConfiguration): boolean;
 export declare function connectToNxCloudIfExplicitlyAsked(opts: NxArgs): Promise<void>;
 export declare function connectWorkspaceToCloud(options: ConnectToNxCloudOptions, directory?: string): Promise<string>;
-export declare function connectToNxCloudCommand(command?: string): Promise<boolean>;
+export declare function connectToNxCloudCommand(options: {
+    generateToken?: boolean;
+}, command?: string): Promise<boolean>;
 export declare function connectExistingRepoToNxCloudPrompt(command?: string, key?: MessageKey): Promise<boolean>;
 export declare function connectToNxCloudWithPrompt(command: string): Promise<void>;
```
package/src/command-line/connect/connect-to-nx-cloud.js
CHANGED
```diff
@@ -54,7 +54,7 @@ async function connectWorkspaceToCloud(options, directory = workspace_root_1.wor
     (0, tree_1.flushChanges)(directory, tree.listChanges());
     return accessToken;
 }
-async function connectToNxCloudCommand(command) {
+async function connectToNxCloudCommand(options, command) {
     const nxJson = (0, configuration_1.readNxJson)();
     const installationSource = process.env.NX_CONSOLE
         ? 'nx-console'
@@ -66,7 +66,7 @@ async function connectToNxCloudCommand(command) {
         if (!token) {
            throw new Error(`Unable to authenticate. If you are connecting to Nx Cloud locally, set Nx Cloud ID in nx.json. If you are connecting in a CI context, either define accessToken in nx.json or set the NX_CLOUD_ACCESS_TOKEN env variable.`);
         }
-        const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)(installationSource, token);
+        const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)(installationSource, token, options?.generateToken !== true);
         output_1.output.log({
             title: '✔ This workspace already has Nx Cloud set up',
             bodyLines: [
@@ -78,9 +78,10 @@ async function connectToNxCloudCommand(command) {
         return false;
     }
     const token = await connectWorkspaceToCloud({
+        generateToken: options?.generateToken,
         installationSource: command ?? installationSource,
     });
-    const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)('nx-connect', token);
+    const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)('nx-connect', token, options?.generateToken !== true);
     try {
         const cloudConnectSpinner = ora(`Opening Nx Cloud ${connectCloudUrl} in your browser to connect your workspace.`).start();
         await sleep(2000);
@@ -115,7 +116,9 @@ async function connectExistingRepoToNxCloudPrompt(command = 'init', key = 'setup
 }
 async function connectToNxCloudWithPrompt(command) {
     const setNxCloud = await nxCloudPrompt('setupNxCloud');
-    const useCloud = setNxCloud === 'yes' ? await connectToNxCloudCommand(command) : false;
+    const useCloud = setNxCloud === 'yes'
+        ? await connectToNxCloudCommand({ generateToken: false }, command)
+        : false;
     await (0, ab_testing_1.recordStat)({
         command,
         nxVersion: versions_1.nxVersion,
```
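The `connect` command now threads its parsed flags into `connectToNxCloudCommand`, whose options object comes before the optional command name. A minimal sketch of the new call shape; the require path and surrounding script are illustrative, not part of the diff:

```js
// Hypothetical sketch: calling the updated connectToNxCloudCommand directly.
// `generateToken: true` mirrors `nx connect --generateToken` and explicitly
// asks for a token instead of relying on an existing Nx Cloud token.
const { connectToNxCloudCommand } = require('nx/src/command-line/connect/connect-to-nx-cloud');

async function main() {
    const connected = await connectToNxCloudCommand({ generateToken: true }, 'connect');
    // Returns false when the workspace already had Nx Cloud set up.
    console.log('connected:', connected);
}

main().catch((e) => {
    console.error(e);
    process.exit(1);
});
```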
package/src/command-line/import/command-object.js
CHANGED
```diff
@@ -23,6 +23,10 @@ exports.yargsImportCommand = {
     .option('ref', {
         type: 'string',
         description: 'The branch from the source repository to import',
+    })
+    .option('depth', {
+        type: 'number',
+        description: 'The depth to clone the source repository (limit this for faster git clone)',
     })
     .option('interactive', {
         type: 'boolean',
```
package/src/command-line/import/import.d.ts
CHANGED
```diff
@@ -15,6 +15,10 @@ export interface ImportOptions {
      * The directory in the destination repo to import into
      */
     destination: string;
+    /**
+     * The depth to clone the source repository (limit this for faster clone times)
+     */
+    depth: number;
     verbose: boolean;
     interactive: boolean;
 }
```
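The new `depth` field flows from the `--depth` CLI flag into `ImportOptions`. A hedged sketch of invoking the handler with a shallow clone; the require path, URL, and directory names are placeholders:

```js
// Hypothetical sketch: an ImportOptions object using the new `depth` field.
// `depth: 1` clones only the most recent commit, which is the "faster git
// clone" case the option description refers to.
const { importHandler } = require('nx/src/command-line/import/import');

importHandler({
    sourceRemoteUrl: 'https://github.com/example-org/example-repo.git', // placeholder
    ref: 'main',
    source: 'packages/my-lib',  // directory inside the source repository
    destination: 'libs/my-lib', // directory inside this workspace
    depth: 1,                   // shallow clone for faster imports
    verbose: false,
    interactive: false,
}).then(() => console.log('Import complete.'));
```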
package/src/command-line/import/import.js
CHANGED
```diff
@@ -2,6 +2,10 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.importHandler = importHandler;
 const path_1 = require("path");
+const minimatch_1 = require("minimatch");
+const node_fs_1 = require("node:fs");
+const chalk = require("chalk");
+const js_yaml_1 = require("@zkochan/js-yaml");
 const git_utils_1 = require("../../utils/git-utils");
 const promises_1 = require("node:fs/promises");
 const tmp_1 = require("tmp");
@@ -18,6 +22,7 @@ const command_line_utils_1 = require("../../utils/command-line-utils");
 const prepare_source_repo_1 = require("./utils/prepare-source-repo");
 const merge_remote_source_1 = require("./utils/merge-remote-source");
 const needs_install_1 = require("./utils/needs-install");
+const file_utils_1 = require("../../project-graph/file-utils");
 const importRemoteName = '__tmp_nx_import__';
 async function importHandler(options) {
     let { sourceRemoteUrl, ref, source, destination } = options;
@@ -54,7 +59,7 @@ async function importHandler(options) {
         // It's a remote url
     }
     const sourceRepoPath = (0, path_1.join)(tempImportDirectory, 'repo');
-    const spinner = createSpinner(`Cloning ${sourceRemoteUrl} into a temporary directory: ${sourceRepoPath}`).start();
+    const spinner = createSpinner(`Cloning ${sourceRemoteUrl} into a temporary directory: ${sourceRepoPath} (Use --depth to limit commit history and speed up clone times)`).start();
     try {
         await (0, promises_1.rm)(tempImportDirectory, { recursive: true });
     }
@@ -64,6 +69,7 @@ async function importHandler(options) {
     try {
         sourceGitClient = await (0, git_utils_1.cloneFromUpstream)(sourceRemoteUrl, sourceRepoPath, {
             originName: importRemoteName,
+            depth: options.depth,
         });
     }
     catch (e) {
@@ -72,6 +78,8 @@ async function importHandler(options) {
         throw new Error(errorMessage);
     }
     spinner.succeed(`Cloned into ${sourceRepoPath}`);
+    // Detecting the package manager before preparing the source repo for import.
+    const sourcePackageManager = (0, package_manager_1.detectPackageManager)(sourceGitClient.root);
     if (!ref) {
         const branchChoices = await sourceGitClient.listBranches();
         ref = (await (0, enquirer_1.prompt)([
@@ -104,24 +112,34 @@ async function importHandler(options) {
                 name: 'destination',
                 message: 'Where in this workspace should the code be imported into?',
                 required: true,
+                initial: source ? source : undefined,
             },
         ])).destination;
     }
     const absSource = (0, path_1.join)(sourceRepoPath, source);
     const absDestination = (0, path_1.join)(process.cwd(), destination);
+    const destinationGitClient = new git_utils_1.GitRepository(process.cwd());
+    await assertDestinationEmpty(destinationGitClient, absDestination);
+    const tempImportBranch = getTempImportBranch(ref);
+    await sourceGitClient.addFetchRemote(importRemoteName, ref);
+    await sourceGitClient.fetch(importRemoteName, ref);
+    spinner.succeed(`Fetched ${ref} from ${sourceRemoteUrl}`);
+    spinner.start(`Checking out a temporary branch, ${tempImportBranch} based on ${ref}`);
+    await sourceGitClient.checkout(tempImportBranch, {
+        new: true,
+        base: `${importRemoteName}/${ref}`,
+    });
+    spinner.succeed(`Created a ${tempImportBranch} branch based on ${ref}`);
     try {
         await (0, promises_1.stat)(absSource);
     }
     catch (e) {
         throw new Error(`The source directory ${source} does not exist in ${sourceRemoteUrl}. Please double check to make sure it exists.`);
     }
-    const destinationGitClient = new git_utils_1.GitRepository(process.cwd());
-    await assertDestinationEmpty(destinationGitClient, absDestination);
-    const tempImportBranch = getTempImportBranch(ref);
     const packageManager = (0, package_manager_1.detectPackageManager)(workspace_root_1.workspaceRoot);
     const originalPackageWorkspaces = await (0, needs_install_1.getPackagesInPackageManagerWorkspace)(packageManager);
     const relativeDestination = (0, path_1.relative)(destinationGitClient.root, absDestination);
-    await (0, prepare_source_repo_1.prepareSourceRepo)(sourceGitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl, importRemoteName);
+    await (0, prepare_source_repo_1.prepareSourceRepo)(sourceGitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl);
     await createTemporaryRemote(destinationGitClient, (0, path_1.join)(sourceRepoPath, '.git'), importRemoteName);
     await (0, merge_remote_source_1.mergeRemoteSource)(destinationGitClient, sourceRemoteUrl, tempImportBranch, destination, importRemoteName, ref);
     spinner.start('Cleaning up temporary files and remotes');
@@ -132,19 +150,69 @@ async function importHandler(options) {
     const nxJson = (0, nx_json_1.readNxJson)(workspace_root_1.workspaceRoot);
     (0, workspace_context_1.resetWorkspaceContext)();
     const { plugins, updatePackageScripts } = await (0, init_v2_1.detectPlugins)(nxJson, options.interactive);
+    if (packageManager !== sourcePackageManager) {
+        output_1.output.warn({
+            title: `Mismatched package managers`,
+            bodyLines: [
+                `The source repository is using a different package manager (${sourcePackageManager}) than this workspace (${packageManager}).`,
+                `This could lead to install issues due to discrepancies in "package.json" features.`,
+            ],
+        });
+    }
+    // If install fails, we should continue since the errors could be resolved later.
+    let installFailed = false;
     if (plugins.length > 0) {
-        output_1.output.log({ title: 'Installing Plugins' });
-        (0, init_v2_1.installPlugins)(workspace_root_1.workspaceRoot, plugins, pmc, updatePackageScripts);
-        await destinationGitClient.amendCommit();
+        try {
+            output_1.output.log({ title: 'Installing Plugins' });
+            (0, init_v2_1.installPlugins)(workspace_root_1.workspaceRoot, plugins, pmc, updatePackageScripts);
+            await destinationGitClient.amendCommit();
+        }
+        catch (e) {
+            installFailed = true;
+            output_1.output.error({
+                title: `Install failed: ${e.message || 'Unknown error'}`,
+                bodyLines: [e.stack],
+            });
+        }
     }
     else if (await (0, needs_install_1.needsInstall)(packageManager, originalPackageWorkspaces)) {
+        try {
+            output_1.output.log({
+                title: 'Installing dependencies for imported code',
+            });
+            (0, utils_1.runInstall)(workspace_root_1.workspaceRoot, (0, package_manager_1.getPackageManagerCommand)(packageManager));
+            await destinationGitClient.amendCommit();
+        }
+        catch (e) {
+            installFailed = true;
+            output_1.output.error({
+                title: `Install failed: ${e.message || 'Unknown error'}`,
+                bodyLines: [e.stack],
+            });
+        }
+    }
+    console.log(await destinationGitClient.showStat());
+    if (installFailed) {
+        const pmc = (0, package_manager_1.getPackageManagerCommand)(packageManager);
+        output_1.output.warn({
+            title: `The import was successful, but the install failed`,
+            bodyLines: [
+                `You may need to run "${pmc.install}" manually to resolve the issue. The error is logged above.`,
+            ],
+        });
+    }
+    await warnOnMissingWorkspacesEntry(packageManager, pmc, relativeDestination);
+    // When only a subdirectory is imported, there might be devDependencies in the root package.json file
+    // that needs to be ported over as well.
+    if (ref) {
        output_1.output.log({
-            title: 'Installing dependencies for imported code',
+            title: `Check root dependencies`,
+            bodyLines: [
+                `"dependencies" and "devDependencies" are not imported from the source repository (${sourceRemoteUrl}).`,
+                `You may need to add some of those dependencies to this workspace in order to run tasks successfully.`,
+            ],
        });
-        (0, utils_1.runInstall)(workspace_root_1.workspaceRoot, (0, package_manager_1.getPackageManagerCommand)(packageManager));
-        await destinationGitClient.amendCommit();
     }
-    console.log(await destinationGitClient.showStat());
     output_1.output.log({
         title: `Merging these changes into ${(0, command_line_utils_1.getBaseRef)(nxJson)}`,
         bodyLines: [
@@ -171,3 +239,70 @@ async function createTemporaryRemote(destinationGitClient, sourceRemoteUrl, remo
     await destinationGitClient.addGitRemote(remoteName, sourceRemoteUrl);
     await destinationGitClient.fetch(remoteName);
 }
+// If the user imports a project that isn't in NPM/Yarn/PNPM workspaces, then its dependencies
+// will not be installed. We should warn users and provide instructions on how to fix this.
+async function warnOnMissingWorkspacesEntry(pm, pmc, pkgPath) {
+    if (!(0, package_manager_1.isWorkspacesEnabled)(pm, workspace_root_1.workspaceRoot)) {
+        output_1.output.warn({
+            title: `Missing workspaces in package.json`,
+            bodyLines: pm === 'npm'
+                ? [
+                    `We recommend enabling NPM workspaces to install dependencies for the imported project.`,
+                    `Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
+                    `See: https://docs.npmjs.com/cli/using-npm/workspaces`,
+                ]
+                : pm === 'yarn'
+                    ? [
+                        `We recommend enabling Yarn workspaces to install dependencies for the imported project.`,
+                        `Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
+                        `See: https://yarnpkg.com/features/workspaces`,
+                    ]
+                    : pm === 'bun'
+                        ? [
+                            `We recommend enabling Bun workspaces to install dependencies for the imported project.`,
+                            `Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
+                            `See: https://bun.sh/docs/install/workspaces`,
+                        ]
+                        : [
+                            `We recommend enabling PNPM workspaces to install dependencies for the imported project.`,
+                            `Add the following entry to to pnpm-workspace.yaml and run "${pmc.install}":`,
+                            chalk.bold(`packages:\n - '${pkgPath}'`),
+                            `See: https://pnpm.io/workspaces`,
+                        ],
+        });
+    }
+    else {
+        // Check if the new package is included in existing workspaces entries. If not, warn the user.
+        let workspaces = null;
+        if (pm === 'npm' || pm === 'yarn' || pm === 'bun') {
+            const packageJson = (0, file_utils_1.readPackageJson)();
+            workspaces = packageJson.workspaces;
+        }
+        else if (pm === 'pnpm') {
+            const yamlPath = (0, path_1.join)(workspace_root_1.workspaceRoot, 'pnpm-workspace.yaml');
+            if ((0, node_fs_1.existsSync)(yamlPath)) {
+                const yamlContent = await node_fs_1.promises.readFile(yamlPath, 'utf-8');
+                const yaml = (0, js_yaml_1.load)(yamlContent);
+                workspaces = yaml.packages;
+            }
+        }
+        if (workspaces) {
+            const isPkgIncluded = workspaces.some((w) => (0, minimatch_1.minimatch)(pkgPath, w));
+            if (!isPkgIncluded) {
+                const pkgsDir = (0, path_1.dirname)(pkgPath);
+                output_1.output.warn({
+                    title: `Project missing in workspaces`,
+                    bodyLines: pm === 'npm' || pm === 'yarn' || pm === 'bun'
+                        ? [
+                            `The imported project (${pkgPath}) is missing the "workspaces" field in package.json.`,
+                            `Add "${pkgsDir}/*" to workspaces run "${pmc.install}".`,
+                        ]
+                        : [
+                            `The imported project (${pkgPath}) is missing the "packages" field in pnpm-workspaces.yaml.`,
+                            `Add "${pkgsDir}/*" to packages run "${pmc.install}".`,
+                        ],
+                });
+            }
+        }
+    }
+}
```
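The workspaces check in `warnOnMissingWorkspacesEntry` is a plain glob match over the existing `workspaces` (or pnpm `packages`) entries. A small standalone illustration; the entries and paths are made up:

```js
// Illustrative sketch of the glob check behind "Project missing in workspaces".
const { minimatch } = require('minimatch');

const workspaces = ['libs/*', 'apps/*']; // example entries from package.json "workspaces"
const pkgPath = 'packages/my-lib';       // where the project was imported

// True if any existing workspaces glob covers the imported path.
const isPkgIncluded = workspaces.some((w) => minimatch(pkgPath, w));
console.log(isPkgIncluded); // false -> Nx would warn and suggest adding "packages/*"
```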
package/src/command-line/import/utils/prepare-source-repo.d.ts
CHANGED
```diff
@@ -1,2 +1,2 @@
 import { GitRepository } from '../../../utils/git-utils';
-export declare function prepareSourceRepo(gitClient: GitRepository, ref: string, source: string, relativeDestination: string, tempImportBranch: string, sourceRemoteUrl: string, originName: string): Promise<void>;
+export declare function prepareSourceRepo(gitClient: GitRepository, ref: string, source: string, relativeDestination: string, tempImportBranch: string, sourceRemoteUrl: string): Promise<void>;
```
package/src/command-line/import/utils/prepare-source-repo.js
CHANGED
```diff
@@ -4,99 +4,45 @@ exports.prepareSourceRepo = prepareSourceRepo;
 const createSpinner = require("ora");
 const path_1 = require("path");
 const promises_1 = require("node:fs/promises");
-async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl, originName) {
+async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl) {
     const spinner = createSpinner().start(`Fetching ${ref} from ${sourceRemoteUrl}`);
-    await gitClient.addFetchRemote(originName, ref);
-    await gitClient.fetch(originName, ref);
-    spinner.succeed(`Fetched ${ref} from ${sourceRemoteUrl}`);
-    spinner.start(`Checking out a temporary branch, ${tempImportBranch} based on ${ref}`);
-    await gitClient.checkout(tempImportBranch, {
-        new: true,
-        base: `${originName}/${ref}`,
-    });
-    spinner.succeed(`Created a ${tempImportBranch} branch based on ${ref}`);
     const relativeSourceDir = (0, path_1.relative)(gitClient.root, (0, path_1.join)(gitClient.root, source));
-
-
-
-
-    try {
-        await (0, promises_1.rm)(destinationInSource, {
-            recursive: true,
-        });
-    }
-    catch { }
-    await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
-    const gitignores = new Set();
-    for (const file of files) {
-        if ((0, path_1.basename)(file) === '.gitignore') {
-            gitignores.add(file);
-            continue;
-        }
-        spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
-        const newPath = (0, path_1.join)(destinationInSource, file);
-        await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
-        try {
-            await gitClient.move(file, newPath);
-        }
-        catch {
-            await wait(100);
-            await gitClient.move(file, newPath);
-        }
-    }
-    await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
-    for (const gitignore of gitignores) {
-        await gitClient.move(gitignore, (0, path_1.join)(destinationInSource, gitignore));
+    if (relativeSourceDir !== '') {
+        if (await gitClient.hasFilterRepoInstalled()) {
+            spinner.start(`Filtering git history to only include files in ${relativeSourceDir}`);
+            await gitClient.filterRepo(relativeSourceDir);
         }
-
-
-    await
+        else {
+            spinner.start(`Filtering git history to only include files in ${relativeSourceDir} (this might take a few minutes -- install git-filter-repo for faster performance)`);
+            await gitClient.filterBranch(relativeSourceDir, tempImportBranch);
         }
+        spinner.succeed(`Filtered git history to only include files in ${relativeSourceDir}`);
     }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            continue;
-        }
-        spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
-        if (!(0, path_1.relative)(source, file).startsWith('..')) {
-            if (needsMove) {
-                const newPath = (0, path_1.join)(destinationInSource, file);
-                await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
-                try {
-                    await gitClient.move(file, newPath);
-                }
-                catch {
-                    await wait(100);
-                    await gitClient.move(file, newPath);
-                }
-            }
-        }
-        else {
-            await (0, promises_1.rm)((0, path_1.join)(gitClient.root, file), {
-                recursive: true,
-            });
-        }
+    const destinationInSource = (0, path_1.join)(gitClient.root, relativeDestination);
+    spinner.start(`Moving files and git history to ${destinationInSource}`);
+    // The result of filter-branch will contain only the files in the subdirectory at its root.
+    const files = await gitClient.getGitFiles('.');
+    try {
+        await (0, promises_1.rm)(destinationInSource, {
+            recursive: true,
+        });
+    }
+    catch { }
+    await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
+    for (const file of files) {
+        spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
+        const newPath = (0, path_1.join)(destinationInSource, file);
+        await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
+        try {
+            await gitClient.move(file, newPath);
         }
-
-
-        await gitClient.
+        catch {
+            await wait(100);
+            await gitClient.move(file, newPath);
         }
     }
+    await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
+    await gitClient.amendCommit();
     spinner.succeed(`${sourceRemoteUrl} has been prepared to be imported into this workspace on a temporary branch: ${tempImportBranch} in ${gitClient.root}`);
 }
 function wait(ms) {
```
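`prepareSourceRepo` no longer moves files one by one before filtering; it rewrites history with `git filter-repo` when that tool is installed and falls back to the slower built-in `git filter-branch` otherwise. A hedged sketch of the same fallback in isolation; the repository path, subdirectory, and branch are placeholders:

```js
// Illustrative sketch of the filter-repo / filter-branch fallback.
const { GitRepository } = require('nx/src/utils/git-utils');

async function filterHistory(repoPath, subdirectory, branch) {
    const gitClient = new GitRepository(repoPath);
    if (await gitClient.hasFilterRepoInstalled()) {
        // Fast path: git-filter-repo rewrites history so only `subdirectory`
        // remains at the repository root.
        await gitClient.filterRepo(subdirectory);
    }
    else {
        // Built-in fallback: slower, but requires no extra tooling.
        await gitClient.filterBranch(subdirectory, branch);
    }
}

filterHistory('/tmp/repo', 'packages/my-lib', 'main').catch(console.error); // placeholders
```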
package/src/command-line/yargs-utils/shared-options.js
CHANGED
```diff
@@ -209,15 +209,7 @@ function withOutputStyleOption(yargs, choices = [
     'stream-without-prefixes',
 ]) {
     return yargs.option('output-style', {
-        describe: `Defines how Nx emits outputs tasks logs
-
-| option | description |
-| --- | --- |
-| dynamic | use dynamic output life cycle, previous content is overwritten or modified as new outputs are added, display minimal logs by default, always show errors. This output format is recommended on your local development environments. |
-| static | uses static output life cycle, no previous content is rewritten or modified as new outputs are added. This output format is recommened for CI environments. |
-| stream | nx by default logs output to an internal output stream, enable this option to stream logs to stdout / stderr |
-| stream-without-prefixes | nx prefixes the project name the target is running on, use this option remove the project name prefix from output |
-`,
+        describe: `Defines how Nx emits outputs tasks logs. **dynamic**: use dynamic output life cycle, previous content is overwritten or modified as new outputs are added, display minimal logs by default, always show errors. This output format is recommended on your local development environments. **static**: uses static output life cycle, no previous content is rewritten or modified as new outputs are added. This output format is recommened for CI environments. **stream**: nx by default logs output to an internal output stream, enable this option to stream logs to stdout / stderr. **stream-without-prefixes**: nx prefixes the project name the target is running on, use this option remove the project name prefix from output.`,
         type: 'string',
         choices,
     });
```
package/src/daemon/server/sync-generators.d.ts
CHANGED
```diff
@@ -4,3 +4,7 @@ export declare function getCachedSyncGeneratorChanges(generators: string[]): Pro
 export declare function flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;
 export declare function collectAndScheduleSyncGenerators(projectGraph: ProjectGraph): void;
 export declare function getCachedRegisteredSyncGenerators(): Promise<string[]>;
+/**
+ * @internal
+ */
+export declare function _getConflictingGeneratorGroups(results: SyncGeneratorChangesResult[]): string[][];
```
package/src/daemon/server/sync-generators.js
CHANGED
```diff
@@ -4,6 +4,7 @@ exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
 exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
 exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
 exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+exports._getConflictingGeneratorGroups = _getConflictingGeneratorGroups;
 const nx_json_1 = require("../../config/nx-json");
 const tree_1 = require("../../generators/tree");
 const file_hasher_1 = require("../../hasher/file-hasher");
@@ -25,7 +26,6 @@ let storedDisabledTaskSyncGeneratorsHash;
 const log = (...messageParts) => {
     logger_1.serverLogger.log('[SYNC]:', ...messageParts);
 };
-// TODO(leo): check conflicts and reuse the Tree where possible
 async function getCachedSyncGeneratorChanges(generators) {
     try {
         log('get sync generators changes on demand', generators);
@@ -37,51 +37,15 @@ async function getCachedSyncGeneratorChanges(generators) {
         }
         // reset the wait time
         waitPeriod = 100;
-        let projects;
-        let errored = false;
-        const getProjectsConfigurations = async () => {
-            if (projects || errored) {
-                return projects;
-            }
-            const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
-            projects = projectGraph
-                ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
-                : null;
-            errored = error !== undefined;
-            return projects;
-        };
-        return (await Promise.all(generators.map(async (generator) => {
-            if (scheduledGenerators.has(generator) ||
-                !syncGeneratorsCacheResultPromises.has(generator)) {
-                // it's scheduled to run (there are pending changes to process) or
-                // it's not scheduled and there's no cached result, so run it
-                const projects = await getProjectsConfigurations();
-                if (projects) {
-                    log(generator, 'already scheduled or not cached, running it now');
-                    runGenerator(generator, projects);
-                }
-                else {
-                    log(generator, 'already scheduled or not cached, project graph errored');
-                    /**
-                     * This should never happen. This is invoked imperatively, and by
-                     * the time it is invoked, the project graph would have already
-                     * been requested. If it errored, it would have been reported and
-                     * this wouldn't have been invoked. We handle it just in case.
-                     *
-                     * Since the project graph would be reported by the relevant
-                     * handlers separately, we just ignore the error, don't cache
-                     * any result and return an empty result, the next time this is
-                     * invoked the process will repeat until it eventually recovers
-                     * when the project graph is fixed.
-                     */
-                    return Promise.resolve({ changes: [], generatorName: generator });
-                }
-            }
-            else {
-                log(generator, 'not scheduled and has cached result, returning cached result');
-            }
-            return syncGeneratorsCacheResultPromises.get(generator);
-        }))).flat();
+        const results = await getFromCacheOrRunGenerators(generators);
+        const conflicts = _getConflictingGeneratorGroups(results);
+        if (!conflicts.length) {
+            // there are no conflicts
+            return results;
+        }
+        // there are conflicts, so we need to re-run the conflicting generators
+        // using the same tree
+        return await processConflictingGenerators(conflicts, results);
     }
     catch (e) {
         console.error(e);
@@ -131,7 +95,7 @@ function collectAndScheduleSyncGenerators(projectGraph) {
         }
         const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
         for (const generator of scheduledGenerators) {
-            runGenerator(generator, projects);
+            syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
         }
         await Promise.all(syncGeneratorsCacheResultPromises.values());
     }, waitPeriod);
@@ -148,6 +112,163 @@ async function getCachedRegisteredSyncGenerators() {
     }
     return [...registeredSyncGenerators];
 }
+async function getFromCacheOrRunGenerators(generators) {
+    let projects;
+    let errored = false;
+    const getProjectsConfigurations = async () => {
+        if (projects || errored) {
+            return projects;
+        }
+        const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+        projects = projectGraph
+            ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+            : null;
+        errored = error !== undefined;
+        return projects;
+    };
+    return (await Promise.all(generators.map(async (generator) => {
+        if (scheduledGenerators.has(generator) ||
+            !syncGeneratorsCacheResultPromises.has(generator)) {
+            // it's scheduled to run (there are pending changes to process) or
+            // it's not scheduled and there's no cached result, so run it
+            const projects = await getProjectsConfigurations();
+            if (projects) {
+                log(generator, 'already scheduled or not cached, running it now');
+                syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
+            }
+            else {
+                log(generator, 'already scheduled or not cached, project graph errored');
+                /**
+                 * This should never happen. This is invoked imperatively, and by
+                 * the time it is invoked, the project graph would have already
+                 * been requested. If it errored, it would have been reported and
+                 * this wouldn't have been invoked. We handle it just in case.
+                 *
+                 * Since the project graph would be reported by the relevant
+                 * handlers separately, we just ignore the error, don't cache
+                 * any result and return an empty result, the next time this is
+                 * invoked the process will repeat until it eventually recovers
+                 * when the project graph is fixed.
+                 */
+                return Promise.resolve({ changes: [], generatorName: generator });
+            }
+        }
+        else {
+            log(generator, 'not scheduled and has cached result, returning cached result');
+        }
+        return syncGeneratorsCacheResultPromises.get(generator);
+    }))).flat();
+}
+async function runConflictingGenerators(tree, generators) {
+    const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+    const projects = projectGraph
+        ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+        : null;
+    if (!projects) {
+        /**
+         * This should never happen. This is invoked imperatively, and by
+         * the time it is invoked, the project graph would have already
+         * been requested. If it errored, it would have been reported and
+         * this wouldn't have been invoked. We handle it just in case.
+         *
+         * Since the project graph would be reported by the relevant
+         * handlers separately, we just ignore the error.
+         */
+        return generators.map((generator) => ({
+            changes: [],
+            generatorName: generator,
+        }));
+    }
+    // we need to run conflicting generators sequentially because they use the same tree
+    const results = [];
+    for (const generator of generators) {
+        log(generator, 'running it now');
+        results.push(await runGenerator(generator, projects, tree));
+    }
+    return results;
+}
+async function processConflictingGenerators(conflicts, initialResults) {
+    const conflictRunResults = (await Promise.all(conflicts.map((generators) => {
+        const [firstGenerator, ...generatorsToRun] = generators;
+        // it must exists because the conflicts were identified from the initial results
+        const firstGeneratorResult = initialResults.find((r) => r.generatorName === firstGenerator);
+        const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generators ${generators.join(',')}`);
+        // pre-apply the changes from the first generator to avoid running it
+        for (const change of firstGeneratorResult.changes) {
+            if (change.type === 'CREATE' || change.type === 'UPDATE') {
+                tree.write(change.path, change.content, change.options);
+            }
+            else if (change.type === 'DELETE') {
+                tree.delete(change.path);
+            }
+        }
+        /**
+         * We don't cache the results of conflicting generators because they
+         * use the same tree, so some files might contain results from multiple
+         * generators and we don't have guarantees that the same combination of
+         * generators will run together.
+         */
+        return runConflictingGenerators(tree, generatorsToRun);
+    }))).flat();
+    /**
+     * The order of the results from the re-run generators is important because
+     * the last result from a group of conflicting generators will contain the
+     * changes from the previous conflicting generators. So, instead of replacing
+     * in-place the initial results, we first add the results from the re-run
+     * generators, and then add the initial results that were not from a
+     * conflicting generator.
+     */
+    const results = [...conflictRunResults];
+    for (const result of initialResults) {
+        if (conflictRunResults.every((r) => r.generatorName !== result.generatorName)) {
+            // this result is not from a conflicting generator, so we add it to the
+            // results
+            results.push(result);
+        }
+    }
+    return results;
+}
+/**
+ * @internal
+ */
+function _getConflictingGeneratorGroups(results) {
+    const changedFileToGeneratorMap = new Map();
+    for (const result of results) {
+        for (const change of result.changes) {
+            if (!changedFileToGeneratorMap.has(change.path)) {
+                changedFileToGeneratorMap.set(change.path, new Set());
+            }
+            changedFileToGeneratorMap.get(change.path).add(result.generatorName);
+        }
+    }
+    const conflicts = [];
+    for (const generatorSet of changedFileToGeneratorMap.values()) {
+        if (generatorSet.size === 1) {
+            // no conflicts
+            continue;
+        }
+        if (conflicts.length === 0) {
+            // there are no conflicts yet, so we just add the first group
+            conflicts.push(new Set(generatorSet));
+            continue;
+        }
+        // identify if any of the current generator sets intersect with any of the
+        // existing conflict groups
+        const generatorsArray = Array.from(generatorSet);
+        const existingConflictGroup = conflicts.find((group) => generatorsArray.some((generator) => group.has(generator)));
+        if (existingConflictGroup) {
+            // there's an intersecting group, so we merge the two
+            for (const generator of generatorsArray) {
+                existingConflictGroup.add(generator);
+            }
+        }
+        else {
+            // there's no intersecting group, so we create a new one
+            conflicts.push(new Set(generatorsArray));
+        }
+    }
+    return conflicts.map((group) => Array.from(group));
+}
 function collectAllRegisteredSyncGenerators(projectGraph) {
     const nxJson = (0, nx_json_1.readNxJson)();
     const projectGraphHash = hashProjectGraph(projectGraph);
@@ -191,16 +312,15 @@ function collectAllRegisteredSyncGenerators(projectGraph) {
         }
     }
 }
-function runGenerator(generator, projects) {
+function runGenerator(generator, projects, tree) {
     log('running scheduled generator', generator);
     // remove it from the scheduled set
    scheduledGenerators.delete(generator);
-
-
-    syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+    tree ??= new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+    return (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
        log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
        return result;
-    }));
+    });
 }
 function hashProjectGraph(projectGraph) {
     const stringifiedProjects = Object.entries(projectGraph.nodes)
```
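`_getConflictingGeneratorGroups` unions generators that touch a common file, and the union is transitive: if A and B share one file and B and C share another, all three end up in one group. A small input/output illustration with made-up generator names and paths:

```js
// Illustrative sketch (hypothetical generator names and file paths).
const { _getConflictingGeneratorGroups } = require('nx/src/daemon/server/sync-generators');

const results = [
    { generatorName: 'a', changes: [{ type: 'UPDATE', path: 'tsconfig.json' }] },
    { generatorName: 'b', changes: [{ type: 'UPDATE', path: 'tsconfig.json' },
                                    { type: 'UPDATE', path: 'nx.json' }] },
    { generatorName: 'c', changes: [{ type: 'UPDATE', path: 'nx.json' }] },
    { generatorName: 'd', changes: [{ type: 'CREATE', path: 'README.md' }] },
];

// 'a' and 'b' conflict on tsconfig.json; 'b' and 'c' conflict on nx.json, so
// the two groups merge. 'd' touched a unique file and ends up in no group.
console.log(_getConflictingGeneratorGroups(results)); // [ [ 'a', 'b', 'c' ] ]
```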
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file (contents not shown)
package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.d.ts
CHANGED
```diff
@@ -7,7 +7,8 @@ export interface ConnectToNxCloudOptions {
     hideFormatLogs?: boolean;
     github?: boolean;
     directory?: string;
+    generateToken?: boolean;
 }
-export declare function connectToNxCloud(tree: Tree, schema: ConnectToNxCloudOptions, nxJson?: NxJsonConfiguration<string[] | "*">): Promise<string>;
+export declare function connectToNxCloud(tree: Tree, schema: ConnectToNxCloudOptions, nxJson?: NxJsonConfiguration<string[] | "*">): Promise<string | null>;
 declare function connectToNxCloudGenerator(tree: Tree, options: ConnectToNxCloudOptions): Promise<void>;
 export default connectToNxCloudGenerator;
```
package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js
CHANGED
```diff
@@ -88,20 +88,22 @@ async function connectToNxCloud(tree, schema, nxJson = (0, nx_json_1.readNxJson)
         printCloudConnectionDisabledMessage();
         return null;
     }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    const isGitHubDetected = schema.github ?? (await (0, url_shorten_1.repoUsesGithub)(schema.github));
+    let responseFromCreateNxCloudWorkspaceV2;
+    /**
+     * Do not create an Nx Cloud token if the user is using GitHub and
+     * is running `nx-connect` AND `token` is undefined (override)
+     */
+    if (!schema.generateToken &&
+        isGitHubDetected &&
+        schema.installationSource === 'nx-connect')
+        return null;
+    responseFromCreateNxCloudWorkspaceV2 = await createNxCloudWorkspaceV2(getRootPackageName(tree), schema.installationSource, getNxInitDate());
+    addNxCloudIdToNxJson(tree, responseFromCreateNxCloudWorkspaceV2?.nxCloudId, schema.directory);
+    await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
+        silent: schema.hideFormatLogs,
+    });
+    return responseFromCreateNxCloudWorkspaceV2.nxCloudId;
 }
 async function connectToNxCloudGenerator(tree, options) {
     await connectToNxCloud(tree, options);
```
package/src/nx-cloud/generators/connect-to-nx-cloud/schema.json
CHANGED
```diff
@@ -21,6 +21,10 @@
       "description": "Hide formatting logs",
       "x-priority": "internal"
     },
+    "generateToken": {
+      "type": "boolean",
+      "description": "Explicitly asks for a token to be created, do not override existing tokens from Nx Cloud"
+    },
     "github": {
       "type": "boolean",
       "description": "If the user will be using GitHub as their git hosting provider",
```
package/src/utils/git-utils.d.ts
CHANGED
```diff
@@ -1,6 +1,6 @@
-import { ExecSyncOptions } from 'child_process';
-export declare function cloneFromUpstream(url: string, destination: string, { originName }?: {
+export declare function cloneFromUpstream(url: string, destination: string, { originName, depth }?: {
     originName: string;
+    depth?: number;
 }): Promise<GitRepository>;
 export declare class GitRepository {
     private directory;
@@ -8,12 +8,10 @@ export declare class GitRepository {
     constructor(directory: string);
     getGitRootPath(cwd: string): string;
     addFetchRemote(remoteName: string, branch: string): Promise<string>;
-    private execAsync;
     showStat(): Promise<string>;
     listBranches(): Promise<string[]>;
     getGitFiles(path: string): Promise<string[]>;
     reset(ref: string): Promise<string>;
-    squashLastTwoCommits(): Promise<string>;
     mergeUnrelatedHistories(ref: string, message: string): Promise<string>;
     fetch(remote: string, ref?: string): Promise<string>;
     checkout(branch: string, opts: {
@@ -25,14 +23,13 @@ export declare class GitRepository {
     commit(message: string): Promise<string>;
     amendCommit(): Promise<string>;
     deleteGitRemote(name: string): Promise<string>;
-    deleteBranch(branch: string): Promise<string>;
     addGitRemote(name: string, url: string): Promise<string>;
+    hasFilterRepoInstalled(): Promise<boolean>;
+    filterRepo(subdirectory: string): Promise<string>;
+    filterBranch(subdirectory: string, branchName: string): Promise<string>;
+    private execAsync;
+    private quotePath;
 }
-/**
- * This is used by the squash editor script to update the rebase file.
- */
-export declare function updateRebaseFile(contents: string): string;
-export declare function fetchGitRemote(name: string, branch: string, execOptions: ExecSyncOptions): string | Buffer;
 /**
  * This is currently duplicated in Nx Console. Please let @MaxKless know if you make changes here.
  */
```
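`cloneFromUpstream` now accepts an optional `depth`, which `nx import` forwards from `--depth`. A hedged usage sketch; the URL and destination are placeholders:

```js
// Hypothetical sketch of the updated cloneFromUpstream signature.
const { cloneFromUpstream } = require('nx/src/utils/git-utils');

async function main() {
    // `depth: 1` adds `--depth 1` to the underlying `git clone`, fetching
    // only the most recent commit of each branch.
    const repo = await cloneFromUpstream(
        'https://github.com/example-org/example-repo.git', // placeholder
        '/tmp/example-clone',
        { originName: '__tmp_nx_import__', depth: 1 }
    );
    console.log(await repo.showStat());
}

main().catch(console.error);
```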
package/src/utils/git-utils.js
CHANGED
```diff
@@ -2,16 +2,13 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.GitRepository = void 0;
 exports.cloneFromUpstream = cloneFromUpstream;
-exports.updateRebaseFile = updateRebaseFile;
-exports.fetchGitRemote = fetchGitRemote;
 exports.getGithubSlugOrNull = getGithubSlugOrNull;
 exports.extractUserAndRepoFromGitHubUrl = extractUserAndRepoFromGitHubUrl;
 exports.commitChanges = commitChanges;
 exports.getLatestCommitSha = getLatestCommitSha;
 const child_process_1 = require("child_process");
-const devkit_exports_1 = require("../devkit-exports");
 const path_1 = require("path");
-const SQUASH_EDITOR = (0, path_1.join)(__dirname, 'squash.js');
+const devkit_exports_1 = require("../devkit-exports");
 function execAsync(command, execOptions) {
     return new Promise((res, rej) => {
         (0, child_process_1.exec)(command, execOptions, (err, stdout, stderr) => {
@@ -22,9 +19,12 @@ function execAsync(command, execOptions) {
         });
     });
 }
-async function cloneFromUpstream(url, destination, { originName } = {
-
+async function cloneFromUpstream(url, destination, { originName, depth } = {
+    originName: 'origin',
+}) {
+    await execAsync(`git clone ${url} ${destination} ${depth ? `--depth ${depth}` : ''} --origin ${originName}`, {
         cwd: (0, path_1.dirname)(destination),
+        maxBuffer: 10 * 1024 * 1024,
     });
     return new GitRepository(destination);
 }
@@ -40,13 +40,8 @@ class GitRepository {
         .toString()
         .trim();
     }
-    addFetchRemote(remoteName, branch) {
-        return this.execAsync(`git config --add remote.${remoteName}.fetch "+refs/heads/${branch}:refs/remotes/${remoteName}/${branch}"`);
-    }
-    execAsync(command) {
-        return execAsync(command, {
-            cwd: this.root,
-        });
+    async addFetchRemote(remoteName, branch) {
+        return await this.execAsync(`git config --add remote.${remoteName}.fetch "+refs/heads/${branch}:refs/remotes/${remoteName}/${branch}"`);
     }
     async showStat() {
         return await this.execAsync(`git show --stat`);
@@ -61,62 +56,84 @@ class GitRepository {
         .replace('refs/heads/', ''));
     }
     async getGitFiles(path) {
-        return (await this.execAsync(`git ls-files ${path}`))
+        // Use -z to return file names exactly as they are stored in git, separated by NULL (\x00) character.
+        // This avoids problems with special characters in file names.
+        return (await this.execAsync(`git ls-files -z ${path}`))
             .trim()
-            .split('\n')
+            .split('\x00')
             .map((s) => s.trim())
             .filter(Boolean);
     }
     async reset(ref) {
-        return this.execAsync(`git reset ${ref} --hard`);
-    }
-    async squashLastTwoCommits() {
-        return this.execAsync(`git -c core.editor="node ${SQUASH_EDITOR}" rebase --interactive --no-autosquash HEAD~2`);
+        return await this.execAsync(`git reset ${ref} --hard`);
     }
     async mergeUnrelatedHistories(ref, message) {
-        return this.execAsync(`git merge ${ref} -X ours --allow-unrelated-histories -m "${message}"`);
+        return await this.execAsync(`git merge ${ref} -X ours --allow-unrelated-histories -m "${message}"`);
     }
     async fetch(remote, ref) {
-        return this.execAsync(`git fetch ${remote}${ref ? ` ${ref}` : ''}`);
+        return await this.execAsync(`git fetch ${remote}${ref ? ` ${ref}` : ''}`);
     }
     async checkout(branch, opts) {
-        return this.execAsync(`git checkout ${opts.new ? '-b ' : ' '}${branch}${opts.base ? ' ' + opts.base : ''}`);
+        return await this.execAsync(`git checkout ${opts.new ? '-b ' : ' '}${branch}${opts.base ? ' ' + opts.base : ''}`);
     }
     async move(path, destination) {
-        return this.execAsync(`git mv ${path} ${destination}`);
+        return await this.execAsync(`git mv ${this.quotePath(path)} ${this.quotePath(destination)}`);
     }
     async push(ref, remoteName) {
-        return this.execAsync(`git push -u -f ${remoteName} ${ref}`);
+        return await this.execAsync(`git push -u -f ${remoteName} ${ref}`);
     }
     async commit(message) {
-        return this.execAsync(`git commit -am "${message}"`);
+        return await this.execAsync(`git commit -am "${message}"`);
     }
     async amendCommit() {
-        return this.execAsync(`git commit --amend -a --no-edit`);
+        return await this.execAsync(`git commit --amend -a --no-edit`);
     }
-    deleteGitRemote(name) {
-        return this.execAsync(`git remote rm ${name}`);
+    async deleteGitRemote(name) {
+        return await this.execAsync(`git remote rm ${name}`);
     }
-    deleteBranch(branch) {
-        return this.execAsync(`git branch -D ${branch}`);
+    async addGitRemote(name, url) {
+        return await this.execAsync(`git remote add ${name} ${url}`);
+    }
+    async hasFilterRepoInstalled() {
+        try {
+            await this.execAsync(`git filter-repo --help`);
+            return true;
+        }
+        catch {
+            return false;
+        }
     }
-    addGitRemote(name, url) {
-        return this.execAsync(`git remote add ${name} ${url}`);
+    // git-filter-repo is much faster than filter-branch, but needs to be installed by user
+    // Use `hasFilterRepoInstalled` to check if it's installed
+    async filterRepo(subdirectory) {
+        // filter-repo requires POSIX path to work
+        const posixPath = subdirectory.split(path_1.sep).join(path_1.posix.sep);
+        return await this.execAsync(`git filter-repo -f --subdirectory-filter ${this.quotePath(posixPath)}`);
+    }
+    async filterBranch(subdirectory, branchName) {
+        // filter-repo requires POSIX path to work
+        const posixPath = subdirectory.split(path_1.sep).join(path_1.posix.sep);
+        // We need non-ASCII file names to not be quoted, or else filter-branch will exclude them.
+        await this.execAsync(`git config core.quotepath false`);
+        return await this.execAsync(`git filter-branch --subdirectory-filter ${this.quotePath(posixPath)} -- ${branchName}`);
+    }
+    execAsync(command) {
+        return execAsync(command, {
+            cwd: this.root,
+            maxBuffer: 10 * 1024 * 1024,
+        });
+    }
+    quotePath(path) {
+        return process.platform === 'win32'
+            ? // Windows/CMD only understands double-quotes, single-quotes are treated as part of the file name
+              // Bash and other shells will substitute `$` in file names with a variable value.
+              `"${path}"`
+            : // e.g. `git mv "$$file.txt" "libs/a/$$file.txt"` will not work since `$$` is swapped with the PID of the last process.
+              // Using single-quotes prevents this substitution.
+              `'${path}'`;
     }
 }
 exports.GitRepository = GitRepository;
-/**
- * This is used by the squash editor script to update the rebase file.
- */
-function updateRebaseFile(contents) {
-    const lines = contents.split('\n');
-    const lastCommitIndex = lines.findIndex((line) => line === '') - 1;
-    lines[lastCommitIndex] = lines[lastCommitIndex].replace('pick', 'fixup');
-    return lines.join('\n');
-}
-function fetchGitRemote(name, branch, execOptions) {
-    return (0, child_process_1.execSync)(`git fetch ${name} ${branch} --depth 1`, execOptions);
-}
 /**
  * This is currently duplicated in Nx Console. Please let @MaxKless know if you make changes here.
  */
```
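The new private `quotePath` helper picks a quoting style per platform so file names containing `$`, spaces, or quotes survive the shell that runs each git command. The same rule as a standalone sketch:

```js
// Illustrative sketch of the platform-dependent quoting rule in quotePath.
function quotePath(path) {
    return process.platform === 'win32'
        ? `"${path}"`  // CMD only understands double-quotes
        : `'${path}'`; // single-quotes stop Bash from substituting `$$` etc.
}

// e.g. on Linux: git mv '$$file.txt' 'libs/a/$$file.txt'
console.log(`git mv ${quotePath('$$file.txt')} ${quotePath('libs/a/$$file.txt')}`);
```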
package/src/utils/sync-generators.d.ts
CHANGED
```diff
@@ -3,7 +3,7 @@ import { type NxJsonConfiguration } from '../config/nx-json';
 import type { ProjectGraph } from '../config/project-graph';
 import type { TaskGraph } from '../config/task-graph';
 import type { ProjectConfiguration } from '../config/workspace-json-project-json';
-import { FsTree, type FileChange } from '../generators/tree';
+import { type FileChange, type Tree } from '../generators/tree';
 export type SyncGeneratorResult = void | {
     callback?: GeneratorCallback;
     outOfSyncMessage?: string;
@@ -18,7 +18,7 @@ export type SyncGeneratorChangesResult = {
 export declare function getSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorChangesResult[]>;
 export declare function flushSyncGeneratorChanges(results: SyncGeneratorChangesResult[]): Promise<void>;
 export declare function collectAllRegisteredSyncGenerators(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Promise<string[]>;
-export declare function runSyncGenerator(tree: FsTree, generatorSpecifier: string, projects: Record<string, ProjectConfiguration>): Promise<SyncGeneratorChangesResult>;
+export declare function runSyncGenerator(tree: Tree, generatorSpecifier: string, projects: Record<string, ProjectConfiguration>): Promise<SyncGeneratorChangesResult>;
 export declare function collectEnabledTaskSyncGeneratorsFromProjectGraph(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Set<string>;
 export declare function collectEnabledTaskSyncGeneratorsFromTaskGraph(taskGraph: TaskGraph, projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Set<string>;
 export declare function collectRegisteredGlobalSyncGenerators(nxJson?: NxJsonConfiguration<string[] | "*">): Set<string>;
```
package/src/utils/squash.d.ts
DELETED
```diff
@@ -1 +0,0 @@
-export {};
```
package/src/utils/squash.js
DELETED
```diff
@@ -1,12 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const fs_1 = require("fs");
-const git_utils_1 = require("./git-utils");
-// This script is used as an editor for git rebase -i
-// This is the file which git creates. When this script exits, the updates should be written to this file.
-const filePath = process.argv[2];
-// Change the second commit from pick to fixup
-const contents = (0, fs_1.readFileSync)(filePath).toString();
-const newContents = (0, git_utils_1.updateRebaseFile)(contents);
-// Write the updated contents back to the file
-(0, fs_1.writeFileSync)(filePath, newContents);
```