nx 19.6.0-beta.3 → 19.6.0-beta.5

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "nx",
- "version": "19.6.0-beta.3",
+ "version": "19.6.0-beta.5",
  "private": false,
  "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
  "repository": {
@@ -71,7 +71,7 @@
  "yargs-parser": "21.1.1",
  "node-machine-id": "1.1.12",
  "ora": "5.3.0",
- "@nrwl/tao": "19.6.0-beta.3"
+ "@nrwl/tao": "19.6.0-beta.5"
  },
  "peerDependencies": {
  "@swc-node/register": "^1.8.0",
@@ -86,16 +86,16 @@
  }
  },
  "optionalDependencies": {
- "@nx/nx-darwin-x64": "19.6.0-beta.3",
- "@nx/nx-darwin-arm64": "19.6.0-beta.3",
- "@nx/nx-linux-x64-gnu": "19.6.0-beta.3",
- "@nx/nx-linux-x64-musl": "19.6.0-beta.3",
- "@nx/nx-win32-x64-msvc": "19.6.0-beta.3",
- "@nx/nx-linux-arm64-gnu": "19.6.0-beta.3",
- "@nx/nx-linux-arm64-musl": "19.6.0-beta.3",
- "@nx/nx-linux-arm-gnueabihf": "19.6.0-beta.3",
- "@nx/nx-win32-arm64-msvc": "19.6.0-beta.3",
- "@nx/nx-freebsd-x64": "19.6.0-beta.3"
+ "@nx/nx-darwin-x64": "19.6.0-beta.5",
+ "@nx/nx-darwin-arm64": "19.6.0-beta.5",
+ "@nx/nx-linux-x64-gnu": "19.6.0-beta.5",
+ "@nx/nx-linux-x64-musl": "19.6.0-beta.5",
+ "@nx/nx-win32-x64-msvc": "19.6.0-beta.5",
+ "@nx/nx-linux-arm64-gnu": "19.6.0-beta.5",
+ "@nx/nx-linux-arm64-musl": "19.6.0-beta.5",
+ "@nx/nx-linux-arm-gnueabihf": "19.6.0-beta.5",
+ "@nx/nx-win32-arm64-msvc": "19.6.0-beta.5",
+ "@nx/nx-freebsd-x64": "19.6.0-beta.5"
  },
  "nx-migrations": {
  "migrations": "./migrations.json",
@@ -0,0 +1,2 @@
+ import { CommandModule } from 'yargs';
+ export declare const yargsImportCommand: CommandModule;
@@ -0,0 +1,38 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.yargsImportCommand = void 0;
+ const documentation_1 = require("../yargs-utils/documentation");
+ const shared_options_1 = require("../yargs-utils/shared-options");
+ const params_1 = require("../../utils/params");
+ exports.yargsImportCommand = {
+ command: 'import [sourceRemoteUrl] [destination]',
+ describe: false,
+ builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)((0, shared_options_1.withVerbose)(yargs
+ .positional('sourceRemoteUrl', {
+ type: 'string',
+ description: 'The remote URL of the source to import',
+ })
+ .positional('destination', {
+ type: 'string',
+ description: 'The directory in the current workspace to import into',
+ })
+ .option('source', {
+ type: 'string',
+ description: 'The directory in the source repository to import from',
+ })
+ .option('ref', {
+ type: 'string',
+ description: 'The branch from the source repository to import',
+ })
+ .option('interactive', {
+ type: 'boolean',
+ description: 'Interactive mode',
+ default: true,
+ })), 'import'),
+ handler: async (args) => {
+ const exitCode = await (0, params_1.handleErrors)(args.verbose ?? process.env.NX_VERBOSE_LOGGING === 'true', async () => {
+ return (await Promise.resolve().then(() => require('./import'))).importHandler(args);
+ });
+ process.exit(exitCode);
+ },
+ };
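
The two hunks above add a hidden `nx import` command (hidden because `describe: false`). A minimal usage sketch, not part of the diff, based only on the positionals and options defined in the builder above; the repository URL and directory names are hypothetical:

// CLI form (flags taken from the builder above; URL and paths are made up):
//   nx import https://github.com/acme/other-repo.git packages/imported --ref main --source libs/shared
// Programmatic form, calling the handler module the command lazily requires:
const { importHandler } = require('./import');
importHandler({
  sourceRemoteUrl: 'https://github.com/acme/other-repo.git', // hypothetical remote
  destination: 'packages/imported',
  source: 'libs/shared',
  ref: 'main',
  verbose: false,
  interactive: true,
});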
@@ -0,0 +1,21 @@
+ export interface ImportOptions {
+ /**
+ * The remote URL of the repository to import
+ */
+ sourceRemoteUrl: string;
+ /**
+ * The branch or reference to import
+ */
+ ref: string;
+ /**
+ * The directory in the source repo to import
+ */
+ source: string;
+ /**
+ * The directory in the destination repo to import into
+ */
+ destination: string;
+ verbose: boolean;
+ interactive: boolean;
+ }
+ export declare function importHandler(options: ImportOptions): Promise<void>;
@@ -0,0 +1,173 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.importHandler = importHandler;
+ const path_1 = require("path");
+ const git_utils_1 = require("../../utils/git-utils");
+ const promises_1 = require("node:fs/promises");
+ const tmp_1 = require("tmp");
+ const enquirer_1 = require("enquirer");
+ const output_1 = require("../../utils/output");
+ const createSpinner = require("ora");
+ const init_v2_1 = require("../init/init-v2");
+ const nx_json_1 = require("../../config/nx-json");
+ const workspace_root_1 = require("../../utils/workspace-root");
+ const package_manager_1 = require("../../utils/package-manager");
+ const workspace_context_1 = require("../../utils/workspace-context");
+ const utils_1 = require("../init/implementation/utils");
+ const command_line_utils_1 = require("../../utils/command-line-utils");
+ const prepare_source_repo_1 = require("./utils/prepare-source-repo");
+ const merge_remote_source_1 = require("./utils/merge-remote-source");
+ const needs_install_1 = require("./utils/needs-install");
+ const importRemoteName = '__tmp_nx_import__';
+ async function importHandler(options) {
+ let { sourceRemoteUrl, ref, source, destination } = options;
+ output_1.output.log({
+ title: 'Nx will walk you through the process of importing code from another repository into this workspace:',
+ bodyLines: [
+ `1. Nx will clone the other repository into a temporary directory`,
+ `2. Code to be imported will be moved to the same directory it will be imported into on a temporary branch`,
+ `3. The code will be merged into the current branch in this workspace`,
+ `4. Nx will recommend plugins to integrate tools used in the imported code with Nx`,
+ `5. The code will be successfully imported into this workspace`,
+ '',
+ `Git history will be preserved during this process`,
+ ],
+ });
+ const tempImportDirectory = (0, path_1.join)(tmp_1.tmpdir, 'nx-import');
+ if (!sourceRemoteUrl) {
+ sourceRemoteUrl = (await (0, enquirer_1.prompt)([
+ {
+ type: 'input',
+ name: 'sourceRemoteUrl',
+ message: 'What is the URL of the repository you want to import? (This can be a local git repository or a git remote URL)',
+ required: true,
+ },
+ ])).sourceRemoteUrl;
+ }
+ try {
+ const maybeLocalDirectory = await (0, promises_1.stat)(sourceRemoteUrl);
+ if (maybeLocalDirectory.isDirectory()) {
+ sourceRemoteUrl = (0, path_1.resolve)(sourceRemoteUrl);
+ }
+ }
+ catch (e) {
+ // It's a remote url
+ }
+ const sourceRepoPath = (0, path_1.join)(tempImportDirectory, 'repo');
+ const spinner = createSpinner(`Cloning ${sourceRemoteUrl} into a temporary directory: ${sourceRepoPath}`).start();
+ try {
+ await (0, promises_1.rm)(tempImportDirectory, { recursive: true });
+ }
+ catch { }
+ await (0, promises_1.mkdir)(tempImportDirectory, { recursive: true });
+ let sourceGitClient;
+ try {
+ sourceGitClient = await (0, git_utils_1.cloneFromUpstream)(sourceRemoteUrl, sourceRepoPath, {
+ originName: importRemoteName,
+ });
+ }
+ catch (e) {
+ spinner.fail(`Failed to clone ${sourceRemoteUrl} into ${sourceRepoPath}`);
+ let errorMessage = `Failed to clone ${sourceRemoteUrl} into ${sourceRepoPath}. Please double check the remote and try again.\n${e.message}`;
+ throw new Error(errorMessage);
+ }
+ spinner.succeed(`Cloned into ${sourceRepoPath}`);
+ if (!ref) {
+ const branchChoices = await sourceGitClient.listBranches();
+ ref = (await (0, enquirer_1.prompt)([
+ {
+ type: 'autocomplete',
+ name: 'ref',
+ message: `Which branch do you want to import?`,
+ choices: branchChoices,
+ /**
+ * Limit the number of choices so that it fits on screen
+ */
+ limit: process.stdout.rows - 3,
+ required: true,
+ },
+ ])).ref;
+ }
+ if (!source) {
+ source = (await (0, enquirer_1.prompt)([
+ {
+ type: 'input',
+ name: 'source',
+ message: `Which directory do you want to import into this workspace? (leave blank to import the entire repository)`,
+ },
+ ])).source;
+ }
+ if (!destination) {
+ destination = (await (0, enquirer_1.prompt)([
+ {
+ type: 'input',
+ name: 'destination',
+ message: 'Where in this workspace should the code be imported into?',
+ required: true,
+ },
+ ])).destination;
+ }
+ const absSource = (0, path_1.join)(sourceRepoPath, source);
+ const absDestination = (0, path_1.join)(process.cwd(), destination);
+ try {
+ await (0, promises_1.stat)(absSource);
+ }
+ catch (e) {
+ throw new Error(`The source directory ${source} does not exist in ${sourceRemoteUrl}. Please double check to make sure it exists.`);
+ }
+ const destinationGitClient = new git_utils_1.GitRepository(process.cwd());
+ await assertDestinationEmpty(destinationGitClient, absDestination);
+ const tempImportBranch = getTempImportBranch(ref);
+ const packageManager = (0, package_manager_1.detectPackageManager)(workspace_root_1.workspaceRoot);
+ const originalPackageWorkspaces = await (0, needs_install_1.getPackagesInPackageManagerWorkspace)(packageManager);
+ const relativeDestination = (0, path_1.relative)(destinationGitClient.root, absDestination);
+ await (0, prepare_source_repo_1.prepareSourceRepo)(sourceGitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl, importRemoteName);
+ await createTemporaryRemote(destinationGitClient, (0, path_1.join)(sourceRepoPath, '.git'), importRemoteName);
+ await (0, merge_remote_source_1.mergeRemoteSource)(destinationGitClient, sourceRemoteUrl, tempImportBranch, destination, importRemoteName, ref);
+ spinner.start('Cleaning up temporary files and remotes');
+ await (0, promises_1.rm)(tempImportDirectory, { recursive: true });
+ await destinationGitClient.deleteGitRemote(importRemoteName);
+ spinner.succeed('Cleaned up temporary files and remotes');
+ const pmc = (0, package_manager_1.getPackageManagerCommand)();
+ const nxJson = (0, nx_json_1.readNxJson)(workspace_root_1.workspaceRoot);
+ (0, workspace_context_1.resetWorkspaceContext)();
+ const { plugins, updatePackageScripts } = await (0, init_v2_1.detectPlugins)(nxJson, options.interactive);
+ if (plugins.length > 0) {
+ output_1.output.log({ title: 'Installing Plugins' });
+ (0, init_v2_1.installPlugins)(workspace_root_1.workspaceRoot, plugins, pmc, updatePackageScripts);
+ await destinationGitClient.amendCommit();
+ }
+ else if (await (0, needs_install_1.needsInstall)(packageManager, originalPackageWorkspaces)) {
+ output_1.output.log({
+ title: 'Installing dependencies for imported code',
+ });
+ (0, utils_1.runInstall)(workspace_root_1.workspaceRoot, (0, package_manager_1.getPackageManagerCommand)(packageManager));
+ await destinationGitClient.amendCommit();
+ }
+ console.log(await destinationGitClient.showStat());
+ output_1.output.log({
+ title: `Merging these changes into ${(0, command_line_utils_1.getBaseRef)(nxJson)}`,
+ bodyLines: [
+ `MERGE these changes when merging these changes.`,
+ `Do NOT squash and do NOT rebase these changes when merging these changes.`,
+ `If you would like to UNDO these changes, run "git reset HEAD~1 --hard"`,
+ ],
+ });
+ }
+ async function assertDestinationEmpty(gitClient, absDestination) {
+ const files = await gitClient.getGitFiles(absDestination);
+ if (files.length > 0) {
+ throw new Error(`Destination directory ${absDestination} is not empty. Please make sure it is empty before importing into it.`);
+ }
+ }
+ function getTempImportBranch(sourceBranch) {
+ return `__nx_tmp_import__/${sourceBranch}`;
+ }
+ async function createTemporaryRemote(destinationGitClient, sourceRemoteUrl, remoteName) {
+ try {
+ await destinationGitClient.deleteGitRemote(remoteName);
+ }
+ catch { }
+ await destinationGitClient.addGitRemote(remoteName, sourceRemoteUrl);
+ await destinationGitClient.fetch(remoteName);
+ }
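
A condensed, hedged outline of the git sequence that importHandler above performs, with prompts, spinners, plugin detection, and error handling stripped out; the URL, branch, directories, and temp path are illustrative only:

// Sketch only: mirrors the order of operations in importHandler above.
async function importFlowSketch({ cloneFromUpstream, GitRepository, prepareSourceRepo, mergeRemoteSource }) {
  const remoteName = '__tmp_nx_import__';
  const ref = 'main';                                               // normally chosen via prompt
  const sourceRemoteUrl = 'https://github.com/acme/other-repo.git'; // hypothetical remote
  const sourceRepoPath = '/tmp/nx-import/repo';                     // the real code derives this from tmpdir
  // 1. Clone the source repository into a temporary directory.
  const sourceGitClient = await cloneFromUpstream(sourceRemoteUrl, sourceRepoPath, { originName: remoteName });
  // 2. On a temporary branch in the clone, move the code into the destination path, keeping history.
  const tempImportBranch = `__nx_tmp_import__/${ref}`;
  await prepareSourceRepo(sourceGitClient, ref, 'libs/shared', 'packages/imported', tempImportBranch, sourceRemoteUrl, remoteName);
  // 3. Add the clone as a temporary remote of this workspace and merge with unrelated histories.
  const destinationGitClient = new GitRepository(process.cwd());
  await destinationGitClient.addGitRemote(remoteName, `${sourceRepoPath}/.git`);
  await destinationGitClient.fetch(remoteName);
  await mergeRemoteSource(destinationGitClient, sourceRemoteUrl, tempImportBranch, 'packages/imported', remoteName, ref);
  // 4. Clean up the temporary remote (the handler also removes the temp directory).
  await destinationGitClient.deleteGitRemote(remoteName);
}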
@@ -0,0 +1,2 @@
+ import { GitRepository } from '../../../utils/git-utils';
+ export declare function mergeRemoteSource(destinationGitClient: GitRepository, sourceRemoteUrl: string, tempBranch: string, destination: string, remoteName: string, branchName: string): Promise<void>;
@@ -0,0 +1,14 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.mergeRemoteSource = mergeRemoteSource;
+ const createSpinner = require("ora");
+ async function mergeRemoteSource(destinationGitClient, sourceRemoteUrl, tempBranch, destination, remoteName, branchName) {
+ const spinner = createSpinner();
+ spinner.start(`Merging ${branchName} from ${sourceRemoteUrl} into ${destination}`);
+ spinner.start(`Fetching ${tempBranch} from ${remoteName}`);
+ await destinationGitClient.fetch(remoteName, tempBranch);
+ spinner.succeed(`Fetched ${tempBranch} from ${remoteName}`);
+ spinner.start(`Merging files and git history from ${branchName} from ${sourceRemoteUrl} into ${destination}`);
+ await destinationGitClient.mergeUnrelatedHistories(`${remoteName}/${tempBranch}`, `feat(repo): merge ${branchName} from ${sourceRemoteUrl}`);
+ spinner.succeed(`Merged files and git history from ${branchName} from ${sourceRemoteUrl} into ${destination}`);
+ }
@@ -0,0 +1,3 @@
+ import { PackageManager } from '../../../utils/package-manager';
+ export declare function getPackagesInPackageManagerWorkspace(packageManager: PackageManager): Promise<Set<string>>;
+ export declare function needsInstall(packageManager: PackageManager, originalPackagesInPackageManagerWorkspaces: Set<string>): Promise<boolean>;
@@ -0,0 +1,31 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.getPackagesInPackageManagerWorkspace = getPackagesInPackageManagerWorkspace;
+ exports.needsInstall = needsInstall;
+ const package_manager_1 = require("../../../utils/package-manager");
+ const workspace_root_1 = require("../../../utils/workspace-root");
+ const package_json_1 = require("../../../plugins/package-json");
+ const workspace_context_1 = require("../../../utils/workspace-context");
+ async function getPackagesInPackageManagerWorkspace(packageManager) {
+ if (!(0, package_manager_1.isWorkspacesEnabled)(packageManager, workspace_root_1.workspaceRoot)) {
+ return new Set();
+ }
+ const patterns = (0, package_json_1.getGlobPatternsFromPackageManagerWorkspaces)(workspace_root_1.workspaceRoot);
+ return new Set(await (0, workspace_context_1.globWithWorkspaceContext)(workspace_root_1.workspaceRoot, patterns));
+ }
+ async function needsInstall(packageManager, originalPackagesInPackageManagerWorkspaces) {
+ if (!(0, package_manager_1.isWorkspacesEnabled)(packageManager, workspace_root_1.workspaceRoot)) {
+ return false;
+ }
+ const updatedPackagesInPackageManagerWorkspaces = await getPackagesInPackageManagerWorkspace(packageManager);
+ if (updatedPackagesInPackageManagerWorkspaces.size !==
+ originalPackagesInPackageManagerWorkspaces.size) {
+ return true;
+ }
+ for (const pkg of updatedPackagesInPackageManagerWorkspaces) {
+ if (!originalPackagesInPackageManagerWorkspaces.has(pkg)) {
+ return true;
+ }
+ }
+ return false;
+ }
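
A hedged usage sketch of the two helpers above, mirroring how the import handler earlier in this diff uses them: snapshot the package-manager workspace packages before importing, then decide afterwards whether a fresh install is required. The require paths and the callbacks are illustrative:

const { detectPackageManager } = require('../../../utils/package-manager'); // path assumed from the requires above
const { getPackagesInPackageManagerWorkspace, needsInstall } = require('./needs-install'); // assumed module path

async function installIfWorkspacePackagesChanged(importCode, runInstall) {
  const packageManager = detectPackageManager();
  const before = await getPackagesInPackageManagerWorkspace(packageManager);
  await importCode(); // code is imported into the workspace here (hypothetical callback)
  if (await needsInstall(packageManager, before)) {
    runInstall(); // caller-supplied install step, e.g. the runInstall used in import.js
  }
}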
@@ -0,0 +1,2 @@
+ import { GitRepository } from '../../../utils/git-utils';
+ export declare function prepareSourceRepo(gitClient: GitRepository, ref: string, source: string, relativeDestination: string, tempImportBranch: string, sourceRemoteUrl: string, originName: string): Promise<void>;
@@ -0,0 +1,104 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.prepareSourceRepo = prepareSourceRepo;
+ const createSpinner = require("ora");
+ const path_1 = require("path");
+ const promises_1 = require("node:fs/promises");
+ async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl, originName) {
+ const spinner = createSpinner().start(`Fetching ${ref} from ${sourceRemoteUrl}`);
+ await gitClient.addFetchRemote(originName, ref);
+ await gitClient.fetch(originName, ref);
+ spinner.succeed(`Fetched ${ref} from ${sourceRemoteUrl}`);
+ spinner.start(`Checking out a temporary branch, ${tempImportBranch} based on ${ref}`);
+ await gitClient.checkout(tempImportBranch, {
+ new: true,
+ base: `${originName}/${ref}`,
+ });
+ spinner.succeed(`Created a ${tempImportBranch} branch based on ${ref}`);
+ const relativeSourceDir = (0, path_1.relative)(gitClient.root, (0, path_1.join)(gitClient.root, source));
+ const destinationInSource = (0, path_1.join)(gitClient.root, relativeDestination);
+ spinner.start(`Moving files and git history to ${destinationInSource}`);
+ if (relativeSourceDir === '') {
+ const files = await gitClient.getGitFiles('.');
+ try {
+ await (0, promises_1.rm)(destinationInSource, {
+ recursive: true,
+ });
+ }
+ catch { }
+ await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
+ const gitignores = new Set();
+ for (const file of files) {
+ if ((0, path_1.basename)(file) === '.gitignore') {
+ gitignores.add(file);
+ continue;
+ }
+ spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
+ const newPath = (0, path_1.join)(destinationInSource, file);
+ await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
+ try {
+ await gitClient.move(file, newPath);
+ }
+ catch {
+ await wait(100);
+ await gitClient.move(file, newPath);
+ }
+ }
+ await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
+ for (const gitignore of gitignores) {
+ await gitClient.move(gitignore, (0, path_1.join)(destinationInSource, gitignore));
+ }
+ await gitClient.amendCommit();
+ for (const gitignore of gitignores) {
+ await (0, promises_1.copyFile)((0, path_1.join)(destinationInSource, gitignore), (0, path_1.join)(gitClient.root, gitignore));
+ }
+ }
+ else {
+ let needsSquash = false;
+ const needsMove = destinationInSource !== (0, path_1.join)(gitClient.root, source);
+ if (needsMove) {
+ try {
+ await (0, promises_1.rm)(destinationInSource, {
+ recursive: true,
+ });
+ await gitClient.commit('chore(repo): prepare for import');
+ needsSquash = true;
+ }
+ catch { }
+ await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
+ }
+ const files = await gitClient.getGitFiles('.');
+ for (const file of files) {
+ if (file === '.gitignore') {
+ continue;
+ }
+ spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
+ if (!(0, path_1.relative)(source, file).startsWith('..')) {
+ if (needsMove) {
+ const newPath = (0, path_1.join)(destinationInSource, file);
+ await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
+ try {
+ await gitClient.move(file, newPath);
+ }
+ catch {
+ await wait(100);
+ await gitClient.move(file, newPath);
+ }
+ }
+ }
+ else {
+ await (0, promises_1.rm)((0, path_1.join)(gitClient.root, file), {
+ recursive: true,
+ });
+ }
+ }
+ await gitClient.commit('chore(repo): prepare for import 2');
+ if (needsSquash) {
+ await gitClient.squashLastTwoCommits();
+ }
+ }
+ spinner.succeed(`${sourceRemoteUrl} has been prepared to be imported into this workspace on a temporary branch: ${tempImportBranch} in ${gitClient.root}`);
+ }
+ function wait(ms) {
+ return new Promise((resolve) => setTimeout(resolve, ms));
+ }
@@ -1,7 +1,14 @@
+ import { PackageManagerCommands } from '../../utils/package-manager';
+ import { NxJsonConfiguration } from '../../config/nx-json';
  export interface InitArgs {
  interactive: boolean;
  nxCloud?: boolean;
  useDotNxInstallation?: boolean;
  integrated?: boolean;
  }
+ export declare function installPlugins(repoRoot: string, plugins: string[], pmc: PackageManagerCommands, updatePackageScripts: boolean): void;
  export declare function initHandler(options: InitArgs): Promise<void>;
+ export declare function detectPlugins(nxJson: NxJsonConfiguration, interactive: boolean): Promise<{
+ plugins: string[];
+ updatePackageScripts: boolean;
+ }>;
@@ -1,6 +1,8 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
+ exports.installPlugins = installPlugins;
  exports.initHandler = initHandler;
+ exports.detectPlugins = detectPlugins;
  const fs_1 = require("fs");
  const semver_1 = require("semver");
  const output_1 = require("../../utils/output");
@@ -17,6 +19,22 @@ const workspace_context_1 = require("../../utils/workspace-context");
  const connect_to_nx_cloud_1 = require("../connect/connect-to-nx-cloud");
  const add_nx_to_npm_repo_1 = require("./implementation/add-nx-to-npm-repo");
  const add_nx_to_monorepo_1 = require("./implementation/add-nx-to-monorepo");
+ const nx_json_1 = require("../../config/nx-json");
+ const get_package_name_from_import_path_1 = require("../../utils/get-package-name-from-import-path");
+ function installPlugins(repoRoot, plugins, pmc, updatePackageScripts) {
+ if (plugins.length === 0) {
+ return;
+ }
+ (0, utils_1.addDepsToPackageJson)(repoRoot, plugins);
+ (0, utils_1.runInstall)(repoRoot, pmc);
+ output_1.output.log({ title: '🔨 Configuring plugins' });
+ for (const plugin of plugins) {
+ (0, child_process_2.execSync)(`${pmc.exec} nx g ${plugin}:init --keepExistingVersions ${updatePackageScripts ? '--updatePackageScripts' : ''} --no-interactive`, {
+ stdio: [0, 1, 2],
+ cwd: repoRoot,
+ });
+ }
+ }
  async function initHandler(options) {
  process.env.NX_RUNNING_NX_INIT = 'true';
  const version = process.env.NX_VERSION ?? ((0, semver_1.prerelease)(versions_1.nxVersion) ? 'next' : 'latest');
@@ -31,7 +49,8 @@ async function initHandler(options) {
  console.log('Setting Nx up installation in `.nx`. You can run Nx commands like: `./nx.bat --help`');
  }
  (0, add_nx_scripts_1.generateDotNxSetup)(version);
- const { plugins } = await detectPlugins();
+ const nxJson = (0, nx_json_1.readNxJson)(process.cwd());
+ const { plugins } = await detectPlugins(nxJson, options.interactive);
  plugins.forEach((plugin) => {
  (0, child_process_1.runNxSync)(`add ${plugin}`, {
  stdio: 'inherit',
@@ -52,8 +71,6 @@ async function initHandler(options) {
  });
  return;
  }
- output_1.output.log({ title: '🧐 Checking dependencies' });
- const { plugins, updatePackageScripts } = await detectPlugins();
  const packageJson = (0, fileutils_1.readJsonFile)('package.json');
  if ((0, utils_1.isMonorepo)(packageJson)) {
  await (0, add_nx_to_monorepo_1.addNxToMonorepo)({
@@ -76,18 +93,11 @@ async function initHandler(options) {
  const pmc = (0, package_manager_1.getPackageManagerCommand)();
  (0, utils_1.createNxJsonFile)(repoRoot, [], [], {});
  (0, utils_1.updateGitIgnore)(repoRoot);
- (0, utils_1.addDepsToPackageJson)(repoRoot, plugins);
+ const nxJson = (0, nx_json_1.readNxJson)(repoRoot);
+ output_1.output.log({ title: '🧐 Checking dependencies' });
+ const { plugins, updatePackageScripts } = await detectPlugins(nxJson, options.interactive);
  output_1.output.log({ title: '📦 Installing Nx' });
- (0, utils_1.runInstall)(repoRoot, pmc);
- if (plugins.length > 0) {
- output_1.output.log({ title: '🔨 Configuring plugins' });
- for (const plugin of plugins) {
- (0, child_process_2.execSync)(`${pmc.exec} nx g ${plugin}:init --keepExistingVersions ${updatePackageScripts ? '--updatePackageScripts' : ''} --no-interactive`, {
- stdio: [0, 1, 2],
- cwd: repoRoot,
- });
- }
- }
+ installPlugins(repoRoot, plugins, pmc, updatePackageScripts);
  if (useNxCloud) {
  output_1.output.log({ title: '🛠️ Setting up Nx Cloud' });
  await (0, utils_1.initCloud)('nx-init');
@@ -117,8 +127,12 @@ const npmPackageToPluginMap = {
  'react-native': '@nx/react-native',
  '@remix-run/dev': '@nx/remix',
  };
- async function detectPlugins() {
+ async function detectPlugins(nxJson, interactive) {
  let files = ['package.json'].concat(await (0, workspace_context_1.globWithWorkspaceContext)(process.cwd(), ['**/*/package.json']));
+ const currentPlugins = new Set((nxJson.plugins ?? []).map((p) => {
+ const plugin = typeof p === 'string' ? p : p.plugin;
+ return (0, get_package_name_from_import_path_1.getPackageNameFromImportPath)(plugin);
+ }));
  const detectedPlugins = new Set();
  for (const file of files) {
  if (!(0, fs_1.existsSync)(file))
@@ -144,6 +158,12 @@ async function detectPlugins() {
  if ((0, fs_1.existsSync)('gradlew') || (0, fs_1.existsSync)('gradlew.bat')) {
  detectedPlugins.add('@nx/gradle');
  }
+ // Remove existing plugins
+ for (const plugin of detectedPlugins) {
+ if (currentPlugins.has(plugin)) {
+ detectedPlugins.delete(plugin);
+ }
+ }
  const plugins = Array.from(detectedPlugins);
  if (plugins.length === 0) {
  return {
@@ -151,6 +171,19 @@ async function detectPlugins() {
  updatePackageScripts: false,
  };
  }
+ if (!interactive) {
+ output_1.output.log({
+ title: `Recommended Plugins:`,
+ bodyLines: [
+ `Adding these Nx plugins to integrate with the tools used in your workspace:`,
+ ...plugins.map((p) => `- ${p}`),
+ ],
+ });
+ return {
+ plugins,
+ updatePackageScripts: true,
+ };
+ }
  output_1.output.log({
  title: `Recommended Plugins:`,
  bodyLines: [
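
For reference, a hedged illustration of the nx.json plugin-entry shapes that the new `currentPlugins` normalization above handles; '@nx/eslint/plugin' is only an example entry:

// Both entry forms below normalize to the package name '@nx/eslint'
// via getPackageNameFromImportPath, so an already-registered plugin
// is dropped from the detected list.
const examplePluginEntries = [
  '@nx/eslint/plugin',                           // string form
  { plugin: '@nx/eslint/plugin', options: {} },  // object form: `p.plugin` is read
];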
@@ -10,21 +10,22 @@ const command_object_4 = require("./graph/command-object");
  const command_object_5 = require("./exec/command-object");
  const command_object_6 = require("./format/command-object");
  const command_object_7 = require("./generate/command-object");
- const command_object_8 = require("./init/command-object");
- const command_object_9 = require("./list/command-object");
- const command_object_10 = require("./migrate/command-object");
- const command_object_11 = require("./new/command-object");
- const command_object_12 = require("./repair/command-object");
- const command_object_13 = require("./report/command-object");
- const command_object_14 = require("./run/command-object");
- const command_object_15 = require("./run-many/command-object");
- const command_object_16 = require("./show/command-object");
- const command_object_17 = require("./watch/command-object");
- const command_object_18 = require("./reset/command-object");
- const command_object_19 = require("./release/command-object");
- const command_object_20 = require("./add/command-object");
+ const command_object_8 = require("./import/command-object");
+ const command_object_9 = require("./init/command-object");
+ const command_object_10 = require("./list/command-object");
+ const command_object_11 = require("./migrate/command-object");
+ const command_object_12 = require("./new/command-object");
+ const command_object_13 = require("./repair/command-object");
+ const command_object_14 = require("./report/command-object");
+ const command_object_15 = require("./run/command-object");
+ const command_object_16 = require("./run-many/command-object");
+ const command_object_17 = require("./show/command-object");
+ const command_object_18 = require("./watch/command-object");
+ const command_object_19 = require("./reset/command-object");
+ const command_object_20 = require("./release/command-object");
+ const command_object_21 = require("./add/command-object");
  const command_objects_1 = require("./deprecated/command-objects");
- const command_object_21 = require("./sync/command-object");
+ const command_object_22 = require("./sync/command-object");
  // Ensure that the output takes up the available width of the terminal.
  yargs.wrap(yargs.terminalWidth());
  exports.parserConfiguration = {
@@ -41,7 +42,7 @@ exports.commandsObject = yargs
  .parserConfiguration(exports.parserConfiguration)
  .usage(chalk.bold('Smart Monorepos · Fast CI'))
  .demandCommand(1, '')
- .command(command_object_20.yargsAddCommand)
+ .command(command_object_21.yargsAddCommand)
  .command(command_object_1.yargsAffectedBuildCommand)
  .command(command_object_1.yargsAffectedCommand)
  .command(command_object_1.yargsAffectedE2ECommand)
@@ -55,24 +56,25 @@ exports.commandsObject = yargs
  .command(command_object_6.yargsFormatCheckCommand)
  .command(command_object_6.yargsFormatWriteCommand)
  .command(command_object_7.yargsGenerateCommand)
- .command(command_object_8.yargsInitCommand)
- .command(command_object_10.yargsInternalMigrateCommand)
- .command(command_object_9.yargsListCommand)
- .command(command_object_10.yargsMigrateCommand)
- .command(command_object_11.yargsNewCommand)
+ .command(command_object_8.yargsImportCommand)
+ .command(command_object_9.yargsInitCommand)
+ .command(command_object_11.yargsInternalMigrateCommand)
+ .command(command_object_10.yargsListCommand)
+ .command(command_object_11.yargsMigrateCommand)
+ .command(command_object_12.yargsNewCommand)
  .command(command_objects_1.yargsPrintAffectedCommand)
- .command(command_object_19.yargsReleaseCommand)
- .command(command_object_12.yargsRepairCommand)
- .command(command_object_13.yargsReportCommand)
- .command(command_object_18.yargsResetCommand)
- .command(command_object_14.yargsRunCommand)
- .command(command_object_15.yargsRunManyCommand)
- .command(command_object_16.yargsShowCommand)
- .command(command_object_21.yargsSyncCommand)
- .command(command_object_21.yargsSyncCheckCommand)
+ .command(command_object_20.yargsReleaseCommand)
+ .command(command_object_13.yargsRepairCommand)
+ .command(command_object_14.yargsReportCommand)
+ .command(command_object_19.yargsResetCommand)
+ .command(command_object_15.yargsRunCommand)
+ .command(command_object_16.yargsRunManyCommand)
+ .command(command_object_17.yargsShowCommand)
+ .command(command_object_22.yargsSyncCommand)
+ .command(command_object_22.yargsSyncCheckCommand)
  .command(command_object_2.yargsViewLogsCommand)
- .command(command_object_17.yargsWatchCommand)
- .command(command_object_14.yargsNxInfixCommand)
+ .command(command_object_18.yargsWatchCommand)
+ .command(command_object_15.yargsNxInfixCommand)
  .scriptName('nx')
  .help()
  // NOTE: we handle --version in nx.ts, this just tells yargs that the option exists
@@ -29,7 +29,7 @@ export declare function withTargetAndConfigurationOption(yargs: Argv, demandOpti
  export declare function withConfiguration(yargs: Argv): Argv<{
  configuration: string;
  }>;
- export declare function withVerbose(yargs: Argv): Argv<{
+ export declare function withVerbose<T>(yargs: Argv<T>): Argv<T & {
  verbose: boolean;
  }>;
  export declare function withBatch(yargs: Argv): any;
@@ -146,7 +146,7 @@ async function processFilesAndCreateAndSerializeProjectGraph(plugins) {
  perf_hooks_1.performance.mark('hash-watched-changes-start');
  const updatedFiles = [...collectedUpdatedFiles.values()];
  const deletedFiles = [...collectedDeletedFiles.values()];
- let updatedFileHashes = (0, workspace_context_1.updateFilesInContext)(updatedFiles, deletedFiles);
+ let updatedFileHashes = (0, workspace_context_1.updateFilesInContext)(workspace_root_1.workspaceRoot, updatedFiles, deletedFiles);
  perf_hooks_1.performance.mark('hash-watched-changes-end');
  perf_hooks_1.performance.measure('hash changed files from watcher', 'hash-watched-changes-start', 'hash-watched-changes-end');
  logger_1.serverLogger.requestLog(`Updated workspace context based on watched changes, recomputing project graph...`);
Binary file
@@ -118,9 +118,6 @@ async function runCommand(projectsToRun, currentProjectGraph, { nxJson }, nxArgs
  }
  async function ensureWorkspaceIsInSyncAndGetGraphs(projectGraph, nxJson, projectNames, nxArgs, overrides, extraTargetDependencies, extraOptions) {
  let taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
- if (process.env.NX_ENABLE_SYNC_GENERATORS !== 'true') {
- return { projectGraph, taskGraph };
- }
  // collect unique syncGenerators from the tasks
  const uniqueSyncGenerators = new Set();
  for (const { target } of Object.values(taskGraph.tasks)) {
@@ -32,6 +32,7 @@ export interface NxArgs {
  excludeTaskDependencies?: boolean;
  }
  export declare function createOverrides(__overrides_unparsed__?: string[]): Record<string, any>;
+ export declare function getBaseRef(nxJson: NxJsonConfiguration): string;
  export declare function splitArgsIntoNxArgsAndOverrides(args: {
  [k: string]: any;
  }, mode: 'run-one' | 'run-many' | 'affected' | 'print-affected', options: {
@@ -1,6 +1,7 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.createOverrides = createOverrides;
+ exports.getBaseRef = getBaseRef;
  exports.splitArgsIntoNxArgsAndOverrides = splitArgsIntoNxArgsAndOverrides;
  exports.readParallelFromArgsAndEnv = readParallelFromArgsAndEnv;
  exports.parseFiles = parseFiles;
@@ -24,6 +25,9 @@ function createOverrides(__overrides_unparsed__ = []) {
  overrides.__overrides_unparsed__ = __overrides_unparsed__;
  return overrides;
  }
+ function getBaseRef(nxJson) {
+ return nxJson.defaultBase ?? nxJson.affected?.defaultBase ?? 'main';
+ }
  function splitArgsIntoNxArgsAndOverrides(args, mode, options = { printWarnings: true }, nxJson) {
  // this is to lerna case when this function is invoked imperatively
  if (args['target'] && !args['targets']) {
@@ -88,8 +92,7 @@ function splitArgsIntoNxArgsAndOverrides(args, mode, options = { printWarnings:
  }
  }
  if (!nxArgs.base) {
- nxArgs.base =
- nxJson.defaultBase ?? nxJson.affected?.defaultBase ?? 'main';
+ nxArgs.base = getBaseRef(nxJson);
  // No user-provided arguments to set the affected criteria, so inform the user of the defaults being used
  if (options.printWarnings &&
  !nxArgs.head &&
@@ -1,3 +1,38 @@
+ import { ExecSyncOptions } from 'child_process';
+ export declare function cloneFromUpstream(url: string, destination: string, { originName }?: {
+ originName: string;
+ }): Promise<GitRepository>;
+ export declare class GitRepository {
+ private directory;
+ root: string;
+ constructor(directory: string);
+ getGitRootPath(cwd: string): string;
+ addFetchRemote(remoteName: string, branch: string): Promise<string>;
+ private execAsync;
+ showStat(): Promise<string>;
+ listBranches(): Promise<string[]>;
+ getGitFiles(path: string): Promise<string[]>;
+ reset(ref: string): Promise<string>;
+ squashLastTwoCommits(): Promise<string>;
+ mergeUnrelatedHistories(ref: string, message: string): Promise<string>;
+ fetch(remote: string, ref?: string): Promise<string>;
+ checkout(branch: string, opts: {
+ new: boolean;
+ base: string;
+ }): Promise<string>;
+ move(path: string, destination: string): Promise<string>;
+ push(ref: string, remoteName: string): Promise<string>;
+ commit(message: string): Promise<string>;
+ amendCommit(): Promise<string>;
+ deleteGitRemote(name: string): Promise<string>;
+ deleteBranch(branch: string): Promise<string>;
+ addGitRemote(name: string, url: string): Promise<string>;
+ }
+ /**
+ * This is used by the squash editor script to update the rebase file.
+ */
+ export declare function updateRebaseFile(contents: string): string;
+ export declare function fetchGitRemote(name: string, branch: string, execOptions: ExecSyncOptions): string | Buffer;
  export declare function getGithubSlugOrNull(): string | null;
  export declare function extractUserAndRepoFromGitHubUrl(gitRemotes: string): string | null;
  export declare function commitChanges(commitMessage: string, directory?: string): string | null;
@@ -1,11 +1,122 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
+ exports.GitRepository = void 0;
+ exports.cloneFromUpstream = cloneFromUpstream;
+ exports.updateRebaseFile = updateRebaseFile;
+ exports.fetchGitRemote = fetchGitRemote;
  exports.getGithubSlugOrNull = getGithubSlugOrNull;
  exports.extractUserAndRepoFromGitHubUrl = extractUserAndRepoFromGitHubUrl;
  exports.commitChanges = commitChanges;
  exports.getLatestCommitSha = getLatestCommitSha;
  const child_process_1 = require("child_process");
  const devkit_exports_1 = require("../devkit-exports");
+ const path_1 = require("path");
+ const SQUASH_EDITOR = (0, path_1.join)(__dirname, 'squash.js');
+ function execAsync(command, execOptions) {
+ return new Promise((res, rej) => {
+ (0, child_process_1.exec)(command, execOptions, (err, stdout, stderr) => {
+ if (err) {
+ return rej(err);
+ }
+ res(stdout);
+ });
+ });
+ }
+ async function cloneFromUpstream(url, destination, { originName } = { originName: 'origin' }) {
+ await execAsync(`git clone ${url} ${destination} --depth 1 --origin ${originName}`, {
+ cwd: (0, path_1.dirname)(destination),
+ });
+ return new GitRepository(destination);
+ }
+ class GitRepository {
+ constructor(directory) {
+ this.directory = directory;
+ this.root = this.getGitRootPath(this.directory);
+ }
+ getGitRootPath(cwd) {
+ return (0, child_process_1.execSync)('git rev-parse --show-toplevel', {
+ cwd,
+ })
+ .toString()
+ .trim();
+ }
+ addFetchRemote(remoteName, branch) {
+ return this.execAsync(`git config --add remote.${remoteName}.fetch "+refs/heads/${branch}:refs/remotes/${remoteName}/${branch}"`);
+ }
+ execAsync(command) {
+ return execAsync(command, {
+ cwd: this.root,
+ });
+ }
+ async showStat() {
+ return await this.execAsync(`git show --stat`);
+ }
+ async listBranches() {
+ return (await this.execAsync(`git ls-remote --heads --quiet`))
+ .trim()
+ .split('\n')
+ .map((s) => s
+ .trim()
+ .substring(s.indexOf('\t') + 1)
+ .replace('refs/heads/', ''));
+ }
+ async getGitFiles(path) {
+ return (await this.execAsync(`git ls-files ${path}`))
+ .trim()
+ .split('\n')
+ .map((s) => s.trim())
+ .filter(Boolean);
+ }
+ async reset(ref) {
+ return this.execAsync(`git reset ${ref} --hard`);
+ }
+ async squashLastTwoCommits() {
+ return this.execAsync(`git -c core.editor="node ${SQUASH_EDITOR}" rebase --interactive --no-autosquash HEAD~2`);
+ }
+ async mergeUnrelatedHistories(ref, message) {
+ return this.execAsync(`git merge ${ref} -X ours --allow-unrelated-histories -m "${message}"`);
+ }
+ async fetch(remote, ref) {
+ return this.execAsync(`git fetch ${remote}${ref ? ` ${ref}` : ''}`);
+ }
+ async checkout(branch, opts) {
+ return this.execAsync(`git checkout ${opts.new ? '-b ' : ' '}${branch}${opts.base ? ' ' + opts.base : ''}`);
+ }
+ async move(path, destination) {
+ return this.execAsync(`git mv ${path} ${destination}`);
+ }
+ async push(ref, remoteName) {
+ return this.execAsync(`git push -u -f ${remoteName} ${ref}`);
+ }
+ async commit(message) {
+ return this.execAsync(`git commit -am "${message}"`);
+ }
+ async amendCommit() {
+ return this.execAsync(`git commit --amend -a --no-edit`);
+ }
+ deleteGitRemote(name) {
+ return this.execAsync(`git remote rm ${name}`);
+ }
+ deleteBranch(branch) {
+ return this.execAsync(`git branch -D ${branch}`);
+ }
+ addGitRemote(name, url) {
+ return this.execAsync(`git remote add ${name} ${url}`);
+ }
+ }
+ exports.GitRepository = GitRepository;
+ /**
+ * This is used by the squash editor script to update the rebase file.
+ */
+ function updateRebaseFile(contents) {
+ const lines = contents.split('\n');
+ const lastCommitIndex = lines.findIndex((line) => line === '') - 1;
+ lines[lastCommitIndex] = lines[lastCommitIndex].replace('pick', 'fixup');
+ return lines.join('\n');
+ }
+ function fetchGitRemote(name, branch, execOptions) {
+ return (0, child_process_1.execSync)(`git fetch ${name} ${branch} --depth 1`, execOptions);
+ }
  function getGithubSlugOrNull() {
  try {
  const gitRemote = (0, child_process_1.execSync)('git remote -v', {
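
A short, hedged sketch of the new GitRepository helper above in isolation; the repository URL, clone destination, and remote name are placeholders:

const { cloneFromUpstream } = require('./git-utils'); // assumed relative path

async function listRemoteBranches() {
  // Shallow-clones the repo (git clone --depth 1) and wraps it in a GitRepository.
  const repo = await cloneFromUpstream('https://github.com/acme/other-repo.git', '/tmp/acme-clone', {
    originName: 'upstream', // defaults to 'origin' when omitted
  });
  console.log(repo.root);                 // resolved via `git rev-parse --show-toplevel`
  console.log(await repo.listBranches()); // parsed from `git ls-remote --heads --quiet`
}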
@@ -49,7 +49,7 @@ function isWorkspacesEnabled(packageManager = detectPackageManager(), root = wor
  if (packageManager === 'pnpm') {
  return (0, fs_1.existsSync)((0, path_1.join)(root, 'pnpm-workspace.yaml'));
  }
- // yarn and pnpm both use the same 'workspaces' property in package.json
+ // yarn and npm both use the same 'workspaces' property in package.json
  const packageJson = (0, file_utils_1.readPackageJson)();
  return !!packageJson?.workspaces;
  }
@@ -0,0 +1 @@
+ export {};
@@ -0,0 +1,12 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const fs_1 = require("fs");
+ const git_utils_1 = require("./git-utils");
+ // This script is used as an editor for git rebase -i
+ // This is the file which git creates. When this script exits, the updates should be written to this file.
+ const filePath = process.argv[2];
+ // Change the second commit from pick to fixup
+ const contents = (0, fs_1.readFileSync)(filePath).toString();
+ const newContents = (0, git_utils_1.updateRebaseFile)(contents);
+ // Write the updated contents back to the file
+ (0, fs_1.writeFileSync)(filePath, newContents);
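
The squash script above is wired in by squashLastTwoCommits as the git editor for an interactive rebase of HEAD~2. A hedged before/after illustration of what updateRebaseFile does to the rebase todo file; the commit hashes and messages are made up:

const { updateRebaseFile } = require('./git-utils');

const todoBefore = [
  'pick 1a2b3c4 chore(repo): prepare for import',
  'pick 5d6e7f8 chore(repo): prepare for import 2',
  '',
  '# Rebase instructions follow...',
].join('\n');

// The line just before the first blank line (the newest commit) is switched
// from 'pick' to 'fixup', so the two commits are squashed into one.
console.log(updateRebaseFile(todoBefore));
// => first line stays 'pick 1a2b3c4 ...', second becomes 'fixup 5d6e7f8 ...'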
@@ -130,7 +130,7 @@ async function flushSyncGeneratorChangesToDisk(results) {
  }
  }
  // Update the context files
- await (0, workspace_context_1.updateContextWithChangedFiles)(createdFiles, updatedFiles, deletedFiles);
+ await (0, workspace_context_1.updateContextWithChangedFiles)(workspace_root_1.workspaceRoot, createdFiles, updatedFiles, deletedFiles);
  perf_hooks_1.performance.mark('flush-sync-generator-changes-to-disk:end');
  perf_hooks_1.performance.measure('flush sync generator changes to disk', 'flush-sync-generator-changes-to-disk:start', 'flush-sync-generator-changes-to-disk:end');
  }
@@ -11,8 +11,8 @@ export declare function getNxWorkspaceFilesFromContext(workspaceRoot: string, pr
  export declare function globWithWorkspaceContextSync(workspaceRoot: string, globs: string[], exclude?: string[]): string[];
  export declare function globWithWorkspaceContext(workspaceRoot: string, globs: string[], exclude?: string[]): Promise<string[]>;
  export declare function hashWithWorkspaceContext(workspaceRoot: string, globs: string[], exclude?: string[]): Promise<string>;
- export declare function updateContextWithChangedFiles(createdFiles: string[], updatedFiles: string[], deletedFiles: string[]): Promise<void>;
- export declare function updateFilesInContext(updatedFiles: string[], deletedFiles: string[]): Record<string, string>;
+ export declare function updateContextWithChangedFiles(workspaceRoot: string, createdFiles: string[], updatedFiles: string[], deletedFiles: string[]): Promise<void>;
+ export declare function updateFilesInContext(workspaceRoot: string, updatedFiles: string[], deletedFiles: string[]): Record<string, string>;
  export declare function getAllFileDataInContext(workspaceRoot: string): Promise<import("../native").FileData[]>;
  export declare function getFilesInDirectoryUsingContext(workspaceRoot: string, dir: string): Promise<string[]>;
  export declare function updateProjectFiles(projectRootMappings: Record<string, string>, rustReferences: NxWorkspaceFilesExternals, updatedFiles: Record<string, string>, deletedFiles: string[]): import("../native").UpdatedWorkspaceFiles;
@@ -57,9 +57,9 @@ async function hashWithWorkspaceContext(workspaceRoot, globs, exclude) {
  }
  return client_1.daemonClient.hashGlob(globs, exclude);
  }
- async function updateContextWithChangedFiles(createdFiles, updatedFiles, deletedFiles) {
+ async function updateContextWithChangedFiles(workspaceRoot, createdFiles, updatedFiles, deletedFiles) {
  if (!client_1.daemonClient.enabled()) {
- updateFilesInContext([...createdFiles, ...updatedFiles], deletedFiles);
+ updateFilesInContext(workspaceRoot, [...createdFiles, ...updatedFiles], deletedFiles);
  }
  else if ((0, is_on_daemon_1.isOnDaemon)()) {
  // make sure to only import this when running on the daemon
@@ -72,7 +72,8 @@ async function updateContextWithChangedFiles(createdFiles, updatedFiles, deleted
  await client_1.daemonClient.updateWorkspaceContext(createdFiles, updatedFiles, deletedFiles);
  }
  }
- function updateFilesInContext(updatedFiles, deletedFiles) {
+ function updateFilesInContext(workspaceRoot, updatedFiles, deletedFiles) {
+ ensureContextAvailable(workspaceRoot);
  return workspaceContext?.incrementalUpdate(updatedFiles, deletedFiles);
  }
  async function getAllFileDataInContext(workspaceRoot) {