nx 19.7.0-canary.20240829-0ef6892 → 19.7.0-canary.20240831-18e5535

Files changed (29)
  1. package/package.json +12 -12
  2. package/schemas/nx-schema.json +5 -0
  3. package/src/command-line/connect/command-object.js +10 -3
  4. package/src/command-line/connect/connect-to-nx-cloud.d.ts +3 -1
  5. package/src/command-line/connect/connect-to-nx-cloud.js +7 -4
  6. package/src/command-line/import/command-object.js +4 -0
  7. package/src/command-line/import/import.d.ts +4 -0
  8. package/src/command-line/import/import.js +147 -12
  9. package/src/command-line/import/utils/prepare-source-repo.d.ts +1 -1
  10. package/src/command-line/import/utils/prepare-source-repo.js +31 -85
  11. package/src/command-line/release/changelog.js +11 -1
  12. package/src/command-line/release/command-object.d.ts +1 -0
  13. package/src/command-line/release/command-object.js +6 -1
  14. package/src/command-line/release/plan-check.js +7 -60
  15. package/src/command-line/release/plan.js +131 -37
  16. package/src/command-line/release/utils/get-touched-projects-for-group.d.ts +7 -0
  17. package/src/command-line/release/utils/get-touched-projects-for-group.js +78 -0
  18. package/src/command-line/yargs-utils/shared-options.js +1 -9
  19. package/src/daemon/server/sync-generators.d.ts +4 -0
  20. package/src/daemon/server/sync-generators.js +172 -52
  21. package/src/native/nx.wasm32-wasi.wasm +0 -0
  22. package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.d.ts +2 -1
  23. package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +16 -14
  24. package/src/nx-cloud/generators/connect-to-nx-cloud/schema.json +4 -0
  25. package/src/utils/git-utils.d.ts +7 -10
  26. package/src/utils/git-utils.js +61 -44
  27. package/src/utils/sync-generators.d.ts +2 -2
  28. package/src/utils/squash.d.ts +0 -1
  29. package/src/utils/squash.js +0 -12

package/src/daemon/server/sync-generators.js
@@ -4,6 +4,7 @@ exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
 exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
 exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
 exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+exports._getConflictingGeneratorGroups = _getConflictingGeneratorGroups;
 const nx_json_1 = require("../../config/nx-json");
 const tree_1 = require("../../generators/tree");
 const file_hasher_1 = require("../../hasher/file-hasher");
@@ -25,7 +26,6 @@ let storedDisabledTaskSyncGeneratorsHash;
 const log = (...messageParts) => {
     logger_1.serverLogger.log('[SYNC]:', ...messageParts);
 };
-// TODO(leo): check conflicts and reuse the Tree where possible
 async function getCachedSyncGeneratorChanges(generators) {
     try {
         log('get sync generators changes on demand', generators);
@@ -37,51 +37,15 @@ async function getCachedSyncGeneratorChanges(generators) {
         }
         // reset the wait time
         waitPeriod = 100;
-        let projects;
-        let errored = false;
-        const getProjectsConfigurations = async () => {
-            if (projects || errored) {
-                return projects;
-            }
-            const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
-            projects = projectGraph
-                ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
-                : null;
-            errored = error !== undefined;
-            return projects;
-        };
-        return (await Promise.all(generators.map(async (generator) => {
-            if (scheduledGenerators.has(generator) ||
-                !syncGeneratorsCacheResultPromises.has(generator)) {
-                // it's scheduled to run (there are pending changes to process) or
-                // it's not scheduled and there's no cached result, so run it
-                const projects = await getProjectsConfigurations();
-                if (projects) {
-                    log(generator, 'already scheduled or not cached, running it now');
-                    runGenerator(generator, projects);
-                }
-                else {
-                    log(generator, 'already scheduled or not cached, project graph errored');
-                    /**
-                     * This should never happen. This is invoked imperatively, and by
-                     * the time it is invoked, the project graph would have already
-                     * been requested. If it errored, it would have been reported and
-                     * this wouldn't have been invoked. We handle it just in case.
-                     *
-                     * Since the project graph would be reported by the relevant
-                     * handlers separately, we just ignore the error, don't cache
-                     * any result and return an empty result, the next time this is
-                     * invoked the process will repeat until it eventually recovers
-                     * when the project graph is fixed.
-                     */
-                    return Promise.resolve({ changes: [], generatorName: generator });
-                }
-            }
-            else {
-                log(generator, 'not scheduled and has cached result, returning cached result');
-            }
-            return syncGeneratorsCacheResultPromises.get(generator);
-        }))).flat();
+        const results = await getFromCacheOrRunGenerators(generators);
+        const conflicts = _getConflictingGeneratorGroups(results);
+        if (!conflicts.length) {
+            // there are no conflicts
+            return results;
+        }
+        // there are conflicts, so we need to re-run the conflicting generators
+        // using the same tree
+        return await processConflictingGenerators(conflicts, results);
     }
     catch (e) {
         console.error(e);
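
The rewritten getCachedSyncGeneratorChanges first gathers a result per generator, then groups generators whose file changes overlap and re-runs those groups against one shared tree (see the hunks that follow). A minimal TypeScript sketch of the data shapes that drive that flow; the result type mirrors this diff, while the sample data and names are purely illustrative:

// Illustrative shapes only; SyncGeneratorChangesResult mirrors the type used in this diff.
interface FileChange {
  path: string;
  type: 'CREATE' | 'UPDATE' | 'DELETE';
  content?: string | Buffer;
}

interface SyncGeneratorChangesResult {
  generatorName: string;
  changes: FileChange[];
}

// Generators `a` and `b` both touch tsconfig.base.json, so they form one conflict
// group and would be re-run sequentially against the same in-memory tree;
// `c` does not conflict and keeps its cached result.
const results: SyncGeneratorChangesResult[] = [
  { generatorName: 'a', changes: [{ path: 'tsconfig.base.json', type: 'UPDATE' }] },
  { generatorName: 'b', changes: [{ path: 'tsconfig.base.json', type: 'UPDATE' }] },
  { generatorName: 'c', changes: [{ path: 'apps/app1/project.json', type: 'UPDATE' }] },
];
// expected grouping: [['a', 'b']]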
@@ -131,7 +95,7 @@ function collectAndScheduleSyncGenerators(projectGraph) {
         }
         const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
         for (const generator of scheduledGenerators) {
-            runGenerator(generator, projects);
+            syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
         }
         await Promise.all(syncGeneratorsCacheResultPromises.values());
     }, waitPeriod);
@@ -148,6 +112,163 @@ async function getCachedRegisteredSyncGenerators() {
     }
     return [...registeredSyncGenerators];
 }
+async function getFromCacheOrRunGenerators(generators) {
+    let projects;
+    let errored = false;
+    const getProjectsConfigurations = async () => {
+        if (projects || errored) {
+            return projects;
+        }
+        const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+        projects = projectGraph
+            ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+            : null;
+        errored = error !== undefined;
+        return projects;
+    };
+    return (await Promise.all(generators.map(async (generator) => {
+        if (scheduledGenerators.has(generator) ||
+            !syncGeneratorsCacheResultPromises.has(generator)) {
+            // it's scheduled to run (there are pending changes to process) or
+            // it's not scheduled and there's no cached result, so run it
+            const projects = await getProjectsConfigurations();
+            if (projects) {
+                log(generator, 'already scheduled or not cached, running it now');
+                syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
+            }
+            else {
+                log(generator, 'already scheduled or not cached, project graph errored');
+                /**
+                 * This should never happen. This is invoked imperatively, and by
+                 * the time it is invoked, the project graph would have already
+                 * been requested. If it errored, it would have been reported and
+                 * this wouldn't have been invoked. We handle it just in case.
+                 *
+                 * Since the project graph would be reported by the relevant
+                 * handlers separately, we just ignore the error, don't cache
+                 * any result and return an empty result, the next time this is
+                 * invoked the process will repeat until it eventually recovers
+                 * when the project graph is fixed.
+                 */
+                return Promise.resolve({ changes: [], generatorName: generator });
+            }
+        }
+        else {
+            log(generator, 'not scheduled and has cached result, returning cached result');
+        }
+        return syncGeneratorsCacheResultPromises.get(generator);
+    }))).flat();
+}
+async function runConflictingGenerators(tree, generators) {
+    const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+    const projects = projectGraph
+        ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+        : null;
+    if (!projects) {
+        /**
+         * This should never happen. This is invoked imperatively, and by
+         * the time it is invoked, the project graph would have already
+         * been requested. If it errored, it would have been reported and
+         * this wouldn't have been invoked. We handle it just in case.
+         *
+         * Since the project graph would be reported by the relevant
+         * handlers separately, we just ignore the error.
+         */
+        return generators.map((generator) => ({
+            changes: [],
+            generatorName: generator,
+        }));
+    }
+    // we need to run conflicting generators sequentially because they use the same tree
+    const results = [];
+    for (const generator of generators) {
+        log(generator, 'running it now');
+        results.push(await runGenerator(generator, projects, tree));
+    }
+    return results;
+}
+async function processConflictingGenerators(conflicts, initialResults) {
+    const conflictRunResults = (await Promise.all(conflicts.map((generators) => {
+        const [firstGenerator, ...generatorsToRun] = generators;
+        // it must exists because the conflicts were identified from the initial results
+        const firstGeneratorResult = initialResults.find((r) => r.generatorName === firstGenerator);
+        const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generators ${generators.join(',')}`);
+        // pre-apply the changes from the first generator to avoid running it
+        for (const change of firstGeneratorResult.changes) {
+            if (change.type === 'CREATE' || change.type === 'UPDATE') {
+                tree.write(change.path, change.content, change.options);
+            }
+            else if (change.type === 'DELETE') {
+                tree.delete(change.path);
+            }
+        }
+        /**
+         * We don't cache the results of conflicting generators because they
+         * use the same tree, so some files might contain results from multiple
+         * generators and we don't have guarantees that the same combination of
+         * generators will run together.
+         */
+        return runConflictingGenerators(tree, generatorsToRun);
+    }))).flat();
+    /**
+     * The order of the results from the re-run generators is important because
+     * the last result from a group of conflicting generators will contain the
+     * changes from the previous conflicting generators. So, instead of replacing
+     * in-place the initial results, we first add the results from the re-run
+     * generators, and then add the initial results that were not from a
+     * conflicting generator.
+     */
+    const results = [...conflictRunResults];
+    for (const result of initialResults) {
+        if (conflictRunResults.every((r) => r.generatorName !== result.generatorName)) {
+            // this result is not from a conflicting generator, so we add it to the
+            // results
+            results.push(result);
+        }
+    }
+    return results;
+}
+/**
+ * @internal
+ */
+function _getConflictingGeneratorGroups(results) {
+    const changedFileToGeneratorMap = new Map();
+    for (const result of results) {
+        for (const change of result.changes) {
+            if (!changedFileToGeneratorMap.has(change.path)) {
+                changedFileToGeneratorMap.set(change.path, new Set());
+            }
+            changedFileToGeneratorMap.get(change.path).add(result.generatorName);
+        }
+    }
+    const conflicts = [];
+    for (const generatorSet of changedFileToGeneratorMap.values()) {
+        if (generatorSet.size === 1) {
+            // no conflicts
+            continue;
+        }
+        if (conflicts.length === 0) {
+            // there are no conflicts yet, so we just add the first group
+            conflicts.push(new Set(generatorSet));
+            continue;
+        }
+        // identify if any of the current generator sets intersect with any of the
+        // existing conflict groups
+        const generatorsArray = Array.from(generatorSet);
+        const existingConflictGroup = conflicts.find((group) => generatorsArray.some((generator) => group.has(generator)));
+        if (existingConflictGroup) {
+            // there's an intersecting group, so we merge the two
+            for (const generator of generatorsArray) {
+                existingConflictGroup.add(generator);
+            }
+        }
+        else {
+            // there's no intersecting group, so we create a new one
+            conflicts.push(new Set(generatorsArray));
+        }
+    }
+    return conflicts.map((group) => Array.from(group));
+}
 function collectAllRegisteredSyncGenerators(projectGraph) {
     const nxJson = (0, nx_json_1.readNxJson)();
     const projectGraphHash = hashProjectGraph(projectGraph);
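
_getConflictingGeneratorGroups groups generators that changed at least one common file, merging groups that intersect. A self-contained TypeScript sketch of that grouping, offered as an illustration rather than the exported internal (the function name below is not an nx export):

// Standalone sketch of the grouping: generators sharing a changed file end up together.
function groupConflictingGenerators(
  results: { generatorName: string; changes: { path: string }[] }[]
): string[][] {
  const fileToGenerators = new Map<string, Set<string>>();
  for (const result of results) {
    for (const change of result.changes) {
      if (!fileToGenerators.has(change.path)) {
        fileToGenerators.set(change.path, new Set());
      }
      fileToGenerators.get(change.path)!.add(result.generatorName);
    }
  }
  const groups: Set<string>[] = [];
  for (const generators of fileToGenerators.values()) {
    if (generators.size === 1) continue; // a single writer of a file is not a conflict
    const names = Array.from(generators);
    // merge into the first existing group that shares a generator, otherwise start a new group
    const existing = groups.find((group) => names.some((g) => group.has(g)));
    if (existing) {
      for (const g of names) existing.add(g);
    } else {
      groups.push(new Set(names));
    }
  }
  return groups.map((group) => Array.from(group));
}

// e.g. groupConflictingGenerators(results) with the sample data sketched earlier -> [['a', 'b']]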
@@ -191,16 +312,15 @@ function collectAllRegisteredSyncGenerators(projectGraph) {
         }
     }
 }
-function runGenerator(generator, projects) {
+function runGenerator(generator, projects, tree) {
     log('running scheduled generator', generator);
     // remove it from the scheduled set
     scheduledGenerators.delete(generator);
-    const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
-    // run the generator and cache the result
-    syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+    tree ??= new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+    return (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
        log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
        return result;
-    }));
+    });
 }
 function hashProjectGraph(projectGraph) {
     const stringifiedProjects = Object.entries(projectGraph.nodes)
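
With this hunk, runGenerator accepts an optional pre-seeded tree (falling back to a fresh FsTree via `??=`) and returns the result promise instead of writing it into the cache itself, so the caller decides whether to cache. A small hedged TypeScript sketch of that caller-managed pattern; the cache, runner, and generator names below are illustrative stand-ins, not nx APIs:

// Illustrative stand-ins for the daemon's result cache and generator runner.
const resultCache = new Map<string, Promise<string>>();

function runOne(generator: string, sharedTree?: { changes: string[] }): Promise<string> {
  // fall back to a fresh "tree" only when none was provided (mirrors `tree ??= new FsTree(...)`)
  const tree = sharedTree ?? { changes: [] };
  tree.changes.push(generator);
  return Promise.resolve(`${generator} -> [${tree.changes.join(', ')}]`);
}

// normal path: the caller stores the promise in its cache
resultCache.set('my-plugin:sync', runOne('my-plugin:sync'));

// conflicting path: a shared tree accumulates changes and the results are deliberately not cached
const shared = { changes: [] as string[] };
runOne('a:sync', shared).then(console.log);
runOne('b:sync', shared).then(console.log); // sees the change recorded by 'a:sync'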

package/src/native/nx.wasm32-wasi.wasm
Binary file (contents not shown)

package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.d.ts
@@ -7,7 +7,8 @@ export interface ConnectToNxCloudOptions {
     hideFormatLogs?: boolean;
     github?: boolean;
     directory?: string;
+    generateToken?: boolean;
 }
-export declare function connectToNxCloud(tree: Tree, schema: ConnectToNxCloudOptions, nxJson?: NxJsonConfiguration<string[] | "*">): Promise<string>;
+export declare function connectToNxCloud(tree: Tree, schema: ConnectToNxCloudOptions, nxJson?: NxJsonConfiguration<string[] | "*">): Promise<string | null>;
 declare function connectToNxCloudGenerator(tree: Tree, options: ConnectToNxCloudOptions): Promise<void>;
 export default connectToNxCloudGenerator;

package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js
@@ -88,20 +88,22 @@ async function connectToNxCloud(tree, schema, nxJson = (0, nx_json_1.readNxJson)
         printCloudConnectionDisabledMessage();
         return null;
     }
-    else {
-        const usesGithub = schema.github ?? (await (0, url_shorten_1.repoUsesGithub)(schema.github));
-        let responseFromCreateNxCloudWorkspaceV2;
-        // do NOT create Nx Cloud token (createNxCloudWorkspace)
-        // if user is using github and is running nx-connect
-        if (!(usesGithub && schema.installationSource === 'nx-connect')) {
-            responseFromCreateNxCloudWorkspaceV2 = await createNxCloudWorkspaceV2(getRootPackageName(tree), schema.installationSource, getNxInitDate());
-            addNxCloudIdToNxJson(tree, responseFromCreateNxCloudWorkspaceV2?.nxCloudId, schema.directory);
-            await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
-                silent: schema.hideFormatLogs,
-            });
-            return responseFromCreateNxCloudWorkspaceV2.nxCloudId;
-        }
-    }
+    const isGitHubDetected = schema.github ?? (await (0, url_shorten_1.repoUsesGithub)(schema.github));
+    let responseFromCreateNxCloudWorkspaceV2;
+    /**
+     * Do not create an Nx Cloud token if the user is using GitHub and
+     * is running `nx-connect` AND `token` is undefined (override)
+     */
+    if (!schema.generateToken &&
+        isGitHubDetected &&
+        schema.installationSource === 'nx-connect')
+        return null;
+    responseFromCreateNxCloudWorkspaceV2 = await createNxCloudWorkspaceV2(getRootPackageName(tree), schema.installationSource, getNxInitDate());
+    addNxCloudIdToNxJson(tree, responseFromCreateNxCloudWorkspaceV2?.nxCloudId, schema.directory);
+    await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
+        silent: schema.hideFormatLogs,
+    });
+    return responseFromCreateNxCloudWorkspaceV2.nxCloudId;
 }
 async function connectToNxCloudGenerator(tree, options) {
     await connectToNxCloud(tree, options);
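
After this change, connectToNxCloud resolves to null (no token or workspace ID created) when GitHub is detected and the installation source is `nx-connect`, unless the new generateToken option explicitly forces creation. A hedged TypeScript sketch of that decision; the option names mirror the diff, the helper function itself is illustrative:

// Sketch of the token-creation decision added in this diff; not an nx export.
interface ConnectOptions {
  installationSource: string;
  github?: boolean;
  generateToken?: boolean;
}

function shouldSkipTokenCreation(opts: ConnectOptions, isGitHubDetected: boolean): boolean {
  // Skip only when GitHub is in use via `nx connect` and no explicit override was given.
  return !opts.generateToken && isGitHubDetected && opts.installationSource === 'nx-connect';
}

// Illustrative outcomes (`'other-source'` is a placeholder installation source):
shouldSkipTokenCreation({ installationSource: 'nx-connect' }, true);                      // true  -> connectToNxCloud returns null
shouldSkipTokenCreation({ installationSource: 'nx-connect', generateToken: true }, true); // false -> a workspace ID is created
shouldSkipTokenCreation({ installationSource: 'other-source' }, true);                    // false -> a workspace ID is created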

package/src/nx-cloud/generators/connect-to-nx-cloud/schema.json
@@ -21,6 +21,10 @@
       "description": "Hide formatting logs",
       "x-priority": "internal"
     },
+    "generateToken": {
+      "type": "boolean",
+      "description": "Explicitly asks for a token to be created, do not override existing tokens from Nx Cloud"
+    },
     "github": {
       "type": "boolean",
       "description": "If the user will be using GitHub as their git hosting provider",

package/src/utils/git-utils.d.ts
@@ -1,6 +1,6 @@
-import { ExecSyncOptions } from 'child_process';
-export declare function cloneFromUpstream(url: string, destination: string, { originName }?: {
+export declare function cloneFromUpstream(url: string, destination: string, { originName, depth }?: {
     originName: string;
+    depth?: number;
 }): Promise<GitRepository>;
 export declare class GitRepository {
     private directory;
@@ -8,12 +8,10 @@ export declare class GitRepository {
     constructor(directory: string);
     getGitRootPath(cwd: string): string;
     addFetchRemote(remoteName: string, branch: string): Promise<string>;
-    private execAsync;
     showStat(): Promise<string>;
     listBranches(): Promise<string[]>;
     getGitFiles(path: string): Promise<string[]>;
     reset(ref: string): Promise<string>;
-    squashLastTwoCommits(): Promise<string>;
     mergeUnrelatedHistories(ref: string, message: string): Promise<string>;
     fetch(remote: string, ref?: string): Promise<string>;
     checkout(branch: string, opts: {
@@ -25,14 +23,13 @@ export declare class GitRepository {
     commit(message: string): Promise<string>;
     amendCommit(): Promise<string>;
     deleteGitRemote(name: string): Promise<string>;
-    deleteBranch(branch: string): Promise<string>;
     addGitRemote(name: string, url: string): Promise<string>;
+    hasFilterRepoInstalled(): Promise<boolean>;
+    filterRepo(subdirectory: string): Promise<string>;
+    filterBranch(subdirectory: string, branchName: string): Promise<string>;
+    private execAsync;
+    private quotePath;
 }
-/**
- * This is used by the squash editor script to update the rebase file.
- */
-export declare function updateRebaseFile(contents: string): string;
-export declare function fetchGitRemote(name: string, branch: string, execOptions: ExecSyncOptions): string | Buffer;
 /**
  * This is currently duplicated in Nx Console. Please let @MaxKless know if you make changes here.
  */
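
The new declarations let callers probe for git-filter-repo and fall back to filter-branch when it is not installed. A hedged usage sketch against the API declared above; the import path and helper name are illustrative, within the published package these utilities live in src/utils/git-utils:

// Hedged usage sketch of the new GitRepository methods; not taken from nx sources.
import { cloneFromUpstream } from 'nx/src/utils/git-utils';

async function importSubdirectory(url: string, destination: string, subdir: string, branch: string) {
  // depth is now optional; omitting it keeps the full history that filtering needs
  const repo = await cloneFromUpstream(url, destination, { originName: 'origin' });
  if (await repo.hasFilterRepoInstalled()) {
    // git-filter-repo is faster, but must be installed separately by the user
    await repo.filterRepo(subdir);
  } else {
    // fall back to the slower, built-in git filter-branch
    await repo.filterBranch(subdir, branch);
  }
  return repo;
}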

package/src/utils/git-utils.js
@@ -2,16 +2,13 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.GitRepository = void 0;
 exports.cloneFromUpstream = cloneFromUpstream;
-exports.updateRebaseFile = updateRebaseFile;
-exports.fetchGitRemote = fetchGitRemote;
 exports.getGithubSlugOrNull = getGithubSlugOrNull;
 exports.extractUserAndRepoFromGitHubUrl = extractUserAndRepoFromGitHubUrl;
 exports.commitChanges = commitChanges;
 exports.getLatestCommitSha = getLatestCommitSha;
 const child_process_1 = require("child_process");
-const devkit_exports_1 = require("../devkit-exports");
 const path_1 = require("path");
-const SQUASH_EDITOR = (0, path_1.join)(__dirname, 'squash.js');
+const devkit_exports_1 = require("../devkit-exports");
 function execAsync(command, execOptions) {
     return new Promise((res, rej) => {
         (0, child_process_1.exec)(command, execOptions, (err, stdout, stderr) => {
@@ -22,9 +19,12 @@ function execAsync(command, execOptions) {
         });
     });
 }
-async function cloneFromUpstream(url, destination, { originName } = { originName: 'origin' }) {
-    await execAsync(`git clone ${url} ${destination} --depth 1 --origin ${originName}`, {
+async function cloneFromUpstream(url, destination, { originName, depth } = {
+    originName: 'origin',
+}) {
+    await execAsync(`git clone ${url} ${destination} ${depth ? `--depth ${depth}` : ''} --origin ${originName}`, {
         cwd: (0, path_1.dirname)(destination),
+        maxBuffer: 10 * 1024 * 1024,
     });
     return new GitRepository(destination);
 }
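
cloneFromUpstream no longer hard-codes `--depth 1`: the depth is optional so callers that rewrite history (such as the import flow) can clone the full history, and the exec buffer is raised to 10 MB for large clones. A hedged TypeScript sketch of how the command string is assembled; `buildCloneCommand` is an illustrative helper, not an nx export:

// Sketch only: depth is appended only when provided, mirroring the template above.
function buildCloneCommand(url: string, destination: string, opts: { originName: string; depth?: number }): string {
  const depthArg = opts.depth ? `--depth ${opts.depth}` : '';
  return `git clone ${url} ${destination} ${depthArg} --origin ${opts.originName}`;
}

buildCloneCommand('https://github.com/org/repo.git', '/tmp/repo', { originName: 'origin' });
// "git clone https://github.com/org/repo.git /tmp/repo  --origin origin"  (full history)
buildCloneCommand('https://github.com/org/repo.git', '/tmp/repo', { originName: 'origin', depth: 1 });
// "git clone https://github.com/org/repo.git /tmp/repo --depth 1 --origin origin"  (shallow)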
@@ -40,13 +40,8 @@ class GitRepository {
             .toString()
             .trim();
     }
-    addFetchRemote(remoteName, branch) {
-        return this.execAsync(`git config --add remote.${remoteName}.fetch "+refs/heads/${branch}:refs/remotes/${remoteName}/${branch}"`);
-    }
-    execAsync(command) {
-        return execAsync(command, {
-            cwd: this.root,
-        });
+    async addFetchRemote(remoteName, branch) {
+        return await this.execAsync(`git config --add remote.${remoteName}.fetch "+refs/heads/${branch}:refs/remotes/${remoteName}/${branch}"`);
     }
     async showStat() {
         return await this.execAsync(`git show --stat`);
@@ -61,62 +56,84 @@ class GitRepository {
             .replace('refs/heads/', ''));
     }
     async getGitFiles(path) {
-        return (await this.execAsync(`git ls-files ${path}`))
+        // Use -z to return file names exactly as they are stored in git, separated by NULL (\x00) character.
+        // This avoids problems with special characters in file names.
+        return (await this.execAsync(`git ls-files -z ${path}`))
             .trim()
-            .split('\n')
+            .split('\x00')
             .map((s) => s.trim())
             .filter(Boolean);
     }
     async reset(ref) {
-        return this.execAsync(`git reset ${ref} --hard`);
-    }
-    async squashLastTwoCommits() {
-        return this.execAsync(`git -c core.editor="node ${SQUASH_EDITOR}" rebase --interactive --no-autosquash HEAD~2`);
+        return await this.execAsync(`git reset ${ref} --hard`);
     }
     async mergeUnrelatedHistories(ref, message) {
-        return this.execAsync(`git merge ${ref} -X ours --allow-unrelated-histories -m "${message}"`);
+        return await this.execAsync(`git merge ${ref} -X ours --allow-unrelated-histories -m "${message}"`);
     }
     async fetch(remote, ref) {
-        return this.execAsync(`git fetch ${remote}${ref ? ` ${ref}` : ''}`);
+        return await this.execAsync(`git fetch ${remote}${ref ? ` ${ref}` : ''}`);
     }
     async checkout(branch, opts) {
-        return this.execAsync(`git checkout ${opts.new ? '-b ' : ' '}${branch}${opts.base ? ' ' + opts.base : ''}`);
+        return await this.execAsync(`git checkout ${opts.new ? '-b ' : ' '}${branch}${opts.base ? ' ' + opts.base : ''}`);
     }
     async move(path, destination) {
-        return this.execAsync(`git mv "${path}" "${destination}"`);
+        return await this.execAsync(`git mv ${this.quotePath(path)} ${this.quotePath(destination)}`);
     }
     async push(ref, remoteName) {
-        return this.execAsync(`git push -u -f ${remoteName} ${ref}`);
+        return await this.execAsync(`git push -u -f ${remoteName} ${ref}`);
     }
     async commit(message) {
-        return this.execAsync(`git commit -am "${message}"`);
+        return await this.execAsync(`git commit -am "${message}"`);
     }
     async amendCommit() {
-        return this.execAsync(`git commit --amend -a --no-edit`);
+        return await this.execAsync(`git commit --amend -a --no-edit`);
     }
-    deleteGitRemote(name) {
-        return this.execAsync(`git remote rm ${name}`);
+    async deleteGitRemote(name) {
+        return await this.execAsync(`git remote rm ${name}`);
     }
-    deleteBranch(branch) {
-        return this.execAsync(`git branch -D ${branch}`);
+    async addGitRemote(name, url) {
+        return await this.execAsync(`git remote add ${name} ${url}`);
+    }
+    async hasFilterRepoInstalled() {
+        try {
+            await this.execAsync(`git filter-repo --help`);
+            return true;
+        }
+        catch {
+            return false;
+        }
     }
-    addGitRemote(name, url) {
-        return this.execAsync(`git remote add ${name} ${url}`);
+    // git-filter-repo is much faster than filter-branch, but needs to be installed by user
+    // Use `hasFilterRepoInstalled` to check if it's installed
+    async filterRepo(subdirectory) {
+        // filter-repo requires POSIX path to work
+        const posixPath = subdirectory.split(path_1.sep).join(path_1.posix.sep);
+        return await this.execAsync(`git filter-repo -f --subdirectory-filter ${this.quotePath(posixPath)}`);
+    }
+    async filterBranch(subdirectory, branchName) {
+        // filter-repo requires POSIX path to work
+        const posixPath = subdirectory.split(path_1.sep).join(path_1.posix.sep);
+        // We need non-ASCII file names to not be quoted, or else filter-branch will exclude them.
+        await this.execAsync(`git config core.quotepath false`);
+        return await this.execAsync(`git filter-branch --subdirectory-filter ${this.quotePath(posixPath)} -- ${branchName}`);
+    }
+    execAsync(command) {
+        return execAsync(command, {
+            cwd: this.root,
+            maxBuffer: 10 * 1024 * 1024,
+        });
+    }
+    quotePath(path) {
+        return process.platform === 'win32'
+            ? // Windows/CMD only understands double-quotes, single-quotes are treated as part of the file name
+                // Bash and other shells will substitute `$` in file names with a variable value.
+                `"${path}"`
+            : // e.g. `git mv "$$file.txt" "libs/a/$$file.txt"` will not work since `$$` is swapped with the PID of the last process.
+                // Using single-quotes prevents this substitution.
+                `'${path}'`;
     }
 }
 exports.GitRepository = GitRepository;
-/**
- * This is used by the squash editor script to update the rebase file.
- */
-function updateRebaseFile(contents) {
-    const lines = contents.split('\n');
-    const lastCommitIndex = lines.findIndex((line) => line === '') - 1;
-    lines[lastCommitIndex] = lines[lastCommitIndex].replace('pick', 'fixup');
-    return lines.join('\n');
-}
-function fetchGitRemote(name, branch, execOptions) {
-    return (0, child_process_1.execSync)(`git fetch ${name} ${branch} --depth 1`, execOptions);
-}
 /**
  * This is currently duplicated in Nx Console. Please let @MaxKless know if you make changes here.
  */
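
Two hardening changes in this hunk are worth illustrating: `git ls-files -z` separates entries with NUL so unusual file names cannot break parsing, and quotePath picks double quotes on Windows (cmd.exe) versus single quotes elsewhere so `$` is not expanded by the shell. A standalone TypeScript sketch of both ideas; the helper names are illustrative, not nx exports:

// Standalone sketch; assumes Node.js and a git checkout at `cwd`.
import { execSync } from 'node:child_process';

// `git ls-files -z` separates entries with NUL, so newlines or other special
// characters inside file names cannot corrupt the parsed list.
function listGitFiles(cwd: string, path: string): string[] {
  return execSync(`git ls-files -z ${path}`, { cwd, maxBuffer: 10 * 1024 * 1024 })
    .toString()
    .split('\x00')
    .map((s) => s.trim())
    .filter(Boolean);
}

// cmd.exe only understands double quotes, while POSIX shells expand `$` inside
// double quotes; single quotes keep names like `$file.txt` literal there.
function quotePath(p: string): string {
  return process.platform === 'win32' ? `"${p}"` : `'${p}'`;
}

// e.g. quotePath('libs/a/$file.txt') -> "libs/a/$file.txt" quoted for Windows, '...' elsewhere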

package/src/utils/sync-generators.d.ts
@@ -3,7 +3,7 @@ import { type NxJsonConfiguration } from '../config/nx-json';
 import type { ProjectGraph } from '../config/project-graph';
 import type { TaskGraph } from '../config/task-graph';
 import type { ProjectConfiguration } from '../config/workspace-json-project-json';
-import { FsTree, type FileChange, type Tree } from '../generators/tree';
+import { type FileChange, type Tree } from '../generators/tree';
 export type SyncGeneratorResult = void | {
     callback?: GeneratorCallback;
     outOfSyncMessage?: string;
@@ -18,7 +18,7 @@ export type SyncGeneratorChangesResult = {
 export declare function getSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorChangesResult[]>;
 export declare function flushSyncGeneratorChanges(results: SyncGeneratorChangesResult[]): Promise<void>;
 export declare function collectAllRegisteredSyncGenerators(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Promise<string[]>;
-export declare function runSyncGenerator(tree: FsTree, generatorSpecifier: string, projects: Record<string, ProjectConfiguration>): Promise<SyncGeneratorChangesResult>;
+export declare function runSyncGenerator(tree: Tree, generatorSpecifier: string, projects: Record<string, ProjectConfiguration>): Promise<SyncGeneratorChangesResult>;
 export declare function collectEnabledTaskSyncGeneratorsFromProjectGraph(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Set<string>;
 export declare function collectEnabledTaskSyncGeneratorsFromTaskGraph(taskGraph: TaskGraph, projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Set<string>;
 export declare function collectRegisteredGlobalSyncGenerators(nxJson?: NxJsonConfiguration<string[] | "*">): Set<string>;
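
Widening runSyncGenerator's tree parameter from the concrete FsTree to the Tree interface is what lets the daemon hand several conflicting generators the same pre-populated tree. A simplified TypeScript sketch of the idea; the types below are stand-ins, not the real nx interfaces:

// Simplified stand-in: any Tree implementation can be passed, including one that
// already holds the changes written by earlier generators.
interface Tree {
  read(path: string): string | null;
  write(path: string, content: string): void;
}

type SyncGenerator = (tree: Tree) => void;

function runSequentially(generators: SyncGenerator[], tree: Tree): void {
  // each generator sees the changes made by the ones that ran before it
  for (const generate of generators) {
    generate(tree);
  }
}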

package/src/utils/squash.d.ts
@@ -1 +0,0 @@
-export {};

package/src/utils/squash.js
@@ -1,12 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const fs_1 = require("fs");
-const git_utils_1 = require("./git-utils");
-// This script is used as an editor for git rebase -i
-// This is the file which git creates. When this script exits, the updates should be written to this file.
-const filePath = process.argv[2];
-// Change the second commit from pick to fixup
-const contents = (0, fs_1.readFileSync)(filePath).toString();
-const newContents = (0, git_utils_1.updateRebaseFile)(contents);
-// Write the updated contents back to the file
-(0, fs_1.writeFileSync)(filePath, newContents);