nx 19.7.0-beta.2 → 19.7.0-beta.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/package.json +12 -12
  2. package/release/changelog-renderer/index.d.ts +1 -1
  3. package/release/changelog-renderer/index.js +46 -11
  4. package/schemas/nx-schema.json +12 -0
  5. package/src/command-line/add/command-object.js +2 -2
  6. package/src/command-line/affected/command-object.js +1 -1
  7. package/src/command-line/connect/command-object.js +11 -4
  8. package/src/command-line/connect/connect-to-nx-cloud.d.ts +3 -1
  9. package/src/command-line/connect/connect-to-nx-cloud.js +7 -4
  10. package/src/command-line/daemon/command-object.js +1 -1
  11. package/src/command-line/deprecated/command-objects.js +2 -2
  12. package/src/command-line/exec/command-object.js +1 -1
  13. package/src/command-line/format/command-object.js +4 -4
  14. package/src/command-line/generate/command-object.js +4 -4
  15. package/src/command-line/graph/command-object.js +6 -6
  16. package/src/command-line/import/command-object.js +9 -5
  17. package/src/command-line/import/import.d.ts +4 -0
  18. package/src/command-line/import/import.js +147 -12
  19. package/src/command-line/import/utils/prepare-source-repo.d.ts +1 -1
  20. package/src/command-line/import/utils/prepare-source-repo.js +31 -85
  21. package/src/command-line/list/command-object.js +1 -1
  22. package/src/command-line/login/command-object.js +1 -1
  23. package/src/command-line/logout/command-object.js +1 -1
  24. package/src/command-line/migrate/command-object.js +9 -9
  25. package/src/command-line/new/command-object.js +2 -2
  26. package/src/command-line/release/changelog.js +53 -12
  27. package/src/command-line/release/command-object.d.ts +2 -0
  28. package/src/command-line/release/command-object.js +37 -33
  29. package/src/command-line/release/config/version-plans.d.ts +14 -1
  30. package/src/command-line/release/config/version-plans.js +33 -1
  31. package/src/command-line/release/index.d.ts +6 -4
  32. package/src/command-line/release/plan-check.js +8 -61
  33. package/src/command-line/release/plan.js +131 -37
  34. package/src/command-line/release/publish.js +3 -0
  35. package/src/command-line/release/release.js +1 -1
  36. package/src/command-line/release/utils/get-touched-projects-for-group.d.ts +7 -0
  37. package/src/command-line/release/utils/get-touched-projects-for-group.js +78 -0
  38. package/src/command-line/release/utils/git.d.ts +1 -1
  39. package/src/command-line/release/utils/git.js +46 -19
  40. package/src/command-line/release/version.js +1 -1
  41. package/src/command-line/report/command-object.js +1 -1
  42. package/src/command-line/reset/command-object.js +1 -1
  43. package/src/command-line/run/command-object.js +1 -1
  44. package/src/command-line/run-many/command-object.js +1 -1
  45. package/src/command-line/show/command-object.js +10 -10
  46. package/src/command-line/sync/sync.js +12 -1
  47. package/src/command-line/watch/command-object.js +1 -1
  48. package/src/command-line/yargs-utils/shared-options.d.ts +2 -1
  49. package/src/command-line/yargs-utils/shared-options.js +26 -29
  50. package/src/config/nx-json.d.ts +5 -1
  51. package/src/core/graph/main.js +1 -1
  52. package/src/daemon/server/sync-generators.d.ts +4 -0
  53. package/src/daemon/server/sync-generators.js +183 -55
  54. package/src/native/nx.wasm32-wasi.wasm +0 -0
  55. package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.d.ts +2 -1
  56. package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +25 -23
  57. package/src/nx-cloud/generators/connect-to-nx-cloud/schema.json +4 -0
  58. package/src/nx-cloud/update-manager.d.ts +1 -1
  59. package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js +8 -1
  60. package/src/project-graph/plugins/isolation/plugin-pool.js +1 -1
  61. package/src/tasks-runner/cache.d.ts +2 -0
  62. package/src/tasks-runner/cache.js +17 -3
  63. package/src/tasks-runner/run-command.js +5 -11
  64. package/src/tasks-runner/task-orchestrator.js +1 -10
  65. package/src/utils/command-line-utils.d.ts +1 -0
  66. package/src/utils/git-utils.d.ts +7 -10
  67. package/src/utils/git-utils.js +61 -44
  68. package/src/utils/sync-generators.d.ts +8 -5
  69. package/src/utils/sync-generators.js +27 -5
  70. package/src/utils/squash.d.ts +0 -1
  71. package/src/utils/squash.js +0 -12
package/src/daemon/server/sync-generators.d.ts
@@ -4,3 +4,7 @@ export declare function getCachedSyncGeneratorChanges(generators: string[]): Pro
  export declare function flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;
  export declare function collectAndScheduleSyncGenerators(projectGraph: ProjectGraph): void;
  export declare function getCachedRegisteredSyncGenerators(): Promise<string[]>;
+ /**
+ * @internal
+ */
+ export declare function _getConflictingGeneratorGroups(results: SyncGeneratorChangesResult[]): string[][];
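For orientation, a minimal sketch (not part of the package) of how this internal helper is expected to behave, with the result type simplified to the fields the implementation further down actually reads:

// Illustrative only: simplified stand-in for SyncGeneratorChangesResult.
type SimplifiedResult = {
  generatorName: string;
  changes: { path: string }[];
};

declare function _getConflictingGeneratorGroups(
  results: SimplifiedResult[]
): string[][];

// Two generators writing the same file form one conflict group; a generator
// whose changed files overlap with nobody else's is not reported.
const groups = _getConflictingGeneratorGroups([
  { generatorName: 'a#sync', changes: [{ path: 'package.json' }] },
  { generatorName: 'b#sync', changes: [{ path: 'package.json' }] },
  { generatorName: 'c#sync', changes: [{ path: 'tsconfig.base.json' }] },
]);
// groups === [['a#sync', 'b#sync']]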
package/src/daemon/server/sync-generators.js
@@ -4,6 +4,7 @@ exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
  exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
  exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
  exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+ exports._getConflictingGeneratorGroups = _getConflictingGeneratorGroups;
  const nx_json_1 = require("../../config/nx-json");
  const tree_1 = require("../../generators/tree");
  const file_hasher_1 = require("../../hasher/file-hasher");
@@ -21,10 +22,10 @@ let registeredSyncGenerators;
  let scheduledTimeoutId;
  let storedProjectGraphHash;
  let storedNxJsonHash;
+ let storedDisabledTaskSyncGeneratorsHash;
  const log = (...messageParts) => {
  logger_1.serverLogger.log('[SYNC]:', ...messageParts);
  };
- // TODO(leo): check conflicts and reuse the Tree where possible
  async function getCachedSyncGeneratorChanges(generators) {
  try {
  log('get sync generators changes on demand', generators);
@@ -36,51 +37,15 @@ async function getCachedSyncGeneratorChanges(generators) {
  }
  // reset the wait time
  waitPeriod = 100;
- let projects;
- let errored = false;
- const getProjectsConfigurations = async () => {
- if (projects || errored) {
- return projects;
- }
- const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
- projects = projectGraph
- ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
- : null;
- errored = error !== undefined;
- return projects;
- };
- return (await Promise.all(generators.map(async (generator) => {
- if (scheduledGenerators.has(generator) ||
- !syncGeneratorsCacheResultPromises.has(generator)) {
- // it's scheduled to run (there are pending changes to process) or
- // it's not scheduled and there's no cached result, so run it
- const projects = await getProjectsConfigurations();
- if (projects) {
- log(generator, 'already scheduled or not cached, running it now');
- runGenerator(generator, projects);
- }
- else {
- log(generator, 'already scheduled or not cached, project graph errored');
- /**
- * This should never happen. This is invoked imperatively, and by
- * the time it is invoked, the project graph would have already
- * been requested. If it errored, it would have been reported and
- * this wouldn't have been invoked. We handle it just in case.
- *
- * Since the project graph would be reported by the relevant
- * handlers separately, we just ignore the error, don't cache
- * any result and return an empty result, the next time this is
- * invoked the process will repeat until it eventually recovers
- * when the project graph is fixed.
- */
- return Promise.resolve({ changes: [], generatorName: generator });
- }
- }
- else {
- log(generator, 'not scheduled and has cached result, returning cached result');
- }
- return syncGeneratorsCacheResultPromises.get(generator);
- }))).flat();
+ const results = await getFromCacheOrRunGenerators(generators);
+ const conflicts = _getConflictingGeneratorGroups(results);
+ if (!conflicts.length) {
+ // there are no conflicts
+ return results;
+ }
+ // there are conflicts, so we need to re-run the conflicting generators
+ // using the same tree
+ return await processConflictingGenerators(conflicts, results);
  }
  catch (e) {
  console.error(e);
@@ -107,6 +72,10 @@ function collectAndScheduleSyncGenerators(projectGraph) {
  // a change imply we need to re-run all the generators
  // make sure to schedule all the collected generators
  scheduledGenerators.clear();
+ if (!registeredSyncGenerators.size) {
+ // there are no generators to run
+ return;
+ }
  for (const generator of registeredSyncGenerators) {
  scheduledGenerators.add(generator);
  }
@@ -126,7 +95,7 @@ function collectAndScheduleSyncGenerators(projectGraph) {
  }
  const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
  for (const generator of scheduledGenerators) {
- runGenerator(generator, projects);
+ syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
  }
  await Promise.all(syncGeneratorsCacheResultPromises.values());
  }, waitPeriod);
@@ -143,17 +112,177 @@ async function getCachedRegisteredSyncGenerators() {
  }
  return [...registeredSyncGenerators];
  }
+ async function getFromCacheOrRunGenerators(generators) {
+ let projects;
+ let errored = false;
+ const getProjectsConfigurations = async () => {
+ if (projects || errored) {
+ return projects;
+ }
+ const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+ projects = projectGraph
+ ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+ : null;
+ errored = error !== undefined;
+ return projects;
+ };
+ return (await Promise.all(generators.map(async (generator) => {
+ if (scheduledGenerators.has(generator) ||
+ !syncGeneratorsCacheResultPromises.has(generator)) {
+ // it's scheduled to run (there are pending changes to process) or
+ // it's not scheduled and there's no cached result, so run it
+ const projects = await getProjectsConfigurations();
+ if (projects) {
+ log(generator, 'already scheduled or not cached, running it now');
+ syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
+ }
+ else {
+ log(generator, 'already scheduled or not cached, project graph errored');
+ /**
+ * This should never happen. This is invoked imperatively, and by
+ * the time it is invoked, the project graph would have already
+ * been requested. If it errored, it would have been reported and
+ * this wouldn't have been invoked. We handle it just in case.
+ *
+ * Since the project graph would be reported by the relevant
+ * handlers separately, we just ignore the error, don't cache
+ * any result and return an empty result, the next time this is
+ * invoked the process will repeat until it eventually recovers
+ * when the project graph is fixed.
+ */
+ return Promise.resolve({ changes: [], generatorName: generator });
+ }
+ }
+ else {
+ log(generator, 'not scheduled and has cached result, returning cached result');
+ }
+ return syncGeneratorsCacheResultPromises.get(generator);
+ }))).flat();
+ }
+ async function runConflictingGenerators(tree, generators) {
+ const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+ const projects = projectGraph
+ ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+ : null;
+ if (!projects) {
+ /**
+ * This should never happen. This is invoked imperatively, and by
+ * the time it is invoked, the project graph would have already
+ * been requested. If it errored, it would have been reported and
+ * this wouldn't have been invoked. We handle it just in case.
+ *
+ * Since the project graph would be reported by the relevant
+ * handlers separately, we just ignore the error.
+ */
+ return generators.map((generator) => ({
+ changes: [],
+ generatorName: generator,
+ }));
+ }
+ // we need to run conflicting generators sequentially because they use the same tree
+ const results = [];
+ for (const generator of generators) {
+ log(generator, 'running it now');
+ results.push(await runGenerator(generator, projects, tree));
+ }
+ return results;
+ }
+ async function processConflictingGenerators(conflicts, initialResults) {
+ const conflictRunResults = (await Promise.all(conflicts.map((generators) => {
+ const [firstGenerator, ...generatorsToRun] = generators;
+ // it must exists because the conflicts were identified from the initial results
+ const firstGeneratorResult = initialResults.find((r) => r.generatorName === firstGenerator);
+ const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generators ${generators.join(',')}`);
+ // pre-apply the changes from the first generator to avoid running it
+ for (const change of firstGeneratorResult.changes) {
+ if (change.type === 'CREATE' || change.type === 'UPDATE') {
+ tree.write(change.path, change.content, change.options);
+ }
+ else if (change.type === 'DELETE') {
+ tree.delete(change.path);
+ }
+ }
+ /**
+ * We don't cache the results of conflicting generators because they
+ * use the same tree, so some files might contain results from multiple
+ * generators and we don't have guarantees that the same combination of
+ * generators will run together.
+ */
+ return runConflictingGenerators(tree, generatorsToRun);
+ }))).flat();
+ /**
+ * The order of the results from the re-run generators is important because
+ * the last result from a group of conflicting generators will contain the
+ * changes from the previous conflicting generators. So, instead of replacing
+ * in-place the initial results, we first add the results from the re-run
+ * generators, and then add the initial results that were not from a
+ * conflicting generator.
+ */
+ const results = [...conflictRunResults];
+ for (const result of initialResults) {
+ if (conflictRunResults.every((r) => r.generatorName !== result.generatorName)) {
+ // this result is not from a conflicting generator, so we add it to the
+ // results
+ results.push(result);
+ }
+ }
+ return results;
+ }
+ /**
+ * @internal
+ */
+ function _getConflictingGeneratorGroups(results) {
+ const changedFileToGeneratorMap = new Map();
+ for (const result of results) {
+ for (const change of result.changes) {
+ if (!changedFileToGeneratorMap.has(change.path)) {
+ changedFileToGeneratorMap.set(change.path, new Set());
+ }
+ changedFileToGeneratorMap.get(change.path).add(result.generatorName);
+ }
+ }
+ const conflicts = [];
+ for (const generatorSet of changedFileToGeneratorMap.values()) {
+ if (generatorSet.size === 1) {
+ // no conflicts
+ continue;
+ }
+ if (conflicts.length === 0) {
+ // there are no conflicts yet, so we just add the first group
+ conflicts.push(new Set(generatorSet));
+ continue;
+ }
+ // identify if any of the current generator sets intersect with any of the
+ // existing conflict groups
+ const generatorsArray = Array.from(generatorSet);
+ const existingConflictGroup = conflicts.find((group) => generatorsArray.some((generator) => group.has(generator)));
+ if (existingConflictGroup) {
+ // there's an intersecting group, so we merge the two
+ for (const generator of generatorsArray) {
+ existingConflictGroup.add(generator);
+ }
+ }
+ else {
+ // there's no intersecting group, so we create a new one
+ conflicts.push(new Set(generatorsArray));
+ }
+ }
+ return conflicts.map((group) => Array.from(group));
+ }
  function collectAllRegisteredSyncGenerators(projectGraph) {
+ const nxJson = (0, nx_json_1.readNxJson)();
  const projectGraphHash = hashProjectGraph(projectGraph);
- if (storedProjectGraphHash !== projectGraphHash) {
+ const disabledTaskSyncGeneratorsHash = (0, file_hasher_1.hashArray)(nxJson.sync?.disabledTaskSyncGenerators?.sort() ?? []);
+ if (projectGraphHash !== storedProjectGraphHash ||
+ disabledTaskSyncGeneratorsHash !== storedDisabledTaskSyncGeneratorsHash) {
  storedProjectGraphHash = projectGraphHash;
+ storedDisabledTaskSyncGeneratorsHash = disabledTaskSyncGeneratorsHash;
  registeredTaskSyncGenerators =
- (0, sync_generators_1.collectRegisteredTaskSyncGenerators)(projectGraph);
+ (0, sync_generators_1.collectEnabledTaskSyncGeneratorsFromProjectGraph)(projectGraph, nxJson);
  }
  else {
  log('project graph hash is the same, not collecting task sync generators');
  }
- const nxJson = (0, nx_json_1.readNxJson)();
  const nxJsonHash = (0, file_hasher_1.hashArray)(nxJson.sync?.globalGenerators?.sort() ?? []);
  if (storedNxJsonHash !== nxJsonHash) {
  storedNxJsonHash = nxJsonHash;
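Note that the grouping above is transitive: any generator sets that share a member are merged into a single conflict group. A standalone illustration (assumed behavior, simplified data; not package code):

// 'a#sync' and 'b#sync' both touch libs/a/project.json, while 'b#sync' and
// 'c#sync' both touch tsconfig.base.json, so all three are merged into one
// group and re-run sequentially against a shared FsTree:
const sampleResults = [
  { generatorName: 'a#sync', changes: [{ path: 'libs/a/project.json' }] },
  { generatorName: 'b#sync', changes: [{ path: 'libs/a/project.json' }, { path: 'tsconfig.base.json' }] },
  { generatorName: 'c#sync', changes: [{ path: 'tsconfig.base.json' }] },
];
// expected: _getConflictingGeneratorGroups(sampleResults) === [['a#sync', 'b#sync', 'c#sync']]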
@@ -183,16 +312,15 @@ function collectAllRegisteredSyncGenerators(projectGraph) {
  }
  }
  }
- function runGenerator(generator, projects) {
+ function runGenerator(generator, projects, tree) {
  log('running scheduled generator', generator);
  // remove it from the scheduled set
  scheduledGenerators.delete(generator);
- const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
- // run the generator and cache the result
- syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+ tree ??= new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+ return (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
  log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
  return result;
- }));
+ });
  }
  function hashProjectGraph(projectGraph) {
  const stringifiedProjects = Object.entries(projectGraph.nodes)
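The new storedDisabledTaskSyncGeneratorsHash above tracks the sync.disabledTaskSyncGenerators setting (the nx-json.d.ts change in the file list adds it to NxJsonConfiguration), so editing that list invalidates the collected task sync generators. A hedged sketch of the corresponding nx.json shape, with an illustrative generator name:

// Sketch of the relevant nx.json fragment (the generator name is hypothetical):
const nxJsonSyncConfig = {
  sync: {
    globalGenerators: [],                            // hashed into storedNxJsonHash
    disabledTaskSyncGenerators: ['@my-plugin:sync'], // hashed into the new storedDisabledTaskSyncGeneratorsHash
    applyChanges: false,                             // referenced by the run-command.js change below
  },
};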
Binary file changed (package/src/native/nx.wasm32-wasi.wasm); contents not shown.
package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.d.ts
@@ -7,7 +7,8 @@ export interface ConnectToNxCloudOptions {
  hideFormatLogs?: boolean;
  github?: boolean;
  directory?: string;
+ generateToken?: boolean;
  }
- export declare function connectToNxCloud(tree: Tree, schema: ConnectToNxCloudOptions, nxJson?: NxJsonConfiguration<string[] | "*">): Promise<string>;
+ export declare function connectToNxCloud(tree: Tree, schema: ConnectToNxCloudOptions, nxJson?: NxJsonConfiguration<string[] | "*">): Promise<string | null>;
  declare function connectToNxCloudGenerator(tree: Tree, options: ConnectToNxCloudOptions): Promise<void>;
  export default connectToNxCloudGenerator;
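A hedged usage sketch of the updated signature (the caller and deep import path are illustrative; the option names come from the interface above, and installationSource is assumed from the implementation further down):

import type { Tree } from 'nx/src/generators/tree';
import { connectToNxCloud } from 'nx/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud';

async function connect(tree: Tree) {
  // With generateToken unset, a GitHub repo connected via `nx-connect` can now
  // resolve to null instead of an access token; passing generateToken: true
  // explicitly requests a token anyway.
  const tokenOrNxCloudId = await connectToNxCloud(tree, {
    installationSource: 'nx-connect',
    github: true,
    generateToken: true,
  });
  return tokenOrNxCloudId; // string | null as of this version
}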
package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js
@@ -113,30 +113,32 @@ async function connectToNxCloud(tree, schema, nxJson = (0, nx_json_1.readNxJson)
  printCloudConnectionDisabledMessage();
  return null;
  }
+ const isGitHubDetected = schema.github ?? (await (0, url_shorten_1.repoUsesGithub)(schema.github));
+ let responseFromCreateNxCloudWorkspaceV1;
+ let responseFromCreateNxCloudWorkspaceV2;
+ /**
+ * Do not create an Nx Cloud token if the user is using GitHub and
+ * is running `nx-connect` AND `token` is undefined (override)
+ */
+ if (!schema.generateToken &&
+ isGitHubDetected &&
+ schema.installationSource === 'nx-connect')
+ return null;
+ if (process.env.NX_ENABLE_LOGIN === 'true') {
+ responseFromCreateNxCloudWorkspaceV2 = await createNxCloudWorkspaceV2(getRootPackageName(tree), schema.installationSource, getNxInitDate());
+ addNxCloudIdToNxJson(tree, responseFromCreateNxCloudWorkspaceV2?.nxCloudId, schema.directory);
+ await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
+ silent: schema.hideFormatLogs,
+ });
+ return responseFromCreateNxCloudWorkspaceV2.nxCloudId;
+ }
  else {
- const usesGithub = schema.github ?? (await (0, url_shorten_1.repoUsesGithub)(schema.github));
- let responseFromCreateNxCloudWorkspaceV1;
- let responseFromCreateNxCloudWorkspaceV2;
- // do NOT create Nx Cloud token (createNxCloudWorkspace)
- // if user is using github and is running nx-connect
- if (!(usesGithub && schema.installationSource === 'nx-connect')) {
- if (process.env.NX_ENABLE_LOGIN === 'true') {
- responseFromCreateNxCloudWorkspaceV2 = await createNxCloudWorkspaceV2(getRootPackageName(tree), schema.installationSource, getNxInitDate());
- addNxCloudIdToNxJson(tree, responseFromCreateNxCloudWorkspaceV2?.nxCloudId, schema.directory);
- await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
- silent: schema.hideFormatLogs,
- });
- return responseFromCreateNxCloudWorkspaceV2.nxCloudId;
- }
- else {
- responseFromCreateNxCloudWorkspaceV1 = await createNxCloudWorkspaceV1(getRootPackageName(tree), schema.installationSource, getNxInitDate());
- addNxCloudOptionsToNxJson(tree, responseFromCreateNxCloudWorkspaceV1?.token, schema.directory);
- await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
- silent: schema.hideFormatLogs,
- });
- return responseFromCreateNxCloudWorkspaceV1.token;
- }
- }
+ responseFromCreateNxCloudWorkspaceV1 = await createNxCloudWorkspaceV1(getRootPackageName(tree), schema.installationSource, getNxInitDate());
+ addNxCloudOptionsToNxJson(tree, responseFromCreateNxCloudWorkspaceV1?.token, schema.directory);
+ await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
+ silent: schema.hideFormatLogs,
+ });
+ return responseFromCreateNxCloudWorkspaceV1.token;
  }
  }
  async function connectToNxCloudGenerator(tree, options) {
package/src/nx-cloud/generators/connect-to-nx-cloud/schema.json
@@ -21,6 +21,10 @@
  "description": "Hide formatting logs",
  "x-priority": "internal"
  },
+ "generateToken": {
+ "type": "boolean",
+ "description": "Explicitly asks for a token to be created, do not override existing tokens from Nx Cloud"
+ },
  "github": {
  "type": "boolean",
  "description": "If the user will be using GitHub as their git hosting provider",
package/src/nx-cloud/update-manager.d.ts
@@ -11,7 +11,7 @@ export interface NxCloudClient {
  configureLightClientRequire: () => (paths: string[]) => void;
  commands: Record<string, () => Promise<void>>;
  nxCloudTasksRunner: TasksRunner<CloudTaskRunnerOptions>;
- remoteCache: RemoteCacheV2;
+ getRemoteCache: () => RemoteCacheV2;
  }
  export declare function verifyOrUpdateNxCloudClient(options: CloudTaskRunnerOptions): Promise<{
  nxCloudClient: NxCloudClient;
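Consumers that previously read nxCloudClient.remoteCache now call the accessor instead; a hedged sketch of the feature-detection pattern, mirroring the cache.js change later in this diff:

import type { NxCloudClient } from 'nx/src/nx-cloud/update-manager';

// Prefer the new accessor; fall back for older Nx Cloud client bundles that
// still exposed `remoteCache` as a property (no longer part of the type).
function resolveRemoteCache(nxCloudClient: NxCloudClient) {
  if (typeof nxCloudClient.getRemoteCache === 'function') {
    return nxCloudClient.getRemoteCache();
  }
  return (nxCloudClient as any).remoteCache ?? null;
}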
package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js
@@ -18,6 +18,13 @@ const defaultNpmResolutionCache = new Map();
  const builtInModuleSet = new Set([
  ...node_module_1.builtinModules,
  ...node_module_1.builtinModules.map((x) => `node:${x}`),
+ // These are missing in the builtinModules list
+ // See: https://github.com/nodejs/node/issues/42785
+ // TODO(v20): We should be safe to use `isBuiltin` function instead of keep the set here (https://nodejs.org/api/module.html#moduleisbuiltinmodulename)
+ 'test',
+ 'node:test',
+ 'node:sea',
+ 'node:sqlite',
  ]);
  function isBuiltinModuleImport(importExpr) {
  const packageName = (0, get_package_name_from_import_path_1.getPackageNameFromImportPath)(importExpr);
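The TODO above refers to Node's module.isBuiltin() (added in Node 16.17/18.6), which already accounts for the prefix-only builtins that builtinModules omits; a minimal sketch of the eventual replacement:

import { isBuiltin } from 'node:module';

// isBuiltin() recognizes builtins that never appear in builtinModules:
isBuiltin('node:test');   // true
isBuiltin('fs');          // true
isBuiltin('node:sqlite'); // true on Node versions that ship node:sqlite
isBuiltin('lodash');      // false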
@@ -265,7 +272,7 @@ class TargetProjectLocator {
  // Resolve the main entry point of the package
  const pathOfFileInPackage = packageJsonPath ?? (0, resolve_relative_to_dir_1.resolveRelativeToDir)(packageName, relativeToDir);
  let dir = (0, node_path_1.dirname)(pathOfFileInPackage);
- while (dir !== (0, node_path_1.parse)(dir).root) {
+ while (dir !== (0, node_path_1.dirname)(dir)) {
  const packageJsonPath = (0, node_path_1.join)(dir, 'package.json');
  try {
  const parsedPackageJson = (0, fileutils_1.readJsonFile)(packageJsonPath);
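The new loop condition works because path.dirname() is a fixpoint at the top of a path; unlike parse(dir).root, this also terminates for relative paths (where root is the empty string), which is presumably the motivation for the change. A small illustration:

import { posix, win32 } from 'node:path';

// dirname() stops changing at the top of a path, absolute or relative:
posix.dirname('/');      // '/'
win32.dirname('C:\\');   // 'C:\\'
posix.dirname('.');      // '.'
// parse().root is '' for relative paths, so `dir !== parse(dir).root` would
// keep looping there, while `dir !== dirname(dir)` terminates:
posix.parse('.').root;   // ''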
package/src/project-graph/plugins/isolation/plugin-pool.js
@@ -273,7 +273,7 @@ async function startPluginWorker() {
  socket,
  });
  }
- else if (attempts > 1000) {
+ else if (attempts > 10000) {
  // daemon fails to start, the process probably exited
  // we print the logs and exit the client
  reject('Failed to start plugin worker.');
package/src/tasks-runner/cache.d.ts
@@ -10,6 +10,7 @@ export type TaskWithCachedResult = {
  task: Task;
  cachedResult: CachedResult;
  };
+ export declare function getCache(options: DefaultTasksRunnerOptions): DbCache | Cache;
  export declare class DbCache {
  private readonly options;
  private cache;
@@ -20,6 +21,7 @@ export declare class DbCache {
  nxCloudRemoteCache: RemoteCache;
  });
  get(task: Task): Promise<CachedResult | null>;
+ private applyRemoteCacheResults;
  put(task: Task, terminalOutput: string | null, outputs: string[], code: number): Promise<void>;
  copyFilesFromCache(_: string, cachedResult: CachedResult, outputs: string[]): Promise<void>;
  removeOldCacheRecords(): void;
package/src/tasks-runner/cache.js
@@ -1,6 +1,7 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.Cache = exports.DbCache = void 0;
+ exports.getCache = getCache;
  const workspace_root_1 = require("../utils/workspace-root");
  const fs_extra_1 = require("fs-extra");
  const path_1 = require("path");
@@ -15,6 +16,16 @@ const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
  const nx_json_1 = require("../config/nx-json");
  const update_manager_1 = require("../nx-cloud/update-manager");
  const get_cloud_options_1 = require("../nx-cloud/utilities/get-cloud-options");
+ function getCache(options) {
+ return process.env.NX_DB_CACHE === 'true'
+ ? new DbCache({
+ // Remove this in Nx 21
+ nxCloudRemoteCache: (0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())
+ ? options.remoteCache
+ : null,
+ })
+ : new Cache(options);
+ }
  class DbCache {
  async setup() {
  this.remoteCache = await this.getRemoteCache();
@@ -37,7 +48,7 @@ class DbCache {
  // attempt remote cache
  const res = await this.remoteCache.retrieve(task.hash, this.cache.cacheDirectory);
  if (res) {
- this.cache.applyRemoteCacheResults(task.hash, res);
+ this.applyRemoteCacheResults(task.hash, res);
  return {
  ...res,
  remote: true,
@@ -51,6 +62,9 @@ class DbCache {
  return null;
  }
  }
+ applyRemoteCacheResults(hash, res) {
+ return this.cache.applyRemoteCacheResults(hash, res);
+ }
  async put(task, terminalOutput, outputs, code) {
  return tryAndRetry(async () => {
  this.cache.put(task.hash, terminalOutput, outputs, code);
@@ -81,8 +95,8 @@ class DbCache {
  if ((0, nx_cloud_utils_1.isNxCloudUsed)(nxJson)) {
  const options = (0, get_cloud_options_1.getCloudOptions)();
  const { nxCloudClient } = await (0, update_manager_1.verifyOrUpdateNxCloudClient)(options);
- if (nxCloudClient.remoteCache) {
- return nxCloudClient.remoteCache;
+ if (nxCloudClient.getRemoteCache) {
+ return nxCloudClient.getRemoteCache();
  }
  else {
  // old nx cloud instance
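The new getCache factory centralizes the NX_DB_CACHE switch so callers no longer construct DbCache/Cache themselves (the task-orchestrator.js change below becomes a one-liner); a hedged usage sketch with an assumed options object:

import { getCache } from 'nx/src/tasks-runner/cache';
import type { DefaultTasksRunnerOptions } from 'nx/src/tasks-runner/default-tasks-runner';

// Returns DbCache when NX_DB_CACHE === 'true', otherwise the file-system Cache;
// both expose get/put/copyFilesFromCache for the orchestrator to use.
function createTaskCache(options: DefaultTasksRunnerOptions) {
  return getCache(options);
}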
package/src/tasks-runner/run-command.js
@@ -120,17 +120,11 @@ async function runCommand(projectsToRun, currentProjectGraph, { nxJson }, nxArgs
  }
  async function ensureWorkspaceIsInSyncAndGetGraphs(projectGraph, nxJson, projectNames, nxArgs, overrides, extraTargetDependencies, extraOptions) {
  let taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
- // collect unique syncGenerators from the tasks
- const uniqueSyncGenerators = new Set();
- for (const { target } of Object.values(taskGraph.tasks)) {
- const { syncGenerators } = projectGraph.nodes[target.project].data.targets[target.target];
- if (!syncGenerators) {
- continue;
- }
- for (const generator of syncGenerators) {
- uniqueSyncGenerators.add(generator);
- }
+ if (nxArgs.skipSync) {
+ return { projectGraph, taskGraph };
  }
+ // collect unique syncGenerators from the tasks
+ const uniqueSyncGenerators = (0, sync_generators_1.collectEnabledTaskSyncGeneratorsFromTaskGraph)(taskGraph, projectGraph, nxJson);
  if (!uniqueSyncGenerators.size) {
  // There are no sync generators registered in the tasks to run
  return { projectGraph, taskGraph };
@@ -143,7 +137,7 @@ async function ensureWorkspaceIsInSyncAndGetGraphs(projectGraph, nxJson, project
  }
  const outOfSyncTitle = 'The workspace is out of sync';
  const resultBodyLines = [...(0, sync_generators_1.syncGeneratorResultsToMessageLines)(results), ''];
- const fixMessage = 'You can manually run `nx sync` to update your workspace or you can set `sync.applyChanges` to `true` in your `nx.json` to apply the changes automatically when running tasks.';
+ const fixMessage = 'You can manually run `nx sync` to update your workspace or you can set `sync.applyChanges` to `true` in your `nx.json` to apply the changes automatically when running tasks in interactive environments.';
  const willErrorOnCiMessage = 'Please note that this will be an error on CI.';
  if ((0, is_ci_1.isCI)() || !process.stdout.isTTY) {
  // If the user is running in CI or is running in a non-TTY environment we
package/src/tasks-runner/task-orchestrator.js
@@ -15,8 +15,6 @@ const task_env_1 = require("./task-env");
  const workspace_root_1 = require("../utils/workspace-root");
  const output_1 = require("../utils/output");
  const params_1 = require("../utils/params");
- const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
- const nx_json_1 = require("../config/nx-json");
  class TaskOrchestrator {
  // endregion internal state
  constructor(hasher, initiatingProject, projectGraph, taskGraph, options, bail, daemon, outputStyle) {
@@ -28,14 +26,7 @@ class TaskOrchestrator {
  this.bail = bail;
  this.daemon = daemon;
  this.outputStyle = outputStyle;
- this.cache = process.env.NX_DB_CACHE === 'true'
- ? new cache_1.DbCache({
- // Remove this in Nx 21
- nxCloudRemoteCache: (0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())
- ? this.options.remoteCache
- : null,
- })
- : new cache_1.Cache(this.options);
+ this.cache = (0, cache_1.getCache)(this.options);
  this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
  this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
  // region internal state
package/src/utils/command-line-utils.d.ts
@@ -30,6 +30,7 @@ export interface NxArgs {
  type?: string;
  batch?: boolean;
  excludeTaskDependencies?: boolean;
+ skipSync?: boolean;
  }
  export declare function createOverrides(__overrides_unparsed__?: string[]): Record<string, any>;
  export declare function getBaseRef(nxJson: NxJsonConfiguration): string;
package/src/utils/git-utils.d.ts
@@ -1,6 +1,6 @@
- import { ExecSyncOptions } from 'child_process';
- export declare function cloneFromUpstream(url: string, destination: string, { originName }?: {
+ export declare function cloneFromUpstream(url: string, destination: string, { originName, depth }?: {
  originName: string;
+ depth?: number;
  }): Promise<GitRepository>;
  export declare class GitRepository {
  private directory;
@@ -8,12 +8,10 @@ export declare class GitRepository {
  constructor(directory: string);
  getGitRootPath(cwd: string): string;
  addFetchRemote(remoteName: string, branch: string): Promise<string>;
- private execAsync;
  showStat(): Promise<string>;
  listBranches(): Promise<string[]>;
  getGitFiles(path: string): Promise<string[]>;
  reset(ref: string): Promise<string>;
- squashLastTwoCommits(): Promise<string>;
  mergeUnrelatedHistories(ref: string, message: string): Promise<string>;
  fetch(remote: string, ref?: string): Promise<string>;
  checkout(branch: string, opts: {
@@ -25,14 +23,13 @@ export declare class GitRepository {
  commit(message: string): Promise<string>;
  amendCommit(): Promise<string>;
  deleteGitRemote(name: string): Promise<string>;
- deleteBranch(branch: string): Promise<string>;
  addGitRemote(name: string, url: string): Promise<string>;
+ hasFilterRepoInstalled(): Promise<boolean>;
+ filterRepo(subdirectory: string): Promise<string>;
+ filterBranch(subdirectory: string, branchName: string): Promise<string>;
+ private execAsync;
+ private quotePath;
  }
- /**
- * This is used by the squash editor script to update the rebase file.
- */
- export declare function updateRebaseFile(contents: string): string;
- export declare function fetchGitRemote(name: string, branch: string, execOptions: ExecSyncOptions): string | Buffer;
  /**
  * This is currently duplicated in Nx Console. Please let @MaxKless know if you make changes here.
  */
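To tie the reworked declarations together, a hedged sketch of an import-style flow (the control flow is illustrative and not the exact nx import implementation; the fallback branch name is an assumption):

import { cloneFromUpstream } from 'nx/src/utils/git-utils';

// Shallow-clone a source repository, then rewrite its history so only the
// requested subdirectory remains, preferring `git filter-repo` when installed.
async function importSubdirectory(url: string, destination: string, subdirectory: string) {
  const repo = await cloneFromUpstream(url, destination, { originName: 'origin', depth: 1 });
  if (await repo.hasFilterRepoInstalled()) {
    await repo.filterRepo(subdirectory);
  } else {
    await repo.filterBranch(subdirectory, 'main'); // branch name assumed
  }
  return repo;
}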