nx 19.6.3 → 19.6.5

Files changed (46)
  1. package/package.json +12 -12
  2. package/release/changelog-renderer/index.d.ts +1 -1
  3. package/release/changelog-renderer/index.js +46 -11
  4. package/schemas/nx-schema.json +5 -0
  5. package/src/command-line/graph/graph.js +35 -18
  6. package/src/command-line/import/command-object.js +4 -0
  7. package/src/command-line/import/import.d.ts +4 -0
  8. package/src/command-line/import/import.js +147 -12
  9. package/src/command-line/import/utils/prepare-source-repo.d.ts +1 -1
  10. package/src/command-line/import/utils/prepare-source-repo.js +31 -85
  11. package/src/command-line/release/changelog.js +52 -11
  12. package/src/command-line/release/command-object.d.ts +1 -0
  13. package/src/command-line/release/command-object.js +6 -1
  14. package/src/command-line/release/config/version-plans.d.ts +14 -1
  15. package/src/command-line/release/config/version-plans.js +33 -1
  16. package/src/command-line/release/index.d.ts +6 -4
  17. package/src/command-line/release/plan-check.js +8 -61
  18. package/src/command-line/release/plan.js +131 -37
  19. package/src/command-line/release/release.js +1 -1
  20. package/src/command-line/release/utils/get-touched-projects-for-group.d.ts +7 -0
  21. package/src/command-line/release/utils/get-touched-projects-for-group.js +78 -0
  22. package/src/command-line/release/utils/git.d.ts +1 -1
  23. package/src/command-line/release/utils/git.js +45 -18
  24. package/src/command-line/release/version.js +1 -1
  25. package/src/daemon/server/sync-generators.d.ts +4 -0
  26. package/src/daemon/server/sync-generators.js +172 -52
  27. package/src/executors/run-commands/run-commands.impl.js +8 -3
  28. package/src/hasher/node-task-hasher-impl.d.ts +1 -1
  29. package/src/hasher/node-task-hasher-impl.js +34 -16
  30. package/src/native/nx.wasm32-wasi.wasm +0 -0
  31. package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js +8 -1
  32. package/src/project-graph/plugins/isolation/plugin-pool.js +1 -1
  33. package/src/tasks-runner/default-tasks-runner.js +1 -1
  34. package/src/tasks-runner/init-tasks-runner.d.ts +2 -0
  35. package/src/tasks-runner/init-tasks-runner.js +1 -0
  36. package/src/tasks-runner/life-cycles/invoke-runner-terminal-output-life-cycle.d.ts +4 -6
  37. package/src/tasks-runner/life-cycles/invoke-runner-terminal-output-life-cycle.js +5 -0
  38. package/src/tasks-runner/task-env.d.ts +3 -3
  39. package/src/tasks-runner/task-env.js +3 -3
  40. package/src/tasks-runner/task-orchestrator.d.ts +2 -1
  41. package/src/tasks-runner/task-orchestrator.js +5 -2
  42. package/src/utils/git-utils.d.ts +7 -10
  43. package/src/utils/git-utils.js +61 -44
  44. package/src/utils/sync-generators.d.ts +2 -2
  45. package/src/utils/squash.d.ts +0 -1
  46. package/src/utils/squash.js +0 -12

package/src/daemon/server/sync-generators.js
@@ -4,6 +4,7 @@ exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
 exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
 exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
 exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+exports._getConflictingGeneratorGroups = _getConflictingGeneratorGroups;
 const nx_json_1 = require("../../config/nx-json");
 const tree_1 = require("../../generators/tree");
 const file_hasher_1 = require("../../hasher/file-hasher");
@@ -24,7 +25,6 @@ let storedNxJsonHash;
 const log = (...messageParts) => {
     logger_1.serverLogger.log('[SYNC]:', ...messageParts);
 };
-// TODO(leo): check conflicts and reuse the Tree where possible
 async function getCachedSyncGeneratorChanges(generators) {
     try {
         log('get sync generators changes on demand', generators);
@@ -36,51 +36,15 @@ async function getCachedSyncGeneratorChanges(generators) {
         }
         // reset the wait time
         waitPeriod = 100;
-        let projects;
-        let errored = false;
-        const getProjectsConfigurations = async () => {
-            if (projects || errored) {
-                return projects;
-            }
-            const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
-            projects = projectGraph
-                ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
-                : null;
-            errored = error !== undefined;
-            return projects;
-        };
-        return (await Promise.all(generators.map(async (generator) => {
-            if (scheduledGenerators.has(generator) ||
-                !syncGeneratorsCacheResultPromises.has(generator)) {
-                // it's scheduled to run (there are pending changes to process) or
-                // it's not scheduled and there's no cached result, so run it
-                const projects = await getProjectsConfigurations();
-                if (projects) {
-                    log(generator, 'already scheduled or not cached, running it now');
-                    runGenerator(generator, projects);
-                }
-                else {
-                    log(generator, 'already scheduled or not cached, project graph errored');
-                    /**
-                     * This should never happen. This is invoked imperatively, and by
-                     * the time it is invoked, the project graph would have already
-                     * been requested. If it errored, it would have been reported and
-                     * this wouldn't have been invoked. We handle it just in case.
-                     *
-                     * Since the project graph would be reported by the relevant
-                     * handlers separately, we just ignore the error, don't cache
-                     * any result and return an empty result, the next time this is
-                     * invoked the process will repeat until it eventually recovers
-                     * when the project graph is fixed.
-                     */
-                    return Promise.resolve({ changes: [], generatorName: generator });
-                }
-            }
-            else {
-                log(generator, 'not scheduled and has cached result, returning cached result');
-            }
-            return syncGeneratorsCacheResultPromises.get(generator);
-        }))).flat();
+        const results = await getFromCacheOrRunGenerators(generators);
+        const conflicts = _getConflictingGeneratorGroups(results);
+        if (!conflicts.length) {
+            // there are no conflicts
+            return results;
+        }
+        // there are conflicts, so we need to re-run the conflicting generators
+        // using the same tree
+        return await processConflictingGenerators(conflicts, results);
     }
     catch (e) {
         console.error(e);
@@ -126,7 +90,7 @@ function collectAndScheduleSyncGenerators(projectGraph) {
         }
         const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
         for (const generator of scheduledGenerators) {
-            runGenerator(generator, projects);
+            syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
         }
         await Promise.all(syncGeneratorsCacheResultPromises.values());
     }, waitPeriod);
@@ -143,6 +107,163 @@ async function getCachedRegisteredSyncGenerators() {
     }
     return [...registeredSyncGenerators];
 }
+async function getFromCacheOrRunGenerators(generators) {
+    let projects;
+    let errored = false;
+    const getProjectsConfigurations = async () => {
+        if (projects || errored) {
+            return projects;
+        }
+        const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+        projects = projectGraph
+            ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+            : null;
+        errored = error !== undefined;
+        return projects;
+    };
+    return (await Promise.all(generators.map(async (generator) => {
+        if (scheduledGenerators.has(generator) ||
+            !syncGeneratorsCacheResultPromises.has(generator)) {
+            // it's scheduled to run (there are pending changes to process) or
+            // it's not scheduled and there's no cached result, so run it
+            const projects = await getProjectsConfigurations();
+            if (projects) {
+                log(generator, 'already scheduled or not cached, running it now');
+                syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
+            }
+            else {
+                log(generator, 'already scheduled or not cached, project graph errored');
+                /**
+                 * This should never happen. This is invoked imperatively, and by
+                 * the time it is invoked, the project graph would have already
+                 * been requested. If it errored, it would have been reported and
+                 * this wouldn't have been invoked. We handle it just in case.
+                 *
+                 * Since the project graph would be reported by the relevant
+                 * handlers separately, we just ignore the error, don't cache
+                 * any result and return an empty result; the next time this is
+                 * invoked the process will repeat until it eventually recovers
+                 * when the project graph is fixed.
+                 */
+                return Promise.resolve({ changes: [], generatorName: generator });
+            }
+        }
+        else {
+            log(generator, 'not scheduled and has cached result, returning cached result');
+        }
+        return syncGeneratorsCacheResultPromises.get(generator);
+    }))).flat();
+}
+async function runConflictingGenerators(tree, generators) {
+    const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+    const projects = projectGraph
+        ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+        : null;
+    if (!projects) {
+        /**
+         * This should never happen. This is invoked imperatively, and by
+         * the time it is invoked, the project graph would have already
+         * been requested. If it errored, it would have been reported and
+         * this wouldn't have been invoked. We handle it just in case.
+         *
+         * Since the project graph would be reported by the relevant
+         * handlers separately, we just ignore the error.
+         */
+        return generators.map((generator) => ({
+            changes: [],
+            generatorName: generator,
+        }));
+    }
+    // we need to run conflicting generators sequentially because they use the same tree
+    const results = [];
+    for (const generator of generators) {
+        log(generator, 'running it now');
+        results.push(await runGenerator(generator, projects, tree));
+    }
+    return results;
+}
+async function processConflictingGenerators(conflicts, initialResults) {
+    const conflictRunResults = (await Promise.all(conflicts.map((generators) => {
+        const [firstGenerator, ...generatorsToRun] = generators;
+        // it must exist because the conflicts were identified from the initial results
+        const firstGeneratorResult = initialResults.find((r) => r.generatorName === firstGenerator);
+        const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generators ${generators.join(',')}`);
+        // pre-apply the changes from the first generator to avoid running it
+        for (const change of firstGeneratorResult.changes) {
+            if (change.type === 'CREATE' || change.type === 'UPDATE') {
+                tree.write(change.path, change.content, change.options);
+            }
+            else if (change.type === 'DELETE') {
+                tree.delete(change.path);
+            }
+        }
+        /**
+         * We don't cache the results of conflicting generators because they
+         * use the same tree, so some files might contain results from multiple
+         * generators and we don't have guarantees that the same combination of
+         * generators will run together.
+         */
+        return runConflictingGenerators(tree, generatorsToRun);
+    }))).flat();
+    /**
+     * The order of the results from the re-run generators is important because
+     * the last result from a group of conflicting generators will contain the
+     * changes from the previous conflicting generators. So, instead of replacing
+     * in-place the initial results, we first add the results from the re-run
+     * generators, and then add the initial results that were not from a
+     * conflicting generator.
+     */
+    const results = [...conflictRunResults];
+    for (const result of initialResults) {
+        if (conflictRunResults.every((r) => r.generatorName !== result.generatorName)) {
+            // this result is not from a conflicting generator, so we add it to the
+            // results
+            results.push(result);
+        }
+    }
+    return results;
+}
+/**
+ * @internal
+ */
+function _getConflictingGeneratorGroups(results) {
+    const changedFileToGeneratorMap = new Map();
+    for (const result of results) {
+        for (const change of result.changes) {
+            if (!changedFileToGeneratorMap.has(change.path)) {
+                changedFileToGeneratorMap.set(change.path, new Set());
+            }
+            changedFileToGeneratorMap.get(change.path).add(result.generatorName);
+        }
+    }
+    const conflicts = [];
+    for (const generatorSet of changedFileToGeneratorMap.values()) {
+        if (generatorSet.size === 1) {
+            // no conflicts
+            continue;
+        }
+        if (conflicts.length === 0) {
+            // there are no conflicts yet, so we just add the first group
+            conflicts.push(new Set(generatorSet));
+            continue;
+        }
+        // identify if any of the current generator sets intersect with any of the
+        // existing conflict groups
+        const generatorsArray = Array.from(generatorSet);
+        const existingConflictGroup = conflicts.find((group) => generatorsArray.some((generator) => group.has(generator)));
+        if (existingConflictGroup) {
+            // there's an intersecting group, so we merge the two
+            for (const generator of generatorsArray) {
+                existingConflictGroup.add(generator);
+            }
+        }
+        else {
+            // there's no intersecting group, so we create a new one
+            conflicts.push(new Set(generatorsArray));
+        }
+    }
+    return conflicts.map((group) => Array.from(group));
+}
 function collectAllRegisteredSyncGenerators(projectGraph) {
     const projectGraphHash = hashProjectGraph(projectGraph);
     if (storedProjectGraphHash !== projectGraphHash) {
@@ -183,16 +304,15 @@ function collectAllRegisteredSyncGenerators(projectGraph) {
         }
     }
 }
-function runGenerator(generator, projects) {
+function runGenerator(generator, projects, tree) {
     log('running scheduled generator', generator);
     // remove it from the scheduled set
    scheduledGenerators.delete(generator);
-    const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
-    // run the generator and cache the result
-    syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+    tree ??= new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+    return (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
         log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
         return result;
-    }));
+    });
 }
 function hashProjectGraph(projectGraph) {
     const stringifiedProjects = Object.entries(projectGraph.nodes)
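
Note on the new conflict handling: `_getConflictingGeneratorGroups` computes connected components over "touched the same file": generators that share a changed path land in one group, and a new conflicting set is merged into the first existing group it intersects. A minimal standalone sketch of the same grouping idea (the `GeneratorResult` shape is assumed from the fields used above, not an exported type):

    // Sketch of the grouping idea, not the shipped code.
    interface GeneratorResult {
      generatorName: string;
      changes: { path: string }[];
    }

    function conflictingGroups(results: GeneratorResult[]): string[][] {
      // map each changed file to the generators that touched it
      const fileToGenerators = new Map<string, Set<string>>();
      for (const { generatorName, changes } of results) {
        for (const { path } of changes) {
          if (!fileToGenerators.has(path)) fileToGenerators.set(path, new Set());
          fileToGenerators.get(path)!.add(generatorName);
        }
      }
      const groups: Set<string>[] = [];
      for (const generators of fileToGenerators.values()) {
        if (generators.size === 1) continue; // a single writer is not a conflict
        // merge into the first group that shares a generator, else start a new one
        const existing = groups.find((g) => [...generators].some((gen) => g.has(gen)));
        if (existing) for (const gen of generators) existing.add(gen);
        else groups.push(new Set(generators));
      }
      return groups.map((g) => [...g]);
    }

Each resulting group is then re-run sequentially on one shared FsTree, with the first generator's changes pre-applied, which is also why those results are not cached.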

package/src/executors/run-commands/run-commands.impl.js
@@ -16,7 +16,7 @@ let pseudoTerminal;
 const childProcesses = new Set();
 function loadEnvVarsFile(path, env = {}) {
     (0, task_env_1.unloadDotEnvFile)(path, env);
-    const result = (0, task_env_1.loadAndExpandDotEnvFile)(path, env);
+    const result = (0, task_env_1.loadAndExpandDotEnvFile)(path, env, true);
     if (result.error) {
         throw result.error;
     }
@@ -293,14 +293,19 @@ function calculateCwd(cwd, context) {
 }
 function processEnv(color, cwd, env, envFile) {
     const localEnv = (0, npm_run_path_1.env)({ cwd: cwd ?? process.cwd() });
-    const res = {
+    let res = {
         ...process.env,
         ...localEnv,
-        ...env,
     };
+    // env file from envFile option takes priority over process env
     if (process.env.NX_LOAD_DOT_ENV_FILES !== 'false') {
         loadEnvVars(envFile, res);
     }
+    // env variables from the env option take priority over everything else
+    res = {
+        ...res,
+        ...env,
+    };
     // need to override PATH to make sure we are using the local node_modules
     if (localEnv.PATH)
         res.PATH = localEnv.PATH; // UNIX-like
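
Note: the reordering changes the effective precedence in `processEnv` to, from lowest to highest: `process.env`, the npm-run-path local env, variables loaded from `envFile`, and finally the explicit `env` option. A worked sketch of the merge order (hypothetical values; `loadEnvVars` is the internal helper used above):

    // Suppose process.env.FOO = 'from-process', the .env file sets FOO=from-envfile,
    // and the run-commands `env` option is { FOO: 'from-option' }.
    let res: NodeJS.ProcessEnv = { ...process.env }; // FOO === 'from-process'
    loadEnvVars('.env', res);                        // FOO === 'from-envfile' (file overrides process)
    res = { ...res, FOO: 'from-option' };            // FOO === 'from-option' (env option wins)

Previously the `env` option was spread in before the env file was loaded, so the file could clobber explicitly configured values.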

package/src/hasher/node-task-hasher-impl.d.ts
@@ -38,7 +38,7 @@ export declare class NodeTaskHasherImpl implements TaskHasherImpl {
     private findExternalDependencyNodeName;
     private hashSingleProjectInputs;
     private hashProjectInputs;
-    private hashRootFileset;
+    private hashRootFilesets;
     private hashProjectConfig;
     private hashTsConfig;
     private hashProjectFileset;

package/src/hasher/node-task-hasher-impl.js
@@ -299,10 +299,10 @@ class NodeTaskHasherImpl {
             this.hashProjectFileset(projectName, projectFilesets),
             this.hashProjectConfig(projectName),
             this.hashTsConfig(projectName),
-            ...[
-                ...workspaceFilesets,
-                ...this.legacyFilesetInputs.map((r) => r.fileset),
-            ].map((fileset) => this.hashRootFileset(fileset)),
+            ...(workspaceFilesets.length
+                ? [this.hashRootFilesets(workspaceFilesets)]
+                : []),
+            this.hashRootFilesets(this.legacyFilesetInputs.map((r) => r.fileset)),
             ...[...notFilesets, ...this.legacyRuntimeInputs].map((r) => r['runtime']
                 ? this.hashRuntime(env, r['runtime'])
                 : this.hashEnv(env, r['env'])),
@@ -320,22 +320,40 @@ class NodeTaskHasherImpl {
     }
         return Promise.all(partialHashes).then((hashes) => hashes.flat());
     }
-    async hashRootFileset(fileset) {
-        const mapKey = fileset;
-        const withoutWorkspaceRoot = fileset.substring(16);
+    async hashRootFilesets(filesets) {
+        const mapKey = `workspace:[${filesets.join(',')}]`;
         if (!this.filesetHashes[mapKey]) {
             this.filesetHashes[mapKey] = new Promise(async (res) => {
                 const parts = [];
-                const matchingFile = this.allWorkspaceFiles.find((t) => t.file === withoutWorkspaceRoot);
-                if (matchingFile) {
-                    parts.push(matchingFile.hash);
+                const negativePatterns = [];
+                const positivePatterns = [];
+                for (const fileset of filesets) {
+                    if (fileset.startsWith('!')) {
+                        negativePatterns.push(fileset.substring(17));
+                    }
+                    else {
+                        positivePatterns.push(fileset.substring(16));
+                    }
                 }
-                else {
-                    this.allWorkspaceFiles
-                        .filter((f) => (0, minimatch_1.minimatch)(f.file, withoutWorkspaceRoot))
-                        .forEach((f) => {
-                        parts.push(f.hash);
-                    });
+                for (const fileset of positivePatterns) {
+                    const withoutWorkspaceRoot = fileset;
+                    // Used to short-circuit minimatch when it isn't necessary
+                    const matchingFile = this.allWorkspaceFiles.find((t) => t.file === withoutWorkspaceRoot);
+                    // shortcut because there is a direct match
+                    if (matchingFile) {
+                        if (!negativePatterns.some((p) => (0, minimatch_1.minimatch)(matchingFile.file, p))) {
+                            parts.push(matchingFile.hash);
+                        }
+                        // No direct match, check if pattern matched
+                    }
+                    else {
+                        this.allWorkspaceFiles
+                            .filter((f) => (0, minimatch_1.minimatch)(f.file, withoutWorkspaceRoot) &&
+                            !negativePatterns.some((p) => (0, minimatch_1.minimatch)(f.file, p)))
+                            .forEach((f) => {
+                            parts.push(f.hash);
+                        });
+                    }
                 }
                 const value = (0, file_hasher_1.hashArray)(parts);
                 res({
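
Note: `hashRootFilesets` now hashes a batch of root filesets under a single cache key and supports negative patterns: `{workspaceRoot}/` (16 characters) is stripped from positive globs, `!{workspaceRoot}/` (17 characters) from negative ones, and a file's hash is included only when it matches a positive pattern and no negative pattern. A standalone sketch of that selection, assuming the minimatch package (the shipped method additionally memoizes per `mapKey` and hashes the collected parts):

    import { minimatch } from 'minimatch';

    function selectRootFilesetHashes(
      filesets: string[], // e.g. ['*.md', '!README.md'] after the root prefix is stripped
      allWorkspaceFiles: { file: string; hash: string }[]
    ): string[] {
      const positive = filesets.filter((f) => !f.startsWith('!'));
      const negative = filesets.filter((f) => f.startsWith('!')).map((f) => f.substring(1));
      const parts: string[] = [];
      for (const pattern of positive) {
        for (const f of allWorkspaceFiles) {
          // an exact path match short-circuits minimatch, mirroring the change above
          const matches = f.file === pattern || minimatch(f.file, pattern);
          if (matches && !negative.some((p) => minimatch(f.file, p))) {
            parts.push(f.hash);
          }
        }
      }
      return parts;
    }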

package/src/native/nx.wasm32-wasi.wasm: Binary file (no text diff shown)

package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js
@@ -18,6 +18,13 @@ const defaultNpmResolutionCache = new Map();
 const builtInModuleSet = new Set([
     ...node_module_1.builtinModules,
     ...node_module_1.builtinModules.map((x) => `node:${x}`),
+    // These are missing in the builtinModules list
+    // See: https://github.com/nodejs/node/issues/42785
+    // TODO(v20): We should be safe to use the `isBuiltin` function instead of keeping the set here (https://nodejs.org/api/module.html#moduleisbuiltinmodulename)
+    'test',
+    'node:test',
+    'node:sea',
+    'node:sqlite',
 ]);
 function isBuiltinModuleImport(importExpr) {
     const packageName = (0, get_package_name_from_import_path_1.getPackageNameFromImportPath)(importExpr);
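
Note: the TODO points at Node's own check, which already covers these entries on versions that ship them; a quick sanity check, assuming Node ≥ 18.6 where `isBuiltin` is available:

    import { isBuiltin } from 'node:module';

    isBuiltin('fs');        // true
    isBuiltin('node:test'); // true, even though builtinModules omits 'test'
    isBuiltin('lodash');    // false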
@@ -265,7 +272,7 @@ class TargetProjectLocator {
         // Resolve the main entry point of the package
         const pathOfFileInPackage = packageJsonPath ?? (0, resolve_relative_to_dir_1.resolveRelativeToDir)(packageName, relativeToDir);
         let dir = (0, node_path_1.dirname)(pathOfFileInPackage);
-        while (dir !== (0, node_path_1.parse)(dir).root) {
+        while (dir !== (0, node_path_1.dirname)(dir)) {
             const packageJsonPath = (0, node_path_1.join)(dir, 'package.json');
             try {
                 const parsedPackageJson = (0, fileutils_1.readJsonFile)(packageJsonPath);
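
Note: the new loop condition is a platform-neutral root test: for any filesystem root, `dirname(root)` returns the root itself, so the walk stops without comparing against `parse(dir).root`, which can disagree with the traversed string on some Windows inputs (for example drive-letter casing). A small sketch of the stopping rule:

    import { dirname } from 'node:path';

    // Walk from a directory up to the filesystem root, using the same
    // stopping rule as the changed while-loop (no package.json lookups here).
    function* ancestors(dir: string): Generator<string> {
      while (dir !== dirname(dir)) {
        yield dir;
        dir = dirname(dir);
      }
      yield dir; // the root itself
    }

    // [...ancestors('/a/b/c')] -> ['/a/b/c', '/a/b', '/a', '/']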

package/src/project-graph/plugins/isolation/plugin-pool.js
@@ -273,7 +273,7 @@ async function startPluginWorker() {
             socket,
         });
     }
-    else if (attempts > 1000) {
+    else if (attempts > 10000) {
         // daemon fails to start, the process probably exited
         // we print the logs and exit the client
         reject('Failed to start plugin worker.');

package/src/tasks-runner/default-tasks-runner.js
@@ -23,7 +23,7 @@ const defaultTasksRunner = async (tasks, options, context) => {
 };
 exports.defaultTasksRunner = defaultTasksRunner;
 async function runAllTasks(tasks, options, context) {
-    const orchestrator = new task_orchestrator_1.TaskOrchestrator(context.hasher, context.initiatingProject, context.projectGraph, context.taskGraph, options, context.nxArgs?.nxBail, context.daemon);
+    const orchestrator = new task_orchestrator_1.TaskOrchestrator(context.hasher, context.initiatingProject, context.projectGraph, context.taskGraph, options, context.nxArgs?.nxBail, context.daemon, context.nxArgs?.outputStyle);
     return orchestrator.run();
 }
 exports.default = exports.defaultTasksRunner;

package/src/tasks-runner/init-tasks-runner.d.ts
@@ -1,5 +1,6 @@
 import { NxArgs } from '../utils/command-line-utils';
 import { Task, TaskGraph } from '../config/task-graph';
+import { TaskResult } from './life-cycle';
 export declare function initTasksRunner(nxArgs: NxArgs): Promise<{
     invoke: (opts: {
         tasks: Task[];
@@ -7,5 +8,6 @@ export declare function initTasksRunner(nxArgs: NxArgs): Promise<{
     }) => Promise<{
         status: number;
         taskGraph: TaskGraph;
+        taskResults: Record<string, TaskResult>;
     }>;
 }>;

package/src/tasks-runner/init-tasks-runner.js
@@ -52,6 +52,7 @@ async function initTasksRunner(nxArgs) {
             return {
                 status,
                 taskGraph,
+                taskResults: lifeCycle.getTaskResults(),
             };
         },
     };
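
Note: callers of `initTasksRunner` can now read per-task outcomes rather than only the aggregate status. A hedged usage sketch (construction of `nxArgs` and `tasks` is elided; any other required `invoke` options, such as a parallelism setting, are assumed configured):

    // Assumes nxArgs and tasks were built elsewhere; shapes per the d.ts above.
    const runner = await initTasksRunner(nxArgs);
    const { status, taskResults } = await runner.invoke({ tasks });
    for (const [taskId, result] of Object.entries(taskResults)) {
      console.log(taskId, result.status); // e.g. 'myapp:build success'
    }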

package/src/tasks-runner/life-cycles/invoke-runner-terminal-output-life-cycle.d.ts
@@ -1,17 +1,15 @@
 import { TaskStatus } from '../tasks-runner';
-import type { LifeCycle } from '../life-cycle';
+import type { LifeCycle, TaskResult } from '../life-cycle';
 import { Task } from '../../config/task-graph';
 export declare class InvokeRunnerTerminalOutputLifeCycle implements LifeCycle {
     private readonly tasks;
     failedTasks: Task[];
     cachedTasks: Task[];
+    private taskResults;
     constructor(tasks: Task[]);
     startCommand(): void;
     endCommand(): void;
-    endTasks(taskResults: {
-        task: Task;
-        status: TaskStatus;
-        code: number;
-    }[]): void;
+    endTasks(taskResults: TaskResult[]): void;
     printTaskTerminalOutput(task: Task, cacheStatus: TaskStatus, terminalOutput: string): void;
+    getTaskResults(): Record<string, TaskResult>;
 }

package/src/tasks-runner/life-cycles/invoke-runner-terminal-output-life-cycle.js
@@ -8,6 +8,7 @@ class InvokeRunnerTerminalOutputLifeCycle {
         this.tasks = tasks;
         this.failedTasks = [];
         this.cachedTasks = [];
+        this.taskResults = {};
     }
     startCommand() {
         output_1.output.log({
@@ -45,6 +46,7 @@ class InvokeRunnerTerminalOutputLifeCycle {
     }
     endTasks(taskResults) {
         for (let t of taskResults) {
+            this.taskResults[t.task.id] = t;
             if (t.status === 'failure') {
                 this.failedTasks.push(t.task);
             }
@@ -63,5 +65,8 @@ class InvokeRunnerTerminalOutputLifeCycle {
         const args = (0, utils_1.getPrintableCommandArgsForTask)(task);
         output_1.output.logCommandOutput(args.join(' '), cacheStatus, terminalOutput);
     }
+    getTaskResults() {
+        return this.taskResults;
+    }
 }
 exports.InvokeRunnerTerminalOutputLifeCycle = InvokeRunnerTerminalOutputLifeCycle;

package/src/tasks-runner/task-env.d.ts
@@ -7,9 +7,9 @@ export declare function getEnvVariablesForTask(task: Task, taskSpecificEnv: Node
 };
 /**
  * This function loads a .env file and expands the variables in it.
- * It is going to override existing environmentVariables.
- * @param filename
- * @param environmentVariables
+ * @param filename the .env file to load
+ * @param environmentVariables the object to load environment variables into
+ * @param override whether to override existing environment variables
  */
 export declare function loadAndExpandDotEnvFile(filename: string, environmentVariables: NodeJS.ProcessEnv, override?: boolean): import("dotenv-expand").DotenvExpandOutput;
 /**

package/src/tasks-runner/task-env.js
@@ -79,9 +79,9 @@ function getNxEnvVariablesForTask(task, forceColor, skipNxCache, captureStderr,
 }
 /**
  * This function loads a .env file and expands the variables in it.
- * It is going to override existing environmentVariables.
- * @param filename
- * @param environmentVariables
+ * @param filename the .env file to load
+ * @param environmentVariables the object to load environment variables into
+ * @param override whether to override existing environment variables
  */
 function loadAndExpandDotEnvFile(filename, environmentVariables, override = false) {
     const myEnv = (0, dotenv_1.config)({
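
Note: the `override` flag is forwarded to dotenv, which decides whether file values replace variables already present in the target env object; `run-commands` now passes `true` for its `envFile` option (see the `loadEnvVarsFile` change above). A minimal sketch of the distinction, assuming dotenv v16.1+ (for the `processEnv` option):

    import { config } from 'dotenv';

    const target: NodeJS.ProcessEnv = { FOO: 'existing' };
    // override: false (the default) keeps pre-existing keys:
    config({ path: '.env', processEnv: target, override: false }); // FOO stays 'existing'
    // override: true lets the .env file win:
    config({ path: '.env', processEnv: target, override: true }); // FOO becomes the file's value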

package/src/tasks-runner/task-orchestrator.d.ts
@@ -12,6 +12,7 @@ export declare class TaskOrchestrator {
     private readonly options;
     private readonly bail;
     private readonly daemon;
+    private readonly outputStyle;
     private cache;
     private forkedProcessTaskRunner;
     private tasksSchedule;
@@ -23,7 +24,7 @@ export declare class TaskOrchestrator {
     private waitingForTasks;
     private groups;
     private bailed;
-    constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, options: DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient);
+    constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, options: DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient, outputStyle: string);
     run(): Promise<{
         [id: string]: TaskStatus;
     }>;

package/src/tasks-runner/task-orchestrator.js
@@ -17,7 +17,7 @@ const output_1 = require("../utils/output");
 const params_1 = require("../utils/params");
 class TaskOrchestrator {
     // endregion internal state
-    constructor(hasher, initiatingProject, projectGraph, taskGraph, options, bail, daemon) {
+    constructor(hasher, initiatingProject, projectGraph, taskGraph, options, bail, daemon, outputStyle) {
         this.hasher = hasher;
         this.initiatingProject = initiatingProject;
         this.projectGraph = projectGraph;
@@ -25,6 +25,7 @@ class TaskOrchestrator {
         this.options = options;
         this.bail = bail;
         this.daemon = daemon;
+        this.outputStyle = outputStyle;
         this.cache = new cache_1.Cache(this.options);
         this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
         this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
@@ -200,7 +201,9 @@ class TaskOrchestrator {
         const pipeOutput = await this.pipeOutputCapture(task);
         // obtain metadata
         const temporaryOutputPath = this.cache.temporaryOutputPath(task);
-        const streamOutput = (0, utils_1.shouldStreamOutput)(task, this.initiatingProject);
+        const streamOutput = this.outputStyle === 'static'
+            ? false
+            : (0, utils_1.shouldStreamOutput)(task, this.initiatingProject);
         let env = pipeOutput
             ? (0, task_env_1.getEnvVariablesForTask)(task, taskSpecificEnv, process.env.FORCE_COLOR === undefined
                 ? 'true'

package/src/utils/git-utils.d.ts
@@ -1,6 +1,6 @@
-import { ExecSyncOptions } from 'child_process';
-export declare function cloneFromUpstream(url: string, destination: string, { originName }?: {
+export declare function cloneFromUpstream(url: string, destination: string, { originName, depth }?: {
     originName: string;
+    depth?: number;
 }): Promise<GitRepository>;
 export declare class GitRepository {
     private directory;
@@ -8,12 +8,10 @@ export declare class GitRepository {
     constructor(directory: string);
     getGitRootPath(cwd: string): string;
     addFetchRemote(remoteName: string, branch: string): Promise<string>;
-    private execAsync;
     showStat(): Promise<string>;
     listBranches(): Promise<string[]>;
     getGitFiles(path: string): Promise<string[]>;
     reset(ref: string): Promise<string>;
-    squashLastTwoCommits(): Promise<string>;
     mergeUnrelatedHistories(ref: string, message: string): Promise<string>;
     fetch(remote: string, ref?: string): Promise<string>;
     checkout(branch: string, opts: {
@@ -25,14 +23,13 @@ export declare class GitRepository {
     commit(message: string): Promise<string>;
     amendCommit(): Promise<string>;
     deleteGitRemote(name: string): Promise<string>;
-    deleteBranch(branch: string): Promise<string>;
     addGitRemote(name: string, url: string): Promise<string>;
+    hasFilterRepoInstalled(): Promise<boolean>;
+    filterRepo(subdirectory: string): Promise<string>;
+    filterBranch(subdirectory: string, branchName: string): Promise<string>;
+    private execAsync;
+    private quotePath;
 }
-/**
- * This is used by the squash editor script to update the rebase file.
- */
-export declare function updateRebaseFile(contents: string): string;
-export declare function fetchGitRemote(name: string, branch: string, execOptions: ExecSyncOptions): string | Buffer;
 /**
  * This is currently duplicated in Nx Console. Please let @MaxKless know if you make changes here.
  */
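
Note: the new `GitRepository` members suggest that `nx import` extracts a subdirectory's history with `git filter-repo` when it is installed and falls back to `git filter-branch` otherwise (which would also explain dropping the squash/rebase helpers above). A hedged sketch of how a caller might combine them; the method semantics are assumed from their names, not confirmed by this diff:

    // Hypothetical caller; assumes filterRepo/filterBranch wrap
    // `git filter-repo --subdirectory-filter <dir>` and
    // `git filter-branch --subdirectory-filter <dir> <branch>`.
    async function extractSubdirectory(repo: GitRepository, subdir: string, branch: string) {
      if (await repo.hasFilterRepoInstalled()) {
        await repo.filterRepo(subdir);
      } else {
        await repo.filterBranch(subdir, branch);
      }
    }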