nx 19.6.3 → 19.6.4
- package/package.json +12 -12
- package/src/command-line/graph/graph.js +35 -18
- package/src/command-line/release/index.d.ts +6 -4
- package/src/executors/run-commands/run-commands.impl.js +8 -3
- package/src/hasher/node-task-hasher-impl.d.ts +1 -1
- package/src/hasher/node-task-hasher-impl.js +34 -16
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js +8 -1
- package/src/tasks-runner/default-tasks-runner.js +1 -1
- package/src/tasks-runner/init-tasks-runner.d.ts +2 -0
- package/src/tasks-runner/init-tasks-runner.js +1 -0
- package/src/tasks-runner/life-cycles/invoke-runner-terminal-output-life-cycle.d.ts +4 -6
- package/src/tasks-runner/life-cycles/invoke-runner-terminal-output-life-cycle.js +5 -0
- package/src/tasks-runner/task-env.d.ts +3 -3
- package/src/tasks-runner/task-env.js +3 -3
- package/src/tasks-runner/task-orchestrator.d.ts +2 -1
- package/src/tasks-runner/task-orchestrator.js +5 -2
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "nx",
-  "version": "19.6.3",
+  "version": "19.6.4",
   "private": false,
   "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
   "repository": {
@@ -71,7 +71,7 @@
     "yargs-parser": "21.1.1",
     "node-machine-id": "1.1.12",
     "ora": "5.3.0",
-    "@nrwl/tao": "19.6.3"
+    "@nrwl/tao": "19.6.4"
   },
   "peerDependencies": {
     "@swc-node/register": "^1.8.0",
@@ -86,16 +86,16 @@
     }
   },
   "optionalDependencies": {
-    "@nx/nx-darwin-x64": "19.6.3",
-    "@nx/nx-darwin-arm64": "19.6.3",
-    "@nx/nx-linux-x64-gnu": "19.6.3",
-    "@nx/nx-linux-x64-musl": "19.6.3",
-    "@nx/nx-win32-x64-msvc": "19.6.3",
-    "@nx/nx-linux-arm64-gnu": "19.6.3",
-    "@nx/nx-linux-arm64-musl": "19.6.3",
-    "@nx/nx-linux-arm-gnueabihf": "19.6.3",
-    "@nx/nx-win32-arm64-msvc": "19.6.3",
-    "@nx/nx-freebsd-x64": "19.6.3"
+    "@nx/nx-darwin-x64": "19.6.4",
+    "@nx/nx-darwin-arm64": "19.6.4",
+    "@nx/nx-linux-x64-gnu": "19.6.4",
+    "@nx/nx-linux-x64-musl": "19.6.4",
+    "@nx/nx-win32-x64-msvc": "19.6.4",
+    "@nx/nx-linux-arm64-gnu": "19.6.4",
+    "@nx/nx-linux-arm64-musl": "19.6.4",
+    "@nx/nx-linux-arm-gnueabihf": "19.6.4",
+    "@nx/nx-win32-arm64-msvc": "19.6.4",
+    "@nx/nx-freebsd-x64": "19.6.4"
   },
   "nx-migrations": {
     "migrations": "./migrations.json",

package/src/command-line/graph/graph.js
CHANGED
@@ -675,8 +675,10 @@ function expandInputs(inputs, project, allWorkspaceFiles, depGraphClientResponse
     const externalInputs = [];
     const otherInputs = [];
     inputs.forEach((input) => {
-
-
+        // grouped workspace inputs look like workspace:[pattern,otherPattern]
+        if (input.startsWith('workspace:[')) {
+            const inputs = input.substring(11, input.length - 1).split(',');
+            workspaceRootInputs.push(...inputs);
             return;
         }
         const maybeProjectName = input.split(':')[0];
@@ -696,22 +698,7 @@ function expandInputs(inputs, project, allWorkspaceFiles, depGraphClientResponse
             return;
         }
     });
-    const workspaceRootsExpanded = workspaceRootInputs
-        const matches = [];
-        const withoutWorkspaceRoot = input.substring(16);
-        const matchingFile = allWorkspaceFiles.find((t) => t.file === withoutWorkspaceRoot);
-        if (matchingFile) {
-            matches.push(matchingFile.file);
-        }
-        else {
-            allWorkspaceFiles
-                .filter((f) => (0, minimatch_1.minimatch)(f.file, withoutWorkspaceRoot))
-                .forEach((f) => {
-                matches.push(f.file);
-            });
-        }
-        return matches;
-    });
+    const workspaceRootsExpanded = getExpandedWorkspaceRoots(workspaceRootInputs, allWorkspaceFiles);
     const otherInputsExpanded = otherInputs.map((input) => {
         if (input === 'TsConfig') {
             return (0, path_1.relative)(workspace_root_1.workspaceRoot, (0, typescript_1.getRootTsConfigPath)());
@@ -744,6 +731,36 @@ function expandInputs(inputs, project, allWorkspaceFiles, depGraphClientResponse
         external: externalInputs,
     };
 }
+function getExpandedWorkspaceRoots(workspaceRootInputs, allWorkspaceFiles) {
+    const workspaceRootsExpanded = [];
+    const negativeWRPatterns = [];
+    const positiveWRPatterns = [];
+    for (const fileset of workspaceRootInputs) {
+        if (fileset.startsWith('!')) {
+            negativeWRPatterns.push(fileset.substring(17));
+        }
+        else {
+            positiveWRPatterns.push(fileset.substring(16));
+        }
+    }
+    for (const pattern of positiveWRPatterns) {
+        const matchingFile = allWorkspaceFiles.find((t) => t.file === pattern);
+        if (matchingFile &&
+            !negativeWRPatterns.some((p) => (0, minimatch_1.minimatch)(matchingFile.file, p))) {
+            workspaceRootsExpanded.push(matchingFile.file);
+        }
+        else {
+            allWorkspaceFiles
+                .filter((f) => (0, minimatch_1.minimatch)(f.file, pattern) &&
+                !negativeWRPatterns.some((p) => (0, minimatch_1.minimatch)(f.file, p)))
+                .forEach((f) => {
+                workspaceRootsExpanded.push(f.file);
+            });
+        }
+    }
+    workspaceRootsExpanded.sort();
+    return workspaceRootsExpanded;
+}
 async function createJsonOutput(prunedGraph, rawGraph, projects, targets) {
     const response = {
         graph: prunedGraph,
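The new `getExpandedWorkspaceRoots` helper is what the grouped `workspace:[...]` inputs feed into. A rough, self-contained sketch of the same idea follows (the helper name and shapes are illustrative, not the package's API): a group is split on commas, `{workspaceRoot}/` prefixes (16 characters) mark positive patterns, `!{workspaceRoot}/` prefixes (17 characters) mark negative ones, and a workspace file survives only if it matches a positive pattern and no negative one.

```ts
import { minimatch } from 'minimatch';

function expandWorkspaceRootGroup(group: string, workspaceFiles: string[]): string[] {
  // "workspace:[a,b,!c]" -> ["a", "b", "!c"]
  const patterns = group.substring('workspace:['.length, group.length - 1).split(',');
  const positive = patterns
    .filter((p) => !p.startsWith('!'))
    .map((p) => p.substring('{workspaceRoot}/'.length));
  const negative = patterns
    .filter((p) => p.startsWith('!'))
    .map((p) => p.substring('!{workspaceRoot}/'.length));
  // keep files that hit a positive pattern (direct match or glob) and no negative pattern
  return workspaceFiles
    .filter((f) => positive.some((p) => f === p || minimatch(f, p)))
    .filter((f) => !negative.some((p) => minimatch(f, p)))
    .sort();
}

// expandWorkspaceRootGroup(
//   'workspace:[{workspaceRoot}/docs/**,!{workspaceRoot}/docs/draft.md]',
//   ['docs/a.md', 'docs/draft.md', 'src/index.ts']
// ) -> ['docs/a.md']
```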

package/src/command-line/release/index.d.ts
CHANGED
@@ -10,19 +10,21 @@ export declare class ReleaseClient {
     release: (args: import("./command-object").ReleaseOptions) => Promise<import("./version").NxReleaseVersionResult | number>;
     constructor(overrideReleaseConfig: NxReleaseConfiguration);
 }
+declare const defaultClient: ReleaseClient;
 /**
  * @public
  */
-export declare const releaseChangelog:
+export declare const releaseChangelog: typeof defaultClient.releaseChangelog;
 /**
  * @public
  */
-export declare const releasePublish:
+export declare const releasePublish: typeof defaultClient.releasePublish;
 /**
  * @public
  */
-export declare const releaseVersion:
+export declare const releaseVersion: typeof defaultClient.releaseVersion;
 /**
  * @public
  */
-export declare const release:
+export declare const release: typeof defaultClient.release;
+export {};
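These declarations now re-expose the methods of a shared `defaultClient` instance. For context, a hedged example of the programmatic release API the typings describe, based on the commonly documented `nx/release` entry point (the option names follow the public docs and may differ between versions):

```ts
import { releaseChangelog, releasePublish, releaseVersion } from 'nx/release';

async function runRelease(dryRun: boolean) {
  // bump versions first, then generate changelogs from the resulting version data
  const { workspaceVersion, projectsVersionData } = await releaseVersion({ dryRun });
  await releaseChangelog({
    version: workspaceVersion,
    versionData: projectsVersionData,
    dryRun,
  });
  // publish last and hand back its status
  const publishStatus = await releasePublish({ dryRun });
  return publishStatus;
}
```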

package/src/executors/run-commands/run-commands.impl.js
CHANGED
@@ -16,7 +16,7 @@ let pseudoTerminal;
 const childProcesses = new Set();
 function loadEnvVarsFile(path, env = {}) {
     (0, task_env_1.unloadDotEnvFile)(path, env);
-    const result = (0, task_env_1.loadAndExpandDotEnvFile)(path, env);
+    const result = (0, task_env_1.loadAndExpandDotEnvFile)(path, env, true);
     if (result.error) {
         throw result.error;
     }
@@ -293,14 +293,19 @@ function calculateCwd(cwd, context) {
 }
 function processEnv(color, cwd, env, envFile) {
     const localEnv = (0, npm_run_path_1.env)({ cwd: cwd ?? process.cwd() });
-    const res = {
+    let res = {
         ...process.env,
         ...localEnv,
-        ...env,
     };
+    // env file from envFile option takes priority over process env
     if (process.env.NX_LOAD_DOT_ENV_FILES !== 'false') {
         loadEnvVars(envFile, res);
     }
+    // env variables from env option takes priority over everything else
+    res = {
+        ...res,
+        ...env,
+    };
     // need to override PATH to make sure we are using the local node_modules
     if (localEnv.PATH)
         res.PATH = localEnv.PATH; // UNIX-like
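The `run-commands` change reorders precedence so the executor's `env` option is applied after the `.env` file rather than before it. A minimal sketch of that ordering (an assumed helper, not the executor's real export):

```ts
// Values start from process.env plus the local node_modules PATH, are overlaid by
// the envFile contents (unless NX_LOAD_DOT_ENV_FILES === 'false'), and the
// executor's `env` option wins last.
function buildCommandEnv(
  base: NodeJS.ProcessEnv,
  envFileVars: Record<string, string>,
  envOption: Record<string, string>
): NodeJS.ProcessEnv {
  let res: NodeJS.ProcessEnv = { ...base };
  if (process.env.NX_LOAD_DOT_ENV_FILES !== 'false') {
    res = { ...res, ...envFileVars }; // .env file overrides inherited values
  }
  return { ...res, ...envOption }; // `env` option overrides everything else
}
```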

package/src/hasher/node-task-hasher-impl.d.ts
CHANGED
@@ -38,7 +38,7 @@ export declare class NodeTaskHasherImpl implements TaskHasherImpl {
     private findExternalDependencyNodeName;
     private hashSingleProjectInputs;
     private hashProjectInputs;
-    private
+    private hashRootFilesets;
     private hashProjectConfig;
     private hashTsConfig;
     private hashProjectFileset;

package/src/hasher/node-task-hasher-impl.js
CHANGED
@@ -299,10 +299,10 @@ class NodeTaskHasherImpl {
             this.hashProjectFileset(projectName, projectFilesets),
             this.hashProjectConfig(projectName),
             this.hashTsConfig(projectName),
-            ...
-
-
-
+            ...(workspaceFilesets.length
+                ? [this.hashRootFilesets(workspaceFilesets)]
+                : []),
+            this.hashRootFilesets(this.legacyFilesetInputs.map((r) => r.fileset)),
             ...[...notFilesets, ...this.legacyRuntimeInputs].map((r) => r['runtime']
                 ? this.hashRuntime(env, r['runtime'])
                 : this.hashEnv(env, r['env'])),
@@ -320,22 +320,40 @@ class NodeTaskHasherImpl {
        }
        return Promise.all(partialHashes).then((hashes) => hashes.flat());
    }
-    async
-    const mapKey =
-    const withoutWorkspaceRoot = fileset.substring(16);
+    async hashRootFilesets(filesets) {
+        const mapKey = `workspace:[${filesets.join(',')}]`;
         if (!this.filesetHashes[mapKey]) {
             this.filesetHashes[mapKey] = new Promise(async (res) => {
                 const parts = [];
-                const
-
-
+                const negativePatterns = [];
+                const positivePatterns = [];
+                for (const fileset of filesets) {
+                    if (fileset.startsWith('!')) {
+                        negativePatterns.push(fileset.substring(17));
+                    }
+                    else {
+                        positivePatterns.push(fileset.substring(16));
+                    }
                 }
-
-
-
-
-
-
+                for (const fileset of positivePatterns) {
+                    const withoutWorkspaceRoot = fileset;
+                    // Used to shortcut minimatch if not necessary
+                    const matchingFile = this.allWorkspaceFiles.find((t) => t.file === withoutWorkspaceRoot);
+                    // shortcut because there is a direct match
+                    if (matchingFile) {
+                        if (!negativePatterns.some((p) => (0, minimatch_1.minimatch)(matchingFile.file, p))) {
+                            parts.push(matchingFile.hash);
+                        }
+                        // No direct match, check if pattern matched
+                    }
+                    else {
+                        this.allWorkspaceFiles
+                            .filter((f) => (0, minimatch_1.minimatch)(f.file, withoutWorkspaceRoot) &&
+                            !negativePatterns.some((p) => (0, minimatch_1.minimatch)(f.file, p)))
+                            .forEach((f) => {
+                            parts.push(f.hash);
+                        });
+                    }
                 }
                 const value = (0, file_hasher_1.hashArray)(parts);
                 res({
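`hashRootFilesets` now hashes a whole group of root filesets at once and caches the resulting promise under the same `workspace:[...]` key the graph uses for grouped inputs. An illustrative sketch of that memoization pattern (simplified; the real method filters file hashes by the positive and negative patterns shown above):

```ts
import { createHash } from 'node:crypto';

const filesetHashes = new Map<string, Promise<string>>();

// Memoize one hash per *group* of root filesets, keyed by the grouped-input string.
function hashRootFilesets(
  filesets: string[],
  fileHashes: Map<string, string> // file path -> content hash
): Promise<string> {
  const mapKey = `workspace:[${filesets.join(',')}]`;
  let cached = filesetHashes.get(mapKey);
  if (!cached) {
    cached = Promise.resolve().then(() => {
      // the real method keeps only hashes of files matching the patterns; this sketch hashes all
      const parts = [...fileHashes.values()];
      return createHash('sha256').update(parts.join(',')).digest('hex');
    });
    filesetHashes.set(mapKey, cached);
  }
  return cached;
}
```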

package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file

package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js
CHANGED
@@ -18,6 +18,13 @@ const defaultNpmResolutionCache = new Map();
 const builtInModuleSet = new Set([
     ...node_module_1.builtinModules,
     ...node_module_1.builtinModules.map((x) => `node:${x}`),
+    // These are missing in the builtinModules list
+    // See: https://github.com/nodejs/node/issues/42785
+    // TODO(v20): We should be safe to use `isBuiltin` function instead of keep the set here (https://nodejs.org/api/module.html#moduleisbuiltinmodulename)
+    'test',
+    'node:test',
+    'node:sea',
+    'node:sqlite',
 ]);
 function isBuiltinModuleImport(importExpr) {
     const packageName = (0, get_package_name_from_import_path_1.getPackageNameFromImportPath)(importExpr);
@@ -265,7 +272,7 @@ class TargetProjectLocator {
         // Resolve the main entry point of the package
         const pathOfFileInPackage = packageJsonPath ?? (0, resolve_relative_to_dir_1.resolveRelativeToDir)(packageName, relativeToDir);
         let dir = (0, node_path_1.dirname)(pathOfFileInPackage);
-        while (dir !== (0, node_path_1.
+        while (dir !== (0, node_path_1.dirname)(dir)) {
             const packageJsonPath = (0, node_path_1.join)(dir, 'package.json');
             try {
                 const parsedPackageJson = (0, fileutils_1.readJsonFile)(packageJsonPath);
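Two independent fixes here: the built-in module set gains entries that `node:module`'s `builtinModules` list omits, and the package.json lookup loop now stops cleanly at the filesystem root via `dir !== dirname(dir)`. The TODO in the first hunk refers to `module.isBuiltin` (available since Node 18.6 / 16.17), which already recognizes these prefix-only built-ins without a hand-maintained set:

```ts
import { isBuiltin } from 'node:module';

// Recognizes prefix-only built-ins such as node:test without a custom Set.
console.log(isBuiltin('node:test')); // true
console.log(isBuiltin('lodash'));    // false
```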

package/src/tasks-runner/default-tasks-runner.js
CHANGED
@@ -23,7 +23,7 @@ const defaultTasksRunner = async (tasks, options, context) => {
 };
 exports.defaultTasksRunner = defaultTasksRunner;
 async function runAllTasks(tasks, options, context) {
-    const orchestrator = new task_orchestrator_1.TaskOrchestrator(context.hasher, context.initiatingProject, context.projectGraph, context.taskGraph, options, context.nxArgs?.nxBail, context.daemon);
+    const orchestrator = new task_orchestrator_1.TaskOrchestrator(context.hasher, context.initiatingProject, context.projectGraph, context.taskGraph, options, context.nxArgs?.nxBail, context.daemon, context.nxArgs?.outputStyle);
     return orchestrator.run();
 }
 exports.default = exports.defaultTasksRunner;

package/src/tasks-runner/init-tasks-runner.d.ts
CHANGED
@@ -1,5 +1,6 @@
 import { NxArgs } from '../utils/command-line-utils';
 import { Task, TaskGraph } from '../config/task-graph';
+import { TaskResult } from './life-cycle';
 export declare function initTasksRunner(nxArgs: NxArgs): Promise<{
     invoke: (opts: {
         tasks: Task[];
@@ -7,5 +8,6 @@ export declare function initTasksRunner(nxArgs: NxArgs): Promise<{
     }) => Promise<{
         status: number;
         taskGraph: TaskGraph;
+        taskResults: Record<string, TaskResult>;
     }>;
 }>;

package/src/tasks-runner/life-cycles/invoke-runner-terminal-output-life-cycle.d.ts
CHANGED
@@ -1,17 +1,15 @@
 import { TaskStatus } from '../tasks-runner';
-import type { LifeCycle } from '../life-cycle';
+import type { LifeCycle, TaskResult } from '../life-cycle';
 import { Task } from '../../config/task-graph';
 export declare class InvokeRunnerTerminalOutputLifeCycle implements LifeCycle {
     private readonly tasks;
     failedTasks: Task[];
     cachedTasks: Task[];
+    private taskResults;
     constructor(tasks: Task[]);
     startCommand(): void;
     endCommand(): void;
-    endTasks(taskResults: {
-        task: Task;
-        status: TaskStatus;
-        code: number;
-    }[]): void;
+    endTasks(taskResults: TaskResult[]): void;
     printTaskTerminalOutput(task: Task, cacheStatus: TaskStatus, terminalOutput: string): void;
+    getTaskResults(): Record<string, TaskResult>;
 }

package/src/tasks-runner/life-cycles/invoke-runner-terminal-output-life-cycle.js
CHANGED
@@ -8,6 +8,7 @@ class InvokeRunnerTerminalOutputLifeCycle {
         this.tasks = tasks;
         this.failedTasks = [];
         this.cachedTasks = [];
+        this.taskResults = {};
     }
     startCommand() {
         output_1.output.log({
@@ -45,6 +46,7 @@ class InvokeRunnerTerminalOutputLifeCycle {
     }
     endTasks(taskResults) {
         for (let t of taskResults) {
+            this.taskResults[t.task.id] = t;
             if (t.status === 'failure') {
                 this.failedTasks.push(t.task);
             }
@@ -63,5 +65,8 @@ class InvokeRunnerTerminalOutputLifeCycle {
         const args = (0, utils_1.getPrintableCommandArgsForTask)(task);
         output_1.output.logCommandOutput(args.join(' '), cacheStatus, terminalOutput);
     }
+    getTaskResults() {
+        return this.taskResults;
+    }
 }
 exports.InvokeRunnerTerminalOutputLifeCycle = InvokeRunnerTerminalOutputLifeCycle;
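Together with the `init-tasks-runner` typing change, the life cycle now records every `TaskResult` by task id so `invoke` can return them. A hedged sketch of that bookkeeping pattern (the real `TaskResult` type lives in `life-cycle.ts`; a structural stand-in is used here):

```ts
// Structural stand-in for the TaskResult type referenced by the declarations above.
interface SketchTaskResult {
  task: { id: string };
  status: string;
  code: number;
}

class TaskResultCollector {
  private taskResults: Record<string, SketchTaskResult> = {};

  endTasks(taskResults: SketchTaskResult[]): void {
    for (const t of taskResults) {
      // index results by task id, which is what invoke() now hands back to callers
      this.taskResults[t.task.id] = t;
    }
  }

  getTaskResults(): Record<string, SketchTaskResult> {
    return this.taskResults;
  }
}
```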

package/src/tasks-runner/task-env.d.ts
CHANGED
@@ -7,9 +7,9 @@ export declare function getEnvVariablesForTask(task: Task, taskSpecificEnv: Node
 };
 /**
  * This function loads a .env file and expands the variables in it.
- *
- * @param
- * @param
+ * @param filename the .env file to load
+ * @param environmentVariables the object to load environment variables into
+ * @param override whether to override existing environment variables
  */
 export declare function loadAndExpandDotEnvFile(filename: string, environmentVariables: NodeJS.ProcessEnv, override?: boolean): import("dotenv-expand").DotenvExpandOutput;
 /**

package/src/tasks-runner/task-env.js
CHANGED
@@ -79,9 +79,9 @@ function getNxEnvVariablesForTask(task, forceColor, skipNxCache, captureStderr,
 }
 /**
  * This function loads a .env file and expands the variables in it.
- *
- * @param
- * @param
+ * @param filename the .env file to load
+ * @param environmentVariables the object to load environment variables into
+ * @param override whether to override existing environment variables
  */
 function loadAndExpandDotEnvFile(filename, environmentVariables, override = false) {
     const myEnv = (0, dotenv_1.config)({
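`loadAndExpandDotEnvFile` documents its `override` parameter, which `run-commands` now passes as `true` so values from the selected `envFile` win over already-set variables. A rough equivalent built directly on `dotenv` and `dotenv-expand` (a sketch of the documented behaviour, not the internal implementation):

```ts
import { config } from 'dotenv';
import { expand } from 'dotenv-expand';

// Load a .env file into process.env, optionally overriding variables that are
// already set, then expand ${VAR} references in the loaded values.
function loadDotEnvFile(filename: string, override = false) {
  const result = config({ path: filename, override });
  return expand(result);
}
```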

package/src/tasks-runner/task-orchestrator.d.ts
CHANGED
@@ -12,6 +12,7 @@ export declare class TaskOrchestrator {
     private readonly options;
     private readonly bail;
     private readonly daemon;
+    private readonly outputStyle;
     private cache;
     private forkedProcessTaskRunner;
     private tasksSchedule;
@@ -23,7 +24,7 @@ export declare class TaskOrchestrator {
     private waitingForTasks;
     private groups;
     private bailed;
-    constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, options: DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient);
+    constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, options: DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient, outputStyle: string);
     run(): Promise<{
         [id: string]: TaskStatus;
     }>;

package/src/tasks-runner/task-orchestrator.js
CHANGED
@@ -17,7 +17,7 @@ const output_1 = require("../utils/output");
 const params_1 = require("../utils/params");
 class TaskOrchestrator {
     // endregion internal state
-    constructor(hasher, initiatingProject, projectGraph, taskGraph, options, bail, daemon) {
+    constructor(hasher, initiatingProject, projectGraph, taskGraph, options, bail, daemon, outputStyle) {
         this.hasher = hasher;
         this.initiatingProject = initiatingProject;
         this.projectGraph = projectGraph;
@@ -25,6 +25,7 @@ class TaskOrchestrator {
         this.options = options;
         this.bail = bail;
         this.daemon = daemon;
+        this.outputStyle = outputStyle;
         this.cache = new cache_1.Cache(this.options);
         this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);
         this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);
@@ -200,7 +201,9 @@ class TaskOrchestrator {
         const pipeOutput = await this.pipeOutputCapture(task);
         // obtain metadata
         const temporaryOutputPath = this.cache.temporaryOutputPath(task);
-        const streamOutput =
+        const streamOutput = this.outputStyle === 'static'
+            ? false
+            : (0, utils_1.shouldStreamOutput)(task, this.initiatingProject);
         let env = pipeOutput
             ? (0, task_env_1.getEnvVariablesForTask)(task, taskSpecificEnv, process.env.FORCE_COLOR === undefined
                 ? 'true'