nx 19.3.0-beta.0 → 19.3.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/post-install.js +1 -3
- package/migrations.json +12 -0
- package/package.json +12 -12
- package/src/command-line/init/implementation/dot-nx/add-nx-scripts.js +38 -13
- package/src/command-line/init/implementation/utils.js +15 -2
- package/src/command-line/release/command-object.js +1 -1
- package/src/command-line/release/utils/git.d.ts +2 -2
- package/src/command-line/release/utils/git.js +19 -11
- package/src/command-line/release/version.d.ts +1 -1
- package/src/command-line/release/version.js +1 -1
- package/src/command-line/yargs-utils/shared-options.d.ts +1 -1
- package/src/core/graph/main.js +1 -1
- package/src/daemon/client/client.js +0 -2
- package/src/executors/run-commands/run-commands.impl.d.ts +6 -2
- package/src/executors/run-commands/run-commands.impl.js +52 -25
- package/src/executors/run-commands/schema.json +5 -2
- package/src/generators/utils/project-configuration.js +1 -1
- package/src/hasher/file-hasher.js +1 -1
- package/src/migrations/update-19-2-4/set-project-name.d.ts +2 -0
- package/src/migrations/update-19-2-4/set-project-name.js +34 -0
- package/src/plugins/js/package-json/create-package-json.js +3 -0
- package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js +9 -4
- package/src/plugins/package-json-workspaces/create-nodes.d.ts +2 -2
- package/src/plugins/package-json-workspaces/create-nodes.js +34 -11
- package/src/plugins/project-json/build-nodes/project-json.d.ts +1 -0
- package/src/plugins/project-json/build-nodes/project-json.js +16 -4
- package/src/project-graph/error-types.js +10 -0
- package/src/project-graph/file-utils.js +1 -1
- package/src/project-graph/plugins/internal-api.js +1 -1
- package/src/project-graph/utils/normalize-project-nodes.js +24 -6
- package/src/project-graph/utils/project-configuration-utils.js +9 -1
- package/src/tasks-runner/create-task-graph.js +32 -33
- package/src/utils/ab-testing.d.ts +1 -1
- package/src/utils/find-matching-projects.js +13 -1
- package/src/utils/package-json.d.ts +1 -0

package/src/daemon/client/client.js

@@ -19,7 +19,6 @@ const nx_json_1 = require("../../config/nx-json");
 const daemon_socket_messenger_1 = require("./daemon-socket-messenger");
 const cache_1 = require("../cache");
 const error_types_1 = require("../../project-graph/error-types");
-const dotenv_1 = require("../../utils/dotenv");
 const get_nx_workspace_files_1 = require("../message-types/get-nx-workspace-files");
 const get_context_file_data_1 = require("../message-types/get-context-file-data");
 const get_files_in_directory_1 = require("../message-types/get-files-in-directory");
@@ -41,7 +40,6 @@ class DaemonClient {
         this._daemonReady = null;
         this._out = null;
         this._err = null;
-        (0, dotenv_1.loadRootEnvFiles)(workspace_root_1.workspaceRoot);
         try {
             this.nxJson = (0, configuration_1.readNxJson)();
         }

package/src/executors/run-commands/run-commands.impl.d.ts

@@ -19,7 +19,7 @@ export interface RunCommandsOptions extends Json {
     } | string)[];
     color?: boolean;
     parallel?: boolean;
-    readyWhen?: string;
+    readyWhen?: string | string[];
     cwd?: string;
     env?: Record<string, string>;
     forwardAllArgs?: boolean;
@@ -42,9 +42,13 @@ export interface NormalizedRunCommandsOptions extends RunCommandsOptions {
         [k: string]: any;
     };
     unparsedCommandArgs?: {
-        [k: string]: string;
+        [k: string]: string | string[];
     };
     args?: string;
+    readyWhenStatus: {
+        stringToMatch: string;
+        found: boolean;
+    }[];
 }
 export default function (options: RunCommandsOptions, context: ExecutorContext): Promise<{
     success: boolean;

package/src/executors/run-commands/run-commands.impl.js

@@ -29,7 +29,9 @@ const propKeys = [
     'command',
     'commands',
     'color',
+    'no-color',
     'parallel',
+    'no-parallel',
     'readyWhen',
     'cwd',
     'args',
@@ -48,7 +50,7 @@ async function default_1(options, context) {
         await loadEnvVars(options.envFile);
     }
     const normalized = normalizeOptions(options);
-    if (
+    if (normalized.readyWhenStatus.length && !normalized.parallel) {
         throw new Error('ERROR: Bad executor config for run-commands - "readyWhen" can only be used when "parallel=true".');
     }
     if (options.commands.find((c) => c.prefix || c.color || c.bgColor) &&
@@ -70,12 +72,12 @@ async function default_1(options, context) {
 }
 exports.default = default_1;
 async function runInParallel(options, context) {
-    const procs = options.commands.map((c) => createProcess(null, c, options.
+    const procs = options.commands.map((c) => createProcess(null, c, options.readyWhenStatus, options.color, calculateCwd(options.cwd, context), options.env ?? {}, true, options.usePty, options.streamOutput, options.tty).then((result) => ({
         result,
         command: c.command,
     })));
     let terminalOutput = '';
-    if (options.
+    if (options.readyWhenStatus.length) {
         const r = await Promise.race(procs);
         terminalOutput += r.result.terminalOutput;
         if (!r.result.success) {
@@ -116,9 +118,21 @@ async function runInParallel(options, context) {
     }
 }
 function normalizeOptions(options) {
+    if (options.readyWhen && typeof options.readyWhen === 'string') {
+        options.readyWhenStatus = [
+            { stringToMatch: options.readyWhen, found: false },
+        ];
+    }
+    else {
+        options.readyWhenStatus =
+            options.readyWhen?.map((stringToMatch) => ({
+                stringToMatch,
+                found: false,
+            })) ?? [];
+    }
     if (options.command) {
         options.commands = [{ command: options.command }];
-        options.parallel =
+        options.parallel = options.readyWhenStatus?.length > 0;
     }
     else {
         options.commands = options.commands.map((c) => typeof c === 'string' ? { command: c } : c);
@@ -131,6 +145,7 @@ function normalizeOptions(options) {
             'parse-numbers': false,
             'parse-positional-numbers': false,
             'dot-notation': false,
+            'camel-case-expansion': false,
         },
     });
     options.unknownOptions = Object.keys(options)
@@ -147,7 +162,7 @@ async function runSerially(options, context) {
     pseudoTerminal ??= pseudo_terminal_1.PseudoTerminal.isSupported() ? (0, pseudo_terminal_1.getPseudoTerminal)() : null;
     let terminalOutput = '';
     for (const c of options.commands) {
-        const result = await createProcess(pseudoTerminal, c,
+        const result = await createProcess(pseudoTerminal, c, [], options.color, calculateCwd(options.cwd, context), options.env ?? {}, false, options.usePty, options.streamOutput, options.tty);
         terminalOutput += result.terminalOutput;
         if (!result.success) {
             const output = `Warning: command "${c.command}" exited with non-zero status code`;
@@ -160,13 +175,14 @@ async function runSerially(options, context) {
     }
     return { success: true, terminalOutput };
 }
-async function createProcess(pseudoTerminal, commandConfig,
+async function createProcess(pseudoTerminal, commandConfig, readyWhenStatus = [], color, cwd, env, isParallel, usePty = true, streamOutput = true, tty) {
     env = processEnv(color, cwd, env);
     // The rust runCommand is always a tty, so it will not look nice in parallel and if we need prefixes
     // currently does not work properly in windows
     if (pseudoTerminal &&
         process.env.NX_NATIVE_COMMAND_RUNNER !== 'false' &&
         !commandConfig.prefix &&
+        readyWhenStatus.length === 0 &&
         !isParallel &&
         usePty) {
         let terminalOutput = chalk.dim('> ') + commandConfig.command + '\r\n\r\n';
@@ -183,9 +199,6 @@ async function createProcess(pseudoTerminal, commandConfig, readyWhen, color, cw
     return new Promise((res) => {
         cp.onOutput((output) => {
             terminalOutput += output;
-            if (readyWhen && output.indexOf(readyWhen) > -1) {
-                res({ success: true, terminalOutput });
-            }
         });
         cp.onExit((code) => {
             if (code >= 128) {
@@ -197,9 +210,9 @@ async function createProcess(pseudoTerminal, commandConfig, readyWhen, color, cw
             });
         });
     }
-    return nodeProcess(commandConfig, cwd, env,
+    return nodeProcess(commandConfig, cwd, env, readyWhenStatus, streamOutput);
 }
-function nodeProcess(commandConfig, cwd, env,
+function nodeProcess(commandConfig, cwd, env, readyWhenStatus, streamOutput = true) {
     let terminalOutput = chalk.dim('> ') + commandConfig.command + '\r\n\r\n';
     if (streamOutput) {
         process.stdout.write(terminalOutput);
@@ -217,7 +230,7 @@ function nodeProcess(commandConfig, cwd, env, readyWhen, streamOutput = true) {
         if (streamOutput) {
             process.stdout.write(output);
         }
-        if (
+        if (readyWhenStatus.length && isReady(readyWhenStatus, data.toString())) {
            res({ success: true, terminalOutput });
         }
     });
@@ -227,7 +240,7 @@ function nodeProcess(commandConfig, cwd, env, readyWhen, streamOutput = true) {
         if (streamOutput) {
             process.stderr.write(output);
         }
-        if (
+        if (readyWhenStatus.length && isReady(readyWhenStatus, err.toString())) {
            res({ success: true, terminalOutput });
         }
     });
@@ -241,7 +254,7 @@ function nodeProcess(commandConfig, cwd, env, readyWhen, streamOutput = true) {
     });
     childProcess.on('exit', (code) => {
         childProcesses.delete(childProcess);
-        if (!
+        if (!readyWhenStatus.length || isReady(readyWhenStatus)) {
            res({ success: code === 0, terminalOutput });
         }
     });
@@ -294,20 +307,21 @@ function interpolateArgsIntoCommand(command, opts, forwardAllArgs) {
     else if (forwardAllArgs) {
         let args = '';
         if (Object.keys(opts.unknownOptions ?? {}).length > 0) {
-
-
-
-
-
-
-
-
+            const unknownOptionsArgs = Object.keys(opts.unknownOptions)
+                .filter((k) => typeof opts.unknownOptions[k] !== 'object' &&
+                opts.parsedArgs[k] === opts.unknownOptions[k])
+                .map((k) => `--${k}=${opts.unknownOptions[k]}`)
+                .map(wrapArgIntoQuotesIfNeeded)
+                .join(' ');
+            if (unknownOptionsArgs) {
+                args += ` ${unknownOptionsArgs}`;
+            }
         }
         if (opts.args) {
             args += ` ${opts.args}`;
         }
         if (opts.__unparsed__?.length > 0) {
-            const filterdParsedOptions = filterPropKeysFromUnParsedOptions(opts.__unparsed__, opts.
+            const filterdParsedOptions = filterPropKeysFromUnParsedOptions(opts.__unparsed__, opts.parsedArgs);
             if (filterdParsedOptions.length > 0) {
                 args += ` ${filterdParsedOptions
                     .map(wrapArgIntoQuotesIfNeeded)
@@ -335,13 +349,14 @@ function parseArgs(unparsedCommandArgs, unknownOptions, args) {
  * @param unparsedCommandArgs e.g. { prop1: 'value1', prop2: 'value2', args: 'test'}
  * @returns filtered options that are not part of the propKeys array e.g. ['--prop1', 'value1', '--prop2=value2']
  */
-function filterPropKeysFromUnParsedOptions(__unparsed__,
+function filterPropKeysFromUnParsedOptions(__unparsed__, parseArgs = {}) {
     const parsedOptions = [];
     for (let index = 0; index < __unparsed__.length; index++) {
         const element = __unparsed__[index];
         if (element.startsWith('--')) {
             const key = element.replace('--', '');
             if (element.includes('=')) {
+                // key can be in the format of --key=value or --key.subkey=value (e.g. env.foo=bar)
                 if (!propKeys.includes(key.split('=')[0].split('.')[0])) {
                     // check if the key is part of the propKeys array
                     parsedOptions.push(element);
@@ -351,7 +366,8 @@ function filterPropKeysFromUnParsedOptions(__unparsed__, unparsedCommandArgs = {
             // check if the next element is a value for the key
             if (propKeys.includes(key)) {
                 if (index + 1 < __unparsed__.length &&
-
+                    parseArgs[key] &&
+                    __unparsed__[index + 1].toString() === parseArgs[key].toString()) {
                     index++; // skip the next element
                 }
             }
@@ -437,3 +453,14 @@ function wrapArgIntoQuotesIfNeeded(arg) {
         return arg;
     }
 }
+function isReady(readyWhenStatus = [], data) {
+    if (data) {
+        for (const readyWhenElement of readyWhenStatus) {
+            if (data.toString().indexOf(readyWhenElement.stringToMatch) > -1) {
+                readyWhenElement.found = true;
+                break;
+            }
+        }
+    }
+    return readyWhenStatus.every((readyWhenElement) => readyWhenElement.found);
+}
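
For clarity, a minimal standalone sketch of the matching semantics introduced above (it mirrors the isReady/readyWhenStatus logic in this diff and is not an import from the published package; the strings are illustrative): every configured string must be seen somewhere in the combined stdout/stderr before the command is treated as ready.

// Standalone sketch; readyWhenStatus has the shape added to
// NormalizedRunCommandsOptions above.
const readyWhenStatus = [
  { stringToMatch: 'Compiled successfully', found: false },
  { stringToMatch: 'Server listening on', found: false },
];

function isReady(statuses, data) {
  if (data) {
    for (const status of statuses) {
      if (data.toString().indexOf(status.stringToMatch) > -1) {
        status.found = true;
        break;
      }
    }
  }
  return statuses.every((status) => status.found);
}

console.log(isReady(readyWhenStatus, 'Compiled successfully in 1.2s')); // false, one string still missing
console.log(isReady(readyWhenStatus, 'Server listening on :4200'));     // true, both strings have been seen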

package/src/executors/run-commands/schema.json

@@ -95,8 +95,11 @@
       "x-priority": "important"
     },
     "readyWhen": {
-      "
-      "
+      "description": "String or array of strings to appear in `stdout` or `stderr` that indicate that the task is done. When running multiple commands, this option can only be used when `parallel` is set to `true`. If not specified, the task is done when all the child processes complete.",
+      "oneOf": [
+        { "type": "string" },
+        { "type": "array", "items": { "type": "string" } }
+      ]
     },
     "args": {
       "oneOf": [
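
A hedged usage sketch of the updated schema (target, script, and string names are hypothetical, not taken from this package): a run-commands target can now pass an array so the task only counts as ready once every string has appeared in the output.

{
  "serve-all": {
    "executor": "nx:run-commands",
    "options": {
      "commands": ["npm run serve:api", "npm run serve:web"],
      "parallel": true,
      "readyWhen": ["API listening on port", "Compiled successfully"]
    }
  }
}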

package/src/generators/utils/project-configuration.js

@@ -147,7 +147,7 @@ function readAndCombineAllProjectConfigurations(tree) {
     }
     else if ((0, path_1.basename)(projectFile) === 'package.json') {
         const packageJson = (0, json_1.readJson)(tree, projectFile);
-        const config = (0, package_json_workspaces_1.buildProjectConfigurationFromPackageJson)(packageJson, projectFile, (0, nx_json_1.readNxJson)(tree));
+        const config = (0, package_json_workspaces_1.buildProjectConfigurationFromPackageJson)(packageJson, tree.root, projectFile, (0, nx_json_1.readNxJson)(tree));
         if (!rootMap[config.root]) {
             (0, project_configuration_utils_1.mergeProjectConfigurationIntoRootMap)(rootMap,
             // Inferred targets, tags, etc don't show up when running generators

package/src/hasher/file-hasher.js

@@ -10,7 +10,7 @@ exports.hashArray = hashArray;
 function hashObject(obj) {
     const { hashArray } = require('../native');
     const parts = [];
-    for (const key of Object.keys(obj).sort()) {
+    for (const key of Object.keys(obj ?? {}).sort()) {
         parts.push(key);
         parts.push(JSON.stringify(obj[key]));
     }

package/src/migrations/update-19-2-4/set-project-name.js

@@ -0,0 +1,34 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const to_project_name_1 = require("../../config/to-project-name");
+const format_changed_files_with_prettier_if_available_1 = require("../../generators/internal-utils/format-changed-files-with-prettier-if-available");
+const json_1 = require("../../generators/utils/json");
+const project_configuration_1 = require("../../generators/utils/project-configuration");
+async function setProjectName(tree) {
+    // We are explicitly looking for project.json files here, so getProjects is fine.
+    const projects = (0, project_configuration_1.getProjects)(tree);
+    for (const { root } of projects.values()) {
+        const projectJsonPath = `${root}/project.json`;
+        const packageJsonPath = `${root}/package.json`;
+        // If either of these files doesn't exist, theres no behavioral difference
+        if (!tree.exists(projectJsonPath) || !tree.exists(packageJsonPath)) {
+            continue;
+        }
+        const projectJson = (0, json_1.readJson)(tree, projectJsonPath);
+        // In Nx 19.1+, the way the project name is inferred is different.
+        // For existing projects, if the name is not set, we can inline it
+        // based on the existing logic. This makes sure folks aren't caught
+        // off guard by the new behavior.
+        if (!projectJson.name) {
+            const siblingPackageJson = (0, json_1.readJson)(tree, packageJsonPath);
+            const newName = siblingPackageJson.nx?.name ?? siblingPackageJson.name;
+            const oldName = (0, to_project_name_1.toProjectName)(projectJsonPath);
+            if (newName && oldName !== newName) {
+                projectJson.name = oldName;
+                (0, json_1.writeJson)(tree, projectJsonPath, projectJson);
+            }
+        }
+    }
+    await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree);
+}
+exports.default = setProjectName;
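
A hedged illustration of what this migration does (the path and names are hypothetical, and it is assumed that toProjectName('libs/ui/project.json') yields 'ui'): a project whose name used to be derived from its directory, but whose sibling package.json carries a different name, gets the old derived name written into project.json so it is not silently renamed.

// libs/ui/package.json (unchanged):  { "name": "@acme/ui" }
// libs/ui/project.json before:       { "targets": {} }
// libs/ui/project.json after:        { "name": "ui", "targets": {} }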

package/src/plugins/js/package-json/create-package-json.js

@@ -46,6 +46,9 @@ function createPackageJson(projectName, graph, options = {}, fileMap = null) {
             delete packageJson.dependencies;
             delete packageJson.devDependencies;
         }
+        if (options.isProduction) {
+            delete packageJson.devDependencies;
+        }
     }
     catch (e) { }
 }

package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js

@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.TargetProjectLocator = exports.isBuiltinModuleImport = void 0;
 const node_module_1 = require("node:module");
 const node_path_1 = require("node:path");
+const semver_1 = require("semver");
 const find_project_for_path_1 = require("../../../../project-graph/utils/find-project-for-path");
 const fileutils_1 = require("../../../../utils/fileutils");
 const workspace_root_1 = require("../../../../utils/workspace-root");
@@ -138,7 +139,8 @@ class TargetProjectLocator {
             this.npmResolutionCache.set(npmImportForProject, externalNodeName);
             return externalNodeName;
         }
-        const
+        const version = (0, semver_1.clean)(externalPackageJson.version);
+        const npmProjectKey = `npm:${externalPackageJson.name}@${version}`;
         const matchingExternalNode = this.npmProjects[npmProjectKey];
         if (!matchingExternalNode) {
             return null;
@@ -244,12 +246,15 @@ class TargetProjectLocator {
         // The package.json is directly resolvable
         const packageJsonPath = (0, resolve_relative_to_dir_1.resolveRelativeToDir)((0, node_path_1.join)(packageName, 'package.json'), relativeToDir);
         if (packageJsonPath) {
-
+            const parsedPackageJson = (0, fileutils_1.readJsonFile)(packageJsonPath);
+            if (parsedPackageJson.name && parsedPackageJson.version) {
+                return parsedPackageJson;
+            }
         }
         try {
             // Resolve the main entry point of the package
-            const
-            let dir = (0, node_path_1.dirname)(
+            const pathOfFileInPackage = packageJsonPath ?? (0, resolve_relative_to_dir_1.resolveRelativeToDir)(packageName, relativeToDir);
+            let dir = (0, node_path_1.dirname)(pathOfFileInPackage);
             while (dir !== (0, node_path_1.parse)(dir).root) {
                 const packageJsonPath = (0, node_path_1.join)(dir, 'package.json');
                 try {

package/src/plugins/package-json-workspaces/create-nodes.d.ts

@@ -4,14 +4,14 @@ import { PackageJson } from '../../utils/package-json';
 import { CreateNodes } from '../../project-graph/plugins';
 export declare const createNodes: CreateNodes;
 export declare function buildPackageJsonWorkspacesMatcher(workspaceRoot: string, readJson: (string: any) => any): (p: string) => boolean;
-export declare function createNodeFromPackageJson(pkgJsonPath: string,
+export declare function createNodeFromPackageJson(pkgJsonPath: string, workspaceRoot: string): {
     projects: {
         [x: string]: ProjectConfiguration & {
             name: string;
         };
     };
 };
-export declare function buildProjectConfigurationFromPackageJson(packageJson: PackageJson,
+export declare function buildProjectConfigurationFromPackageJson(packageJson: PackageJson, workspaceRoot: string, packageJsonPath: string, nxJson: NxJsonConfiguration): ProjectConfiguration & {
     name: string;
 };
 /**

package/src/plugins/package-json-workspaces/create-nodes.js

@@ -38,12 +38,21 @@ function buildPackageJsonWorkspacesMatcher(workspaceRoot, readJson) {
         positivePatterns.push('**/package.json');
     }
     return (p) => positivePatterns.some((positive) => (0, minimatch_1.minimatch)(p, positive)) &&
-
+        /**
+         * minimatch will return true if the given p is NOT excluded by the negative pattern.
+         *
+         * For example if the negative pattern is "!packages/vite", then the given p "packages/vite" will return false,
+         * the given p "packages/something-else/package.json" will return true.
+         *
+         * Therefore, we need to ensure that every negative pattern returns true to validate that the given p is not
+         * excluded by any of the negative patterns.
+         */
+        negativePatterns.every((negative) => (0, minimatch_1.minimatch)(p, negative));
 }
 exports.buildPackageJsonWorkspacesMatcher = buildPackageJsonWorkspacesMatcher;
-function createNodeFromPackageJson(pkgJsonPath,
-    const json = (0, fileutils_1.readJsonFile)((0, node_path_1.join)(
-    const project = buildProjectConfigurationFromPackageJson(json, pkgJsonPath, (0, nx_json_1.readNxJson)(
+function createNodeFromPackageJson(pkgJsonPath, workspaceRoot) {
+    const json = (0, fileutils_1.readJsonFile)((0, node_path_1.join)(workspaceRoot, pkgJsonPath));
+    const project = buildProjectConfigurationFromPackageJson(json, workspaceRoot, pkgJsonPath, (0, nx_json_1.readNxJson)(workspaceRoot));
     return {
         projects: {
             [project.root]: project,
@@ -51,21 +60,27 @@ function createNodeFromPackageJson(pkgJsonPath, root) {
     };
 }
 exports.createNodeFromPackageJson = createNodeFromPackageJson;
-function buildProjectConfigurationFromPackageJson(packageJson,
-    const normalizedPath =
-    const
-
+function buildProjectConfigurationFromPackageJson(packageJson, workspaceRoot, packageJsonPath, nxJson) {
+    const normalizedPath = packageJsonPath.split('\\').join('/');
+    const projectRoot = (0, node_path_1.dirname)(normalizedPath);
+    const siblingProjectJson = tryReadJson((0, node_path_1.join)(workspaceRoot, projectRoot, 'project.json'));
+    if (siblingProjectJson) {
+        for (const target of Object.keys(siblingProjectJson?.targets ?? {})) {
+            delete packageJson.scripts?.[target];
+        }
+    }
+    if (!packageJson.name && projectRoot === '.') {
         throw new Error('Nx requires the root package.json to specify a name if it is being used as an Nx project.');
     }
     let name = packageJson.name ?? (0, to_project_name_1.toProjectName)(normalizedPath);
     const projectType = nxJson?.workspaceLayout?.appsDir != nxJson?.workspaceLayout?.libsDir &&
         nxJson?.workspaceLayout?.appsDir &&
-
+        projectRoot.startsWith(nxJson.workspaceLayout.appsDir)
         ? 'application'
         : 'library';
     return {
-        root:
-        sourceRoot:
+        root: projectRoot,
+        sourceRoot: projectRoot,
         name,
         projectType,
         ...packageJson.nx,
@@ -127,3 +142,11 @@ function normalizePatterns(patterns) {
 function removeRelativePath(pattern) {
     return pattern.startsWith('./') ? pattern.substring(2) : pattern;
 }
+function tryReadJson(path) {
+    try {
+        return (0, fileutils_1.readJsonFile)(path);
+    }
+    catch {
+        return null;
+    }
+}
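
A hedged worked example of the sibling project.json handling added above (file contents are hypothetical): when a package.json script and a project.json target share a name, the script is dropped before target inference, so the explicit project.json target wins instead of being duplicated.

// packages/web/package.json: { "name": "web", "scripts": { "build": "vite build", "lint": "eslint ." } }
// packages/web/project.json: { "targets": { "build": { "executor": "@nx/vite:build" } } }
// buildProjectConfigurationFromPackageJson now deletes packageJson.scripts.build
// before building the configuration, so only the "lint" script is inferred as a target.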

package/src/plugins/project-json/build-nodes/project-json.d.ts

@@ -3,3 +3,4 @@ import { NxPluginV2 } from '../../../project-graph/plugins';
 export declare const ProjectJsonProjectsPlugin: NxPluginV2;
 export default ProjectJsonProjectsPlugin;
 export declare function buildProjectFromProjectJson(json: Partial<ProjectConfiguration>, path: string): ProjectConfiguration;
+export declare function readNameFromPackageJson(path: string): string;

package/src/plugins/project-json/build-nodes/project-json.js

@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.buildProjectFromProjectJson = exports.ProjectJsonProjectsPlugin = void 0;
+exports.readNameFromPackageJson = exports.buildProjectFromProjectJson = exports.ProjectJsonProjectsPlugin = void 0;
 const node_path_1 = require("node:path");
 const to_project_name_1 = require("../../../config/to-project-name");
 const fileutils_1 = require("../../../utils/fileutils");
@@ -21,10 +21,22 @@ exports.ProjectJsonProjectsPlugin = {
 };
 exports.default = exports.ProjectJsonProjectsPlugin;
 function buildProjectFromProjectJson(json, path) {
+    const packageJsonPath = (0, node_path_1.join)((0, node_path_1.dirname)(path), 'package.json');
+    const { name, root, ...rest } = json;
     return {
-        name: (0, to_project_name_1.toProjectName)(path),
-        root: (0, node_path_1.dirname)(path),
-        ...
+        name: name ?? readNameFromPackageJson(packageJsonPath) ?? (0, to_project_name_1.toProjectName)(path),
+        root: root ?? (0, node_path_1.dirname)(path),
+        ...rest,
     };
 }
 exports.buildProjectFromProjectJson = buildProjectFromProjectJson;
+function readNameFromPackageJson(path) {
+    try {
+        const json = (0, fileutils_1.readJsonFile)(path);
+        return json.nx?.name ?? json.name;
+    }
+    catch {
+        return undefined;
+    }
+}
+exports.readNameFromPackageJson = readNameFromPackageJson;
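
A minimal sketch of the name-resolution order these two changes implement, approximating toProjectName with the directory basename (the helper below is illustrative, not the package's own):

const { basename, dirname } = require('node:path');

// project.json "name" wins, then package.json "nx.name", then package.json "name",
// then the name derived from the directory (rough stand-in for toProjectName).
function resolveProjectName(projectJson, packageJson, projectJsonPath) {
  return (
    projectJson.name ??
    packageJson?.nx?.name ??
    packageJson?.name ??
    basename(dirname(projectJsonPath))
  );
}

console.log(resolveProjectName({}, { name: '@acme/ui' }, 'libs/ui/project.json'));            // '@acme/ui'
console.log(resolveProjectName({ name: 'ui' }, { name: '@acme/ui' }, 'libs/ui/project.json')); // 'ui'
console.log(resolveProjectName({}, null, 'libs/ui/project.json'));                             // 'ui'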

package/src/project-graph/error-types.js

@@ -161,6 +161,16 @@ class AggregateCreateNodesError extends Error {
         this.errors = errors;
         this.partialResults = partialResults;
         this.name = this.constructor.name;
+        if (
+        // Errors should be an array
+        !Array.isArray(errors) ||
+            !errors.every(
+            // Where every element is a tuple
+            (errorTuple) => Array.isArray(errorTuple) &&
+                // That has a length of 2
+                errorTuple.length === 2)) {
+            throw new Error('AggregateCreateNodesError must be constructed with an array of tuples where the first element is a filename or undefined and the second element is the underlying error.');
+        }
     }
 }
 exports.AggregateCreateNodesError = AggregateCreateNodesError;
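
A hedged sketch of the tuple shape the constructor now validates (shown with a local stand-in array rather than importing nx internals; the file paths are hypothetical):

const errors = [
  ['apps/demo/project.json', new Error('invalid configuration')], // per-file failure
  [null, new Error('plugin-level failure')],                      // no specific file
];
// Valid: new AggregateCreateNodesError(errors, []).
// Invalid (now throws the explanatory error above): new AggregateCreateNodesError([null, e], []),
// a single tuple not wrapped in an outer array, which is the exact shape fixed in internal-api.js below.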

package/src/project-graph/file-utils.js

@@ -162,7 +162,7 @@ function getProjectsSync(root, nxJson) {
     }
     else if ((0, path_1.basename)(projectFile) === 'package.json') {
         const packageJson = (0, fileutils_1.readJsonFile)(projectFile);
-        const config = (0, package_json_workspaces_1.buildProjectConfigurationFromPackageJson)(packageJson, projectFile, nxJson);
+        const config = (0, package_json_workspaces_1.buildProjectConfigurationFromPackageJson)(packageJson, root, projectFile, nxJson);
         if (!rootMap[config.root]) {
             (0, project_configuration_utils_1.mergeProjectConfigurationIntoRootMap)(rootMap,
             // Inferred targets, tags, etc don't show up when running generators

package/src/project-graph/plugins/internal-api.js

@@ -45,7 +45,7 @@ class LoadedNxPlugin {
                 throw e;
             }
             // The underlying plugin errored out. We can't know any partial results.
-            throw new error_types_1.AggregateCreateNodesError([null, e], []);
+            throw new error_types_1.AggregateCreateNodesError([[null, e]], []);
         }
         finally {
             performance.mark(`${plugin.name}:createNodes - end`);

package/src/project-graph/utils/normalize-project-nodes.js

@@ -76,11 +76,29 @@ function normalizeImplicitDependencies(source, implicitDependencies, projects) {
     if (!implicitDependencies?.length) {
         return implicitDependencies ?? [];
     }
-
-
-
-
-
-
+    // Implicit dependencies handle negatives in a different
+    // way from most other `projects` fields. This is because
+    // they are used for multiple purposes.
+    const positivePatterns = [];
+    const negativePatterns = [];
+    for (const dep of implicitDependencies) {
+        if (dep.startsWith('!')) {
+            negativePatterns.push(dep);
+        }
+        else {
+            positivePatterns.push(dep);
+        }
+    }
+    // Finds all projects that match a positive pattern and are not excluded by a negative pattern
+    const deps = positivePatterns.length
+        ? (0, find_matching_projects_1.findMatchingProjects)(positivePatterns.concat(negativePatterns), projects).filter((x) => x !== source)
+        : [];
+    // Expands negative patterns to equal project names
+    const alwaysIgnoredDeps = (0, find_matching_projects_1.findMatchingProjects)(negativePatterns.map((x) => x.slice(1)), projects);
+    // We return the matching deps, but keep the negative patterns in the list
+    // so that they can be processed later by implicit-project-dependencies.ts
+    // This is what allows using a negative implicit dep to remove a dependency
+    // detected by createDependencies.
+    return deps.concat(alwaysIgnoredDeps.map((x) => '!' + x));
 }
 exports.normalizeImplicitDependencies = normalizeImplicitDependencies;
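
A hedged worked example of the negative-pattern handling above (project names are hypothetical): for a source project "app" with implicitDependencies ["lib-*", "!lib-b"] in a workspace containing lib-a and lib-b, the function now returns the positive matches minus exclusions, plus the expanded negative entries kept so implicit-project-dependencies.ts can later remove lib-b even if createDependencies detected it.

// Illustrative only (projects map omitted for brevity):
// normalizeImplicitDependencies('app', ['lib-*', '!lib-b'], projects)
// -> ['lib-a', '!lib-b']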

package/src/project-graph/utils/project-configuration-utils.js

@@ -245,7 +245,15 @@ plugins) {
                 e
                 : // This represents a single plugin erroring out with a hard error.
                     new error_types_1.AggregateCreateNodesError([[null, e]], []);
-
+            const innerErrors = error.errors;
+            for (const [file, e] of innerErrors) {
+                if (file) {
+                    errorBodyLines.push(` - ${file}: ${e.message}`);
+                }
+                else {
+                    errorBodyLines.push(` - ${e.message}`);
+                }
+            }
             error.message = errorBodyLines.join('\n');
             // This represents a single plugin erroring out with a hard error.
             errors.push(error);