nx 19.3.0-canary.20240612-4a5eb23 → 19.3.0
- package/migrations.json +6 -0
- package/package.json +12 -12
- package/src/command-line/release/command-object.js +1 -1
- package/src/executors/run-commands/run-commands.impl.d.ts +5 -1
- package/src/executors/run-commands/run-commands.impl.js +35 -14
- package/src/executors/run-commands/schema.json +5 -2
- package/src/migrations/update-19-2-4/set-project-name.d.ts +2 -0
- package/src/migrations/update-19-2-4/set-project-name.js +34 -0
- package/src/plugins/js/lock-file/npm-parser.js +22 -18
- package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js +6 -3
- package/src/plugins/project-json/build-nodes/project-json.d.ts +1 -0
- package/src/plugins/project-json/build-nodes/project-json.js +16 -4
- package/src/utils/package-json.d.ts +1 -0
package/migrations.json
CHANGED
@@ -83,6 +83,12 @@
       "version": "19.2.2-beta.0",
       "description": "Updates the nx wrapper.",
       "implementation": "./src/migrations/update-17-3-0/update-nxw"
+    },
+    "19-2-4-set-project-name": {
+      "version": "19.2.4-beta.0",
+      "description": "Set project name in nx.json explicitly",
+      "implementation": "./src/migrations/update-19-2-4/set-project-name",
+      "x-repair-skip": true
     }
   }
 }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "nx",
-  "version": "19.3.0-canary.20240612-4a5eb23",
+  "version": "19.3.0",
   "private": false,
   "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
   "repository": {
@@ -70,7 +70,7 @@
     "yargs-parser": "21.1.1",
     "node-machine-id": "1.1.12",
     "ora": "5.3.0",
-    "@nrwl/tao": "19.3.0-canary.20240612-4a5eb23"
+    "@nrwl/tao": "19.3.0"
   },
   "peerDependencies": {
     "@swc-node/register": "^1.8.0",
@@ -85,16 +85,16 @@
     }
   },
   "optionalDependencies": {
-    "@nx/nx-darwin-x64": "19.3.0-canary.20240612-4a5eb23",
-    "@nx/nx-darwin-arm64": "19.3.0-canary.20240612-4a5eb23",
-    "@nx/nx-linux-x64-gnu": "19.3.0-canary.20240612-4a5eb23",
-    "@nx/nx-linux-x64-musl": "19.3.0-canary.20240612-4a5eb23",
-    "@nx/nx-win32-x64-msvc": "19.3.0-canary.20240612-4a5eb23",
-    "@nx/nx-linux-arm64-gnu": "19.3.0-canary.20240612-4a5eb23",
-    "@nx/nx-linux-arm64-musl": "19.3.0-canary.20240612-4a5eb23",
-    "@nx/nx-linux-arm-gnueabihf": "19.3.0-canary.20240612-4a5eb23",
-    "@nx/nx-win32-arm64-msvc": "19.3.0-canary.20240612-4a5eb23",
-    "@nx/nx-freebsd-x64": "19.3.0-canary.20240612-4a5eb23"
+    "@nx/nx-darwin-x64": "19.3.0",
+    "@nx/nx-darwin-arm64": "19.3.0",
+    "@nx/nx-linux-x64-gnu": "19.3.0",
+    "@nx/nx-linux-x64-musl": "19.3.0",
+    "@nx/nx-win32-x64-msvc": "19.3.0",
+    "@nx/nx-linux-arm64-gnu": "19.3.0",
+    "@nx/nx-linux-arm64-musl": "19.3.0",
+    "@nx/nx-linux-arm-gnueabihf": "19.3.0",
+    "@nx/nx-win32-arm64-msvc": "19.3.0",
+    "@nx/nx-freebsd-x64": "19.3.0"
   },
   "nx-migrations": {
     "migrations": "./migrations.json",
package/src/command-line/release/command-object.js
CHANGED
@@ -101,7 +101,7 @@ const versionCommand = {
     })
     .option('preid', {
       type: 'string',
-      describe: 'The optional prerelease identifier to apply to the version
+      describe: 'The optional prerelease identifier to apply to the version. This will only be applied in the case that the specifier argument has been set to `prerelease` OR when conventional commits are enabled, in which case it will modify the resolved specifier from conventional commits to be its prerelease equivalent. E.g. minor -> preminor',
       default: '',
     })
     .option('stage-changes', {
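The only change in this file is the expanded --preid help text for nx release version, spelling out when the prerelease identifier is actually applied. As an illustration (version numbers are hypothetical, not part of this diff):

    nx release version prerelease --preid=beta   # e.g. 1.2.0 -> 1.2.1-beta.0
    nx release version minor --preid=beta        # explicit non-prerelease specifier: preid is not applied, 1.2.0 -> 1.3.0
    # with conventional commits enabled, a resolved "minor" becomes "preminor", e.g. 1.2.0 -> 1.3.0-beta.0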
package/src/executors/run-commands/run-commands.impl.d.ts
CHANGED
@@ -19,7 +19,7 @@ export interface RunCommandsOptions extends Json {
     } | string)[];
     color?: boolean;
     parallel?: boolean;
-    readyWhen?: string;
+    readyWhen?: string | string[];
     cwd?: string;
     env?: Record<string, string>;
     forwardAllArgs?: boolean;
@@ -45,6 +45,10 @@ export interface NormalizedRunCommandsOptions extends RunCommandsOptions {
         [k: string]: string | string[];
     };
     args?: string;
+    readyWhenStatus: {
+        stringToMatch: string;
+        found: boolean;
+    }[];
 }
 export default function (options: RunCommandsOptions, context: ExecutorContext): Promise<{
     success: boolean;
package/src/executors/run-commands/run-commands.impl.js
CHANGED
@@ -50,7 +50,7 @@ async function default_1(options, context) {
         await loadEnvVars(options.envFile);
     }
     const normalized = normalizeOptions(options);
-    if (
+    if (normalized.readyWhenStatus.length && !normalized.parallel) {
         throw new Error('ERROR: Bad executor config for run-commands - "readyWhen" can only be used when "parallel=true".');
     }
     if (options.commands.find((c) => c.prefix || c.color || c.bgColor) &&
@@ -72,12 +72,12 @@ async function default_1(options, context) {
 }
 exports.default = default_1;
 async function runInParallel(options, context) {
-    const procs = options.commands.map((c) => createProcess(null, c, options.
+    const procs = options.commands.map((c) => createProcess(null, c, options.readyWhenStatus, options.color, calculateCwd(options.cwd, context), options.env ?? {}, true, options.usePty, options.streamOutput, options.tty).then((result) => ({
         result,
         command: c.command,
     })));
     let terminalOutput = '';
-    if (options.
+    if (options.readyWhenStatus.length) {
         const r = await Promise.race(procs);
         terminalOutput += r.result.terminalOutput;
         if (!r.result.success) {
@@ -118,9 +118,21 @@ async function runInParallel(options, context) {
     }
 }
 function normalizeOptions(options) {
+    if (options.readyWhen && typeof options.readyWhen === 'string') {
+        options.readyWhenStatus = [
+            { stringToMatch: options.readyWhen, found: false },
+        ];
+    }
+    else {
+        options.readyWhenStatus =
+            options.readyWhen?.map((stringToMatch) => ({
+                stringToMatch,
+                found: false,
+            })) ?? [];
+    }
     if (options.command) {
         options.commands = [{ command: options.command }];
-        options.parallel =
+        options.parallel = options.readyWhenStatus?.length > 0;
     }
     else {
         options.commands = options.commands.map((c) => typeof c === 'string' ? { command: c } : c);
@@ -150,7 +162,7 @@ async function runSerially(options, context) {
     pseudoTerminal ??= pseudo_terminal_1.PseudoTerminal.isSupported() ? (0, pseudo_terminal_1.getPseudoTerminal)() : null;
     let terminalOutput = '';
     for (const c of options.commands) {
-        const result = await createProcess(pseudoTerminal, c,
+        const result = await createProcess(pseudoTerminal, c, [], options.color, calculateCwd(options.cwd, context), options.env ?? {}, false, options.usePty, options.streamOutput, options.tty);
         terminalOutput += result.terminalOutput;
         if (!result.success) {
             const output = `Warning: command "${c.command}" exited with non-zero status code`;
@@ -163,13 +175,14 @@ async function runSerially(options, context) {
     }
     return { success: true, terminalOutput };
 }
-async function createProcess(pseudoTerminal, commandConfig,
+async function createProcess(pseudoTerminal, commandConfig, readyWhenStatus = [], color, cwd, env, isParallel, usePty = true, streamOutput = true, tty) {
     env = processEnv(color, cwd, env);
     // The rust runCommand is always a tty, so it will not look nice in parallel and if we need prefixes
     // currently does not work properly in windows
     if (pseudoTerminal &&
         process.env.NX_NATIVE_COMMAND_RUNNER !== 'false' &&
         !commandConfig.prefix &&
+        readyWhenStatus.length === 0 &&
         !isParallel &&
         usePty) {
         let terminalOutput = chalk.dim('> ') + commandConfig.command + '\r\n\r\n';
@@ -186,9 +199,6 @@ async function createProcess(pseudoTerminal, commandConfig, readyWhen, color, cw
         return new Promise((res) => {
             cp.onOutput((output) => {
                 terminalOutput += output;
-                if (readyWhen && output.indexOf(readyWhen) > -1) {
-                    res({ success: true, terminalOutput });
-                }
             });
             cp.onExit((code) => {
                 if (code >= 128) {
@@ -200,9 +210,9 @@ async function createProcess(pseudoTerminal, commandConfig, readyWhen, color, cw
             });
         });
     }
-    return nodeProcess(commandConfig, cwd, env,
+    return nodeProcess(commandConfig, cwd, env, readyWhenStatus, streamOutput);
 }
-function nodeProcess(commandConfig, cwd, env,
+function nodeProcess(commandConfig, cwd, env, readyWhenStatus, streamOutput = true) {
     let terminalOutput = chalk.dim('> ') + commandConfig.command + '\r\n\r\n';
     if (streamOutput) {
         process.stdout.write(terminalOutput);
@@ -220,7 +230,7 @@ function nodeProcess(commandConfig, cwd, env, readyWhen, streamOutput = true) {
         if (streamOutput) {
             process.stdout.write(output);
         }
-        if (
+        if (readyWhenStatus.length && isReady(readyWhenStatus, data.toString())) {
            res({ success: true, terminalOutput });
        }
    });
@@ -230,7 +240,7 @@ function nodeProcess(commandConfig, cwd, env, readyWhen, streamOutput = true) {
        if (streamOutput) {
            process.stderr.write(output);
        }
-        if (
+        if (readyWhenStatus.length && isReady(readyWhenStatus, err.toString())) {
            res({ success: true, terminalOutput });
        }
    });
@@ -244,7 +254,7 @@ function nodeProcess(commandConfig, cwd, env, readyWhen, streamOutput = true) {
    });
    childProcess.on('exit', (code) => {
        childProcesses.delete(childProcess);
-        if (!
+        if (!readyWhenStatus.length || isReady(readyWhenStatus)) {
            res({ success: code === 0, terminalOutput });
        }
    });
@@ -443,3 +453,14 @@ function wrapArgIntoQuotesIfNeeded(arg) {
         return arg;
     }
 }
+function isReady(readyWhenStatus = [], data) {
+    if (data) {
+        for (const readyWhenElement of readyWhenStatus) {
+            if (data.toString().indexOf(readyWhenElement.stringToMatch) > -1) {
+                readyWhenElement.found = true;
+                break;
+            }
+        }
+    }
+    return readyWhenStatus.every((readyWhenElement) => readyWhenElement.found);
+}
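Taken together, these changes replace the single readyWhen string match with per-string tracking: normalizeOptions converts readyWhen into readyWhenStatus entries, and isReady only reports the task as ready once every configured string has appeared in the output. A minimal standalone sketch of that behavior (illustrative only, with made-up marker strings):

    const readyWhenStatus = [
        { stringToMatch: 'API listening', found: false },
        { stringToMatch: 'Compiled successfully', found: false },
    ];
    function isReady(statuses, data) {
        if (data) {
            for (const status of statuses) {
                if (data.indexOf(status.stringToMatch) > -1) {
                    status.found = true;
                    break;
                }
            }
        }
        return statuses.every((status) => status.found);
    }
    isReady(readyWhenStatus, 'API listening on :3333');  // false - second marker not seen yet
    isReady(readyWhenStatus, 'Compiled successfully.');  // true  - both markers have now appeared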
package/src/executors/run-commands/schema.json
CHANGED
@@ -95,8 +95,11 @@
       "x-priority": "important"
     },
     "readyWhen": {
-      "
-      "
+      "description": "String or array of strings to appear in `stdout` or `stderr` that indicate that the task is done. When running multiple commands, this option can only be used when `parallel` is set to `true`. If not specified, the task is done when all the child processes complete.",
+      "oneOf": [
+        { "type": "string" },
+        { "type": "array", "items": { "type": "string" } }
+      ]
     },
     "args": {
       "oneOf": [
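In practical terms, a run-commands target can now wait for several output markers before it is considered ready. A hypothetical target configuration (project and commands are made up for illustration; a single string remains valid, and readyWhen still requires parallel execution when multiple commands are given):

    {
        "serve-full-stack": {
            "executor": "nx:run-commands",
            "options": {
                "commands": ["npm run serve:api", "npm run serve:web"],
                "parallel": true,
                "readyWhen": ["API listening", "Compiled successfully"]
            }
        }
    }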
package/src/migrations/update-19-2-4/set-project-name.js
ADDED
@@ -0,0 +1,34 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const to_project_name_1 = require("../../config/to-project-name");
+const format_changed_files_with_prettier_if_available_1 = require("../../generators/internal-utils/format-changed-files-with-prettier-if-available");
+const json_1 = require("../../generators/utils/json");
+const project_configuration_1 = require("../../generators/utils/project-configuration");
+async function setProjectName(tree) {
+    // We are explicitly looking for project.json files here, so getProjects is fine.
+    const projects = (0, project_configuration_1.getProjects)(tree);
+    for (const { root } of projects.values()) {
+        const projectJsonPath = `${root}/project.json`;
+        const packageJsonPath = `${root}/package.json`;
+        // If either of these files doesn't exist, theres no behavioral difference
+        if (!tree.exists(projectJsonPath) || !tree.exists(packageJsonPath)) {
+            continue;
+        }
+        const projectJson = (0, json_1.readJson)(tree, projectJsonPath);
+        // In Nx 19.1+, the way the project name is inferred is different.
+        // For existing projects, if the name is not set, we can inline it
+        // based on the existing logic. This makes sure folks aren't caught
+        // off guard by the new behavior.
+        if (!projectJson.name) {
+            const siblingPackageJson = (0, json_1.readJson)(tree, packageJsonPath);
+            const newName = siblingPackageJson.nx?.name ?? siblingPackageJson.name;
+            const oldName = (0, to_project_name_1.toProjectName)(projectJsonPath);
+            if (newName && oldName !== newName) {
+                projectJson.name = oldName;
+                (0, json_1.writeJson)(tree, projectJsonPath, projectJson);
+            }
+        }
+    }
+    await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree);
+}
+exports.default = setProjectName;
package/src/plugins/js/lock-file/npm-parser.js
CHANGED
@@ -289,15 +289,16 @@ function getPackageParent(path, packages) {
 function mapSnapshots(rootLockFile, graph) {
     const nestedNodes = new Set();
     const visitedNodes = new Map();
-    const visitedPaths = new Set();
     const remappedPackages = new Map();
     // add first level children
     Object.values(graph.externalNodes).forEach((node) => {
         if (node.name === `npm:${node.data.packageName}`) {
             const mappedPackage = mapPackage(rootLockFile, node.data.packageName, node.data.version);
             remappedPackages.set(mappedPackage.path, mappedPackage);
-            visitedNodes.set(node,
-
+            visitedNodes.set(node, {
+                packagePaths: new Set([mappedPackage.path]),
+                unresolvedParents: new Set(),
+            });
         }
         else {
             nestedNodes.add(node);
@@ -306,7 +307,7 @@ function mapSnapshots(rootLockFile, graph) {
     let remappedPackagesArray;
     if (nestedNodes.size) {
         const invertedGraph = (0, operators_1.reverse)(graph);
-        nestMappedPackages(invertedGraph, remappedPackages, nestedNodes, visitedNodes,
+        nestMappedPackages(invertedGraph, remappedPackages, nestedNodes, visitedNodes, rootLockFile);
         // initially we naively map package paths to topParent/../parent/child
         // but some of those should be nested higher up the tree
         remappedPackagesArray = elevateNestedPaths(remappedPackages);
@@ -332,31 +333,34 @@ function mapPackage(rootLockFile, packageName, version, parentPath = '') {
         valueV3,
     };
 }
-function nestMappedPackages(invertedGraph, result, nestedNodes, visitedNodes,
+function nestMappedPackages(invertedGraph, result, nestedNodes, visitedNodes, rootLockFile) {
     const initialSize = nestedNodes.size;
     if (!initialSize) {
         return;
     }
     nestedNodes.forEach((node) => {
-
+        if (!visitedNodes.has(node)) {
+            visitedNodes.set(node, {
+                packagePaths: new Set(),
+                unresolvedParents: new Set(invertedGraph.dependencies[node.name].map(({ target }) => target)),
+            });
+        }
         invertedGraph.dependencies[node.name].forEach(({ target }) => {
+            if (!visitedNodes.get(node).unresolvedParents.has(target)) {
+                return;
+            }
             const targetNode = invertedGraph.externalNodes[target];
-            if (visitedNodes.has(targetNode)
-                visitedNodes.get(targetNode).
+            if (visitedNodes.has(targetNode) &&
+                !visitedNodes.get(targetNode).unresolvedParents.size) {
+                visitedNodes.get(targetNode).packagePaths.forEach((path) => {
                     const mappedPackage = mapPackage(rootLockFile, node.data.packageName, node.data.version, path + '/');
                     result.set(mappedPackage.path, mappedPackage);
-
-
-                }
-                else {
-                    visitedNodes.set(node, new Set([mappedPackage.path]));
-                }
-                visitedPaths.add(mappedPackage.path);
+                    visitedNodes.get(node).packagePaths.add(mappedPackage.path);
+                    visitedNodes.get(node).unresolvedParents.delete(target);
                 });
-            unresolvedParents--;
             }
         });
-        if (!unresolvedParents) {
+        if (!visitedNodes.get(node).unresolvedParents.size) {
            nestedNodes.delete(node);
        }
    });
@@ -367,7 +371,7 @@ function nestMappedPackages(invertedGraph, result, nestedNodes, visitedNodes, vi
     ].join('\n'));
     }
     else {
-        nestMappedPackages(invertedGraph, result, nestedNodes, visitedNodes,
+        nestMappedPackages(invertedGraph, result, nestedNodes, visitedNodes, rootLockFile);
     }
 }
 // sort paths by number of segments and then alphabetically
package/src/plugins/js/project-graph/build-dependencies/target-project-locator.js
CHANGED
@@ -246,12 +246,15 @@ class TargetProjectLocator {
         // The package.json is directly resolvable
         const packageJsonPath = (0, resolve_relative_to_dir_1.resolveRelativeToDir)((0, node_path_1.join)(packageName, 'package.json'), relativeToDir);
         if (packageJsonPath) {
-
+            const parsedPackageJson = (0, fileutils_1.readJsonFile)(packageJsonPath);
+            if (parsedPackageJson.name && parsedPackageJson.version) {
+                return parsedPackageJson;
+            }
         }
         try {
             // Resolve the main entry point of the package
-            const
-            let dir = (0, node_path_1.dirname)(
+            const pathOfFileInPackage = packageJsonPath ?? (0, resolve_relative_to_dir_1.resolveRelativeToDir)(packageName, relativeToDir);
+            let dir = (0, node_path_1.dirname)(pathOfFileInPackage);
             while (dir !== (0, node_path_1.parse)(dir).root) {
                 const packageJsonPath = (0, node_path_1.join)(dir, 'package.json');
                 try {
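The intent of this change is that a directly resolvable package.json is only trusted when it carries both a name and a version; otherwise the locator falls back to resolving a file inside the package and walking up the directory tree. A condensed sketch of that decision, using Node built-ins rather than Nx's internal helpers (illustrative, not the actual implementation):

    const { dirname, join, parse } = require('node:path');
    const { readFileSync } = require('node:fs');

    function readPackageJsonFor(packageJsonPath, entryPointPath) {
        if (packageJsonPath) {
            const parsed = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));
            if (parsed.name && parsed.version) {
                return parsed; // complete manifest - use it directly
            }
        }
        // fall back: start from a file known to be inside the package and
        // walk up until a readable package.json is found
        let dir = dirname(packageJsonPath ?? entryPointPath);
        while (dir !== parse(dir).root) {
            try {
                return JSON.parse(readFileSync(join(dir, 'package.json'), 'utf-8'));
            } catch {
                dir = dirname(dir);
            }
        }
        return null;
    }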
package/src/plugins/project-json/build-nodes/project-json.d.ts
CHANGED
@@ -3,3 +3,4 @@ import { NxPluginV2 } from '../../../project-graph/plugins';
 export declare const ProjectJsonProjectsPlugin: NxPluginV2;
 export default ProjectJsonProjectsPlugin;
 export declare function buildProjectFromProjectJson(json: Partial<ProjectConfiguration>, path: string): ProjectConfiguration;
+export declare function readNameFromPackageJson(path: string): string;
package/src/plugins/project-json/build-nodes/project-json.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.buildProjectFromProjectJson = exports.ProjectJsonProjectsPlugin = void 0;
+exports.readNameFromPackageJson = exports.buildProjectFromProjectJson = exports.ProjectJsonProjectsPlugin = void 0;
 const node_path_1 = require("node:path");
 const to_project_name_1 = require("../../../config/to-project-name");
 const fileutils_1 = require("../../../utils/fileutils");
@@ -21,10 +21,22 @@ exports.ProjectJsonProjectsPlugin = {
 };
 exports.default = exports.ProjectJsonProjectsPlugin;
 function buildProjectFromProjectJson(json, path) {
+    const packageJsonPath = (0, node_path_1.join)((0, node_path_1.dirname)(path), 'package.json');
+    const { name, root, ...rest } = json;
     return {
-        name: (0, to_project_name_1.toProjectName)(path),
-        root: (0, node_path_1.dirname)(path),
-        ...
+        name: name ?? readNameFromPackageJson(packageJsonPath) ?? (0, to_project_name_1.toProjectName)(path),
+        root: root ?? (0, node_path_1.dirname)(path),
+        ...rest,
     };
 }
 exports.buildProjectFromProjectJson = buildProjectFromProjectJson;
+function readNameFromPackageJson(path) {
+    try {
+        const json = (0, fileutils_1.readJsonFile)(path);
+        return json.nx?.name ?? json.name;
+    }
+    catch {
+        return undefined;
+    }
+}
+exports.readNameFromPackageJson = readNameFromPackageJson;
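The net effect is a clear precedence for how a project.json-based project gets its name. A condensed restatement of the lookup in buildProjectFromProjectJson above (the paths are hypothetical):

    // name resolution for a hypothetical libs/shared/project.json
    const name =
        json.name ??                                              // 1. explicit "name" in project.json
        readNameFromPackageJson('libs/shared/package.json') ??    // 2. "nx.name" or "name" from the sibling package.json
        toProjectName('libs/shared/project.json');                // 3. folder-derived fallback ("shared")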
package/src/utils/package-json.d.ts
CHANGED
@@ -1,6 +1,7 @@
 import { InputDefinition, ProjectMetadata, TargetConfiguration } from '../config/workspace-json-project-json';
 import { PackageManagerCommands } from './package-manager';
 export interface NxProjectPackageJsonConfiguration {
+    name?: string;
     implicitDependencies?: string[];
     tags?: string[];
     namedInputs?: {
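This new optional field is what readNameFromPackageJson reads: a project's Nx name can now be set from package.json alone. A hypothetical example where the npm package keeps its scoped name while Nx addresses the project by a shorter one:

    {
        "name": "@acme/shared",
        "nx": {
            "name": "shared",
            "tags": ["scope:shared"]
        }
    }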