nx 19.6.0-canary.20240809-d3747e0 → 19.6.0-canary.20240813-c72ba9b
- package/bin/post-install.js +8 -0
- package/package.json +12 -12
- package/schemas/nx-schema.json +55 -4
- package/schemas/project-schema.json +7 -0
- package/src/adapter/compat.d.ts +1 -1
- package/src/adapter/compat.js +1 -0
- package/src/command-line/import/command-object.d.ts +2 -0
- package/src/command-line/import/command-object.js +38 -0
- package/src/command-line/import/import.d.ts +21 -0
- package/src/command-line/import/import.js +173 -0
- package/src/command-line/import/utils/merge-remote-source.d.ts +2 -0
- package/src/command-line/import/utils/merge-remote-source.js +14 -0
- package/src/command-line/import/utils/needs-install.d.ts +3 -0
- package/src/command-line/import/utils/needs-install.js +31 -0
- package/src/command-line/import/utils/prepare-source-repo.d.ts +2 -0
- package/src/command-line/import/utils/prepare-source-repo.js +104 -0
- package/src/command-line/init/init-v2.d.ts +7 -0
- package/src/command-line/init/init-v2.js +49 -16
- package/src/command-line/nx-commands.js +33 -28
- package/src/command-line/release/changelog.js +9 -9
- package/src/command-line/release/command-object.d.ts +12 -3
- package/src/command-line/release/command-object.js +16 -1
- package/src/command-line/release/config/config.js +4 -2
- package/src/command-line/release/config/filter-release-groups.d.ts +2 -2
- package/src/command-line/release/config/filter-release-groups.js +1 -1
- package/src/command-line/release/config/version-plans.d.ts +1 -1
- package/src/command-line/release/config/version-plans.js +12 -12
- package/src/command-line/release/plan-check.d.ts +4 -0
- package/src/command-line/release/plan-check.js +225 -0
- package/src/command-line/release/plan.js +1 -1
- package/src/command-line/release/release.js +3 -3
- package/src/command-line/release/version.js +1 -1
- package/src/command-line/sync/command-object.d.ts +6 -0
- package/src/command-line/sync/command-object.js +25 -0
- package/src/command-line/sync/sync.d.ts +6 -0
- package/src/command-line/sync/sync.js +30 -0
- package/src/command-line/yargs-utils/shared-options.d.ts +1 -1
- package/src/config/nx-json.d.ts +32 -2
- package/src/config/workspace-json-project-json.d.ts +5 -0
- package/src/core/graph/main.js +1 -1
- package/src/daemon/cache.d.ts +1 -0
- package/src/daemon/cache.js +25 -18
- package/src/daemon/client/client.d.ts +5 -0
- package/src/daemon/client/client.js +42 -1
- package/src/daemon/message-types/flush-sync-generator-changes-to-disk.d.ts +6 -0
- package/src/daemon/message-types/flush-sync-generator-changes-to-disk.js +11 -0
- package/src/daemon/message-types/force-shutdown.d.ts +5 -0
- package/src/daemon/message-types/force-shutdown.js +11 -0
- package/src/daemon/message-types/get-registered-sync-generators.d.ts +5 -0
- package/src/daemon/message-types/get-registered-sync-generators.js +11 -0
- package/src/daemon/message-types/get-sync-generator-changes.d.ts +6 -0
- package/src/daemon/message-types/get-sync-generator-changes.js +11 -0
- package/src/daemon/message-types/update-workspace-context.d.ts +8 -0
- package/src/daemon/message-types/update-workspace-context.js +11 -0
- package/src/daemon/server/handle-flush-sync-generator-changes-to-disk.d.ts +2 -0
- package/src/daemon/server/handle-flush-sync-generator-changes-to-disk.js +11 -0
- package/src/daemon/server/handle-force-shutdown.d.ts +5 -0
- package/src/daemon/server/handle-force-shutdown.js +18 -0
- package/src/daemon/server/handle-get-registered-sync-generators.d.ts +2 -0
- package/src/daemon/server/handle-get-registered-sync-generators.js +11 -0
- package/src/daemon/server/handle-get-sync-generator-changes.d.ts +2 -0
- package/src/daemon/server/handle-get-sync-generator-changes.js +17 -0
- package/src/daemon/server/handle-request-shutdown.js +2 -0
- package/src/daemon/server/handle-update-workspace-context.d.ts +2 -0
- package/src/daemon/server/handle-update-workspace-context.js +11 -0
- package/src/daemon/server/project-graph-incremental-recomputation.d.ts +1 -0
- package/src/daemon/server/project-graph-incremental-recomputation.js +19 -2
- package/src/daemon/server/server.d.ts +1 -0
- package/src/daemon/server/server.js +39 -0
- package/src/daemon/server/shutdown-utils.d.ts +2 -1
- package/src/daemon/server/shutdown-utils.js +11 -4
- package/src/daemon/server/sync-generators.d.ts +6 -0
- package/src/daemon/server/sync-generators.js +202 -0
- package/src/daemon/server/watcher.js +3 -0
- package/src/daemon/socket-utils.js +18 -5
- package/src/daemon/tmp-dir.js +2 -1
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js +1 -1
- package/src/nx-cloud/models/onboarding-status.d.ts +1 -0
- package/src/nx-cloud/models/onboarding-status.js +2 -0
- package/src/nx-cloud/utilities/is-workspace-claimed.d.ts +1 -0
- package/src/nx-cloud/utilities/is-workspace-claimed.js +24 -0
- package/src/nx-cloud/utilities/onboarding.d.ts +5 -0
- package/src/nx-cloud/utilities/onboarding.js +28 -0
- package/src/project-graph/plugins/internal-api.js +16 -5
- package/src/project-graph/plugins/isolation/messaging.d.ts +5 -1
- package/src/project-graph/plugins/isolation/messaging.js +1 -0
- package/src/project-graph/plugins/isolation/plugin-pool.js +4 -6
- package/src/project-graph/plugins/isolation/plugin-worker.js +15 -0
- package/src/project-graph/utils/project-configuration-utils.js +5 -2
- package/src/tasks-runner/run-command.d.ts +1 -1
- package/src/tasks-runner/run-command.js +120 -2
- package/src/utils/command-line-utils.d.ts +1 -0
- package/src/utils/command-line-utils.js +6 -3
- package/src/utils/git-utils.d.ts +35 -0
- package/src/utils/git-utils.js +111 -0
- package/src/utils/package-manager.js +1 -1
- package/src/utils/plugins/output.js +1 -1
- package/src/utils/squash.d.ts +1 -0
- package/src/utils/squash.js +12 -0
- package/src/utils/sync-generators.d.ts +22 -0
- package/src/utils/sync-generators.js +161 -0
- package/src/utils/workspace-context.d.ts +1 -0
- package/src/utils/workspace-context.js +16 -0
- package/src/daemon/message-types/update-context-files.d.ts +0 -7
- package/src/daemon/message-types/update-context-files.js +0 -11
package/src/daemon/server/sync-generators.js
ADDED
@@ -0,0 +1,202 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getCachedSyncGeneratorChanges = getCachedSyncGeneratorChanges;
+exports.flushSyncGeneratorChangesToDisk = flushSyncGeneratorChangesToDisk;
+exports.collectAndScheduleSyncGenerators = collectAndScheduleSyncGenerators;
+exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
+const nx_json_1 = require("../../config/nx-json");
+const tree_1 = require("../../generators/tree");
+const file_hasher_1 = require("../../hasher/file-hasher");
+const project_graph_1 = require("../../project-graph/project-graph");
+const sync_generators_1 = require("../../utils/sync-generators");
+const workspace_root_1 = require("../../utils/workspace-root");
+const logger_1 = require("./logger");
+const project_graph_incremental_recomputation_1 = require("./project-graph-incremental-recomputation");
+const syncGeneratorsCacheResultPromises = new Map();
+let registeredTaskSyncGenerators = new Set();
+let registeredGlobalSyncGenerators = new Set();
+const scheduledGenerators = new Set();
+let waitPeriod = 100;
+let registeredSyncGenerators;
+let scheduledTimeoutId;
+let storedProjectGraphHash;
+let storedNxJsonHash;
+const log = (...messageParts) => {
+    logger_1.serverLogger.log('[SYNC]:', ...messageParts);
+};
+// TODO(leo): check conflicts and reuse the Tree where possible
+async function getCachedSyncGeneratorChanges(generators) {
+    try {
+        log('get sync generators changes on demand', generators);
+        // this is invoked imperatively, so we clear any scheduled run
+        if (scheduledTimeoutId) {
+            log('clearing scheduled run');
+            clearTimeout(scheduledTimeoutId);
+            scheduledTimeoutId = undefined;
+        }
+        // reset the wait time
+        waitPeriod = 100;
+        let projects;
+        let errored = false;
+        const getProjectsConfigurations = async () => {
+            if (projects || errored) {
+                return projects;
+            }
+            const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+            projects = projectGraph
+                ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
+                : null;
+            errored = error !== undefined;
+            return projects;
+        };
+        return (await Promise.all(generators.map(async (generator) => {
+            if (scheduledGenerators.has(generator) ||
+                !syncGeneratorsCacheResultPromises.has(generator)) {
+                // it's scheduled to run (there are pending changes to process) or
+                // it's not scheduled and there's no cached result, so run it
+                const projects = await getProjectsConfigurations();
+                if (projects) {
+                    log(generator, 'already scheduled or not cached, running it now');
+                    runGenerator(generator, projects);
+                }
+                else {
+                    log(generator, 'already scheduled or not cached, project graph errored');
+                    /**
+                     * This should never happen. This is invoked imperatively, and by
+                     * the time it is invoked, the project graph would have already
+                     * been requested. If it errored, it would have been reported and
+                     * this wouldn't have been invoked. We handle it just in case.
+                     *
+                     * Since the project graph would be reported by the relevant
+                     * handlers separately, we just ignore the error, don't cache
+                     * any result and return an empty result, the next time this is
+                     * invoked the process will repeat until it eventually recovers
+                     * when the project graph is fixed.
+                     */
+                    return Promise.resolve({ changes: [], generatorName: generator });
+                }
+            }
+            else {
+                log(generator, 'not scheduled and has cached result, returning cached result');
+            }
+            return syncGeneratorsCacheResultPromises.get(generator);
+        }))).flat();
+    }
+    catch (e) {
+        console.error(e);
+        syncGeneratorsCacheResultPromises.clear();
+        return [];
+    }
+}
+async function flushSyncGeneratorChangesToDisk(generators) {
+    log('flush sync generators changes', generators);
+    const results = await getCachedSyncGeneratorChanges(generators);
+    for (const generator of generators) {
+        syncGeneratorsCacheResultPromises.delete(generator);
+    }
+    await (0, sync_generators_1.flushSyncGeneratorChanges)(results);
+}
+function collectAndScheduleSyncGenerators(projectGraph) {
+    if (!projectGraph) {
+        // If the project graph is not available, we can't collect and schedule
+        // sync generators. The project graph error will be reported separately.
+        return;
+    }
+    log('collect registered sync generators');
+    collectAllRegisteredSyncGenerators(projectGraph);
+    // a change imply we need to re-run all the generators
+    // make sure to schedule all the collected generators
+    scheduledGenerators.clear();
+    for (const generator of registeredSyncGenerators) {
+        scheduledGenerators.add(generator);
+    }
+    log('scheduling:', [...scheduledGenerators]);
+    if (scheduledTimeoutId) {
+        // we have a scheduled run already, so we don't need to do anything
+        return;
+    }
+    scheduledTimeoutId = setTimeout(async () => {
+        scheduledTimeoutId = undefined;
+        if (waitPeriod < 4000) {
+            waitPeriod = waitPeriod * 2;
+        }
+        if (scheduledGenerators.size === 0) {
+            // no generators to run
+            return;
+        }
+        const { projects } = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);
+        for (const generator of scheduledGenerators) {
+            runGenerator(generator, projects);
+        }
+        await Promise.all(syncGeneratorsCacheResultPromises.values());
+    }, waitPeriod);
+}
+async function getCachedRegisteredSyncGenerators() {
+    log('get registered sync generators');
+    if (!registeredSyncGenerators) {
+        log('no registered sync generators, collecting them');
+        const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
+        collectAllRegisteredSyncGenerators(projectGraph);
+    }
+    else {
+        log('registered sync generators already collected, returning them');
+    }
+    return [...registeredSyncGenerators];
+}
+function collectAllRegisteredSyncGenerators(projectGraph) {
+    const projectGraphHash = hashProjectGraph(projectGraph);
+    if (storedProjectGraphHash !== projectGraphHash) {
+        storedProjectGraphHash = projectGraphHash;
+        registeredTaskSyncGenerators =
+            (0, sync_generators_1.collectRegisteredTaskSyncGenerators)(projectGraph);
+    }
+    else {
+        log('project graph hash is the same, not collecting task sync generators');
+    }
+    const nxJson = (0, nx_json_1.readNxJson)();
+    const nxJsonHash = (0, file_hasher_1.hashArray)(nxJson.sync?.globalGenerators?.sort() ?? []);
+    if (storedNxJsonHash !== nxJsonHash) {
+        storedNxJsonHash = nxJsonHash;
+        registeredGlobalSyncGenerators =
+            (0, sync_generators_1.collectRegisteredGlobalSyncGenerators)(nxJson);
+    }
+    else {
+        log('nx.json hash is the same, not collecting global sync generators');
+    }
+    const generators = new Set([
+        ...registeredTaskSyncGenerators,
+        ...registeredGlobalSyncGenerators,
+    ]);
+    if (!registeredSyncGenerators) {
+        registeredSyncGenerators = generators;
+        return;
+    }
+    for (const generator of registeredSyncGenerators) {
+        if (!generators.has(generator)) {
+            registeredSyncGenerators.delete(generator);
+            syncGeneratorsCacheResultPromises.delete(generator);
+        }
+    }
+    for (const generator of generators) {
+        if (!registeredSyncGenerators.has(generator)) {
+            registeredSyncGenerators.add(generator);
+        }
+    }
+}
+function runGenerator(generator, projects) {
+    log('running scheduled generator', generator);
+    // remove it from the scheduled set
+    scheduledGenerators.delete(generator);
+    const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
+    // run the generator and cache the result
+    syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
+        log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
+        return result;
+    }));
+}
+function hashProjectGraph(projectGraph) {
+    const stringifiedProjects = Object.entries(projectGraph.nodes)
+        .sort(([projectNameA], [projectNameB]) => projectNameA.localeCompare(projectNameB))
+        .map(([projectName, projectConfig]) => `${projectName}:${JSON.stringify(projectConfig)}`);
+    return (0, file_hasher_1.hashArray)(stringifiedProjects);
+}
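Note on the scheduling logic above: runs are debounced with an exponentially growing wait period (starting at 100 ms and doubling up to a 4 s cap), and an on-demand request cancels the pending run and resets the backoff. The following is a minimal standalone sketch of that timing pattern only; the names (schedule, runNow, runAll) are illustrative stand-ins, not the daemon's actual internals.

// Minimal sketch of the debounce-with-backoff scheduling used above.
let waitPeriod = 100;
let timeoutId;
const scheduled = new Set();

function schedule(generatorNames, runAll) {
  for (const name of generatorNames) {
    scheduled.add(name);
  }
  if (timeoutId) {
    // a run is already pending; newly scheduled generators piggyback on it
    return;
  }
  timeoutId = setTimeout(() => {
    timeoutId = undefined;
    // back off: each automatic run doubles the wait, capped at 4 s
    if (waitPeriod < 4000) {
      waitPeriod *= 2;
    }
    const batch = [...scheduled];
    scheduled.clear();
    runAll(batch);
  }, waitPeriod);
}

function runNow(runAll) {
  // an on-demand request cancels the pending run and resets the backoff
  if (timeoutId) {
    clearTimeout(timeoutId);
    timeoutId = undefined;
  }
  waitPeriod = 100;
  runAll([...scheduled]);
  scheduled.clear();
}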
package/src/daemon/server/watcher.js
CHANGED
@@ -10,6 +10,7 @@ const shutdown_utils_1 = require("./shutdown-utils");
 const path_2 = require("../../utils/path");
 const ignore_1 = require("../../utils/ignore");
 const cache_1 = require("../cache");
+const server_1 = require("./server");
 const ALWAYS_IGNORE = [
     ...(0, ignore_1.getAlwaysIgnore)(workspace_root_1.workspaceRoot),
     (0, socket_utils_1.getFullOsSocketPath)(),
@@ -28,6 +29,7 @@ async function watchWorkspace(server, cb) {
             (0, shutdown_utils_1.handleServerProcessTermination)({
                 server,
                 reason: 'this process is no longer the current daemon (native)',
+                sockets: server_1.openSockets,
             });
         }
         if (event.path.endsWith('.gitignore') || event.path === '.nxignore') {
@@ -35,6 +37,7 @@ async function watchWorkspace(server, cb) {
             (0, shutdown_utils_1.handleServerProcessTermination)({
                 server,
                 reason: 'Stopping the daemon the set of ignored files changed (native)',
+                sockets: server_1.openSockets,
             });
         }
     }
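The watcher now passes the daemon's openSockets (exported from server.js) into handleServerProcessTermination, presumably so lingering client connections can be closed and the server can actually finish shutting down. The sketch below shows that generic Node pattern; the openSockets bookkeeping and the handler body are assumptions made for illustration, not the actual Nx implementation.

// Sketch: track open sockets so a server can terminate promptly.
// server.close() only stops new connections; existing ones must be
// ended explicitly or the process lingers until clients disconnect.
const net = require('net');

const openSockets = new Set();
const server = net.createServer((socket) => {
  openSockets.add(socket);
  socket.on('close', () => openSockets.delete(socket));
});

function terminate({ server, reason, sockets }) {
  console.log('shutting down:', reason);
  for (const socket of sockets) {
    socket.end();          // politely close each open connection
    socket.destroySoon();  // then tear it down once flushed
  }
  server.close(() => process.exit(0));
}

// e.g. terminate({ server, reason: 'demo', sockets: openSockets });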
package/src/daemon/socket-utils.js
CHANGED
@@ -15,20 +15,33 @@ exports.isWindows = (0, os_1.platform)() === 'win32';
  * See https://nodejs.org/dist/latest-v14.x/docs/api/net.html#net_identifying_paths_for_ipc_connections for a full breakdown
  * of OS differences between Unix domain sockets and named pipes.
  */
-const getFullOsSocketPath = () =>
-
-
+const getFullOsSocketPath = () => {
+    const path = (0, path_1.resolve)((0, tmp_dir_1.getDaemonSocketDir)());
+    assertValidSocketPath(path);
+    return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
+};
 exports.getFullOsSocketPath = getFullOsSocketPath;
 const getForkedProcessOsSocketPath = (id) => {
     let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(), 'fp' + id + '.sock'));
-
+    assertValidSocketPath(path);
+    return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
 };
 exports.getForkedProcessOsSocketPath = getForkedProcessOsSocketPath;
 const getPluginOsSocketPath = (id) => {
     let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(true), 'plugin' + id + '.sock'));
-
+    assertValidSocketPath(path);
+    return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
 };
 exports.getPluginOsSocketPath = getPluginOsSocketPath;
+function assertValidSocketPath(path) {
+    if (path.length > 95) {
+        throw new Error([
+            'Attempted to open socket that exceeds the maximum socket length.',
+            '',
+            `Set NX_SOCKET_DIR to a shorter path (e.g. ${exports.isWindows ? '%TMP%/nx-tmp' : '/tmp/nx-tmp'}) to avoid this issue.`,
+        ].join('\n'));
+    }
+}
 function killSocketOrPath() {
     try {
         (0, fs_1.unlinkSync)((0, exports.getFullOsSocketPath)());
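Unix domain socket paths are subject to a small OS-level length limit (roughly 104-108 bytes for sun_path on macOS/Linux), which is why the new assertValidSocketPath guard rejects anything longer than 95 characters before the daemon tries to listen on it. Below is a sketch of the same guard applied to a plain net server; the socket path and directory are hypothetical, and it assumes a Unix-like OS (Windows named pipes use a different namespace).

// Sketch: validate a Unix socket path length before listening on it.
const net = require('net');
const path = require('path');
const os = require('os');

const socketDir = process.env.NX_SOCKET_DIR ?? os.tmpdir();
const socketPath = path.resolve(path.join(socketDir, 'example-daemon.sock'));

if (socketPath.length > 95) {
  // exceeding the sockaddr_un limit makes listen() fail
  throw new Error(
    `Socket path too long (${socketPath.length} chars): set NX_SOCKET_DIR to a shorter directory, e.g. /tmp/nx-tmp`
  );
}

const server = net.createServer(() => {});
server.listen(socketPath, () => {
  console.log('listening on', socketPath);
  server.close();
});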
package/src/daemon/tmp-dir.js
CHANGED
@@ -53,7 +53,8 @@ function socketDirName() {
  */
 function getSocketDir(alreadyUnique = false) {
     try {
-        const dir = process.env.
+        const dir = process.env.NX_SOCKET_DIR ??
+            process.env.NX_DAEMON_SOCKET_DIR ??
             (alreadyUnique ? tmp_1.tmpdir : socketDirName());
         (0, fs_extra_1.ensureDirSync)(dir);
         return dir;
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file (contents not shown)
package/src/nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud.js
CHANGED
@@ -94,7 +94,7 @@ function addNxCloudOptionsToNxJson(tree, token, directory = '') {
         });
     }
 }
-function addNxCloudIdToNxJson(tree, nxCloudId, directory =
+function addNxCloudIdToNxJson(tree, nxCloudId, directory = '') {
     const nxJsonPath = (0, path_1.join)(directory, 'nx.json');
     if (tree.exists(nxJsonPath)) {
         (0, json_1.updateJson)(tree, (0, path_1.join)(directory, 'nx.json'), (nxJson) => {
package/src/nx-cloud/models/onboarding-status.d.ts
ADDED
@@ -0,0 +1 @@
+export type NxCloudOnBoardingStatus = 'claimed' | 'unclaimed' | 'not-configured';
package/src/nx-cloud/utilities/is-workspace-claimed.d.ts
ADDED
@@ -0,0 +1 @@
+export declare function isWorkspaceClaimed(nxCloudAccessToken: any): Promise<any>;
package/src/nx-cloud/utilities/is-workspace-claimed.js
ADDED
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isWorkspaceClaimed = isWorkspaceClaimed;
+const get_cloud_options_1 = require("./get-cloud-options");
+async function isWorkspaceClaimed(nxCloudAccessToken) {
+    if (!nxCloudAccessToken)
+        return false;
+    const apiUrl = (0, get_cloud_options_1.getCloudUrl)();
+    try {
+        const response = await require('axios').post(`${apiUrl}/nx-cloud/is-workspace-claimed`, {
+            nxCloudAccessToken,
+        });
+        if (response.data.message) {
+            return false;
+        }
+        else {
+            return response.data;
+        }
+    }
+    catch (e) {
+        // We want to handle cases the if the request fails for any reason
+        return false;
+    }
+}
package/src/nx-cloud/utilities/onboarding.d.ts
ADDED
@@ -0,0 +1,5 @@
+import type { Tree } from '../../generators/tree';
+import { NxCloudOnBoardingStatus } from '../models/onboarding-status';
+export declare function createNxCloudOnboardingURLForWelcomeApp(tree: Tree, token?: string): Promise<NxCloudOnBoardingStatus>;
+export declare function getNxCloudAppOnBoardingUrl(token: string): Promise<string>;
+export declare function readNxCloudToken(tree: Tree): any;
package/src/nx-cloud/utilities/onboarding.js
ADDED
@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createNxCloudOnboardingURLForWelcomeApp = createNxCloudOnboardingURLForWelcomeApp;
+exports.getNxCloudAppOnBoardingUrl = getNxCloudAppOnBoardingUrl;
+exports.readNxCloudToken = readNxCloudToken;
+const devkit_exports_1 = require("../../devkit-exports");
+const is_workspace_claimed_1 = require("./is-workspace-claimed");
+const url_shorten_1 = require("./url-shorten");
+const run_command_1 = require("../../tasks-runner/run-command");
+async function createNxCloudOnboardingURLForWelcomeApp(tree, token) {
+    token = token || readNxCloudToken(tree);
+    if (!token) {
+        return 'not-configured';
+    }
+    return (await (0, is_workspace_claimed_1.isWorkspaceClaimed)(token)) ? 'claimed' : 'unclaimed';
+}
+async function getNxCloudAppOnBoardingUrl(token) {
+    if (!token) {
+        return null;
+    }
+    const onboardingUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)('nx-welcome-app', token);
+    return onboardingUrl;
+}
+function readNxCloudToken(tree) {
+    const nxJson = (0, devkit_exports_1.readNxJson)(tree);
+    const { accessToken } = (0, run_command_1.getRunnerOptions)('default', nxJson, {}, true);
+    return accessToken;
+}
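The new onboarding helpers resolve to one of the three NxCloudOnBoardingStatus values declared above ('claimed', 'unclaimed', 'not-configured'). A hedged usage sketch follows; the deep require path, the surrounding function, and the returned messages are illustrative assumptions, with tree standing in for an Nx devkit Tree from a generator context.

// Sketch of how a caller might branch on the onboarding status.
const {
  createNxCloudOnboardingURLForWelcomeApp,
  getNxCloudAppOnBoardingUrl,
  readNxCloudToken,
} = require('nx/src/nx-cloud/utilities/onboarding');

async function describeCloudSetup(tree) {
  const status = await createNxCloudOnboardingURLForWelcomeApp(tree);
  switch (status) {
    case 'not-configured':
      return 'No Nx Cloud access token found in the runner options.';
    case 'unclaimed': {
      // fetch a short onboarding URL for the welcome app
      const url = await getNxCloudAppOnBoardingUrl(readNxCloudToken(tree));
      return `Workspace is connected but unclaimed; finish setup at ${url}`;
    }
    case 'claimed':
      return 'Workspace is already claimed on Nx Cloud.';
  }
}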
package/src/project-graph/plugins/internal-api.js
CHANGED
@@ -13,7 +13,6 @@ const loader_1 = require("./loader");
 const utils_1 = require("./utils");
 const error_types_1 = require("../error-types");
 const native_1 = require("../../native");
-const os_1 = require("os");
 class LoadedNxPlugin {
     constructor(plugin, pluginDefinition) {
         this.name = plugin.name;
@@ -72,12 +71,24 @@ exports.LoadedNxPlugin = LoadedNxPlugin;
 // Allows loaded plugins to not be reloaded when
 // referenced multiple times.
 exports.nxPluginCache = new Map();
+function isIsolationEnabled() {
+    // Explicitly enabled, regardless of further conditions
+    if (process.env.NX_ISOLATE_PLUGINS === 'true') {
+        return true;
+    }
+    if (
+    // Explicitly disabled
+    process.env.NX_ISOLATE_PLUGINS === 'false' ||
+        // Isolation is disabled on WASM builds currently.
+        native_1.IS_WASM) {
+        return false;
+    }
+    // Default value
+    return true;
+}
 async function loadNxPlugins(plugins, root = workspace_root_1.workspaceRoot) {
     performance.mark('loadNxPlugins:start');
-    const loadingMethod =
-    (!native_1.IS_WASM &&
-        (0, os_1.platform)() !== 'win32' &&
-        process.env.NX_ISOLATE_PLUGINS !== 'false')
+    const loadingMethod = isIsolationEnabled()
         ? isolation_1.loadNxPluginInIsolation
         : loader_1.loadNxPlugin;
     plugins = await normalizePlugins(plugins, root);
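The refactored check replaces the old platform test (which disabled plugin isolation on Windows) with a single NX_ISOLATE_PLUGINS switch plus a WASM guard. The sketch below restates that precedence as a standalone function purely for illustration; pluginIsolationEnabled and isWasm are names invented for the sketch, while the env var name comes from the diff.

// Illustration of the precedence applied by isIsolationEnabled() above.
function pluginIsolationEnabled(env, isWasm) {
  if (env.NX_ISOLATE_PLUGINS === 'true') return true;              // explicit opt-in always wins
  if (env.NX_ISOLATE_PLUGINS === 'false' || isWasm) return false;  // explicit opt-out, or WASM build
  return true;                                                     // default: isolation on
}

// Examples:
console.log(pluginIsolationEnabled({}, false));                              // true  (default)
console.log(pluginIsolationEnabled({ NX_ISOLATE_PLUGINS: 'false' }, false)); // false
console.log(pluginIsolationEnabled({}, true));                               // false (WASM)
console.log(pluginIsolationEnabled({ NX_ISOLATE_PLUGINS: 'true' }, true));   // true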
package/src/project-graph/plugins/isolation/messaging.d.ts
CHANGED
@@ -106,7 +106,11 @@ export interface PluginWorkerProcessProjectGraphResult {
         tx: string;
     };
 }
-export
+export interface PluginWorkerShutdownMessage {
+    type: 'shutdown';
+    payload: {};
+}
+export type PluginWorkerMessage = PluginWorkerLoadMessage | PluginWorkerShutdownMessage | PluginWorkerCreateNodesMessage | PluginCreateDependenciesMessage | PluginWorkerProcessProjectGraphMessage | PluginCreateMetadataMessage;
 export type PluginWorkerResult = PluginWorkerLoadResult | PluginWorkerCreateNodesResult | PluginCreateDependenciesResult | PluginWorkerProcessProjectGraphResult | PluginCreateMetadataResult;
 export declare function isPluginWorkerMessage(message: Serializable): message is PluginWorkerMessage;
 export declare function isPluginWorkerResult(message: Serializable): message is PluginWorkerResult;
package/src/project-graph/plugins/isolation/plugin-pool.js
CHANGED
@@ -29,8 +29,8 @@ async function loadRemoteNxPlugin(plugin, root) {
     const exitHandler = createWorkerExitHandler(worker, pendingPromises);
     const cleanupFunction = () => {
         worker.off('exit', exitHandler);
+        shutdownPluginWorker(socket);
         socket.destroy();
-        shutdownPluginWorker(worker);
         nxPluginWorkerCache.delete(cacheKey);
     };
     cleanupFunctions.add(cleanupFunction);
@@ -55,11 +55,8 @@ async function loadRemoteNxPlugin(plugin, root) {
     nxPluginWorkerCache.set(cacheKey, pluginPromise);
     return [pluginPromise, cleanupFunction];
 }
-function shutdownPluginWorker(
-
-    internal_api_1.nxPluginCache.clear();
-    // logger.verbose(`[plugin-pool] starting worker shutdown`);
-    worker.kill('SIGINT');
+function shutdownPluginWorker(socket) {
+    (0, messaging_1.sendMessageOverSocket)(socket, { type: 'shutdown', payload: {} });
 }
 /**
  * Creates a message handler for the given worker.
@@ -200,6 +197,7 @@ function createWorkerExitHandler(worker, pendingPromises) {
     }
     let cleanedUp = false;
     const exitHandler = () => {
+        internal_api_1.nxPluginCache.clear();
         for (const fn of cleanupFunctions) {
             fn();
         }
package/src/project-graph/plugins/isolation/plugin-worker.js
CHANGED
@@ -49,6 +49,21 @@ const server = (0, net_1.createServer)((socket) => {
             };
         }
     },
+    shutdown: async () => {
+        // Stops accepting new connections, but existing connections are
+        // not closed immediately.
+        server.close(() => {
+            try {
+                (0, fs_1.unlinkSync)(socketPath);
+            }
+            catch (e) { }
+            process.exit(0);
+        });
+        // Closes existing connection.
+        socket.end();
+        // Destroys the socket once it's fully closed.
+        socket.destroySoon();
+    },
    createNodes: async ({ configFiles, context, tx }) => {
        try {
            const result = await plugin.createNodes[1](configFiles, context);
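Taken together, the plugin-pool and plugin-worker changes replace the old SIGINT kill with a message-based handshake: the host sends { type: 'shutdown' } over the IPC socket, and the worker closes its server, unlinks the socket file, and exits. The sketch below demonstrates that handshake over a local Unix socket; the socket path, the newline-delimited JSON framing, and the log-instead-of-exit behavior are simplifications for the example, not the real Nx messaging layer.

// Sketch of a shutdown handshake over a Unix domain socket.
const net = require('net');
const fs = require('fs');
const os = require('os');
const path = require('path');

const socketPath = path.join(os.tmpdir(), `shutdown-demo-${process.pid}.sock`);

// "worker": accepts messages and shuts down cleanly on { type: 'shutdown' }
const server = net.createServer((socket) => {
  socket.on('data', (buf) => {
    const message = JSON.parse(buf.toString().trim());
    if (message.type === 'shutdown') {
      server.close(() => {
        try {
          fs.unlinkSync(socketPath); // remove the stale socket file
        } catch {}
        console.log('worker: shut down cleanly');
      });
      socket.end();          // close the existing connection...
      socket.destroySoon();  // ...and destroy it once flushed
    }
  });
});

server.listen(socketPath, () => {
  // "host": connect and request shutdown instead of sending SIGINT
  const client = net.connect(socketPath, () => {
    client.end(JSON.stringify({ type: 'shutdown', payload: {} }) + '\n');
  });
});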
package/src/project-graph/utils/project-configuration-utils.js
CHANGED
@@ -446,7 +446,7 @@ function normalizeTargets(project, sourceMaps, nxJsonConfiguration) {
         project.targets[targetName] = normalizeTarget(project.targets[targetName], project);
         const projectSourceMaps = sourceMaps[project.root];
         const targetConfig = project.targets[targetName];
-        const targetDefaults = readTargetDefaultsForTarget(targetName, nxJsonConfiguration.targetDefaults, targetConfig.executor);
+        const targetDefaults = deepClone(readTargetDefaultsForTarget(targetName, nxJsonConfiguration.targetDefaults, targetConfig.executor));
         // We only apply defaults if they exist
         if (targetDefaults && isCompatibleTarget(targetConfig, targetDefaults)) {
             project.targets[targetName] = mergeTargetDefaultWithTargetDefinition(targetName, project, normalizeTarget(targetDefaults, project), projectSourceMaps);
@@ -499,9 +499,12 @@ function targetDefaultShouldBeApplied(key, sourceMap) {
     const [, plugin] = sourceInfo;
     return !plugin?.startsWith('nx/');
 }
+function deepClone(obj) {
+    return JSON.parse(JSON.stringify(obj));
+}
 function mergeTargetDefaultWithTargetDefinition(targetName, project, targetDefault, sourceMap) {
     const targetDefinition = project.targets[targetName] ?? {};
-    const result =
+    const result = deepClone(targetDefinition);
     for (const key in targetDefault) {
         switch (key) {
             case 'options': {
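The deepClone calls ensure that merging a project's target configuration never mutates the shared targetDefaults object (or the cached target definition itself), which would otherwise leak one project's merged options into every other project using the same default. The sketch below shows the failure mode being avoided; the target names and options are made up for illustration and this is not the real Nx merge logic.

// Why clone before merging: mutating a shared defaults object leaks state.
const targetDefaults = { build: { options: { verbose: false } } };

function mergeWithoutClone(projectTarget) {
  const defaults = targetDefaults.build;          // shared reference!
  Object.assign(defaults.options, projectTarget.options);
  return defaults;
}

function mergeWithClone(projectTarget) {
  const defaults = JSON.parse(JSON.stringify(targetDefaults.build)); // deep clone
  Object.assign(defaults.options, projectTarget.options);
  return defaults;
}

mergeWithoutClone({ options: { verbose: true } });
console.log(targetDefaults.build.options.verbose); // true -- shared defaults were corrupted

targetDefaults.build.options.verbose = false;       // reset
mergeWithClone({ options: { verbose: true } });
console.log(targetDefaults.build.options.verbose); // false -- defaults untouched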
package/src/tasks-runner/run-command.d.ts
CHANGED
@@ -5,7 +5,7 @@ import { TargetDependencyConfig } from '../config/workspace-json-project-json';
 import { NxArgs } from '../utils/command-line-utils';
 import { LifeCycle } from './life-cycle';
 import { TasksRunner } from './tasks-runner';
-export declare function runCommand(projectsToRun: ProjectGraphProjectNode[],
+export declare function runCommand(projectsToRun: ProjectGraphProjectNode[], currentProjectGraph: ProjectGraph, { nxJson }: {
     nxJson: NxJsonConfiguration;
 }, nxArgs: NxArgs, overrides: any, initiatingProject: string | null, extraTargetDependencies: Record<string, (TargetDependencyConfig | string)[]>, extraOptions: {
     excludeTaskDependencies: boolean;