nx 19.8.1 → 19.8.2
- package/.eslintrc.json +9 -1
- package/package.json +12 -13
- package/src/command-line/graph/graph.js +9 -9
- package/src/command-line/init/implementation/add-nx-to-nest.js +5 -5
- package/src/command-line/init/implementation/react/clean-up-files.js +7 -7
- package/src/command-line/init/implementation/react/index.js +19 -12
- package/src/command-line/init/implementation/react/rename-js-to-jsx.js +3 -3
- package/src/command-line/release/changelog.js +1 -2
- package/src/command-line/release/config/version-plans.js +6 -7
- package/src/command-line/release/plan.js +6 -5
- package/src/command-line/release/release.js +2 -2
- package/src/command-line/reset/reset.js +4 -4
- package/src/daemon/cache.d.ts +1 -2
- package/src/daemon/cache.js +12 -21
- package/src/daemon/client/client.js +9 -8
- package/src/daemon/tmp-dir.js +6 -7
- package/src/generators/tree.d.ts +1 -1
- package/src/generators/tree.js +11 -11
- package/src/native/nx.wasm32-wasi.wasm +0 -0
- package/src/plugins/js/index.js +1 -2
- package/src/project-graph/nx-deps-cache.js +5 -6
- package/src/tasks-runner/cache.js +17 -16
- package/src/tasks-runner/remove-old-cache-records.js +2 -3
- package/src/utils/fileutils.d.ts +9 -1
- package/src/utils/fileutils.js +29 -12
- package/src/utils/ignore.js +2 -2
- package/src/utils/package-manager.js +2 -2
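
The substance of this patch is a single dependency cleanup: `fs-extra` is dropped from `package.json`, and every call site in the compiled sources below now uses the equivalent built-ins from `node:fs` (or `node:fs/promises` at async call sites). A minimal sketch of the recurring substitutions, assuming fs-extra's documented behavior; the paths are placeholders, not taken from the diff:

    const fs = require('node:fs');
    // fsExtra.removeSync(p)        ->  rmSync with recursive + force
    fs.rmSync('some-dir', { recursive: true, force: true });
    // fsExtra.ensureDirSync(p)     ->  mkdirSync with recursive
    fs.mkdirSync('some/nested/dir', { recursive: true });
    // fsExtra.copySync(src, dest)  ->  cpSync with recursive (Node >= 16.7)
    fs.cpSync('src-dir', 'dest-dir', { recursive: true });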
package/.eslintrc.json
CHANGED
@@ -5,7 +5,15 @@
   "overrides": [
     {
       "files": ["*.ts", "*.tsx", "*.js", "*.jsx"],
-      "rules": {
+      "rules": {
+        "no-restricted-imports": [
+          "error",
+          {
+            "name": "fs-extra",
+            "message": "Please use equivalent utilities from `node:fs` instead."
+          }
+        ]
+      }
     },
     {
       "files": ["*.ts"],
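
The new `no-restricted-imports` entry turns the removal into a guardrail: any future import of `fs-extra` in `.ts`, `.tsx`, `.js`, or `.jsx` sources is reported with the configured message. An illustrative pair (note the core rule flags `import` declarations, not `require()` calls):

    import { removeSync } from 'fs-extra'; // reported: "Please use equivalent utilities from `node:fs` instead."
    import { rmSync } from 'node:fs';      // allowed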
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "nx",
-  "version": "19.8.1",
+  "version": "19.8.2",
   "private": false,
   "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",
   "repository": {
@@ -49,7 +49,6 @@
     "figures": "3.2.0",
     "flat": "^5.0.2",
     "front-matter": "^4.0.2",
-    "fs-extra": "^11.1.0",
     "ignore": "^5.0.4",
     "jest-diff": "^29.4.1",
     "jsonc-parser": "3.2.0",
@@ -68,7 +67,7 @@
     "yargs-parser": "21.1.1",
     "node-machine-id": "1.1.12",
     "ora": "5.3.0",
-    "@nrwl/tao": "19.8.1"
+    "@nrwl/tao": "19.8.2"
   },
   "peerDependencies": {
     "@swc-node/register": "^1.8.0",
@@ -83,16 +82,16 @@
     }
   },
   "optionalDependencies": {
-    "@nx/nx-darwin-x64": "19.8.1",
-    "@nx/nx-darwin-arm64": "19.8.1",
-    "@nx/nx-linux-x64-gnu": "19.8.1",
-    "@nx/nx-linux-x64-musl": "19.8.1",
-    "@nx/nx-win32-x64-msvc": "19.8.1",
-    "@nx/nx-linux-arm64-gnu": "19.8.1",
-    "@nx/nx-linux-arm64-musl": "19.8.1",
-    "@nx/nx-linux-arm-gnueabihf": "19.8.1",
-    "@nx/nx-win32-arm64-msvc": "19.8.1",
-    "@nx/nx-freebsd-x64": "19.8.1"
+    "@nx/nx-darwin-x64": "19.8.2",
+    "@nx/nx-darwin-arm64": "19.8.2",
+    "@nx/nx-linux-x64-gnu": "19.8.2",
+    "@nx/nx-linux-x64-musl": "19.8.2",
+    "@nx/nx-win32-x64-msvc": "19.8.2",
+    "@nx/nx-linux-arm64-gnu": "19.8.2",
+    "@nx/nx-linux-arm64-musl": "19.8.2",
+    "@nx/nx-linux-arm-gnueabihf": "19.8.2",
+    "@nx/nx-win32-arm64-msvc": "19.8.2",
+    "@nx/nx-freebsd-x64": "19.8.2"
   },
   "nx-migrations": {
     "migrations": "./migrations.json",
package/src/command-line/graph/graph.js
CHANGED
@@ -3,8 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.generateGraph = generateGraph;
 const crypto_1 = require("crypto");
 const node_child_process_1 = require("node:child_process");
-const
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const http = require("http");
 const minimatch_1 = require("minimatch");
 const node_url_1 = require("node:url");
@@ -225,7 +224,7 @@ async function generateGraph(args, affectedProjects) {
             process.exit(1);
         }
     }
-    let html = (0,
+    let html = (0, node_fs_1.readFileSync)((0, path_1.join)(__dirname, '../../core/graph/index.html'), 'utf-8');
     prunedGraph = filterGraph(prunedGraph, args.focus || null, args.exclude || []);
     if (args.file) {
         // stdout is a magical constant that doesn't actually write a file
@@ -243,7 +242,7 @@ async function generateGraph(args, affectedProjects) {
         if (ext === '.html') {
             const assetsFolder = (0, path_1.join)(fileFolderPath, 'static');
             const assets = [];
-            (0,
+            (0, node_fs_1.cpSync)((0, path_1.join)(__dirname, '../../core/graph'), assetsFolder, {
                 filter: (_src, dest) => {
                     const isntHtml = !/index\.html/.test(dest);
                     if (isntHtml && dest.includes('.')) {
@@ -251,6 +250,7 @@ async function generateGraph(args, affectedProjects) {
                    }
                    return isntHtml;
                },
+               recursive: true,
            });
            const { projectGraphClientResponse } = await createProjectGraphAndSourceMapClientResponse(affectedProjects);
            const taskGraphClientResponse = await createTaskGraphClientResponse();
@@ -260,15 +260,15 @@ async function generateGraph(args, affectedProjects) {
            html = html.replace(/href="styles/g, 'href="static/styles');
            html = html.replace(/<base href="\/".*>/g, '');
            html = html.replace(/type="module"/g, '');
-           (0,
-           (0,
+           (0, node_fs_1.writeFileSync)(fullFilePath, html);
+           (0, node_fs_1.writeFileSync)((0, path_1.join)(assetsFolder, 'environment.js'), environmentJs);
            output_1.output.success({
                title: `HTML output created in ${fileFolderPath}`,
                bodyLines: [fileFolderPath, ...assets],
            });
        }
        else if (ext === '.json') {
-           (0,
+           (0, node_fs_1.mkdirSync)((0, path_1.dirname)(fullFilePath), { recursive: true });
            const json = await createJsonOutput(prunedGraph, rawGraph, args.projects, args.targets);
            (0, fileutils_1.writeJsonFile)(fullFilePath, json);
            output_1.output.success({
@@ -394,13 +394,13 @@ async function startServer(html, environmentJs, host, port = 4211, watchForChang
        }
        let pathname = (0, path_1.join)(__dirname, '../../core/graph/', sanitizePath);
        // if the file is not found or is a directory, return index.html
-       if (!(0,
+       if (!(0, node_fs_1.existsSync)(pathname) || (0, node_fs_1.statSync)(pathname).isDirectory()) {
            res.writeHead(200, { 'Content-Type': 'text/html' });
            res.end(html);
            return;
        }
        try {
-           const data = (0,
+           const data = (0, node_fs_1.readFileSync)(pathname);
            const ext = (0, path_1.parse)(pathname).ext;
            res.setHeader('Content-type', mimeType[ext] || 'text/plain');
            res.end(data);
package/src/command-line/init/implementation/add-nx-to-nest.js
CHANGED
@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.addNxToNest = addNxToNest;
 const enquirer = require("enquirer");
-const
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const fileutils_1 = require("../../../utils/fileutils");
 const output_1 = require("../../../utils/output");
@@ -214,7 +214,7 @@ function getJestOptions(isE2E, repoRoot, packageName, existingOptions) {
 function tryCreateJestPreset(repoRoot) {
     const jestPresetPath = (0, path_1.join)(repoRoot, 'jest.preset.js');
     if (!(0, fileutils_1.fileExists)(jestPresetPath)) {
-        (0,
+        (0, node_fs_1.writeFileSync)(jestPresetPath, `
 const nxPreset = require('@nx/jest/preset').default;
 module.exports = {...nxPreset};
 `, 'utf8');
@@ -231,8 +231,8 @@ function addJestTargets(repoRoot, packageName, projectJson, packageJson) {
     if (isPresetCreated) {
         unitTestOptions['preset'] = e2eTestOptions['preset'] = './jest.preset.js';
     }
-    (0,
-    (0,
+    (0, node_fs_1.writeFileSync)(unitTestConfigPath, `export default ${JSON.stringify(unitTestOptions, null, 2)}`, 'utf8');
+    (0, node_fs_1.writeFileSync)(e2eTestConfigPath, `export default ${JSON.stringify(e2eTestOptions, null, 2)}`, 'utf8');
     projectJson.targets['test'] = {
         executor: '@nx/jest:jest',
         outputs: [`{workspaceRoot}/coverage/${packageName}`],
@@ -312,7 +312,7 @@ function updateTsConfig(repoRoot, sourceRoot) {
 }
 function removeFile(repoRoot, file) {
     const path = (0, path_1.join)(repoRoot, file);
-    (0,
+    (0, node_fs_1.unlinkSync)(path);
 }
 function mergeWithDefaultConfig(config) {
     const defaultNestCliConfigurations = {
package/src/command-line/init/implementation/react/clean-up-files.js
CHANGED
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.cleanUpFiles = cleanUpFiles;
-const
+const node_fs_1 = require("node:fs");
 const fileutils_1 = require("../../../../utils/fileutils");
 function cleanUpFiles(appName, isStandalone) {
     // Delete targets from project since we delegate to npm scripts.
@@ -19,12 +19,12 @@ function cleanUpFiles(appName, isStandalone) {
         }
     }
     (0, fileutils_1.writeJsonFile)(projectJsonPath, json);
-    (0,
+    (0, node_fs_1.rmSync)('temp-workspace', { recursive: true, force: true });
     if (isStandalone) {
-        (0,
-        (0,
-        (0,
-        (0,
-        (0,
+        (0, node_fs_1.rmSync)('babel.config.json', { recursive: true, force: true });
+        (0, node_fs_1.rmSync)('jest.preset.js', { recursive: true, force: true });
+        (0, node_fs_1.rmSync)('jest.config.ts', { recursive: true, force: true });
+        (0, node_fs_1.rmSync)('libs', { recursive: true, force: true });
+        (0, node_fs_1.rmSync)('tools', { recursive: true, force: true });
     }
 }
package/src/command-line/init/implementation/react/index.js
CHANGED
@@ -2,7 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.addNxToCraRepo = addNxToCraRepo;
 const child_process_1 = require("child_process");
-const
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const fileutils_1 = require("../../../../utils/fileutils");
 const output_1 = require("../../../../utils/output");
@@ -116,13 +116,16 @@ async function reorgnizeWorkspaceStructure(options) {
     }
 }
 function createTempWorkspace(options) {
-    (0,
+    (0, node_fs_1.rmSync)('temp-workspace', { recursive: true, force: true });
     (0, child_process_1.execSync)(`npx ${options.npxYesFlagNeeded ? '-y' : ''} create-nx-workspace@latest temp-workspace --appName=${options.reactAppName} --preset=react-monorepo --style=css --bundler=${options.isVite ? 'vite' : 'webpack'} --packageManager=${options.packageManager} ${options.nxCloud ? '--nxCloud=yes' : '--nxCloud=skip'} ${options.addE2e ? '--e2eTestRunner=playwright' : '--e2eTestRunner=none'}`, { stdio: [0, 1, 2], windowsHide: true });
     output_1.output.log({ title: '👋 Welcome to Nx!' });
     output_1.output.log({ title: '🧹 Clearing unused files' });
-    (0,
-    (0,
-
+    (0, node_fs_1.cpSync)((0, path_1.join)('temp-workspace', 'apps', options.reactAppName, 'project.json'), 'project.json', { recursive: true });
+    (0, node_fs_1.rmSync)((0, path_1.join)('temp-workspace', 'apps', options.reactAppName), {
+        recursive: true,
+        force: true,
+    });
+    (0, node_fs_1.rmSync)('node_modules', { recursive: true, force: true });
 }
 function copyPackageJsonDepsFromTempWorkspace() {
     const repoRoot = process.cwd();
@@ -151,6 +154,12 @@ function overridePackageDeps(depConfigName, base, override) {
     });
     return base;
 }
+function moveSync(src, dest) {
+    const destParentDir = (0, path_1.dirname)(dest);
+    (0, node_fs_1.mkdirSync)(destParentDir, { recursive: true });
+    (0, node_fs_1.rmSync)(dest, { recursive: true, force: true });
+    return (0, node_fs_1.renameSync)(src, dest);
+}
 function moveFilesToTempWorkspace(options) {
     output_1.output.log({ title: '🚚 Moving your React app in your new Nx workspace' });
     copyPackageJsonDepsFromTempWorkspace();
@@ -169,11 +178,9 @@ function moveFilesToTempWorkspace(options) {
     const filesToMove = [...requiredCraFiles, ...optionalCraFiles].filter(Boolean);
     filesToMove.forEach((f) => {
         try {
-
+            moveSync(f, options.isStandalone
                 ? (0, path_1.join)('temp-workspace', f)
-                : (0, path_1.join)('temp-workspace', 'apps', options.reactAppName, f)
-                overwrite: true,
-            });
+                : (0, path_1.join)('temp-workspace', 'apps', options.reactAppName, f));
         }
         catch (error) {
             if (requiredCraFiles.includes(f)) {
@@ -208,8 +215,8 @@ async function addBundler(options) {
 }
 function copyFromTempWorkspaceToRoot() {
     output_1.output.log({ title: '🚚 Folder restructuring.' });
-    (0,
-
+    (0, node_fs_1.readdirSync)('temp-workspace').forEach((f) => {
+        moveSync((0, path_1.join)('temp-workspace', f), f);
     });
 }
 function cleanUpUnusedFilesAndAddConfigFiles(options) {
@@ -218,6 +225,6 @@ function cleanUpUnusedFilesAndAddConfigFiles(options) {
     output_1.output.log({ title: "📃 Extend the app's tsconfig.json from the base" });
     (0, tsconfig_setup_1.setupTsConfig)(options.reactAppName, options.isStandalone);
     if (options.isStandalone) {
-        (0,
+        (0, node_fs_1.rmSync)('apps', { recursive: true, force: true });
     }
 }
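
The `moveSync` helper introduced above stands in for fs-extra's `moveSync(src, dest, { overwrite: true })`: create the destination's parent directory, force-remove any existing destination, then rename. A hypothetical call mirroring how `moveFilesToTempWorkspace` uses it; note that `renameSync` only works within a single filesystem, whereas fs-extra's move also handled cross-device moves by copying:

    // Illustrative paths only.
    moveSync('package.json', (0, path_1.join)('temp-workspace', 'apps', 'my-app', 'package.json'));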
package/src/command-line/init/implementation/react/rename-js-to-jsx.js
CHANGED
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.renameJsToJsx = renameJsToJsx;
-const
+const node_fs_1 = require("node:fs");
 const workspace_context_1 = require("../../../../utils/workspace-context");
 const fileutils_1 = require("../../../../utils/fileutils");
 // Vite cannot process JSX like <div> or <Header> unless the file is named .jsx or .tsx
@@ -11,11 +11,11 @@ async function renameJsToJsx(appName, isStandalone) {
     ]);
     files.forEach((file) => {
         if ((0, fileutils_1.fileExists)(file)) {
-            const content = (0,
+            const content = (0, node_fs_1.readFileSync)(file).toString();
             // Try to detect JSX before renaming to .jsx
             // Files like setupTests.js from CRA should not be renamed
             if (/<[a-zA-Z0-9]+/.test(content)) {
-                (0,
+                (0, node_fs_1.renameSync)(file, `${file}x`);
             }
         }
     });
package/src/command-line/release/changelog.js
CHANGED
@@ -5,7 +5,6 @@ exports.createAPI = createAPI;
 exports.shouldCreateGitHubRelease = shouldCreateGitHubRelease;
 const chalk = require("chalk");
 const enquirer_1 = require("enquirer");
-const fs_extra_1 = require("fs-extra");
 const node_fs_1 = require("node:fs");
 const semver_1 = require("semver");
 const tmp_1 = require("tmp");
@@ -620,7 +619,7 @@ async function applyChangesAndExit(args, nxReleaseConfig, tree, toSHA, postGitTa
     if (group.resolvedVersionPlans) {
         group.resolvedVersionPlans.forEach((plan) => {
             if (!args.dryRun) {
-                (0,
+                (0, node_fs_1.rmSync)(plan.absolutePath, { recursive: true, force: true });
                 if (args.verbose) {
                     console.log(`Removing ${plan.relativePath}`);
                 }
package/src/command-line/release/config/version-plans.js
CHANGED
@@ -3,9 +3,9 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.readRawVersionPlans = readRawVersionPlans;
 exports.setResolvedVersionPlansOnGroups = setResolvedVersionPlansOnGroups;
 exports.getVersionPlansAbsolutePath = getVersionPlansAbsolutePath;
-const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
 const node_child_process_1 = require("node:child_process");
+const node_fs_1 = require("node:fs");
+const promises_1 = require("node:fs/promises");
 const path_1 = require("path");
 const semver_1 = require("semver");
 const workspace_root_1 = require("../../../utils/workspace-root");
@@ -14,16 +14,15 @@ const fm = require('front-matter');
 const versionPlansDirectory = (0, path_1.join)('.nx', 'version-plans');
 async function readRawVersionPlans() {
     const versionPlansPath = getVersionPlansAbsolutePath();
-
-    if (!versionPlansPathExists) {
+    if (!(0, node_fs_1.existsSync)(versionPlansPath)) {
         return [];
     }
     const versionPlans = [];
-    const versionPlanFiles = (0,
+    const versionPlanFiles = (0, node_fs_1.readdirSync)(versionPlansPath);
     for (const versionPlanFile of versionPlanFiles) {
         const filePath = (0, path_1.join)(versionPlansPath, versionPlanFile);
-        const versionPlanContent = (0,
-        const versionPlanStats = await (0,
+        const versionPlanContent = (0, node_fs_1.readFileSync)(filePath).toString();
+        const versionPlanStats = await (0, promises_1.stat)(filePath);
         const parsedContent = fm(versionPlanContent);
         versionPlans.push({
             absolutePath: filePath,
package/src/command-line/release/plan.js
CHANGED
@@ -3,7 +3,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.releasePlanCLIHandler = void 0;
 exports.createAPI = createAPI;
 const enquirer_1 = require("enquirer");
-const
+const node_fs_1 = require("node:fs");
+const promises_1 = require("node:fs/promises");
 const node_path_1 = require("node:path");
 const semver_1 = require("semver");
 const tmp_1 = require("tmp");
@@ -193,8 +194,8 @@ async function createVersionPlanFileForBumps(args, versionPlanBumps) {
         output_1.output.logSingleLine(`Creating version plan file "${versionPlanFileName}"`);
         (0, print_changes_1.printDiff)('', versionPlanFileContent, 1);
         const versionPlansAbsolutePath = (0, version_plans_1.getVersionPlansAbsolutePath)();
-        await (0,
-        await (0,
+        await (0, promises_1.mkdir)(versionPlansAbsolutePath, { recursive: true });
+        await (0, promises_1.writeFile)((0, node_path_1.join)(versionPlansAbsolutePath, versionPlanFileName), versionPlanFileContent);
     }
 }
 async function promptForVersion(message) {
@@ -236,9 +237,9 @@ async function _promptForMessage(versionPlanName) {
     if (!message.length) {
         const tmpDir = (0, tmp_1.dirSync)().name;
         const messageFilePath = (0, node_path_1.join)(tmpDir, `DRAFT_MESSAGE__${versionPlanName}.md`);
-        (0,
+        (0, node_fs_1.writeFileSync)(messageFilePath, '');
         await (0, launch_editor_1.launchEditor)(messageFilePath);
-        message = (0,
+        message = (0, node_fs_1.readFileSync)(messageFilePath, 'utf-8');
     }
     message = message.trim();
     if (!message) {
package/src/command-line/release/release.js
CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.releaseCLIHandler = void 0;
 exports.createAPI = createAPI;
 const enquirer_1 = require("enquirer");
-const
+const node_fs_1 = require("node:fs");
 const nx_json_1 = require("../../config/nx-json");
 const file_map_utils_1 = require("../../project-graph/file-map-utils");
 const project_graph_1 = require("../../project-graph/project-graph");
@@ -106,7 +106,7 @@ function createAPI(overrideReleaseConfig) {
             }
             group.resolvedVersionPlans.forEach((plan) => {
                 if (!args.dryRun) {
-                    (0,
+                    (0, node_fs_1.rmSync)(plan.absolutePath, { recursive: true, force: true });
                     if (args.verbose) {
                         console.log(`Removing ${plan.relativePath}`);
                     }
package/src/command-line/reset/reset.js
CHANGED
@@ -1,7 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.resetHandler = resetHandler;
-const
+const node_fs_1 = require("node:fs");
 const client_1 = require("../../daemon/client/client");
 const cache_directory_1 = require("../../utils/cache-directory");
 const output_1 = require("../../utils/output");
@@ -92,17 +92,17 @@ async function resetCloudClient() {
 }
 function cleanupCacheEntries() {
     return incrementalBackoff(INCREMENTAL_BACKOFF_FIRST_DELAY, INCREMENTAL_BACKOFF_MAX_DURATION, () => {
-        (0,
+        (0, node_fs_1.rmSync)(cache_directory_1.cacheDir, { recursive: true, force: true });
     });
 }
 function cleanupNativeFileCache() {
     return incrementalBackoff(INCREMENTAL_BACKOFF_FIRST_DELAY, INCREMENTAL_BACKOFF_MAX_DURATION, () => {
-        (0,
+        (0, node_fs_1.rmSync)((0, native_file_cache_location_1.getNativeFileCacheLocation)(), { recursive: true, force: true });
     });
 }
 function cleanupWorkspaceData() {
     return incrementalBackoff(INCREMENTAL_BACKOFF_FIRST_DELAY, INCREMENTAL_BACKOFF_MAX_DURATION, () => {
-        (0,
+        (0, node_fs_1.rmSync)(cache_directory_1.workspaceDataDirectory, { recursive: true, force: true });
     });
 }
 async function incrementalBackoff(ms, maxDuration, callback) {
package/src/daemon/cache.d.ts
CHANGED
@@ -2,9 +2,8 @@ export interface DaemonProcessJson {
     processId: number;
 }
 export declare const serverProcessJsonPath: string;
-export declare function readDaemonProcessJsonCache():
+export declare function readDaemonProcessJsonCache(): DaemonProcessJson | null;
 export declare function deleteDaemonJsonProcessCache(): void;
 export declare function writeDaemonJsonProcessCache(daemonJson: DaemonProcessJson): Promise<void>;
 export declare function waitForDaemonToExitAndCleanupProcessJson(): Promise<void>;
-export declare function safelyCleanUpExistingProcess(): Promise<void>;
 export declare function getDaemonProcessIdSync(): number | null;
package/src/daemon/cache.js
CHANGED
@@ -5,31 +5,33 @@ exports.readDaemonProcessJsonCache = readDaemonProcessJsonCache;
 exports.deleteDaemonJsonProcessCache = deleteDaemonJsonProcessCache;
 exports.writeDaemonJsonProcessCache = writeDaemonJsonProcessCache;
 exports.waitForDaemonToExitAndCleanupProcessJson = waitForDaemonToExitAndCleanupProcessJson;
-exports.safelyCleanUpExistingProcess = safelyCleanUpExistingProcess;
 exports.getDaemonProcessIdSync = getDaemonProcessIdSync;
-const
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const tmp_dir_1 = require("./tmp-dir");
+const fileutils_1 = require("../utils/fileutils");
 exports.serverProcessJsonPath = (0, path_1.join)(tmp_dir_1.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'server-process.json');
-
-    if (!(0,
+function readDaemonProcessJsonCache() {
+    if (!(0, node_fs_1.existsSync)(exports.serverProcessJsonPath)) {
         return null;
     }
-    return
+    return (0, fileutils_1.readJsonFile)(exports.serverProcessJsonPath);
 }
 function deleteDaemonJsonProcessCache() {
     try {
         if (getDaemonProcessIdSync() === process.pid) {
-            (0,
+            (0, node_fs_1.unlinkSync)(exports.serverProcessJsonPath);
         }
     }
     catch { }
 }
 async function writeDaemonJsonProcessCache(daemonJson) {
-    await (0,
+    await (0, fileutils_1.writeJsonFileAsync)(exports.serverProcessJsonPath, daemonJson, {
+        appendNewLine: true,
+    });
 }
 async function waitForDaemonToExitAndCleanupProcessJson() {
-    const daemonProcessJson =
+    const daemonProcessJson = readDaemonProcessJsonCache();
     if (daemonProcessJson && daemonProcessJson.processId) {
         await new Promise((resolve, reject) => {
             let count = 0;
@@ -51,24 +53,13 @@ async function waitForDaemonToExitAndCleanupProcessJson() {
         deleteDaemonJsonProcessCache();
     }
 }
-async function safelyCleanUpExistingProcess() {
-    const daemonProcessJson = await readDaemonProcessJsonCache();
-    if (daemonProcessJson && daemonProcessJson.processId) {
-        try {
-            process.kill(daemonProcessJson.processId);
-            // we wait for the process to actually shut down before returning
-            await waitForDaemonToExitAndCleanupProcessJson();
-        }
-        catch { }
-    }
-}
 // Must be sync for the help output use case
 function getDaemonProcessIdSync() {
-    if (!(0,
+    if (!(0, node_fs_1.existsSync)(exports.serverProcessJsonPath)) {
         return null;
     }
     try {
-        const daemonProcessJson = (0,
+        const daemonProcessJson = (0, fileutils_1.readJsonFile)(exports.serverProcessJsonPath);
         return daemonProcessJson.processId;
     }
     catch {
package/src/daemon/client/client.js
CHANGED
@@ -4,9 +4,8 @@ exports.daemonClient = exports.DaemonClient = void 0;
 exports.isDaemonEnabled = isDaemonEnabled;
 const workspace_root_1 = require("../../utils/workspace-root");
 const child_process_1 = require("child_process");
-const
+const node_fs_1 = require("node:fs");
 const promises_1 = require("fs/promises");
-const fs_extra_1 = require("fs-extra");
 const net_1 = require("net");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
@@ -409,8 +408,10 @@ class DaemonClient {
         }
     }
     async startInBackground() {
-        (0,
-        (0,
+        (0, node_fs_1.mkdirSync)(tmp_dir_1.DAEMON_DIR_FOR_CURRENT_WORKSPACE, { recursive: true });
+        if (!(0, node_fs_1.existsSync)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE)) {
+            (0, node_fs_1.writeFileSync)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE, '');
+        }
         this._out = await (0, promises_1.open)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE, 'a');
         this._err = await (0, promises_1.open)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE, 'a');
         const backgroundProcess = (0, child_process_1.spawn)(process.execPath, [(0, path_1.join)(__dirname, `../server/start.js`)], {
@@ -454,7 +455,7 @@
         catch (err) {
             output_1.output.error({
                 title: err?.message ||
-                    'Something unexpected went wrong when stopping the server',
+                    'Something unexpected went wrong when stopping the daemon server',
             });
         }
         (0, tmp_dir_1.removeSocketDir)();
@@ -467,12 +468,12 @@ function isDaemonEnabled() {
 }
 function isDocker() {
     try {
-        (0,
+        (0, node_fs_1.statSync)('/.dockerenv');
         return true;
     }
     catch {
         try {
-            return (0,
+            return (0, node_fs_1.readFileSync)('/proc/self/cgroup', 'utf8')?.includes('docker');
         }
         catch { }
         return false;
@@ -483,7 +484,7 @@ function nxJsonIsNotPresent() {
 }
 function daemonProcessException(message) {
     try {
-        let log = (0,
+        let log = (0, node_fs_1.readFileSync)(tmp_dir_1.DAEMON_OUTPUT_LOG_FILE).toString().split('\n');
         if (log.length > 20) {
             log = log.slice(log.length - 20);
         }
package/src/daemon/tmp-dir.js
CHANGED
@@ -11,8 +11,7 @@ exports.removeSocketDir = removeSocketDir;
  * location within the OS's tmp directory where we write log files for background processes
  * and where we create the actual unix socket/named pipe for the daemon.
  */
-const
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const cache_directory_1 = require("../utils/cache-directory");
 const crypto_1 = require("crypto");
@@ -26,15 +25,15 @@ const getDaemonSocketDir = () => (0, path_1.join)(getSocketDir(),
 exports.getDaemonSocketDir = getDaemonSocketDir;
 function writeDaemonLogs(error) {
     const file = (0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'daemon-error.log');
-    (0,
+    (0, node_fs_1.writeFileSync)(file, error);
     return file;
 }
 function markDaemonAsDisabled() {
-    (0,
+    (0, node_fs_1.writeFileSync)((0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'disabled'), 'true');
 }
 function isDaemonDisabled() {
     try {
-        (0,
+        (0, node_fs_1.statSync)((0, path_1.join)(exports.DAEMON_DIR_FOR_CURRENT_WORKSPACE, 'disabled'));
         return true;
     }
     catch (e) {
@@ -56,7 +55,7 @@ function getSocketDir(alreadyUnique = false) {
         const dir = process.env.NX_SOCKET_DIR ??
             process.env.NX_DAEMON_SOCKET_DIR ??
             (alreadyUnique ? tmp_1.tmpdir : socketDirName());
-        (0,
+        (0, node_fs_1.mkdirSync)(dir, { recursive: true });
         return dir;
     }
     catch (e) {
@@ -65,7 +64,7 @@ function getSocketDir(alreadyUnique = false) {
 }
 function removeSocketDir() {
     try {
-        (0,
+        (0, node_fs_1.rmSync)(getSocketDir(), { recursive: true, force: true });
     }
     catch (e) { }
 }
package/src/generators/tree.d.ts
CHANGED
package/src/generators/tree.js
CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.FsTree = void 0;
 exports.flushChanges = flushChanges;
 exports.printChanges = printChanges;
-const
+const node_fs_1 = require("node:fs");
 const logger_1 = require("../utils/logger");
 const output_1 = require("../utils/output");
 const path_1 = require("path");
@@ -228,22 +228,22 @@ class FsTree {
     }
     fsReadDir(dirPath) {
         try {
-            return (0,
+            return (0, node_fs_1.readdirSync)((0, path_1.join)(this.root, dirPath));
         }
         catch {
             return [];
         }
     }
     fsIsFile(filePath) {
-        const stat = (0,
+        const stat = (0, node_fs_1.statSync)((0, path_1.join)(this.root, filePath));
         return stat.isFile();
     }
     fsReadFile(filePath) {
-        return (0,
+        return (0, node_fs_1.readFileSync)((0, path_1.join)(this.root, filePath));
     }
     fsExists(filePath) {
         try {
-            const stat = (0,
+            const stat = (0, node_fs_1.statSync)((0, path_1.join)(this.root, filePath));
             return stat.isFile() || stat.isDirectory();
         }
         catch {
@@ -279,18 +279,18 @@ function flushChanges(root, fileChanges) {
     fileChanges.forEach((f) => {
         const fpath = (0, path_1.join)(root, f.path);
         if (f.type === 'CREATE') {
-            (0,
-            (0,
+            (0, node_fs_1.mkdirSync)((0, path_1.dirname)(fpath), { recursive: true });
+            (0, node_fs_1.writeFileSync)(fpath, f.content);
             if (f.options?.mode)
-                (0,
+                (0, node_fs_1.chmodSync)(fpath, f.options.mode);
         }
         else if (f.type === 'UPDATE') {
-            (0,
+            (0, node_fs_1.writeFileSync)(fpath, f.content);
             if (f.options?.mode)
-                (0,
+                (0, node_fs_1.chmodSync)(fpath, f.options.mode);
         }
         else if (f.type === 'DELETE') {
-            (0,
+            (0, node_fs_1.rmSync)(fpath, { recursive: true, force: true });
         }
     });
 }
package/src/native/nx.wasm32-wasi.wasm
CHANGED
Binary file
package/src/plugins/js/index.js
CHANGED
@@ -2,7 +2,6 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.createDependencies = exports.createNodes = exports.name = void 0;
 const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const cache_directory_1 = require("../../utils/cache-directory");
@@ -96,7 +95,7 @@ function lockFileNeedsReprocessing(lockHash) {
     }
 }
 function writeLastProcessedLockfileHash(hash, lockFile) {
-    (0,
+    (0, fs_1.mkdirSync)((0, path_1.dirname)(lockFileHashFile), { recursive: true });
     (0, fs_1.writeFileSync)(cachedParsedLockFile, JSON.stringify(lockFile, null, 2));
     (0, fs_1.writeFileSync)(lockFileHashFile, hash);
 }
package/src/project-graph/nx-deps-cache.js
CHANGED
@@ -8,8 +8,7 @@ exports.createProjectFileMapCache = createProjectFileMapCache;
 exports.writeCache = writeCache;
 exports.shouldRecomputeWholeGraph = shouldRecomputeWholeGraph;
 exports.extractCachedFileData = extractCachedFileData;
-const
-const fs_extra_1 = require("fs-extra");
+const node_fs_1 = require("node:fs");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const cache_directory_1 = require("../utils/cache-directory");
@@ -19,8 +18,8 @@ exports.nxProjectGraph = (0, path_1.join)(cache_directory_1.workspaceDataDirecto
 exports.nxFileMap = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'file-map.json');
 function ensureCacheDirectory() {
     try {
-        if (!(0,
-            (0,
+        if (!(0, node_fs_1.existsSync)(cache_directory_1.workspaceDataDirectory)) {
+            (0, node_fs_1.mkdirSync)(cache_directory_1.workspaceDataDirectory, { recursive: true });
         }
     }
     catch (e) {
@@ -102,9 +101,9 @@ function writeCache(cache, projectGraph) {
     const tmpFileMapPath = `${exports.nxFileMap}~${unique}`;
     try {
         (0, fileutils_1.writeJsonFile)(tmpProjectGraphPath, projectGraph);
-        (0,
+        (0, node_fs_1.renameSync)(tmpProjectGraphPath, exports.nxProjectGraph);
         (0, fileutils_1.writeJsonFile)(tmpFileMapPath, cache);
-        (0,
+        (0, node_fs_1.renameSync)(tmpFileMapPath, exports.nxFileMap);
         done = true;
     }
     catch (err) {
package/src/tasks-runner/cache.js
CHANGED
@@ -3,11 +3,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.Cache = exports.DbCache = void 0;
 exports.getCache = getCache;
 const workspace_root_1 = require("../utils/workspace-root");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const perf_hooks_1 = require("perf_hooks");
 const default_tasks_runner_1 = require("./default-tasks-runner");
 const child_process_1 = require("child_process");
+const node_fs_1 = require("node:fs");
+const promises_1 = require("node:fs/promises");
 const cache_directory_1 = require("../utils/cache-directory");
 const node_machine_id_1 = require("node-machine-id");
 const native_1 = require("../native");
@@ -233,13 +234,13 @@ class Cache {
             // might be left overs from partially-completed cache invocations
             await this.remove(tdCommit);
             await this.remove(td);
-            await (0,
-            await (0,
-            await (0,
+            await (0, promises_1.mkdir)(td, { recursive: true });
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
+            await (0, promises_1.mkdir)((0, path_1.join)(td, 'outputs'));
             const expandedOutputs = await this.expandOutputsInWorkspace(outputs);
             await Promise.all(expandedOutputs.map(async (f) => {
                 const src = (0, path_1.join)(this.root, f);
-                if (
+                if ((0, node_fs_1.existsSync)(src)) {
                     const cached = (0, path_1.join)(td, 'outputs', f);
                     await this.copy(src, cached);
                 }
@@ -248,15 +249,15 @@ class Cache {
             // creating this file is atomic, whereas creating a folder is not.
             // so if the process gets terminated while we are copying stuff into cache,
             // the cache entry won't be used.
-            await (0,
-            await (0,
-            await (0,
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'code'), code.toString());
+            await (0, promises_1.writeFile)((0, path_1.join)(td, 'source'), await this.currentMachineId());
+            await (0, promises_1.writeFile)(tdCommit, 'true');
             if (this.options.remoteCache) {
                 await this.options.remoteCache.store(task.hash, this.cachePath);
             }
             if (terminalOutput) {
                 const outputPath = this.temporaryOutputPath(task);
-                await (0,
+                await (0, promises_1.writeFile)(outputPath, terminalOutput);
             }
         });
     }
@@ -265,7 +266,7 @@ class Cache {
         const expandedOutputs = await this.expandOutputsInCache(outputs, cachedResult);
         await Promise.all(expandedOutputs.map(async (f) => {
             const cached = (0, path_1.join)(cachedResult.outputsPath, f);
-            if (
+            if ((0, node_fs_1.existsSync)(cached)) {
                 const src = (0, path_1.join)(this.root, f);
                 await this.remove(src);
                 await this.copy(cached, src);
@@ -321,11 +322,11 @@ class Cache {
     async getFromLocalDir(task) {
         const tdCommit = (0, path_1.join)(this.cachePath, `${task.hash}.commit`);
         const td = (0, path_1.join)(this.cachePath, task.hash);
-        if (
-            const terminalOutput = await (0,
+        if ((0, node_fs_1.existsSync)(tdCommit)) {
+            const terminalOutput = await (0, promises_1.readFile)((0, path_1.join)(td, 'terminalOutput'), 'utf-8');
             let code = 0;
             try {
-                code = Number(await (0,
+                code = Number(await (0, promises_1.readFile)((0, path_1.join)(td, 'code'), 'utf-8'));
             }
             catch { }
             return {
@@ -342,7 +343,7 @@ class Cache {
         const td = (0, path_1.join)(this.cachePath, task.hash);
         let sourceMachineId = null;
         try {
-            sourceMachineId = await (0,
+            sourceMachineId = await (0, promises_1.readFile)((0, path_1.join)(td, 'source'), 'utf-8');
         }
         catch { }
         if (sourceMachineId && sourceMachineId != (await this.currentMachineId())) {
@@ -361,12 +362,12 @@ class Cache {
         }
     }
     createCacheDir() {
-        (0,
+        (0, node_fs_1.mkdirSync)(cache_directory_1.cacheDir, { recursive: true });
        return cache_directory_1.cacheDir;
    }
    createTerminalOutputsDir() {
        const path = (0, path_1.join)(this.cachePath, 'terminalOutputs');
-        (0,
+        (0, node_fs_1.mkdirSync)(path, { recursive: true });
        return path;
    }
 }
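
Note that the cache's commit-marker ordering survives the migration: outputs are copied into `<hash>/` first, and `<hash>.commit` is written last, so a run that dies mid-copy never leaves a usable-looking entry. Stripped down to the ordering alone, under assumed simplified arguments (names follow the hunks above):

    const { mkdir, writeFile } = require('node:fs/promises');
    const { join } = require('path');

    async function commitCacheEntry(td, tdCommit, terminalOutput, code) {
        await mkdir(td, { recursive: true });
        await writeFile(join(td, 'terminalOutput'), terminalOutput ?? 'no terminal output');
        // ...copy expanded outputs into join(td, 'outputs')...
        await writeFile(join(td, 'code'), code.toString());
        await writeFile(tdCommit, 'true'); // last: a single file write is atomic, a folder copy is not
    }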
package/src/tasks-runner/remove-old-cache-records.js
CHANGED
@@ -1,7 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs_1 = require("fs");
-const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
 const WEEK_IN_MS = 1000 * 60 * 60 * 24 * 7;
 const folder = process.argv[2];
@@ -34,11 +33,11 @@ function removeOld(records) {
             if (time - s.mtimeMs > WEEK_IN_MS) {
                 if (s.isDirectory()) {
                     try {
-                        (0,
+                        (0, fs_1.rmSync)(`${r}.commit`, { recursive: true, force: true });
                     }
                     catch (e) { }
                 }
-                (0,
+                (0, fs_1.rmSync)(r, { recursive: true, force: true });
             }
         }
         catch (e) { }
package/src/utils/fileutils.d.ts
CHANGED
@@ -1,5 +1,5 @@
 import type { JsonParseOptions, JsonSerializeOptions } from './json';
-import { PathLike } from 'fs';
+import { PathLike } from 'node:fs';
 export interface JsonReadOptions extends JsonParseOptions {
     /**
      * mutable field recording whether JSON ends with new line
@@ -43,6 +43,14 @@ export declare function readYamlFile<T extends object = any>(path: string, optio
  * @param options JSON serialize options
  */
 export declare function writeJsonFile<T extends object = object>(path: string, data: T, options?: JsonWriteOptions): void;
+/**
+ * Serializes the given data to JSON and writes it to a file asynchronously.
+ *
+ * @param path A path to a file.
+ * @param data data which should be serialized to JSON and written to the file
+ * @param options JSON serialize options
+ */
+export declare function writeJsonFileAsync<T extends object = object>(path: string, data: T, options?: JsonWriteOptions): Promise<void>;
 /**
  * Check if a directory exists
  * @param path Path to directory
package/src/utils/fileutils.js
CHANGED
@@ -3,6 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.readJsonFile = readJsonFile;
 exports.readYamlFile = readYamlFile;
 exports.writeJsonFile = writeJsonFile;
+exports.writeJsonFileAsync = writeJsonFileAsync;
 exports.directoryExists = directoryExists;
 exports.fileExists = fileExists;
 exports.createDirectory = createDirectory;
@@ -10,7 +11,8 @@ exports.isRelativePath = isRelativePath;
 exports.extractFileFromTarball = extractFileFromTarball;
 exports.readFileIfExisting = readFileIfExisting;
 const json_1 = require("./json");
-const
+const node_fs_1 = require("node:fs");
+const promises_1 = require("node:fs/promises");
 const path_1 = require("path");
 const tar = require("tar-stream");
 const zlib_1 = require("zlib");
@@ -22,7 +24,7 @@ const zlib_1 = require("zlib");
  * @returns Object the JSON content of the file represents
 */
 function readJsonFile(path, options) {
-    const content = (0,
+    const content = (0, node_fs_1.readFileSync)(path, 'utf-8');
     if (options) {
         options.endsWithNewline = content.charCodeAt(content.length - 1) === 10;
     }
@@ -41,7 +43,7 @@ function readJsonFile(path, options) {
  * @returns
 */
 function readYamlFile(path, options) {
-    const content = (0,
+    const content = (0, node_fs_1.readFileSync)(path, 'utf-8');
     const { load } = require('@zkochan/js-yaml');
     return load(content, { ...options, filename: path });
 }
@@ -53,12 +55,27 @@ function readYamlFile(path, options) {
  * @param options JSON serialize options
 */
 function writeJsonFile(path, data, options) {
-    (0,
+    (0, node_fs_1.mkdirSync)((0, path_1.dirname)(path), { recursive: true });
     const serializedJson = (0, json_1.serializeJson)(data, options);
     const content = options?.appendNewLine
         ? `${serializedJson}\n`
         : serializedJson;
-    (0,
+    (0, node_fs_1.writeFileSync)(path, content, { encoding: 'utf-8' });
+}
+/**
+ * Serializes the given data to JSON and writes it to a file asynchronously.
+ *
+ * @param path A path to a file.
+ * @param data data which should be serialized to JSON and written to the file
+ * @param options JSON serialize options
+ */
+async function writeJsonFileAsync(path, data, options) {
+    await (0, promises_1.mkdir)((0, path_1.dirname)(path), { recursive: true });
+    const serializedJson = (0, json_1.serializeJson)(data, options);
+    const content = options?.appendNewLine
+        ? `${serializedJson}\n`
+        : serializedJson;
+    await (0, promises_1.writeFile)(path, content, { encoding: 'utf-8' });
 }
 /**
  * Check if a directory exists
@@ -66,7 +83,7 @@ function writeJsonFile(path, data, options) {
 */
 function directoryExists(path) {
     try {
-        return (0,
+        return (0, node_fs_1.statSync)(path).isDirectory();
     }
     catch {
         return false;
@@ -78,14 +95,14 @@ function directoryExists(path) {
 */
 function fileExists(path) {
     try {
-        return (0,
+        return (0, node_fs_1.statSync)(path).isFile();
     }
     catch {
         return false;
     }
 }
 function createDirectory(path) {
-    (0,
+    (0, node_fs_1.mkdirSync)(path, { recursive: true });
 }
 function isRelativePath(path) {
     return (path === '.' ||
@@ -102,9 +119,9 @@ function isRelativePath(path) {
 */
 async function extractFileFromTarball(tarballPath, file, destinationFilePath) {
     return new Promise((resolve, reject) => {
-        (0,
+        (0, node_fs_1.mkdirSync)((0, path_1.dirname)(destinationFilePath), { recursive: true });
         var tarExtractStream = tar.extract();
-        const destinationFileStream = (0,
+        const destinationFileStream = (0, node_fs_1.createWriteStream)(destinationFilePath);
         let isFileExtracted = false;
         tarExtractStream.on('entry', function (header, stream, next) {
             if (header.name === file) {
@@ -126,9 +143,9 @@ async function extractFileFromTarball(tarballPath, file, destinationFilePath) {
                 reject();
             }
         });
-        (0,
+        (0, node_fs_1.createReadStream)(tarballPath).pipe((0, zlib_1.createGunzip)()).pipe(tarExtractStream);
     });
 }
 function readFileIfExisting(path) {
-    return (0,
+    return (0, node_fs_1.existsSync)(path) ? (0, node_fs_1.readFileSync)(path, 'utf-8') : '';
 }
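
`writeJsonFileAsync` is the one genuinely new utility in this release: an async twin of `writeJsonFile` built on `node:fs/promises`, which the daemon now uses to persist `server-process.json` (see `package/src/daemon/cache.js` above). A hypothetical usage sketch; the require path and file path are illustrative, not taken from the diff:

    const { writeJsonFileAsync } = require('nx/src/utils/fileutils');

    async function persistDaemonInfo() {
        // Creates parent directories as needed, then writes the serialized JSON
        // with a trailing newline because appendNewLine is set.
        await writeJsonFileAsync('/tmp/nx-daemon/server-process.json', { processId: process.pid }, { appendNewLine: true });
    }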
package/src/utils/ignore.js
CHANGED
@@ -4,7 +4,7 @@ exports.ALWAYS_IGNORE = void 0;
 exports.getIgnoredGlobs = getIgnoredGlobs;
 exports.getAlwaysIgnore = getAlwaysIgnore;
 exports.getIgnoreObject = getIgnoreObject;
-const
+const node_fs_1 = require("node:fs");
 const ignore_1 = require("ignore");
 const fileutils_1 = require("./fileutils");
 const path_1 = require("./path");
@@ -48,7 +48,7 @@ function getIgnoreObject(root = workspace_root_1.workspaceRoot) {
 function getIgnoredGlobsFromFile(file, root) {
     try {
         const results = [];
-        const contents = (0,
+        const contents = (0, node_fs_1.readFileSync)(file, 'utf-8');
         const lines = contents.split('\n');
         for (const line of lines) {
             const trimmed = line.trim();
package/src/utils/package-manager.js
CHANGED
@@ -15,7 +15,7 @@ exports.packageRegistryView = packageRegistryView;
 exports.packageRegistryPack = packageRegistryPack;
 const child_process_1 = require("child_process");
 const fs_1 = require("fs");
-const
+const promises_1 = require("node:fs/promises");
 const path_1 = require("path");
 const semver_1 = require("semver");
 const tmp_1 = require("tmp");
@@ -301,7 +301,7 @@ function createTempNpmDirectory() {
     copyPackageManagerConfigurationFiles(workspace_root_1.workspaceRoot, dir);
     const cleanup = async () => {
         try {
-            await (0,
+            await (0, promises_1.rm)(dir, { recursive: true, force: true });
         }
         catch {
             // It's okay if this fails, the OS will clean it up eventually