pkgprn 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.changeset/README.md +8 -0
- package/.changeset/config.json +11 -0
- package/.github/workflows/main.yml +28 -0
- package/.prettierrc +6 -0
- package/CHANGELOG.md +7 -0
- package/LICENSE +21 -0
- package/README.md +3 -0
- package/ci.env +1 -0
- package/index.js +106 -0
- package/jsconfig.json +12 -0
- package/package.json +23 -0
- package/pnpm-workspace.yaml +2 -0
- package/prune.js +540 -0
- package/tests/test.js +205 -0
- package/tests/tests.json +232 -0
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
# Changesets
|
|
2
|
+
|
|
3
|
+
Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
|
|
4
|
+
with multi-package repos, or single-package repos to help you version and publish your code. You can
|
|
5
|
+
find the full documentation for it [in our repository](https://github.com/changesets/changesets)
|
|
6
|
+
|
|
7
|
+
We have a quick list of common questions to get you started engaging with this project in
|
|
8
|
+
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
{
|
|
2
|
+
"$schema": "https://unpkg.com/@changesets/config@2.3.0/schema.json",
|
|
3
|
+
"changelog": "@changesets/cli/changelog",
|
|
4
|
+
"commit": false,
|
|
5
|
+
"fixed": [],
|
|
6
|
+
"linked": [],
|
|
7
|
+
"access": "public",
|
|
8
|
+
"baseBranch": "main",
|
|
9
|
+
"updateInternalDependencies": "patch",
|
|
10
|
+
"ignore": []
|
|
11
|
+
}
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
on:
|
|
2
|
+
push:
|
|
3
|
+
branches:
|
|
4
|
+
- main
|
|
5
|
+
- '[0-9]+.[0-9]+.x'
|
|
6
|
+
- '[0-9]+.x.x'
|
|
7
|
+
- '[0-9]+.x'
|
|
8
|
+
- next
|
|
9
|
+
- next-major
|
|
10
|
+
- alpha
|
|
11
|
+
- beta
|
|
12
|
+
pull_request:
|
|
13
|
+
branches:
|
|
14
|
+
- main
|
|
15
|
+
- '[0-9]+.[0-9]+.x'
|
|
16
|
+
- '[0-9]+.x.x'
|
|
17
|
+
- '[0-9]+.x'
|
|
18
|
+
- next
|
|
19
|
+
- next-major
|
|
20
|
+
- alpha
|
|
21
|
+
- beta
|
|
22
|
+
|
|
23
|
+
jobs:
|
|
24
|
+
pipeline:
|
|
25
|
+
uses: kshutkin/pipeline/.github/workflows/main.yml@main
|
|
26
|
+
secrets: inherit
|
|
27
|
+
with:
|
|
28
|
+
has-dist: false
|
package/.prettierrc
ADDED
package/CHANGELOG.md
ADDED
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Konstantin Shutkin
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
package/ci.env
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
CI=true
|
package/index.js
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
#!/usr/bin/env node
// pkgprn CLI entry point: parses flags, loads ./package.json from the
// current working directory, prunes it via prunePkg and writes it back.
import { createLogger } from '@niceties/logger';
import { cli } from 'cleye';
import { readFile, writeFile } from 'node:fs/promises';
import { dirname, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { prunePkg } from './prune.js';

// globals
// ESM has no __dirname; reconstruct it from import.meta.url so the tool
// can locate its own package.json (see getMyVersion).
const __dirname = dirname(fileURLToPath(import.meta.url));

const logger = createLogger();
logger.update('preparing..');

try {
    const version = await getMyVersion();

    // Clear the "preparing.." line and move the cursor back up one row so
    // cleye's own output starts where the progress line was. moveCursor is
    // optional-chained because it is undefined when stdout is not a TTY.
    logger.update('');
    process.stdout.moveCursor?.(0, -1);

    const cliOptions = cli({
        name: 'pkgprn',
        version: version ?? '<unknown>',
        description:
            'prune devDependencies and redundant scripts from package.json',
        flags: {
            profile: {
                type: String,
                description: 'profile to use',
                default: 'library',
            },
            flatten: {
                // Custom parser: false (flag absent), true (bare flag =
                // auto-detect) or an explicit directory name string.
                type: FlattenParam,
                description: 'flatten package files',
                default: false,
            },
            removeSourcemaps: {
                type: Boolean,
                description: 'remove sourcemaps',
                default: false,
            },
            optimizeFiles: {
                type: Boolean,
                description: 'optimize files array',
                default: true,
            },
        },
    });

    // The package being pruned is always the one in the current directory.
    const pkg = await readPackage('.');
    if (!pkg) {
        throw new Error('Could not read package.json');
    }
    // prunePkg mutates pkg in place (and may move/remove files on disk).
    await prunePkg(pkg, cliOptions.flags, logger);

    await writePackage(pkg);
} catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    // 3 is the logger's error level; exit code 255 signals failure.
    logger.finish(`Error: ${errorMessage}`, 3);
    process.exit(255);
}
|
|
62
|
+
|
|
63
|
+
/**
 * Reads this tool's own package.json (next to index.js) and returns its
 * version, or the placeholder '<unknown>' when it cannot be determined.
 * @returns {Promise<string>}
 */
async function getMyVersion() {
    const ownPkg = await readPackage(resolve(__dirname));
    if (ownPkg && 'version' in ownPkg && typeof ownPkg.version === 'string') {
        return ownPkg.version;
    }
    return '<unknown>';
}
|
|
73
|
+
|
|
74
|
+
/**
 * Loads and parses `package.json` from the given directory.
 * @param {string} dir
 * @returns {Promise<import('./prune.js').PackageJson | undefined>}
 *   the parsed package, or undefined when the file is missing or malformed
 */
async function readPackage(dir) {
    try {
        const raw = await readFile(resolve(dir, 'package.json'));
        return JSON.parse(String(raw));
    } catch {
        // A missing or unparsable package.json is reported as undefined;
        // callers decide whether that is fatal.
        return undefined;
    }
}
|
|
87
|
+
|
|
88
|
+
/**
 * Serializes the package manifest back to ./package.json in the current
 * working directory (2-space indent, trailing newline).
 * @param {import('./prune.js').PackageJson} pkg
 */
async function writePackage(pkg) {
    const serialized = JSON.stringify(pkg, null, 2);
    await writeFile('./package.json', `${serialized}\n`);
}
|
|
94
|
+
|
|
95
|
+
/**
 * cleye flag parser for --flatten.
 * Accepts a boolean (flag absent -> false), an empty string (flag given
 * without a value -> auto-detect, reported as true) or an explicit
 * directory name, which is passed through unchanged.
 * @param {string | boolean} value
 * @returns {string | boolean}
 */
function FlattenParam(value) {
    if (typeof value === 'boolean') {
        return value;
    }
    return value === '' ? true : value;
}
|
package/jsconfig.json
ADDED
package/package.json
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "pkgprn",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"license": "MIT",
|
|
5
|
+
"author": "Konstantin Shutkin",
|
|
6
|
+
"bin": "./index.js",
|
|
7
|
+
"type": "module",
|
|
8
|
+
"description": "Prune package.json before publish",
|
|
9
|
+
"homepage": "https://github.com/kshutkin/package-prune#readme",
|
|
10
|
+
"bugs": {
|
|
11
|
+
"url": "https://github.com/kshutkin/package-prune/issues"
|
|
12
|
+
},
|
|
13
|
+
"repository": {
|
|
14
|
+
"type": "git",
|
|
15
|
+
"url": "git+https://github.com/kshutkin/package-prune.git"
|
|
16
|
+
},
|
|
17
|
+
"main": "prune.js",
|
|
18
|
+
"dependencies": {
|
|
19
|
+
"cleye": "1.3.2",
|
|
20
|
+
"jsonata": "^2.1.0",
|
|
21
|
+
"@niceties/logger": "^1.1.13"
|
|
22
|
+
}
|
|
23
|
+
}
|
package/prune.js
ADDED
|
@@ -0,0 +1,540 @@
|
|
|
1
|
+
import {
|
|
2
|
+
access,
|
|
3
|
+
mkdir,
|
|
4
|
+
readdir,
|
|
5
|
+
readFile,
|
|
6
|
+
rename,
|
|
7
|
+
rm,
|
|
8
|
+
stat,
|
|
9
|
+
writeFile,
|
|
10
|
+
} from 'node:fs/promises';
|
|
11
|
+
import path from 'node:path';
|
|
12
|
+
|
|
13
|
+
/**
|
|
14
|
+
* @typedef {Object} Logger
|
|
15
|
+
* @property {function(string): void} update
|
|
16
|
+
*/
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* @typedef {Object} PackageJson
|
|
20
|
+
* @property {Object.<string, string>} [scripts]
|
|
21
|
+
* @property {Object.<string, string>} [devDependencies]
|
|
22
|
+
* @property {string} [packageManager]
|
|
23
|
+
* @property {string} [main]
|
|
24
|
+
* @property {string|Object.<string, string>} [bin]
|
|
25
|
+
* @property {Array<string>} [files]
|
|
26
|
+
* @property {Object} [directories]
|
|
27
|
+
* @property {Object} [exports]
|
|
28
|
+
* @property {Object} [typesVersions]
|
|
29
|
+
*/
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* @typedef {Object} PruneOptions
|
|
33
|
+
* @property {string} profile
|
|
34
|
+
* @property {string|boolean} flatten
|
|
35
|
+
* @property {boolean} removeSourcemaps
|
|
36
|
+
* @property {boolean} optimizeFiles
|
|
37
|
+
*/
|
|
38
|
+
|
|
39
|
+
/**
 * Prunes a package.json object according to the given options.
 *
 * Mutates `pkg` in place:
 *  - drops devDependencies and packageManager,
 *  - keeps only the lifecycle scripts relevant for the chosen profile,
 *  - optionally flattens the dist directory (options.flatten),
 *  - optionally strips sourcemaps from the tree on disk,
 *  - optionally minimizes the `files` array.
 *
 * Besides mutating `pkg`, this function can modify the file system of the
 * current working directory (rename/remove files) — it is meant to run
 * inside the package that is about to be published.
 *
 * @param {PackageJson} pkg
 * @param {PruneOptions} options
 * @param {Logger} logger
 * @throws {Error} when options.profile is not a known profile
 */
export async function prunePkg(pkg, options, logger) {
    const scriptsToKeep = getScriptsData();

    const keys =
        scriptsToKeep[/** @type {'library'|'app'} */ (options.profile)];

    if (!keys) {
        throw new Error(`unknown profile ${options.profile}`);
    }

    // Setting to undefined (rather than delete) is enough: JSON.stringify
    // drops undefined-valued keys when the manifest is written back.
    pkg.devDependencies = undefined;
    pkg.packageManager = undefined;

    if (pkg.scripts) {
        // Keep only the scripts the chosen profile whitelists.
        for (const key of Object.keys(pkg.scripts)) {
            if (!keys.has(key)) {
                delete pkg.scripts[key];
            }
        }

        if (Object.keys(pkg.scripts).length === 0) {
            pkg.scripts = undefined;
        }
    }

    if (options.flatten) {
        await flatten(pkg, options.flatten, logger);
    }

    if (options.removeSourcemaps) {
        // Delete every *.map file and strip the matching sourceMappingURL
        // comment from its source file.
        const sourceMaps = await walkDir('.', ['node_modules']).then((files) =>
            files.filter((file) => file.endsWith('.map'))
        );
        for (const sourceMap of sourceMaps) {
            // find corresponding file (foo.js.map -> foo.js)
            const sourceFile = sourceMap.slice(0, -4);
            // load file
            const sourceFileContent = await readFile(sourceFile, 'utf8');
            // find sourceMappingURL
            const sourceMappingUrl = `\n//# sourceMappingURL=${path.basename(sourceMap)}`;
            // remove sourceMappingURL (first occurrence only)
            const newContent = sourceFileContent.replace(sourceMappingUrl, '');
            // write file
            await writeFile(sourceFile, newContent, 'utf8');
            // remove sourceMap
            await rm(sourceMap);
        }
    }

    if (pkg.files && Array.isArray(pkg.files) && options.optimizeFiles) {
        // filterFiles: entries npm publishes regardless of `files`, so they
        // are redundant in the list. specialFiles: README/LICENSE-style
        // basenames npm always includes from the package root.
        const filterFiles = ['package.json'];
        const specialFiles = ['README', 'LICENSE', 'LICENCE'];
        if (pkg.main && typeof pkg.main === 'string') {
            filterFiles.push(normalizePath(pkg.main));
        }
        if (pkg.bin) {
            if (typeof pkg.bin === 'string') {
                filterFiles.push(normalizePath(pkg.bin));
            }
            if (typeof pkg.bin === 'object' && pkg.bin !== null) {
                filterFiles.push(...Object.values(pkg.bin).map(normalizePath));
            }
        }

        // Bucket entries by directory depth so fully-listed directories can
        // be collapsed bottom-up into a single directory entry.
        const depthToFiles = new Map();

        for (const file of pkg.files.concat(filterFiles)) {
            const dirname = path.dirname(file);
            const depth = dirname.split('/').length;
            if (!depthToFiles.has(depth)) {
                depthToFiles.set(depth, [file]);
            } else {
                depthToFiles.get(depth)?.push(file);
            }
        }

        // walk depth keys from the highest to the lowest
        const maxDepth = Math.max(...depthToFiles.keys());
        for (let depth = maxDepth; depth > 0; --depth) {
            const files = depthToFiles.get(depth);
            const mapDirToFiles = new Map();
            for (const file of files) {
                const dirname = path.dirname(file);
                const basename = normalizePath(path.basename(file));
                if (!mapDirToFiles.has(dirname)) {
                    mapDirToFiles.set(dirname, [basename]);
                } else {
                    mapDirToFiles.get(dirname)?.push(basename);
                }
            }
            for (const [dirname, filesInDir] of mapDirToFiles) {
                // find out real content of the directory
                const realFiles = await readdir(dirname);
                // check if all files in the directory are in the filesInDir
                const allFilesInDir =
                    realFiles.every((file) => filesInDir.includes(file)) ||
                    realFiles.length === 0;
                if (allFilesInDir && dirname !== '.') {
                    // Whole directory is covered: promote the directory one
                    // level up and drop its individual entries from this
                    // depth bucket.
                    if (!depthToFiles.has(depth - 1)) {
                        depthToFiles.set(depth - 1, [dirname]);
                    } else {
                        depthToFiles.get(depth - 1).push(dirname);
                    }
                    const thisDepth = depthToFiles.get(depth);
                    depthToFiles.set(
                        depth,
                        thisDepth.filter((/** @type {string} */ file) =>
                            filesInDir.every(
                                (/** @type {string} */ fileInDir) =>
                                    path.join(dirname, fileInDir) !== file
                            )
                        )
                    );
                }
            }
        }

        // Flatten the depth buckets back into a deduplicated list.
        pkg.files = [...new Set(Array.from(depthToFiles.values()).flat())];

        // Drop entries npm includes implicitly: package.json, main, bin
        // targets, and root-level README/LICENSE/LICENCE (any extension).
        pkg.files = pkg.files.filter((/** @type {string} */ file) => {
            const fileNormalized = normalizePath(file);
            const dirname = path.dirname(fileNormalized);
            const basenameWithoutExtension = path
                .basename(fileNormalized, path.extname(fileNormalized))
                .toUpperCase();
            return (
                !filterFiles.includes(fileNormalized) &&
                ((dirname !== '' && dirname !== '.') ||
                    !specialFiles.includes(basenameWithoutExtension))
            );
        });

        /**
         * Directories whose entire (recursive) content consists only of
         * implicitly-published files, making their listing redundant.
         * @type {string[]}
         */
        const ignoreDirs = [];

        for (const fileOrDir of pkg.files) {
            // NOTE(review): isDirectory() throws on a `files` entry that
            // does not exist on disk — TODO confirm entries are guaranteed
            // to exist at this point.
            if (await isDirectory(fileOrDir)) {
                const allFiles = await walkDir(fileOrDir);
                if (
                    allFiles.every((/** @type {string} */ file) => {
                        const fileNormalized = normalizePath(file);
                        return filterFiles.includes(fileNormalized);
                    })
                ) {
                    ignoreDirs.push(fileOrDir);
                }
            }
        }

        pkg.files = pkg.files.filter((dir) => !ignoreDirs.includes(dir));

        if (pkg.files.length === 0) {
            pkg.files = undefined;
        }
    }
}
|
|
203
|
+
|
|
204
|
+
/**
 * Flattens the dist directory and updates package.json references.
 *
 * Moves every file under the dist directory to the package root (keeping
 * relative sub-structure), removes the emptied directories, and rewrites
 * every manifest reference (main/module/bin/exports/...) that pointed into
 * the dist directory.
 *
 * @param {PackageJson} pkg
 * @param {string|true} flatten - true: auto-detect the dist directory as the
 *   common prefix of all entry-point references; string: flatten this
 *   directory explicitly
 * @param {Logger} logger
 * @throws {Error} when no dist directory can be determined, or when
 *   flattening would overwrite files that already exist at the destination
 */
async function flatten(pkg, flatten, logger) {
    // jsonata is only needed for this feature, so it is loaded lazily.
    const { default: jsonata } = await import('jsonata');

    // find out where is the dist folder

    // Collects every manifest field that references a built file.
    const expression = jsonata(
        '[bin, bin.*, main, module, unpkg, umd, types, typings, exports[].*.*, typesVersions.*.*, directories.bin]'
    );
    const allReferences = await expression.evaluate(pkg);
    let distDir;

    // at this point we requested directories.bin, but it is the only one that is directory and not a file
    // later when we get dirname we can't flatten directories.bin completely
    // it is easy to fix by checking element is a directory but it is kind of good
    // to have it as a separate directory, but user still can flatten it by specifying the directory

    if (flatten === true) {
        // Auto mode: the dist directory is the longest directory prefix
        // shared by every string reference.
        let commonSegments;

        for (const entry of allReferences) {
            if (typeof entry !== 'string') {
                continue;
            }

            const dirname = path.dirname(entry);

            const cleanedSegments = dirname
                .split('/')
                .filter((path) => path && path !== '.');
            if (!commonSegments) {
                commonSegments = cleanedSegments;
            } else {
                // Truncate the running prefix at the first mismatch.
                for (let i = 0; i < commonSegments.length; ++i) {
                    if (commonSegments[i] !== cleanedSegments[i]) {
                        commonSegments.length = i;
                        break;
                    }
                }
            }
        }
        distDir = commonSegments?.join('/');
    } else {
        distDir = normalizePath(flatten);
    }

    if (!distDir) {
        throw new Error('could not find dist folder');
    }

    logger.update(`flattening ${distDir}...`);

    // check if dist can be flattened

    const relativeDistDir = `./${distDir}`;

    const existsPromises = [];

    const filesInDist = await walkDir(relativeDistDir);

    for (const file of filesInDist) {
        // check file is not in root dir
        const relativePath = path.relative(relativeDistDir, file);
        existsPromises.push(isExists(relativePath));
    }

    const exists = await Promise.all(existsPromises);

    // isExists resolves to the path itself (truthy) when present, so the
    // surviving entries double as the list of conflicting file names.
    const filesAlreadyExist = exists.filter(Boolean);

    if (filesAlreadyExist.length) {
        throw new Error(
            `dist folder cannot be flattened because files already exist: ${filesAlreadyExist.join(', ')}`
        );
    }

    // Special case: flattening the directories.bin directory converts it
    // into an explicit `bin` field (string or name->file map).
    if (
        typeof flatten === 'string' &&
        'directories' in pkg &&
        pkg.directories != null &&
        typeof pkg.directories === 'object' &&
        'bin' in pkg.directories &&
        typeof pkg.directories.bin === 'string' &&
        normalizePath(pkg.directories.bin) === normalizePath(flatten)
    ) {
        // biome-ignore lint/performance/noDelete: <explanation>
        delete pkg.directories.bin;
        if (Object.keys(pkg.directories).length === 0) {
            pkg.directories = undefined;
        }
        const files = await readdir(flatten);
        if (files.length === 1) {
            pkg.bin = files[0];
        } else {
            pkg.bin = {};
            for (const file of files) {
                // Command name = file name without extension.
                pkg.bin[path.basename(file, path.extname(file))] = file;
            }
        }
    }

    // create new directory structure
    const mkdirPromises = [];
    for (const file of filesInDist) {
        // check file is not in root dir
        const relativePath = path.relative(relativeDistDir, file);
        mkdirPromises.push(
            mkdir(path.dirname(relativePath), { recursive: true })
        );
    }

    await Promise.all(mkdirPromises);

    // move files to root dir (rename)
    const renamePromises = [];
    const newFiles = [];

    for (const file of filesInDist) {
        // check file is not in root dir
        const relativePath = path.relative(relativeDistDir, file);
        newFiles.push(relativePath);
        renamePromises.push(rename(file, relativePath));
    }

    await Promise.all(renamePromises);

    // Remove the now-empty dist directory, then keep climbing while the
    // parent also became empty (stopping before the package root).
    let cleanedDir = relativeDistDir;
    while (await isEmptyDir(cleanedDir)) {
        await rm(cleanedDir, { recursive: true, force: true });
        const parentDir = path.dirname(cleanedDir);
        if (parentDir === '.') {
            break;
        }
        cleanedDir = parentDir;
    }

    const normalizedCleanDir = normalizePath(cleanedDir);

    const allReferencesSet = new Set(allReferences);

    // update package.json
    const stringToReplace = `${distDir}/`; // we append / to remove in from the middle of the string
    const pkgClone = cloneAndUpdate(pkg, (value) =>
        allReferencesSet.has(value) ? value.replace(stringToReplace, '') : value
    );
    Object.assign(pkg, pkgClone);

    // update files
    let files = pkg.files;
    if (files) {
        // Drop entries that lived inside the removed dist directory and
        // replace them with the relocated paths.
        files = files.filter((file) => {
            const fileNormalized = normalizePath(file);
            return (
                !isSubDirectory(cleanedDir, fileNormalized) &&
                fileNormalized !== normalizedCleanDir
            );
        });
        files.push(...newFiles);
        pkg.files = [...files];
    }

    // remove extra directories with package.json
    // (stub directories whose only purpose was redirecting a subpath
    // export become pointless once everything sits at the root)
    const exports = pkg.exports ? Object.keys(pkg.exports) : [];
    for (const key of exports) {
        if (key === '.') {
            continue;
        }
        const isDir = await isDirectory(key);
        if (isDir) {
            const pkgPath = path.join(key, 'package.json');
            const pkgExists = await isExists(pkgPath);
            // ensure nothing else is in the directory
            const files = await readdir(key);
            if (files.length === 1 && pkgExists) {
                await rm(key, { recursive: true, force: true });
            }
        }
    }
}
|
|
388
|
+
|
|
389
|
+
/**
 * Normalizes a path (collapsing '.' segments and duplicate separators)
 * and strips a single trailing slash or backslash, so equivalent
 * spellings like 'dist/' and './dist' compare equal.
 * @param {string} file
 * @returns {string}
 */
function normalizePath(file) {
    const normalized = path.normalize(file);
    const lastChar = normalized.at(-1);
    return lastChar === '/' || lastChar === '\\'
        ? normalized.slice(0, -1)
        : normalized;
}
|
|
401
|
+
|
|
402
|
+
/**
 * Deep clones an object/array and updates all string values using the
 * updater function. Non-string primitives (and null) are returned as-is.
 * @param {unknown} pkg
 * @param {(value: string) => string} updater
 * @returns {unknown}
 */
function cloneAndUpdate(pkg, updater) {
    if (typeof pkg === 'string') {
        return updater(pkg);
    }
    if (Array.isArray(pkg)) {
        const items = [];
        for (const item of pkg) {
            items.push(cloneAndUpdate(item, updater));
        }
        return items;
    }
    if (pkg !== null && typeof pkg === 'object') {
        const source = /** @type {Record<string, unknown>} */ (pkg);
        /** @type {Record<string, unknown>} */
        const result = {};
        for (const [key, value] of Object.entries(source)) {
            result[key] = cloneAndUpdate(value, updater);
        }
        return result;
    }
    // Numbers, booleans, null, undefined: nothing to rewrite.
    return pkg;
}
|
|
428
|
+
|
|
429
|
+
/**
 * Checks whether `child` lies strictly inside the `parent` directory.
 *
 * Bug fix: the previous implementation tested
 * `path.relative(child, parent).startsWith('..')`, which is also true for
 * siblings and root-level files (e.g. relative('src', 'dist') === '../dist'),
 * so flatten() wrongly dropped unrelated entries from `pkg.files`. The
 * standard check is parent -> child: the relative path must be non-empty,
 * not escape upward, and not be absolute.
 *
 * @param {string} parent
 * @param {string} child
 * @returns {boolean} true only when child is inside parent (equal paths
 *   return false)
 */
function isSubDirectory(parent, child) {
    const relation = path.relative(parent, child);
    return (
        relation !== '' &&
        !relation.startsWith('..') &&
        !path.isAbsolute(relation)
    );
}
|
|
437
|
+
|
|
438
|
+
/**
 * Reports whether a directory contains no files at its top level.
 * Subdirectories are ignored: a directory holding only (emptied)
 * subdirectories still counts as empty, which is what the flatten
 * cleanup loop relies on.
 * @param {string} dir
 * @returns {Promise<boolean>}
 */
async function isEmptyDir(dir) {
    const entries = await readdir(dir, { withFileTypes: true });
    return entries.every((entry) => entry.isDirectory());
}
|
|
446
|
+
|
|
447
|
+
/**
 * Reports whether the given path refers to a directory.
 * Rejects (stat error, e.g. ENOENT) when the path does not exist.
 * @param {string} file
 * @returns {Promise<boolean>}
 */
async function isDirectory(file) {
    return (await stat(file)).isDirectory();
}
|
|
455
|
+
|
|
456
|
+
/**
 * Recursively lists the files under `dir`, skipping any file whose
 * relative path contains one of the `ignoreDirs` segments. Directories
 * themselves are never returned.
 * @param {string} dir
 * @param {Array<string>} [ignoreDirs=[]]
 * @returns {Promise<Array<string>>} paths joined onto `dir`
 */
async function walkDir(dir, ignoreDirs = []) {
    const entries = await readdir(dir, {
        recursive: true,
        withFileTypes: true,
    });

    const collected = [];
    for (const entry of entries) {
        if (!entry.isFile()) {
            continue;
        }
        // parentPath carries the directory of the entry for recursive
        // listings; fall back to the bare name when it is absent.
        const childPath = entry.parentPath
            ? path.join(entry.parentPath, entry.name)
            : entry.name;

        // Skip the file when any segment of its dir-relative path matches
        // an ignored directory name.
        const segments = path.relative(dir, childPath).split(path.sep);
        const ignored = segments.some((segment) =>
            ignoreDirs.includes(segment)
        );
        if (!ignored) {
            collected.push(childPath);
        }
    }

    return collected;
}
|
|
488
|
+
|
|
489
|
+
/**
 * Checks whether a path exists on disk.
 * Deliberately resolves to the path itself (a truthy string) when it
 * exists — callers collect the surviving values as the list of
 * conflicting file names — and to false when it does not.
 * Any error other than ENOENT is rethrown.
 * @param {string} file
 * @returns {Promise<string | false>}
 */
async function isExists(file) {
    try {
        await access(file);
        return file;
    } catch (e) {
        const isMissing =
            typeof e === 'object' &&
            e != null &&
            'code' in e &&
            e.code === 'ENOENT';
        if (!isMissing) {
            throw e;
        }
        return false;
    }
}
|
|
508
|
+
|
|
509
|
+
/**
 * Builds the per-profile whitelists of package.json scripts that survive
 * pruning. 'library' keeps only the install/prepare lifecycle hooks npm
 * runs for consumers; 'app' additionally keeps the start/restart/stop/test
 * family.
 * @returns {{ library: Set<string>, app: Set<string> }}
 */
function getScriptsData() {
    const lifecycleScripts = [
        'preinstall',
        'install',
        'postinstall',
        'prepublish',
        'preprepare',
        'prepare',
        'postprepare',
    ];

    const runtimeScripts = [
        'prestart',
        'start',
        'poststart',
        'prerestart',
        'restart',
        'postrestart',
        'prestop',
        'stop',
        'poststop',
        'pretest',
        'test',
        'posttest',
    ];

    return {
        library: new Set(lifecycleScripts),
        // app = everything a library keeps, plus the runtime scripts.
        app: new Set([...lifecycleScripts, ...runtimeScripts]),
    };
}
|
package/tests/test.js
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
import cd from 'node:child_process';
|
|
2
|
+
import { promisify, parseArgs } from 'node:util';
|
|
3
|
+
import assert from 'node:assert';
|
|
4
|
+
import test, { after, describe } from 'node:test';
|
|
5
|
+
import fs from 'node:fs/promises';
|
|
6
|
+
import process from 'node:process';
|
|
7
|
+
import { filesToString, stringToFiles } from 'cli-test-helper';
|
|
8
|
+
import tests from './tests.json' with { type: 'json' };
|
|
9
|
+
|
|
10
|
+
const exec = promisify(cd.exec);
|
|
11
|
+
|
|
12
|
+
const dir = './tests/tmp';
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* @typedef {Object} TestCase
|
|
16
|
+
* @property {number} id
|
|
17
|
+
* @property {string} name
|
|
18
|
+
* @property {string} input
|
|
19
|
+
* @property {string} output
|
|
20
|
+
* @property {string} stdout
|
|
21
|
+
* @property {string} stderr
|
|
22
|
+
* @property {string} [args]
|
|
23
|
+
* @property {number} [exitCode]
|
|
24
|
+
*/
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* @typedef {Record<string, TestCase[]>} TestSuites
|
|
28
|
+
*/
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* @typedef {Object} ExecResult
|
|
32
|
+
* @property {number} [code]
|
|
33
|
+
* @property {string} [stdout]
|
|
34
|
+
* @property {string} [stderr]
|
|
35
|
+
*/
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* @typedef {Object} ExecError
|
|
39
|
+
* @property {number} code
|
|
40
|
+
* @property {string} [stdout]
|
|
41
|
+
* @property {string} [stderr]
|
|
42
|
+
*/
|
|
43
|
+
|
|
44
|
+
const args = parseArgs({
|
|
45
|
+
options: {
|
|
46
|
+
update: {
|
|
47
|
+
type: 'boolean',
|
|
48
|
+
short: 'u',
|
|
49
|
+
default: false,
|
|
50
|
+
},
|
|
51
|
+
capture: {
|
|
52
|
+
type: 'string',
|
|
53
|
+
short: 'c',
|
|
54
|
+
},
|
|
55
|
+
export: {
|
|
56
|
+
type: 'string',
|
|
57
|
+
short: 'e',
|
|
58
|
+
},
|
|
59
|
+
result: {
|
|
60
|
+
type: 'string',
|
|
61
|
+
short: 'r',
|
|
62
|
+
},
|
|
63
|
+
},
|
|
64
|
+
}).values;
|
|
65
|
+
|
|
66
|
+
/** @type {TestCase[]} */
|
|
67
|
+
const allTestCases = Object.entries(/** @type {TestSuites} */ (tests)).flatMap(
|
|
68
|
+
(entry) => entry[1].map((testCase) => /** @type {TestCase} */ (testCase))
|
|
69
|
+
);
|
|
70
|
+
|
|
71
|
+
if ('capture' in args) {
|
|
72
|
+
const capture =
|
|
73
|
+
Number(args.capture) ||
|
|
74
|
+
allTestCases.reduce((max, testCase) => Math.max(max, testCase.id), 0) +
|
|
75
|
+
1;
|
|
76
|
+
let testCase = allTestCases.find((testCase) => testCase.id === capture);
|
|
77
|
+
if (!testCase) {
|
|
78
|
+
/** @type {TestCase} */
|
|
79
|
+
testCase = {
|
|
80
|
+
id: capture,
|
|
81
|
+
name: '',
|
|
82
|
+
input: '',
|
|
83
|
+
output: '',
|
|
84
|
+
stdout: '',
|
|
85
|
+
stderr: '',
|
|
86
|
+
};
|
|
87
|
+
const testsObj = /** @type {Record<string, TestCase[]>} */ (tests);
|
|
88
|
+
if (!testsObj.capture) {
|
|
89
|
+
testsObj.capture = [];
|
|
90
|
+
}
|
|
91
|
+
testsObj.capture.push(testCase);
|
|
92
|
+
}
|
|
93
|
+
testCase.input = await captureFiles();
|
|
94
|
+
await writeTestCases();
|
|
95
|
+
process.exit(0);
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
if ('export' in args) {
|
|
99
|
+
const exportN = Number(args.export);
|
|
100
|
+
const testCase = allTestCases.find((testCase) => testCase.id === exportN);
|
|
101
|
+
if (!testCase) {
|
|
102
|
+
console.error(`Test case not found: ${JSON.stringify(exportN)}`);
|
|
103
|
+
process.exit(1);
|
|
104
|
+
}
|
|
105
|
+
await exportFiles(testCase);
|
|
106
|
+
process.exit(0);
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
if ('result' in args) {
|
|
110
|
+
const exportN = Number(args.result);
|
|
111
|
+
const testCase = allTestCases.find((testCase) => testCase.id === exportN);
|
|
112
|
+
if (!testCase) {
|
|
113
|
+
console.error(`Test case not found: ${JSON.stringify(exportN)}`);
|
|
114
|
+
process.exit(1);
|
|
115
|
+
}
|
|
116
|
+
await exportFiles(testCase, true);
|
|
117
|
+
process.exit(0);
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
// Build one describe() suite per key of tests.json; each entry becomes a
// test() that runs the CLI against the unpacked input tree and compares the
// resulting files, stdout, stderr and exit code against the stored fixture.
for (const [suiteName, suiteTestCases] of Object.entries(tests)) {
    describe(suiteName, () => {
        for (const testCase of suiteTestCases) {
            const tc = /** @type {TestCase} */ (testCase);
            test(tc.name, async () => {
                // Lay out this test case's input files in the scratch dir.
                await exportFiles(tc);
                /** @type {ExecResult | ExecError} */
                let result;
                try {
                    result = await exec(
                        `cd ${dir}; node ../../index.js ${tc.args || ''}`
                    );
                } catch (e) {
                    // A rejected exec (e.g. non-zero exit) is still a valid
                    // outcome; the branches below read code/stdout/stderr
                    // off the error object just like off a normal result.
                    result = /** @type {ExecError} */ (e);
                }

                const actualOutput = await captureFiles();

                if (args.update) {
                    // --update mode: overwrite the stored fixture with the
                    // observed behavior instead of asserting against it.
                    tc.output = actualOutput;
                    tc.exitCode = result?.code || 0;
                    tc.stdout = replaceTime(result?.stdout || '');
                    tc.stderr = result?.stderr || '';
                    assert.ok(true);
                } else {
                    assert.strictEqual(result?.code || 0, tc.exitCode || 0);
                    assert.strictEqual(actualOutput, tc.output);
                    // Timings are masked via replaceTime so the comparison
                    // stays deterministic across runs.
                    assert.strictEqual(
                        replaceTime(result?.stdout || ''),
                        tc.stdout
                    );
                    assert.strictEqual(result?.stderr || '', tc.stderr);
                }
            });
        }
    });
}
|
|
157
|
+
|
|
158
|
+
// Final teardown: wipe the scratch directory and, when running in --update
// mode, persist the refreshed fixtures back to disk.
after(async () => {
    await cleanDir();
    if (!args.update) {
        return;
    }
    await writeTestCases();
});
|
|
164
|
+
|
|
165
|
+
/**
 * Materialize a test case's serialized file tree inside the scratch
 * directory, recreating the directory from a clean slate on every call.
 *
 * @param {TestCase} testCase - Test case whose file tree should be written.
 * @param {boolean} [output=false] - When true, unpack the expected output
 *   tree instead of the input tree.
 */
async function exportFiles(testCase, output = false) {
    await cleanDir();
    await fs.mkdir(dir, { recursive: true });
    const tree = output ? testCase.output : testCase.input;
    await stringToFiles(tree, dir);
}
|
|
174
|
+
|
|
175
|
+
/**
 * Serialize the current contents of the scratch directory into a single
 * string, ignoring any node_modules folder.
 *
 * @returns {Promise<string>} Serialized file tree.
 */
function captureFiles() {
    const ignored = ['node_modules'];
    return filesToString(dir, ignored);
}
|
|
181
|
+
|
|
182
|
+
/**
 * Delete the scratch directory and everything inside it. A missing
 * directory is not an error (force: true).
 *
 * @returns {Promise<void>}
 */
async function cleanDir() {
    await fs.rm(dir, { recursive: true, force: true });
}
|
|
188
|
+
|
|
189
|
+
/**
 * Mask timing output so fixtures stay stable across runs: a trailing
 * "in <duration>s" / "in <duration>ms" on any line becomes "in XXX".
 *
 * @param {string} str - Raw stdout/stderr text (may be empty/undefined).
 * @returns {string} Text with durations masked.
 */
function replaceTime(str) {
    if (!str) {
        return str;
    }
    // \d+(?:\.\d+)? also matches single-digit durations such as "in 5ms".
    // The previous pattern (\d+\.?\d+) required at least two digits, so
    // short timings slipped through unmasked and made fixtures flaky.
    return str.replaceAll(/in \d+(?:\.\d+)?m?s$/gm, 'in XXX');
}
|
|
199
|
+
|
|
200
|
+
/**
 * Persist the in-memory test case registry back to tests/tests.json,
 * pretty-printed with 4-space indentation.
 *
 * @returns {Promise<void>}
 */
async function writeTestCases() {
    const serialized = JSON.stringify(tests, null, 4);
    await fs.writeFile('./tests/tests.json', serialized);
}
|
package/tests/tests.json
ADDED
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
{
|
|
2
|
+
"prune": [
|
|
3
|
+
{
|
|
4
|
+
"id": 3,
|
|
5
|
+
"name": "simple prune",
|
|
6
|
+
"args": "",
|
|
7
|
+
"input": "dist\n index.js\n|\npackage.json\n|{\n| \"scripts\": {},\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
8
|
+
"output": "dist\n index.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\"\n|}\n|",
|
|
9
|
+
"stdout": "preparing..\n\n",
|
|
10
|
+
"stderr": ""
|
|
11
|
+
},
|
|
12
|
+
{
|
|
13
|
+
"id": 4,
|
|
14
|
+
"name": "prune flatten",
|
|
15
|
+
"args": "--flatten",
|
|
16
|
+
"input": "dist\n index.js\n|\npackage.json\n|{\n| \"scripts\": {},\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
17
|
+
"output": "index.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\"\n|}\n|",
|
|
18
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
19
|
+
"stderr": ""
|
|
20
|
+
},
|
|
21
|
+
{
|
|
22
|
+
"id": 5,
|
|
23
|
+
"name": "prune flatten (special files)",
|
|
24
|
+
"args": "--flatten",
|
|
25
|
+
"input": "dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"scripts\": {},\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ]\n|}\nREADME.md\n|",
|
|
26
|
+
"output": "index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\"\n|}\n|\nREADME.md\n|",
|
|
27
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
28
|
+
"stderr": ""
|
|
29
|
+
},
|
|
30
|
+
{
|
|
31
|
+
"id": 6,
|
|
32
|
+
"name": "prune unknown profile",
|
|
33
|
+
"args": "--profile=garbage",
|
|
34
|
+
"input": "dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"scripts\": {},\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ]\n|}\nREADME.md\n|",
|
|
35
|
+
"output": "dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"scripts\": {},\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ]\n|}\nREADME.md\n|",
|
|
36
|
+
"stdout": "preparing..\n\n✕ Error: unknown profile garbage\n",
|
|
37
|
+
"stderr": "",
|
|
38
|
+
"exitCode": 255
|
|
39
|
+
},
|
|
40
|
+
{
|
|
41
|
+
"id": 7,
|
|
42
|
+
"name": "prune with library profile removes all but postinstall from scripts",
|
|
43
|
+
"args": "--profile=library",
|
|
44
|
+
"input": "dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"scripts\": {\n| \"serve\": \"rollup -c -w\",\n| \"build\": \"rollup -c\",\n| \"postinstall\": \"npm run build\",\n| \"prepack\": \"pkgbld prune\"\n| },\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ]\n|}\nREADME.md\n|",
|
|
45
|
+
"output": "dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"scripts\": {\n| \"postinstall\": \"npm run build\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\"\n|}\n|\nREADME.md\n|",
|
|
46
|
+
"stdout": "preparing..\n\n",
|
|
47
|
+
"stderr": ""
|
|
48
|
+
},
|
|
49
|
+
{
|
|
50
|
+
"id": 8,
|
|
51
|
+
"name": "prune with app profile removes all but postinstall and start from scripts",
|
|
52
|
+
"args": "prune --profile=app",
|
|
53
|
+
"input": "dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"scripts\": {\n| \"start\": \"rollup -c -w\",\n| \"build\": \"rollup -c\",\n| \"postinstall\": \"npm run build\",\n| \"prepack\": \"pkgbld prune\"\n| },\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ]\n|}\nREADME.md\n|",
|
|
54
|
+
"output": "dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"scripts\": {\n| \"start\": \"rollup -c -w\",\n| \"postinstall\": \"npm run build\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\"\n|}\n|\nREADME.md\n|",
|
|
55
|
+
"stdout": "preparing..\n\n",
|
|
56
|
+
"stderr": ""
|
|
57
|
+
},
|
|
58
|
+
{
|
|
59
|
+
"id": 9,
|
|
60
|
+
"name": "prune with bin (object form) and flatten",
|
|
61
|
+
"args": "--flatten",
|
|
62
|
+
"input": "dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"bin\": {\n| \"mylib\": \"./dist/index.js\"\n| },\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ]\n|}\nREADME.md\n|",
|
|
63
|
+
"output": "index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"bin\": {\n| \"mylib\": \"./index.js\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\"\n|}\n|\nREADME.md\n|",
|
|
64
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
65
|
+
"stderr": ""
|
|
66
|
+
},
|
|
67
|
+
{
|
|
68
|
+
"id": 10,
|
|
69
|
+
"name": "prune flatten with deep folder",
|
|
70
|
+
"args": "--flatten",
|
|
71
|
+
"input": "dist\n dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"bin\": {\n| \"mylib\": \"./dist/dist/index.js\"\n| },\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/dist/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ]\n|}\nREADME.md\n|",
|
|
72
|
+
"output": "index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"bin\": {\n| \"mylib\": \"./index.js\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\"\n|}\n|\nREADME.md\n|",
|
|
73
|
+
"stdout": "preparing..\n\nflattening dist/dist...\n",
|
|
74
|
+
"stderr": ""
|
|
75
|
+
},
|
|
76
|
+
{
|
|
77
|
+
"id": 11,
|
|
78
|
+
"name": "prune with bin (string form) and flatten",
|
|
79
|
+
"args": "--flatten",
|
|
80
|
+
"input": "dist\n index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"bin\": \"./dist/index.js\",\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ]\n|}\nREADME.md\n|",
|
|
81
|
+
"output": "index.js\n|\nlicence.txt\n|\npackage.json\n|{\n| \"bin\": \"./index.js\",\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\"\n|}\n|\nREADME.md\n|",
|
|
82
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
83
|
+
"stderr": ""
|
|
84
|
+
},
|
|
85
|
+
{
|
|
86
|
+
"id": 12,
|
|
87
|
+
"name": "multilevel",
|
|
88
|
+
"args": "--flatten",
|
|
89
|
+
"input": "dist\n index.js\n|\n lib\n index.js\n|\npackage.json\n|{\n| \"bin\": {\n| \"mylib\": \"./dist/index.js\"\n| },\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/lib/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ]\n|}",
|
|
90
|
+
"output": "index.js\n|\nlib\n index.js\n|\npackage.json\n|{\n| \"bin\": {\n| \"mylib\": \"./index.js\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./lib/index.js\"\n|}\n|",
|
|
91
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
92
|
+
"stderr": ""
|
|
93
|
+
},
|
|
94
|
+
{
|
|
95
|
+
"id": 13,
|
|
96
|
+
"name": "multilevel (another order)",
|
|
97
|
+
"args": "--flatten",
|
|
98
|
+
"input": "dist\n index.js\n|\n lib\n index.js\n|\npackage.json\n|{\n| \"devDependencies\": {},\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\",\n| \"./licence.txt\",\n| \"README.md\"\n| ],\n| \"bin\": {\n| \"mylib\": \"./dist/lib/index.js\"\n| }\n|}",
|
|
99
|
+
"output": "index.js\n|\nlib\n index.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\",\n| \"bin\": {\n| \"mylib\": \"./lib/index.js\"\n| }\n|}\n|",
|
|
100
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
101
|
+
"stderr": ""
|
|
102
|
+
},
|
|
103
|
+
{
|
|
104
|
+
"id": 14,
|
|
105
|
+
"name": "flatten specific folder",
|
|
106
|
+
"args": "--flatten=dist",
|
|
107
|
+
"input": "dist\n index.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
108
|
+
"output": "index.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\"\n|}\n|",
|
|
109
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
110
|
+
"stderr": ""
|
|
111
|
+
},
|
|
112
|
+
{
|
|
113
|
+
"id": 15,
|
|
114
|
+
"name": "file already exists",
|
|
115
|
+
"args": "--flatten",
|
|
116
|
+
"input": "dist\n index.js\n|\nindex.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
117
|
+
"output": "dist\n index.js\n|\nindex.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
118
|
+
"exitCode": 255,
|
|
119
|
+
"stdout": "preparing..\n\nflattening dist...\n✕ Error: dist folder cannot be flattened because files already exist: index.js\n",
|
|
120
|
+
"stderr": ""
|
|
121
|
+
},
|
|
122
|
+
{
|
|
123
|
+
"id": 16,
|
|
124
|
+
"name": "2 dist folders",
|
|
125
|
+
"args": "--flatten",
|
|
126
|
+
"input": "dist\n index.js\n|\ndist2\n index2.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"module\": \"./dist2/index2.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
127
|
+
"output": "dist\n index.js\n|\ndist2\n index2.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"module\": \"./dist2/index2.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
128
|
+
"exitCode": 255,
|
|
129
|
+
"stdout": "preparing..\n\n✕ Error: could not find dist folder\n",
|
|
130
|
+
"stderr": ""
|
|
131
|
+
},
|
|
132
|
+
{
|
|
133
|
+
"id": 17,
|
|
134
|
+
"name": "2 dist folders (flatten both separately)",
|
|
135
|
+
"args": "--flatten=dist && node ../../index.js --flatten=dist2",
|
|
136
|
+
"input": "dist\n index.js\n|\ndist2\n index2.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"module\": \"./dist2/index2.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
137
|
+
"output": "index.js\n|\nindex2.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\",\n| \"module\": \"./index2.js\"\n|}\n|",
|
|
138
|
+
"stdout": "preparing..\n\nflattening dist...\npreparing..\n\nflattening dist2...\n",
|
|
139
|
+
"stderr": ""
|
|
140
|
+
},
|
|
141
|
+
{
|
|
142
|
+
"id": 18,
|
|
143
|
+
"name": "folder with / at the end",
|
|
144
|
+
"args": "--flatten",
|
|
145
|
+
"input": "dist\n index.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist/\"\n| ]\n|}",
|
|
146
|
+
"output": "index.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\"\n|}\n|",
|
|
147
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
148
|
+
"stderr": ""
|
|
149
|
+
},
|
|
150
|
+
{
|
|
151
|
+
"id": 19,
|
|
152
|
+
"name": "flatten with exports",
|
|
153
|
+
"args": "--flatten",
|
|
154
|
+
"input": "dist\n index.js\n|\n index.mjs\n|\n second.js\n|\n second.mjs\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"exports\": {\n| \".\": {\n| \"import\": \"./dist/index.mjs\",\n| \"require\": \"./dist/index.js\"\n| },\n| \"./second\": {\n| \"import\": \"./dist/second.mjs\",\n| \"require\": \"./dist/second.js\"\n| }\n| },\n| \"files\": [\n| \"dist\"\n| ]\n|}\nsecond\n package.json\n|",
|
|
155
|
+
"output": "index.js\n|\nindex.mjs\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\",\n| \"exports\": {\n| \".\": {\n| \"import\": \"./index.mjs\",\n| \"require\": \"./index.js\"\n| },\n| \"./second\": {\n| \"import\": \"./second.mjs\",\n| \"require\": \"./second.js\"\n| }\n| },\n| \"files\": [\n| \"index.mjs\",\n| \"second.js\",\n| \"second.mjs\"\n| ]\n|}\n|\nsecond.js\n|\nsecond.mjs\n|",
|
|
156
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
157
|
+
"stderr": ""
|
|
158
|
+
},
|
|
159
|
+
{
|
|
160
|
+
"id": 20,
|
|
161
|
+
"name": "flatten pkg with extra number property",
|
|
162
|
+
"args": "--flatten",
|
|
163
|
+
"input": "dist\n index.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\"\n| ],\n| \"extra\": 100\n|}",
|
|
164
|
+
"output": "index.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\",\n| \"extra\": 100\n|}\n|",
|
|
165
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
166
|
+
"stderr": ""
|
|
167
|
+
},
|
|
168
|
+
{
|
|
169
|
+
"id": 21,
|
|
170
|
+
"name": "flatten with directories.bin",
|
|
171
|
+
"args": "--flatten",
|
|
172
|
+
"input": "dist\n bin\n 1.js\n|\n 2.js\n|\n index.js\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./dist/bin\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
173
|
+
"output": "bin\n 1.js\n|\n 2.js\n|\nindex.js\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./bin\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\",\n| \"files\": [\n| \"bin\"\n| ]\n|}\n|",
|
|
174
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
175
|
+
"stderr": ""
|
|
176
|
+
},
|
|
177
|
+
{
|
|
178
|
+
"id": 22,
|
|
179
|
+
"name": "flatten with directories.bin 2",
|
|
180
|
+
"args": "--flatten --optimizeFiles=false",
|
|
181
|
+
"input": "dist\n bin\n 1.js\n|\n 2.js\n|\n index.js\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./dist/bin\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
182
|
+
"output": "bin\n 1.js\n|\n 2.js\n|\nindex.js\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./bin\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./index.js\",\n| \"files\": [\n| \"index.js\",\n| \"bin/1.js\",\n| \"bin/2.js\"\n| ]\n|}\n|",
|
|
183
|
+
"stdout": "preparing..\n\nflattening dist...\n",
|
|
184
|
+
"stderr": ""
|
|
185
|
+
},
|
|
186
|
+
{
|
|
187
|
+
"id": 23,
|
|
188
|
+
"name": "deep optimize",
|
|
189
|
+
"args": "",
|
|
190
|
+
"input": "dist\n 2\n bin\n 1.js\n|\n 2.js\n|\n index.js\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./dist/2/bin\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./dist/2/index.js\",\n| \"files\": [\n| \"dist/2/bin/1.js\",\n| \"dist/2/bin/2.js\",\n| \"dist/2/index.js\"\n| ]\n|}",
|
|
191
|
+
"output": "dist\n 2\n bin\n 1.js\n|\n 2.js\n|\n index.js\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./dist/2/bin\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./dist/2/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}\n|",
|
|
192
|
+
"stdout": "preparing..\n\n",
|
|
193
|
+
"stderr": ""
|
|
194
|
+
},
|
|
195
|
+
{
|
|
196
|
+
"id": 24,
|
|
197
|
+
"name": "deep optimize 2",
|
|
198
|
+
"args": "",
|
|
199
|
+
"input": "dist\n 2\n 1\n index.js\n|\n bin\n 1.js\n|\n 2.js\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./dist/2/bin\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./dist/2/1/index.js\",\n| \"files\": [\n| \"dist/2/bin/1.js\",\n| \"dist/2/bin/2.js\",\n| \"dist/2/1/index.js\"\n| ]\n|}",
|
|
200
|
+
"output": "dist\n 2\n 1\n index.js\n|\n bin\n 1.js\n|\n 2.js\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./dist/2/bin\"\n| },\n| \"name\": \"mylib\",\n| \"main\": \"./dist/2/1/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}\n|",
|
|
201
|
+
"stdout": "preparing..\n\n",
|
|
202
|
+
"stderr": ""
|
|
203
|
+
},
|
|
204
|
+
{
|
|
205
|
+
"id": 25,
|
|
206
|
+
"name": "remove sourcemaps",
|
|
207
|
+
"args": "--removeSourcemaps",
|
|
208
|
+
"input": "dist\n index.js\n|// something\n|\n|console.log('Hello, world!');\n|\n|//# sourceMappingURL=index.js.map\n|\n index.js.map\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
209
|
+
"output": "dist\n index.js\n|// something\n|\n|console.log('Hello, world!');\n|\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"main\": \"./dist/index.js\"\n|}\n|",
|
|
210
|
+
"stdout": "preparing..\n\n",
|
|
211
|
+
"stderr": ""
|
|
212
|
+
},
|
|
213
|
+
{
|
|
214
|
+
"id": 26,
|
|
215
|
+
"name": "flatten directories.bin (2 files)",
|
|
216
|
+
"args": "--flatten dist/bin",
|
|
217
|
+
"input": "dist\n bin\n 1.js\n|\n 2.js\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./dist/bin\"\n| },\n| \"name\": \"mylib\",\n| \"files\": [\n| \"dist\"\n| ]\n|}",
|
|
218
|
+
"output": "1.js\n|\n2.js\n|\npackage.json\n|{\n| \"name\": \"mylib\",\n| \"bin\": {\n| \"1\": \"1.js\",\n| \"2\": \"2.js\"\n| }\n|}\n|",
|
|
219
|
+
"stdout": "preparing..\n\nflattening dist/bin...\n",
|
|
220
|
+
"stderr": ""
|
|
221
|
+
},
|
|
222
|
+
{
|
|
223
|
+
"id": 27,
|
|
224
|
+
"name": "flatten directories.bin (1 file + man)",
|
|
225
|
+
"args": "--flatten dist/bin",
|
|
226
|
+
"input": "dist\n bin\n 2.js\n|\nman\n doc.txt\n|\npackage.json\n|{\n| \"directories\": {\n| \"bin\": \"./dist/bin\",\n| \"man\": \"./man\"\n| },\n| \"name\": \"mylib\",\n| \"files\": [\n| \"dist\",\n| \"man\"\n| ]\n|}",
|
|
227
|
+
"output": "2.js\n|\nman\n doc.txt\n|\npackage.json\n|{\n| \"directories\": {\n| \"man\": \"./man\"\n| },\n| \"name\": \"mylib\",\n| \"bin\": \"2.js\"\n|}\n|",
|
|
228
|
+
"stdout": "preparing..\n\nflattening dist/bin...\n",
|
|
229
|
+
"stderr": ""
|
|
230
|
+
}
|
|
231
|
+
]
|
|
232
|
+
}
|