@cubing/dev-config 0.7.0 → 0.7.2
This diff represents the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
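A diff like this can also be reproduced locally with npm's built-in `diff` command (available in recent npm versions), for example: `npm diff --diff=@cubing/dev-config@0.7.0 --diff=@cubing/dev-config@0.7.2`.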
- package/bin/package.json/index.js +580 -0
- package/bin/package.json/index.js.map +7 -0
- package/chunks/chunk-NX2753TH.js +28 -0
- package/chunks/chunk-NX2753TH.js.map +7 -0
- package/chunks/chunk-OZDRWEHO.js +51 -0
- package/chunks/chunk-OZDRWEHO.js.map +7 -0
- package/esbuild/es2022/index.js +5 -22
- package/esbuild/es2022/index.js.map +3 -3
- package/lib/check-allowed-imports/checkAllowedImports.d.ts +20 -0
- package/lib/check-allowed-imports/index.d.ts +1 -0
- package/lib/check-allowed-imports/index.js +113 -0
- package/lib/check-allowed-imports/index.js.map +7 -0
- package/package.json +6 -3
- package/bin/package.json.ts +0 -700
- package/lib/check-allowed-imports/checkAllowedImports.ts +0 -152
- package/lib/check-allowed-imports/index.ts +0 -4
package/bin/package.json.ts
DELETED
@@ -1,700 +0,0 @@
-#!/usr/bin/env -S bun run --
-
-/** biome-ignore-all lint/complexity/useLiteralKeys: https://github.com/biomejs/biome/discussions/7404 */
-
-import assert from "node:assert";
-import { constants } from "node:fs/promises";
-import { argv, exit } from "node:process";
-import type { JSONSchemaForNPMPackageJsonFiles } from "@schemastore/package";
-import { semver } from "bun";
-import { Path, ResolutionPrefix, stringifyIfPath } from "path-class";
-import { PrintableShellCommand } from "printable-shell-command";
-
-// Licenses from https://github.com/cubing/infra?tab=readme-ov-file#conventions
-const PERMITTED_LICENSES = new Set([
-  "MPL-2.0",
-  "MIT",
-  "Unlicense",
-  "GPL-3.0-or-later",
-]);
-
-// TODO: proper CLI parsing once this gets more complicated.
-const subcommand: "check" | "format" = (() => {
-  const subcommand = argv[2];
-  if (!["check", "format"].includes(subcommand)) {
-    console.error("Must specify subcommand: `check` or `format`");
-    exit(1);
-  }
-  return subcommand as "check" | "format";
-})();
-
-let exitCode: number = 0;
-let foundFixableErrors: boolean = false;
-
-const PACKAGE_JSON_PATH = new Path("./package.json");
-
-/*
-
-Note: this checker is opinionated, and does not allow certain patterns.
-
-It also assumes certain conventions about package structure and maintenance.
-
-*/
-
-// TODO: Schema validation.
-
-console.log("Parsing `package.json`:");
-const packageJSONString = await PACKAGE_JSON_PATH.readText();
-let packageJSON: JSONSchemaForNPMPackageJsonFiles = (() => {
-  try {
-    const packageJSON: JSONSchemaForNPMPackageJsonFiles =
-      JSON.parse(packageJSONString);
-    console.log("✅ `package.json` is valid JSON.");
-    return packageJSON;
-  } catch {
-    console.log(
-      "❌ `package.json` must be valid JSON (not JSONC or JSON5 or anything else).",
-    );
-    exit(1);
-  }
-})();
-
-console.log("Checking field order:");
-const opinionatedFieldOrder = [
-  "name",
-  "version",
-  "homepage",
-  "description",
-  "author",
-  "license",
-  "repository",
-  "engines",
-  "os",
-  "cpu",
-  "type",
-  "main",
-  "types",
-  "module",
-  "browser",
-  "exports",
-  "bin",
-  "dependencies",
-  "devDependencies",
-  "optionalDependencies",
-  "peerDependencies",
-  "bundleDependencies",
-  "devEngines",
-  "files",
-  "scripts",
-  "keywords",
-  "@cubing/deploy",
-  "$schema",
-] as const;
-const opinionatedFields = new Set(opinionatedFieldOrder);
-
-const packageJSONOrder: string[] = [];
-for (const key in packageJSON) {
-  // biome-ignore lint/suspicious/noExplicitAny: Type wrangling
-  if (opinionatedFields.has(key as any)) {
-    packageJSONOrder.push(key);
-  } else {
-    console.warn(`⚠️ [${JSON.stringify(key)}] Unexpected field.`);
-  }
-}
-const packageJSONByOpinionatedOrder: string[] = [];
-for (const field of opinionatedFieldOrder) {
-  if (field in packageJSON) {
-    packageJSONByOpinionatedOrder.push(field);
-  }
-}
-
-try {
-  assert.deepEqual(packageJSONOrder, packageJSONByOpinionatedOrder);
-  console.log(`✅ Field order is good.`);
-} catch {
-  switch (subcommand) {
-    case "check": {
-      console.log(`❌ Found opinionated fields out of order:`);
-      console.log(`↤ ${packageJSONOrder.join(", ")}`);
-      console.log("Expected:");
-      console.log(`↦ ${packageJSONByOpinionatedOrder.join(", ")}`);
-      console.log(
-        "📝 Run with the `sort` subcommand to sort. (Additional fields will kept after the field they previously followed.)",
-      );
-      foundFixableErrors = true;
-      exitCode = 1;
-      break;
-    }
-    case "format": {
-      console.log("📝 Invalid field order. Formatting…");
-      exitCode = 1;
-      const newKeyOrder: string[] = [];
-      for (const key of packageJSONByOpinionatedOrder) {
-        newKeyOrder.push(key);
-      }
-      for (const { value: key, previous } of withOrderingMetadata(
-        Object.keys(packageJSON),
-      )) {
-        if (newKeyOrder.includes(key)) {
-          continue;
-        }
-        if (!previous) {
-          newKeyOrder.unshift(key);
-        } else {
-          const { value: previousKey } = previous;
-          const idx = newKeyOrder.indexOf(previousKey);
-          newKeyOrder.splice(idx + 1, 0, key);
-        }
-      }
-      const newPackageJSON: JSONSchemaForNPMPackageJsonFiles = {};
-      for (const key of newKeyOrder) {
-        newPackageJSON[key] = packageJSON[key];
-      }
-      packageJSON = newPackageJSON;
-      break;
-    }
-    default:
-      throw new Error("Invalid subcommand.") as never;
-  }
-}
-
-// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/typeof#description
-type TypeOfType =
-  | "undefined"
-  | "object"
-  | "boolean"
-  | "number"
-  | "bigint"
-  | "string"
-  | "symbol"
-  | "function"
-  | "object";
-type Categorization = "array" | "null" | TypeOfType;
-// biome-ignore lint/suspicious/noExplicitAny: `any` is correct.
-function categorize(v: any): Categorization {
-  if (Array.isArray(v)) {
-    return "array";
-  }
-  if (v === null) {
-    return "null";
-  }
-  return typeof v;
-}
-
-interface OrderingMetadata<T> {
-  value: T;
-  previous: { value: T } | null;
-  isLast: boolean;
-}
-function* withOrderingMetadata<T>(
-  iter: Iterable<T>,
-): Iterable<OrderingMetadata<T>> {
-  // The following functions as an `Option<T>`, even when `T` is undefined.
-  let previous: [OrderingMetadata<T>] | undefined;
-  for (const value of iter) {
-    if (previous) {
-      yield previous[0];
-      previous = [
-        { value, previous: { value: previous[0].value }, isLast: false },
-      ];
-    } else {
-      previous = [{ value, previous: null, isLast: false }];
-    }
-  }
-  if (previous) {
-    yield { ...previous[0], isLast: true };
-  }
-}
-type Breadcrumbs = (string | [string] | number)[];
-function traverse<T>(
-  breadcrumbs: Breadcrumbs,
-  options?: { set?: T },
-): {
-  breadcrumbString: string;
-  maybeValue: [T] | null;
-} {
-  assert(breadcrumbs.length > 0);
-  // biome-ignore lint/suspicious/noExplicitAny: Type wrangling
-  let maybeValue: [T | any] | null = [packageJSON];
-  let breadcrumbString = "";
-  for (let { value: breadcrumb, isLast } of withOrderingMetadata(breadcrumbs)) {
-    if (Array.isArray(breadcrumb)) {
-      assert(breadcrumb.length === 1);
-      assert(typeof breadcrumb[0] === "string");
-      breadcrumb = breadcrumb[0];
-      breadcrumbString += `[${JSON.stringify(breadcrumb)}]`;
-    } else if (typeof breadcrumb === "string") {
-      breadcrumbString += `.${breadcrumb}`;
-    } else {
-      breadcrumbString += `[${breadcrumb}]`;
-    }
-    if (options && "set" in options && isLast) {
-      if (
-        !maybeValue ||
-        !["array", "object"].includes(categorize(maybeValue[0]))
-      ) {
-        // This okay for now, because we currently only write to values we have read.
-        throw new Error(
-          "Missing (but expected) traversal path while setting a value",
-        ) as never;
-      }
-      maybeValue[0][breadcrumb] = stringifyIfPath(options.set);
-    } else if (
-      maybeValue &&
-      ["array", "object"].includes(categorize(maybeValue[0])) &&
-      breadcrumb in maybeValue[0]
-    ) {
-      maybeValue = [maybeValue[0][breadcrumb]];
-    } else {
-      maybeValue = null;
-    }
-  }
-  return { breadcrumbString, maybeValue };
-}
-
-function field<T>(
-  breadcrumbs: Breadcrumbs,
-  type: Categorization | Categorization[],
-  options?: {
-    optional?: boolean;
-    additionalChecks?: { [requirementMessage: string]: (t: T) => boolean };
-    skipPrintingSuccess?: boolean;
-    mustBePopulatedMessage?: string;
-  },
-) {
-  const mustBePopulatedMessage = () =>
-    options?.mustBePopulatedMessage ?? "Field must be populated.";
-  const { breadcrumbString, maybeValue } = traverse(breadcrumbs);
-  if (!maybeValue) {
-    if (options?.optional) {
-      if (!options.skipPrintingSuccess) {
-        console.log(`☑️ ${breadcrumbString}`);
-      }
-      return;
-    } else {
-      console.log(`❌ ${breadcrumbString} — ${mustBePopulatedMessage()}`);
-      exitCode = 1;
-      return;
-    }
-  }
-  const [value] = maybeValue;
-
-  const typeArray = Array.isArray(type) ? type : [type];
-  const category = categorize(value);
-  if (typeArray.includes(category)) {
-    for (const [failureMessage, fn] of Object.entries(
-      options?.additionalChecks ?? {},
-    )) {
-      if (!fn) {
-        console.log(`❌ ${breadcrumbString} | ${failureMessage}`);
-        exitCode = 1;
-        return;
-      }
-    }
-    if (!options?.skipPrintingSuccess) {
-      console.log(`✅ ${breadcrumbString}`);
-    }
-  } else {
-    if (category === "undefined") {
-      console.log(`❌ ${breadcrumbString} — ${mustBePopulatedMessage()}.`);
-    } else if (type === "undefined") {
-      console.log(
-        `❌ ${breadcrumbString} — Field is populated (but must not be).`,
-      );
-    } else {
-      if (Array.isArray(type)) {
-        console.log(
-          `❌ ${breadcrumbString} — Does not match an expected type: ${type.join(", ")}`,
-        );
-      } else {
-        console.log(
-          `❌ ${breadcrumbString} — Does not match expected type: ${type}`,
-        );
-      }
-    }
-    exitCode = 1;
-    return;
-  }
-}
-
-function mustNotBePopulated(breadcrumbs: Breadcrumbs) {
-  const { breadcrumbString, maybeValue } = traverse(breadcrumbs);
-  if (maybeValue) {
-    console.log(`❌ ${breadcrumbString} — Must not be present.`);
-    exitCode = 1;
-    return;
-  }
-}
-
-console.log("Checking presence and type of fields:");
-
-field(["name"], "string");
-field(["version"], "string", {
-  additionalChecks: {
-    "Version must parse successfully.": (version: string) =>
-      semver.order(version, version) === 0,
-  },
-});
-field(["homepage"], "string", { optional: true });
-field(["description"], "string");
-// TODO: format author.
-field(["author"], ["string", "object"]);
-if (categorize(packageJSON["author"]) === "object") {
-  field(["author", "name"], "string");
-  field(["author", "email"], "string");
-  field(["author", "url"], "string", {
-    additionalChecks: {
-      "URL must parse.": (url: string) => {
-        try {
-          new URL(url);
-          return true;
-        } catch {
-          return false;
-        }
-      },
-    },
-  });
-}
-field(["license"], "string", {
-  additionalChecks: {
-    "Must contain a non-permitted license.": (license: string) => {
-      for (const licenseEntry of license.split(" OR ")) {
-        if (!PERMITTED_LICENSES.has(licenseEntry)) {
-          return false;
-        }
-      }
-      return true;
-    },
-  },
-});
-// TODO: format repo.
-field(["repository"], "object");
-field(["repository", "type"], "string");
-const GIT_URL_PREFIX = "git+";
-const GIT_URL_SUFFIX = ".";
-field(["repository", "url"], "string", {
-  additionalChecks: {
-    [`URL must be prefixed with \`${GIT_URL_PREFIX}\`.`]: (url: string) =>
-      url.startsWith(GIT_URL_PREFIX),
-    [`URL must end with with \`.${GIT_URL_SUFFIX}\`.`]: (url: string) =>
-      url.endsWith(GIT_URL_SUFFIX),
-    "URL must parse.": (url: string) => {
-      try {
-        new URL(url.slice());
-        return true;
-      } catch {
-        return false;
-      }
-    },
-  },
-});
-// TODO: Validate version range syntax.
-field(["engines"], "object", { optional: true });
-field(["os"], "array", { optional: true });
-field(["cpu"], "array", { optional: true });
-field(["type"], "string", {
-  additionalChecks: {
-    'Type must be `"module"`.': (type: string) => type === "module",
-  },
-});
-const mainOrTypesArePopoulated = (() => {
-  if ("main" in packageJSON || "types" in packageJSON) {
-    field(["main"], "string", {
-      mustBePopulatedMessage: 'Must be populated if "types" is populated.',
-    });
-    field(["types"], "string", {
-      mustBePopulatedMessage: 'Must be populated if "main" is populated.',
-    });
-    return true;
-  } else {
-    console.log("☑️ .main");
-    console.log("☑️ .types");
-    return false;
-  }
-})();
-mustNotBePopulated(["module"]);
-mustNotBePopulated(["browser"]);
-field(["exports"], "object", { optional: !mainOrTypesArePopoulated });
-field(["bin"], "object", { optional: true });
-field(["dependencies"], "object", { optional: true });
-field(["devDependencies"], "object", { optional: true });
-field(["optionalDependencies"], "object", { optional: true });
-field(["peerDependencies"], "object", { optional: true });
-field(["bundleDependencies"], "object", { optional: true });
-field(["devEngines"], "object", { optional: true });
-// TODO: check for path resolution prefix?
-// Set to `["*"]` if needed.
-field(["files"], "array");
-field(["scripts"], "object");
-// Set to `"# no-op"` if needed.
-field(["scripts", "prepublishOnly"], "string");
-
-console.log("Checking paths of binaries and exports:");
-
-const tempDir = await Path.makeTempDir();
-await using tempDirDisposable = {
-  [Symbol.asyncDispose]: async () => {
-    console.log("Disposing…");
-    await tempDir.rm_rf();
-  },
-};
-const extractionDir = await tempDir.join("extracted").mkdir();
-// TODO: is there a 100% reliable way to test against paths that *will* be packed?
-// Note that this has to take into account `.gitignore`, `.npmignore`, and `"files"` — with globs and excludes.
-// For now, we print the command to make it clear that some heavy lifting is going on (and that it's not our fault that it's slow).
-const data: { filename: string }[] = await new PrintableShellCommand("npm", [
-  "pack",
-  "--json",
-  "--ignore-scripts",
-  ["--pack-destination", tempDir],
-])
-  .print()
-  .json();
-const tgzPath = tempDir.join(data[0].filename);
-await new PrintableShellCommand("tar", [
-  ["-C", extractionDir],
-  ["-xvzf", tgzPath],
-]).spawn().success;
-
-const extractedRoot = extractionDir.join("package/");
-assert(await extractedRoot.existsAsDir());
-
-const checks: Promise<string>[] = [];
-
-// TODO: check compilability
-function checkPath(
-  breadcrumbs: Breadcrumbs,
-  options: { expectPrefix: ResolutionPrefix; mustBeExecutable?: true },
-) {
-  const { breadcrumbString, maybeValue } = traverse(breadcrumbs);
-  if (!maybeValue) {
-    return;
-  }
-  const [value] = maybeValue;
-  checks.push(
-    (async () => {
-      if (typeof value !== "string") {
-        exitCode = 1;
-        return `❌ ${breadcrumbString} — Non-string value`;
-      }
-      if (value.includes("*")) {
-        return `⏭️ ${breadcrumbString} — Skipping due to glob (*) — ${value}`;
-      }
-      const unresolvedPath = new Path(value);
-      if (unresolvedPath.resolutionPrefix !== options.expectPrefix) {
-        if (unresolvedPath.resolutionPrefix === ResolutionPrefix.Absolute) {
-          exitCode = 1;
-          return `❌ ${breadcrumbString} — Incorrect resolution prefix (${unresolvedPath.resolutionPrefix}) — ${value}`;
-        } else {
-          switch (subcommand) {
-            case "check": {
-              exitCode = 1;
-              foundFixableErrors = true;
-              return `❌ ${breadcrumbString} — Incorrect resolution prefix (${unresolvedPath.resolutionPrefix}) — 📝 fixable! — ${value}`;
-            }
-            case "format": {
-              console.log(
-                `📝 — Incorrect resolution prefix (${unresolvedPath.resolutionPrefix}) — fixing! — ${value}`,
-              );
-              // TODO: do this calculation before reporting as fixable
-              const newPath =
-                options.expectPrefix === ResolutionPrefix.Bare
-                  ? unresolvedPath.asBare()
-                  : unresolvedPath.asRelative();
-              traverse(breadcrumbs, { set: newPath });
-              break;
-            }
-            default:
-              throw new Error("Invalid subcommand.") as never;
-          }
-        }
-      }
-      if (
-        unresolvedPath.path.startsWith("../") ||
-        unresolvedPath.path === ".."
-      ) {
-        exitCode = 1;
-        return `❌ ${breadcrumbString} — Invalid traversal of parent path. — ${value}`;
-      }
-      const resolvedPath = Path.resolve(unresolvedPath, extractedRoot);
-      // TODO: allow folders (with a required trailing slash)?
-      if (!(await resolvedPath.existsAsFile())) {
-        exitCode = 1;
-        return `❌ ${breadcrumbString} — Path must be present in the package. — ${value}`;
-      }
-      if (options.mustBeExecutable) {
-        if (!((await resolvedPath.stat()).mode ^ constants.X_OK)) {
-          // This is not considered fixable because the binary may be the output
-          // of a build process. In that case, the build process is responsible
-          // for marking it as executable.
-          return `❌ ${breadcrumbString} — File at path must be executable. — ${value}`;
-        }
-      }
-      return `✅ ${breadcrumbString} — Path must be present in the package. — ${value}`;
-    })(),
-  );
-}
-
-checkPath(["main"], { expectPrefix: ResolutionPrefix.Relative });
-checkPath(["types"], { expectPrefix: ResolutionPrefix.Relative });
-checkPath(["module"], { expectPrefix: ResolutionPrefix.Relative });
-checkPath(["browser"], { expectPrefix: ResolutionPrefix.Relative });
-
-const { exports } = packageJSON;
-if (exports) {
-  for (const [subpath, value] of Object.entries(exports)) {
-    if (!value) {
-      // biome-ignore lint/complexity/noUselessContinue: Explicit control flow.
-      continue;
-    } else if (typeof value === "string") {
-      // TODO: error?
-      checkPath(["exports", [subpath]], {
-        expectPrefix: ResolutionPrefix.Relative,
-      });
-    } else if (value === null) {
-      // biome-ignore lint/complexity/noUselessContinue: Explicit control flow.
-      continue;
-    } else if (Array.isArray(value)) {
-      throw new Error(
-        "❌ .exports — Must use an object (instead of an array).",
-      );
-    } else {
-      const keys = Object.keys(value as Record<string, string>);
-
-      checks.push(
-        (async () => {
-          const { breadcrumbString } = traverse(["exports", [subpath]]);
-          const fixingLines = [];
-          const orderingErrorLines = [];
-          /**
-           * https://nodejs.org/api/packages.html#conditional-exports
-           */
-          let updateKeys = false;
-          if (keys.includes("types")) {
-            if (keys[0] !== "types") {
-              switch (subcommand) {
-                case "check": {
-                  orderingErrorLines.push(
-                    ` ↪ "types" must be the first export if present — 📝 fixable!`,
-                  );
-                  break;
-                }
-                case "format": {
-                  fixingLines.push(
-                    ` ↪ "types" must be the first export if present — 📝 fixing!`,
-                  );
-                  keys.splice(keys.indexOf("types"), 1);
-                  keys.splice(0, 0, "types");
-                  updateKeys = true;
-                  break;
-                }
-                default:
-                  throw new Error("Invalid subcommand.") as never;
-              }
-            }
-          }
-          if (keys.includes("default")) {
-            if (keys.at(-1) !== "default") {
-              switch (subcommand) {
-                case "check": {
-                  orderingErrorLines.push(
-                    ` ↪ "default" must be the last export if present — 📝 fixable!`,
-                  );
-                  break;
-                }
-                case "format": {
-                  fixingLines.push(
-                    ` ↪ "default" must be the last export if present — 📝 fixing!`,
-                  );
-                  keys.splice(keys.indexOf("default"), 1);
-                  keys.push("default");
-                  updateKeys = true;
-                  break;
-                }
-                default:
-                  throw new Error("Invalid subcommand.") as never;
-              }
-            }
-          }
-          if (updateKeys) {
-            // TODO: avoid type wrangling.
-            const newConditionalExports: Record<string, string> = {};
-            for (const key of keys) {
-              newConditionalExports[key] = (value as Record<string, string>)[
-                key
-              ];
-            }
-            (exports as Record<string, Record<string, string>>)[subpath] =
-              newConditionalExports;
-          }
-          for (const key of keys) {
-            // Note `"require"` is *emphatically not allowed*.
-            if (!["types", "import", "default"].includes(key)) {
-              orderingErrorLines.push(
-                ` ↪ Key must not be present: ${JSON.stringify(key)}`,
-              );
-            }
-          }
-          if (orderingErrorLines.length > 0) {
-            exitCode = 1;
-            return [
-              `❌ ${breadcrumbString} — Invalid keys:`,
-              ...orderingErrorLines,
-            ].join("\n");
-          } else {
-            if (fixingLines.length > 0) {
-              return [
-                `✅ ${breadcrumbString} — Fixing key ordering:`,
-                ...fixingLines,
-              ].join("\n");
-            } else {
-              return `✅ ${breadcrumbString} — Key set and ordering is OK.`;
-            }
-          }
-        })(),
-      );
-      for (const secondaryKey of keys) {
-        checkPath(["exports", [subpath], secondaryKey], {
-          expectPrefix: ResolutionPrefix.Relative,
-        });
-      }
-    }
-  }
-}
-
-const { bin } = packageJSON;
-if (bin) {
-  for (const binEntry of Object.keys(bin as Record<string, string>)) {
-    checkPath(["bin", [binEntry]], {
-      // `npm pkg fix` prefers bare paths for `bin` entries for some reason. 🤷
-      expectPrefix: ResolutionPrefix.Bare,
-      // `npm` will technically make binary entry points executable, but we want
-      // to enforce that the unpackaged path also is. This is particularly
-      // important when the package is linked.
-      mustBeExecutable: true,
-    });
-  }
-}
-
-console.log((await Promise.all(checks)).join("\n"));
-
-if (subcommand === "format") {
-  console.log("📝 Writing formatting fixes.");
-  // TODO: support trailing space in `path-class`.
-  await PACKAGE_JSON_PATH.write(`${JSON.stringify(packageJSON, null, " ")}\n`);
-  console.log(PACKAGE_JSON_PATH.path);
-  console.log("📝 Running `npm pkg fix.");
-  await new PrintableShellCommand("npm", ["pkg", "fix"])
-    .print({ argumentLineWrapping: "inline" })
-    .spawn().success;
-} else if (foundFixableErrors) {
-  console.log();
-  console.log(
-    "📝 Found fixable errors. Run with the `format` subcommand to fix.",
-  );
-  console.log();
-}
-
-await tempDirDisposable[Symbol.asyncDispose]();
-
-exit(exitCode);