screw-up 0.12.0 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +21 -7
- package/dist/analyzer.d.ts +13 -1
- package/dist/analyzer.d.ts.map +1 -1
- package/dist/cli-internal.d.ts +30 -7
- package/dist/cli-internal.d.ts.map +1 -1
- package/dist/cli.d.ts +12 -2
- package/dist/cli.d.ts.map +1 -1
- package/dist/generated/packageMetadata.d.ts +18 -0
- package/dist/generated/packageMetadata.d.ts.map +1 -0
- package/dist/index.cjs +36 -13
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +10 -1
- package/dist/index.js +36 -13
- package/dist/index.js.map +1 -1
- package/dist/{internal-Di0s8LQa.cjs → internal-BHSe5LIZ.cjs} +349 -322
- package/dist/internal-BHSe5LIZ.cjs.map +1 -0
- package/dist/{internal-BaMzTKS2.js → internal-BgCvktPU.js} +351 -324
- package/dist/internal-BgCvktPU.js.map +1 -0
- package/dist/internal.d.ts +64 -14
- package/dist/internal.d.ts.map +1 -1
- package/dist/main.cjs +1166 -0
- package/dist/main.cjs.map +1 -0
- package/dist/main.d.ts +13 -0
- package/dist/main.d.ts.map +1 -0
- package/dist/main.js +1165 -0
- package/dist/main.js.map +1 -0
- package/dist/packageMetadata-D9nXAoK9.cjs +20 -0
- package/dist/packageMetadata-D9nXAoK9.cjs.map +1 -0
- package/dist/packageMetadata-Dsxn2dKN.js +20 -0
- package/dist/packageMetadata-Dsxn2dKN.js.map +1 -0
- package/dist/types.d.ts +15 -0
- package/dist/types.d.ts.map +1 -1
- package/dist/vite-plugin.d.ts +10 -1
- package/dist/vite-plugin.d.ts.map +1 -1
- package/images/screw-up-120.png +0 -0
- package/package.json +13 -14
- package/README_pack.md +0 -63
- package/dist/cli.cjs +0 -765
- package/dist/cli.cjs.map +0 -1
- package/dist/cli.js +0 -764
- package/dist/cli.js.map +0 -1
- package/dist/internal-BaMzTKS2.js.map +0 -1
- package/dist/internal-Di0s8LQa.cjs.map +0 -1
package/dist/cli.cjs
DELETED
@@ -1,765 +0,0 @@

#!/usr/bin/env node
"use strict";
const path = require("path");
const fs = require("fs");
const promises = require("fs/promises");
const child_process = require("child_process");
const glob = require("glob");
const stream = require("stream");
const zlib = require("zlib");
const internal = require("./internal-Di0s8LQa.cjs");
/*!
 * name: tar-vern
 * version: 0.3.0
 * description: Tape archiver library for Typescript
 * author: Kouji Matsui (@kekyo@mi.kekyo.net)
 * license: MIT
 * repository.url: https://github.com/kekyo/tar-vern.git
 */
const getUName = (candidateName, candidateId, reflectStat) => {
  return "root";
};
const getBuffer = (data) => {
  return Buffer.isBuffer(data) ? data : Buffer.from(data, "utf8");
};
const createFileItem = async (path2, content, options) => {
  var _a, _b, _c, _d, _e, _f;
  const mode = (_a = options == null ? void 0 : options.mode) != null ? _a : 420;
  const uid = (_b = options == null ? void 0 : options.uid) != null ? _b : 0;
  const gid = (_c = options == null ? void 0 : options.gid) != null ? _c : 0;
  const date = (_d = options == null ? void 0 : options.date) != null ? _d : /* @__PURE__ */ new Date();
  const uname = (_e = options == null ? void 0 : options.uname) != null ? _e : "root";
  const gname = (_f = options == null ? void 0 : options.gname) != null ? _f : "root";
  return {
    kind: "file",
    path: path2,
    mode,
    uname,
    gname,
    uid,
    gid,
    date,
    content
  };
};
const createReadableFileItem = async (path2, readable, options) => {
  var _a, _b, _c, _d, _e, _f;
  const mode = (_a = options == null ? void 0 : options.mode) != null ? _a : 420;
  const uid = (_b = options == null ? void 0 : options.uid) != null ? _b : 0;
  const gid = (_c = options == null ? void 0 : options.gid) != null ? _c : 0;
  const date = (_d = options == null ? void 0 : options.date) != null ? _d : /* @__PURE__ */ new Date();
  const uname = (_e = options == null ? void 0 : options.uname) != null ? _e : "root";
  const gname = (_f = options == null ? void 0 : options.gname) != null ? _f : "root";
  let length = options == null ? void 0 : options.length;
  if (!length) {
    const chunks = [];
    length = 0;
    for await (const chunk of readable) {
      const buffer = getBuffer(chunk);
      chunks.push(buffer);
      length += buffer.length;
    }
    return {
      kind: "file",
      path: path2,
      mode,
      uname,
      gname,
      uid,
      gid,
      date,
      content: {
        kind: "readable",
        length,
        readable: stream.Readable.from(chunks)
      }
    };
  } else {
    return {
      kind: "file",
      path: path2,
      mode,
      uname,
      gname,
      uid,
      gid,
      date,
      content: {
        kind: "readable",
        length,
        readable
      }
    };
  }
};
const createReadFileItem = async (path2, filePath, reflectStat, options) => {
  const stats = await promises.stat(filePath);
  const reader = fs.createReadStream(filePath);
  const mode = stats.mode;
  const uid = stats.uid;
  const gid = stats.gid;
  const date = stats.mtime;
  const uname = getUName(options == null ? void 0 : options.uname, stats.uid);
  const gname = getUName(options == null ? void 0 : options.gname, stats.gid);
  return await createReadableFileItem(path2, reader, {
    length: stats.size,
    mode,
    uname,
    gname,
    uid,
    gid,
    date
  });
};
const storeReaderToFile = (reader, path2) => {
  const writer = fs.createWriteStream(path2);
  reader.pipe(writer);
  return new Promise((res, rej) => {
    writer.on("finish", res);
    writer.on("error", rej);
    reader.on("error", rej);
  });
};
const utf8ByteLength = (str) => {
  return Buffer.byteLength(str, "utf8");
};
const truncateUtf8Safe = (str, maxBytes) => {
  let total = 0;
  let i = 0;
  while (i < str.length) {
    const codePoint = str.codePointAt(i);
    const char = String.fromCodePoint(codePoint);
    const charBytes = Buffer.byteLength(char, "utf8");
    if (total + charBytes > maxBytes) break;
    total += charBytes;
    i += char.length;
  }
  return str.slice(0, i);
};
const MAX_NAME = 100;
const MAX_PREFIX = 155;
const splitPath = (path2) => {
  var _a;
  if (utf8ByteLength(path2) <= MAX_NAME) {
    return { prefix: "", name: path2 };
  }
  const parts = path2.split("/");
  let name = (_a = parts.pop()) != null ? _a : "";
  let prefix = parts.join("/");
  if (utf8ByteLength(name) > MAX_NAME) {
    name = truncateUtf8Safe(name, MAX_NAME);
  }
  while (utf8ByteLength(prefix) > MAX_PREFIX) {
    prefix = truncateUtf8Safe(prefix, MAX_PREFIX);
  }
  return { prefix, name };
};
const getOctalBytes = (value, length) => {
  const str = value.toString(8).padStart(length - 1, "0") + "\0";
  return Buffer.from(str, "ascii");
};
const getPaddedBytes = (buffer) => {
  const extra = buffer.length % 512;
  if (extra === 0) {
    return buffer;
  } else {
    return Buffer.concat([buffer, Buffer.alloc(512 - extra, 0)]);
  }
};
const terminatorBytes = Buffer.alloc(1024, 0);
const createTarHeader = (type, path2, size, mode, uname, gname, uid, gid, date) => {
  const buffer = Buffer.alloc(512, 0);
  const { name, prefix } = splitPath(path2);
  buffer.write(name, 0, 100, "utf8");
  getOctalBytes(mode & 4095, 8).copy(buffer, 100);
  getOctalBytes(uid, 8).copy(buffer, 108);
  getOctalBytes(gid, 8).copy(buffer, 116);
  getOctalBytes(size, 12).copy(buffer, 124);
  getOctalBytes(Math.floor(date.getTime() / 1e3), 12).copy(buffer, 136);
  Buffer.from("        ", "ascii").copy(buffer, 148);
  if (type === "file") {
    buffer.write("0", 156, 1, "ascii");
  } else {
    buffer.write("5", 156, 1, "ascii");
  }
  buffer.write("ustar\0", 257, 6, "ascii");
  buffer.write("00", 263, 2, "ascii");
  buffer.write(uname, 265, 32, "utf8");
  buffer.write(gname, 297, 32, "utf8");
  buffer.write(prefix, 345, 155, "utf8");
  let sum = 0;
  for (let i = 0; i < 512; i++) {
    sum += buffer[i];
  }
  getOctalBytes(sum, 8).copy(buffer, 148);
  return buffer;
};
const createTarPacker = (entryItemGenerator, compressionType, signal) => {
  const entryItemIterator = async function* () {
    for await (const entryItem of entryItemGenerator) {
      switch (entryItem.kind) {
        case "file": {
          const entryItemContent = entryItem.content;
          if (typeof entryItemContent === "string" || Buffer.isBuffer(entryItemContent)) {
            const contentBytes = getBuffer(entryItemContent);
            const tarHeaderBytes = createTarHeader(
              "file",
              entryItem.path,
              contentBytes.length,
              entryItem.mode,
              entryItem.uname,
              entryItem.gname,
              entryItem.uid,
              entryItem.gid,
              entryItem.date
            );
            yield tarHeaderBytes;
            const totalPaddedContentBytes = getPaddedBytes(contentBytes);
            yield totalPaddedContentBytes;
          } else {
            const tarHeaderBytes = createTarHeader(
              "file",
              entryItem.path,
              entryItemContent.length,
              entryItem.mode,
              entryItem.uname,
              entryItem.gname,
              entryItem.uid,
              entryItem.gid,
              entryItem.date
            );
            yield tarHeaderBytes;
            let position = 0;
            switch (entryItemContent.kind) {
              case "generator": {
                for await (const contentBytes of entryItemContent.generator) {
                  yield contentBytes;
                  position += contentBytes.length;
                }
                break;
              }
              case "readable": {
                for await (const content of entryItemContent.readable) {
                  const contentBytes = getBuffer(content);
                  yield contentBytes;
                  position += contentBytes.length;
                }
                break;
              }
            }
            if (position % 512 !== 0) {
              yield Buffer.alloc(512 - position % 512, 0);
            }
          }
          break;
        }
        case "directory": {
          const tarHeaderBytes = createTarHeader(
            "directory",
            entryItem.path,
            0,
            entryItem.mode,
            entryItem.uname,
            entryItem.gname,
            entryItem.uid,
            entryItem.gid,
            entryItem.date
          );
          yield tarHeaderBytes;
          break;
        }
      }
    }
    yield terminatorBytes;
  };
  const ct = compressionType;
  switch (ct) {
    case "none": {
      return stream.Readable.from(entryItemIterator());
    }
    case "gzip": {
      const gzipStream = zlib.createGzip({ level: 9 });
      const entryItemStream = stream.Readable.from(entryItemIterator());
      entryItemStream.pipe(gzipStream);
      return gzipStream;
    }
  }
};
const createPackEntryGenerator = async function* (targetDir, resolvedPackageJson, readmeReplacementPath) {
  var _a;
  const packageJsonContent = JSON.stringify(resolvedPackageJson, null, 2);
  yield await createFileItem("package/package.json", packageJsonContent);
  const distributionFileGlobs = (_a = resolvedPackageJson == null ? void 0 : resolvedPackageJson.files) != null ? _a : ["**/*"];
  const packingFilePaths = (await Promise.all(
    distributionFileGlobs.map(async (pattern) => {
      const fullPath = path.resolve(targetDir, pattern);
      try {
        if (fs.existsSync(fullPath) && (await promises.lstat(fullPath)).isDirectory()) {
          return await glob.glob(`${pattern}/**/*`, { cwd: targetDir });
        }
        return await glob.glob(pattern, { cwd: targetDir });
      } catch (error) {
        return await glob.glob(pattern, { cwd: targetDir });
      }
    })
  )).flat();
  for (const packingFilePath of packingFilePaths) {
    if (packingFilePath !== "package.json") {
      const fullPath = path.resolve(targetDir, packingFilePath);
      const stat = await promises.lstat(fullPath);
      if (stat.isFile()) {
        if (packingFilePath === "README.md" && readmeReplacementPath) {
          yield await createReadFileItem("package/README.md", readmeReplacementPath);
        } else {
          yield await createReadFileItem(`package/${packingFilePath}`, fullPath);
        }
      }
    }
  }
  if (readmeReplacementPath && !packingFilePaths.includes("README.md")) {
    yield await createReadFileItem("package/README.md", readmeReplacementPath);
  }
};
const packAssets = async (targetDir, outputDir, checkWorkingDirectoryStatus, inheritableFields, readmeReplacementPath, replacePeerDepsWildcards = true, peerDepsVersionPrefix = "^") => {
  var _a, _b, _c, _d;
  if (!fs.existsSync(targetDir)) {
    return void 0;
  }
  let result;
  try {
    result = await internal.resolveRawPackageJsonObject(
      targetDir,
      checkWorkingDirectoryStatus,
      inheritableFields
    );
  } catch (error) {
    return void 0;
  }
  let { packageJson: resolvedPackageJson, sourceMap } = result;
  if (resolvedPackageJson == null ? void 0 : resolvedPackageJson.private) {
    return void 0;
  }
  if (replacePeerDepsWildcards) {
    const workspaceRoot = await internal.findWorkspaceRoot(targetDir);
    if (workspaceRoot) {
      const siblings = await internal.collectWorkspaceSiblings(workspaceRoot);
      if (siblings.size > 0) {
        resolvedPackageJson = internal.replacePeerDependenciesWildcards(
          resolvedPackageJson,
          siblings,
          peerDepsVersionPrefix
        );
      }
    }
  }
  let finalReadmeReplacementPath = readmeReplacementPath;
  if (!finalReadmeReplacementPath && (resolvedPackageJson == null ? void 0 : resolvedPackageJson.readme)) {
    const readmeSourceDir = (_a = sourceMap.get("readme")) != null ? _a : targetDir;
    const packageReadmePath = path.resolve(readmeSourceDir, resolvedPackageJson.readme);
    if (fs.existsSync(packageReadmePath)) {
      finalReadmeReplacementPath = packageReadmePath;
    }
  }
  if (finalReadmeReplacementPath && !fs.existsSync(finalReadmeReplacementPath)) {
    throw new Error(`README replacement file not found: ${finalReadmeReplacementPath}`);
  }
  const outputFileName = `${(_c = (_b = resolvedPackageJson == null ? void 0 : resolvedPackageJson.name) == null ? void 0 : _b.replace("/", "-")) != null ? _c : "package"}-${(_d = resolvedPackageJson == null ? void 0 : resolvedPackageJson.version) != null ? _d : "0.0.0"}.tgz`;
  if (!fs.existsSync(outputDir)) {
    await promises.mkdir(outputDir, { recursive: true });
  }
  const packer = createTarPacker(
    createPackEntryGenerator(targetDir, resolvedPackageJson, finalReadmeReplacementPath),
    "gzip"
  );
  const outputFile = path.resolve(outputDir, outputFileName);
  await storeReaderToFile(packer, outputFile);
  return resolvedPackageJson;
};
const getComputedPackageJsonObject = async (targetDir, checkWorkingDirectoryStatus, inheritableFields) => {
  if (!fs.existsSync(targetDir)) {
    return void 0;
  }
  const result = await internal.resolveRawPackageJsonObject(
    targetDir,
    checkWorkingDirectoryStatus,
    inheritableFields
  );
  return result.packageJson;
};
const parseArgs = (argv) => {
  const args = argv.slice(2);
  const result = {
    positional: [],
    options: {}
  };
  if (args.length === 0) {
    return result;
  }
  if (args[0].startsWith("-")) {
    let i2 = 0;
    while (i2 < args.length) {
      const arg = args[i2];
      if (arg.startsWith("--")) {
        const optionName = arg.slice(2);
        const nextArg = args[i2 + 1];
        if (nextArg !== void 0 && !nextArg.startsWith("-")) {
          result.options[optionName] = nextArg;
          i2 += 2;
        } else {
          result.options[optionName] = true;
          i2 += 1;
        }
      } else if (arg.startsWith("-")) {
        const optionName = arg.slice(1);
        result.options[optionName] = true;
        i2 += 1;
      } else {
        result.positional.push(arg);
        i2 += 1;
      }
    }
    return result;
  }
  result.command = args[0];
  let i = 1;
  while (i < args.length) {
    const arg = args[i];
    if (arg.startsWith("--")) {
      const optionName = arg.slice(2);
      const nextArg = args[i + 1];
      if (nextArg !== void 0 && !nextArg.startsWith("-")) {
        result.options[optionName] = nextArg;
        i += 2;
      } else {
        result.options[optionName] = true;
        i += 1;
      }
    } else if (arg.startsWith("-")) {
      const optionName = arg.slice(1);
      result.options[optionName] = true;
      i += 1;
    } else {
      result.positional.push(arg);
      i += 1;
    }
  }
  return result;
};
const defaultInheritableFields = /* @__PURE__ */ new Set([
  "version",
  "description",
  "author",
  "license",
  "repository",
  "keywords",
  "homepage",
  "bugs",
  "readme"
]);
const parseInheritableFields = (inheritableFieldsOption) => {
  if (typeof inheritableFieldsOption !== "string") {
    return defaultInheritableFields;
  }
  if (!inheritableFieldsOption.trim()) {
    return /* @__PURE__ */ new Set();
  }
  return new Set(inheritableFieldsOption.split(",").map((field) => field.trim()).filter((field) => field.length > 0));
};
const showHelp = () => {
  console.log(`screw-up - Easy package metadata inserter CLI [${void 0}]
Copyright (c) ${"Kouji Matsui (@kekyo@mi.kekyo.net)"}
Repository: ${"https://github.com/kekyo/screw-up.git"}
License: ${"MIT"}

Usage: screw-up <command> [options]

Commands:
  pack [directory]                 Pack the project into a tar archive
  publish [directory|package.tgz]  Publish the project
  dump [directory]                 Dump computed package.json as JSON

Options:
  -h, --help                   Show help

Pack Options:
  --pack-destination <path>    Directory to write the tarball
  --readme <path>              Replace README.md with specified file
  --inheritable-fields <list>  Comma-separated list of fields to inherit from parent (default: version,description,author,license,repository,keywords,homepage,bugs,readme)
  --no-wds                     Do not check working directory status to increase version
  --no-replace-peer-deps       Disable replacing "*" in peerDependencies with actual versions
  --peer-deps-prefix <prefix>  Version prefix for replaced peerDependencies (default: "^")

Publish Options:
  All npm publish options are supported (e.g., --dry-run, --tag, --access, --registry)

Examples:
  screw-up pack                            # Pack current directory
  screw-up pack ./my-project               # Pack specific directory
  screw-up pack --pack-destination ./dist  # Pack to specific output directory
  screw-up pack --readme ./README_pack.md  # Pack with custom README
  screw-up publish                         # Publish current directory
  screw-up publish ./my-project            # Publish specific directory
  screw-up publish package.tgz             # Publish existing tarball
  screw-up publish --dry-run --tag beta    # Publish with npm options
`);
};
const showPackHelp = () => {
  console.log(`Usage: screw-up pack [options] [directory]

Pack the project into a tar archive

Arguments:
  directory                    Directory to pack (default: current directory)

Options:
  --pack-destination <path>    Directory to write the tarball
  --readme <path>              Replace README.md with specified file
  --inheritable-fields <list>  Comma-separated list of fields to inherit from parent
  --no-wds                     Do not check working directory status to increase version
  --no-replace-peer-deps       Disable replacing "*" in peerDependencies with actual versions
  --peer-deps-prefix <prefix>  Version prefix for replaced peerDependencies (default: "^")
  -h, --help                   Show help for pack command
`);
};
const showPublishHelp = () => {
  console.log(`Usage: screw-up publish [options] [directory|package.tgz]

Publish the project

Arguments:
  directory|package.tgz        Directory to pack and publish, or existing tarball to publish

Options:
  All npm publish options are supported, including:
  --dry-run                    Perform a dry run
  --tag <tag>                  Tag for the published version
  --access <access>            Access level (public or restricted)
  --registry <registry>        Registry URL
  -h, --help                   Show help for publish command

Examples:
  screw-up publish                         # Publish current directory
  screw-up publish ./my-project            # Publish specific directory
  screw-up publish package.tgz             # Publish existing tarball
  screw-up publish --dry-run --tag beta    # Publish with options
`);
};
const packCommand = async (args) => {
  var _a;
  if (args.options.help || args.options.h) {
    showPackHelp();
    return;
  }
  const directory = args.positional[0];
  const packDestination = args.options["pack-destination"];
  const readmeOption = args.options["readme"];
  const inheritableFieldsOption = args.options["inheritable-fields"];
  const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
  const replacePeerDepsWildcards = !args.options["no-replace-peer-deps"];
  const peerDepsVersionPrefix = (_a = args.options["peer-deps-prefix"]) != null ? _a : "^";
  const targetDir = path.resolve(directory != null ? directory : process.cwd());
  const outputDir = packDestination ? path.resolve(packDestination) : process.cwd();
  const readmeReplacementPath = readmeOption ? path.resolve(readmeOption) : void 0;
  const inheritableFields = parseInheritableFields(inheritableFieldsOption);
  console.log(`[screw-up/cli]: pack: Creating archive of ${targetDir}...`);
  try {
    const metadata = await packAssets(
      targetDir,
      outputDir,
      checkWorkingDirectoryStatus,
      inheritableFields,
      readmeReplacementPath,
      replacePeerDepsWildcards,
      peerDepsVersionPrefix
    );
    if (metadata) {
      console.log(`[screw-up/cli]: pack: Archive created successfully: ${outputDir}`);
    } else {
      console.error(`[screw-up/cli]: pack: Unable to find any files to pack: ${targetDir}`);
      process.exit(1);
    }
  } catch (error) {
    console.error("[screw-up/cli]: pack: Failed to create archive:", error);
    process.exit(1);
  }
};
const publishCommand = async (args) => {
  var _a;
  if (args.options.help || args.options.h) {
    showPublishHelp();
    return;
  }
  const runNpmPublish = async (tarballPath, npmOptions2) => {
    console.log(`[screw-up/cli]: publish: Publishing ${tarballPath} to npm...`);
    const publishArgs = ["publish", tarballPath, ...npmOptions2];
    if (process.env.SCREW_UP_TEST_MODE === "true") {
      console.log(`[screw-up/cli]: TEST_MODE: Would execute: npm ${publishArgs.join(" ")}`);
      console.log(`[screw-up/cli]: TEST_MODE: Tarball path: ${tarballPath}`);
      console.log(`[screw-up/cli]: TEST_MODE: Options: ${npmOptions2.join(" ")}`);
      console.log(`[screw-up/cli]: publish: Successfully published ${tarballPath}`);
      return;
    }
    const npmProcess = child_process.spawn("npm", publishArgs, { stdio: "inherit" });
    return new Promise((resolve2, reject) => {
      npmProcess.on("close", (code) => {
        if (code === 0) {
          console.log(`[screw-up/cli]: publish: Successfully published ${tarballPath}`);
          resolve2();
        } else {
          reject(new Error(`npm publish failed with exit code ${code}`));
        }
      });
      npmProcess.on("error", reject);
    });
  };
  const path$1 = args.positional[0];
  const inheritableFieldsOption = args.options["inheritable-fields"];
  const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
  const replacePeerDepsWildcards = !args.options["no-replace-peer-deps"];
  const peerDepsVersionPrefix = (_a = args.options["peer-deps-prefix"]) != null ? _a : "^";
  const inheritableFields = parseInheritableFields(inheritableFieldsOption);
  const npmOptions = [];
  Object.entries(args.options).forEach(([key, value]) => {
    if (key === "help" || key === "h" || key === "no-wds" || key === "inheritable-fields" || key === "no-replace-peer-deps" || key === "peer-deps-prefix") return;
    if (value === true) {
      npmOptions.push(`--${key}`);
    } else {
      npmOptions.push(`--${key}`, value);
    }
  });
  try {
    if (!path$1) {
      const targetDir = process.cwd();
      const outputDir = await promises.mkdtemp("screw-up-publish-");
      console.log(`[screw-up/cli]: publish: Creating archive of ${targetDir}...`);
      try {
        const metadata = await packAssets(
          targetDir,
          outputDir,
          checkWorkingDirectoryStatus,
          inheritableFields,
          void 0,
          replacePeerDepsWildcards,
          peerDepsVersionPrefix
        );
        if (metadata) {
          const archiveName = `${metadata.name}-${metadata.version}.tgz`;
          const archivePath = path.join(outputDir, archiveName);
          await runNpmPublish(archivePath, npmOptions);
        } else {
          console.error(`[screw-up/cli]: publish: Unable to find any files to pack: ${targetDir}`);
          process.exit(1);
        }
      } finally {
        await promises.rm(outputDir, { recursive: true, force: true });
      }
    } else if (fs.existsSync(path$1)) {
      const pathStat = await promises.stat(path$1);
      if (pathStat.isFile() && (path$1.endsWith(".tgz") || path$1.endsWith(".tar.gz"))) {
        await runNpmPublish(path.resolve(path$1), npmOptions);
      } else if (pathStat.isDirectory()) {
        const targetDir = path.resolve(path$1);
        const outputDir = await promises.mkdtemp("screw-up-publish-");
        console.log(`[screw-up/cli]: publish: Creating archive of ${targetDir}...`);
        try {
          const metadata = await packAssets(
            targetDir,
            outputDir,
            checkWorkingDirectoryStatus,
            inheritableFields,
            void 0,
            replacePeerDepsWildcards,
            peerDepsVersionPrefix
          );
          if (metadata) {
            const archiveName = `${metadata.name}-${metadata.version}.tgz`;
            const archivePath = path.join(outputDir, archiveName);
            await runNpmPublish(archivePath, npmOptions);
          } else {
            console.error(`[screw-up/cli]: publish: Unable to find any files to pack: ${targetDir}`);
            process.exit(1);
          }
        } finally {
          await promises.rm(outputDir, { recursive: true, force: true });
        }
      } else {
        console.error(`[screw-up/cli]: publish: Invalid path - must be a directory or .tgz/.tar.gz file: ${path$1}`);
        process.exit(1);
      }
    } else {
      console.error(`[screw-up/cli]: publish: Path does not exist: ${path$1}`);
      process.exit(1);
    }
  } catch (error) {
    console.error("[screw-up/cli]: publish: Failed to publish:", error);
    process.exit(1);
  }
};
const showDumpHelp = () => {
  console.log(`Usage: screw-up dump [options] [directory]

Dump computed package.json as JSON

Arguments:
  directory                    Directory to dump package.json from (default: current directory)

Options:
  --inheritable-fields <list>  Comma-separated list of fields to inherit from parent
  --no-wds                     Do not check working directory status to increase version
  -h, --help                   Show help for dump command
`);
};
const dumpCommand = async (args) => {
  if (args.options.help || args.options.h) {
    showDumpHelp();
    return;
  }
  const directory = args.positional[0];
  const inheritableFieldsOption = args.options["inheritable-fields"];
  const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
  const inheritableFields = parseInheritableFields(inheritableFieldsOption);
  const targetDir = path.resolve(directory != null ? directory : process.cwd());
  try {
    const computedPackageJson = await getComputedPackageJsonObject(
      targetDir,
      checkWorkingDirectoryStatus,
      inheritableFields
    );
    if (computedPackageJson) {
      console.log(JSON.stringify(computedPackageJson, null, 2));
    } else {
      console.error(`[screw-up/cli]: dump: Unable to read package.json from: ${targetDir}`);
      process.exit(1);
    }
  } catch (error) {
    console.error("[screw-up/cli]: dump: Failed to dump package.json:", error);
    process.exit(1);
  }
};
const main = async () => {
  const args = parseArgs(process.argv);
  if (args.options.help || args.options.h || !args.command || args.command === "help" || args.command === "--help") {
    showHelp();
    return;
  }
  switch (args.command) {
    case "pack":
      await packCommand(args);
      break;
    case "publish":
      await publishCommand(args);
      break;
    case "dump":
      await dumpCommand(args);
      break;
    default:
      console.error(`Unknown command: ${args.command}`);
      console.error('Run "screw-up --help" for usage information.');
      process.exit(1);
  }
};
main().catch((error) => {
  console.error("CLI error:", error);
  process.exit(1);
});
//# sourceMappingURL=cli.cjs.map
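
Note: the deleted bundle above inlines tar-vern's ustar writer, and its least obvious step is the header checksum in createTarHeader: the 8-byte chksum field at offset 148 is first filled with ASCII spaces, all 512 header bytes are summed, and the field is then overwritten with the sum as an octal string. A minimal standalone sketch of that rule (illustrative only, not code from the package):

// Sum all 512 bytes of a ustar header, treating the chksum field
// (offsets 148-155) as ASCII spaces, as the ustar format requires.
const computeTarChecksum = (header) => {
  let sum = 0;
  for (let i = 0; i < 512; i++) {
    sum += i >= 148 && i < 156 ? 0x20 : header[i];
  }
  return sum;
};

Writing the result back with getOctalBytes(sum, 8) at offset 148, as the deleted code does, produces the same bytes a verifying reader will recompute, because the field held spaces when the sum was taken.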