screw-up 0.12.0 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/LICENSE +21 -0
  2. package/README.md +21 -7
  3. package/dist/analyzer.d.ts +13 -1
  4. package/dist/analyzer.d.ts.map +1 -1
  5. package/dist/cli-internal.d.ts +30 -7
  6. package/dist/cli-internal.d.ts.map +1 -1
  7. package/dist/cli.d.ts +12 -2
  8. package/dist/cli.d.ts.map +1 -1
  9. package/dist/generated/packageMetadata.d.ts +18 -0
  10. package/dist/generated/packageMetadata.d.ts.map +1 -0
  11. package/dist/index.cjs +36 -13
  12. package/dist/index.cjs.map +1 -1
  13. package/dist/index.d.ts +10 -1
  14. package/dist/index.js +36 -13
  15. package/dist/index.js.map +1 -1
  16. package/dist/{internal-Di0s8LQa.cjs → internal-BHSe5LIZ.cjs} +349 -322
  17. package/dist/internal-BHSe5LIZ.cjs.map +1 -0
  18. package/dist/{internal-BaMzTKS2.js → internal-BgCvktPU.js} +351 -324
  19. package/dist/internal-BgCvktPU.js.map +1 -0
  20. package/dist/internal.d.ts +64 -14
  21. package/dist/internal.d.ts.map +1 -1
  22. package/dist/main.cjs +1166 -0
  23. package/dist/main.cjs.map +1 -0
  24. package/dist/main.d.ts +13 -0
  25. package/dist/main.d.ts.map +1 -0
  26. package/dist/main.js +1165 -0
  27. package/dist/main.js.map +1 -0
  28. package/dist/packageMetadata-D9nXAoK9.cjs +20 -0
  29. package/dist/packageMetadata-D9nXAoK9.cjs.map +1 -0
  30. package/dist/packageMetadata-Dsxn2dKN.js +20 -0
  31. package/dist/packageMetadata-Dsxn2dKN.js.map +1 -0
  32. package/dist/types.d.ts +15 -0
  33. package/dist/types.d.ts.map +1 -1
  34. package/dist/vite-plugin.d.ts +10 -1
  35. package/dist/vite-plugin.d.ts.map +1 -1
  36. package/images/screw-up-120.png +0 -0
  37. package/package.json +13 -14
  38. package/README_pack.md +0 -63
  39. package/dist/cli.cjs +0 -765
  40. package/dist/cli.cjs.map +0 -1
  41. package/dist/cli.js +0 -764
  42. package/dist/cli.js.map +0 -1
  43. package/dist/internal-BaMzTKS2.js.map +0 -1
  44. package/dist/internal-Di0s8LQa.cjs.map +0 -1
package/dist/main.cjs ADDED
@@ -0,0 +1,1166 @@
+ #!/usr/bin/env node
+ /*!
+  * name: screw-up
+  * version: 0.14.0
+  * description: Simply package metadata inserter on Vite plugin
+  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
+  * license: MIT
+  * repository.url: https://github.com/kekyo/screw-up.git
+  * git.commit.hash: f1871df4c43aee9ab389a6ab1b2769b32322793b
+  */
+ "use strict";
+ const path = require("path");
+ const fs = require("fs");
+ const promises = require("fs/promises");
+ const child_process = require("child_process");
+ const stream = require("stream");
+ const zlib = require("zlib");
+ const promises$1 = require("stream/promises");
+ const os = require("os");
+ const internal = require("./internal-BHSe5LIZ.cjs");
+ /*!
+  * name: tar-vern
+  * version: 1.2.0
+  * description: Tape archiver library for Typescript
+  * author: Kouji Matsui (@kekyo@mi.kekyo.net)
+  * license: MIT
+  * repository.url: https://github.com/kekyo/tar-vern.git
+  * git.commit.hash: 26ff2d96bfbd226ff79106604ff5f9e5193f91bc
+  */
+ const MAX_NAME = 100;
+ const MAX_PREFIX = 155;
+ const getUName = (candidateName, candidateId, reflectStat) => {
+   return candidateName != null ? candidateName : reflectStat === "all" ? candidateId.toString() : "root";
+ };
+ const getBuffer = (data) => {
+   return Buffer.isBuffer(data) ? data : Buffer.from(data, "utf8");
+ };
+ const createDirectoryItem = async (path2, reflectStat, options, signal) => {
+   var _a, _b, _c, _d, _e, _f, _g, _h;
+   const rs = reflectStat;
+   if (options == null ? void 0 : options.directoryPath) {
+     const stats = await promises.stat(options.directoryPath);
+     const mode = (_a = options == null ? void 0 : options.mode) != null ? _a : stats.mode;
+     const uid = (_b = options == null ? void 0 : options.uid) != null ? _b : stats.uid;
+     const gid = (_c = options == null ? void 0 : options.gid) != null ? _c : stats.gid;
+     const date = (_d = options == null ? void 0 : options.date) != null ? _d : stats.mtime;
+     const uname = getUName(options == null ? void 0 : options.uname, stats.uid, rs);
+     const gname = getUName(options == null ? void 0 : options.gname, stats.gid, rs);
+     return {
+       kind: "directory",
+       path: path2,
+       mode,
+       uname,
+       gname,
+       uid,
+       gid,
+       date
+     };
+   } else {
+     const mode = (_e = options == null ? void 0 : options.mode) != null ? _e : 493;
+     const uid = (_f = options == null ? void 0 : options.uid) != null ? _f : 0;
+     const gid = (_g = options == null ? void 0 : options.gid) != null ? _g : 0;
+     const date = (_h = options == null ? void 0 : options.date) != null ? _h : /* @__PURE__ */ new Date();
+     const uname = getUName(options == null ? void 0 : options.uname, void 0, rs);
+     const gname = getUName(options == null ? void 0 : options.gname, void 0, rs);
+     return {
+       kind: "directory",
+       path: path2,
+       mode,
+       uname,
+       gname,
+       uid,
+       gid,
+       date
+     };
+   }
+ };
+ const createReadableFileItem = async (path2, readable, options, signal) => {
+   var _a, _b, _c, _d, _e, _f;
+   const mode = (_a = options == null ? void 0 : options.mode) != null ? _a : 420;
+   const uid = (_b = options == null ? void 0 : options.uid) != null ? _b : 0;
+   const gid = (_c = options == null ? void 0 : options.gid) != null ? _c : 0;
+   const date = (_d = options == null ? void 0 : options.date) != null ? _d : /* @__PURE__ */ new Date();
+   const uname = (_e = options == null ? void 0 : options.uname) != null ? _e : "root";
+   const gname = (_f = options == null ? void 0 : options.gname) != null ? _f : "root";
+   let length = options == null ? void 0 : options.length;
+   if (!length) {
+     const chunks = [];
+     length = 0;
+     for await (const chunk of readable) {
+       const buffer = getBuffer(chunk);
+       chunks.push(buffer);
+       length += buffer.length;
+     }
+     return {
+       kind: "file",
+       path: path2,
+       mode,
+       uname,
+       gname,
+       uid,
+       gid,
+       date,
+       content: {
+         kind: "readable",
+         length,
+         readable: stream.Readable.from(chunks, { signal })
+       }
+     };
+   } else {
+     return {
+       kind: "file",
+       path: path2,
+       mode,
+       uname,
+       gname,
+       uid,
+       gid,
+       date,
+       content: {
+         kind: "readable",
+         length,
+         readable
+       }
+     };
+   }
+ };
+ const createReadFileItem = async (path2, filePath, reflectStat, options, signal) => {
+   const rs = reflectStat;
+   const stats = await promises.stat(filePath);
+   const reader = fs.createReadStream(filePath, { signal });
+   const mode = stats.mode;
+   const uid = stats.uid;
+   const gid = stats.gid;
+   const date = stats.mtime;
+   const uname = getUName(options == null ? void 0 : options.uname, stats.uid, rs);
+   const gname = getUName(options == null ? void 0 : options.gname, stats.gid, rs);
+   return await createReadableFileItem(path2, reader, {
+     length: stats.size,
+     mode,
+     uname,
+     gname,
+     uid,
+     gid,
+     date
+   }, signal);
+ };
+ const storeReaderToFile = async (reader, path2, signal) => {
+   const writer = fs.createWriteStream(path2, { signal });
+   await promises$1.pipeline(reader, writer, { signal });
+ };
+ const getAllFilesInDirectory = async (baseDir, signal) => {
+   const collectFiles = async (currentDir, relativePath) => {
+     try {
+       const entries = await promises.readdir(currentDir, { withFileTypes: true });
+       const result = [];
+       const tasks = entries.map(async (entry) => {
+         signal == null ? void 0 : signal.throwIfAborted();
+         const entryRelativePath = path.join(relativePath, entry.name);
+         if (entry.isDirectory()) {
+           const entryFullPath = path.join(currentDir, entry.name);
+           const directoryContents = await collectFiles(entryFullPath, entryRelativePath);
+           return [entryRelativePath, ...directoryContents];
+         } else {
+           return [entryRelativePath];
+         }
+       });
+       const allResults = await Promise.all(tasks);
+       for (const entryResults of allResults) {
+         result.push(...entryResults);
+       }
+       return result;
+     } catch (error) {
+       console.warn(`Warning: Could not read directory ${currentDir}:`, error);
+       return [];
+     }
+   };
+   return await collectFiles(baseDir, "");
+ };
+ const createEntryItemGenerator = async function* (baseDir, relativePaths, includeDirectory, reflectStat, signal) {
+   const rs = "exceptName";
+   const includeDir = true;
+   const pathsToProcess = await getAllFilesInDirectory(baseDir, signal);
+   for (const relativePath of pathsToProcess) {
+     const fsPath = path.join(baseDir, relativePath);
+     try {
+       signal == null ? void 0 : signal.throwIfAborted();
+       const stats = await promises.stat(fsPath);
+       if (includeDir && stats.isDirectory()) {
+         yield await createDirectoryItem(relativePath, rs, {
+           directoryPath: fsPath
+         }, signal);
+       } else if (stats.isFile()) {
+         yield await createReadFileItem(relativePath, fsPath, rs, void 0, signal);
+       }
+     } catch (error) {
+       console.warn(`Warning: Could not access ${fsPath}:`, error);
+       continue;
+     }
+   }
+ };
+ const extractTo = async (iterator, basePath, signal) => {
+   for await (const entry of iterator) {
+     const targetPath = path.join(basePath, entry.path);
+     if (entry.kind === "directory") {
+       try {
+         signal == null ? void 0 : signal.throwIfAborted();
+         await promises.mkdir(targetPath, { recursive: true, mode: entry.mode });
+       } catch (error) {
+         if (error.code !== "EEXIST") {
+           throw error;
+         }
+       }
+     } else if (entry.kind === "file") {
+       const parentDir = path.dirname(targetPath);
+       await promises.mkdir(parentDir, { recursive: true });
+       const fileEntry = entry;
+       const content = await fileEntry.getContent("buffer");
+       await promises.writeFile(targetPath, content, { mode: entry.mode, signal });
+     }
+   }
+ };
+ const utf8ByteLength = (str) => {
+   return Buffer.byteLength(str, "utf8");
+ };
+ const truncateUtf8Safe = (str, maxBytes) => {
+   let total = 0;
+   let i = 0;
+   while (i < str.length) {
+     const codePoint = str.codePointAt(i);
+     const char = String.fromCodePoint(codePoint);
+     const charBytes = Buffer.byteLength(char, "utf8");
+     if (total + charBytes > maxBytes) break;
+     total += charBytes;
+     i += char.length;
+   }
+   return str.slice(0, i);
+ };
+ const splitPath = (path2) => {
+   var _a;
+   if (utf8ByteLength(path2) <= MAX_NAME) {
+     return { prefix: "", name: path2 };
+   }
+   const parts = path2.split("/");
+   let name = (_a = parts.pop()) != null ? _a : "";
+   let prefix = parts.join("/");
+   if (utf8ByteLength(name) > MAX_NAME) {
+     name = truncateUtf8Safe(name, MAX_NAME);
+   }
+   while (utf8ByteLength(prefix) > MAX_PREFIX) {
+     prefix = truncateUtf8Safe(prefix, MAX_PREFIX);
+   }
+   return { prefix, name };
+ };
+ const getOctalBytes = (value, length) => {
+   const str = value.toString(8).padStart(length - 1, "0") + "\0";
+   return Buffer.from(str, "ascii");
+ };
+ const getPaddedBytes = (buffer) => {
+   const extra = buffer.length % 512;
+   if (extra === 0) {
+     return buffer;
+   } else {
+     return Buffer.concat([buffer, Buffer.alloc(512 - extra, 0)]);
+   }
+ };
+ const terminatorBytes = Buffer.alloc(1024, 0);
+ const createTarHeader = (type, path2, size, mode, uname, gname, uid, gid, date) => {
+   const buffer = Buffer.alloc(512, 0);
+   const { name, prefix } = splitPath(path2);
+   buffer.write(name, 0, 100, "utf8");
+   getOctalBytes(mode & 4095, 8).copy(buffer, 100);
+   getOctalBytes(uid, 8).copy(buffer, 108);
+   getOctalBytes(gid, 8).copy(buffer, 116);
+   getOctalBytes(size, 12).copy(buffer, 124);
+   getOctalBytes(Math.floor(date.getTime() / 1e3), 12).copy(buffer, 136);
+   Buffer.from("        ", "ascii").copy(buffer, 148);
+   if (type === "file") {
+     buffer.write("0", 156, 1, "ascii");
+   } else {
+     buffer.write("5", 156, 1, "ascii");
+   }
+   buffer.write("ustar\0", 257, 6, "ascii");
+   buffer.write("00", 263, 2, "ascii");
+   buffer.write(uname, 265, 32, "utf8");
+   buffer.write(gname, 297, 32, "utf8");
+   buffer.write(prefix, 345, 155, "utf8");
+   let sum = 0;
+   for (let i = 0; i < 512; i++) {
+     sum += buffer[i];
+   }
+   getOctalBytes(sum, 8).copy(buffer, 148);
+   return buffer;
+ };
+ const createTarPacker = (entryItemGenerator, compressionType, signal) => {
+   const entryItemIterator = async function* () {
+     for await (const entryItem of entryItemGenerator) {
+       switch (entryItem.kind) {
+         // Entry is a file
+         case "file": {
+           const entryItemContent = entryItem.content;
+           if (typeof entryItemContent === "string" || Buffer.isBuffer(entryItemContent)) {
+             const contentBytes = getBuffer(entryItemContent);
+             const tarHeaderBytes = createTarHeader(
+               "file",
+               entryItem.path,
+               contentBytes.length,
+               entryItem.mode,
+               entryItem.uname,
+               entryItem.gname,
+               entryItem.uid,
+               entryItem.gid,
+               entryItem.date
+             );
+             yield tarHeaderBytes;
+             const totalPaddedContentBytes = getPaddedBytes(contentBytes);
+             yield totalPaddedContentBytes;
+           } else {
+             const content = entryItemContent;
+             const tarHeaderBytes = createTarHeader(
+               "file",
+               entryItem.path,
+               content.length,
+               entryItem.mode,
+               entryItem.uname,
+               entryItem.gname,
+               entryItem.uid,
+               entryItem.gid,
+               entryItem.date
+             );
+             yield tarHeaderBytes;
+             let position = 0;
+             switch (content.kind) {
+               // Content is a generator
+               case "generator": {
+                 for await (const contentBytes of content.generator) {
+                   yield contentBytes;
+                   position += contentBytes.length;
+                 }
+                 break;
+               }
+               // Content is a readable stream
+               case "readable": {
+                 for await (const chunk of content.readable) {
+                   const contentBytes = getBuffer(chunk);
+                   yield contentBytes;
+                   position += contentBytes.length;
+                 }
+                 break;
+               }
+             }
+             if (position % 512 !== 0) {
+               yield Buffer.alloc(512 - position % 512, 0);
+             }
+           }
+           break;
+         }
+         // Entry is a directory
+         case "directory": {
+           const tarHeaderBytes = createTarHeader(
+             "directory",
+             entryItem.path,
+             0,
+             entryItem.mode,
+             entryItem.uname,
+             entryItem.gname,
+             entryItem.uid,
+             entryItem.gid,
+             entryItem.date
+           );
+           yield tarHeaderBytes;
+           break;
+         }
+       }
+     }
+     yield terminatorBytes;
+   };
+   const ct = compressionType;
+   switch (ct) {
+     // No compression
+     case "none": {
+       return stream.Readable.from(entryItemIterator(), { signal });
+     }
+     // Gzip compression
+     case "gzip": {
+       const gzipStream = zlib.createGzip({ level: 9 });
+       const entryItemStream = stream.Readable.from(entryItemIterator(), { signal });
+       entryItemStream.pipe(gzipStream);
+       return gzipStream;
+     }
+   }
+ };
+ const parseOctalBytes = (buffer, offset, length) => {
+   const str = buffer.subarray(offset, offset + length).toString("ascii").replace(/\0/g, "").trim();
+   return str ? parseInt(str, 8) : 0;
+ };
+ const parseString = (buffer, offset, length) => {
+   return buffer.subarray(offset, offset + length).toString("utf8").replace(/\0/g, "").trim();
+ };
+ const readExactBytes = async (iterator, size, signal) => {
+   var _a;
+   const chunks = [];
+   let totalRead = 0;
+   while (totalRead < size) {
+     const { value, done } = await iterator.next();
+     if (done) {
+       if (totalRead === 0) {
+         return void 0;
+       } else {
+         throw new Error(`Unexpected end of stream: expected ${size} bytes, got ${totalRead} bytes`);
+       }
+     }
+     const chunk = getBuffer(value);
+     const needed = size - totalRead;
+     if (chunk.length <= needed) {
+       chunks.push(chunk);
+       totalRead += chunk.length;
+     } else {
+       chunks.push(chunk.subarray(0, needed));
+       await ((_a = iterator.return) == null ? void 0 : _a.call(iterator, chunk.subarray(needed)));
+       totalRead = size;
+     }
+   }
+   return Buffer.concat(chunks, size);
+ };
+ const skipExactBytes = async (iterator, size, signal) => {
+   var _a;
+   let totalSkipped = 0;
+   while (totalSkipped < size) {
+     const { value, done } = await iterator.next();
+     if (done) {
+       throw new Error(`Unexpected end of stream: expected to skip ${size} bytes, skipped ${totalSkipped} bytes`);
+     }
+     const chunk = getBuffer(value);
+     const needed = size - totalSkipped;
+     if (chunk.length <= needed) {
+       totalSkipped += chunk.length;
+     } else {
+       await ((_a = iterator.return) == null ? void 0 : _a.call(iterator, chunk.subarray(needed)));
+       totalSkipped = size;
+     }
+   }
+ };
+ const skipPaddingBytesTo512Boundary = async (iterator, contentSize, signal) => {
+   const padding = (512 - contentSize % 512) % 512;
+   if (padding > 0) {
+     await skipExactBytes(iterator, padding);
+   }
+ };
+ const parseTarHeader = (buffer) => {
+   if (buffer.every((b) => b === 0)) {
+     return void 0;
+   }
+   const name = parseString(buffer, 0, 100);
+   const mode = parseOctalBytes(buffer, 100, 8);
+   const uid = parseOctalBytes(buffer, 108, 8);
+   const gid = parseOctalBytes(buffer, 116, 8);
+   const size = parseOctalBytes(buffer, 124, 12);
+   const mtime = new Date(parseOctalBytes(buffer, 136, 12) * 1e3);
+   const checksum = parseOctalBytes(buffer, 148, 8);
+   const typeflag = parseString(buffer, 156, 1);
+   const magic = parseString(buffer, 257, 6);
+   const uname = parseString(buffer, 265, 32);
+   const gname = parseString(buffer, 297, 32);
+   const prefix = parseString(buffer, 345, 155);
+   if (magic !== "ustar") {
+     throw new Error(`Invalid tar format: magic="${magic}"`);
+   }
+   let calculatedSum = 0;
+   for (let i = 0; i < 512; i++) {
+     if (i >= 148 && i < 156) {
+       calculatedSum += 32;
+     } else {
+       calculatedSum += buffer[i];
+     }
+   }
+   if (calculatedSum !== checksum) {
+     throw new Error(`Invalid checksum: expected ${checksum}, got ${calculatedSum}`);
+   }
+   let path2 = prefix ? `${prefix}/${name}` : name;
+   if (path2.endsWith("/")) {
+     path2 = path2.slice(0, -1);
+   }
+   const kind = typeflag === "5" ? "directory" : "file";
+   return {
+     kind,
+     path: path2,
+     size,
+     mode,
+     uid,
+     gid,
+     mtime,
+     uname: uname || uid.toString(),
+     gname: gname || gid.toString(),
+     checksum,
+     consumed: false
+   };
+ };
+ const createBufferedAsyncIterator = (iterable, signal) => {
+   const buffer = [];
+   const iterator = iterable[Symbol.asyncIterator]();
+   return {
+     next: async () => {
+       if (buffer.length > 0) {
+         return { value: buffer.shift(), done: false };
+       }
+       return iterator.next();
+     },
+     return: async (value) => {
+       if (value !== void 0) {
+         buffer.unshift(value);
+       }
+       return { value: void 0, done: false };
+     }
+   };
+ };
+ const createReadableFromIterator = (iterator, size, signal, consumedRef) => {
+   const generator = async function* () {
+     var _a;
+     let remainingBytes = size;
+     while (remainingBytes > 0) {
+       const { value, done } = await iterator.next();
+       if (done) {
+         throw new Error(`Unexpected end of stream: expected ${size} bytes, remaining ${remainingBytes} bytes`);
+       }
+       const chunk = getBuffer(value);
+       if (chunk.length <= remainingBytes) {
+         remainingBytes -= chunk.length;
+         yield chunk;
+       } else {
+         const needed = chunk.subarray(0, remainingBytes);
+         const excess = chunk.subarray(remainingBytes);
+         remainingBytes = 0;
+         await ((_a = iterator.return) == null ? void 0 : _a.call(iterator, excess));
+         yield needed;
+         break;
+       }
+     }
+     await skipPaddingBytesTo512Boundary(iterator, size);
+     consumedRef.consumed = true;
+   };
+   return stream.Readable.from(generator(), { signal });
+ };
+ const createTarExtractor = async function* (readable, compressionType, signal) {
+   const ct = compressionType;
+   let inputStream;
+   switch (ct) {
+     case "gzip":
+       const gunzip = zlib.createGunzip();
+       readable.pipe(gunzip);
+       inputStream = gunzip;
+       break;
+     case "none":
+     default:
+       inputStream = readable;
+       break;
+   }
+   const iterator = createBufferedAsyncIterator(inputStream);
+   let header;
+   while (true) {
+     if ((header == null ? void 0 : header.kind) === "file" && !header.consumed) {
+       await skipExactBytes(iterator, header.size);
+       await skipPaddingBytesTo512Boundary(iterator, header.size);
+       header.consumed = true;
+     }
+     let headerBuffer;
+     try {
+       headerBuffer = await readExactBytes(iterator, 512, signal);
+     } catch (error) {
+       if (error instanceof Error && error.message.includes("Unexpected end of stream")) {
+         throw new Error("Invalid tar format: incomplete header");
+       }
+       throw error;
+     }
+     if (headerBuffer === void 0) {
+       break;
+     }
+     header = parseTarHeader(headerBuffer);
+     if (!header) {
+       const secondBlock = await readExactBytes(iterator, 512);
+       if (secondBlock === void 0 || secondBlock.every((b) => b === 0)) {
+         break;
+       }
+       throw new Error("Invalid tar format: expected terminator block");
+     }
+     if (header.kind === "directory") {
+       yield {
+         kind: "directory",
+         path: header.path,
+         mode: header.mode,
+         uid: header.uid,
+         gid: header.gid,
+         uname: header.uname,
+         gname: header.gname,
+         date: header.mtime
+       };
+     } else {
+       const currentHeader = header;
+       yield {
+         kind: "file",
+         path: currentHeader.path,
+         mode: currentHeader.mode,
+         uid: currentHeader.uid,
+         gid: currentHeader.gid,
+         uname: currentHeader.uname,
+         gname: currentHeader.gname,
+         date: currentHeader.mtime,
+         getContent: async (type) => {
+           if (currentHeader.consumed) {
+             throw new Error("Content has already been consumed. Multiple calls to getContent are not supported.");
+           }
+           switch (type) {
+             // For string
+             case "string": {
+               const dataBuffer = await readExactBytes(iterator, currentHeader.size);
+               if (dataBuffer === void 0) {
+                 throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
+               }
+               await skipPaddingBytesTo512Boundary(iterator, currentHeader.size);
+               currentHeader.consumed = true;
+               return dataBuffer.toString("utf8");
+             }
+             // For buffer
+             case "buffer": {
+               const dataBuffer = await readExactBytes(iterator, currentHeader.size);
+               if (dataBuffer === void 0) {
+                 throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
+               }
+               await skipPaddingBytesTo512Boundary(iterator, currentHeader.size);
+               currentHeader.consumed = true;
+               return dataBuffer;
+             }
+             // For Readable stream
+             case "readable": {
+               const readable2 = createReadableFromIterator(iterator, currentHeader.size, signal, currentHeader);
+               return readable2;
+             }
+             default:
+               throw new Error(`Unsupported content type: ${type}`);
+           }
+         }
+       };
+     }
+   }
+ };
+ const runNpmPack = async (targetDir, packDestDir) => {
+   return new Promise((res, rej) => {
+     const npmProcess = child_process.spawn("npm", ["pack", "--pack-destination", packDestDir], {
+       cwd: targetDir,
+       stdio: ["ignore", "pipe", "pipe"]
+     });
+     let stdout = "";
+     let stderr = "";
+     npmProcess.stdout.on("data", (data) => {
+       stdout += data.toString();
+     });
+     npmProcess.stderr.on("data", (data) => {
+       stderr += data.toString();
+     });
+     npmProcess.on("close", (code) => {
+       if (code === 0) {
+         const lines = stdout.trim().split("\n");
+         const filename = lines.find((line) => line.trim().endsWith(".tgz")) || lines[lines.length - 1];
+         if (filename && filename.trim().endsWith(".tgz")) {
+           const fullPath = path.join(packDestDir, filename.trim());
+           res(fullPath);
+         } else {
+           rej(new Error("npm pack did not output a valid .tgz filename"));
+         }
+       } else {
+         const errorMessage = `npm pack failed with exit code ${code}`;
+         const fullError = stderr ? `${errorMessage}
+ stderr: ${stderr}` : errorMessage;
+         if (stdout) {
+           rej(new Error(`${fullError}
+ stdout: ${stdout}`));
+         } else {
+           rej(new Error(fullError));
+         }
+       }
+     });
+     npmProcess.on("error", (error) => {
+       rej(new Error(`Failed to spawn npm pack: ${error.message}`));
+     });
+   });
+ };
+ const packAssets = async (targetDir, outputDir, checkWorkingDirectoryStatus, alwaysOverrideVersionFromGit, inheritableFields, readmeReplacementPath, replacePeerDepsWildcards, peerDepsVersionPrefix, logger2) => {
+   var _a, _b, _c;
+   if (!fs.existsSync(targetDir)) {
+     throw new Error(`Target directory is not found: ${targetDir}`);
+   }
+   let readmeReplacementCandidatePath = readmeReplacementPath;
+   if (readmeReplacementCandidatePath && !fs.existsSync(readmeReplacementCandidatePath)) {
+     throw new Error(`README replacement file is not found: ${readmeReplacementCandidatePath}`);
+   }
+   const result = await internal.resolveRawPackageJsonObject(
+     targetDir,
+     checkWorkingDirectoryStatus,
+     alwaysOverrideVersionFromGit,
+     inheritableFields,
+     logger2
+   );
+   let resolvedPackageJson = result.metadata;
+   if (resolvedPackageJson == null ? void 0 : resolvedPackageJson.private) {
+     return void 0;
+   }
+   const packageJsonReadme = resolvedPackageJson.readme;
+   if (packageJsonReadme) {
+     if (!readmeReplacementCandidatePath) {
+       const packageJsonReadmeDir = result.sourceMap.get("readme");
+       const packageJsonReadmePath = path.join(packageJsonReadmeDir, packageJsonReadme);
+       if (!fs.existsSync(packageJsonReadmePath)) {
+         throw new Error(`README replacement file is not found: ${packageJsonReadmePath}`);
+       }
+       readmeReplacementCandidatePath = packageJsonReadmePath;
+     }
+     delete resolvedPackageJson.readme;
+   }
+   if (replacePeerDepsWildcards) {
+     const workspaceRoot = await internal.findWorkspaceRoot(targetDir, logger2);
+     if (workspaceRoot) {
+       const siblings = await internal.collectWorkspaceSiblings(workspaceRoot, logger2);
+       if (siblings.size > 0) {
+         resolvedPackageJson = internal.replacePeerDependenciesWildcards(
+           resolvedPackageJson,
+           siblings,
+           peerDepsVersionPrefix
+         );
+       }
+     }
+   }
+   const baseTempDir = await promises.mkdtemp(path.join(os.tmpdir(), "screw-up-npm-pack-"));
+   await promises.mkdir(baseTempDir, { recursive: true });
+   try {
+     const npmTarballPath = await runNpmPack(targetDir, baseTempDir);
+     const stagingDir = path.join(baseTempDir, "staging");
+     await promises.mkdir(stagingDir, { recursive: true });
+     const stream2 = fs.createReadStream(npmTarballPath);
+     await extractTo(createTarExtractor(stream2, "gzip"), stagingDir);
+     const packageJsonPath = path.join(stagingDir, "package", "package.json");
+     if (fs.existsSync(packageJsonPath)) {
+       await promises.writeFile(packageJsonPath, JSON.stringify(resolvedPackageJson, null, 2));
+     }
+     if (readmeReplacementCandidatePath) {
+       const readmeDestPath = path.join(stagingDir, "package", "README.md");
+       await promises.copyFile(readmeReplacementCandidatePath, readmeDestPath);
+     }
+     const outputFileName = `${(_b = (_a = resolvedPackageJson == null ? void 0 : resolvedPackageJson.name) == null ? void 0 : _a.replace("/", "-")) != null ? _b : "package"}-${(_c = resolvedPackageJson == null ? void 0 : resolvedPackageJson.version) != null ? _c : "0.0.0"}.tgz`;
+     await promises.mkdir(outputDir, { recursive: true });
+     const outputFile = path.join(outputDir, outputFileName);
+     const itemGenerator = createEntryItemGenerator(stagingDir);
+     const packer = createTarPacker(itemGenerator, "gzip");
+     await storeReaderToFile(packer, outputFile);
+     return {
+       packageFileName: outputFileName,
+       metadata: resolvedPackageJson
+     };
+   } finally {
+     await promises.rm(baseTempDir, { recursive: true, force: true });
+   }
+ };
+ const getComputedPackageJsonObject = async (targetDir, checkWorkingDirectoryStatus, alwaysOverrideVersionFromGit, inheritableFields, logger2) => {
+   if (!fs.existsSync(targetDir)) {
+     return void 0;
+   }
+   const result = await internal.resolveRawPackageJsonObject(
+     targetDir,
+     checkWorkingDirectoryStatus,
+     alwaysOverrideVersionFromGit,
+     inheritableFields,
+     logger2
+   );
+   return result.metadata;
+ };
+ const parseArgs = (args, argOptionMap2) => {
+   const result = {
+     argv: args,
+     positional: [],
+     options: {}
+   };
+   for (let i = 0; i < args.length; i++) {
+     const arg = args[i];
+     if (arg.startsWith("--")) {
+       const optionName = arg.slice(2);
+       if (!result.command) {
+         result.options[optionName] = true;
+       } else {
+         const argOptions = argOptionMap2.get(result.command);
+         if (argOptions.has(optionName)) {
+           i++;
+           result.options[optionName] = args[i];
+         } else {
+           result.options[optionName] = true;
+         }
+       }
+     } else if (arg.startsWith("-")) {
+       const optionName = arg.slice(1);
+       if (optionName.length == 1) {
+         result.options[optionName] = true;
+       }
+     } else if (!result.command) {
+       result.command = arg;
+     } else {
+       result.positional.push(arg);
+     }
+   }
+   return result;
+ };
+ const defaultInheritableFields = /* @__PURE__ */ new Set([
+   "version",
+   "description",
+   "author",
+   "license",
+   "repository",
+   "keywords",
+   "homepage",
+   "bugs",
+   "readme"
+ ]);
+ const parseInheritableFields = (inheritableFieldsOption) => {
+   if (typeof inheritableFieldsOption !== "string") {
+     return defaultInheritableFields;
+   }
+   if (!inheritableFieldsOption.trim()) {
+     return /* @__PURE__ */ new Set();
+   }
+   return new Set(inheritableFieldsOption.split(",").map((field) => field.trim()).filter((field) => field.length > 0));
+ };
+ const showDumpHelp = (logger2) => {
+   logger2.info(`Usage: screw-up dump [options] [directory]
+
+ Dump computed package.json as JSON
+
+ Arguments:
+   directory                      Directory to dump package.json from (default: current directory)
+
+ Options:
+   --inheritable-fields <list>    Comma-separated list of fields to inherit from parent
+   --no-wds                       Do not check working directory status to increase version
+   --no-git-version-override      Do not override version from Git (use package.json version)
+   -h, --help                     Show help for dump command
+ `);
+ };
+ const dumpCommand = async (args, logger2) => {
+   if (args.options.help || args.options.h) {
+     showDumpHelp(logger2);
+     return 1;
+   }
+   const directory = args.positional[0];
+   const inheritableFieldsOption = args.options["inheritable-fields"];
+   const alwaysOverrideVersionFromGit = !args.options["no-git-version-override"];
+   const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
+   const inheritableFields = parseInheritableFields(inheritableFieldsOption);
+   const targetDir = path.resolve(directory != null ? directory : process.cwd());
+   try {
+     const computedPackageJson = await getComputedPackageJsonObject(
+       targetDir,
+       checkWorkingDirectoryStatus,
+       alwaysOverrideVersionFromGit,
+       inheritableFields,
+       logger2
+     );
+     if (computedPackageJson) {
+       logger2.info(JSON.stringify(computedPackageJson, null, 2));
+     } else {
+       logger2.error(`[screw-up:cli]: dump: Unable to read package.json from: ${targetDir}`);
+       return 1;
+     }
+   } catch (error) {
+     logger2.error(`[screw-up:cli]: dump: Failed to dump package.json: ${error}`);
+     return 1;
+   }
+   return 0;
+ };
+ const showPackHelp = (logger2) => {
+   logger2.info(`Usage: screw-up pack [options] [directory]
+
+ Pack the project into a tar archive
+
+ Arguments:
+   directory                      Directory to pack (default: current directory)
+
+ Options:
+   --pack-destination <path>      Directory to write the tarball
+   --readme <path>                Replace README.md with specified file
+   --inheritable-fields <list>    Comma-separated list of fields to inherit from parent
+   --no-wds                       Do not check working directory status to increase version
+   --no-git-version-override      Do not override version from Git (use package.json version)
+   --no-replace-peer-deps         Disable replacing "*" in peerDependencies with actual versions
+   --peer-deps-prefix <prefix>    Version prefix for replaced peerDependencies (default: "^")
+   --verbose                      Print verbose log
+   -h, --help                     Show help for pack command
+ `);
+ };
+ const packCommand = async (args, logger2) => {
+   var _a;
+   if (args.options.help || args.options.h) {
+     showPackHelp(logger2);
+     return 1;
+   }
+   const directory = args.positional[0];
+   const packDestination = args.options["pack-destination"];
+   const readmeOption = args.options["readme"];
+   const inheritableFieldsOption = args.options["inheritable-fields"];
+   const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
+   const alwaysOverrideVersionFromGit = !args.options["no-git-version-override"];
+   const replacePeerDepsWildcards = !args.options["no-replace-peer-deps"];
+   const peerDepsVersionPrefix = (_a = args.options["peer-deps-prefix"]) != null ? _a : "^";
+   const verbose = args.options["verbose"] ? true : false;
+   const targetDir = path.resolve(directory != null ? directory : process.cwd());
+   const outputDir = packDestination ? path.resolve(packDestination) : process.cwd();
+   const readmeReplacementPath = readmeOption ? path.resolve(readmeOption) : void 0;
+   const inheritableFields = parseInheritableFields(inheritableFieldsOption);
+   if (verbose) {
+     logger2.info(`[screw-up:cli]: pack: Creating archive of ${targetDir}...`);
+   }
+   try {
+     const result = await packAssets(
+       targetDir,
+       outputDir,
+       checkWorkingDirectoryStatus,
+       alwaysOverrideVersionFromGit,
+       inheritableFields,
+       readmeReplacementPath,
+       replacePeerDepsWildcards,
+       peerDepsVersionPrefix,
+       logger2
+     );
+     if (result) {
+       if (verbose) {
+         logger2.info(`[screw-up:cli]: pack: Archive created successfully: ${result.packageFileName}`);
+       } else {
+         logger2.info(result.packageFileName);
+       }
+     } else {
+       logger2.error(`[screw-up:cli]: pack: Unable to find any files to pack: ${targetDir}`);
+       return 1;
+     }
+   } catch (error) {
+     logger2.error(`[screw-up:cli]: pack: Failed to create archive: ${error}`);
+     return 1;
+   }
+   return 0;
+ };
+ const showPublishHelp = (logger2) => {
+   logger2.info(`Usage: screw-up publish [options] [directory|package.tgz]
+
+ Publish the project
+
+ Arguments:
+   directory|package.tgz          Directory to pack and publish, or existing tarball to publish
+
+ Options:
+   All npm publish options are supported, including:
+   --dry-run                      Perform a dry run
+   --tag <tag>                    Tag for the published version
+   --access <access>              Access level (public or restricted)
+   --registry <registry>          Registry URL
+   -h, --help                     Show help for publish command
+
+ Examples:
+   screw-up publish                       # Publish current directory
+   screw-up publish ./my-project          # Publish specific directory
+   screw-up publish package.tgz           # Publish existing tarball
+   screw-up publish --dry-run --tag beta  # Publish with options
+ `);
+ };
+ const runNpmPublish = async (tarballPath, npmOptions, verbose, logger2) => {
+   if (verbose) {
+     logger2.info(`[screw-up:cli]: publish: Publishing ${tarballPath} to npm...`);
+   }
+   const publishArgs = ["publish", tarballPath, ...npmOptions];
+   if (process.env.SCREW_UP_TEST_MODE === "true") {
+     logger2.info(`[screw-up:cli]: TEST_MODE: Would execute: npm ${publishArgs.join(" ")}`);
+     logger2.info(`[screw-up:cli]: TEST_MODE: Tarball path: ${tarballPath}`);
+     logger2.info(`[screw-up:cli]: TEST_MODE: Options: ${npmOptions.join(" ")}`);
+     logger2.info(`[screw-up:cli]: publish: Successfully published ${tarballPath}`);
+     return 0;
+   }
+   const npmProcess = child_process.spawn("npm", publishArgs, { stdio: "inherit" });
+   return new Promise((resolve2, reject) => {
+     npmProcess.on("close", (code) => {
+       if (code === 0) {
+         if (verbose) {
+           logger2.info(`[screw-up:cli]: publish: Successfully published ${tarballPath}`);
+         }
+         resolve2(code);
+       } else {
+         logger2.error(`[screw-up:cli]: publish: npm publish failed: ${tarballPath}`);
+         resolve2(code);
+       }
+     });
+     npmProcess.on("error", reject);
+   });
+ };
+ const publishCommand = async (args, logger2) => {
+   var _a;
+   if (args.options.help || args.options.h) {
+     showPublishHelp(logger2);
+     return 1;
+   }
+   const path$1 = args.positional[0];
+   const readmeOption = args.options["readme"];
+   const inheritableFieldsOption = args.options["inheritable-fields"];
+   const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
+   const alwaysOverrideVersionFromGit = !args.options["no-git-version-override"];
+   const replacePeerDepsWildcards = !args.options["no-replace-peer-deps"];
+   const peerDepsVersionPrefix = (_a = args.options["peer-deps-prefix"]) != null ? _a : "^";
+   const verbose = args.options["verbose"] ? true : false;
+   const inheritableFields = parseInheritableFields(inheritableFieldsOption);
+   const readmeReplacementPath = readmeOption ? path.resolve(readmeOption) : void 0;
+   const npmOptions = [];
+   for (let i = 0; i < args.argv.length; i++) {
+     const arg = args.argv[i];
+     if (arg === "--help" || arg === "--verbose" || arg === "-h" || arg === "--no-wds" || arg === "--no-git-version-override" || arg === "--no-replace-peer-deps") ;
+     else if (arg === "--readme" || arg === "--inheritable-fields" || arg === "--peer-deps-prefix") {
+       i++;
+     } else {
+       npmOptions.push(arg);
+     }
+   }
+   try {
+     if (!path$1) {
+       const targetDir = process.cwd();
+       const outputDir = await promises.mkdtemp("screw-up-publish-");
+       if (verbose) {
+         logger2.info(`[screw-up:cli]: publish: Creating archive of ${targetDir}...`);
+       }
+       try {
+         const result = await packAssets(
+           targetDir,
+           outputDir,
+           checkWorkingDirectoryStatus,
+           alwaysOverrideVersionFromGit,
+           inheritableFields,
+           readmeReplacementPath,
+           replacePeerDepsWildcards,
+           peerDepsVersionPrefix,
+           logger2
+         );
+         if (result == null ? void 0 : result.metadata) {
+           if (verbose) {
+             logger2.info(`[screw-up:cli]: publish: Archive created successfully: ${result.packageFileName}`);
+           }
+           const archiveName = `${result.metadata.name}-${result.metadata.version}.tgz`;
+           const archivePath = path.join(outputDir, archiveName);
+           return await runNpmPublish(archivePath, npmOptions, verbose, logger2);
+         } else {
+           logger2.error(`[screw-up:cli]: publish: Unable to find any files to pack: ${targetDir}`);
+           return 1;
+         }
+       } finally {
+         await promises.rm(outputDir, { recursive: true, force: true });
+       }
+     } else if (fs.existsSync(path$1)) {
+       const pathStat = await promises.stat(path$1);
+       if (pathStat.isFile() && (path$1.endsWith(".tgz") || path$1.endsWith(".tar.gz"))) {
+         return await runNpmPublish(path.resolve(path$1), npmOptions, verbose, logger2);
+       } else if (pathStat.isDirectory()) {
+         const targetDir = path.resolve(path$1);
+         const outputDir = await promises.mkdtemp("screw-up-publish-");
+         if (verbose) {
+           logger2.info(`[screw-up:cli]: publish: Creating archive of ${targetDir}...`);
+         }
+         try {
+           const result = await packAssets(
+             targetDir,
+             outputDir,
+             checkWorkingDirectoryStatus,
+             alwaysOverrideVersionFromGit,
+             inheritableFields,
+             readmeReplacementPath,
+             replacePeerDepsWildcards,
+             peerDepsVersionPrefix,
+             logger2
+           );
+           if (result == null ? void 0 : result.metadata) {
+             if (verbose) {
+               logger2.info(`[screw-up:cli]: publish: Archive created successfully: ${result.packageFileName}`);
+             }
+             const archiveName = `${result.metadata.name}-${result.metadata.version}.tgz`;
+             const archivePath = path.join(outputDir, archiveName);
+             return await runNpmPublish(archivePath, npmOptions, verbose, logger2);
+           } else {
+             logger2.error(`[screw-up:cli]: publish: Unable to find any files to pack: ${targetDir}`);
+             return 1;
+           }
+         } finally {
+           await promises.rm(outputDir, { recursive: true, force: true });
+         }
+       } else {
+         logger2.error(`[screw-up:cli]: publish: Invalid path - must be a directory or .tgz/.tar.gz file: ${path$1}`);
+         return 1;
+       }
+     } else {
+       logger2.error(`[screw-up:cli]: publish: Path does not exist: ${path$1}`);
+       return 1;
+     }
+   } catch (error) {
+     logger2.error(`[screw-up:cli]: publish: Failed to publish: ${error}`);
+     return 1;
+   }
+ };
+ const showHelp = async (logger2) => {
+   const { author, license, repository_url, version } = await Promise.resolve().then(() => require("./packageMetadata-D9nXAoK9.cjs"));
+   logger2.info(`screw-up - Easy package metadata inserter CLI [${version}]
+ Copyright (c) ${author}
+ Repository: ${repository_url}
+ License: ${license}
+
+ Usage: screw-up <command> [options]
+
+ Commands:
+   dump [directory]                 Dump computed package.json as JSON
+   pack [directory]                 Pack the project into a tar archive
+   publish [directory|package.tgz]  Publish the project
+
+ Options:
+   -h, --help                       Show help
+
+ Examples:
+   screw-up dump                            # Dump computed package.json as JSON
+   screw-up pack                            # Pack current directory
+   screw-up pack --pack-destination ./dist  # Pack to specific output directory
+   screw-up publish                         # Publish current directory
+   screw-up publish package.tgz             # Publish existing tarball
+ `);
+ };
+ const argOptionMap = /* @__PURE__ */ new Map([
+   ["dump", /* @__PURE__ */ new Set(["inheritable-fields"])],
+   ["pack", /* @__PURE__ */ new Set(["pack-destination", "readme", "inheritable-fields", "peer-deps-prefix"])],
+   ["publish", /* @__PURE__ */ new Set(["inheritable-fields", "peer-deps-prefix"])]
+ ]);
+ const cliMain = async (args, logger2) => {
+   const parsedArgs = parseArgs(args, argOptionMap);
+   if (!parsedArgs.command && (parsedArgs.options.help || parsedArgs.options.h)) {
+     await showHelp(logger2);
+     return 1;
+   }
+   switch (parsedArgs.command) {
+     case "dump":
+       return await dumpCommand(parsedArgs, logger2);
+     case "pack":
+       return await packCommand(parsedArgs, logger2);
+     case "publish":
+       return await publishCommand(parsedArgs, logger2);
+     default:
+       if (parsedArgs.command) {
+         logger2.error(`Unknown command: ${parsedArgs.command}`);
+       } else {
+         logger2.error(`Unknown command`);
+       }
+       logger2.error('Run "screw-up --help" for usage information.');
+       return 1;
+   }
+ };
+ const logger = internal.createConsoleLogger();
+ cliMain(
+   process.argv.slice(2),
+   // Remove 'node' and script path
+   logger
+ ).then((code) => process.exit(code)).catch((error) => {
+   logger.error(`CLI error: ${error}`);
+   process.exit(1);
+ });
+ //# sourceMappingURL=main.cjs.map
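
A note for readers of the bundled tar-vern code above: createTarHeader and parseTarHeader implement the standard ustar checksum convention, in which all 512 header bytes are summed with the eight checksum bytes treated as ASCII spaces (0x20). The following standalone sketch shows that round trip; it is illustrative only, not part of the published package, and the helper names (octal, buildFileHeader, checksumIsValid) are made up here. Field offsets mirror the code above.

// Illustrative sketch of the ustar checksum round trip (not part of screw-up or tar-vern).
const octal = (value, length) =>
  Buffer.from(value.toString(8).padStart(length - 1, "0") + "\0", "ascii");

const buildFileHeader = (name, size, mode, mtime) => {
  const b = Buffer.alloc(512, 0);
  b.write(name, 0, 100, "utf8");                              // name
  octal(mode & 4095, 8).copy(b, 100);                         // mode
  octal(0, 8).copy(b, 108);                                   // uid
  octal(0, 8).copy(b, 116);                                   // gid
  octal(size, 12).copy(b, 124);                               // size
  octal(Math.floor(mtime.getTime() / 1e3), 12).copy(b, 136);  // mtime
  Buffer.from("        ", "ascii").copy(b, 148);              // checksum placeholder: 8 spaces
  b.write("0", 156, 1, "ascii");                              // typeflag: regular file
  b.write("ustar\0", 257, 6, "ascii");                        // magic
  b.write("00", 263, 2, "ascii");                             // version
  let sum = 0;
  for (let i = 0; i < 512; i++) sum += b[i];                  // sum with the spaces in place
  octal(sum, 8).copy(b, 148);                                 // store the final checksum
  return b;
};

const checksumIsValid = (b) => {
  const stored = parseInt(b.subarray(148, 156).toString("ascii").replace(/\0/g, "").trim(), 8);
  let sum = 0;
  for (let i = 0; i < 512; i++) sum += i >= 148 && i < 156 ? 32 : b[i]; // 32 = ASCII space
  return stored === sum;
};

console.log(checksumIsValid(buildFileHeader("hello.txt", 12, 420, new Date()))); // true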
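Similarly, packAssets above works by running npm pack into a temporary directory, extracting that tarball into a staging tree, rewriting package/package.json (and optionally README.md), and re-packing the staging tree with the bundled packer. Below is a minimal sketch of that final repack-and-inspect step, assuming access to the module-local helpers defined above; they are not exported from screw-up's public API, so this is illustration only.

// Illustration only: these helpers are module-local in main.cjs.
// Mirrors the repack step at the end of packAssets.
const repackStagingTree = async (stagingDir, outputFile) => {
  // Walk the staging tree, emit tar entries, gzip them, and write the .tgz.
  const itemGenerator = createEntryItemGenerator(stagingDir);
  const packer = createTarPacker(itemGenerator, "gzip");
  await storeReaderToFile(packer, outputFile);
};

const listTarball = async (tarballPath) => {
  // Read the archive back with the streaming extractor.
  const reader = fs.createReadStream(tarballPath);
  for await (const entry of createTarExtractor(reader, "gzip")) {
    if (entry.kind === "file") {
      const content = await entry.getContent("buffer"); // content may be consumed exactly once
      console.log(`${entry.path} (${content.length} bytes)`);
    } else {
      console.log(`${entry.path}/`);
    }
  }
};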