screw-up 0.12.0 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +21 -7
- package/dist/analyzer.d.ts +13 -1
- package/dist/analyzer.d.ts.map +1 -1
- package/dist/cli-internal.d.ts +30 -7
- package/dist/cli-internal.d.ts.map +1 -1
- package/dist/cli.d.ts +12 -2
- package/dist/cli.d.ts.map +1 -1
- package/dist/generated/packageMetadata.d.ts +18 -0
- package/dist/generated/packageMetadata.d.ts.map +1 -0
- package/dist/index.cjs +36 -13
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.ts +10 -1
- package/dist/index.js +36 -13
- package/dist/index.js.map +1 -1
- package/dist/{internal-Di0s8LQa.cjs → internal-BHSe5LIZ.cjs} +349 -322
- package/dist/internal-BHSe5LIZ.cjs.map +1 -0
- package/dist/{internal-BaMzTKS2.js → internal-BgCvktPU.js} +351 -324
- package/dist/internal-BgCvktPU.js.map +1 -0
- package/dist/internal.d.ts +64 -14
- package/dist/internal.d.ts.map +1 -1
- package/dist/main.cjs +1166 -0
- package/dist/main.cjs.map +1 -0
- package/dist/main.d.ts +13 -0
- package/dist/main.d.ts.map +1 -0
- package/dist/main.js +1165 -0
- package/dist/main.js.map +1 -0
- package/dist/packageMetadata-D9nXAoK9.cjs +20 -0
- package/dist/packageMetadata-D9nXAoK9.cjs.map +1 -0
- package/dist/packageMetadata-Dsxn2dKN.js +20 -0
- package/dist/packageMetadata-Dsxn2dKN.js.map +1 -0
- package/dist/types.d.ts +15 -0
- package/dist/types.d.ts.map +1 -1
- package/dist/vite-plugin.d.ts +10 -1
- package/dist/vite-plugin.d.ts.map +1 -1
- package/images/screw-up-120.png +0 -0
- package/package.json +13 -14
- package/README_pack.md +0 -63
- package/dist/cli.cjs +0 -765
- package/dist/cli.cjs.map +0 -1
- package/dist/cli.js +0 -764
- package/dist/cli.js.map +0 -1
- package/dist/internal-BaMzTKS2.js.map +0 -1
- package/dist/internal-Di0s8LQa.cjs.map +0 -1
package/dist/main.js
ADDED
@@ -0,0 +1,1165 @@
#!/usr/bin/env node
/*!
 * name: screw-up
 * version: 0.14.0
 * description: Simply package metadata inserter on Vite plugin
 * author: Kouji Matsui (@kekyo@mi.kekyo.net)
 * license: MIT
 * repository.url: https://github.com/kekyo/screw-up.git
 * git.commit.hash: f1871df4c43aee9ab389a6ab1b2769b32322793b
 */
import { join, dirname, resolve } from "path";
import { createWriteStream, createReadStream, existsSync } from "fs";
import { mkdir, writeFile, stat, readdir, mkdtemp, copyFile, rm } from "fs/promises";
import { spawn } from "child_process";
import { Readable } from "stream";
import { createGunzip, createGzip } from "zlib";
import { pipeline } from "stream/promises";
import { tmpdir } from "os";
import { a as resolveRawPackageJsonObject, f as findWorkspaceRoot, b as collectWorkspaceSiblings, d as replacePeerDependenciesWildcards, c as createConsoleLogger } from "./internal-BgCvktPU.js";
/*!
 * name: tar-vern
 * version: 1.2.0
 * description: Tape archiver library for Typescript
 * author: Kouji Matsui (@kekyo@mi.kekyo.net)
 * license: MIT
 * repository.url: https://github.com/kekyo/tar-vern.git
 * git.commit.hash: 26ff2d96bfbd226ff79106604ff5f9e5193f91bc
 */
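// Bundled copy of tar-vern (see the banner above): a minimal ustar (POSIX tar)
// packer/extractor used by the pack/publish commands further down. In the
// ustar format a path is split into a 100-byte "name" field and a 155-byte
// "prefix" field, hence the two limits below.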
const MAX_NAME = 100;
const MAX_PREFIX = 155;
const getUName = (candidateName, candidateId, reflectStat) => {
  return candidateName != null ? candidateName : reflectStat === "all" ? candidateId.toString() : "root";
};
const getBuffer = (data) => {
  return Buffer.isBuffer(data) ? data : Buffer.from(data, "utf8");
};
const createDirectoryItem = async (path, reflectStat, options, signal) => {
  var _a, _b, _c, _d, _e, _f, _g, _h;
  const rs = reflectStat;
  if (options == null ? void 0 : options.directoryPath) {
    const stats = await stat(options.directoryPath);
    const mode = (_a = options == null ? void 0 : options.mode) != null ? _a : stats.mode;
    const uid = (_b = options == null ? void 0 : options.uid) != null ? _b : stats.uid;
    const gid = (_c = options == null ? void 0 : options.gid) != null ? _c : stats.gid;
    const date = (_d = options == null ? void 0 : options.date) != null ? _d : stats.mtime;
    const uname = getUName(options == null ? void 0 : options.uname, stats.uid, rs);
    const gname = getUName(options == null ? void 0 : options.gname, stats.gid, rs);
    return {
      kind: "directory",
      path,
      mode,
      uname,
      gname,
      uid,
      gid,
      date
    };
  } else {
    const mode = (_e = options == null ? void 0 : options.mode) != null ? _e : 493;
    const uid = (_f = options == null ? void 0 : options.uid) != null ? _f : 0;
    const gid = (_g = options == null ? void 0 : options.gid) != null ? _g : 0;
    const date = (_h = options == null ? void 0 : options.date) != null ? _h : /* @__PURE__ */ new Date();
    const uname = getUName(options == null ? void 0 : options.uname, void 0, rs);
    const gname = getUName(options == null ? void 0 : options.gname, void 0, rs);
    return {
      kind: "directory",
      path,
      mode,
      uname,
      gname,
      uid,
      gid,
      date
    };
  }
};
const createReadableFileItem = async (path, readable, options, signal) => {
  var _a, _b, _c, _d, _e, _f;
  const mode = (_a = options == null ? void 0 : options.mode) != null ? _a : 420;
  const uid = (_b = options == null ? void 0 : options.uid) != null ? _b : 0;
  const gid = (_c = options == null ? void 0 : options.gid) != null ? _c : 0;
  const date = (_d = options == null ? void 0 : options.date) != null ? _d : /* @__PURE__ */ new Date();
  const uname = (_e = options == null ? void 0 : options.uname) != null ? _e : "root";
  const gname = (_f = options == null ? void 0 : options.gname) != null ? _f : "root";
  let length = options == null ? void 0 : options.length;
  if (!length) {
    const chunks = [];
    length = 0;
    for await (const chunk of readable) {
      const buffer = getBuffer(chunk);
      chunks.push(buffer);
      length += buffer.length;
    }
    return {
      kind: "file",
      path,
      mode,
      uname,
      gname,
      uid,
      gid,
      date,
      content: {
        kind: "readable",
        length,
        readable: Readable.from(chunks, { signal })
      }
    };
  } else {
    return {
      kind: "file",
      path,
      mode,
      uname,
      gname,
      uid,
      gid,
      date,
      content: {
        kind: "readable",
        length,
        readable
      }
    };
  }
};
const createReadFileItem = async (path, filePath, reflectStat, options, signal) => {
  const rs = reflectStat;
  const stats = await stat(filePath);
  const reader = createReadStream(filePath, { signal });
  const mode = stats.mode;
  const uid = stats.uid;
  const gid = stats.gid;
  const date = stats.mtime;
  const uname = getUName(options == null ? void 0 : options.uname, stats.uid, rs);
  const gname = getUName(options == null ? void 0 : options.gname, stats.gid, rs);
  return await createReadableFileItem(path, reader, {
    length: stats.size,
    mode,
    uname,
    gname,
    uid,
    gid,
    date
  }, signal);
};
const storeReaderToFile = async (reader, path, signal) => {
  const writer = createWriteStream(path, { signal });
  await pipeline(reader, writer, { signal });
};
const getAllFilesInDirectory = async (baseDir, signal) => {
  const collectFiles = async (currentDir, relativePath) => {
    try {
      const entries = await readdir(currentDir, { withFileTypes: true });
      const result = [];
      const tasks = entries.map(async (entry) => {
        signal == null ? void 0 : signal.throwIfAborted();
        const entryRelativePath = join(relativePath, entry.name);
        if (entry.isDirectory()) {
          const entryFullPath = join(currentDir, entry.name);
          const directoryContents = await collectFiles(entryFullPath, entryRelativePath);
          return [entryRelativePath, ...directoryContents];
        } else {
          return [entryRelativePath];
        }
      });
      const allResults = await Promise.all(tasks);
      for (const entryResults of allResults) {
        result.push(...entryResults);
      }
      return result;
    } catch (error) {
      console.warn(`Warning: Could not read directory ${currentDir}:`, error);
      return [];
    }
  };
  return await collectFiles(baseDir, "");
};
const createEntryItemGenerator = async function* (baseDir, relativePaths, includeDirectory, reflectStat, signal) {
  const rs = "exceptName";
  const includeDir = true;
  const pathsToProcess = await getAllFilesInDirectory(baseDir, signal);
  for (const relativePath of pathsToProcess) {
    const fsPath = join(baseDir, relativePath);
    try {
      signal == null ? void 0 : signal.throwIfAborted();
      const stats = await stat(fsPath);
      if (includeDir && stats.isDirectory()) {
        yield await createDirectoryItem(relativePath, rs, {
          directoryPath: fsPath
        }, signal);
      } else if (stats.isFile()) {
        yield await createReadFileItem(relativePath, fsPath, rs, void 0, signal);
      }
    } catch (error) {
      console.warn(`Warning: Could not access ${fsPath}:`, error);
      continue;
    }
  }
};
const extractTo = async (iterator, basePath, signal) => {
  for await (const entry of iterator) {
    const targetPath = join(basePath, entry.path);
    if (entry.kind === "directory") {
      try {
        signal == null ? void 0 : signal.throwIfAborted();
        await mkdir(targetPath, { recursive: true, mode: entry.mode });
      } catch (error) {
        if (error.code !== "EEXIST") {
          throw error;
        }
      }
    } else if (entry.kind === "file") {
      const parentDir = dirname(targetPath);
      await mkdir(parentDir, { recursive: true });
      const fileEntry = entry;
      const content = await fileEntry.getContent("buffer");
      await writeFile(targetPath, content, { mode: entry.mode, signal });
    }
  }
};
const utf8ByteLength = (str) => {
  return Buffer.byteLength(str, "utf8");
};
const truncateUtf8Safe = (str, maxBytes) => {
  let total = 0;
  let i = 0;
  while (i < str.length) {
    const codePoint = str.codePointAt(i);
    const char = String.fromCodePoint(codePoint);
    const charBytes = Buffer.byteLength(char, "utf8");
    if (total + charBytes > maxBytes) break;
    total += charBytes;
    i += char.length;
  }
  return str.slice(0, i);
};
const splitPath = (path) => {
  var _a;
  if (utf8ByteLength(path) <= MAX_NAME) {
    return { prefix: "", name: path };
  }
  const parts = path.split("/");
  let name = (_a = parts.pop()) != null ? _a : "";
  let prefix = parts.join("/");
  if (utf8ByteLength(name) > MAX_NAME) {
    name = truncateUtf8Safe(name, MAX_NAME);
  }
  while (utf8ByteLength(prefix) > MAX_PREFIX) {
    prefix = truncateUtf8Safe(prefix, MAX_PREFIX);
  }
  return { prefix, name };
};
const getOctalBytes = (value, length) => {
  const str = value.toString(8).padStart(length - 1, "0") + "\0";
  return Buffer.from(str, "ascii");
};
const getPaddedBytes = (buffer) => {
  const extra = buffer.length % 512;
  if (extra === 0) {
    return buffer;
  } else {
    return Buffer.concat([buffer, Buffer.alloc(512 - extra, 0)]);
  }
};
const terminatorBytes = Buffer.alloc(1024, 0);
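// ustar header layout (one 512-byte block per entry): name@0, mode@100,
// uid@108, gid@116, size@124, mtime@136, checksum@148, typeflag@156,
// magic "ustar"@257, uname@265, gname@297, prefix@345. The checksum is
// computed with the checksum field itself treated as eight spaces.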
const createTarHeader = (type, path, size, mode, uname, gname, uid, gid, date) => {
  const buffer = Buffer.alloc(512, 0);
  const { name, prefix } = splitPath(path);
  buffer.write(name, 0, 100, "utf8");
  getOctalBytes(mode & 4095, 8).copy(buffer, 100);
  getOctalBytes(uid, 8).copy(buffer, 108);
  getOctalBytes(gid, 8).copy(buffer, 116);
  getOctalBytes(size, 12).copy(buffer, 124);
  getOctalBytes(Math.floor(date.getTime() / 1e3), 12).copy(buffer, 136);
  Buffer.from("        ", "ascii").copy(buffer, 148);
  if (type === "file") {
    buffer.write("0", 156, 1, "ascii");
  } else {
    buffer.write("5", 156, 1, "ascii");
  }
  buffer.write("ustar\0", 257, 6, "ascii");
  buffer.write("00", 263, 2, "ascii");
  buffer.write(uname, 265, 32, "utf8");
  buffer.write(gname, 297, 32, "utf8");
  buffer.write(prefix, 345, 155, "utf8");
  let sum = 0;
  for (let i = 0; i < 512; i++) {
    sum += buffer[i];
  }
  getOctalBytes(sum, 8).copy(buffer, 148);
  return buffer;
};
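// Streams tar output: serializes each entry from the async generator into
// header + content blocks, pads file content to 512-byte boundaries, appends
// the two-block terminator, and optionally pipes the result through gzip.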
const createTarPacker = (entryItemGenerator, compressionType, signal) => {
  const entryItemIterator = async function* () {
    for await (const entryItem of entryItemGenerator) {
      switch (entryItem.kind) {
        // Entry is a file
        case "file": {
          const entryItemContent = entryItem.content;
          if (typeof entryItemContent === "string" || Buffer.isBuffer(entryItemContent)) {
            const contentBytes = getBuffer(entryItemContent);
            const tarHeaderBytes = createTarHeader(
              "file",
              entryItem.path,
              contentBytes.length,
              entryItem.mode,
              entryItem.uname,
              entryItem.gname,
              entryItem.uid,
              entryItem.gid,
              entryItem.date
            );
            yield tarHeaderBytes;
            const totalPaddedContentBytes = getPaddedBytes(contentBytes);
            yield totalPaddedContentBytes;
          } else {
            const content = entryItemContent;
            const tarHeaderBytes = createTarHeader(
              "file",
              entryItem.path,
              content.length,
              entryItem.mode,
              entryItem.uname,
              entryItem.gname,
              entryItem.uid,
              entryItem.gid,
              entryItem.date
            );
            yield tarHeaderBytes;
            let position = 0;
            switch (content.kind) {
              // Content is a generator
              case "generator": {
                for await (const contentBytes of content.generator) {
                  yield contentBytes;
                  position += contentBytes.length;
                }
                break;
              }
              // Content is a readable stream
              case "readable": {
                for await (const chunk of content.readable) {
                  const contentBytes = getBuffer(chunk);
                  yield contentBytes;
                  position += contentBytes.length;
                }
                break;
              }
            }
            if (position % 512 !== 0) {
              yield Buffer.alloc(512 - position % 512, 0);
            }
          }
          break;
        }
        // Entry is a directory
        case "directory": {
          const tarHeaderBytes = createTarHeader(
            "directory",
            entryItem.path,
            0,
            entryItem.mode,
            entryItem.uname,
            entryItem.gname,
            entryItem.uid,
            entryItem.gid,
            entryItem.date
          );
          yield tarHeaderBytes;
          break;
        }
      }
    }
    yield terminatorBytes;
  };
  const ct = compressionType;
  switch (ct) {
    // No compression
    case "none": {
      return Readable.from(entryItemIterator(), { signal });
    }
    // Gzip compression
    case "gzip": {
      const gzipStream = createGzip({ level: 9 });
      const entryItemStream = Readable.from(entryItemIterator(), { signal });
      entryItemStream.pipe(gzipStream);
      return gzipStream;
    }
  }
};
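// Extraction side: helpers for decoding header fields and for reading or
// skipping an exact number of bytes from a buffered async iterator; excess
// bytes from an over-read chunk are pushed back via iterator.return.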
const parseOctalBytes = (buffer, offset, length) => {
  const str = buffer.subarray(offset, offset + length).toString("ascii").replace(/\0/g, "").trim();
  return str ? parseInt(str, 8) : 0;
};
const parseString = (buffer, offset, length) => {
  return buffer.subarray(offset, offset + length).toString("utf8").replace(/\0/g, "").trim();
};
const readExactBytes = async (iterator, size, signal) => {
  var _a;
  const chunks = [];
  let totalRead = 0;
  while (totalRead < size) {
    const { value, done } = await iterator.next();
    if (done) {
      if (totalRead === 0) {
        return void 0;
      } else {
        throw new Error(`Unexpected end of stream: expected ${size} bytes, got ${totalRead} bytes`);
      }
    }
    const chunk = getBuffer(value);
    const needed = size - totalRead;
    if (chunk.length <= needed) {
      chunks.push(chunk);
      totalRead += chunk.length;
    } else {
      chunks.push(chunk.subarray(0, needed));
      await ((_a = iterator.return) == null ? void 0 : _a.call(iterator, chunk.subarray(needed)));
      totalRead = size;
    }
  }
  return Buffer.concat(chunks, size);
};
const skipExactBytes = async (iterator, size, signal) => {
  var _a;
  let totalSkipped = 0;
  while (totalSkipped < size) {
    const { value, done } = await iterator.next();
    if (done) {
      throw new Error(`Unexpected end of stream: expected to skip ${size} bytes, skipped ${totalSkipped} bytes`);
    }
    const chunk = getBuffer(value);
    const needed = size - totalSkipped;
    if (chunk.length <= needed) {
      totalSkipped += chunk.length;
    } else {
      await ((_a = iterator.return) == null ? void 0 : _a.call(iterator, chunk.subarray(needed)));
      totalSkipped = size;
    }
  }
};
const skipPaddingBytesTo512Boundary = async (iterator, contentSize, signal) => {
  const padding = (512 - contentSize % 512) % 512;
  if (padding > 0) {
    await skipExactBytes(iterator, padding);
  }
};
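// Decodes one 512-byte header block. An all-zero block signals the archive
// terminator; the stored checksum is verified by re-summing the block with
// the checksum field counted as spaces (0x20).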
const parseTarHeader = (buffer) => {
  if (buffer.every((b) => b === 0)) {
    return void 0;
  }
  const name = parseString(buffer, 0, 100);
  const mode = parseOctalBytes(buffer, 100, 8);
  const uid = parseOctalBytes(buffer, 108, 8);
  const gid = parseOctalBytes(buffer, 116, 8);
  const size = parseOctalBytes(buffer, 124, 12);
  const mtime = new Date(parseOctalBytes(buffer, 136, 12) * 1e3);
  const checksum = parseOctalBytes(buffer, 148, 8);
  const typeflag = parseString(buffer, 156, 1);
  const magic = parseString(buffer, 257, 6);
  const uname = parseString(buffer, 265, 32);
  const gname = parseString(buffer, 297, 32);
  const prefix = parseString(buffer, 345, 155);
  if (magic !== "ustar") {
    throw new Error(`Invalid tar format: magic="${magic}"`);
  }
  let calculatedSum = 0;
  for (let i = 0; i < 512; i++) {
    if (i >= 148 && i < 156) {
      calculatedSum += 32;
    } else {
      calculatedSum += buffer[i];
    }
  }
  if (calculatedSum !== checksum) {
    throw new Error(`Invalid checksum: expected ${checksum}, got ${calculatedSum}`);
  }
  let path = prefix ? `${prefix}/${name}` : name;
  if (path.endsWith("/")) {
    path = path.slice(0, -1);
  }
  const kind = typeflag === "5" ? "directory" : "file";
  return {
    kind,
    path,
    size,
    mode,
    uid,
    gid,
    mtime,
    uname: uname || uid.toString(),
    gname: gname || gid.toString(),
    checksum,
    consumed: false
  };
};
const createBufferedAsyncIterator = (iterable, signal) => {
  const buffer = [];
  const iterator = iterable[Symbol.asyncIterator]();
  return {
    next: async () => {
      if (buffer.length > 0) {
        return { value: buffer.shift(), done: false };
      }
      return iterator.next();
    },
    return: async (value) => {
      if (value !== void 0) {
        buffer.unshift(value);
      }
      return { value: void 0, done: false };
    }
  };
};
const createReadableFromIterator = (iterator, size, signal, consumedRef) => {
  const generator = async function* () {
    var _a;
    let remainingBytes = size;
    while (remainingBytes > 0) {
      const { value, done } = await iterator.next();
      if (done) {
        throw new Error(`Unexpected end of stream: expected ${size} bytes, remaining ${remainingBytes} bytes`);
      }
      const chunk = getBuffer(value);
      if (chunk.length <= remainingBytes) {
        remainingBytes -= chunk.length;
        yield chunk;
      } else {
        const needed = chunk.subarray(0, remainingBytes);
        const excess = chunk.subarray(remainingBytes);
        remainingBytes = 0;
        await ((_a = iterator.return) == null ? void 0 : _a.call(iterator, excess));
        yield needed;
        break;
      }
    }
    await skipPaddingBytesTo512Boundary(iterator, size);
    consumedRef.consumed = true;
  };
  return Readable.from(generator(), { signal });
};
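// Async generator over archive entries. File entries expose a one-shot
// getContent("string" | "buffer" | "readable"); if a file entry's content is
// never consumed, it is skipped automatically before the next header is read.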
const createTarExtractor = async function* (readable, compressionType, signal) {
  const ct = compressionType;
  let inputStream;
  switch (ct) {
    case "gzip":
      const gunzip = createGunzip();
      readable.pipe(gunzip);
      inputStream = gunzip;
      break;
    case "none":
    default:
      inputStream = readable;
      break;
  }
  const iterator = createBufferedAsyncIterator(inputStream);
  let header;
  while (true) {
    if ((header == null ? void 0 : header.kind) === "file" && !header.consumed) {
      await skipExactBytes(iterator, header.size);
      await skipPaddingBytesTo512Boundary(iterator, header.size);
      header.consumed = true;
    }
    let headerBuffer;
    try {
      headerBuffer = await readExactBytes(iterator, 512, signal);
    } catch (error) {
      if (error instanceof Error && error.message.includes("Unexpected end of stream")) {
        throw new Error("Invalid tar format: incomplete header");
      }
      throw error;
    }
    if (headerBuffer === void 0) {
      break;
    }
    header = parseTarHeader(headerBuffer);
    if (!header) {
      const secondBlock = await readExactBytes(iterator, 512);
      if (secondBlock === void 0 || secondBlock.every((b) => b === 0)) {
        break;
      }
      throw new Error("Invalid tar format: expected terminator block");
    }
    if (header.kind === "directory") {
      yield {
        kind: "directory",
        path: header.path,
        mode: header.mode,
        uid: header.uid,
        gid: header.gid,
        uname: header.uname,
        gname: header.gname,
        date: header.mtime
      };
    } else {
      const currentHeader = header;
      yield {
        kind: "file",
        path: currentHeader.path,
        mode: currentHeader.mode,
        uid: currentHeader.uid,
        gid: currentHeader.gid,
        uname: currentHeader.uname,
        gname: currentHeader.gname,
        date: currentHeader.mtime,
        getContent: async (type) => {
          if (currentHeader.consumed) {
            throw new Error("Content has already been consumed. Multiple calls to getContent are not supported.");
          }
          switch (type) {
            // For string
            case "string": {
              const dataBuffer = await readExactBytes(iterator, currentHeader.size);
              if (dataBuffer === void 0) {
                throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
              }
              await skipPaddingBytesTo512Boundary(iterator, currentHeader.size);
              currentHeader.consumed = true;
              return dataBuffer.toString("utf8");
            }
            // For buffer
            case "buffer": {
              const dataBuffer = await readExactBytes(iterator, currentHeader.size);
              if (dataBuffer === void 0) {
                throw new Error(`Unexpected end of stream while reading file data for ${currentHeader.path}`);
              }
              await skipPaddingBytesTo512Boundary(iterator, currentHeader.size);
              currentHeader.consumed = true;
              return dataBuffer;
            }
            // For Readable stream
            case "readable": {
              const readable2 = createReadableFromIterator(iterator, currentHeader.size, signal, currentHeader);
              return readable2;
            }
            default:
              throw new Error(`Unsupported content type: ${type}`);
          }
        }
      };
    }
  }
};
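// ---- End of the bundled tar-vern code; the screw-up CLI follows. ----
// File selection honors .npmignore/"files" by delegating to `npm pack`,
// whose stdout reports the generated .tgz filename.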
const runNpmPack = async (targetDir, packDestDir) => {
  return new Promise((res, rej) => {
    const npmProcess = spawn("npm", ["pack", "--pack-destination", packDestDir], {
      cwd: targetDir,
      stdio: ["ignore", "pipe", "pipe"]
    });
    let stdout = "";
    let stderr = "";
    npmProcess.stdout.on("data", (data) => {
      stdout += data.toString();
    });
    npmProcess.stderr.on("data", (data) => {
      stderr += data.toString();
    });
    npmProcess.on("close", (code) => {
      if (code === 0) {
        const lines = stdout.trim().split("\n");
        const filename = lines.find((line) => line.trim().endsWith(".tgz")) || lines[lines.length - 1];
        if (filename && filename.trim().endsWith(".tgz")) {
          const fullPath = join(packDestDir, filename.trim());
          res(fullPath);
        } else {
          rej(new Error("npm pack did not output a valid .tgz filename"));
        }
      } else {
        const errorMessage = `npm pack failed with exit code ${code}`;
        const fullError = stderr ? `${errorMessage}
stderr: ${stderr}` : errorMessage;
        if (stdout) {
          rej(new Error(`${fullError}
stdout: ${stdout}`));
        } else {
          rej(new Error(fullError));
        }
      }
    });
    npmProcess.on("error", (error) => {
      rej(new Error(`Failed to spawn npm pack: ${error.message}`));
    });
  });
};
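// Core pack pipeline: run `npm pack` into a temp dir, extract the tarball to
// a staging area, overwrite package/package.json with the resolved metadata
// (and README.md if a replacement is given), then re-pack the staging tree
// with the bundled tar packer. Returns undefined for private packages.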
const packAssets = async (targetDir, outputDir, checkWorkingDirectoryStatus, alwaysOverrideVersionFromGit, inheritableFields, readmeReplacementPath, replacePeerDepsWildcards, peerDepsVersionPrefix, logger2) => {
  var _a, _b, _c;
  if (!existsSync(targetDir)) {
    throw new Error(`Target directory is not found: ${targetDir}`);
  }
  let readmeReplacementCandidatePath = readmeReplacementPath;
  if (readmeReplacementCandidatePath && !existsSync(readmeReplacementCandidatePath)) {
    throw new Error(`README replacement file is not found: ${readmeReplacementCandidatePath}`);
  }
  const result = await resolveRawPackageJsonObject(
    targetDir,
    checkWorkingDirectoryStatus,
    alwaysOverrideVersionFromGit,
    inheritableFields,
    logger2
  );
  let resolvedPackageJson = result.metadata;
  if (resolvedPackageJson == null ? void 0 : resolvedPackageJson.private) {
    return void 0;
  }
  const packageJsonReadme = resolvedPackageJson.readme;
  if (packageJsonReadme) {
    if (!readmeReplacementCandidatePath) {
      const packageJsonReadmeDir = result.sourceMap.get("readme");
      const packageJsonReadmePath = join(packageJsonReadmeDir, packageJsonReadme);
      if (!existsSync(packageJsonReadmePath)) {
        throw new Error(`README replacement file is not found: ${packageJsonReadmePath}`);
      }
      readmeReplacementCandidatePath = packageJsonReadmePath;
    }
    delete resolvedPackageJson.readme;
  }
  if (replacePeerDepsWildcards) {
    const workspaceRoot = await findWorkspaceRoot(targetDir, logger2);
    if (workspaceRoot) {
      const siblings = await collectWorkspaceSiblings(workspaceRoot, logger2);
      if (siblings.size > 0) {
        resolvedPackageJson = replacePeerDependenciesWildcards(
          resolvedPackageJson,
          siblings,
          peerDepsVersionPrefix
        );
      }
    }
  }
  const baseTempDir = await mkdtemp(join(tmpdir(), "screw-up-npm-pack-"));
  await mkdir(baseTempDir, { recursive: true });
  try {
    const npmTarballPath = await runNpmPack(targetDir, baseTempDir);
    const stagingDir = join(baseTempDir, "staging");
    await mkdir(stagingDir, { recursive: true });
    const stream = createReadStream(npmTarballPath);
    await extractTo(createTarExtractor(stream, "gzip"), stagingDir);
    const packageJsonPath = join(stagingDir, "package", "package.json");
    if (existsSync(packageJsonPath)) {
      await writeFile(packageJsonPath, JSON.stringify(resolvedPackageJson, null, 2));
    }
    if (readmeReplacementCandidatePath) {
      const readmeDestPath = join(stagingDir, "package", "README.md");
      await copyFile(readmeReplacementCandidatePath, readmeDestPath);
    }
    const outputFileName = `${(_b = (_a = resolvedPackageJson == null ? void 0 : resolvedPackageJson.name) == null ? void 0 : _a.replace("/", "-")) != null ? _b : "package"}-${(_c = resolvedPackageJson == null ? void 0 : resolvedPackageJson.version) != null ? _c : "0.0.0"}.tgz`;
    await mkdir(outputDir, { recursive: true });
    const outputFile = join(outputDir, outputFileName);
    const itemGenerator = createEntryItemGenerator(stagingDir);
    const packer = createTarPacker(itemGenerator, "gzip");
    await storeReaderToFile(packer, outputFile);
    return {
      packageFileName: outputFileName,
      metadata: resolvedPackageJson
    };
  } finally {
    await rm(baseTempDir, { recursive: true, force: true });
  }
};
const getComputedPackageJsonObject = async (targetDir, checkWorkingDirectoryStatus, alwaysOverrideVersionFromGit, inheritableFields, logger2) => {
  if (!existsSync(targetDir)) {
    return void 0;
  }
  const result = await resolveRawPackageJsonObject(
    targetDir,
    checkWorkingDirectoryStatus,
    alwaysOverrideVersionFromGit,
    inheritableFields,
    logger2
  );
  return result.metadata;
};
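// Minimal argv parser: the first bare token is the command, later bare tokens
// are positional. A "--name" option consumes a following value only when
// argOptionMap lists it as value-taking for the current command; otherwise it
// becomes a boolean flag. Single-letter "-x" flags are booleans.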
const parseArgs = (args, argOptionMap2) => {
  const result = {
    argv: args,
    positional: [],
    options: {}
  };
  for (let i = 0; i < args.length; i++) {
    const arg = args[i];
    if (arg.startsWith("--")) {
      const optionName = arg.slice(2);
      if (!result.command) {
        result.options[optionName] = true;
      } else {
        const argOptions = argOptionMap2.get(result.command);
        if (argOptions.has(optionName)) {
          i++;
          result.options[optionName] = args[i];
        } else {
          result.options[optionName] = true;
        }
      }
    } else if (arg.startsWith("-")) {
      const optionName = arg.slice(1);
      if (optionName.length == 1) {
        result.options[optionName] = true;
      }
    } else if (!result.command) {
      result.command = arg;
    } else {
      result.positional.push(arg);
    }
  }
  return result;
};
const defaultInheritableFields = /* @__PURE__ */ new Set([
  "version",
  "description",
  "author",
  "license",
  "repository",
  "keywords",
  "homepage",
  "bugs",
  "readme"
]);
const parseInheritableFields = (inheritableFieldsOption) => {
  if (typeof inheritableFieldsOption !== "string") {
    return defaultInheritableFields;
  }
  if (!inheritableFieldsOption.trim()) {
    return /* @__PURE__ */ new Set();
  }
  return new Set(inheritableFieldsOption.split(",").map((field) => field.trim()).filter((field) => field.length > 0));
};
const showDumpHelp = (logger2) => {
  logger2.info(`Usage: screw-up dump [options] [directory]

Dump computed package.json as JSON

Arguments:
  directory                      Directory to dump package.json from (default: current directory)

Options:
  --inheritable-fields <list>    Comma-separated list of fields to inherit from parent
  --no-wds                       Do not check working directory status to increase version
  --no-git-version-override      Do not override version from Git (use package.json version)
  -h, --help                     Show help for dump command
`);
};
const dumpCommand = async (args, logger2) => {
  if (args.options.help || args.options.h) {
    showDumpHelp(logger2);
    return 1;
  }
  const directory = args.positional[0];
  const inheritableFieldsOption = args.options["inheritable-fields"];
  const alwaysOverrideVersionFromGit = !args.options["no-git-version-override"];
  const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
  const inheritableFields = parseInheritableFields(inheritableFieldsOption);
  const targetDir = resolve(directory != null ? directory : process.cwd());
  try {
    const computedPackageJson = await getComputedPackageJsonObject(
      targetDir,
      checkWorkingDirectoryStatus,
      alwaysOverrideVersionFromGit,
      inheritableFields,
      logger2
    );
    if (computedPackageJson) {
      logger2.info(JSON.stringify(computedPackageJson, null, 2));
    } else {
      logger2.error(`[screw-up:cli]: dump: Unable to read package.json from: ${targetDir}`);
      return 1;
    }
  } catch (error) {
    logger2.error(`[screw-up:cli]: dump: Failed to dump package.json: ${error}`);
    return 1;
  }
  return 0;
};
const showPackHelp = (logger2) => {
  logger2.info(`Usage: screw-up pack [options] [directory]

Pack the project into a tar archive

Arguments:
  directory                      Directory to pack (default: current directory)

Options:
  --pack-destination <path>      Directory to write the tarball
  --readme <path>                Replace README.md with specified file
  --inheritable-fields <list>    Comma-separated list of fields to inherit from parent
  --no-wds                       Do not check working directory status to increase version
  --no-git-version-override      Do not override version from Git (use package.json version)
  --no-replace-peer-deps         Disable replacing "*" in peerDependencies with actual versions
  --peer-deps-prefix <prefix>    Version prefix for replaced peerDependencies (default: "^")
  --verbose                      Print verbose log
  -h, --help                     Show help for pack command
`);
};
const packCommand = async (args, logger2) => {
  var _a;
  if (args.options.help || args.options.h) {
    showPackHelp(logger2);
    return 1;
  }
  const directory = args.positional[0];
  const packDestination = args.options["pack-destination"];
  const readmeOption = args.options["readme"];
  const inheritableFieldsOption = args.options["inheritable-fields"];
  const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
  const alwaysOverrideVersionFromGit = !args.options["no-git-version-override"];
  const replacePeerDepsWildcards = !args.options["no-replace-peer-deps"];
  const peerDepsVersionPrefix = (_a = args.options["peer-deps-prefix"]) != null ? _a : "^";
  const verbose = args.options["verbose"] ? true : false;
  const targetDir = resolve(directory != null ? directory : process.cwd());
  const outputDir = packDestination ? resolve(packDestination) : process.cwd();
  const readmeReplacementPath = readmeOption ? resolve(readmeOption) : void 0;
  const inheritableFields = parseInheritableFields(inheritableFieldsOption);
  if (verbose) {
    logger2.info(`[screw-up:cli]: pack: Creating archive of ${targetDir}...`);
  }
  try {
    const result = await packAssets(
      targetDir,
      outputDir,
      checkWorkingDirectoryStatus,
      alwaysOverrideVersionFromGit,
      inheritableFields,
      readmeReplacementPath,
      replacePeerDepsWildcards,
      peerDepsVersionPrefix,
      logger2
    );
    if (result) {
      if (verbose) {
        logger2.info(`[screw-up:cli]: pack: Archive created successfully: ${result.packageFileName}`);
      } else {
        logger2.info(result.packageFileName);
      }
    } else {
      logger2.error(`[screw-up:cli]: pack: Unable to find any files to pack: ${targetDir}`);
      return 1;
    }
  } catch (error) {
    logger2.error(`[screw-up:cli]: pack: Failed to create archive: ${error}`);
    return 1;
  }
  return 0;
};
const showPublishHelp = (logger2) => {
  logger2.info(`Usage: screw-up publish [options] [directory|package.tgz]

Publish the project

Arguments:
  directory|package.tgz    Directory to pack and publish, or existing tarball to publish

Options:
  All npm publish options are supported, including:
  --dry-run                Perform a dry run
  --tag <tag>              Tag for the published version
  --access <access>        Access level (public or restricted)
  --registry <registry>    Registry URL
  -h, --help               Show help for publish command

Examples:
  screw-up publish                        # Publish current directory
  screw-up publish ./my-project           # Publish specific directory
  screw-up publish package.tgz            # Publish existing tarball
  screw-up publish --dry-run --tag beta   # Publish with options
`);
};
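// Runs `npm publish <tarball>` with inherited stdio. When the
// SCREW_UP_TEST_MODE environment variable is "true", the command is only
// logged, not executed.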
const runNpmPublish = async (tarballPath, npmOptions, verbose, logger2) => {
  if (verbose) {
    logger2.info(`[screw-up:cli]: publish: Publishing ${tarballPath} to npm...`);
  }
  const publishArgs = ["publish", tarballPath, ...npmOptions];
  if (process.env.SCREW_UP_TEST_MODE === "true") {
    logger2.info(`[screw-up:cli]: TEST_MODE: Would execute: npm ${publishArgs.join(" ")}`);
    logger2.info(`[screw-up:cli]: TEST_MODE: Tarball path: ${tarballPath}`);
    logger2.info(`[screw-up:cli]: TEST_MODE: Options: ${npmOptions.join(" ")}`);
    logger2.info(`[screw-up:cli]: publish: Successfully published ${tarballPath}`);
    return 0;
  }
  const npmProcess = spawn("npm", publishArgs, { stdio: "inherit" });
  return new Promise((resolve2, reject) => {
    npmProcess.on("close", (code) => {
      if (code === 0) {
        if (verbose) {
          logger2.info(`[screw-up:cli]: publish: Successfully published ${tarballPath}`);
        }
        resolve2(code);
      } else {
        logger2.error(`[screw-up:cli]: publish: npm publish failed: ${tarballPath}`);
        resolve2(code);
      }
    });
    npmProcess.on("error", reject);
  });
};
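// Accepts a directory (packed first via packAssets) or an existing
// .tgz/.tar.gz tarball. screw-up-specific options are filtered out below
// (value-taking ones together with their value); all remaining arguments
// are forwarded verbatim to `npm publish`.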
const publishCommand = async (args, logger2) => {
  var _a;
  if (args.options.help || args.options.h) {
    showPublishHelp(logger2);
    return 1;
  }
  const path = args.positional[0];
  const readmeOption = args.options["readme"];
  const inheritableFieldsOption = args.options["inheritable-fields"];
  const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
  const alwaysOverrideVersionFromGit = !args.options["no-git-version-override"];
  const replacePeerDepsWildcards = !args.options["no-replace-peer-deps"];
  const peerDepsVersionPrefix = (_a = args.options["peer-deps-prefix"]) != null ? _a : "^";
  const verbose = args.options["verbose"] ? true : false;
  const inheritableFields = parseInheritableFields(inheritableFieldsOption);
  const readmeReplacementPath = readmeOption ? resolve(readmeOption) : void 0;
  const npmOptions = [];
  for (let i = 0; i < args.argv.length; i++) {
    const arg = args.argv[i];
    if (arg === "--help" || arg === "--verbose" || arg === "-h" || arg === "--no-wds" || arg === "--no-git-version-override" || arg === "--no-replace-peer-deps") ;
    else if (arg === "--readme" || arg === "--inheritable-fields" || arg === "--peer-deps-prefix") {
      i++;
    } else {
      npmOptions.push(arg);
    }
  }
  try {
    if (!path) {
      const targetDir = process.cwd();
      const outputDir = await mkdtemp("screw-up-publish-");
      if (verbose) {
        logger2.info(`[screw-up:cli]: publish: Creating archive of ${targetDir}...`);
      }
      try {
        const result = await packAssets(
          targetDir,
          outputDir,
          checkWorkingDirectoryStatus,
          alwaysOverrideVersionFromGit,
          inheritableFields,
          readmeReplacementPath,
          replacePeerDepsWildcards,
          peerDepsVersionPrefix,
          logger2
        );
        if (result == null ? void 0 : result.metadata) {
          if (verbose) {
            logger2.info(`[screw-up:cli]: publish: Archive created successfully: ${result.packageFileName}`);
          }
          const archiveName = `${result.metadata.name}-${result.metadata.version}.tgz`;
          const archivePath = join(outputDir, archiveName);
          return await runNpmPublish(archivePath, npmOptions, verbose, logger2);
        } else {
          logger2.error(`[screw-up:cli]: publish: Unable to find any files to pack: ${targetDir}`);
          return 1;
        }
      } finally {
        await rm(outputDir, { recursive: true, force: true });
      }
    } else if (existsSync(path)) {
      const pathStat = await stat(path);
      if (pathStat.isFile() && (path.endsWith(".tgz") || path.endsWith(".tar.gz"))) {
        return await runNpmPublish(resolve(path), npmOptions, verbose, logger2);
      } else if (pathStat.isDirectory()) {
        const targetDir = resolve(path);
        const outputDir = await mkdtemp("screw-up-publish-");
        if (verbose) {
          logger2.info(`[screw-up:cli]: publish: Creating archive of ${targetDir}...`);
        }
        try {
          const result = await packAssets(
            targetDir,
            outputDir,
            checkWorkingDirectoryStatus,
            alwaysOverrideVersionFromGit,
            inheritableFields,
            readmeReplacementPath,
            replacePeerDepsWildcards,
            peerDepsVersionPrefix,
            logger2
          );
          if (result == null ? void 0 : result.metadata) {
            if (verbose) {
              logger2.info(`[screw-up:cli]: publish: Archive created successfully: ${result.packageFileName}`);
            }
            const archiveName = `${result.metadata.name}-${result.metadata.version}.tgz`;
            const archivePath = join(outputDir, archiveName);
            return await runNpmPublish(archivePath, npmOptions, verbose, logger2);
          } else {
            logger2.error(`[screw-up:cli]: publish: Unable to find any files to pack: ${targetDir}`);
            return 1;
          }
        } finally {
          await rm(outputDir, { recursive: true, force: true });
        }
      } else {
        logger2.error(`[screw-up:cli]: publish: Invalid path - must be a directory or .tgz/.tar.gz file: ${path}`);
        return 1;
      }
    } else {
      logger2.error(`[screw-up:cli]: publish: Path does not exist: ${path}`);
      return 1;
    }
  } catch (error) {
    logger2.error(`[screw-up:cli]: publish: Failed to publish: ${error}`);
    return 1;
  }
};
const showHelp = async (logger2) => {
  const { author, license, repository_url, version } = await import("./packageMetadata-Dsxn2dKN.js");
  logger2.info(`screw-up - Easy package metadata inserter CLI [${version}]
Copyright (c) ${author}
Repository: ${repository_url}
License: ${license}

Usage: screw-up <command> [options]

Commands:
  dump [directory]                  Dump computed package.json as JSON
  pack [directory]                  Pack the project into a tar archive
  publish [directory|package.tgz]   Publish the project

Options:
  -h, --help    Show help

Examples:
  screw-up dump                             # Dump computed package.json as JSON
  screw-up pack                             # Pack current directory
  screw-up pack --pack-destination ./dist   # Pack to specific output directory
  screw-up publish                          # Publish current directory
  screw-up publish package.tgz              # Publish existing tarball
`);
};
const argOptionMap = /* @__PURE__ */ new Map([
  ["dump", /* @__PURE__ */ new Set(["inheritable-fields"])],
  ["pack", /* @__PURE__ */ new Set(["pack-destination", "readme", "inheritable-fields", "peer-deps-prefix"])],
  ["publish", /* @__PURE__ */ new Set(["inheritable-fields", "peer-deps-prefix"])]
]);
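// Entry point: dispatches to the command handlers above; the resolved return
// value becomes the process exit code.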
const cliMain = async (args, logger2) => {
  const parsedArgs = parseArgs(args, argOptionMap);
  if (!parsedArgs.command && (parsedArgs.options.help || parsedArgs.options.h)) {
    await showHelp(logger2);
    return 1;
  }
  switch (parsedArgs.command) {
    case "dump":
      return await dumpCommand(parsedArgs, logger2);
    case "pack":
      return await packCommand(parsedArgs, logger2);
    case "publish":
      return await publishCommand(parsedArgs, logger2);
    default:
      if (parsedArgs.command) {
        logger2.error(`Unknown command: ${parsedArgs.command}`);
      } else {
        logger2.error(`Unknown command`);
      }
      logger2.error('Run "screw-up --help" for usage information.');
      return 1;
  }
};
const logger = createConsoleLogger();
cliMain(
  process.argv.slice(2),
  // Remove 'node' and script path
  logger
).then((code) => process.exit(code)).catch((error) => {
  logger.error(`CLI error: ${error}`);
  process.exit(1);
});
//# sourceMappingURL=main.js.map