screw-up 0.9.1 → 0.11.0
This diff shows the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/README.md +7 -323
- package/README_pack.md +63 -0
- package/dist/analyzer.d.ts +8 -0
- package/dist/analyzer.d.ts.map +1 -0
- package/dist/cli-internal.d.ts +12 -1
- package/dist/cli-internal.d.ts.map +1 -1
- package/dist/cli.cjs +462 -69
- package/dist/cli.cjs.map +1 -0
- package/dist/cli.js +465 -72
- package/dist/cli.js.map +1 -0
- package/dist/index.cjs +31 -24
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.ts +2 -45
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +31 -23
- package/dist/index.js.map +1 -0
- package/dist/internal--D7IlmDn.cjs +1827 -0
- package/dist/internal--D7IlmDn.cjs.map +1 -0
- package/dist/internal-D-ECO0sh.js +1811 -0
- package/dist/internal-D-ECO0sh.js.map +1 -0
- package/dist/internal.d.ts +15 -23
- package/dist/internal.d.ts.map +1 -1
- package/dist/types.d.ts +60 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/vite-plugin.d.ts +17 -0
- package/dist/vite-plugin.d.ts.map +1 -0
- package/package.json +28 -7
- package/LICENSE +0 -21
- package/dist/internal-BJ2gdqpB.cjs +0 -119
- package/dist/internal-JwF_Mrdt.js +0 -120
package/dist/cli.js
CHANGED
```diff
@@ -1,83 +1,377 @@
 #!/usr/bin/env node
 import { resolve, join } from "path";
-import {
-import {
+import { createWriteStream, createReadStream, existsSync } from "fs";
+import { stat, mkdir, lstat, mkdtemp, rm } from "fs/promises";
 import { spawn } from "child_process";
 import { glob } from "glob";
-import {
-import
-import
-
-
-
-
-
-
-
-
+import { Readable } from "stream";
+import { createGzip } from "zlib";
+import { a as resolveRawPackageJsonObject } from "./internal-D-ECO0sh.js";
+/*!
+ * name: tar-vern
+ * version: 0.3.0
+ * description: Tape archiver library for Typescript
+ * author: Kouji Matsui (@kekyo@mi.kekyo.net)
+ * license: MIT
+ * repository.url: https://github.com/kekyo/tar-vern.git
+ */
+const getUName = (candidateName, candidateId, reflectStat) => {
+  return "root";
 };
-const
-
-
-
-
-
+const getBuffer = (data) => {
+  return Buffer.isBuffer(data) ? data : Buffer.from(data, "utf8");
+};
+const createFileItem = async (path, content, options) => {
+  var _a, _b, _c, _d, _e, _f;
+  const mode = (_a = options == null ? void 0 : options.mode) != null ? _a : 420;
+  const uid = (_b = options == null ? void 0 : options.uid) != null ? _b : 0;
+  const gid = (_c = options == null ? void 0 : options.gid) != null ? _c : 0;
+  const date = (_d = options == null ? void 0 : options.date) != null ? _d : /* @__PURE__ */ new Date();
+  const uname = (_e = options == null ? void 0 : options.uname) != null ? _e : "root";
+  const gname = (_f = options == null ? void 0 : options.gname) != null ? _f : "root";
+  return {
+    kind: "file",
+    path,
+    mode,
+    uname,
+    gname,
+    uid,
+    gid,
+    date,
+    content
+  };
+};
+const createReadableFileItem = async (path, readable, options) => {
+  var _a, _b, _c, _d, _e, _f;
+  const mode = (_a = options == null ? void 0 : options.mode) != null ? _a : 420;
+  const uid = (_b = options == null ? void 0 : options.uid) != null ? _b : 0;
+  const gid = (_c = options == null ? void 0 : options.gid) != null ? _c : 0;
+  const date = (_d = options == null ? void 0 : options.date) != null ? _d : /* @__PURE__ */ new Date();
+  const uname = (_e = options == null ? void 0 : options.uname) != null ? _e : "root";
+  const gname = (_f = options == null ? void 0 : options.gname) != null ? _f : "root";
+  let length = options == null ? void 0 : options.length;
+  if (!length) {
+    const chunks = [];
+    length = 0;
+    for await (const chunk of readable) {
+      const buffer = getBuffer(chunk);
+      chunks.push(buffer);
+      length += buffer.length;
+    }
+    return {
+      kind: "file",
+      path,
+      mode,
+      uname,
+      gname,
+      uid,
+      gid,
+      date,
+      content: {
+        kind: "readable",
+        length,
+        readable: Readable.from(chunks)
+      }
+    };
+  } else {
+    return {
+      kind: "file",
+      path,
+      mode,
+      uname,
+      gname,
+      uid,
+      gid,
+      date,
+      content: {
+        kind: "readable",
+        length,
+        readable
+      }
+    };
+  }
+};
+const createReadFileItem = async (path, filePath, reflectStat, options) => {
+  const stats = await stat(filePath);
+  const reader = createReadStream(filePath);
+  const mode = stats.mode;
+  const uid = stats.uid;
+  const gid = stats.gid;
+  const date = stats.mtime;
+  const uname = getUName(options == null ? void 0 : options.uname, stats.uid);
+  const gname = getUName(options == null ? void 0 : options.gname, stats.gid);
+  return await createReadableFileItem(path, reader, {
+    length: stats.size,
+    mode,
+    uname,
+    gname,
+    uid,
+    gid,
+    date
   });
-
-
-
-
-
-writer.on("
+};
+const storeReaderToFile = (reader, path) => {
+  const writer = createWriteStream(path);
+  reader.pipe(writer);
+  return new Promise((res, rej) => {
+    writer.on("finish", res);
+    writer.on("error", rej);
+    reader.on("error", rej);
   });
 };
-const
+const utf8ByteLength = (str) => {
+  return Buffer.byteLength(str, "utf8");
+};
+const truncateUtf8Safe = (str, maxBytes) => {
+  let total = 0;
+  let i = 0;
+  while (i < str.length) {
+    const codePoint = str.codePointAt(i);
+    const char = String.fromCodePoint(codePoint);
+    const charBytes = Buffer.byteLength(char, "utf8");
+    if (total + charBytes > maxBytes) break;
+    total += charBytes;
+    i += char.length;
+  }
+  return str.slice(0, i);
+};
+const MAX_NAME = 100;
+const MAX_PREFIX = 155;
+const splitPath = (path) => {
+  var _a;
+  if (utf8ByteLength(path) <= MAX_NAME) {
+    return { prefix: "", name: path };
+  }
+  const parts = path.split("/");
+  let name = (_a = parts.pop()) != null ? _a : "";
+  let prefix = parts.join("/");
+  if (utf8ByteLength(name) > MAX_NAME) {
+    name = truncateUtf8Safe(name, MAX_NAME);
+  }
+  while (utf8ByteLength(prefix) > MAX_PREFIX) {
+    prefix = truncateUtf8Safe(prefix, MAX_PREFIX);
+  }
+  return { prefix, name };
+};
+const getOctalBytes = (value, length) => {
+  const str = value.toString(8).padStart(length - 1, "0") + "\0";
+  return Buffer.from(str, "ascii");
+};
+const getPaddedBytes = (buffer) => {
+  const extra = buffer.length % 512;
+  if (extra === 0) {
+    return buffer;
+  } else {
+    return Buffer.concat([buffer, Buffer.alloc(512 - extra, 0)]);
+  }
+};
+const terminatorBytes = Buffer.alloc(1024, 0);
+const createTarHeader = (type, path, size, mode, uname, gname, uid, gid, date) => {
+  const buffer = Buffer.alloc(512, 0);
+  const { name, prefix } = splitPath(path);
+  buffer.write(name, 0, 100, "utf8");
+  getOctalBytes(mode & 4095, 8).copy(buffer, 100);
+  getOctalBytes(uid, 8).copy(buffer, 108);
+  getOctalBytes(gid, 8).copy(buffer, 116);
+  getOctalBytes(size, 12).copy(buffer, 124);
+  getOctalBytes(Math.floor(date.getTime() / 1e3), 12).copy(buffer, 136);
+  Buffer.from("        ", "ascii").copy(buffer, 148);
+  if (type === "file") {
+    buffer.write("0", 156, 1, "ascii");
+  } else {
+    buffer.write("5", 156, 1, "ascii");
+  }
+  buffer.write("ustar\0", 257, 6, "ascii");
+  buffer.write("00", 263, 2, "ascii");
+  buffer.write(uname, 265, 32, "utf8");
+  buffer.write(gname, 297, 32, "utf8");
+  buffer.write(prefix, 345, 155, "utf8");
+  let sum = 0;
+  for (let i = 0; i < 512; i++) {
+    sum += buffer[i];
+  }
+  getOctalBytes(sum, 8).copy(buffer, 148);
+  return buffer;
+};
+const createTarPacker = (entryItemGenerator, compressionType, signal) => {
+  const entryItemIterator = async function* () {
+    for await (const entryItem of entryItemGenerator) {
+      switch (entryItem.kind) {
+        case "file": {
+          const entryItemContent = entryItem.content;
+          if (typeof entryItemContent === "string" || Buffer.isBuffer(entryItemContent)) {
+            const contentBytes = getBuffer(entryItemContent);
+            const tarHeaderBytes = createTarHeader(
+              "file",
+              entryItem.path,
+              contentBytes.length,
+              entryItem.mode,
+              entryItem.uname,
+              entryItem.gname,
+              entryItem.uid,
+              entryItem.gid,
+              entryItem.date
+            );
+            yield tarHeaderBytes;
+            const totalPaddedContentBytes = getPaddedBytes(contentBytes);
+            yield totalPaddedContentBytes;
+          } else {
+            const tarHeaderBytes = createTarHeader(
+              "file",
+              entryItem.path,
+              entryItemContent.length,
+              entryItem.mode,
+              entryItem.uname,
+              entryItem.gname,
+              entryItem.uid,
+              entryItem.gid,
+              entryItem.date
+            );
+            yield tarHeaderBytes;
+            let position = 0;
+            switch (entryItemContent.kind) {
+              case "generator": {
+                for await (const contentBytes of entryItemContent.generator) {
+                  yield contentBytes;
+                  position += contentBytes.length;
+                }
+                break;
+              }
+              case "readable": {
+                for await (const content of entryItemContent.readable) {
+                  const contentBytes = getBuffer(content);
+                  yield contentBytes;
+                  position += contentBytes.length;
+                }
+                break;
+              }
+            }
+            if (position % 512 !== 0) {
+              yield Buffer.alloc(512 - position % 512, 0);
+            }
+          }
+          break;
+        }
+        case "directory": {
+          const tarHeaderBytes = createTarHeader(
+            "directory",
+            entryItem.path,
+            0,
+            entryItem.mode,
+            entryItem.uname,
+            entryItem.gname,
+            entryItem.uid,
+            entryItem.gid,
+            entryItem.date
+          );
+          yield tarHeaderBytes;
+          break;
+        }
+      }
+    }
+    yield terminatorBytes;
+  };
+  const ct = compressionType;
+  switch (ct) {
+    case "none": {
+      return Readable.from(entryItemIterator());
+    }
+    case "gzip": {
+      const gzipStream = createGzip({ level: 9 });
+      const entryItemStream = Readable.from(entryItemIterator());
+      entryItemStream.pipe(gzipStream);
+      return gzipStream;
+    }
+  }
+};
+const createPackEntryGenerator = async function* (targetDir, resolvedPackageJson, readmeReplacementPath) {
+  var _a;
+  const packageJsonContent = JSON.stringify(resolvedPackageJson, null, 2);
+  yield await createFileItem("package/package.json", packageJsonContent);
+  const distributionFileGlobs = (_a = resolvedPackageJson == null ? void 0 : resolvedPackageJson.files) != null ? _a : ["**/*"];
+  const packingFilePaths = (await Promise.all(
+    distributionFileGlobs.map(async (pattern) => {
+      const fullPath = resolve(targetDir, pattern);
+      try {
+        if (existsSync(fullPath) && (await lstat(fullPath)).isDirectory()) {
+          return await glob(`${pattern}/**/*`, { cwd: targetDir });
+        }
+        return await glob(pattern, { cwd: targetDir });
+      } catch (error) {
+        return await glob(pattern, { cwd: targetDir });
+      }
+    })
+  )).flat();
+  for (const packingFilePath of packingFilePaths) {
+    if (packingFilePath !== "package.json") {
+      const fullPath = resolve(targetDir, packingFilePath);
+      const stat2 = await lstat(fullPath);
+      if (stat2.isFile()) {
+        if (packingFilePath === "README.md" && readmeReplacementPath) {
+          yield await createReadFileItem("package/README.md", readmeReplacementPath);
+        } else {
+          yield await createReadFileItem(`package/${packingFilePath}`, fullPath);
+        }
+      }
+    }
+  }
+  if (readmeReplacementPath && !packingFilePaths.includes("README.md")) {
+    yield await createReadFileItem("package/README.md", readmeReplacementPath);
+  }
+};
+const packAssets = async (targetDir, outputDir, checkWorkingDirectoryStatus, inheritableFields, readmeReplacementPath) => {
   var _a, _b, _c, _d;
   if (!existsSync(targetDir)) {
     return void 0;
   }
-let
+  let result;
   try {
-
+    result = await resolveRawPackageJsonObject(
+      targetDir,
+      checkWorkingDirectoryStatus,
+      inheritableFields
+    );
   } catch (error) {
     return void 0;
   }
+  const { packageJson: resolvedPackageJson, sourceMap } = result;
   if (resolvedPackageJson == null ? void 0 : resolvedPackageJson.private) {
     return void 0;
   }
-
-
-
-  const
-
-
-  const packingFilePaths = distributionFileGlobs.map((fg) => glob.sync(fg, { cwd: targetDir })).flat();
-  for (const packingFilePath of packingFilePaths) {
-    const fullPath = resolve(targetDir, packingFilePath);
-    const stat2 = await lstat(fullPath);
-    if (stat2.isFile() && packingFilePath !== "package.json") {
-      await addPackFileEntry(pack, targetDir, packingFilePath, stat2);
-    }
-  }
-  pack.finalize();
-  if (!existsSync(outputDir)) {
-    await mkdir(outputDir, { recursive: true });
+  let finalReadmeReplacementPath = readmeReplacementPath;
+  if (!finalReadmeReplacementPath && (resolvedPackageJson == null ? void 0 : resolvedPackageJson.readme)) {
+    const readmeSourceDir = (_a = sourceMap.get("readme")) != null ? _a : targetDir;
+    const packageReadmePath = resolve(readmeSourceDir, resolvedPackageJson.readme);
+    if (existsSync(packageReadmePath)) {
+      finalReadmeReplacementPath = packageReadmePath;
     }
-  const outputFile = resolve(outputDir, outputFileName);
-  const outputStream = createWriteStream(outputFile);
-  const gzip = zlib.createGzip();
-  await new Promise((resolve2, reject) => {
-    pack.pipe(gzip).pipe(outputStream);
-    outputStream.on("finish", () => resolve2());
-    outputStream.on("error", reject);
-    pack.on("error", reject);
-    gzip.on("error", reject);
-  });
-  } finally {
-    pack.destroy();
   }
+  if (finalReadmeReplacementPath && !existsSync(finalReadmeReplacementPath)) {
+    throw new Error(`README replacement file not found: ${finalReadmeReplacementPath}`);
+  }
+  const outputFileName = `${(_c = (_b = resolvedPackageJson == null ? void 0 : resolvedPackageJson.name) == null ? void 0 : _b.replace("/", "-")) != null ? _c : "package"}-${(_d = resolvedPackageJson == null ? void 0 : resolvedPackageJson.version) != null ? _d : "0.0.0"}.tgz`;
+  if (!existsSync(outputDir)) {
+    await mkdir(outputDir, { recursive: true });
+  }
+  const packer = createTarPacker(
+    createPackEntryGenerator(targetDir, resolvedPackageJson, finalReadmeReplacementPath),
+    "gzip"
+  );
+  const outputFile = resolve(outputDir, outputFileName);
+  await storeReaderToFile(packer, outputFile);
   return resolvedPackageJson;
 };
+const getComputedPackageJsonObject = async (targetDir, checkWorkingDirectoryStatus, inheritableFields) => {
+  if (!existsSync(targetDir)) {
+    return void 0;
+  }
+  const result = await resolveRawPackageJsonObject(
+    targetDir,
+    checkWorkingDirectoryStatus,
+    inheritableFields
+  );
+  return result.packageJson;
+};
 const parseArgs = (argv) => {
   const args = argv.slice(2);
   const result = {
@@ -94,7 +388,7 @@ const parseArgs = (argv) => {
     if (arg.startsWith("--")) {
       const optionName = arg.slice(2);
       const nextArg = args[i2 + 1];
-      if (nextArg && !nextArg.startsWith("-")) {
+      if (nextArg !== void 0 && !nextArg.startsWith("-")) {
         result.options[optionName] = nextArg;
         i2 += 2;
       } else {
@@ -119,7 +413,7 @@ const parseArgs = (argv) => {
     if (arg.startsWith("--")) {
       const optionName = arg.slice(2);
       const nextArg = args[i + 1];
-      if (nextArg && !nextArg.startsWith("-")) {
+      if (nextArg !== void 0 && !nextArg.startsWith("-")) {
        result.options[optionName] = nextArg;
        i += 2;
      } else {
@@ -137,8 +431,28 @@ const parseArgs = (argv) => {
   }
   return result;
 };
+const defaultInheritableFields = /* @__PURE__ */ new Set([
+  "version",
+  "description",
+  "author",
+  "license",
+  "repository",
+  "keywords",
+  "homepage",
+  "bugs",
+  "readme"
+]);
+const parseInheritableFields = (inheritableFieldsOption) => {
+  if (typeof inheritableFieldsOption !== "string") {
+    return defaultInheritableFields;
+  }
+  if (!inheritableFieldsOption.trim()) {
+    return /* @__PURE__ */ new Set();
+  }
+  return new Set(inheritableFieldsOption.split(",").map((field) => field.trim()).filter((field) => field.length > 0));
+};
 const showHelp = () => {
-  console.log(`screw-up - Easy package metadata inserter CLI [${
+  console.log(`screw-up - Easy package metadata inserter CLI [${void 0}]
 Copyright (c) ${"Kouji Matsui (@kekyo@mi.kekyo.net)"}
 Repository: ${"https://github.com/kekyo/screw-up.git"}
 License: ${"MIT"}
@@ -148,24 +462,29 @@ Usage: screw-up <command> [options]
 Commands:
   pack [directory]                 Pack the project into a tar archive
   publish [directory|package.tgz]  Publish the project
+  dump [directory]                 Dump computed package.json as JSON
 
 Options:
   -h, --help                       Show help
 
 Pack Options:
   --pack-destination <path>        Directory to write the tarball
+  --readme <path>                  Replace README.md with specified file
+  --inheritable-fields <list>      Comma-separated list of fields to inherit from parent (default: version,description,author,license,repository,keywords,homepage,bugs,readme)
+  --no-wds                         Do not check working directory status to increase version
 
 Publish Options:
   All npm publish options are supported (e.g., --dry-run, --tag, --access, --registry)
 
 Examples:
-  screw-up pack
-  screw-up pack ./my-project
-  screw-up pack --pack-destination ./dist
-  screw-up
-  screw-up publish
-  screw-up publish
-  screw-up publish
+  screw-up pack                            # Pack current directory
+  screw-up pack ./my-project               # Pack specific directory
+  screw-up pack --pack-destination ./dist  # Pack to specific output directory
+  screw-up pack --readme ./README_pack.md  # Pack with custom README
+  screw-up publish                         # Publish current directory
+  screw-up publish ./my-project            # Publish specific directory
+  screw-up publish package.tgz             # Publish existing tarball
+  screw-up publish --dry-run --tag beta    # Publish with npm options
 `);
 };
 const showPackHelp = () => {
@@ -178,6 +497,9 @@ Arguments:
 
 Options:
   --pack-destination <path>    Directory to write the tarball
+  --readme <path>              Replace README.md with specified file
+  --inheritable-fields <list>  Comma-separated list of fields to inherit from parent
+  --no-wds                     Do not check working directory status to increase version
   -h, --help                   Show help for pack command
 `);
 };
@@ -211,11 +533,22 @@ const packCommand = async (args) => {
   }
   const directory = args.positional[0];
   const packDestination = args.options["pack-destination"];
+  const readmeOption = args.options["readme"];
+  const inheritableFieldsOption = args.options["inheritable-fields"];
+  const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
   const targetDir = resolve(directory != null ? directory : process.cwd());
   const outputDir = packDestination ? resolve(packDestination) : process.cwd();
+  const readmeReplacementPath = readmeOption ? resolve(readmeOption) : void 0;
+  const inheritableFields = parseInheritableFields(inheritableFieldsOption);
   console.log(`[screw-up/cli]: pack: Creating archive of ${targetDir}...`);
   try {
-    const metadata = await packAssets(
+    const metadata = await packAssets(
+      targetDir,
+      outputDir,
+      checkWorkingDirectoryStatus,
+      inheritableFields,
+      readmeReplacementPath
+    );
     if (metadata) {
       console.log(`[screw-up/cli]: pack: Archive created successfully: ${outputDir}`);
     } else {
@@ -232,7 +565,7 @@ const publishCommand = async (args) => {
     showPublishHelp();
     return;
   }
-  const runNpmPublish = async (tarballPath, npmOptions2
+  const runNpmPublish = async (tarballPath, npmOptions2) => {
    console.log(`[screw-up/cli]: publish: Publishing ${tarballPath} to npm...`);
    const publishArgs = ["publish", tarballPath, ...npmOptions2];
    if (process.env.SCREW_UP_TEST_MODE === "true") {
@@ -256,9 +589,12 @@
     });
   };
   const path = args.positional[0];
+  const inheritableFieldsOption = args.options["inheritable-fields"];
+  const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
+  const inheritableFields = parseInheritableFields(inheritableFieldsOption);
   const npmOptions = [];
   Object.entries(args.options).forEach(([key, value]) => {
-    if (key === "help" || key === "h") return;
+    if (key === "help" || key === "h" || key === "no-wds" || key === "inheritable-fields") return;
     if (value === true) {
       npmOptions.push(`--${key}`);
     } else {
@@ -271,7 +607,13 @@ const publishCommand = async (args) => {
     const outputDir = await mkdtemp("screw-up-publish-");
     console.log(`[screw-up/cli]: publish: Creating archive of ${targetDir}...`);
     try {
-      const metadata = await packAssets(
+      const metadata = await packAssets(
+        targetDir,
+        outputDir,
+        checkWorkingDirectoryStatus,
+        inheritableFields,
+        void 0
+      );
       if (metadata) {
         const archiveName = `${metadata.name}-${metadata.version}.tgz`;
         const archivePath = join(outputDir, archiveName);
@@ -292,7 +634,13 @@ const publishCommand = async (args) => {
     const outputDir = await mkdtemp("screw-up-publish-");
     console.log(`[screw-up/cli]: publish: Creating archive of ${targetDir}...`);
     try {
-      const metadata = await packAssets(
+      const metadata = await packAssets(
+        targetDir,
+        outputDir,
+        checkWorkingDirectoryStatus,
+        inheritableFields,
+        void 0
+      );
       if (metadata) {
         const archiveName = `${metadata.name}-${metadata.version}.tgz`;
         const archivePath = join(outputDir, archiveName);
@@ -317,6 +665,47 @@ const publishCommand = async (args) => {
     process.exit(1);
   }
 };
+const showDumpHelp = () => {
+  console.log(`Usage: screw-up dump [options] [directory]
+
+Dump computed package.json as JSON
+
+Arguments:
+  directory                    Directory to dump package.json from (default: current directory)
+
+Options:
+  --inheritable-fields <list>  Comma-separated list of fields to inherit from parent
+  --no-wds                     Do not check working directory status to increase version
+  -h, --help                   Show help for dump command
+`);
+};
+const dumpCommand = async (args) => {
+  if (args.options.help || args.options.h) {
+    showDumpHelp();
+    return;
+  }
+  const directory = args.positional[0];
+  const inheritableFieldsOption = args.options["inheritable-fields"];
+  const checkWorkingDirectoryStatus = args.options["no-wds"] ? false : true;
+  const inheritableFields = parseInheritableFields(inheritableFieldsOption);
+  const targetDir = resolve(directory != null ? directory : process.cwd());
+  try {
+    const computedPackageJson = await getComputedPackageJsonObject(
+      targetDir,
+      checkWorkingDirectoryStatus,
+      inheritableFields
+    );
+    if (computedPackageJson) {
+      console.log(JSON.stringify(computedPackageJson, null, 2));
+    } else {
+      console.error(`[screw-up/cli]: dump: Unable to read package.json from: ${targetDir}`);
+      process.exit(1);
+    }
+  } catch (error) {
+    console.error("[screw-up/cli]: dump: Failed to dump package.json:", error);
+    process.exit(1);
+  }
+};
 const main = async () => {
   const args = parseArgs(process.argv);
   if (args.options.help || args.options.h || !args.command || args.command === "help" || args.command === "--help") {
@@ -330,6 +719,9 @@ const main = async () => {
     case "publish":
       await publishCommand(args);
       break;
+    case "dump":
+      await dumpCommand(args);
+      break;
     default:
       console.error(`Unknown command: ${args.command}`);
       console.error('Run "screw-up --help" for usage information.');
@@ -340,3 +732,4 @@ main().catch((error) => {
   console.error("CLI error:", error);
   process.exit(1);
 });
+//# sourceMappingURL=cli.js.map
```
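Much of the added code above is an inlined copy of tar-vern's ustar writer (note the embedded `/*! name: tar-vern */` banner). As a reading aid, here is a minimal TypeScript sketch of the 512-byte header block that the inlined `createTarHeader` fills in. The offsets, field widths, and checksum rule mirror the diff above; the `octal` and `ustarHeader` helper names are illustrative only, not part of the package.

```typescript
import { Buffer } from "node:buffer";

// NUL-terminated, zero-padded ASCII octal, as getOctalBytes does in the diff.
const octal = (value: number, width: number): Buffer =>
  Buffer.from(value.toString(8).padStart(width - 1, "0") + "\0", "ascii");

// Sketch of one 512-byte ustar header block for a regular file.
const ustarHeader = (name: string, size: number, mtime: Date): Buffer => {
  const block = Buffer.alloc(512, 0);
  block.write(name, 0, 100, "utf8");        // name: up to 100 bytes
  octal(0o644, 8).copy(block, 100);         // mode
  octal(0, 8).copy(block, 108);             // uid
  octal(0, 8).copy(block, 116);             // gid
  octal(size, 12).copy(block, 124);         // size in bytes
  octal(Math.floor(mtime.getTime() / 1000), 12).copy(block, 136); // mtime, epoch seconds
  block.fill(0x20, 148, 156);               // checksum field holds 8 spaces while summing
  block.write("0", 156, 1, "ascii");        // typeflag: "0" file, "5" directory
  block.write("ustar\0", 257, 6, "ascii");  // magic
  block.write("00", 263, 2, "ascii");       // version
  let sum = 0;
  for (let i = 0; i < 512; i++) sum += block[i]; // unsigned byte sum of the block
  octal(sum, 8).copy(block, 148);           // overwrite the spaces with the real checksum
  return block;
};
```

Each header is followed by the entry's bytes padded to the next 512-byte boundary, and the stream ends with two zero-filled blocks; that is what `getPaddedBytes` and the 1024-byte `terminatorBytes` implement, with the whole stream piped through `createGzip` to produce the `.tgz` output.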