screw-up 0.7.1 → 0.9.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +141 -1
- package/dist/cli-internal.d.ts +14 -0
- package/dist/cli-internal.d.ts.map +1 -0
- package/dist/cli.cjs +343 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +342 -0
- package/dist/index.cjs +43 -82
- package/dist/index.d.ts +23 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +43 -83
- package/dist/internal-BJ2gdqpB.cjs +119 -0
- package/dist/internal-JwF_Mrdt.js +120 -0
- package/dist/internal.d.ts +18 -12
- package/dist/internal.d.ts.map +1 -1
- package/package.json +15 -7
package/dist/cli.js
ADDED
@@ -0,0 +1,342 @@
+#!/usr/bin/env node
+import { resolve, join } from "path";
+import { existsSync, createWriteStream, createReadStream } from "fs";
+import { lstat, mkdir, mkdtemp, rm, stat } from "fs/promises";
+import { spawn } from "child_process";
+import { glob } from "glob";
+import { a as resolveRawPackageJson } from "./internal-JwF_Mrdt.js";
+import tar from "tar-stream";
+import zlib from "zlib";
+const addPackContentEntry = async (pack, name, content) => {
+  pack.entry({
+    name,
+    type: "file",
+    mode: 420,
+    mtime: /* @__PURE__ */ new Date(),
+    size: Buffer.byteLength(content, "utf8")
+  }, content);
+};
+const addPackFileEntry = async (pack, baseDir, path, stat2) => {
+  const writer = pack.entry({
+    name: path,
+    mode: stat2.mode,
+    mtime: stat2.mtime,
+    size: stat2.size
+  });
+  const stream = createReadStream(resolve(baseDir, path));
+  stream.pipe(writer);
+  return new Promise((resolve2, reject) => {
+    stream.on("end", resolve2);
+    stream.on("error", reject);
+    writer.on("error", reject);
+  });
+};
+const packAssets = async (targetDir, outputDir) => {
+  var _a, _b, _c, _d;
+  if (!existsSync(targetDir)) {
+    return void 0;
+  }
+  let resolvedPackageJson;
+  try {
+    resolvedPackageJson = await resolveRawPackageJson(targetDir);
+  } catch (error) {
+    return void 0;
+  }
+  if (resolvedPackageJson == null ? void 0 : resolvedPackageJson.private) {
+    return void 0;
+  }
+  const outputFileName = `${(_b = (_a = resolvedPackageJson == null ? void 0 : resolvedPackageJson.name) == null ? void 0 : _a.replace("/", "-")) != null ? _b : "package"}-${(_c = resolvedPackageJson == null ? void 0 : resolvedPackageJson.version) != null ? _c : "0.0.0"}.tgz`;
+  const pack = tar.pack();
+  try {
+    const packageJsonContent = JSON.stringify(resolvedPackageJson, null, 2);
+    await addPackContentEntry(pack, "package.json", packageJsonContent);
+    const distributionFileGlobs = (_d = resolvedPackageJson == null ? void 0 : resolvedPackageJson.files) != null ? _d : ["**/*"];
+    const packingFilePaths = distributionFileGlobs.map((fg) => glob.sync(fg, { cwd: targetDir })).flat();
+    for (const packingFilePath of packingFilePaths) {
+      const fullPath = resolve(targetDir, packingFilePath);
+      const stat2 = await lstat(fullPath);
+      if (stat2.isFile() && packingFilePath !== "package.json") {
+        await addPackFileEntry(pack, targetDir, packingFilePath, stat2);
+      }
+    }
+    pack.finalize();
+    if (!existsSync(outputDir)) {
+      await mkdir(outputDir, { recursive: true });
+    }
+    const outputFile = resolve(outputDir, outputFileName);
+    const outputStream = createWriteStream(outputFile);
+    const gzip = zlib.createGzip();
+    await new Promise((resolve2, reject) => {
+      pack.pipe(gzip).pipe(outputStream);
+      outputStream.on("finish", () => resolve2());
+      outputStream.on("error", reject);
+      pack.on("error", reject);
+      gzip.on("error", reject);
+    });
+  } finally {
+    pack.destroy();
+  }
+  return resolvedPackageJson;
+};
+const parseArgs = (argv) => {
+  const args = argv.slice(2);
+  const result = {
+    positional: [],
+    options: {}
+  };
+  if (args.length === 0) {
+    return result;
+  }
+  if (args[0].startsWith("-")) {
+    let i2 = 0;
+    while (i2 < args.length) {
+      const arg = args[i2];
+      if (arg.startsWith("--")) {
+        const optionName = arg.slice(2);
+        const nextArg = args[i2 + 1];
+        if (nextArg && !nextArg.startsWith("-")) {
+          result.options[optionName] = nextArg;
+          i2 += 2;
+        } else {
+          result.options[optionName] = true;
+          i2 += 1;
+        }
+      } else if (arg.startsWith("-")) {
+        const optionName = arg.slice(1);
+        result.options[optionName] = true;
+        i2 += 1;
+      } else {
+        result.positional.push(arg);
+        i2 += 1;
+      }
+    }
+    return result;
+  }
+  result.command = args[0];
+  let i = 1;
+  while (i < args.length) {
+    const arg = args[i];
+    if (arg.startsWith("--")) {
+      const optionName = arg.slice(2);
+      const nextArg = args[i + 1];
+      if (nextArg && !nextArg.startsWith("-")) {
+        result.options[optionName] = nextArg;
+        i += 2;
+      } else {
+        result.options[optionName] = true;
+        i += 1;
+      }
+    } else if (arg.startsWith("-")) {
+      const optionName = arg.slice(1);
+      result.options[optionName] = true;
+      i += 1;
+    } else {
+      result.positional.push(arg);
+      i += 1;
+    }
+  }
+  return result;
+};
+const showHelp = () => {
+  console.log(`screw-up - Easy package metadata inserter CLI [${"0.9.1"}]
+Copyright (c) ${"Kouji Matsui (@kekyo@mi.kekyo.net)"}
+Repository: ${"https://github.com/kekyo/screw-up.git"}
+License: ${"MIT"}
+
+Usage: screw-up <command> [options]
+
+Commands:
+pack [directory] Pack the project into a tar archive
+publish [directory|package.tgz] Publish the project
+
+Options:
+-h, --help Show help
+
+Pack Options:
+--pack-destination <path> Directory to write the tarball
+
+Publish Options:
+All npm publish options are supported (e.g., --dry-run, --tag, --access, --registry)
+
+Examples:
+screw-up pack # Pack current directory
+screw-up pack ./my-project # Pack specific directory
+screw-up pack --pack-destination ./dist # Pack to specific output directory
+screw-up publish # Publish current directory
+screw-up publish ./my-project # Publish specific directory
+screw-up publish package.tgz # Publish existing tarball
+screw-up publish --dry-run --tag beta # Publish with npm options
+`);
+};
+const showPackHelp = () => {
+  console.log(`Usage: screw-up pack [options] [directory]
+
+Pack the project into a tar archive
+
+Arguments:
+directory Directory to pack (default: current directory)
+
+Options:
+--pack-destination <path> Directory to write the tarball
+-h, --help Show help for pack command
+`);
+};
+const showPublishHelp = () => {
+  console.log(`Usage: screw-up publish [options] [directory|package.tgz]
+
+Publish the project
+
+Arguments:
+directory|package.tgz Directory to pack and publish, or existing tarball to publish
+
+Options:
+All npm publish options are supported, including:
+--dry-run Perform a dry run
+--tag <tag> Tag for the published version
+--access <access> Access level (public or restricted)
+--registry <registry> Registry URL
+-h, --help Show help for publish command
+
+Examples:
+screw-up publish # Publish current directory
+screw-up publish ./my-project # Publish specific directory
+screw-up publish package.tgz # Publish existing tarball
+screw-up publish --dry-run --tag beta # Publish with options
+`);
+};
+const packCommand = async (args) => {
+  if (args.options.help || args.options.h) {
+    showPackHelp();
+    return;
+  }
+  const directory = args.positional[0];
+  const packDestination = args.options["pack-destination"];
+  const targetDir = resolve(directory != null ? directory : process.cwd());
+  const outputDir = packDestination ? resolve(packDestination) : process.cwd();
+  console.log(`[screw-up/cli]: pack: Creating archive of ${targetDir}...`);
+  try {
+    const metadata = await packAssets(targetDir, outputDir);
+    if (metadata) {
+      console.log(`[screw-up/cli]: pack: Archive created successfully: ${outputDir}`);
+    } else {
+      console.error(`[screw-up/cli]: pack: Unable to find any files to pack: ${targetDir}`);
+      process.exit(1);
+    }
+  } catch (error) {
+    console.error("[screw-up/cli]: pack: Failed to create archive:", error);
+    process.exit(1);
+  }
+};
+const publishCommand = async (args) => {
+  if (args.options.help || args.options.h) {
+    showPublishHelp();
+    return;
+  }
+  const runNpmPublish = async (tarballPath, npmOptions2 = []) => {
+    console.log(`[screw-up/cli]: publish: Publishing ${tarballPath} to npm...`);
+    const publishArgs = ["publish", tarballPath, ...npmOptions2];
+    if (process.env.SCREW_UP_TEST_MODE === "true") {
+      console.log(`[screw-up/cli]: TEST_MODE: Would execute: npm ${publishArgs.join(" ")}`);
+      console.log(`[screw-up/cli]: TEST_MODE: Tarball path: ${tarballPath}`);
+      console.log(`[screw-up/cli]: TEST_MODE: Options: ${npmOptions2.join(" ")}`);
+      console.log(`[screw-up/cli]: publish: Successfully published ${tarballPath}`);
+      return;
+    }
+    const npmProcess = spawn("npm", publishArgs, { stdio: "inherit" });
+    return new Promise((resolve2, reject) => {
+      npmProcess.on("close", (code) => {
+        if (code === 0) {
+          console.log(`[screw-up/cli]: publish: Successfully published ${tarballPath}`);
+          resolve2();
+        } else {
+          reject(new Error(`npm publish failed with exit code ${code}`));
+        }
+      });
+      npmProcess.on("error", reject);
+    });
+  };
+  const path = args.positional[0];
+  const npmOptions = [];
+  Object.entries(args.options).forEach(([key, value]) => {
+    if (key === "help" || key === "h") return;
+    if (value === true) {
+      npmOptions.push(`--${key}`);
+    } else {
+      npmOptions.push(`--${key}`, value);
+    }
+  });
+  try {
+    if (!path) {
+      const targetDir = process.cwd();
+      const outputDir = await mkdtemp("screw-up-publish-");
+      console.log(`[screw-up/cli]: publish: Creating archive of ${targetDir}...`);
+      try {
+        const metadata = await packAssets(targetDir, outputDir);
+        if (metadata) {
+          const archiveName = `${metadata.name}-${metadata.version}.tgz`;
+          const archivePath = join(outputDir, archiveName);
+          await runNpmPublish(archivePath, npmOptions);
+        } else {
+          console.error(`[screw-up/cli]: publish: Unable to find any files to pack: ${targetDir}`);
+          process.exit(1);
+        }
+      } finally {
+        await rm(outputDir, { recursive: true, force: true });
+      }
+    } else if (existsSync(path)) {
+      const pathStat = await stat(path);
+      if (pathStat.isFile() && (path.endsWith(".tgz") || path.endsWith(".tar.gz"))) {
+        await runNpmPublish(resolve(path), npmOptions);
+      } else if (pathStat.isDirectory()) {
+        const targetDir = resolve(path);
+        const outputDir = await mkdtemp("screw-up-publish-");
+        console.log(`[screw-up/cli]: publish: Creating archive of ${targetDir}...`);
+        try {
+          const metadata = await packAssets(targetDir, outputDir);
+          if (metadata) {
+            const archiveName = `${metadata.name}-${metadata.version}.tgz`;
+            const archivePath = join(outputDir, archiveName);
+            await runNpmPublish(archivePath, npmOptions);
+          } else {
+            console.error(`[screw-up/cli]: publish: Unable to find any files to pack: ${targetDir}`);
+            process.exit(1);
+          }
+        } finally {
+          await rm(outputDir, { recursive: true, force: true });
+        }
+      } else {
+        console.error(`[screw-up/cli]: publish: Invalid path - must be a directory or .tgz/.tar.gz file: ${path}`);
+        process.exit(1);
+      }
+    } else {
+      console.error(`[screw-up/cli]: publish: Path does not exist: ${path}`);
+      process.exit(1);
+    }
+  } catch (error) {
+    console.error("[screw-up/cli]: publish: Failed to publish:", error);
+    process.exit(1);
+  }
+};
+const main = async () => {
+  const args = parseArgs(process.argv);
+  if (args.options.help || args.options.h || !args.command || args.command === "help" || args.command === "--help") {
+    showHelp();
+    return;
+  }
+  switch (args.command) {
+    case "pack":
+      await packCommand(args);
+      break;
+    case "publish":
+      await publishCommand(args);
+      break;
+    default:
+      console.error(`Unknown command: ${args.command}`);
+      console.error('Run "screw-up --help" for usage information.');
+      process.exit(1);
+  }
+};
+main().catch((error) => {
+  console.error("CLI error:", error);
+  process.exit(1);
+});
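The new CLI entry point parses `process.argv` with the hand-rolled `parseArgs` above rather than an argument-parsing library: the first token that does not start with a dash becomes `command`, `--name value` pairs become string options, bare flags become `true`, and everything else is collected into `positional`. A minimal sketch of the resulting shape; the `ParsedArgs` name is illustrative and not exported by the package.

```typescript
// Hypothetical typing for the object returned by parseArgs in dist/cli.js.
interface ParsedArgs {
  command?: string;
  positional: string[];
  options: Record<string, string | true>;
}

// For example, `screw-up publish ./my-project --dry-run --tag beta` parses to:
const parsed: ParsedArgs = {
  command: "publish",
  positional: ["./my-project"],
  options: { "dry-run": true, tag: "beta" },
};
```

`publishCommand` then turns `options` back into npm flags (`--dry-run --tag beta`) and forwards them to `npm publish`.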
package/dist/index.cjs
CHANGED
@@ -1,85 +1,8 @@
 "use strict";
+Object.defineProperties(exports, { __esModule: { value: true }, [Symbol.toStringTag]: { value: "Module" } });
 const promises = require("fs/promises");
 const path = require("path");
-const
-const flattenObject = (obj, prefix, map) => {
-  for (const [key, value] of Object.entries(obj)) {
-    if (!value)
-      continue;
-    const fullKey = prefix ? `${prefix}.${key}` : key;
-    if (typeof value === "string") {
-      map[fullKey] = value;
-    } else if (Array.isArray(value)) {
-      map[fullKey] = value.map((v) => String(v)).join(",");
-    } else if (typeof value === "object") {
-      flattenObject(value, fullKey, map);
-    } else {
-      map[fullKey] = String(value);
-    }
-  }
-};
-const readPackageMetadata = async (packagePath) => {
-  try {
-    const content = await promises.readFile(packagePath, "utf-8");
-    const json = JSON.parse(content);
-    const map = {};
-    flattenObject(json, "", map);
-    return map;
-  } catch (error) {
-    console.warn(`Failed to read package.json from ${packagePath}:`, error);
-    return {};
-  }
-};
-const findWorkspaceRoot = async (startPath) => {
-  let currentPath = startPath;
-  while (currentPath !== path.dirname(currentPath)) {
-    const packageJsonPath = path.join(currentPath, "package.json");
-    if (fs.existsSync(packageJsonPath)) {
-      try {
-        const content = await promises.readFile(packageJsonPath, "utf-8");
-        const packageJson = JSON.parse(content);
-        if (packageJson.workspaces || fs.existsSync(path.join(currentPath, "pnpm-workspace.yaml")) || fs.existsSync(path.join(currentPath, "lerna.json"))) {
-          return currentPath;
-        }
-      } catch (error) {
-        console.warn(`Failed to parse package.json at ${packageJsonPath}:`, error);
-      }
-    }
-    currentPath = path.dirname(currentPath);
-  }
-  return void 0;
-};
-const mergePackageMetadata = (parentMetadata, childMetadata) => {
-  const merged = {};
-  for (const key in parentMetadata) {
-    const value = parentMetadata[key];
-    if (value !== void 0) {
-      merged[key] = value;
-    }
-  }
-  for (const key in childMetadata) {
-    const value = childMetadata[key];
-    if (value !== void 0) {
-      merged[key] = value;
-    }
-  }
-  return merged;
-};
-const resolvePackageMetadata = async (projectRoot) => {
-  const workspaceRoot = await findWorkspaceRoot(projectRoot);
-  if (!workspaceRoot) {
-    const localPackagePath = path.join(projectRoot, "package.json");
-    return await readPackageMetadata(localPackagePath);
-  }
-  const projectPackagePath = path.join(projectRoot, "package.json");
-  const rootPackagePath = path.join(workspaceRoot, "package.json");
-  let metadata = await readPackageMetadata(rootPackagePath);
-  if (projectPackagePath !== rootPackagePath && fs.existsSync(projectPackagePath)) {
-    const projectMetadata = await readPackageMetadata(projectPackagePath);
-    metadata = mergePackageMetadata(metadata, projectMetadata);
-  }
-  return metadata;
-};
+const internal = require("./internal-BJ2gdqpB.cjs");
 const generateBanner = (metadata, outputKeys) => {
   const parts = [];
   for (const key of outputKeys) {
@@ -100,20 +23,57 @@ const insertBannerHeader = (content, banner) => {
     return banner + "\n" + content;
   }
 };
+const sanitizeKey = (key) => {
+  return key.replace(/[^a-zA-Z0-9_]/g, "_").replace(/^(\d)/, "_$1");
+};
+const generateMetadataFile = (metadata, outputKeys) => {
+  const lines = [];
+  lines.push("// This file is auto-generated by screw-up plugin");
+  lines.push("// Do not edit manually");
+  lines.push("");
+  for (const key of outputKeys) {
+    const value = metadata[key];
+    if (value) {
+      const sanitizedKey = sanitizeKey(key);
+      const escapedValue = JSON.stringify(value);
+      lines.push(`export const ${sanitizedKey} = ${escapedValue};`);
+    }
+  }
+  lines.push("");
+  return lines.join("\n");
+};
 const screwUp = (options = {}) => {
   const {
     outputKeys = ["name", "version", "description", "author", "license", "repository.url"],
-    assetFilters = ["\\.d\\.ts$"]
+    assetFilters = ["\\.d\\.ts$"],
+    outputMetadataFile = false,
+    outputMetadataFilePath = "src/generated/packageMetadata.ts",
+    outputMetadataKeys = ["name", "version", "description", "author", "license", "repository.url"]
   } = options;
   const assetFiltersRegex = assetFilters.map((filter) => new RegExp(filter));
   let banner;
+  let metadata;
+  let projectRoot;
   return {
     name: "screw-up",
     apply: "build",
     async configResolved(config) {
-
+      projectRoot = config.root;
+      metadata = await internal.resolvePackageMetadata(config.root);
       banner = generateBanner(metadata, outputKeys);
     },
+    async buildStart() {
+      if (outputMetadataFile) {
+        const metadataContent = generateMetadataFile(metadata, outputMetadataKeys);
+        const metadataPath = path.join(projectRoot, outputMetadataFilePath);
+        try {
+          await promises.mkdir(path.dirname(metadataPath), { recursive: true });
+          await promises.writeFile(metadataPath, metadataContent);
+        } catch (error) {
+          console.warn(`Failed to write metadata file to ${metadataPath}:`, error);
+        }
+      }
+    },
     generateBundle(_options, bundle) {
       for (const fileName in bundle) {
         const chunk = bundle[fileName];
@@ -147,4 +107,5 @@ const screwUp = (options = {}) => {
     }
   };
 };
-
+exports.default = screwUp;
+exports.generateBanner = generateBanner;
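Besides moving the shared metadata helpers into the new `internal-BJ2gdqpB.cjs` chunk, this build also re-exports `generateBanner` from the package entry point (`exports.generateBanner` above, with a matching declaration in `index.d.ts` below). A hedged sketch of calling it directly; the metadata object is hand-written here, whereas the plugin normally resolves it from package.json, and the flat dotted-key shape is inferred from the `flattenObject` helper that previously lived in this file (presumably now provided by the internal chunk).

```typescript
import { generateBanner } from "screw-up";

// Flattened package.json metadata, with nested keys joined by dots
// (e.g. "repository.url"), as produced by the plugin's metadata resolver.
const metadata = {
  name: "my-lib",
  version: "1.2.3",
  license: "MIT",
  "repository.url": "https://example.com/my-lib.git",
};

// Returns the banner string the plugin would prepend to bundled chunks,
// emitting the requested keys in the given order.
const banner = generateBanner(metadata, ["name", "version", "license"]);
```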
package/dist/index.d.ts
CHANGED
@@ -1,5 +1,13 @@
 import { Plugin } from 'vite';
+import { PackageMetadata } from './internal.js';
 
+/**
+ * Generate banner string from package.json metadata
+ * @param metadata - Package metadata
+ * @param outputKeys - Array of keys to output in specified order
+ * @returns Banner string
+ */
+export declare const generateBanner: (metadata: PackageMetadata, outputKeys: string[]) => string;
 /**
  * screw-up options
  */
@@ -14,6 +22,21 @@ export interface ScrewUpOptions {
    * @default ['\.d\.ts$']
    */
   assetFilters?: string[];
+  /**
+   * Enable TypeScript metadata file generation
+   * @default false
+   */
+  outputMetadataFile?: boolean;
+  /**
+   * Output path for TypeScript metadata file
+   * @default 'src/generated/packageMetadata.ts'
+   */
+  outputMetadataFilePath?: string;
+  /**
+   * Array of keys to output in metadata file in the specified order
+   * @default ['name', 'version', 'description', 'author', 'license', 'repository.url']
+   */
+  outputMetadataKeys?: string[];
 }
 /**
  * Vite plugin that adds banner to the bundled code
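The three new `ScrewUpOptions` fields above drive the metadata-file generation added in `buildStart`: when `outputMetadataFile` is enabled, the plugin writes a TypeScript module of `export const` declarations for `outputMetadataKeys` to `outputMetadataFilePath`, resolved against the Vite project root. A minimal vite.config.ts sketch, assuming the plugin is consumed through the package's default export; the values shown are just the documented defaults with the feature switched on.

```typescript
import { defineConfig } from "vite";
import screwUp from "screw-up";

export default defineConfig({
  plugins: [
    screwUp({
      // New in this release: emit a generated metadata module during buildStart.
      outputMetadataFile: true,
      outputMetadataFilePath: "src/generated/packageMetadata.ts",
      outputMetadataKeys: ["name", "version", "description", "author", "license", "repository.url"],
    }),
  ],
});
```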
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,MAAM,CAAC;
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,MAAM,CAAC;AAGnC,OAAO,EAAE,eAAe,EAA0B,MAAM,eAAe,CAAC;AAExE;;;;;GAKG;AACH,eAAO,MAAM,cAAc,GAAI,UAAU,eAAe,EAAE,YAAY,MAAM,EAAE,KAAG,MAWhF,CAAC;AA4DF;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,EAAE,CAAC;IACtB;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;IACxB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;;OAGG;IACH,sBAAsB,CAAC,EAAE,MAAM,CAAC;IAChC;;;OAGG;IACH,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;CAC/B;AAED;;;;GAIG;AACH,QAAA,MAAM,OAAO,GAAI,UAAS,cAAmB,KAAG,MAiF/C,CAAC;AAEF,eAAe,OAAO,CAAC"}
package/dist/index.js
CHANGED
@@ -1,84 +1,6 @@
-import {
+import { readdir, readFile, writeFile, mkdir } from "fs/promises";
 import { join, dirname } from "path";
-import {
-const flattenObject = (obj, prefix, map) => {
-  for (const [key, value] of Object.entries(obj)) {
-    if (!value)
-      continue;
-    const fullKey = prefix ? `${prefix}.${key}` : key;
-    if (typeof value === "string") {
-      map[fullKey] = value;
-    } else if (Array.isArray(value)) {
-      map[fullKey] = value.map((v) => String(v)).join(",");
-    } else if (typeof value === "object") {
-      flattenObject(value, fullKey, map);
-    } else {
-      map[fullKey] = String(value);
-    }
-  }
-};
-const readPackageMetadata = async (packagePath) => {
-  try {
-    const content = await readFile(packagePath, "utf-8");
-    const json = JSON.parse(content);
-    const map = {};
-    flattenObject(json, "", map);
-    return map;
-  } catch (error) {
-    console.warn(`Failed to read package.json from ${packagePath}:`, error);
-    return {};
-  }
-};
-const findWorkspaceRoot = async (startPath) => {
-  let currentPath = startPath;
-  while (currentPath !== dirname(currentPath)) {
-    const packageJsonPath = join(currentPath, "package.json");
-    if (existsSync(packageJsonPath)) {
-      try {
-        const content = await readFile(packageJsonPath, "utf-8");
-        const packageJson = JSON.parse(content);
-        if (packageJson.workspaces || existsSync(join(currentPath, "pnpm-workspace.yaml")) || existsSync(join(currentPath, "lerna.json"))) {
-          return currentPath;
-        }
-      } catch (error) {
-        console.warn(`Failed to parse package.json at ${packageJsonPath}:`, error);
-      }
-    }
-    currentPath = dirname(currentPath);
-  }
-  return void 0;
-};
-const mergePackageMetadata = (parentMetadata, childMetadata) => {
-  const merged = {};
-  for (const key in parentMetadata) {
-    const value = parentMetadata[key];
-    if (value !== void 0) {
-      merged[key] = value;
-    }
-  }
-  for (const key in childMetadata) {
-    const value = childMetadata[key];
-    if (value !== void 0) {
-      merged[key] = value;
-    }
-  }
-  return merged;
-};
-const resolvePackageMetadata = async (projectRoot) => {
-  const workspaceRoot = await findWorkspaceRoot(projectRoot);
-  if (!workspaceRoot) {
-    const localPackagePath = join(projectRoot, "package.json");
-    return await readPackageMetadata(localPackagePath);
-  }
-  const projectPackagePath = join(projectRoot, "package.json");
-  const rootPackagePath = join(workspaceRoot, "package.json");
-  let metadata = await readPackageMetadata(rootPackagePath);
-  if (projectPackagePath !== rootPackagePath && existsSync(projectPackagePath)) {
-    const projectMetadata = await readPackageMetadata(projectPackagePath);
-    metadata = mergePackageMetadata(metadata, projectMetadata);
-  }
-  return metadata;
-};
+import { r as resolvePackageMetadata } from "./internal-JwF_Mrdt.js";
 const generateBanner = (metadata, outputKeys) => {
   const parts = [];
   for (const key of outputKeys) {
@@ -99,20 +21,57 @@ const insertBannerHeader = (content, banner) => {
     return banner + "\n" + content;
   }
 };
+const sanitizeKey = (key) => {
+  return key.replace(/[^a-zA-Z0-9_]/g, "_").replace(/^(\d)/, "_$1");
+};
+const generateMetadataFile = (metadata, outputKeys) => {
+  const lines = [];
+  lines.push("// This file is auto-generated by screw-up plugin");
+  lines.push("// Do not edit manually");
+  lines.push("");
+  for (const key of outputKeys) {
+    const value = metadata[key];
+    if (value) {
+      const sanitizedKey = sanitizeKey(key);
+      const escapedValue = JSON.stringify(value);
+      lines.push(`export const ${sanitizedKey} = ${escapedValue};`);
+    }
+  }
+  lines.push("");
+  return lines.join("\n");
+};
 const screwUp = (options = {}) => {
   const {
     outputKeys = ["name", "version", "description", "author", "license", "repository.url"],
-    assetFilters = ["\\.d\\.ts$"]
+    assetFilters = ["\\.d\\.ts$"],
+    outputMetadataFile = false,
+    outputMetadataFilePath = "src/generated/packageMetadata.ts",
+    outputMetadataKeys = ["name", "version", "description", "author", "license", "repository.url"]
   } = options;
   const assetFiltersRegex = assetFilters.map((filter) => new RegExp(filter));
   let banner;
+  let metadata;
+  let projectRoot;
   return {
     name: "screw-up",
     apply: "build",
     async configResolved(config) {
-
+      projectRoot = config.root;
+      metadata = await resolvePackageMetadata(config.root);
       banner = generateBanner(metadata, outputKeys);
     },
+    async buildStart() {
+      if (outputMetadataFile) {
+        const metadataContent = generateMetadataFile(metadata, outputMetadataKeys);
+        const metadataPath = join(projectRoot, outputMetadataFilePath);
+        try {
+          await mkdir(dirname(metadataPath), { recursive: true });
+          await writeFile(metadataPath, metadataContent);
+        } catch (error) {
+          console.warn(`Failed to write metadata file to ${metadataPath}:`, error);
+        }
+      }
+    },
     generateBundle(_options, bundle) {
       for (const fileName in bundle) {
        const chunk = bundle[fileName];
@@ -147,5 +106,6 @@ const screwUp = (options = {}) => {
   };
 };
 export {
-  screwUp as default
+  screwUp as default,
+  generateBanner
 };
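For reference, `generateMetadataFile` above emits one `export const` per requested key that has a value, with the key run through `sanitizeKey` (any character outside `[a-zA-Z0-9_]` becomes an underscore, and a leading digit gets an underscore prefix), so `repository.url` becomes `repository_url`. A sketch of what the generated module and its consumption might look like; the concrete values are illustrative, and the relative import assumes application code under `src/` next to the default `src/generated/` output directory.

```typescript
// Illustrative contents of src/generated/packageMetadata.ts:
//
//   // This file is auto-generated by screw-up plugin
//   // Do not edit manually
//
//   export const name = "my-lib";
//   export const version = "1.2.3";
//   export const repository_url = "https://example.com/my-lib.git";

// Consuming the generated constants from application code under src/:
import { name, version } from "./generated/packageMetadata";

console.log(`${name} v${version}`);
```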