mdat 1.3.4 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. package/dist/.DS_Store +0 -0
  2. package/dist/bin/cli.js +1399 -0
  3. package/dist/bin/typescript-DT_UEyMt.js +1 -0
  4. package/dist/bin/typescript-HSmT12zI.js +449 -0
  5. package/dist/lib/index.d.ts +155 -0
  6. package/dist/lib/index.js +842 -0
  7. package/package.json +49 -36
  8. package/readme.md +17 -14
  9. package/bin/cli.js +0 -29419
  10. package/dist/api.d.ts +0 -31
  11. package/dist/config.d.ts +0 -74
  12. package/dist/index.d.ts +0 -5
  13. package/dist/index.js +0 -24796
  14. package/dist/mdat-json-loader.d.ts +0 -7
  15. package/dist/processors.d.ts +0 -12
  16. package/dist/readme/api.d.ts +0 -46
  17. package/dist/readme/config.d.ts +0 -14
  18. package/dist/readme/init.d.ts +0 -18
  19. package/dist/readme/rules/badges.d.ts +0 -7
  20. package/dist/readme/rules/banner.d.ts +0 -7
  21. package/dist/readme/rules/code.d.ts +0 -6
  22. package/dist/readme/rules/contributing.d.ts +0 -7
  23. package/dist/readme/rules/description.d.ts +0 -10
  24. package/dist/readme/rules/footer.d.ts +0 -10
  25. package/dist/readme/rules/header.d.ts +0 -11
  26. package/dist/readme/rules/index.d.ts +0 -66
  27. package/dist/readme/rules/license.d.ts +0 -7
  28. package/dist/readme/rules/short-description.d.ts +0 -12
  29. package/dist/readme/rules/size-table.d.ts +0 -6
  30. package/dist/readme/rules/size.d.ts +0 -6
  31. package/dist/readme/rules/table-of-contents.d.ts +0 -9
  32. package/dist/readme/rules/title.d.ts +0 -8
  33. package/dist/readme/rules/toc.d.ts +0 -11
  34. package/dist/readme/rules/utilities/size/size-report.d.ts +0 -19
  35. package/dist/readme/templates/index.d.ts +0 -27
  36. package/dist/readme/utilities.d.ts +0 -15
  37. package/dist/utilities.d.ts +0 -10
@@ -0,0 +1,842 @@
1
+ import { cosmiconfig, defaultLoaders } from "cosmiconfig";
2
+ import { TypeScriptLoader } from "cosmiconfig-typescript-loader";
3
+ import fs from "node:fs/promises";
4
+ import path from "node:path";
5
+ import picocolors from "picocolors";
6
+ import plur from "plur";
7
+ import { readPackage } from "read-pkg";
8
+ import { deepMergeDefined, getSoleRule, log, mdatCheck, mdatClean, mdatExpand, mdatSplit, optionsSchema, rulesSchema } from "remark-mdat";
9
+ import { z } from "zod";
10
+ import { packageUp } from "package-up";
11
+ import { isFile, isFileSync } from "path-type";
12
+ import { Configuration } from "unified-engine";
13
+ import untildify from "untildify";
14
+ import { remark } from "remark";
15
+ import remarkGfm from "remark-gfm";
16
+ import { read } from "to-vfile";
17
+ import { VFile } from "vfile";
18
+ import { globby } from "globby";
19
+ import { promisify } from "node:util";
20
+ import { brotliCompress, gzip } from "node:zlib";
21
+ import prettyBytes from "pretty-bytes";
22
+ import { toc } from "mdast-util-toc";
23
+ import { findUp } from "find-up";
24
+ import { packageDirectory } from "package-directory";
25
+
26
+ //#region src/lib/mdat-json-loader.ts
27
+ /**
28
+ * Lets arbitrary JSON objects (like from package.json) become reasonably good mdat rule sets
29
+ * HOWEVER cosmiconfig treats package.json as a special case and will always load only specific keys from it
30
+ * So we have to intercept and load them manually in config.ts
31
+ */
32
/**
 * Cosmiconfig loader that turns arbitrary JSON (e.g. package.json content)
 * into a flat mdat rule set keyed by dot-separated paths.
 * @param {string} filePath - Path of the JSON file being loaded
 * @param {string} content - Raw file content
 * @returns {Record<string, string>} Flattened key/value rules
 */
function mdatJsonLoader(filePath, content) {
	const parsed = defaultLoaders[".json"](filePath, content);
	return flattenJson(parsed);
}
36
/**
 * Recursively flattens a nested JSON object into a single-level map whose keys
 * are dot-separated paths and whose values are stringified leaves.
 * Arrays are treated as leaves (stringified via toString, comma-joined).
 * @param {object} jsonObject - Object to flatten
 * @param {string} [parentKey] - Dot-path prefix accumulated during recursion
 * @param {Record<string, string>} [result] - Accumulator, mutated in place
 * @returns {Record<string, string>} The flattened key/value map
 */
function flattenJson(jsonObject, parentKey = "", result = {}) {
	for (const key of Object.keys(jsonObject)) {
		const value = jsonObject[key];
		const fullPath = parentKey === "" ? key : `${parentKey}.${key}`;
		if (value !== null && typeof value === "object" && !Array.isArray(value)) {
			flattenJson(value, fullPath, result);
		} else {
			result[fullPath] = value === null ? "null" : value.toString();
		}
	}
	return result;
}
45
+
46
+ //#endregion
47
+ //#region src/lib/utilities.ts
48
/**
 * Left-pads a number with zeros so it aligns with the widest expected value.
 * @param {number} n - Number to pad
 * @param {number} nMax - Largest value in the sequence; determines pad width
 * @returns {string} Zero-padded decimal representation of n
 */
function zeroPad(n, nMax) {
	const width = nMax === 0 ? 1 : Math.floor(Math.log10(Math.abs(nMax))) + 1;
	return String(n).padStart(width, "0");
}
52
/**
 * Resolves input/output path records for a batch of input files.
 * When a name is given and there are multiple inputs, each output name gets a
 * zero-padded numeric suffix (e.g. "-01") so the files stay distinct.
 * @param {string[]} inputs - Input file paths
 * @param {string | undefined} output - Output directory (created if needed)
 * @param {string | undefined} name - Override for the output base name
 * @param {string | undefined} extension - Override for the output extension
 * @returns {Promise<Array<{input: string, name: string, output: string}>>}
 */
async function getInputOutputPaths(inputs, output, name, extension) {
	const resolved = [];
	let index = 0;
	for (const file of inputs) {
		const suffix = name && inputs.length > 1 ? `-${zeroPad(index + 1, inputs.length)}` : "";
		resolved.push(await getInputOutputPath(file, output, name, extension, suffix));
		index += 1;
	}
	return resolved;
}
60
/**
 * Resolves a single input file to its output directory and output file name.
 * Base-name precedence: explicit `name` (sans extension), else input's base name.
 * Extension precedence: explicit `extension`, else `name`'s extension, else the
 * input's extension (possibly none).
 * @param {string} input - Input file path (tilde-expandable)
 * @param {string | undefined} output - Output directory; created recursively if missing
 * @param {string | undefined} name - Optional output base-name override
 * @param {string | undefined} extension - Optional output extension override (no leading dot)
 * @param {string} [nameSuffix] - Suffix inserted before the extension (e.g. "-01")
 * @returns {Promise<{input: string, name: string, output: string}>}
 * @throws {Error} If the input file doesn't exist or the output path is an existing file
 */
async function getInputOutputPath(input, output, name, extension, nameSuffix = "") {
	const resolvedInput = expandPath(input);
	const resolvedOutput = output ? expandPath(output) : void 0;
	if (!isFileSync(resolvedInput)) throw new Error(`Input file not found: "${resolvedInput}"`);
	if (resolvedOutput) {
		if (isFileSync(resolvedOutput)) throw new Error(`Output path must be a directory, received a file path: "${resolvedOutput}"`);
		await fs.mkdir(resolvedOutput, { recursive: true });
	}
	return {
		input: resolvedInput,
		// NOTE(review): the fallback branches interpolate path.extname() output
		// (which includes a leading dot) after a literal "." — verify the
		// double-dot case when `extension` is undefined and name/input have extensions.
		name: `${name ? path.basename(name, path.extname(name)) : path.basename(resolvedInput, path.extname(resolvedInput))}${nameSuffix}${`.${extension ?? (name && path.extname(name) !== "" ? path.extname(name) : path.extname(input) === "" ? "" : path.extname(input))}`}`,
		// Default the output directory to the input's own directory
		output: resolvedOutput ?? path.dirname(resolvedInput)
	};
}
74
/**
 * Expands a leading tilde in a file path to the user's home directory.
 * @param {string} file - Possibly tilde-prefixed path
 * @returns {string} Expanded path
 */
function expandPath(file) {
	return untildify(file);
}
77
/**
 * Finds the closest package.json at or above the current working directory.
 * @returns {Promise<string | undefined>} Path to package.json, or undefined if none found
 */
async function findPackage() {
	return packageUp();
}
80
/**
 * Normalizes a possibly-missing scalar-or-array value to an array.
 * @param {*} value - Anything; null/undefined become an empty array
 * @returns {Array} The value itself if already an array, otherwise wrapped
 */
function ensureArray(value) {
	if (value == null) return [];
	return Array.isArray(value) ? value : [value];
}
84
/**
 * Loads any "ambient" Remark configuration (.remarkrc or the `remarkConfig`
 * field of package.json) from the current working directory via unified-engine.
 * Errors are logged and swallowed; a neutral empty config is returned when
 * nothing is found or loading fails.
 * @returns {Promise<{filePath: string | undefined, plugins: Array, settings: object}>}
 */
async function loadAmbientRemarkConfig() {
	const ambientConfig = new Configuration({
		cwd: process.cwd(),
		detectConfig: true,
		packageField: "remarkConfig",
		rcName: ".remarkrc"
	});
	// Configuration.load is callback-based; adapt it to a Promise.
	// On error, resolve undefined rather than reject (best-effort load).
	const configResult = await new Promise((resolve) => {
		ambientConfig.load("", (error, result) => {
			if (error) {
				log.error(String(error));
				resolve(void 0);
				return;
			}
			resolve(result);
		});
	});
	if (configResult) {
		const { filePath } = configResult;
		if (filePath === void 0) log.info("No ambient Remark configuration file found");
		else log.info(`Found and loaded ambient Remark configuration from "${filePath}"`);
		return configResult;
	}
	log.info("No ambient Remark configuration found");
	// Fallback: empty config shape consumed by the processor factories
	return {
		filePath: void 0,
		plugins: [],
		settings: {}
	};
}
114
+
115
+ //#endregion
116
+ //#region src/lib/config.ts
117
// Extends the base remark-mdat options schema with mdat-file-specific options
// (assets directory and package.json location).
const configSchema = optionsSchema.merge(z.object({
	assetsPath: z.string().optional(),
	packageFile: z.string().optional()
})).describe("Config Extension");
121
/**
 * Load and validate mdat configuration / rule sets
 * Uses cosmiconfig to search in the usual places.
 * Merge precedence: Base Defaults < Readme Defaults < Searched Config < Additional Config Paths
 *
 * Generic to accommodate additional Config options, so set T to your custom config type if needed. You must provide a matching configExtensionSchema as well.
 * @param {object} [options] - additionalConfig, additionalRules, readmeDefaults, searchFrom
 * @returns {Promise<object>} The fully merged configuration (also memoized module-wide)
 */
async function loadConfig(options) {
	const { additionalConfig, additionalRules, readmeDefaults, searchFrom } = options ?? {};
	// Reset module-level memoized state so a reload starts fresh
	config = void 0;
	packageJson = void 0;
	// Base defaults (lowest merge precedence)
	let finalConfig = {
		addMetaComment: false,
		assetsPath: "./assets",
		closingPrefix: "/",
		keywordPrefix: "",
		metaCommentIdentifier: "+",
		packageFile: await findPackage(),
		rules: { mdat: `Powered by the Markdown Autophagic Template system: [mdat](https://github.com/kitschpatrol/mdat).` }
	};
	if (readmeDefaults) finalConfig = deepMergeDefined(finalConfig, readmeDefaults);
	const configExplorer = cosmiconfig("mdat", { loaders: { ".ts": TypeScriptLoader() } });
	const results = await configExplorer.search(searchFrom);
	if (results) {
		const { config, filepath } = results;
		let possibleConfig = config;
		log.info(`Using config from "${filepath}"`);
		// A string value in package.json's mdat field is treated as the name of a
		// shared config module to import dynamically.
		if (filepath.endsWith("package.json") && typeof config === "string") {
			log.info(`Detected shared config string: "${config}"`);
			const { default: sharedConfig } = await import(config);
			possibleConfig = sharedConfig;
		}
		const configFromObject = getAndValidateConfigFromObject(possibleConfig, configSchema);
		if (configFromObject) finalConfig = deepMergeDefined(finalConfig, configFromObject);
	}
	// Merge any additional configs (objects or file paths), left to right
	if (additionalConfig !== void 0) {
		const additionalConfigsArray = Array.isArray(additionalConfig) ? additionalConfig : [additionalConfig];
		for (const configOrPath of additionalConfigsArray) {
			let config;
			if (typeof configOrPath === "string") {
				const results = await configExplorer.load(configOrPath);
				if (results === null || results === void 0) continue;
				const { config: loadedConfig, filepath } = results;
				log.info(`Loaded additional config from "${filepath}"`);
				config = loadedConfig;
			} else config = configOrPath;
			if (config === void 0) continue;
			log.info("Merging configuration object");
			const configFromObject = getAndValidateConfigFromObject(config, configSchema);
			if (configFromObject !== void 0) finalConfig = deepMergeDefined(finalConfig, configFromObject);
		}
	}
	// Merge any additional rule sets (objects or file paths), left to right
	if (additionalRules !== void 0) {
		const additionalRulesArray = Array.isArray(additionalRules) ? additionalRules : [additionalRules];
		const rulesExplorer = cosmiconfig("mdat", { loaders: {
			".json": mdatJsonLoader,
			".ts": TypeScriptLoader()
		} });
		for (const rulesOrPath of additionalRulesArray) {
			let rules;
			if (typeof rulesOrPath === "string") {
				let results;
				// package.json is special-cased: cosmiconfig only reads specific keys
				// from it, so load and flatten the whole file manually instead.
				if (path.basename(rulesOrPath).endsWith("package.json")) results = {
					config: mdatJsonLoader(rulesOrPath, await fs.readFile(rulesOrPath, "utf8")),
					filepath: rulesOrPath
				};
				else results = await rulesExplorer.load(rulesOrPath);
				if (results === null || results === void 0) continue;
				const { config: loadedRules, filepath } = results;
				// NOTE(review): message says "config" though this branch loads rules
				log.info(`Loaded additional config from "${filepath}"`);
				rules = loadedRules;
			} else rules = rulesOrPath;
			if (rules === void 0) continue;
			log.info("Merging rules into configuration object");
			const configFromRulesObject = getAndValidateConfigFromRulesObject(rules, rulesSchema);
			if (configFromRulesObject !== void 0) finalConfig = deepMergeDefined(finalConfig, configFromRulesObject);
		}
	}
	// Summarize the loaded rules for the user
	if (finalConfig.rules) {
		const prettyRules = Object.keys(finalConfig.rules).toSorted().map((rule) => `"${picocolors.green(picocolors.bold(rule))}"`);
		log.info(`Loaded ${picocolors.bold(prettyRules.length)} mdat comment expansion ${plur("rule", prettyRules.length)}:`);
		for (const rule of prettyRules) log.info(`\t${rule}`);
	} else log.error("No rules loaded from additional configurations or rules, using default.");
	// Memoize for getConfig()
	config = finalConfig;
	return finalConfig;
}
207
/**
 * Validates a candidate rules object against the provided schema.
 * Logs and returns undefined on failure instead of throwing.
 * @param {unknown} rulesObject - Candidate rule set
 * @param {{safeParse: Function}} rulesSchema - Zod-style schema used for validation
 * @returns {{rules: unknown} | undefined} A config fragment wrapping the rules, or undefined
 */
function getAndValidateConfigFromRulesObject(rulesObject, rulesSchema) {
	const { success } = rulesSchema.safeParse(rulesObject);
	if (!success) {
		log.error(`Rules object has the wrong shape. Ignoring and using default configuration:\n${JSON.stringify(rulesObject, void 0, 2)}`);
		return undefined;
	}
	return { rules: rulesObject };
}
211
/**
 * Validates a candidate config object against the provided schema.
 * Logs and returns undefined on failure instead of throwing.
 * @param {unknown} configObject - Candidate configuration
 * @param {{safeParse: Function}} configSchema - Zod-style schema used for validation
 * @returns {unknown | undefined} The config object unchanged, or undefined if invalid
 */
function getAndValidateConfigFromObject(configObject, configSchema) {
	const parsed = configSchema.safeParse(configObject);
	if (parsed.success) return configObject;
	log.error(`Config object has the wrong shape. Ignoring and using default configuration:\n${JSON.stringify(configObject, void 0, 2)}`);
}
215
// Memoized mdat configuration, populated by loadConfig()
let config;
/**
 * Get the current MDAT config object, loading it if necessary
 * @returns {Promise<object>} The resolved configuration
 */
async function getConfig() {
	if (config === void 0) {
		// Reaching here means loadConfig() wasn't called first; warn and self-heal
		log.warn("getConfig(): config was undefined");
		config ??= await loadConfig();
	}
	return config;
}
226
// Memoized normalized package.json, populated lazily by getPackageJson()
let packageJson;
/**
 * Convenience function for rules
 * Load as package json only as needed, memoize
 * Rules could call this themselves, but this is more convenient and efficient
 * @returns {Promise<object>} Normalized package.json contents (via read-pkg)
 * @throws {Error} If no package.json is found
 */
async function getPackageJson() {
	const { packageFile } = await getConfig();
	if (packageFile === void 0) throw new Error("No packageFile found or set in config");
	packageJson ??= await readPackage({ cwd: path.dirname(packageFile) });
	if (packageJson === void 0) throw new Error("No package.json found");
	return packageJson;
}
/**
 * Convenience function for merging configs
 * Performs a deep merge, with the rightmost object taking precedence
 * @param {object} a - Lower-precedence config
 * @param {object} b - Higher-precedence config
 * @returns {object} Merged configuration
 */
function mergeConfigs(a, b) {
	return deepMergeDefined(a, b);
}
247
+
248
+ //#endregion
249
//#region src/lib/processors.ts
/**
 * Runs an mdat processor over one or more Markdown files.
 * Loads config + ambient Remark config, resolves output paths, then processes
 * each input through the processor produced by `processorGetter`.
 * Writing the results is the caller's responsibility.
 * @param {string | string[]} files - Input file path(s)
 * @param {Function} loader - Config loader (e.g. loadConfig)
 * @param {Function} processorGetter - Factory producing a remark processor
 * @param {string | undefined} name - Output base-name override
 * @param {string | undefined} output - Output directory
 * @param {*} config - Additional config objects or paths
 * @param {*} rules - Additional rule objects or paths
 * @returns {Promise<Array>} Processed VFiles with dirname/basename set to output targets
 */
async function processFiles(files, loader, processorGetter, name, output, config, rules) {
	const resolvedConfig = await loader({
		additionalConfig: config,
		additionalRules: rules
	});
	const localRemarkConfiguration = await loadAmbientRemarkConfig();
	// Output extension is always "md"
	const inputOutputPaths = await getInputOutputPaths(ensureArray(files), output, name, "md");
	const results = [];
	const resolvedProcessor = processorGetter(resolvedConfig, localRemarkConfiguration);
	for (const { input, name, output } of inputOutputPaths) {
		const inputFile = await read(input);
		const result = await resolvedProcessor.process(inputFile);
		// Point the VFile at its output location; caller decides whether to write
		result.dirname = output;
		result.basename = name;
		results.push(result);
	}
	return results;
}
268
/**
 * Runs an mdat processor over a Markdown string.
 * @param {string} markdown - Markdown source text
 * @param {Function} loader - Config loader (e.g. loadConfig)
 * @param {Function} processorGetter - Factory producing a remark processor
 * @param {*} config - Additional config objects or paths
 * @param {*} rules - Additional rule objects or paths
 * @returns {Promise<object>} The processed VFile
 */
async function processString(markdown, loader, processorGetter, config, rules) {
	const resolvedConfig = await loader({
		additionalConfig: config,
		additionalRules: rules
	});
	const ambientRemarkConfig = await loadAmbientRemarkConfig();
	const processor = processorGetter(resolvedConfig, ambientRemarkConfig);
	return processor.process(new VFile(markdown));
}
274
/**
 * Builds the remark processor that splits, cleans, and then expands mdat comments.
 * @param {object} options - mdat options (rules, prefixes, etc.)
 * @param {object} ambientRemarkConfig - Plugins/settings from the user's ambient Remark config
 * @returns {object} A configured remark processor
 */
function getExpandProcessor(options, ambientRemarkConfig) {
	// Fixed stringify settings keep output stable across runs
	return remark().use({ settings: {
		bullet: "-",
		emphasis: "_"
	} }).use(remarkGfm).use(ambientRemarkConfig).use(() => async function(tree, file) {
		mdatSplit(tree, file);
		mdatClean(tree, file, options);
		await mdatExpand(tree, file, options);
	});
}
/**
 * Builds the remark processor that collapses (cleans) expanded mdat comments
 * without re-expanding them.
 * @param {object} options - mdat options
 * @param {object} ambientRemarkConfig - Plugins/settings from the user's ambient Remark config
 * @returns {object} A configured remark processor
 */
function getCleanProcessor(options, ambientRemarkConfig) {
	return remark().use({ settings: {
		bullet: "-",
		emphasis: "_"
	} }).use(remarkGfm).use(ambientRemarkConfig).use(() => function(tree, file) {
		mdatSplit(tree, file);
		mdatClean(tree, file, options);
	});
}
/**
 * Builds the remark processor that validates mdat comments without rewriting.
 * @param {object} options - mdat options; paranoid mode is forced off
 * @param {object} ambientRemarkConfig - Plugins/settings from the user's ambient Remark config
 * @returns {object} A configured remark processor
 */
function getCheckProcessor(options, ambientRemarkConfig) {
	return remark().use({ settings: {
		bullet: "-",
		emphasis: "_"
	} }).use(remarkGfm).use(ambientRemarkConfig).use(() => async function(tree, file) {
		await mdatCheck(tree, file, {
			...options,
			paranoid: false
		});
	});
}
304
+
305
+ //#endregion
306
//#region src/lib/api.ts
/**
 * Expand MDAT comments in one or more Markdown files
 * Writing is the responsibility of the caller (e.g. via `await write(result)`)
 * @returns an array of VFiles (Even if you only pass a single file path!)
 */
async function expandFiles(files, name, output, config, rules) {
	return processFiles(files, loadConfig, getExpandProcessor, name, output, config, rules);
}
/**
 * Expand MDAT comments in a Markdown string
 * @returns a Promise resolving to the processed VFile
 */
async function expandString(markdown, config, rules) {
	return processString(markdown, loadConfig, getExpandProcessor, config, rules);
}
/**
 * Check and validate MDAT comments in one or more Markdown files
 * @returns an array of VFiles (Even if you only pass a single file path!)
 */
async function checkFiles(files, name, output, config, rules) {
	return processFiles(files, loadConfig, getCheckProcessor, name, output, config, rules);
}
/**
 * Check and validate MDAT comments in a Markdown string
 * @returns a Promise resolving to the processed VFile
 */
async function checkString(markdown, config, rules) {
	return processString(markdown, loadConfig, getCheckProcessor, config, rules);
}
/**
 * Collapse MDAT comments in one or more Markdown files
 * Writing is the responsibility of the caller (e.g. via `await write(result)`)
 * @returns an array of VFiles (Even if you only pass a single file path!)
 */
async function collapseFiles(files, name, output, config, rules) {
	return processFiles(files, loadConfig, getCleanProcessor, name, output, config, rules);
}
/**
 * Collapse MDAT comments in a Markdown string
 * @returns a Promise resolving to the processed VFile
 */
async function collapseString(markdown, config, rules) {
	return processString(markdown, loadConfig, getCleanProcessor, config, rules);
}
348
+
349
+ //#endregion
350
//#region src/lib/readme/rules/badges.ts
// Rule: generates shields.io badges for npm package(s), license, and custom entries.
var badges_default = { badges: {
	async content(options) {
		const validOptions = z.object({
			custom: z.record(z.object({
				image: z.string(),
				link: z.string()
			})).optional(),
			npm: z.array(z.string()).optional()
		}).optional().parse(options);
		const packageJson = await getPackageJson();
		const { name } = packageJson;
		const badges = [];
		if (validOptions?.npm === void 0) {
			// Default: badge for this package, unless it's a scoped package without public publishConfig access
			if (!packageJson.name.startsWith("@") || packageJson.publishConfig?.access === "public") badges.push(`[![NPM Package ${name}](https://img.shields.io/npm/v/${name}.svg)](https://npmjs.com/package/${name})`);
		} else for (const name of validOptions.npm) badges.push(`[![NPM Package ${name}](https://img.shields.io/npm/v/${name}.svg)](https://npmjs.com/package/${name})`);
		const { license } = packageJson;
		// Double hyphens escape literal "-" in the shields.io badge path
		if (license !== void 0) badges.push(`[![License: ${license}](https://img.shields.io/badge/License-${license.replaceAll("-", "--")}-yellow.svg)](https://opensource.org/licenses/${license})`);
		if (validOptions?.custom !== void 0) for (const [name, { image, link }] of Object.entries(validOptions.custom)) badges.push(`[![${name}](${image})](${link})`);
		return badges.join("\n");
	},
	order: 3
} };

//#endregion
//#region src/lib/readme/rules/banner.ts
// Rule: embeds a banner image, discovered automatically if not specified.
var banner_default = { banner: {
	async content(options) {
		const validOptions = z.object({
			alt: z.string().optional(),
			src: z.string().optional()
		}).optional().parse(options);
		const { assetsPath, packageFile } = await getConfig();
		if (packageFile === void 0) throw new Error("No package.json found");
		// Search the configured assets path first, then the default locations
		const src = validOptions?.src ?? await getBannerSrc(assetsPath) ?? await getBannerSrc();
		if (src === void 0) throw new Error("Banner image not found at any typical location, consider adding something at ./assets/banner.webp");
		else if (!isUrl(src) && !await isFile(src)) throw new Error(`Banner image not found at "${src}"`);
		let alt = validOptions?.alt;
		if (alt === void 0) {
			// Fall back to "<package name> banner" for alt text
			const packageName = (await readPackage({ cwd: path.dirname(packageFile) })).name;
			if (packageName === void 0) throw new Error("Banner image alt text not available");
			alt = `${packageName} banner`;
		}
		return `![${alt}](${src})`;
	},
	order: 2
} };
/**
 * Searches typical asset locations (or a specific path) for a banner-like image.
 * @param {string} [specificPath] - Restrict the search to this directory
 * @returns {Promise<string | undefined>} First match relative to cwd, or undefined
 */
async function getBannerSrc(specificPath) {
	const { packageFile } = await getConfig();
	if (packageFile === void 0) throw new Error("No package.json found");
	// NOTE(review): this local shadows the `packageDirectory` import from package-directory
	const packageDirectory = path.dirname(packageFile);
	const paths = await globby((specificPath === void 0 ? [
		".",
		"assets",
		"media",
		"readme-assets",
		"readme-media",
		"readme",
		"images",
		".github/assets"
	] : [specificPath]).map((location) => path.join(packageDirectory, location)), {
		deep: 1,
		expandDirectories: {
			extensions: [
				"png",
				"gif",
				"jpg",
				"jpeg",
				"svg",
				"webp"
			],
			files: [
				"banner",
				"cover",
				"demo",
				"header",
				"hero",
				"image",
				"logo",
				"overview",
				"readme",
				"screenshot",
				"screenshots",
				"splash"
			]
		}
	});
	if (paths.length > 0) return path.relative(process.cwd(), paths[0]);
}
439
/**
 * Loosely determines whether a string looks like a URL.
 * In lenient mode, a bare host like "example.com" is retried once with an
 * "https://" prefix before giving up.
 * @param {string} text - Candidate string (surrounding whitespace is ignored)
 * @param {boolean} [lenient] - Retry with an https:// prefix on parse failure
 * @returns {boolean} True if the string parses as a URL
 * @throws {TypeError} If text is not a string
 */
function isUrl(text, lenient = true) {
	if (typeof text !== "string") throw new TypeError("Expected a string");
	const candidate = text.trim();
	if (candidate.includes(" ")) return false;
	try {
		// eslint-disable-next-line no-new
		new URL(candidate);
		return true;
	} catch {
		return lenient ? isUrl(`https://${candidate}`, false) : false;
	}
}
451
+
452
+ //#endregion
453
+ //#region src/lib/readme/rules/code.ts
454
// Rule: embeds a source file's contents as a fenced code block, tagging it
// with a language inferred from the file extension (or the explicit option).
var code_default = { code: { async content(options) {
	const validOptions = z.object({
		file: z.string(),
		language: z.string().optional(),
		trim: z.boolean().default(true)
	}).parse(options);
	// Fix: honor the `language` option (previously parsed but never used);
	// also drop the no-op `?? ""` — path.extname never returns nullish.
	const lang = validOptions.language ?? path.extname(validOptions.file).replace(/^\./, "");
	// File path is resolved relative to the current working directory
	const exampleCode = await fs.readFile(path.join(process.cwd(), validOptions.file), "utf8");
	return `\`\`\`${lang}\n${validOptions.trim ? exampleCode.trim() : exampleCode}\n\`\`\``;
} } };
464
+
465
+ //#endregion
466
//#region src/lib/readme/rules/contributing.ts
// Rule: boilerplate "Contributing" section linking to the package's issue tracker.
var contributing_default = { contributing: {
	async content() {
		const issuesUrl = (await getPackageJson()).bugs?.url;
		if (issuesUrl === void 0) throw new Error("Could not find \"bugs.url\" entry in package.json");
		return `## Contributing\n[Issues](${issuesUrl}) and pull requests are welcome.`;
	},
	order: 15
} };

//#endregion
//#region src/lib/readme/rules/description.ts
/**
 * Rule: bolded package description from package.json.
 * (`short-description` below is an alias of this rule.)
 */
var description_default = { description: {
	async content() {
		const packageJson = await getPackageJson();
		if (packageJson.description === void 0) throw new Error("Could not find \"description\" entry in package.json");
		return `**${packageJson.description}**`;
	},
	order: 4
} };

//#endregion
//#region src/lib/readme/rules/license.ts
// Rule: "License" section built from package.json's license and author fields.
var license_default = { license: {
	async content() {
		const { author, license } = await getPackageJson();
		if (author?.name === void 0) throw new Error("Could not find \"author.name\" entry in package.json");
		if (license === void 0) throw new Error("Could not find \"license\" entry in package.json");
		return `## License\n[${license}](license.txt) © ${author.name}`;
	},
	order: 16
} };

//#endregion
//#region src/lib/readme/rules/footer.ts
// Composite rule: contributing section followed by license section.
var footer_default = { footer: {
	content: [getSoleRule(contributing_default), getSoleRule(license_default)],
	order: 17
} };

//#endregion
//#region src/lib/readme/rules/short-description.ts
/**
 * Simple alias for `description` rule, to match nomenclature in
 * [standard-readme](https://github.com/RichardLitt/standard-readme/blob/main/spec.md#short-description)
 * spec.
 */
var short_description_default = { "short-description": getSoleRule(description_default) };
517
+
518
+ //#endregion
519
//#region src/lib/readme/rules/title.ts
// Rule: H1 title from the package name, with optional prefix/postfix and title-casing.
var title_default = { title: {
	applicationOrder: 2,
	async content(options) {
		const { postfix, prefix, titleCase } = z.object({
			postfix: z.string().optional().default(""),
			prefix: z.string().optional().default(""),
			titleCase: z.boolean().optional().default(false)
		}).parse(options ?? {});
		const { name: packageName } = await getPackageJson();
		return `# ${prefix}${titleCase ? makeTitleCase(packageName) : packageName}${postfix}`;
	},
	order: 1
} };
533
/**
 * Converts a space/underscore/hyphen-delimited string to Title Case.
 * @param {string} text - Input such as "my-package_name"
 * @returns {string} Words capitalized and joined with single spaces
 */
function makeTitleCase(text) {
	const words = text.split(/[ _-]+/).filter((word) => word.length > 0);
	return words.map((word) => word[0].toUpperCase() + word.slice(1)).join(" ");
}
536
+
537
+ //#endregion
538
//#region src/lib/readme/rules/header.ts
// Composite rule: renders title, banner, badges, and short description in order.
var header_default = { header: {
	applicationOrder: 2,
	content: [
		getSoleRule(title_default),
		getSoleRule(banner_default),
		getSoleRule(badges_default),
		getSoleRule(short_description_default)
	],
	order: 1
} };
549
+
550
+ //#endregion
551
//#region src/lib/readme/rules/utilities/size/size-report.ts
// Promisified zlib compressors used to measure compressed file sizes
const brotliCompressAsync = promisify(brotliCompress);
const gzipCompressAsync = promisify(gzip);
/**
 * Creates a SizeInfo object with formatted values
 * @param bytes - Size in bytes
 * @param originalSize - Original file size for percentage calculation
 * @returns Object with raw bytes, pretty-printed size, and savings percentage
 */
function createSizeInfo(bytes, originalSize) {
	// Savings relative to the original; guard against divide-by-zero for empty files
	const percent = originalSize === 0 ? 0 : (originalSize - bytes) / originalSize * 100;
	return {
		bytes,
		bytesPretty: prettyBytes(bytes, { maximumFractionDigits: 1 }),
		percent,
		percentPretty: `${Math.round(percent)}%`
	};
}
/**
 * Analyzes a file's size and its compressed sizes using Brotli and Gzip
 * @param filePath - Path to the file to analyze
 * @returns Promise containing detailed size report
 * @throws {Error} if file cannot be read or compressed
 */
async function createSizeReport(filePath) {
	try {
		const fileContent = await fs.readFile(filePath);
		const originalSize = fileContent.length;
		// Run both compressions in parallel
		const [brotliCompressed, gzipCompressed] = await Promise.all([brotliCompressAsync(fileContent), gzipCompressAsync(fileContent)]);
		return {
			brotli: createSizeInfo(brotliCompressed.length, originalSize),
			gzip: createSizeInfo(gzipCompressed.length, originalSize),
			original: createSizeInfo(originalSize, originalSize)
		};
	} catch (error) {
		// Wrap with context; original error message is preserved in the text
		throw new Error(`Failed to analyze file: ${error instanceof Error ? error.message : "Unknown error"}`);
	}
}
588
+
589
+ //#endregion
590
//#region src/lib/readme/rules/size.ts
// Options for the `size` rule: target file plus optional compression mode.
const optionsSchema$2 = z.object({
	compression: z.enum([
		"none",
		"brotli",
		"gzip"
	]).optional().default("none"),
	file: z.string()
});
599
/**
 * Picks the human-readable size from a size report for a given compression.
 * @param {object} report - Size report with brotli/gzip/original entries
 * @param {"none" | "brotli" | "gzip"} compression - Which size to return
 * @returns {string | undefined} Pretty-printed size, or undefined for an unrecognized value
 */
function getSizeForCompression(report, compression) {
	if (compression === "brotli") return report.brotli.bytesPretty;
	if (compression === "gzip") return report.gzip.bytesPretty;
	if (compression === "none") return report.original.bytesPretty;
}
606
// Rule: replaces the comment with the (optionally compressed) size of a file,
// resolved relative to the current working directory.
var size_default = { size: { async content(options) {
	const validOptions = optionsSchema$2.parse(options);
	return getSizeForCompression(await createSizeReport(path.join(process.cwd(), validOptions.file)), validOptions.compression);
} } };
610
+
611
+ //#endregion
612
//#region src/lib/readme/rules/size-table.ts
// Accepts a single path or an array of paths, normalizing to an array.
const fileSchema = z.union([z.string(), z.array(z.string())]).transform((files) => Array.isArray(files) ? files : [files]);
// Options for the `size-table` rule; accepts either `file` or `files` as the
// key and normalizes the result to `files`.
const optionsSchema$1 = z.union([z.object({
	brotli: z.boolean().optional().default(true),
	file: fileSchema,
	gzip: z.boolean().optional().default(true),
	original: z.boolean().optional().default(true),
	showPercentage: z.boolean().optional().default(false)
}), z.object({
	brotli: z.boolean().optional().default(true),
	files: fileSchema,
	gzip: z.boolean().optional().default(true),
	original: z.boolean().optional().default(true),
	showPercentage: z.boolean().optional().default(false)
})]).transform((options) => {
	// Rename the `file` key to `files` so downstream code sees one shape
	if ("file" in options) {
		const { file, ...rest } = options;
		return {
			files: file,
			...rest
		};
	}
	return options;
});
636
/**
 * Renders size reports as a GitHub-flavored Markdown table.
 * Columns appear in the fixed order File, Original, Gzip, Brotli, with the
 * last three toggled by options; percentage savings can be appended to the
 * compressed columns.
 * @param {Array} reports - Pairs of [file path, size report]
 * @param {{original: boolean, gzip: boolean, brotli: boolean, showPercentage: boolean}} options - Column toggles
 * @returns {string} Markdown table text
 */
function formatMarkdownTable(reports, options) {
	const headers = ["File"];
	if (options.original) headers.push("Original");
	if (options.gzip) headers.push("Gzip");
	if (options.brotli) headers.push("Brotli");
	const lines = [
		`| ${headers.join(" | ")} |`,
		`| ${headers.map(() => "---").join(" | ")} |`
	];
	for (const [file, report] of reports) {
		const cells = [path.basename(file)];
		if (options.original) cells.push(report.original.bytesPretty);
		if (options.gzip) {
			cells.push(options.showPercentage ? `${report.gzip.bytesPretty} (${report.gzip.percentPretty})` : report.gzip.bytesPretty);
		}
		if (options.brotli) {
			cells.push(options.showPercentage ? `${report.brotli.bytesPretty} (${report.brotli.percentPretty})` : report.brotli.bytesPretty);
		}
		lines.push(`| ${cells.join(" | ")} |`);
	}
	return lines.join("\n");
}
655
// Rule: renders a Markdown table of original/gzip/brotli sizes for one or
// more files, resolved relative to the current working directory.
var size_table_default = { "size-table": { async content(options) {
	const validOptions = optionsSchema$1.parse(options);
	// Analyze all files in parallel, preserving input order
	return formatMarkdownTable(await Promise.all(validOptions.files.map(async (file) => {
		return [file, await createSizeReport(path.join(process.cwd(), file))];
	})), {
		brotli: validOptions.brotli,
		gzip: validOptions.gzip,
		original: validOptions.original,
		showPercentage: validOptions.showPercentage
	});
} } };
666
+
667
+ //#endregion
668
//#region src/lib/readme/rules/table-of-contents.ts
// Rule: generates a "Table of contents" section from the document's headings.
// Runs early (applicationOrder 1) so it sees headings produced by other rules.
var table_of_contents_default = { "table-of-contents": {
	applicationOrder: 1,
	async content(options, tree) {
		const result = toc(tree, {
			heading: null,
			// Depth option is validated inline; defaults to 3 levels
			maxDepth: z.object({ depth: z.union([
				z.literal(1),
				z.literal(2),
				z.literal(3),
				z.literal(4),
				z.literal(5),
				z.literal(6)
			]).optional() }).optional().parse(options)?.depth ?? 3,
			tight: true
		});
		const heading = `## Table of contents`;
		if (result.map === void 0) throw new Error("Could not generate table of contents");
		// Wrap the generated list in a root node so remark can stringify it
		const rootWrapper = {
			children: result.map.children,
			type: "root"
		};
		// Collapse blank lines so the list renders tightly
		return [heading, remark().use(remarkGfm).stringify(rootWrapper).replaceAll("\n\n", "\n")].join("\n");
	},
	order: 6
} };
694
+
695
+ //#endregion
696
//#region src/lib/readme/rules/toc.ts
/**
 * Simple alias for table-of-contents
 */
var toc_default = { toc: getSoleRule(table_of_contents_default) };
701
+
702
+ //#endregion
703
//#region src/lib/readme/rules/index.ts
// Aggregate of all built-in readme rules, keyed by rule name.
var rules_default = {
	...badges_default,
	...banner_default,
	...code_default,
	...contributing_default,
	...description_default,
	...footer_default,
	...header_default,
	...license_default,
	...short_description_default,
	...size_default,
	...size_table_default,
	...table_of_contents_default,
	...title_default,
	...toc_default
};
720
+
721
+ //#endregion
722
//#region src/lib/readme/config.ts
/**
 * Convenience loader to always include the default readme config.
 * The readme defaults should have lower priority than searched/user config,
 * but higher priority than base mdat defaults.
 * @param {object} [options] - Same shape as loadConfig options
 * @returns {Promise<object>} The merged configuration
 * @throws {Error} If the resolved config has no packageFile path
 */
async function loadConfigReadme(options) {
	// Readme-specific defaults: meta comment on, full built-in rule set
	const defaultReadmeConfig = {
		addMetaComment: true,
		rules: rules_default
	};
	const { additionalConfig = [], readmeDefaults = defaultReadmeConfig, ...rest } = options ?? {};
	const result = await loadConfig({
		additionalConfig: Array.isArray(additionalConfig) ? additionalConfig : [additionalConfig],
		readmeDefaults,
		...rest
	});
	if (result.packageFile === void 0) throw new Error("Package file path is required in `mdat readme` config");
	return result;
}
742
+
743
+ //#endregion
744
+ //#region src/lib/readme/utilities.ts
745
/**
 * Searches for a readme file in the following order:
 * 1. Searches the current working directory for readme.md
 * 2. If there's no readme.md in the current directory, searches up to the closest package directory
 * 3. Gives up and returns undefined if no readme is found
 * @returns The path to the readme file, or undefined if not found
 */
async function findReadme() {
	log.info(`Searching for package directory...`);
	// Stop the upward search at the nearest package root, or the cwd if none.
	const searchCeiling = (await packageDirectory()) ?? process.cwd();
	const readmePath = await findUp("readme.md", {
		stopAt: searchCeiling,
		type: "file"
	});
	if (readmePath === undefined) return undefined;
	log.info(`Found closest readme at "${readmePath}"`);
	return readmePath;
}
763
/**
 * Searches up for a readme.md file.
 * @see `findReadme()` for more details on the search process.
 * @returns The path to the readme file
 * @throws {Error} if no readme is found
 */
async function findReadmeThrows() {
	const readmePath = await findReadme();
	if (readmePath !== undefined) return readmePath;
	throw new Error("No readme found");
}
774
+
775
+ //#endregion
776
+ //#region src/lib/readme/api.ts
777
/**
 * Expands MDAT readme comments in the closest readme.md file.
 * Thin alias for `expandReadmeFiles()` with the file/name/output arguments elided.
 * @see `findReadme()` for more details on the search process.
 */
async function expandReadme(config, rules) {
	return expandReadmeFiles(undefined, undefined, undefined, config, rules);
}
785
/**
 * Expands MDAT readme comments in one or more Markdown files.
 * When no files are given, searches up for the closest readme.md.
 * @see `findReadme()` for more details on the search process.
 */
async function expandReadmeFiles(files, name, output, config, rules) {
	// Avoid mutating the caller's argument; resolve the target list locally.
	const targetFiles = files ?? (await findReadmeThrows());
	return processFiles(targetFiles, loadConfigReadme, getExpandProcessor, name, output, config, rules);
}
794
/**
 * Expands MDAT readme comments in a Markdown string.
 */
async function expandReadmeString(markdown, config, rules) {
	const result = await processString(markdown, loadConfigReadme, getExpandProcessor, config, rules);
	return result;
}
800
/**
 * Checks and validates MDAT readme comments in the closest readme.md file.
 * Thin alias for `checkReadmeFiles()` with the file/name/output arguments elided.
 * @see `findReadme()` for more details on the search process.
 */
async function checkReadme(config, rules) {
	return checkReadmeFiles(undefined, undefined, undefined, config, rules);
}
808
/**
 * Checks and validates MDAT readme comments in one or more Markdown files.
 * When no files are given, searches up for the closest readme.md.
 * @see `findReadme()` for more details on the search process.
 */
async function checkReadmeFiles(files, name, output, config, rules) {
	// Avoid mutating the caller's argument; resolve the target list locally.
	const targetFiles = files ?? (await findReadmeThrows());
	return processFiles(targetFiles, loadConfigReadme, getCheckProcessor, name, output, config, rules);
}
817
/**
 * Checks and validates MDAT readme comments in a Markdown string.
 */
async function checkReadmeString(markdown, config, rules) {
	const result = await processString(markdown, loadConfigReadme, getCheckProcessor, config, rules);
	return result;
}
823
/**
 * Collapses MDAT readme comments in the closest readme.md file.
 * Thin alias for `collapseReadmeFiles()` with the file/name/output arguments elided.
 * @see `findReadme()` for more details on the search process.
 */
async function collapseReadme(config, rules) {
	return collapseReadmeFiles(undefined, undefined, undefined, config, rules);
}
831
/**
 * Collapses MDAT readme comments in one or more Markdown files.
 * When no files are given, searches up for the closest readme.md.
 * @see `findReadme()` for more details on the search process.
 */
async function collapseReadmeFiles(files, name, output, config, rules) {
	// Avoid mutating the caller's argument; resolve the target list locally.
	const targetFiles = files ?? (await findReadmeThrows());
	return processFiles(targetFiles, loadConfigReadme, getCleanProcessor, name, output, config, rules);
}
840
+
841
+ //#endregion
842
+ export { checkFiles, checkReadme, checkReadmeFiles, checkReadmeString, checkString, collapseFiles, collapseReadme, collapseReadmeFiles, collapseString, expandFiles, expandReadme, expandReadmeFiles, expandReadmeString, expandString, loadConfig, loadConfigReadme, mergeConfigs };