@mono-labs/cli 0.0.207 → 0.0.209

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,205 +1,500 @@
  "use strict";
- // scripts/generate-readme.mjs
- // Node >= 18 recommended
  var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
+ // scripts/generate-readme.mjs
+ // Node >= 18 recommended
  const node_fs_1 = require("node:fs");
  const node_path_1 = __importDefault(require("node:path"));
  const generate_docs_js_1 = require("./generate-docs.js");
- /* -------------------------------------------------------------------------- */
- /* Constants */
- /* -------------------------------------------------------------------------- */
  const REPO_ROOT = node_path_1.default.resolve(process.cwd());
  const MONO_DIR = node_path_1.default.join(REPO_ROOT, '.mono');
  const ROOT_PKG_JSON = node_path_1.default.join(REPO_ROOT, 'package.json');
  const OUTPUT_PATH = node_path_1.default.join(REPO_ROOT, 'docs');
  const OUTPUT_README = node_path_1.default.join(OUTPUT_PATH, 'command-line.md');
- /* -------------------------------------------------------------------------- */
- /* Utils */
- /* -------------------------------------------------------------------------- */
  async function ensureParentDir(filePath) {
- await node_fs_1.promises.mkdir(node_path_1.default.dirname(filePath), { recursive: true });
+ const dir = node_path_1.default.dirname(filePath);
+ console.log(`[ensureParentDir] Ensuring directory:`, dir);
+ await node_fs_1.promises.mkdir(dir, { recursive: true });
  }
+ // ---------- utils ----------
  async function exists(p) {
  try {
  await node_fs_1.promises.access(p);
+ // Log existence check
+ console.log(`[exists] Path exists:`, p);
  return true;
  }
  catch {
+ console.log(`[exists] Path does NOT exist:`, p);
  return false;
  }
  }
  function isObject(v) {
- return typeof v === 'object' && v !== null && !Array.isArray(v);
+ return v !== null && typeof v === 'object' && !Array.isArray(v);
  }
  function toPosix(p) {
  return p.split(node_path_1.default.sep).join('/');
  }
  async function readJson(filePath) {
- return JSON.parse(await node_fs_1.promises.readFile(filePath, 'utf8'));
+ console.log(`[readJson] Reading JSON file:`, filePath);
+ const raw = await node_fs_1.promises.readFile(filePath, 'utf8');
+ try {
+ const parsed = JSON.parse(raw);
+ console.log(`[readJson] Successfully parsed:`, filePath);
+ return parsed;
+ }
+ catch (err) {
+ console.error(`[readJson] Failed to parse JSON:`, filePath, err);
+ throw err;
+ }
  }
  async function listDir(dir) {
- return node_fs_1.promises.readdir(dir, { withFileTypes: true });
+ console.log(`[listDir] Listing directory:`, dir);
+ const entries = await node_fs_1.promises.readdir(dir, { withFileTypes: true });
+ console.log(`[listDir] Found ${entries.length} entries in:`, dir);
+ return entries;
  }
  function normalizeWorkspacePatterns(workspacesField) {
  if (Array.isArray(workspacesField))
  return workspacesField;
  if (isObject(workspacesField) &&
- Array.isArray(workspacesField.packages)) {
+ Array.isArray(workspacesField.packages))
  return workspacesField.packages;
- }
  return [];
  }
  function mdEscapeInline(s) {
- return s.replaceAll('`', '\\`');
+ return String(s ?? '').replaceAll('`', '\`');
  }
  function indentLines(s, spaces = 2) {
  const pad = ' '.repeat(spaces);
- return s
+ return String(s ?? '')
  .split('\n')
  .map((l) => pad + l)
  .join('\n');
  }
- /* -------------------------------------------------------------------------- */
- /* Workspace globbing */
- /* -------------------------------------------------------------------------- */
+ // ---------- workspace glob matching (supports *, **, and plain segments) ----------
  function matchSegment(patternSeg, name) {
  if (patternSeg === '*')
  return true;
  if (!patternSeg.includes('*'))
  return patternSeg === name;
  const escaped = patternSeg.replace(/[.+?^${}()|[\]\\]/g, '\\$&');
- return new RegExp(`^${escaped.replaceAll('*', '.*')}$`).test(name);
+ const regex = new RegExp('^' + escaped.replaceAll('*', '.*') + '$');
+ return regex.test(name);
  }
  async function expandWorkspacePattern(root, pattern) {
+ console.log(`[expandWorkspacePattern] Expanding pattern:`, pattern, `from root:`, root);
  const segs = toPosix(pattern).split('/').filter(Boolean);
- async function expandFrom(dir, idx) {
- if (idx >= segs.length)
+ async function expandFrom(dir, segIndex) {
+ console.log(`[expandFrom] Directory:`, dir, `Segment index:`, segIndex);
+ if (segIndex >= segs.length)
  return [dir];
+ const seg = segs[segIndex];
+ console.log(`[expandFrom] Segment:`, seg);
+ if (seg === '**') {
+ const results = [];
+ results.push(...(await expandFrom(dir, segIndex + 1)));
+ const entries = await node_fs_1.promises
+ .readdir(dir, { withFileTypes: true })
+ .catch(() => []);
+ console.log(`[expandFrom] '**' entries in ${dir}:`, entries.map((e) => e.name));
+ for (const e of entries) {
+ if (!e.isDirectory())
+ continue;
+ console.log(`[expandFrom] Recursing into subdir:`, node_path_1.default.join(dir, e.name));
+ results.push(...(await expandFrom(node_path_1.default.join(dir, e.name), segIndex)));
+ }
+ return results;
+ }
  const entries = await node_fs_1.promises
  .readdir(dir, { withFileTypes: true })
  .catch(() => []);
- const seg = segs[idx];
- if (seg === '**') {
- const nested = await Promise.all(entries
- .filter((e) => e.isDirectory())
- .map((e) => expandFrom(node_path_1.default.join(dir, e.name), idx)));
- return [...(await expandFrom(dir, idx + 1)), ...nested.flat()];
+ console.log(`[expandFrom] Entries in ${dir}:`, entries.map((e) => e.name));
+ const results = [];
+ for (const e of entries) {
+ if (!e.isDirectory())
+ continue;
+ if (!matchSegment(seg, e.name))
+ continue;
+ console.log(`[expandFrom] Matched segment '${seg}' with directory:`, e.name);
+ results.push(...(await expandFrom(node_path_1.default.join(dir, e.name), segIndex + 1)));
  }
- const nested = await Promise.all(entries
- .filter((e) => e.isDirectory() && matchSegment(seg, e.name))
- .map((e) => expandFrom(node_path_1.default.join(dir, e.name), idx + 1)));
- return nested.flat();
+ return results;
  }
  const dirs = await expandFrom(root, 0);
- const pkgDirs = (await Promise.all(dirs.map(async (d) => (await exists(node_path_1.default.join(d, 'package.json'))) ? d : null))).filter(Boolean);
+ console.log(`[expandWorkspacePattern] Expanded directories:`, dirs);
+ const pkgDirs = [];
+ for (const d of dirs) {
+ const pkgPath = node_path_1.default.join(d, 'package.json');
+ if (await exists(pkgPath)) {
+ console.log(`[expandWorkspacePattern] Found package.json:`, pkgPath);
+ pkgDirs.push(d);
+ }
+ else {
+ console.log(`[expandWorkspacePattern] No package.json in:`, d);
+ }
+ }
+ console.log(`[expandWorkspacePattern] Final package directories:`, pkgDirs);
  return [...new Set(pkgDirs)];
  }
  async function findWorkspacePackageDirs(repoRoot, workspacePatterns) {
- const resolved = await Promise.all(workspacePatterns.map((p) => expandWorkspacePattern(repoRoot, p)));
- return [...new Set(resolved.flat())];
+ console.log(`[findWorkspacePackageDirs] repoRoot:`, repoRoot, `workspacePatterns:`, workspacePatterns);
+ const dirs = [];
+ for (const pat of workspacePatterns) {
+ console.log(`[findWorkspacePackageDirs] Expanding pattern:`, pat);
+ const expanded = await expandWorkspacePattern(repoRoot, pat);
+ console.log(`[findWorkspacePackageDirs] Expanded dirs for pattern '${pat}':`, expanded);
+ dirs.push(...expanded);
+ }
+ const uniqueDirs = [...new Set(dirs)];
+ console.log(`[findWorkspacePackageDirs] Final unique package dirs:`, uniqueDirs);
+ return uniqueDirs;
  }
- /* -------------------------------------------------------------------------- */
- /* Mono config + commands */
- /* -------------------------------------------------------------------------- */
+ // ---------- .mono parsing ----------
  async function readMonoConfig() {
  const configPath = node_path_1.default.join(MONO_DIR, 'config.json');
- if (!(await exists(configPath)))
+ console.log(`[readMonoConfig] Looking for mono config at:`, configPath);
+ if (!(await exists(configPath))) {
+ console.log(`[readMonoConfig] No mono config found.`);
  return null;
- return {
- path: configPath,
- config: await readJson(configPath),
- };
+ }
+ try {
+ const config = await readJson(configPath);
+ console.log(`[readMonoConfig] Loaded mono config.`);
+ return { path: configPath, config };
+ }
+ catch (err) {
+ console.error(`[readMonoConfig] Failed to load mono config:`, err);
+ return null;
+ }
  }
  function commandNameFromFile(filePath) {
  return node_path_1.default.basename(filePath).replace(/\.json$/i, '');
  }
  async function readMonoCommands() {
- if (!(await exists(MONO_DIR)))
+ console.log(`[readMonoCommands] Reading mono commands from:`, MONO_DIR);
+ if (!(await exists(MONO_DIR))) {
+ console.log(`[readMonoCommands] Mono directory does not exist.`);
  return [];
+ }
  const entries = await listDir(MONO_DIR);
- const commands = await Promise.all(entries
- .filter((e) => e.isFile() && e.name.endsWith('.json') && e.name !== 'config.json')
- .map(async (e) => {
- const file = node_path_1.default.join(MONO_DIR, e.name);
- const json = await readJson(file);
- return {
- name: commandNameFromFile(file),
- file,
- json,
- };
- }));
- return commands.sort((a, b) => a.name.localeCompare(b.name));
+ const jsonFiles = entries
+ .filter((e) => e.isFile() && e.name.toLowerCase().endsWith('.json'))
+ .map((e) => node_path_1.default.join(MONO_DIR, e.name))
+ .filter((p) => node_path_1.default.basename(p).toLowerCase() !== 'config.json');
+ console.log(`[readMonoCommands] Found JSON files:`, jsonFiles);
+ const commands = [];
+ for (const file of jsonFiles) {
+ try {
+ console.log(`[readMonoCommands] Reading command file:`, file);
+ const j = await readJson(file);
+ commands.push({
+ name: commandNameFromFile(file),
+ file,
+ json: j,
+ });
+ console.log(`[readMonoCommands] Successfully loaded command:`, commandNameFromFile(file));
+ }
+ catch (err) {
+ console.error(`[readMonoCommands] Failed to load command file:`, file, err);
+ // skip invalid json
+ }
+ }
+ commands.sort((a, b) => a.name.localeCompare(b.name));
+ console.log(`[readMonoCommands] Final sorted commands:`, commands.map((c) => c.name));
+ return commands;
  }
- /* -------------------------------------------------------------------------- */
- /* Options parsing */
- /* -------------------------------------------------------------------------- */
  function parseOptionsSchema(optionsObj) {
+ // New structure supports:
+ // - optionKey: { type: "string", default, options: [], allowAll, shortcut, description }
+ // - boolean toggle: { shortcut, description } (no type)
  if (!isObject(optionsObj))
  return [];
- return Object.entries(optionsObj)
- .map(([key, raw]) => {
+ const entries = Object.entries(optionsObj).map(([key, raw]) => {
  const o = isObject(raw) ? raw : {};
- const hasType = typeof o.type === 'string';
+ const hasType = typeof o.type === 'string' && o.type.trim().length > 0;
+ const isBoolToggle = !hasType; // in your examples, booleans omit `type`
  return {
  key,
- kind: hasType ? 'value' : 'boolean',
- type: hasType ? o.type : 'boolean',
+ kind: isBoolToggle ? 'boolean' : 'value',
+ type: hasType ? String(o.type) : 'boolean',
  description: typeof o.description === 'string' ? o.description : '',
  shortcut: typeof o.shortcut === 'string' ? o.shortcut : '',
  default: o.default,
- allowed: Array.isArray(o.options) ?
- o.options.filter((x) => typeof x === 'string')
- : null,
+ allowed: Array.isArray(o.options) ? o.options : null,
  allowAll: o.allowAll === true,
  };
- })
- .sort((a, b) => a.key.localeCompare(b.key));
+ });
+ entries.sort((a, b) => a.key.localeCompare(b.key));
+ return entries;
  }
- /* -------------------------------------------------------------------------- */
- /* Main */
- /* -------------------------------------------------------------------------- */
+ function buildUsageExample(commandName, cmdJson, options) {
+ const arg = cmdJson?.argument;
+ const hasArg = isObject(arg);
+ const argToken = hasArg ? `<${commandName}-arg>` : '';
+ // choose a representative value option to show
+ const valueOpts = options.filter((o) => o.kind === 'value');
+ const boolOpts = options.filter((o) => o.kind === 'boolean');
+ const exampleParts = [`yarn mono ${commandName}`];
+ if (argToken)
+ exampleParts.push(argToken);
+ // include at most 2 value options and 1 boolean in the example for readability
+ for (const o of valueOpts.slice(0, 2)) {
+ const flag = `--${o.key}`;
+ const val = o.default !== undefined ? o.default : (o.allowed?.[0] ?? '<value>');
+ exampleParts.push(`${flag} ${val}`);
+ }
+ if (boolOpts.length) {
+ exampleParts.push(`--${boolOpts[0].key}`);
+ }
+ return exampleParts.join(' ');
+ }
+ function formatMonoConfigSection(monoConfig) {
+ const lines = [];
+ lines.push('## Mono configuration');
+ lines.push('');
+ if (!monoConfig) {
+ lines.push('_No `.mono/config.json` found._');
+ return lines.join('\n');
+ }
+ const c = monoConfig.config;
+ lines.push(`Source: \`${toPosix(node_path_1.default.relative(REPO_ROOT, monoConfig.path))}\``);
+ lines.push('');
+ if (Array.isArray(c.envMap) && c.envMap.length) {
+ lines.push('### envMap');
+ lines.push('');
+ lines.push('- ' + c.envMap.map((x) => `\`${mdEscapeInline(x)}\``).join(', '));
+ lines.push('');
+ }
+ const pkgMaps = c?.workspace?.packageMaps;
+ if (pkgMaps && isObject(pkgMaps) && Object.keys(pkgMaps).length) {
+ lines.push('### Workspace aliases (packageMaps)');
+ lines.push('');
+ const entries = Object.entries(pkgMaps).sort(([a], [b]) => a.localeCompare(b));
+ for (const [alias, target] of entries) {
+ lines.push(`- \`${mdEscapeInline(alias)}\` → \`${mdEscapeInline(String(target))}\``);
+ }
+ lines.push('');
+ }
+ const pre = c?.workspace?.preactions;
+ if (Array.isArray(pre) && pre.length) {
+ lines.push('### Global preactions');
+ lines.push('');
+ lines.push('```bash');
+ for (const p of pre)
+ lines.push(String(p));
+ lines.push('```');
+ lines.push('');
+ }
+ if (typeof c.prodFlag === 'string' && c.prodFlag.trim()) {
+ lines.push('### prodFlag');
+ lines.push('');
+ lines.push(`Production flag keyword: \`${mdEscapeInline(c.prodFlag.trim())}\``);
+ lines.push('');
+ }
+ return lines.join('\n');
+ }
+ function formatMonoCommandsSection(commands) {
+ const lines = [];
+ lines.push('## Mono commands');
+ lines.push('');
+ lines.push('Generated from `.mono/*.json` (excluding `config.json`). Each filename becomes a command:');
+ lines.push('');
+ lines.push('```bash');
+ lines.push('yarn mono <command> [argument] [--options]');
+ lines.push('```');
+ lines.push('');
+ if (!commands.length) {
+ lines.push('_No mono command JSON files found._');
+ return lines.join('\n');
+ }
+ // Index
+ lines.push('### Command index');
+ lines.push('');
+ for (const c of commands) {
+ const desc = typeof c.json?.description === 'string' ? c.json.description.trim() : '';
+ const suffix = desc ? ` — ${desc}` : '';
+ lines.push(`- [\`${mdEscapeInline(c.name)}\`](#mono-command-${mdEscapeInline(c.name).toLowerCase()})${suffix}`);
+ }
+ lines.push('');
+ for (const c of commands) {
+ const j = c.json || {};
+ const rel = toPosix(node_path_1.default.relative(REPO_ROOT, c.file));
+ const anchor = `mono-command-${c.name.toLowerCase()}`;
+ const desc = typeof j.description === 'string' ? j.description.trim() : '';
+ const arg = j.argument;
+ const options = parseOptionsSchema(j.options);
+ lines.push('---');
+ lines.push(`### Mono command: ${c.name}`);
+ lines.push(`<a id="${anchor}"></a>`);
+ lines.push('');
+ lines.push(`Source: \`${rel}\``);
+ lines.push('');
+ if (desc) {
+ lines.push(`**Description:** ${mdEscapeInline(desc)}`);
+ lines.push('');
+ }
+ // Usage
+ lines.push('**Usage**');
+ lines.push('');
+ lines.push('```bash');
+ lines.push(`yarn mono ${c.name}${isObject(arg) ? ` <${c.name}-arg>` : ''} [--options]`);
+ lines.push('```');
+ lines.push('');
+ lines.push('Example:');
+ lines.push('');
+ lines.push('```bash');
+ lines.push(buildUsageExample(c.name, j, options));
+ lines.push('```');
+ lines.push('');
+ // Argument
+ if (isObject(arg)) {
+ lines.push('**Argument**');
+ lines.push('');
+ const bits = [];
+ if (typeof arg.type === 'string')
+ bits.push(`type: \`${mdEscapeInline(arg.type)}\``);
+ if (arg.default !== undefined)
+ bits.push(`default: \`${mdEscapeInline(String(arg.default))}\``);
+ if (typeof arg.description === 'string')
+ bits.push(mdEscapeInline(arg.description));
+ lines.push(`- ${bits.join(' • ') || '_(no details)_'} `);
+ lines.push('');
+ }
+ // Options
+ if (options.length) {
+ lines.push('**Options**');
+ lines.push('');
+ lines.push('| Option | Type | Shortcut | Default | Allowed | Notes |');
+ lines.push('|---|---:|:---:|---:|---|---|');
+ for (const o of options) {
+ const optCol = o.kind === 'boolean' ?
+ `\`--${mdEscapeInline(o.key)}\``
+ : `\`--${mdEscapeInline(o.key)} <${mdEscapeInline(o.key)}>\``;
+ const typeCol = `\`${mdEscapeInline(o.type)}\``;
+ const shortCol = o.shortcut ? `\`-${mdEscapeInline(o.shortcut)}\`` : '';
+ const defCol = o.default !== undefined ? `\`${mdEscapeInline(o.default)}\`` : '';
+ const allowedCol = o.allowed ?
+ o.allowed.map((x) => `\`${mdEscapeInline(x)}\``).join(', ')
+ : '';
+ const notes = [
+ o.allowAll ? 'allowAll' : '',
+ o.description ? mdEscapeInline(o.description) : '',
+ ]
+ .filter(Boolean)
+ .join(' • ');
+ lines.push(`| ${optCol} | ${typeCol} | ${shortCol} | ${defCol} | ${allowedCol} | ${notes} |`);
+ }
+ lines.push('');
+ }
+ // Environments
+ if (j.environments &&
+ isObject(j.environments) &&
+ Object.keys(j.environments).length) {
+ lines.push('**Environment Variables**');
+ lines.push('');
+ const envs = Object.entries(j.environments).sort(([a], [b]) => a.localeCompare(b));
+ for (const [envName, envObj] of envs) {
+ lines.push(`- \`${mdEscapeInline(envName)}\``);
+ if (isObject(envObj) && Object.keys(envObj).length) {
+ const kv = Object.entries(envObj).sort(([a], [b]) => a.localeCompare(b));
+ lines.push(indentLines(kv
+ .map(([k, v]) => `- \`${mdEscapeInline(k)}\` = \`${mdEscapeInline(String(v))}\``)
+ .join('\n'), 2));
+ }
+ }
+ lines.push('');
+ }
+ // preactions/actions
+ if (Array.isArray(j.preactions) && j.preactions.length) {
+ lines.push('**Preactions**');
+ lines.push('');
+ lines.push('```bash');
+ for (const p of j.preactions)
+ lines.push(String(p));
+ lines.push('```');
+ lines.push('');
+ }
+ if (Array.isArray(j.actions) && j.actions.length) {
+ lines.push('**Actions**');
+ lines.push('');
+ lines.push('```bash');
+ for (const a of j.actions)
+ lines.push(String(a));
+ lines.push('```');
+ lines.push('');
+ }
+ }
+ return lines.join('\n');
+ }
+ function collectScripts(packages) {
+ const scriptToPackages = new Map();
+ for (const p of packages) {
+ for (const scriptName of Object.keys(p.scripts || {})) {
+ if (!scriptToPackages.has(scriptName))
+ scriptToPackages.set(scriptName, []);
+ scriptToPackages.get(scriptName).push(p.name);
+ }
+ }
+ return scriptToPackages;
+ }
+ // ---------- main ----------
  async function main() {
- if (!(await exists(ROOT_PKG_JSON))) {
+ if (!(await exists(ROOT_PKG_JSON)))
  throw new Error(`Missing: ${ROOT_PKG_JSON}`);
- }
  await ensureParentDir(OUTPUT_PATH);
  const rootPkg = await readJson(ROOT_PKG_JSON);
  const workspacePatterns = normalizeWorkspacePatterns(rootPkg.workspaces);
  const monoConfig = await readMonoConfig();
  const monoCommands = await readMonoCommands();
  const pkgDirs = await findWorkspacePackageDirs(REPO_ROOT, workspacePatterns);
- const packages = await Promise.all(pkgDirs.map(async (dir) => {
- const pj = await readJson(node_path_1.default.join(dir, 'package.json'));
- return {
- name: pj.name ||
- toPosix(node_path_1.default.relative(REPO_ROOT, dir)) ||
- node_path_1.default.basename(dir),
- dir,
- scripts: pj.scripts ?? {},
- };
- }));
+ console.log(`[main] Package directories found:`, pkgDirs);
+ const packages = [];
+ for (const dir of pkgDirs) {
+ try {
+ const pkgPath = node_path_1.default.join(dir, 'package.json');
+ console.log(`[main] Reading package.json:`, pkgPath);
+ const pj = await readJson(pkgPath);
+ packages.push({
+ name: pj.name ||
+ toPosix(node_path_1.default.relative(REPO_ROOT, dir)) ||
+ node_path_1.default.basename(dir),
+ dir,
+ scripts: pj.scripts || {},
+ });
+ console.log(`[main] Loaded package:`, pj.name || dir);
+ }
+ catch (err) {
+ console.error(`[main] Failed to load package.json for:`, dir, err);
+ // skip
+ }
+ }
  const parts = [];
- parts.push(`# Mono Command-Line Reference
-
+ parts.push(`# ⚙️ Command Line Reference
+
  > Generated by \`scripts/generate-readme.mjs\`.
  > Update \`.mono/config.json\`, \`.mono/*.json\`, and workspace package scripts to change this output.
 
  `);
- const docsIndex = await (0, generate_docs_js_1.generateDocsIndex)({
+ parts.push(formatMonoConfigSection(monoConfig));
+ parts.push('');
+ parts.push(formatMonoCommandsSection(monoCommands));
+ parts.push('');
+ const val = await (0, generate_docs_js_1.generateDocsIndex)({
  docsDir: node_path_1.default.join(REPO_ROOT, 'docs'),
  excludeFile: 'command-line.md',
  });
- parts.push(docsIndex);
+ val.split('\n').forEach((line) => parts.push(line));
+ await ensureParentDir(OUTPUT_README);
  await node_fs_1.promises.writeFile(OUTPUT_README, parts.join('\n'), 'utf8');
- console.log(`Generated: ${OUTPUT_README}`);
- console.log(`- mono config: ${monoConfig ? 'yes' : 'no'}`);
- console.log(`- mono commands: ${monoCommands.length}`);
- console.log(`- workspace packages: ${packages.length}`);
+ console.log(`[main] Generated: ${OUTPUT_README}`);
+ console.log(`[main] mono config: ${monoConfig ? 'yes' : 'no'}`);
+ console.log(`[main] mono commands: ${monoCommands.length}`);
+ console.log(`[main] workspace packages: ${packages.length}`);
  }
  main().catch((err) => {
- console.error(err);
+ console.error(err?.stack || String(err));
  process.exitCode = 1;
  });
@@ -54,6 +54,6 @@ async function generateDocsIndex({ docsDir, excludeFile, }) {
  links.sort((a, b) => a.localeCompare(b));
  // Append Back to Readme (hardcoded)
  links.push('');
- links.push('[Back to Readme](../README.md)');
+ links.push('🏠 ← [Back to README](../README.md)');
  return links.join('\n');
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@mono-labs/cli",
- "version": "0.0.207",
+ "version": "0.0.209",
  "description": "A CLI tool for building and deploying projects",
  "main": "dist/index.js",
  "types": "dist/types.d.ts",