bmad-method 5.0.0-beta.2 → 5.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. package/.github/ISSUE_TEMPLATE/bug_report.md +3 -3
  2. package/.github/ISSUE_TEMPLATE/feature_request.md +3 -3
  3. package/.github/workflows/discord.yaml +11 -2
  4. package/.github/workflows/format-check.yaml +42 -0
  5. package/.github/workflows/manual-release.yaml +173 -0
  6. package/.husky/pre-commit +3 -0
  7. package/.vscode/settings.json +26 -1
  8. package/CHANGELOG.md +0 -11
  9. package/README.md +2 -0
  10. package/bmad-core/agent-teams/team-all.yaml +1 -1
  11. package/bmad-core/agents/bmad-orchestrator.md +1 -1
  12. package/bmad-core/agents/dev.md +4 -4
  13. package/bmad-core/data/bmad-kb.md +1 -1
  14. package/bmad-core/data/test-levels-framework.md +12 -12
  15. package/bmad-core/tasks/facilitate-brainstorming-session.md +1 -1
  16. package/bmad-core/tasks/nfr-assess.md +10 -10
  17. package/bmad-core/tasks/qa-gate.md +23 -23
  18. package/bmad-core/tasks/review-story.md +18 -18
  19. package/bmad-core/tasks/risk-profile.md +25 -25
  20. package/bmad-core/tasks/test-design.md +9 -9
  21. package/bmad-core/tasks/trace-requirements.md +21 -21
  22. package/bmad-core/templates/architecture-tmpl.yaml +49 -49
  23. package/bmad-core/templates/brainstorming-output-tmpl.yaml +5 -5
  24. package/bmad-core/templates/brownfield-architecture-tmpl.yaml +31 -31
  25. package/bmad-core/templates/brownfield-prd-tmpl.yaml +13 -13
  26. package/bmad-core/templates/competitor-analysis-tmpl.yaml +19 -6
  27. package/bmad-core/templates/front-end-architecture-tmpl.yaml +21 -9
  28. package/bmad-core/templates/front-end-spec-tmpl.yaml +24 -24
  29. package/bmad-core/templates/fullstack-architecture-tmpl.yaml +122 -104
  30. package/bmad-core/templates/market-research-tmpl.yaml +2 -2
  31. package/bmad-core/templates/prd-tmpl.yaml +9 -9
  32. package/bmad-core/templates/project-brief-tmpl.yaml +4 -4
  33. package/bmad-core/templates/qa-gate-tmpl.yaml +9 -9
  34. package/bmad-core/templates/story-tmpl.yaml +12 -12
  35. package/bmad-core/workflows/brownfield-fullstack.yaml +9 -9
  36. package/bmad-core/workflows/brownfield-service.yaml +1 -1
  37. package/bmad-core/workflows/brownfield-ui.yaml +1 -1
  38. package/bmad-core/workflows/greenfield-fullstack.yaml +1 -1
  39. package/bmad-core/workflows/greenfield-service.yaml +1 -1
  40. package/bmad-core/workflows/greenfield-ui.yaml +1 -1
  41. package/common/utils/bmad-doc-template.md +5 -5
  42. package/dist/agents/analyst.txt +28 -15
  43. package/dist/agents/architect.txt +220 -190
  44. package/dist/agents/bmad-master.txt +298 -255
  45. package/dist/agents/bmad-orchestrator.txt +1 -1
  46. package/dist/agents/pm.txt +20 -20
  47. package/dist/agents/po.txt +11 -11
  48. package/dist/agents/qa.txt +275 -618
  49. package/dist/agents/sm.txt +11 -11
  50. package/dist/agents/ux-expert.txt +23 -23
  51. package/dist/expansion-packs/bmad-2d-phaser-game-dev/agents/game-designer.txt +109 -109
  52. package/dist/expansion-packs/bmad-2d-phaser-game-dev/agents/game-developer.txt +75 -77
  53. package/dist/expansion-packs/bmad-2d-phaser-game-dev/agents/game-sm.txt +41 -41
  54. package/dist/expansion-packs/bmad-2d-phaser-game-dev/teams/phaser-2d-nodejs-game-team.txt +483 -474
  55. package/dist/expansion-packs/bmad-2d-unity-game-dev/agents/game-architect.txt +1 -1
  56. package/dist/expansion-packs/bmad-2d-unity-game-dev/agents/game-designer.txt +149 -149
  57. package/dist/expansion-packs/bmad-2d-unity-game-dev/agents/game-sm.txt +20 -20
  58. package/dist/expansion-packs/bmad-2d-unity-game-dev/teams/unity-2d-game-team.txt +371 -358
  59. package/dist/expansion-packs/bmad-infrastructure-devops/agents/infra-devops-platform.txt +25 -25
  60. package/dist/teams/team-all.txt +581 -881
  61. package/dist/teams/team-fullstack.txt +316 -273
  62. package/dist/teams/team-ide-minimal.txt +276 -619
  63. package/dist/teams/team-no-ui.txt +281 -238
  64. package/docs/versioning-and-releases.md +114 -44
  65. package/eslint.config.mjs +119 -0
  66. package/expansion-packs/Complete AI Agent System - Blank Templates & Google Cloud Setup/PART 1 - Google Cloud Vertex AI Setup Documentation/1.4 Deployment Configuration/1.4.2 - cloudbuild.yaml +26 -26
  67. package/expansion-packs/bmad-2d-phaser-game-dev/agents/game-developer.md +4 -4
  68. package/expansion-packs/bmad-2d-phaser-game-dev/agents/game-sm.md +1 -1
  69. package/expansion-packs/bmad-2d-phaser-game-dev/data/development-guidelines.md +26 -28
  70. package/expansion-packs/bmad-2d-phaser-game-dev/templates/game-architecture-tmpl.yaml +50 -50
  71. package/expansion-packs/bmad-2d-phaser-game-dev/templates/game-brief-tmpl.yaml +23 -23
  72. package/expansion-packs/bmad-2d-phaser-game-dev/templates/game-design-doc-tmpl.yaml +24 -24
  73. package/expansion-packs/bmad-2d-phaser-game-dev/templates/game-story-tmpl.yaml +42 -42
  74. package/expansion-packs/bmad-2d-phaser-game-dev/templates/level-design-doc-tmpl.yaml +65 -65
  75. package/expansion-packs/bmad-2d-phaser-game-dev/workflows/game-dev-greenfield.yaml +5 -5
  76. package/expansion-packs/bmad-2d-phaser-game-dev/workflows/game-prototype.yaml +1 -1
  77. package/expansion-packs/bmad-2d-unity-game-dev/agents/game-developer.md +3 -3
  78. package/expansion-packs/bmad-2d-unity-game-dev/data/bmad-kb.md +1 -1
  79. package/expansion-packs/bmad-2d-unity-game-dev/templates/game-brief-tmpl.yaml +23 -23
  80. package/expansion-packs/bmad-2d-unity-game-dev/templates/game-design-doc-tmpl.yaml +63 -63
  81. package/expansion-packs/bmad-2d-unity-game-dev/templates/game-story-tmpl.yaml +20 -20
  82. package/expansion-packs/bmad-2d-unity-game-dev/templates/level-design-doc-tmpl.yaml +65 -65
  83. package/expansion-packs/bmad-2d-unity-game-dev/workflows/game-dev-greenfield.yaml +5 -5
  84. package/expansion-packs/bmad-2d-unity-game-dev/workflows/game-prototype.yaml +1 -1
  85. package/expansion-packs/bmad-infrastructure-devops/templates/infrastructure-architecture-tmpl.yaml +20 -20
  86. package/expansion-packs/bmad-infrastructure-devops/templates/infrastructure-platform-from-arch-tmpl.yaml +7 -7
  87. package/package.json +62 -39
  88. package/prettier.config.mjs +32 -0
  89. package/release_notes.md +30 -0
  90. package/tools/bmad-npx-wrapper.js +10 -10
  91. package/tools/builders/web-builder.js +124 -130
  92. package/tools/bump-all-versions.js +42 -33
  93. package/tools/bump-expansion-version.js +23 -16
  94. package/tools/cli.js +10 -12
  95. package/tools/flattener/aggregate.js +10 -10
  96. package/tools/flattener/binary.js +44 -17
  97. package/tools/flattener/discovery.js +19 -18
  98. package/tools/flattener/files.js +6 -6
  99. package/tools/flattener/ignoreRules.js +125 -125
  100. package/tools/flattener/main.js +201 -304
  101. package/tools/flattener/projectRoot.js +75 -73
  102. package/tools/flattener/prompts.js +9 -9
  103. package/tools/flattener/stats.helpers.js +131 -67
  104. package/tools/flattener/stats.js +3 -3
  105. package/tools/flattener/test-matrix.js +201 -193
  106. package/tools/flattener/xml.js +33 -31
  107. package/tools/installer/bin/bmad.js +130 -89
  108. package/tools/installer/config/ide-agent-config.yaml +1 -1
  109. package/tools/installer/config/install.config.yaml +2 -2
  110. package/tools/installer/lib/config-loader.js +46 -42
  111. package/tools/installer/lib/file-manager.js +91 -113
  112. package/tools/installer/lib/ide-base-setup.js +57 -56
  113. package/tools/installer/lib/ide-setup.js +375 -343
  114. package/tools/installer/lib/installer.js +875 -714
  115. package/tools/installer/lib/memory-profiler.js +54 -53
  116. package/tools/installer/lib/module-manager.js +19 -15
  117. package/tools/installer/lib/resource-locator.js +26 -28
  118. package/tools/installer/package.json +19 -19
  119. package/tools/lib/dependency-resolver.js +26 -30
  120. package/tools/lib/yaml-utils.js +7 -7
  121. package/tools/preview-release-notes.js +66 -0
  122. package/tools/shared/bannerArt.js +3 -3
  123. package/tools/sync-installer-version.js +7 -9
  124. package/tools/update-expansion-version.js +14 -15
  125. package/tools/upgraders/v3-to-v4-upgrader.js +203 -294
  126. package/tools/version-bump.js +41 -26
  127. package/tools/yaml-format.js +56 -43
  128. package/.github/workflows/promote-to-stable.yml +0 -144
  129. package/.github/workflows/release.yaml +0 -60
  130. package/.releaserc.json +0 -21
  131. package/tools/semantic-release-sync-installer.js +0 -30
package/tools/flattener/projectRoot.js

@@ -1,10 +1,10 @@
- const fs = require("fs-extra");
- const path = require("node:path");
+ const fs = require('fs-extra');
+ const path = require('node:path');

  // Deno/Node compatibility: explicitly import process
- const process = require("node:process");
- const { execFile } = require("node:child_process");
- const { promisify } = require("node:util");
+ const process = require('node:process');
+ const { execFile } = require('node:child_process');
+ const { promisify } = require('node:util');

  const execFileAsync = promisify(execFile);

  // Simple memoization across calls (keyed by realpath of startDir)
@@ -18,7 +18,7 @@ async function _tryRun(cmd, args, cwd, timeoutMs = 500) {
  windowsHide: true,
  maxBuffer: 1024 * 1024,
  });
- const out = String(stdout || "").trim();
+ const out = String(stdout || '').trim();
  return out || null;
  } catch {
  return null;
@@ -27,15 +27,17 @@ async function _tryRun(cmd, args, cwd, timeoutMs = 500) {

  async function _detectVcsTopLevel(startDir) {
  // Run common VCS root queries in parallel; ignore failures
- const gitP = _tryRun("git", ["rev-parse", "--show-toplevel"], startDir);
- const hgP = _tryRun("hg", ["root"], startDir);
+ const gitP = _tryRun('git', ['rev-parse', '--show-toplevel'], startDir);
+ const hgP = _tryRun('hg', ['root'], startDir);
  const svnP = (async () => {
- const show = await _tryRun("svn", ["info", "--show-item", "wc-root"], startDir);
+ const show = await _tryRun('svn', ['info', '--show-item', 'wc-root'], startDir);
  if (show) return show;
- const info = await _tryRun("svn", ["info"], startDir);
+ const info = await _tryRun('svn', ['info'], startDir);
  if (info) {
- const line = info.split(/\r?\n/).find((l) => l.toLowerCase().startsWith("working copy root path:"));
- if (line) return line.split(":").slice(1).join(":").trim();
+ const line = info
+ .split(/\r?\n/)
+ .find((l) => l.toLowerCase().startsWith('working copy root path:'));
+ if (line) return line.split(':').slice(1).join(':').trim();
  }
  return null;
  })();
@@ -71,90 +73,92 @@ async function findProjectRoot(startDir) {
  const checks = [];

  const add = (rel, weight) => {
- const makePath = (d) => Array.isArray(rel) ? path.join(d, ...rel) : path.join(d, rel);
+ const makePath = (d) => (Array.isArray(rel) ? path.join(d, ...rel) : path.join(d, rel));
  checks.push({ makePath, weight });
  };

  // Highest priority: explicit sentinel markers
- add(".project-root", 110);
- add(".workspace-root", 110);
- add(".repo-root", 110);
+ add('.project-root', 110);
+ add('.workspace-root', 110);
+ add('.repo-root', 110);

  // Highest priority: VCS roots
- add(".git", 100);
- add(".hg", 95);
- add(".svn", 95);
+ add('.git', 100);
+ add('.hg', 95);
+ add('.svn', 95);

  // Monorepo/workspace indicators
- add("pnpm-workspace.yaml", 90);
- add("lerna.json", 90);
- add("turbo.json", 90);
- add("nx.json", 90);
- add("rush.json", 90);
- add("go.work", 90);
- add("WORKSPACE", 90);
- add("WORKSPACE.bazel", 90);
- add("MODULE.bazel", 90);
- add("pants.toml", 90);
+ add('pnpm-workspace.yaml', 90);
+ add('lerna.json', 90);
+ add('turbo.json', 90);
+ add('nx.json', 90);
+ add('rush.json', 90);
+ add('go.work', 90);
+ add('WORKSPACE', 90);
+ add('WORKSPACE.bazel', 90);
+ add('MODULE.bazel', 90);
+ add('pants.toml', 90);

  // Lockfiles and package-manager/top-level locks
- add("yarn.lock", 85);
- add("pnpm-lock.yaml", 85);
- add("package-lock.json", 85);
- add("bun.lockb", 85);
- add("Cargo.lock", 85);
- add("composer.lock", 85);
- add("poetry.lock", 85);
- add("Pipfile.lock", 85);
- add("Gemfile.lock", 85);
+ add('yarn.lock', 85);
+ add('pnpm-lock.yaml', 85);
+ add('package-lock.json', 85);
+ add('bun.lockb', 85);
+ add('Cargo.lock', 85);
+ add('composer.lock', 85);
+ add('poetry.lock', 85);
+ add('Pipfile.lock', 85);
+ add('Gemfile.lock', 85);

  // Build-system root indicators
- add("settings.gradle", 80);
- add("settings.gradle.kts", 80);
- add("gradlew", 80);
- add("pom.xml", 80);
- add("build.sbt", 80);
- add(["project", "build.properties"], 80);
+ add('settings.gradle', 80);
+ add('settings.gradle.kts', 80);
+ add('gradlew', 80);
+ add('pom.xml', 80);
+ add('build.sbt', 80);
+ add(['project', 'build.properties'], 80);

  // Language/project config markers
- add("deno.json", 75);
- add("deno.jsonc", 75);
- add("pyproject.toml", 75);
- add("Pipfile", 75);
- add("requirements.txt", 75);
- add("go.mod", 75);
- add("Cargo.toml", 75);
- add("composer.json", 75);
- add("mix.exs", 75);
- add("Gemfile", 75);
- add("CMakeLists.txt", 75);
- add("stack.yaml", 75);
- add("cabal.project", 75);
- add("rebar.config", 75);
- add("pubspec.yaml", 75);
- add("flake.nix", 75);
- add("shell.nix", 75);
- add("default.nix", 75);
- add(".tool-versions", 75);
- add("package.json", 74); // generic Node project (lower than lockfiles/workspaces)
+ add('deno.json', 75);
+ add('deno.jsonc', 75);
+ add('pyproject.toml', 75);
+ add('Pipfile', 75);
+ add('requirements.txt', 75);
+ add('go.mod', 75);
+ add('Cargo.toml', 75);
+ add('composer.json', 75);
+ add('mix.exs', 75);
+ add('Gemfile', 75);
+ add('CMakeLists.txt', 75);
+ add('stack.yaml', 75);
+ add('cabal.project', 75);
+ add('rebar.config', 75);
+ add('pubspec.yaml', 75);
+ add('flake.nix', 75);
+ add('shell.nix', 75);
+ add('default.nix', 75);
+ add('.tool-versions', 75);
+ add('package.json', 74); // generic Node project (lower than lockfiles/workspaces)

  // Changesets
- add([".changeset", "config.json"], 70);
- add(".changeset", 70);
+ add(['.changeset', 'config.json'], 70);
+ add('.changeset', 70);

  // Custom markers via env (comma-separated names)
  if (process.env.PROJECT_ROOT_MARKERS) {
- for (const name of process.env.PROJECT_ROOT_MARKERS.split(",").map((s) => s.trim()).filter(Boolean)) {
+ for (const name of process.env.PROJECT_ROOT_MARKERS.split(',')
+ .map((s) => s.trim())
+ .filter(Boolean)) {
  add(name, 72);
  }
  }

  /** Check for package.json with "workspaces" */
  const hasWorkspacePackageJson = async (d) => {
- const pkgPath = path.join(d, "package.json");
+ const pkgPath = path.join(d, 'package.json');
  if (!(await exists(pkgPath))) return false;
  try {
- const raw = await fs.readFile(pkgPath, "utf8");
+ const raw = await fs.readFile(pkgPath, 'utf8');
  const pkg = JSON.parse(raw);
  return Boolean(pkg && pkg.workspaces);
  } catch {
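The weight table above drives a simple "best marker wins" pass: a directory's score is the highest weight among the markers it contains. A minimal sketch of that scoring step, not code from the package (`scoreDir` is a hypothetical name; `fs.pathExists` is the fs-extra API this file already requires):

const path = require('node:path');
const fs = require('fs-extra');

// Return the highest weight among markers present in directory d, or 0 if none match.
async function scoreDir(d, markers /* [{ name, weight }] */) {
  let best = 0;
  for (const { name, weight } of markers) {
    if (await fs.pathExists(path.join(d, name))) best = Math.max(best, weight);
  }
  return best;
}

// e.g. a directory holding both package.json (74) and pnpm-lock.yaml (85)
// scores 85, so the lockfile outranks the plain package.json.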
@@ -172,9 +176,8 @@ async function findProjectRoot(startDir) {

  while (true) {
  // Special check: package.json with "workspaces"
- if (await hasWorkspacePackageJson(dir)) {
- if (!best || 90 >= best.weight) best = { dir, weight: 90 };
- }
+ if ((await hasWorkspacePackageJson(dir)) && (!best || 90 >= best.weight))
+ best = { dir, weight: 90 };

  // Evaluate all other checks in parallel
  const results = await Promise.all(
@@ -201,4 +204,3 @@ async function findProjectRoot(startDir) {
  }

  module.exports = { findProjectRoot };
-
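Since the file exports only `findProjectRoot`, a minimal usage sketch (illustrative, not from the diff; the relative require path is an assumption):

const { findProjectRoot } = require('./tools/flattener/projectRoot.js');

(async () => {
  // Optional: extra marker filenames honored at weight 72 (comma-separated),
  // per the PROJECT_ROOT_MARKERS branch shown above.
  process.env.PROJECT_ROOT_MARKERS = '.my-root-marker';
  console.log('project root:', await findProjectRoot(process.cwd()));
})();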
package/tools/flattener/prompts.js

@@ -1,11 +1,11 @@
- const os = require("node:os");
- const path = require("node:path");
- const readline = require("node:readline");
- const process = require("node:process");
+ const os = require('node:os');
+ const path = require('node:path');
+ const readline = require('node:readline');
+ const process = require('node:process');

  function expandHome(p) {
  if (!p) return p;
- if (p.startsWith("~")) return path.join(os.homedir(), p.slice(1));
+ if (p.startsWith('~')) return path.join(os.homedir(), p.slice(1));
  return p;
  }

@@ -27,16 +27,16 @@ function promptQuestion(question) {
  }

  async function promptYesNo(question, defaultYes = true) {
- const suffix = defaultYes ? " [Y/n] " : " [y/N] ";
+ const suffix = defaultYes ? ' [Y/n] ' : ' [y/N] ';
  const ans = (await promptQuestion(`${question}${suffix}`)).trim().toLowerCase();
  if (!ans) return defaultYes;
- if (["y", "yes"].includes(ans)) return true;
- if (["n", "no"].includes(ans)) return false;
+ if (['y', 'yes'].includes(ans)) return true;
+ if (['n', 'no'].includes(ans)) return false;
  return promptYesNo(question, defaultYes);
  }

  async function promptPath(question, defaultValue) {
- const prompt = `${question}${defaultValue ? ` (default: ${defaultValue})` : ""}: `;
+ const prompt = `${question}${defaultValue ? ` (default: ${defaultValue})` : ''}: `;
  const ans = (await promptQuestion(prompt)).trim();
  return expandHome(ans || defaultValue);
  }
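For context, how these helpers behave when called; a sketch under the assumption that prompts.js exports them (its exports are not shown in this hunk):

// Assumed export shape, for illustration only:
const { promptYesNo, promptPath } = require('./tools/flattener/prompts.js');

(async () => {
  // Empty input falls back to the default; anything but y/yes/n/no re-asks.
  const ok = await promptYesNo('Overwrite existing install?', false); // renders "... [y/N] "
  // Empty input falls back to defaultValue; a leading "~" expands to the home directory.
  const dest = await promptPath('Install directory', '~/bmad');
  console.log(ok, dest);
})();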
package/tools/flattener/stats.helpers.js

@@ -1,11 +1,11 @@
- "use strict";
+ 'use strict';

- const fs = require("node:fs/promises");
- const path = require("node:path");
- const zlib = require("node:zlib");
- const { Buffer } = require("node:buffer");
- const crypto = require("node:crypto");
- const cp = require("node:child_process");
+ const fs = require('node:fs/promises');
+ const path = require('node:path');
+ const zlib = require('node:zlib');
+ const { Buffer } = require('node:buffer');
+ const crypto = require('node:crypto');
+ const cp = require('node:child_process');

  const KB = 1024;
  const MB = 1024 * KB;
@@ -34,17 +34,19 @@ async function enrichAllFiles(textFiles, binaryFiles) {
  const allFiles = [];

  async function enrich(file, isBinary) {
- const ext = (path.extname(file.path) || "").toLowerCase();
- const dir = path.dirname(file.path) || ".";
+ const ext = (path.extname(file.path) || '').toLowerCase();
+ const dir = path.dirname(file.path) || '.';
  const depth = file.path.split(path.sep).filter(Boolean).length;
- const hidden = file.path.split(path.sep).some((seg) => seg.startsWith("."));
+ const hidden = file.path.split(path.sep).some((seg) => seg.startsWith('.'));
  let mtimeMs = 0;
  let isSymlink = false;
  try {
  const lst = await fs.lstat(file.absolutePath);
  mtimeMs = lst.mtimeMs;
  isSymlink = lst.isSymbolicLink();
- } catch (_) { /* ignore lstat errors during enrichment */ }
+ } catch {
+ /* ignore lstat errors during enrichment */
+ }
  allFiles.push({
  path: file.path,
  absolutePath: file.absolutePath,
@@ -67,18 +69,18 @@ async function enrichAllFiles(textFiles, binaryFiles) {

  function buildHistogram(allFiles) {
  const buckets = [
- [1 * KB, "0–1KB"],
- [10 * KB, "1–10KB"],
- [100 * KB, "10–100KB"],
- [1 * MB, "100KB–1MB"],
- [10 * MB, "1–10MB"],
- [100 * MB, "10–100MB"],
- [Infinity, ">=100MB"],
+ [1 * KB, '0–1KB'],
+ [10 * KB, '1–10KB'],
+ [100 * KB, '10–100KB'],
+ [1 * MB, '100KB–1MB'],
+ [10 * MB, '1–10MB'],
+ [100 * MB, '10–100MB'],
+ [Infinity, '>=100MB'],
  ];
  const histogram = buckets.map(([_, label]) => ({ label, count: 0, bytes: 0 }));
  for (const f of allFiles) {
- for (let i = 0; i < buckets.length; i++) {
- if (f.size < buckets[i][0]) {
+ for (const [i, bucket] of buckets.entries()) {
+ if (f.size < bucket[0]) {
  histogram[i].count++;
  histogram[i].bytes += f.size;
  break;
@@ -91,13 +93,13 @@ function buildHistogram(allFiles) {
  function aggregateByExtension(allFiles) {
  const byExtension = new Map();
  for (const f of allFiles) {
- const key = f.ext || "<none>";
+ const key = f.ext || '<none>';
  const v = byExtension.get(key) || { ext: key, count: 0, bytes: 0 };
  v.count++;
  v.bytes += f.size;
  byExtension.set(key, v);
  }
- return Array.from(byExtension.values()).sort((a, b) => b.bytes - a.bytes);
+ return [...byExtension.values()].sort((a, b) => b.bytes - a.bytes);
  }

  function aggregateByDirectory(allFiles) {
@@ -109,15 +111,15 @@ function aggregateByDirectory(allFiles) {
  byDirectory.set(dir, v);
  }
  for (const f of allFiles) {
- const parts = f.dir === "." ? [] : f.dir.split(path.sep);
- let acc = "";
+ const parts = f.dir === '.' ? [] : f.dir.split(path.sep);
+ let acc = '';
  for (let i = 0; i < parts.length; i++) {
  acc = i === 0 ? parts[0] : acc + path.sep + parts[i];
  addDirBytes(acc, f.size);
  }
- if (parts.length === 0) addDirBytes(".", f.size);
+ if (parts.length === 0) addDirBytes('.', f.size);
  }
- return Array.from(byDirectory.values()).sort((a, b) => b.bytes - a.bytes);
+ return [...byDirectory.values()].sort((a, b) => b.bytes - a.bytes);
  }

  function computeDepthAndLongest(allFiles) {
@@ -129,21 +131,22 @@ function computeDepthAndLongest(allFiles) {
  .sort((a, b) => b.path.length - a.path.length)
  .slice(0, 25)
  .map((f) => ({ path: f.path, length: f.path.length, size: f.size }));
- const depthDist = Array.from(depthDistribution.entries())
+ const depthDist = [...depthDistribution.entries()]
  .sort((a, b) => a[0] - b[0])
  .map(([depth, count]) => ({ depth, count }));
  return { depthDist, longestPaths };
  }

  function computeTemporal(allFiles, nowMs) {
- let oldest = null, newest = null;
+ let oldest = null,
+ newest = null;
  const ageBuckets = [
- { label: "> 1 year", minDays: 365, maxDays: Infinity, count: 0, bytes: 0 },
- { label: "6–12 months", minDays: 180, maxDays: 365, count: 0, bytes: 0 },
- { label: "1–6 months", minDays: 30, maxDays: 180, count: 0, bytes: 0 },
- { label: "7–30 days", minDays: 7, maxDays: 30, count: 0, bytes: 0 },
- { label: "1–7 days", minDays: 1, maxDays: 7, count: 0, bytes: 0 },
- { label: "< 1 day", minDays: 0, maxDays: 1, count: 0, bytes: 0 },
+ { label: '> 1 year', minDays: 365, maxDays: Infinity, count: 0, bytes: 0 },
+ { label: '6–12 months', minDays: 180, maxDays: 365, count: 0, bytes: 0 },
+ { label: '1–6 months', minDays: 30, maxDays: 180, count: 0, bytes: 0 },
+ { label: '7–30 days', minDays: 7, maxDays: 30, count: 0, bytes: 0 },
+ { label: '1–7 days', minDays: 1, maxDays: 7, count: 0, bytes: 0 },
+ { label: '< 1 day', minDays: 0, maxDays: 1, count: 0, bytes: 0 },
  ];
  for (const f of allFiles) {
  const ageDays = Math.max(0, (nowMs - (f.mtimeMs || nowMs)) / (24 * 60 * 60 * 1000));
@@ -158,15 +161,21 @@ function computeTemporal(allFiles, nowMs) {
  if (!newest || f.mtimeMs > newest.mtimeMs) newest = f;
  }
  return {
- oldest: oldest ? { path: oldest.path, mtime: oldest.mtimeMs ? new Date(oldest.mtimeMs).toISOString() : null } : null,
- newest: newest ? { path: newest.path, mtime: newest.mtimeMs ? new Date(newest.mtimeMs).toISOString() : null } : null,
+ oldest: oldest
+ ? { path: oldest.path, mtime: oldest.mtimeMs ? new Date(oldest.mtimeMs).toISOString() : null }
+ : null,
+ newest: newest
+ ? { path: newest.path, mtime: newest.mtimeMs ? new Date(newest.mtimeMs).toISOString() : null }
+ : null,
  ageBuckets,
  };
  }

  function computeQuality(allFiles, textFiles) {
  const zeroByteFiles = allFiles.filter((f) => f.size === 0).length;
- const emptyTextFiles = textFiles.filter((f) => (f.size || 0) === 0 || (f.lines || 0) === 0).length;
+ const emptyTextFiles = textFiles.filter(
+ (f) => (f.size || 0) === 0 || (f.lines || 0) === 0,
+ ).length;
  const hiddenFiles = allFiles.filter((f) => f.hidden).length;
  const symlinks = allFiles.filter((f) => f.isSymlink).length;
  const largeThreshold = 50 * MB;
@@ -201,18 +210,31 @@ function computeDuplicates(allFiles, textFiles) {
  for (const tf of textGroup) {
  try {
  const src = textFiles.find((x) => x.absolutePath === tf.absolutePath);
- const content = src ? src.content : "";
- const h = crypto.createHash("sha1").update(content).digest("hex");
+ const content = src ? src.content : '';
+ const h = crypto.createHash('sha1').update(content).digest('hex');
  const g = contentHashGroups.get(h) || [];
  g.push(tf);
  contentHashGroups.set(h, g);
- } catch (_) { /* ignore hashing errors for duplicate detection */ }
+ } catch {
+ /* ignore hashing errors for duplicate detection */
+ }
  }
  for (const [_h, g] of contentHashGroups.entries()) {
- if (g.length > 1) duplicateCandidates.push({ reason: "same-size+text-hash", size: Number(sizeKey), count: g.length, files: g.map((f) => f.path) });
+ if (g.length > 1)
+ duplicateCandidates.push({
+ reason: 'same-size+text-hash',
+ size: Number(sizeKey),
+ count: g.length,
+ files: g.map((f) => f.path),
+ });
  }
  if (otherGroup.length > 1) {
- duplicateCandidates.push({ reason: "same-size", size: Number(sizeKey), count: otherGroup.length, files: otherGroup.map((f) => f.path) });
+ duplicateCandidates.push({
+ reason: 'same-size',
+ size: Number(sizeKey),
+ count: otherGroup.length,
+ files: otherGroup.map((f) => f.path),
+ });
  }
  }
  return duplicateCandidates;
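The strategy visible in this hunk: candidates are pre-grouped by byte size, and text files within a size group are re-grouped by a SHA-1 of their content; only groups with more than one member are reported. A tiny self-contained illustration of the hashing step (not package code):

const crypto = require('node:crypto');
const sha1 = (s) => crypto.createHash('sha1').update(s).digest('hex');

// Equal content -> equal digest -> same contentHashGroups bucket.
console.log(sha1('alpha') === sha1('alpha')); // true
console.log(sha1('alpha') === sha1('beta'));  // false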
@@ -226,10 +248,12 @@ function estimateCompressibility(textFiles) {
  const sampleLen = Math.min(256 * 1024, tf.size || 0);
  if (sampleLen <= 0) continue;
  const sample = tf.content.slice(0, sampleLen);
- const gz = zlib.gzipSync(Buffer.from(sample, "utf8"));
+ const gz = zlib.gzipSync(Buffer.from(sample, 'utf8'));
  compSampleBytes += sampleLen;
  compCompressedBytes += gz.length;
- } catch (_) { /* ignore compression errors during sampling */ }
+ } catch {
+ /* ignore compression errors during sampling */
+ }
  }
  return compSampleBytes > 0 ? compCompressedBytes / compSampleBytes : null;
  }
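The ratio returned here is compressedBytes / sampledBytes over samples of up to 256 KB, so values near 0 indicate highly compressible text and values near 1 indicate high-entropy or already-compressed content. A standalone illustration (not package code):

const zlib = require('node:zlib');
const { Buffer } = require('node:buffer');

const sample = 'abc'.repeat(10000); // repetitive, so it compresses very well
const ratio = zlib.gzipSync(Buffer.from(sample, 'utf8')).length / sample.length;
console.log(ratio.toFixed(3)); // far below 1 for repetitive input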
@@ -245,20 +269,34 @@ function computeGitInfo(allFiles, rootDir, largeThreshold) {
  };
  try {
  if (!rootDir) return info;
- const top = cp.execFileSync("git", ["rev-parse", "--show-toplevel"], { cwd: rootDir, stdio: ["ignore", "pipe", "ignore"] }).toString().trim();
+ const top = cp
+ .execFileSync('git', ['rev-parse', '--show-toplevel'], {
+ cwd: rootDir,
+ stdio: ['ignore', 'pipe', 'ignore'],
+ })
+ .toString()
+ .trim();
  if (!top) return info;
  info.isRepo = true;
- const out = cp.execFileSync("git", ["ls-files", "-z"], { cwd: rootDir, stdio: ["ignore", "pipe", "ignore"] });
- const tracked = new Set(out.toString().split("\0").filter(Boolean));
- let trackedBytes = 0, trackedCount = 0, untrackedBytes = 0, untrackedCount = 0;
+ const out = cp.execFileSync('git', ['ls-files', '-z'], {
+ cwd: rootDir,
+ stdio: ['ignore', 'pipe', 'ignore'],
+ });
+ const tracked = new Set(out.toString().split('\0').filter(Boolean));
+ let trackedBytes = 0,
+ trackedCount = 0,
+ untrackedBytes = 0,
+ untrackedCount = 0;
  const lfsCandidates = [];
  for (const f of allFiles) {
  const isTracked = tracked.has(f.path);
  if (isTracked) {
- trackedCount++; trackedBytes += f.size;
+ trackedCount++;
+ trackedBytes += f.size;
  if (f.size >= largeThreshold) lfsCandidates.push({ path: f.path, size: f.size });
  } else {
- untrackedCount++; untrackedBytes += f.size;
+ untrackedCount++;
+ untrackedBytes += f.size;
  }
  }
  info.trackedCount = trackedCount;
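For reference, the `-z` flag makes `git ls-files` NUL-terminate each path, which is why the output above is split on '\0'. The same call shape in isolation (illustrative; throws outside a git repository):

const cp = require('node:child_process');

// Prints the first few tracked paths of the current working directory's repo.
const out = cp.execFileSync('git', ['ls-files', '-z'], {
  stdio: ['ignore', 'pipe', 'ignore'],
});
console.log(out.toString().split('\0').filter(Boolean).slice(0, 5));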
@@ -266,7 +304,9 @@ function computeGitInfo(allFiles, rootDir, largeThreshold) {
  info.untrackedCount = untrackedCount;
  info.untrackedBytes = untrackedBytes;
  info.lfsCandidates = lfsCandidates.sort((a, b) => b.size - a.size).slice(0, 50);
- } catch (_) { /* git not available or not a repo, ignore */ }
+ } catch {
+ /* git not available or not a repo, ignore */
+ }
  return info;
  }

@@ -280,34 +320,58 @@ function computeLargestFiles(allFiles, totalBytes) {
  size: f.size,
  sizeFormatted: formatSize(f.size),
  percentOfTotal: toPct(f.size, totalBytes),
- ext: f.ext || "",
+ ext: f.ext || '',
  isBinary: f.isBinary,
  mtime: f.mtimeMs ? new Date(f.mtimeMs).toISOString() : null,
  }));
  }

  function mdTable(rows, headers) {
- const header = `| ${headers.join(" | ")} |`;
- const sep = `| ${headers.map(() => "---").join(" | ")} |`;
- const body = rows.map((r) => `| ${r.join(" | ")} |`).join("\n");
+ const header = `| ${headers.join(' | ')} |`;
+ const sep = `| ${headers.map(() => '---').join(' | ')} |`;
+ const body = rows.map((r) => `| ${r.join(' | ')} |`).join('\n');
  return `${header}\n${sep}\n${body}`;
  }

  function buildMarkdownReport(largestFiles, byExtensionArr, byDirectoryArr, totalBytes) {
  const toPct = (num, den) => (den === 0 ? 0 : (num / den) * 100);
  const md = [];
- md.push("\n### Top Largest Files (Top 50)\n");
- md.push(mdTable(
- largestFiles.map((f) => [f.path, f.sizeFormatted, `${f.percentOfTotal.toFixed(2)}%`, f.ext || "", f.isBinary ? "binary" : "text"]),
- ["Path", "Size", "% of total", "Ext", "Type"],
- ));
- md.push("\n\n### Top Extensions by Bytes (Top 20)\n");
- const topExtRows = byExtensionArr.slice(0, 20).map((e) => [e.ext, String(e.count), formatSize(e.bytes), `${toPct(e.bytes, totalBytes).toFixed(2)}%`]);
- md.push(mdTable(topExtRows, ["Ext", "Count", "Bytes", "% of total"]));
- md.push("\n\n### Top Directories by Bytes (Top 20)\n");
- const topDirRows = byDirectoryArr.slice(0, 20).map((d) => [d.dir, String(d.count), formatSize(d.bytes), `${toPct(d.bytes, totalBytes).toFixed(2)}%`]);
- md.push(mdTable(topDirRows, ["Directory", "Files", "Bytes", "% of total"]));
- return md.join("\n");
+ md.push(
+ '\n### Top Largest Files (Top 50)\n',
+ mdTable(
+ largestFiles.map((f) => [
+ f.path,
+ f.sizeFormatted,
+ `${f.percentOfTotal.toFixed(2)}%`,
+ f.ext || '',
+ f.isBinary ? 'binary' : 'text',
+ ]),
+ ['Path', 'Size', '% of total', 'Ext', 'Type'],
+ ),
+ '\n\n### Top Extensions by Bytes (Top 20)\n',
+ );
+ const topExtRows = byExtensionArr
+ .slice(0, 20)
+ .map((e) => [
+ e.ext,
+ String(e.count),
+ formatSize(e.bytes),
+ `${toPct(e.bytes, totalBytes).toFixed(2)}%`,
+ ]);
+ md.push(
+ mdTable(topExtRows, ['Ext', 'Count', 'Bytes', '% of total']),
+ '\n\n### Top Directories by Bytes (Top 20)\n',
+ );
+ const topDirRows = byDirectoryArr
+ .slice(0, 20)
+ .map((d) => [
+ d.dir,
+ String(d.count),
+ formatSize(d.bytes),
+ `${toPct(d.bytes, totalBytes).toFixed(2)}%`,
+ ]);
+ md.push(mdTable(topDirRows, ['Directory', 'Files', 'Bytes', '% of total']));
+ return md.join('\n');
  }

  module.exports = {
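Given mdTable's construction above, a two-column call yields a standard GitHub-flavored table. Restating the helper inline (copied from the hunk) to show its output with illustrative values:

const mdTable = (rows, headers) => {
  const header = `| ${headers.join(' | ')} |`;
  const sep = `| ${headers.map(() => '---').join(' | ')} |`;
  const body = rows.map((r) => `| ${r.join(' | ')} |`).join('\n');
  return `${header}\n${sep}\n${body}`;
};

console.log(mdTable([['a.txt', '1.0 KB']], ['Path', 'Size']));
// | Path | Size |
// | --- | --- |
// | a.txt | 1.0 KB |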
package/tools/flattener/stats.js

@@ -1,4 +1,4 @@
- const H = require("./stats.helpers.js");
+ const H = require('./stats.helpers.js');

  async function calculateStatistics(aggregatedContent, xmlFileSize, rootDir) {
  const { textFiles, binaryFiles, errors } = aggregatedContent;
@@ -10,8 +10,8 @@ async function calculateStatistics(aggregatedContent, xmlFileSize, rootDir) {
  const allFiles = await H.enrichAllFiles(textFiles, binaryFiles);
  const totalBytes = allFiles.reduce((s, f) => s + f.size, 0);
  const sizes = allFiles.map((f) => f.size).sort((a, b) => a - b);
- const avgSize = sizes.length ? totalBytes / sizes.length : 0;
- const medianSize = sizes.length ? H.percentile(sizes, 50) : 0;
+ const avgSize = sizes.length > 0 ? totalBytes / sizes.length : 0;
+ const medianSize = sizes.length > 0 ? H.percentile(sizes, 50) : 0;
  const p90 = H.percentile(sizes, 90);
  const p95 = H.percentile(sizes, 95);
  const p99 = H.percentile(sizes, 99);
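`H.percentile` itself is not shown in this diff. As a loudly labeled assumption, a nearest-rank percentile over the pre-sorted `sizes` array might look like this (hypothetical sketch; the real stats.helpers.js export may differ):

// Hypothetical nearest-rank percentile; expects values sorted ascending.
function percentile(sortedValues, p) {
  if (sortedValues.length === 0) return 0;
  const rank = Math.ceil((p / 100) * sortedValues.length) - 1;
  return sortedValues[Math.min(sortedValues.length - 1, Math.max(0, rank))];
}

console.log(percentile([1, 2, 3, 4], 50)); // 2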