specweave 0.30.10 → 0.30.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95) hide show
  1. package/dist/plugins/specweave-ado/lib/ado-client-v2.d.ts.map +1 -1
  2. package/dist/plugins/specweave-ado/lib/ado-client-v2.js +46 -12
  3. package/dist/plugins/specweave-ado/lib/ado-client-v2.js.map +1 -1
  4. package/dist/src/cli/commands/init.d.ts.map +1 -1
  5. package/dist/src/cli/commands/init.js +12 -2
  6. package/dist/src/cli/commands/init.js.map +1 -1
  7. package/dist/src/cli/helpers/init/living-docs-preflight.d.ts +5 -1
  8. package/dist/src/cli/helpers/init/living-docs-preflight.d.ts.map +1 -1
  9. package/dist/src/cli/helpers/init/living-docs-preflight.js +80 -28
  10. package/dist/src/cli/helpers/init/living-docs-preflight.js.map +1 -1
  11. package/dist/src/cli/helpers/init/repository-setup.d.ts.map +1 -1
  12. package/dist/src/cli/helpers/init/repository-setup.js +64 -2
  13. package/dist/src/cli/helpers/init/repository-setup.js.map +1 -1
  14. package/dist/src/cli/helpers/project-count-fetcher.js +33 -2
  15. package/dist/src/cli/helpers/project-count-fetcher.js.map +1 -1
  16. package/dist/src/cli/workers/brownfield-worker.d.ts +13 -0
  17. package/dist/src/cli/workers/brownfield-worker.d.ts.map +1 -1
  18. package/dist/src/cli/workers/brownfield-worker.js +154 -0
  19. package/dist/src/cli/workers/brownfield-worker.js.map +1 -1
  20. package/dist/src/cli/workers/living-docs-worker.js +272 -11
  21. package/dist/src/cli/workers/living-docs-worker.js.map +1 -1
  22. package/dist/src/core/background/brownfield-launcher.d.ts +2 -1
  23. package/dist/src/core/background/brownfield-launcher.d.ts.map +1 -1
  24. package/dist/src/core/background/brownfield-launcher.js.map +1 -1
  25. package/dist/src/core/background/types.d.ts +10 -2
  26. package/dist/src/core/background/types.d.ts.map +1 -1
  27. package/dist/src/core/discrepancy/brownfield-types.d.ts +3 -1
  28. package/dist/src/core/discrepancy/brownfield-types.d.ts.map +1 -1
  29. package/dist/src/core/living-docs/feature-archiver.d.ts +39 -0
  30. package/dist/src/core/living-docs/feature-archiver.d.ts.map +1 -1
  31. package/dist/src/core/living-docs/feature-archiver.js +197 -0
  32. package/dist/src/core/living-docs/feature-archiver.js.map +1 -1
  33. package/dist/src/core/llm/availability-messages.d.ts +33 -0
  34. package/dist/src/core/llm/availability-messages.d.ts.map +1 -0
  35. package/dist/src/core/llm/availability-messages.js +170 -0
  36. package/dist/src/core/llm/availability-messages.js.map +1 -0
  37. package/dist/src/core/llm/index.d.ts +34 -0
  38. package/dist/src/core/llm/index.d.ts.map +1 -0
  39. package/dist/src/core/llm/index.js +35 -0
  40. package/dist/src/core/llm/index.js.map +1 -0
  41. package/dist/src/core/llm/provider-factory.d.ts +48 -0
  42. package/dist/src/core/llm/provider-factory.d.ts.map +1 -0
  43. package/dist/src/core/llm/provider-factory.js +274 -0
  44. package/dist/src/core/llm/provider-factory.js.map +1 -0
  45. package/dist/src/core/llm/providers/anthropic-provider.d.ts +66 -0
  46. package/dist/src/core/llm/providers/anthropic-provider.d.ts.map +1 -0
  47. package/dist/src/core/llm/providers/anthropic-provider.js +195 -0
  48. package/dist/src/core/llm/providers/anthropic-provider.js.map +1 -0
  49. package/dist/src/core/llm/providers/azure-openai-provider.d.ts +47 -0
  50. package/dist/src/core/llm/providers/azure-openai-provider.d.ts.map +1 -0
  51. package/dist/src/core/llm/providers/azure-openai-provider.js +116 -0
  52. package/dist/src/core/llm/providers/azure-openai-provider.js.map +1 -0
  53. package/dist/src/core/llm/providers/bedrock-provider.d.ts +44 -0
  54. package/dist/src/core/llm/providers/bedrock-provider.d.ts.map +1 -0
  55. package/dist/src/core/llm/providers/bedrock-provider.js +149 -0
  56. package/dist/src/core/llm/providers/bedrock-provider.js.map +1 -0
  57. package/dist/src/core/llm/providers/claude-code-provider.d.ts +115 -0
  58. package/dist/src/core/llm/providers/claude-code-provider.d.ts.map +1 -0
  59. package/dist/src/core/llm/providers/claude-code-provider.js +379 -0
  60. package/dist/src/core/llm/providers/claude-code-provider.js.map +1 -0
  61. package/dist/src/core/llm/providers/ollama-provider.d.ts +40 -0
  62. package/dist/src/core/llm/providers/ollama-provider.d.ts.map +1 -0
  63. package/dist/src/core/llm/providers/ollama-provider.js +116 -0
  64. package/dist/src/core/llm/providers/ollama-provider.js.map +1 -0
  65. package/dist/src/core/llm/providers/openai-provider.d.ts +44 -0
  66. package/dist/src/core/llm/providers/openai-provider.d.ts.map +1 -0
  67. package/dist/src/core/llm/providers/openai-provider.js +119 -0
  68. package/dist/src/core/llm/providers/openai-provider.js.map +1 -0
  69. package/dist/src/core/llm/providers/vertex-ai-provider.d.ts +46 -0
  70. package/dist/src/core/llm/providers/vertex-ai-provider.d.ts.map +1 -0
  71. package/dist/src/core/llm/providers/vertex-ai-provider.js +123 -0
  72. package/dist/src/core/llm/providers/vertex-ai-provider.js.map +1 -0
  73. package/dist/src/core/llm/types.d.ts +181 -0
  74. package/dist/src/core/llm/types.d.ts.map +1 -0
  75. package/dist/src/core/llm/types.js +56 -0
  76. package/dist/src/core/llm/types.js.map +1 -0
  77. package/dist/src/importers/item-converter.d.ts.map +1 -1
  78. package/dist/src/importers/item-converter.js +69 -12
  79. package/dist/src/importers/item-converter.js.map +1 -1
  80. package/dist/src/integrations/ado/ado-client.d.ts +22 -0
  81. package/dist/src/integrations/ado/ado-client.d.ts.map +1 -1
  82. package/dist/src/integrations/ado/ado-client.js +89 -37
  83. package/dist/src/integrations/ado/ado-client.js.map +1 -1
  84. package/dist/src/living-docs/enterprise-analyzer.d.ts +160 -0
  85. package/dist/src/living-docs/enterprise-analyzer.d.ts.map +1 -0
  86. package/dist/src/living-docs/enterprise-analyzer.js +836 -0
  87. package/dist/src/living-docs/enterprise-analyzer.js.map +1 -0
  88. package/dist/src/living-docs/fs-id-allocator.d.ts +5 -0
  89. package/dist/src/living-docs/fs-id-allocator.d.ts.map +1 -1
  90. package/dist/src/living-docs/fs-id-allocator.js +12 -5
  91. package/dist/src/living-docs/fs-id-allocator.js.map +1 -1
  92. package/package.json +1 -1
  93. package/plugins/specweave/commands/specweave-archive.md +69 -2
  94. package/plugins/specweave-ado/lib/ado-client-v2.js +43 -8
  95. package/plugins/specweave-ado/lib/ado-client-v2.ts +52 -12
@@ -0,0 +1,836 @@
1
+ /**
2
+ * Enterprise Documentation Analyzer
3
+ *
4
+ * Provides comprehensive documentation analysis covering:
5
+ * - All docs/internal folders (specs, architecture, ADRs, governance)
6
+ * - Spec-code mismatch detection
7
+ * - Documentation health scoring
8
+ * - Enterprise-grade reporting
9
+ */
10
+ import * as fs from 'fs';
11
+ import * as path from 'path';
12
+ import { glob } from 'glob';
13
+ import { consoleLogger } from '../utils/logger.js';
14
+ /**
15
+ * Enterprise Documentation Analyzer
16
+ */
17
+ export class EnterpriseDocAnalyzer {
18
+ constructor(options) {
19
+ this.projectPath = options.projectPath;
20
+ this.logger = options.logger ?? consoleLogger;
21
+ this.includeArchived = options.includeArchived ?? false;
22
+ }
23
+ /**
24
+ * Run full enterprise documentation analysis
25
+ */
26
+ async analyze() {
27
+ this.logger.info('Starting enterprise documentation analysis...');
28
+ // Phase 1: Scan all documentation categories
29
+ const categories = await this.scanAllCategories();
30
+ // Phase 2: Extract acceptance criteria from specs
31
+ const allACs = this.extractAllAcceptanceCriteria(categories);
32
+ // Phase 3: Detect spec-code mismatches
33
+ const mismatches = await this.detectMismatches(allACs);
34
+ // Phase 4: Detect naming convention violations
35
+ const namingViolations = this.detectNamingViolations(categories);
36
+ this.logger.info(`Detected ${namingViolations.length} naming convention violations`);
37
+ // Phase 5: Detect duplicate documents
38
+ const duplicates = this.detectDuplicates(categories);
39
+ this.logger.info(`Detected ${duplicates.length} potential duplicates`);
40
+ // Phase 6: Detect discrepancies
41
+ const discrepancies = await this.detectDiscrepancies(categories);
42
+ this.logger.info(`Detected ${discrepancies.length} discrepancies`);
43
+ // Phase 7: Calculate health scores
44
+ const healthScore = this.calculateHealthScore(categories, mismatches, namingViolations);
45
+ // Phase 8: Generate recommendations
46
+ const recommendations = this.generateRecommendations(categories, mismatches, healthScore, namingViolations, duplicates);
47
+ const totalDocuments = categories.reduce((sum, cat) => sum + cat.fileCount, 0);
48
+ return {
49
+ generatedAt: new Date(),
50
+ projectPath: this.projectPath,
51
+ categories,
52
+ totalDocuments,
53
+ healthScore,
54
+ mismatches,
55
+ namingViolations,
56
+ duplicates,
57
+ discrepancies,
58
+ recommendations,
59
+ };
60
+ }
61
+ /**
62
+ * Scan all documentation categories
63
+ */
64
+ async scanAllCategories() {
65
+ const categories = [];
66
+ const internalDocsPath = path.join(this.projectPath, '.specweave/docs/internal');
67
+ if (!fs.existsSync(internalDocsPath)) {
68
+ this.logger.warn('No .specweave/docs/internal directory found');
69
+ return categories;
70
+ }
71
+ // Define category mappings
72
+ const categoryDefs = [
73
+ { name: 'Feature Specs', subpath: 'specs' },
74
+ { name: 'Architecture', subpath: 'architecture' },
75
+ { name: 'ADRs', subpath: 'architecture/adr' },
76
+ { name: 'Governance', subpath: 'governance' },
77
+ { name: 'Modules', subpath: 'modules' },
78
+ { name: 'Emergency Procedures', subpath: 'emergency-procedures' },
79
+ ];
80
+ for (const def of categoryDefs) {
81
+ const categoryPath = path.join(internalDocsPath, def.subpath);
82
+ if (fs.existsSync(categoryPath)) {
83
+ const category = await this.scanCategory(def.name, categoryPath);
84
+ if (category.fileCount > 0) {
85
+ categories.push(category);
86
+ }
87
+ }
88
+ }
89
+ // Also scan increment specs
90
+ const incrementsPath = path.join(this.projectPath, '.specweave/increments');
91
+ if (fs.existsSync(incrementsPath)) {
92
+ const incrementCategory = await this.scanIncrementSpecs(incrementsPath);
93
+ if (incrementCategory.fileCount > 0) {
94
+ categories.push(incrementCategory);
95
+ }
96
+ }
97
+ this.logger.info(`Scanned ${categories.length} documentation categories`);
98
+ return categories;
99
+ }
100
+ /**
101
+ * Scan a single documentation category
102
+ */
103
+ async scanCategory(name, categoryPath) {
104
+ // Simple pattern - rely on ignore for filtering
105
+ const files = await glob('**/*.md', {
106
+ cwd: categoryPath,
107
+ nodir: true,
108
+ ignore: this.includeArchived ? [] : ['**/_archive/**', '**/node_modules/**'],
109
+ });
110
+ const documents = [];
111
+ let latestUpdate = null;
112
+ for (const file of files) {
113
+ const fullPath = path.join(categoryPath, file);
114
+ const stats = fs.statSync(fullPath);
115
+ const content = fs.readFileSync(fullPath, 'utf-8');
116
+ const acs = this.parseAcceptanceCriteria(content, fullPath);
117
+ documents.push({
118
+ path: fullPath,
119
+ name: path.basename(file, '.md'),
120
+ category: name,
121
+ lastModified: stats.mtime,
122
+ size: stats.size,
123
+ hasAcceptanceCriteria: acs.length > 0,
124
+ acceptanceCriteria: acs,
125
+ });
126
+ if (!latestUpdate || stats.mtime > latestUpdate) {
127
+ latestUpdate = stats.mtime;
128
+ }
129
+ }
130
+ return {
131
+ name,
132
+ path: categoryPath,
133
+ fileCount: documents.length,
134
+ files: documents,
135
+ lastUpdated: latestUpdate,
136
+ };
137
+ }
138
+ /**
139
+ * Scan increment spec files
140
+ */
141
+ async scanIncrementSpecs(incrementsPath) {
142
+ // Simple pattern - rely on ignore for filtering
143
+ const files = await glob('*/spec.md', {
144
+ cwd: incrementsPath,
145
+ nodir: true,
146
+ ignore: this.includeArchived ? [] : ['_archive/**'],
147
+ });
148
+ const documents = [];
149
+ let latestUpdate = null;
150
+ for (const file of files) {
151
+ const fullPath = path.join(incrementsPath, file);
152
+ const stats = fs.statSync(fullPath);
153
+ const content = fs.readFileSync(fullPath, 'utf-8');
154
+ const acs = this.parseAcceptanceCriteria(content, fullPath);
155
+ documents.push({
156
+ path: fullPath,
157
+ name: path.dirname(file),
158
+ category: 'Increment Specs',
159
+ lastModified: stats.mtime,
160
+ size: stats.size,
161
+ hasAcceptanceCriteria: acs.length > 0,
162
+ acceptanceCriteria: acs,
163
+ });
164
+ if (!latestUpdate || stats.mtime > latestUpdate) {
165
+ latestUpdate = stats.mtime;
166
+ }
167
+ }
168
+ return {
169
+ name: 'Increment Specs',
170
+ path: incrementsPath,
171
+ fileCount: documents.length,
172
+ files: documents,
173
+ lastUpdated: latestUpdate,
174
+ };
175
+ }
176
+ /**
177
+ * Parse acceptance criteria from markdown content
178
+ */
179
+ parseAcceptanceCriteria(content, sourceFile) {
180
+ const acs = [];
181
+ // Pattern: - [ ] **AC-US1-01**: Description or - [x] **AC-US1-01**: Description
182
+ const acPattern = /- \[([ x])\] \*\*?(AC-[A-Z0-9]+-\d+)\*\*?:?\s*(.+)/g;
183
+ let match;
184
+ while ((match = acPattern.exec(content)) !== null) {
185
+ acs.push({
186
+ id: match[2],
187
+ description: match[3].trim(),
188
+ isComplete: match[1] === 'x',
189
+ sourceFile,
190
+ });
191
+ }
192
+ return acs;
193
+ }
194
+ /**
195
+ * Extract all acceptance criteria from categories
196
+ */
197
+ extractAllAcceptanceCriteria(categories) {
198
+ const allACs = [];
199
+ for (const category of categories) {
200
+ for (const doc of category.files) {
201
+ allACs.push(...doc.acceptanceCriteria);
202
+ }
203
+ }
204
+ this.logger.info(`Extracted ${allACs.length} acceptance criteria`);
205
+ return allACs;
206
+ }
207
+ /**
208
+ * Detect mismatches between specs and code
209
+ */
210
+ async detectMismatches(acs) {
211
+ const mismatches = [];
212
+ const srcPath = path.join(this.projectPath, 'src');
213
+ if (!fs.existsSync(srcPath)) {
214
+ return mismatches;
215
+ }
216
+ // Get list of source files for evidence search
217
+ const sourceFiles = await glob('**/*.{ts,js,tsx,jsx}', {
218
+ cwd: srcPath,
219
+ nodir: true,
220
+ ignore: ['**/*.test.*', '**/*.spec.*', '**/node_modules/**'],
221
+ });
222
+ // Check completed ACs for code evidence
223
+ const completedACs = acs.filter(ac => ac.isComplete);
224
+ for (const ac of completedACs) {
225
+ // Extract keywords from AC description
226
+ const keywords = this.extractKeywords(ac.description);
227
+ // Search for evidence in code
228
+ const evidence = await this.searchCodeEvidence(srcPath, sourceFiles, keywords);
229
+ if (!evidence || evidence.files.length === 0) {
230
+ // AC marked complete but no code evidence found
231
+ mismatches.push({
232
+ specFile: ac.sourceFile,
233
+ criterionId: ac.id,
234
+ description: ac.description,
235
+ claimedComplete: true,
236
+ codeEvidence: null,
237
+ confidence: 70, // Medium confidence - might be false positive
238
+ mismatchType: 'ghost_completion',
239
+ });
240
+ }
241
+ else if (evidence.lineCount < 10) {
242
+ // Very little code evidence
243
+ mismatches.push({
244
+ specFile: ac.sourceFile,
245
+ criterionId: ac.id,
246
+ description: ac.description,
247
+ claimedComplete: true,
248
+ codeEvidence: evidence,
249
+ confidence: 50,
250
+ mismatchType: 'partial_implementation',
251
+ });
252
+ }
253
+ }
254
+ this.logger.info(`Detected ${mismatches.length} potential mismatches`);
255
+ return mismatches;
256
+ }
257
+ /**
258
+ * Extract keywords from AC description for code search
259
+ */
260
+ extractKeywords(description) {
261
+ // Remove common words and extract meaningful terms
262
+ const stopWords = new Set([
263
+ 'the', 'a', 'an', 'is', 'are', 'was', 'were', 'be', 'been', 'being',
264
+ 'have', 'has', 'had', 'do', 'does', 'did', 'will', 'would', 'could',
265
+ 'should', 'may', 'might', 'must', 'shall', 'can', 'need', 'dare',
266
+ 'ought', 'used', 'to', 'of', 'in', 'for', 'on', 'with', 'at', 'by',
267
+ 'from', 'as', 'into', 'through', 'during', 'before', 'after', 'above',
268
+ 'below', 'between', 'under', 'again', 'further', 'then', 'once',
269
+ 'and', 'but', 'or', 'nor', 'so', 'yet', 'both', 'either', 'neither',
270
+ 'not', 'only', 'own', 'same', 'than', 'too', 'very', 'just', 'that',
271
+ 'this', 'these', 'those', 'when', 'where', 'which', 'who', 'whom',
272
+ 'whose', 'why', 'how', 'all', 'each', 'every', 'any', 'some', 'no',
273
+ ]);
274
+ const words = description
275
+ .toLowerCase()
276
+ .replace(/[^a-z0-9\s]/g, ' ')
277
+ .split(/\s+/)
278
+ .filter(word => word.length > 3 && !stopWords.has(word));
279
+ // Deduplicate and limit to 5 keywords
280
+ return [...new Set(words)].slice(0, 5);
281
+ }
282
+ /**
283
+ * Search for code evidence matching keywords
284
+ */
285
+ async searchCodeEvidence(srcPath, sourceFiles, keywords) {
286
+ if (keywords.length === 0)
287
+ return null;
288
+ const matchingFiles = [];
289
+ const functions = [];
290
+ let totalLines = 0;
291
+ for (const file of sourceFiles.slice(0, 100)) { // Limit for performance
292
+ const fullPath = path.join(srcPath, file);
293
+ try {
294
+ const content = fs.readFileSync(fullPath, 'utf-8');
295
+ const contentLower = content.toLowerCase();
296
+ // Check if any keyword appears in file
297
+ const matches = keywords.filter(kw => contentLower.includes(kw));
298
+ if (matches.length >= Math.ceil(keywords.length / 2)) {
299
+ matchingFiles.push(file);
300
+ totalLines += content.split('\n').length;
301
+ // Extract function names that might be related
302
+ const funcPattern = /(?:function|const|let|var)\s+(\w+)\s*[=(]/g;
303
+ let funcMatch;
304
+ while ((funcMatch = funcPattern.exec(content)) !== null) {
305
+ const funcName = funcMatch[1].toLowerCase();
306
+ if (keywords.some(kw => funcName.includes(kw))) {
307
+ functions.push(funcMatch[1]);
308
+ }
309
+ }
310
+ }
311
+ }
312
+ catch {
313
+ // Skip files that can't be read
314
+ }
315
+ }
316
+ if (matchingFiles.length === 0)
317
+ return null;
318
+ return {
319
+ files: matchingFiles,
320
+ functions: [...new Set(functions)],
321
+ lineCount: totalLines,
322
+ };
323
+ }
324
+ /**
325
+ * Detect naming convention violations in documentation files
326
+ */
327
+ detectNamingViolations(categories) {
328
+ const violations = [];
329
+ // Define expected patterns per category
330
+ const categoryPatterns = {
331
+ 'Feature Specs': { pattern: /^(us-\d{3}|FS-\d{3}|[a-z][a-z0-9-]+)\.md$/i, expected: 'us-XXX.md, FS-XXX/*, or lowercase-kebab.md' },
332
+ 'Architecture': { pattern: /^[a-z][a-z0-9-]+\.md$/, expected: 'lowercase-kebab-case.md' },
333
+ 'ADRs': { pattern: /^\d{4}-[a-z][a-z0-9-]+\.md$/, expected: 'XXXX-title-in-kebab-case.md' },
334
+ 'Governance': { pattern: /^[a-z][a-z0-9-]+\.md$/, expected: 'lowercase-kebab-case.md' },
335
+ 'Modules': { pattern: /^[a-z][a-z0-9-]+\.md$/i, expected: 'lowercase-kebab-case.md' },
336
+ 'Emergency Procedures': { pattern: /^[a-z][a-z0-9-]+\.md$/, expected: 'lowercase-kebab-case.md' },
337
+ 'Increment Specs': { pattern: /^spec\.md$/, expected: 'spec.md' },
338
+ };
339
+ // Patterns to detect violations
340
+ const allCapsPattern = /^[A-Z][A-Z0-9-]+\.md$/;
341
+ const mixedCasePattern = /^(?=.*[A-Z])(?=.*[a-z])[A-Za-z0-9-]+\.md$/;
342
+ const dateSuffixPattern = /-\d{4}-\d{2}-\d{2}/;
343
+ const noExtensionPattern = /^[^.]+$/;
344
+ for (const category of categories) {
345
+ const expectedRule = categoryPatterns[category.name];
346
+ for (const doc of category.files) {
347
+ const fileName = path.basename(doc.path);
348
+ const relativePath = path.relative(this.projectPath, doc.path);
349
+ // Check for ALL CAPS files (e.g., CIRCUIT-BREAKER-MONITORING.md)
350
+ // Exclude standard files like README.md, FEATURE.md, API.md, CHANGELOG.md
351
+ const standardAllCapsFiles = ['README.md', 'FEATURE.md', 'API.md', 'CHANGELOG.md', 'LICENSE.md', 'CONTRIBUTING.md'];
352
+ if (allCapsPattern.test(fileName) && !standardAllCapsFiles.includes(fileName)) {
353
+ violations.push({
354
+ file: relativePath,
355
+ category: category.name,
356
+ violationType: 'all_caps',
357
+ expectedPattern: expectedRule?.expected ?? 'lowercase-kebab-case.md',
358
+ actual: fileName,
359
+ severity: 'warning',
360
+ });
361
+ continue;
362
+ }
363
+ // Check for mixed case (e.g., CircuitBreaker.md)
364
+ // Exclude standard files that use conventional naming
365
+ if (mixedCasePattern.test(fileName) && !standardAllCapsFiles.includes(fileName) && !fileName.startsWith('README') && !fileName.startsWith('API')) {
366
+ violations.push({
367
+ file: relativePath,
368
+ category: category.name,
369
+ violationType: 'mixed_case',
370
+ expectedPattern: expectedRule?.expected ?? 'lowercase-kebab-case.md',
371
+ actual: fileName,
372
+ severity: 'warning',
373
+ });
374
+ continue;
375
+ }
376
+ // Check for date suffixes (e.g., feature-fix-2025-11-24.md)
377
+ if (dateSuffixPattern.test(fileName)) {
378
+ violations.push({
379
+ file: relativePath,
380
+ category: category.name,
381
+ violationType: 'date_suffix',
382
+ expectedPattern: 'Document names should not include dates (use git history)',
383
+ actual: fileName,
384
+ severity: 'info',
385
+ });
386
+ }
387
+ // Check for no extension
388
+ if (noExtensionPattern.test(fileName)) {
389
+ violations.push({
390
+ file: relativePath,
391
+ category: category.name,
392
+ violationType: 'no_extension',
393
+ expectedPattern: 'Files should have .md extension',
394
+ actual: fileName,
395
+ severity: 'error',
396
+ });
397
+ }
398
+ // Check against category-specific pattern
399
+ if (expectedRule && !expectedRule.pattern.test(fileName)) {
400
+ // Skip if already caught by other checks
401
+ if (!allCapsPattern.test(fileName) && !mixedCasePattern.test(fileName)) {
402
+ // Skip ADR numbered files (e.g., 0001-decision-name.md) - this is standard ADR convention
403
+ const adrPattern = /^\d{4}-[a-z0-9-]+\.md$/;
404
+ const isInAdrFolder = relativePath.includes('/adr/') || relativePath.includes('\\adr\\');
405
+ if (isInAdrFolder && adrPattern.test(fileName)) {
406
+ continue; // Valid ADR naming, skip
407
+ }
408
+ violations.push({
409
+ file: relativePath,
410
+ category: category.name,
411
+ violationType: 'inconsistent_prefix',
412
+ expectedPattern: expectedRule.expected,
413
+ actual: fileName,
414
+ severity: 'info',
415
+ });
416
+ }
417
+ }
418
+ }
419
+ }
420
+ return violations;
421
+ }
422
+ /**
423
+ * Detect duplicate documents based on title similarity and content
424
+ */
425
+ detectDuplicates(categories) {
426
+ const duplicates = [];
427
+ const titleMap = new Map(); // Use Set to prevent duplicates
428
+ // Standard files that intentionally have same name across folders
429
+ const standardOrganizationalFiles = [
430
+ 'readme', 'feature', 'api', 'changelog', 'license', 'contributing', 'index',
431
+ ];
432
+ // Group files by normalized title
433
+ for (const category of categories) {
434
+ for (const doc of category.files) {
435
+ // Normalize title: lowercase, remove numbers, dashes, underscores
436
+ const normalizedTitle = doc.name
437
+ .toLowerCase()
438
+ .replace(/[\d_-]+/g, '')
439
+ .replace(/\s+/g, '');
440
+ // Skip standard organizational files from same_title detection
441
+ if (standardOrganizationalFiles.includes(normalizedTitle.replace(/\.md$/, ''))) {
442
+ continue;
443
+ }
444
+ if (!titleMap.has(normalizedTitle)) {
445
+ titleMap.set(normalizedTitle, new Set());
446
+ }
447
+ titleMap.get(normalizedTitle).add(doc.path); // Use add() for Set
448
+ }
449
+ }
450
+ // Find duplicates with same normalized title
451
+ for (const [normalizedTitle, filesSet] of titleMap) {
452
+ const files = Array.from(filesSet); // Convert Set to Array
453
+ if (files.length > 1 && normalizedTitle.length > 3) {
454
+ // Check for exact or near duplicates by comparing content
455
+ const contentHashes = new Map(); // Use Set
456
+ for (const filePath of files) {
457
+ try {
458
+ const content = fs.readFileSync(filePath, 'utf-8');
459
+ // Create simple content hash (first 500 chars normalized)
460
+ const contentSample = content
461
+ .slice(0, 500)
462
+ .toLowerCase()
463
+ .replace(/\s+/g, '')
464
+ .replace(/[^a-z0-9]/g, '');
465
+ if (!contentHashes.has(contentSample)) {
466
+ contentHashes.set(contentSample, new Set());
467
+ }
468
+ contentHashes.get(contentSample).add(filePath);
469
+ }
470
+ catch {
471
+ // Skip files that can't be read
472
+ }
473
+ }
474
+ // Check for exact content duplicates
475
+ let hasExactDupes = false;
476
+ for (const [, sameContentFilesSet] of contentHashes) {
477
+ const sameContentFiles = Array.from(sameContentFilesSet);
478
+ if (sameContentFiles.length > 1) {
479
+ hasExactDupes = true;
480
+ duplicates.push({
481
+ files: sameContentFiles.map(f => path.relative(this.projectPath, f)),
482
+ similarity: 100,
483
+ duplicateType: 'exact',
484
+ });
485
+ }
486
+ }
487
+ // If no exact duplicates but same title, mark as same_title
488
+ // But only if files are in the same documentation type (not adr vs hld vs concepts)
489
+ if (!hasExactDupes && files.length > 1) {
490
+ const relativePaths = files.map(f => path.relative(this.projectPath, f));
491
+ // Group by documentation type to avoid cross-type false positives
492
+ const docTypes = ['adr', 'hld', 'concepts', 'specs', 'guides'];
493
+ const filesByType = new Map();
494
+ for (const file of relativePaths) {
495
+ // Determine doc type from path
496
+ let docType = 'other';
497
+ for (const type of docTypes) {
498
+ if (file.includes(`/${type}/`) || file.includes(`\\${type}\\`)) {
499
+ docType = type;
500
+ break;
501
+ }
502
+ }
503
+ if (!filesByType.has(docType)) {
504
+ filesByType.set(docType, []);
505
+ }
506
+ filesByType.get(docType).push(file);
507
+ }
508
+ // Only report as duplicate if multiple files of same doc type
509
+ for (const [, sameTypeFiles] of filesByType) {
510
+ if (sameTypeFiles.length > 1) {
511
+ duplicates.push({
512
+ files: sameTypeFiles,
513
+ similarity: 80,
514
+ duplicateType: 'same_title',
515
+ });
516
+ }
517
+ }
518
+ }
519
+ }
520
+ }
521
+ return duplicates;
522
+ }
523
+ /**
524
+ * Detect discrepancies in documentation (broken links, orphaned refs)
525
+ */
526
+ async detectDiscrepancies(categories) {
527
+ const discrepancies = [];
528
+ const allDocPaths = new Set();
529
+ // Build set of all doc paths
530
+ for (const category of categories) {
531
+ for (const doc of category.files) {
532
+ allDocPaths.add(doc.path);
533
+ allDocPaths.add(path.basename(doc.path));
534
+ allDocPaths.add(doc.name);
535
+ }
536
+ }
537
+ // Check each document for broken links and orphaned references
538
+ for (const category of categories) {
539
+ for (const doc of category.files) {
540
+ try {
541
+ const content = fs.readFileSync(doc.path, 'utf-8');
542
+ const relativePath = path.relative(this.projectPath, doc.path);
543
+ // Check for markdown links to other docs
544
+ const linkPattern = /\[([^\]]+)\]\(([^)]+\.md)\)/g;
545
+ let match;
546
+ while ((match = linkPattern.exec(content)) !== null) {
547
+ const linkedPath = match[2];
548
+ const absoluteLinkedPath = path.resolve(path.dirname(doc.path), linkedPath);
549
+ if (!fs.existsSync(absoluteLinkedPath)) {
550
+ // For links to increments folder, also check _archive
551
+ let isArchivedIncrement = false;
552
+ if (linkedPath.includes('/increments/') && !linkedPath.includes('/_archive/')) {
553
+ // Extract increment ID and check archive
554
+ const incrementMatch = linkedPath.match(/increments\/(\d{4}-[a-z0-9-]+)/);
555
+ if (incrementMatch) {
556
+ const archivedPath = absoluteLinkedPath.replace(`/increments/${incrementMatch[1]}`, `/increments/_archive/${incrementMatch[1]}`);
557
+ isArchivedIncrement = fs.existsSync(archivedPath);
558
+ }
559
+ }
560
+ if (!isArchivedIncrement) {
561
+ discrepancies.push({
562
+ file: relativePath,
563
+ discrepancyType: 'broken_link',
564
+ description: `Broken link to: ${linkedPath}`,
565
+ relatedFiles: [linkedPath],
566
+ });
567
+ }
568
+ }
569
+ }
570
+ // Check for references to increment IDs that don't exist
571
+ const incrementRefPattern = /(?:increment|spec)[:\s]+(\d{4}-[a-z0-9-]+)/gi;
572
+ while ((match = incrementRefPattern.exec(content)) !== null) {
573
+ const incrementId = match[1];
574
+ const incrementPath = path.join(this.projectPath, '.specweave/increments', incrementId);
575
+ const archivedPath = path.join(this.projectPath, '.specweave/increments/_archive', incrementId);
576
+ if (!fs.existsSync(incrementPath) && !fs.existsSync(archivedPath)) {
577
+ discrepancies.push({
578
+ file: relativePath,
579
+ discrepancyType: 'orphaned_reference',
580
+ description: `Reference to non-existent increment: ${incrementId}`,
581
+ });
582
+ }
583
+ }
584
+ // Check for outdated version references (e.g., v0.XX references)
585
+ const versionPattern = /\bv(0\.\d+\.\d+)\b/g;
586
+ const currentVersionMatch = content.match(/version[:\s]+"?(\d+\.\d+\.\d+)"?/i);
587
+ while ((match = versionPattern.exec(content)) !== null) {
588
+ const referencedVersion = match[1];
589
+ // Flag very old versions (before 0.20)
590
+ const majorMinor = referencedVersion.split('.').slice(0, 2).join('.');
591
+ if (parseFloat(majorMinor) < 0.2) {
592
+ discrepancies.push({
593
+ file: relativePath,
594
+ discrepancyType: 'outdated_version',
595
+ description: `Potentially outdated version reference: v${referencedVersion}`,
596
+ });
597
+ }
598
+ }
599
+ }
600
+ catch {
601
+ // Skip files that can't be read
602
+ }
603
+ }
604
+ }
605
+ return discrepancies;
606
+ }
607
+ /**
608
+ * Calculate documentation health score
609
+ */
610
+ calculateHealthScore(categories, mismatches, namingViolations) {
611
+ // Calculate freshness (based on document age)
612
+ const now = new Date();
613
+ const thirtyDaysAgo = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
614
+ let freshDocs = 0;
615
+ let totalDocs = 0;
616
+ for (const cat of categories) {
617
+ for (const doc of cat.files) {
618
+ totalDocs++;
619
+ if (doc.lastModified > thirtyDaysAgo) {
620
+ freshDocs++;
621
+ }
622
+ }
623
+ }
624
+ const freshness = totalDocs > 0 ? Math.round((freshDocs / totalDocs) * 100) : 0;
625
+ // Calculate coverage (docs with ACs vs total docs)
626
+ let docsWithACs = 0;
627
+ for (const cat of categories) {
628
+ docsWithACs += cat.files.filter(d => d.hasAcceptanceCriteria).length;
629
+ }
630
+ const coverage = totalDocs > 0 ? Math.round((docsWithACs / totalDocs) * 100) : 0;
631
+ // Calculate accuracy (ACs without mismatches + naming violations penalty)
632
+ let totalACs = 0;
633
+ for (const cat of categories) {
634
+ for (const doc of cat.files) {
635
+ totalACs += doc.acceptanceCriteria.length;
636
+ }
637
+ }
638
+ const mismatchCount = mismatches.length;
639
+ // Apply naming violation penalty (errors: -3%, warnings: -1%, info: -0.5%)
640
+ const namingPenalty = namingViolations.reduce((penalty, v) => {
641
+ if (v.severity === 'error')
642
+ return penalty + 3;
643
+ if (v.severity === 'warning')
644
+ return penalty + 1;
645
+ return penalty + 0.5;
646
+ }, 0);
647
+ const accuracy = totalACs > 0
648
+ ? Math.max(0, Math.round(((totalACs - mismatchCount) / totalACs) * 100) - Math.min(namingPenalty, 20))
649
+ : Math.max(0, 100 - Math.min(namingPenalty, 20));
650
+ // Calculate overall score
651
+ const overall = Math.round((freshness * 0.2) + (coverage * 0.3) + (accuracy * 0.5));
652
+ // Determine grade
653
+ let grade;
654
+ if (overall >= 90)
655
+ grade = 'A';
656
+ else if (overall >= 80)
657
+ grade = 'B';
658
+ else if (overall >= 70)
659
+ grade = 'C';
660
+ else if (overall >= 60)
661
+ grade = 'D';
662
+ else
663
+ grade = 'F';
664
+ return {
665
+ overall,
666
+ grade,
667
+ freshness,
668
+ coverage,
669
+ accuracy,
670
+ trend: 'stable', // Would need historical data for actual trend
671
+ };
672
+ }
673
+ /**
674
+ * Generate recommendations based on analysis
675
+ */
676
+ generateRecommendations(categories, mismatches, healthScore, namingViolations, duplicates) {
677
+ const recommendations = [];
678
+ // Freshness recommendations
679
+ if (healthScore.freshness < 50) {
680
+ recommendations.push('Documentation freshness is low. Consider reviewing and updating docs that are over 30 days old.');
681
+ }
682
+ // Coverage recommendations
683
+ if (healthScore.coverage < 60) {
684
+ recommendations.push('Documentation coverage is limited. Add acceptance criteria to more documents.');
685
+ }
686
+ // Mismatch recommendations
687
+ if (mismatches.length > 0) {
688
+ const ghostCompletions = mismatches.filter(m => m.mismatchType === 'ghost_completion');
689
+ if (ghostCompletions.length > 0) {
690
+ recommendations.push(`${ghostCompletions.length} acceptance criteria are marked complete but lack code evidence. Review: ${ghostCompletions.slice(0, 3).map(m => m.criterionId).join(', ')}${ghostCompletions.length > 3 ? '...' : ''}`);
691
+ }
692
+ }
693
+ // Naming convention recommendations
694
+ if (namingViolations.length > 0) {
695
+ const allCapsViolations = namingViolations.filter(v => v.violationType === 'all_caps');
696
+ const dateSuffixViolations = namingViolations.filter(v => v.violationType === 'date_suffix');
697
+ if (allCapsViolations.length > 0) {
698
+ recommendations.push(`${allCapsViolations.length} files use ALL CAPS naming (e.g., ${allCapsViolations[0].actual}). Rename to lowercase-kebab-case for consistency.`);
699
+ }
700
+ if (dateSuffixViolations.length > 0) {
701
+ recommendations.push(`${dateSuffixViolations.length} files include dates in names. Use git history for versioning instead of date suffixes.`);
702
+ }
703
+ }
704
+ // Duplicate recommendations
705
+ if (duplicates.length > 0) {
706
+ const exactDupes = duplicates.filter(d => d.duplicateType === 'exact');
707
+ if (exactDupes.length > 0) {
708
+ recommendations.push(`${exactDupes.length} sets of duplicate documents detected. Consider consolidating: ${exactDupes[0].files.slice(0, 2).join(', ')}`);
709
+ }
710
+ }
711
+ // Category-specific recommendations
712
+ const hasADRs = categories.some(c => c.name === 'ADRs' && c.fileCount > 0);
713
+ if (!hasADRs) {
714
+ recommendations.push('No Architecture Decision Records found. Consider documenting key architectural decisions in .specweave/docs/internal/architecture/adr/');
715
+ }
716
+ const hasGovernance = categories.some(c => c.name === 'Governance' && c.fileCount > 0);
717
+ if (!hasGovernance) {
718
+ recommendations.push('No governance documentation found. Consider adding coding standards to .specweave/docs/internal/governance/');
719
+ }
720
+ return recommendations;
721
+ }
722
+ }
723
/**
 * Generate markdown report from enterprise analysis
 *
 * Renders the analysis report as a markdown document with sections for the
 * health score, category breakdown, mismatches, naming violations,
 * duplicates, discrepancies, and recommendations. Data-driven sections are
 * omitted when empty; long tables are truncated with a trailing "more" row.
 */
export function generateEnterpriseReport(report) {
    const out = [];
    // Small helper so multiple rows can be emitted in one call.
    const emit = (...rows) => out.push(...rows);

    emit('# Enterprise Documentation Health Report', '');
    emit(`*Generated: ${report.generatedAt.toLocaleString()}*`, '');

    // Health Score Summary
    const hs = report.healthScore;
    emit('## Documentation Health Score', '');
    emit(`| Metric | Score | Grade |`);
    emit(`|--------|-------|-------|`);
    emit(`| **Overall** | ${hs.overall}% | **${hs.grade}** |`);
    emit(`| Freshness | ${hs.freshness}% | - |`);
    emit(`| Coverage | ${hs.coverage}% | - |`);
    emit(`| Accuracy | ${hs.accuracy}% | - |`);
    emit('');

    // Document Categories
    emit('## Documentation Categories', '');
    emit('| Category | Documents | Last Updated |');
    emit('|----------|-----------|--------------|');
    for (const cat of report.categories) {
        const when = cat.lastUpdated ? cat.lastUpdated.toLocaleDateString() : 'N/A';
        emit(`| ${cat.name} | ${cat.fileCount} | ${when} |`);
    }
    emit('', `**Total Documents**: ${report.totalDocuments}`, '');

    // Mismatches (table capped at 20 rows)
    if (report.mismatches.length > 0) {
        emit('## Spec-Code Mismatches', '');
        emit('| AC ID | Type | Confidence | File |');
        emit('|-------|------|------------|------|');
        for (const m of report.mismatches.slice(0, 20)) {
            const typeEmoji = m.mismatchType === 'ghost_completion' ? '👻' :
                m.mismatchType === 'partial_implementation' ? '⚠️' : '❓';
            emit(`| ${m.criterionId} | ${typeEmoji} ${m.mismatchType} | ${m.confidence}% | ${path.basename(m.specFile)} |`);
        }
        if (report.mismatches.length > 20) {
            emit(`| ... | ... | ... | *${report.mismatches.length - 20} more* |`);
        }
        emit('');
    }

    // Naming Violations (table capped at 25 rows)
    if (report.namingViolations.length > 0) {
        emit('## Naming Convention Violations', '');
        emit('| File | Type | Severity | Expected Pattern |');
        emit('|------|------|----------|------------------|');
        const severityEmoji = { error: '🔴', warning: '🟡', info: '🔵' };
        for (const v of report.namingViolations.slice(0, 25)) {
            emit(`| ${v.file} | ${severityEmoji[v.severity]} ${v.violationType} | ${v.severity} | ${v.expectedPattern} |`);
        }
        if (report.namingViolations.length > 25) {
            emit(`| ... | ... | ... | *${report.namingViolations.length - 25} more* |`);
        }
        emit('');
    }

    // Duplicates (table capped at 15 rows; at most 3 file names shown per row)
    if (report.duplicates.length > 0) {
        emit('## Duplicate Documents', '');
        emit('| Files | Similarity | Type |');
        emit('|-------|------------|------|');
        for (const dup of report.duplicates.slice(0, 15)) {
            const filesStr = dup.files.slice(0, 3).join(', ') + (dup.files.length > 3 ? '...' : '');
            emit(`| ${filesStr} | ${dup.similarity}% | ${dup.duplicateType} |`);
        }
        if (report.duplicates.length > 15) {
            emit(`| ... | ... | *${report.duplicates.length - 15} more* |`);
        }
        emit('');
    }

    // Discrepancies (table capped at 20 rows)
    if (report.discrepancies.length > 0) {
        emit('## Documentation Discrepancies', '');
        emit('| File | Type | Description |');
        emit('|------|------|-------------|');
        const discrepancyEmoji = {
            broken_link: '🔗',
            orphaned_reference: '👻',
            outdated_version: '📅',
            conflicting_info: '⚠️',
        };
        for (const disc of report.discrepancies.slice(0, 20)) {
            // Unknown discrepancy types fall back to a question-mark marker.
            emit(`| ${disc.file} | ${discrepancyEmoji[disc.discrepancyType] ?? '❓'} ${disc.discrepancyType} | ${disc.description} |`);
        }
        if (report.discrepancies.length > 20) {
            emit(`| ... | ... | *${report.discrepancies.length - 20} more* |`);
        }
        emit('');
    }

    // Recommendations
    if (report.recommendations.length > 0) {
        emit('## Recommendations', '');
        for (const rec of report.recommendations) {
            emit(`- ${rec}`);
        }
        emit('');
    }

    return out.join('\n');
}
836
+ //# sourceMappingURL=enterprise-analyzer.js.map