@esthernandez/vibe-doc 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (95)
  1. package/dist/checker/index.d.ts +34 -0
  2. package/dist/checker/index.d.ts.map +1 -0
  3. package/dist/checker/index.js +154 -0
  4. package/dist/checker/staleness.d.ts +26 -0
  5. package/dist/checker/staleness.d.ts.map +1 -0
  6. package/dist/checker/staleness.js +56 -0
  7. package/dist/classifier/index.d.ts +26 -0
  8. package/dist/classifier/index.d.ts.map +1 -0
  9. package/dist/classifier/index.js +146 -0
  10. package/dist/classifier/llm-prompt.d.ts +12 -0
  11. package/dist/classifier/llm-prompt.d.ts.map +1 -0
  12. package/dist/classifier/llm-prompt.js +123 -0
  13. package/dist/classifier/scoring-engine.d.ts +41 -0
  14. package/dist/classifier/scoring-engine.d.ts.map +1 -0
  15. package/dist/classifier/scoring-engine.js +197 -0
  16. package/dist/classifier/signals.d.ts +16 -0
  17. package/dist/classifier/signals.d.ts.map +1 -0
  18. package/dist/classifier/signals.js +305 -0
  19. package/dist/gap-analyzer/breadcrumbs.d.ts +18 -0
  20. package/dist/gap-analyzer/breadcrumbs.d.ts.map +1 -0
  21. package/dist/gap-analyzer/breadcrumbs.js +314 -0
  22. package/dist/gap-analyzer/index.d.ts +13 -0
  23. package/dist/gap-analyzer/index.d.ts.map +1 -0
  24. package/dist/gap-analyzer/index.js +88 -0
  25. package/dist/gap-analyzer/matrix.d.ts +29 -0
  26. package/dist/gap-analyzer/matrix.d.ts.map +1 -0
  27. package/dist/gap-analyzer/matrix.js +137 -0
  28. package/dist/gap-analyzer/tier-assigner.d.ts +22 -0
  29. package/dist/gap-analyzer/tier-assigner.d.ts.map +1 -0
  30. package/dist/gap-analyzer/tier-assigner.js +112 -0
  31. package/dist/generator/docx-writer.d.ts +15 -0
  32. package/dist/generator/docx-writer.d.ts.map +1 -0
  33. package/dist/generator/docx-writer.js +271 -0
  34. package/dist/generator/extractor.d.ts +11 -0
  35. package/dist/generator/extractor.d.ts.map +1 -0
  36. package/dist/generator/extractor.js +459 -0
  37. package/dist/generator/index.d.ts +25 -0
  38. package/dist/generator/index.d.ts.map +1 -0
  39. package/dist/generator/index.js +106 -0
  40. package/dist/generator/markdown-writer.d.ts +27 -0
  41. package/dist/generator/markdown-writer.d.ts.map +1 -0
  42. package/dist/generator/markdown-writer.js +85 -0
  43. package/dist/index.d.ts +7 -0
  44. package/dist/index.d.ts.map +1 -0
  45. package/dist/index.js +372 -0
  46. package/dist/scanner/artifact-scanner.d.ts +16 -0
  47. package/dist/scanner/artifact-scanner.d.ts.map +1 -0
  48. package/dist/scanner/artifact-scanner.js +189 -0
  49. package/dist/scanner/code-scanner.d.ts +17 -0
  50. package/dist/scanner/code-scanner.d.ts.map +1 -0
  51. package/dist/scanner/code-scanner.js +69 -0
  52. package/dist/scanner/file-scanner.d.ts +16 -0
  53. package/dist/scanner/file-scanner.d.ts.map +1 -0
  54. package/dist/scanner/file-scanner.js +119 -0
  55. package/dist/scanner/git-scanner.d.ts +10 -0
  56. package/dist/scanner/git-scanner.d.ts.map +1 -0
  57. package/dist/scanner/git-scanner.js +120 -0
  58. package/dist/scanner/index.d.ts +15 -0
  59. package/dist/scanner/index.d.ts.map +1 -0
  60. package/dist/scanner/index.js +106 -0
  61. package/dist/state/index.d.ts +20 -0
  62. package/dist/state/index.d.ts.map +1 -0
  63. package/dist/state/index.js +141 -0
  64. package/dist/state/schema.d.ts +101 -0
  65. package/dist/state/schema.d.ts.map +1 -0
  66. package/dist/state/schema.js +6 -0
  67. package/dist/templates/embedded/adr.md +45 -0
  68. package/dist/templates/embedded/api-spec.md +55 -0
  69. package/dist/templates/embedded/data-model.md +55 -0
  70. package/dist/templates/embedded/deployment-procedure.md +63 -0
  71. package/dist/templates/embedded/runbook.md +55 -0
  72. package/dist/templates/embedded/test-plan.md +55 -0
  73. package/dist/templates/embedded/threat-model.md +47 -0
  74. package/dist/templates/index.d.ts +20 -0
  75. package/dist/templates/index.d.ts.map +1 -0
  76. package/dist/templates/index.js +106 -0
  77. package/dist/templates/registry.d.ts +31 -0
  78. package/dist/templates/registry.d.ts.map +1 -0
  79. package/dist/templates/registry.js +172 -0
  80. package/dist/templates/renderer.d.ts +26 -0
  81. package/dist/templates/renderer.d.ts.map +1 -0
  82. package/dist/templates/renderer.js +145 -0
  83. package/dist/utils/language-detect.d.ts +14 -0
  84. package/dist/utils/language-detect.d.ts.map +1 -0
  85. package/dist/utils/language-detect.js +58 -0
  86. package/dist/utils/logger.d.ts +16 -0
  87. package/dist/utils/logger.d.ts.map +1 -0
  88. package/dist/utils/logger.js +35 -0
  89. package/dist/versioning/differ.d.ts +20 -0
  90. package/dist/versioning/differ.d.ts.map +1 -0
  91. package/dist/versioning/differ.js +160 -0
  92. package/dist/versioning/index.d.ts +44 -0
  93. package/dist/versioning/index.d.ts.map +1 -0
  94. package/dist/versioning/index.js +165 -0
  95. package/package.json +40 -0
@@ -0,0 +1,459 @@
1
"use strict";
/**
 * Artifact Data Extractor
 * Maps scan inventory data to template section tokens
 * Extracts rich context from project artifacts to pre-populate templates
 */
// --- TypeScript CommonJS interop helpers (compiler-emitted; do not hand-edit) ---
// Re-binds property `k` of module `m` onto namespace object `o` (as `k2`),
// preserving live getters where the host supports property descriptors.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the whole CommonJS module object as the `default` export of the
// synthesized namespace, matching `import * as ns` semantics.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Implements `import * as ns from "mod"` for CommonJS targets: copies every
// own enumerable key except "default", then installs the default binding.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
// Public API of this module: the per-doc-type extraction entry point.
exports.extractDataForDocType = extractDataForDocType;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const logger_1 = require("../utils/logger");
45
/**
 * Load and parse the project's package.json manifest.
 * @param projectPath - Project root directory.
 * @returns Parsed manifest object, or null when the file is missing or unreadable.
 */
function readPackageJson(projectPath) {
    try {
        const manifestPath = path.join(projectPath, 'package.json');
        if (!fs.existsSync(manifestPath)) {
            return null;
        }
        return JSON.parse(fs.readFileSync(manifestPath, 'utf-8'));
    }
    catch (error) {
        // Best-effort read: a malformed or unreadable manifest is not fatal.
        logger_1.logger.debug('Failed to read package.json', { error });
        return null;
    }
}
61
/**
 * Read the CLAUDE.md project-context file from one of its common locations.
 * @param projectPath - Project root directory.
 * @returns File contents, or '' when no copy exists or reading fails.
 */
function readClaudeMd(projectPath) {
    try {
        // Checked in priority order: repo root, docs/, then .claude/.
        const candidates = [
            path.join(projectPath, 'CLAUDE.md'),
            path.join(projectPath, 'docs', 'CLAUDE.md'),
            path.join(projectPath, '.claude', 'CLAUDE.md'),
        ];
        const found = candidates.find((candidate) => fs.existsSync(candidate));
        if (found) {
            return fs.readFileSync(found, 'utf-8');
        }
    }
    catch (error) {
        logger_1.logger.debug('Failed to read CLAUDE.md', { error });
    }
    return '';
}
83
/**
 * Read the project's Dockerfile for deployment context.
 * @param projectPath - Project root directory.
 * @returns Dockerfile contents, or '' when absent or unreadable.
 */
function readDockerfile(projectPath) {
    try {
        const dockerfilePath = path.join(projectPath, 'Dockerfile');
        return fs.existsSync(dockerfilePath)
            ? fs.readFileSync(dockerfilePath, 'utf-8')
            : '';
    }
    catch (error) {
        logger_1.logger.debug('Failed to read Dockerfile', { error });
        return '';
    }
}
98
/**
 * Read a GitHub Actions workflow file for CI/CD context.
 * Only the first .yml/.yaml file found (directory order) is sampled.
 * @param projectPath - Project root directory.
 * @returns Workflow file contents, or '' when none exist or reading fails.
 */
function readWorkflow(projectPath) {
    try {
        const workflowDir = path.join(projectPath, '.github', 'workflows');
        if (!fs.existsSync(workflowDir)) {
            return '';
        }
        const yamlFiles = fs
            .readdirSync(workflowDir)
            .filter((name) => /\.ya?ml$/.test(name));
        if (yamlFiles.length === 0) {
            return '';
        }
        return fs.readFileSync(path.join(workflowDir, yamlFiles[0]), 'utf-8');
    }
    catch (error) {
        logger_1.logger.debug('Failed to read workflow', { error });
        return '';
    }
}
119
/**
 * Build runbook section tokens from the manifest, CLAUDE.md, Dockerfile and CI config.
 * @param projectPath - Project root directory.
 * @param state - Current vibe-doc state (scan inventory).
 * @returns Map of section token -> pre-populated content.
 */
function extractRunbook(projectPath, state) {
    const pkg = readPackageJson(projectPath);
    const claude = readClaudeMd(projectPath);
    const dockerfile = readDockerfile(projectPath);
    const workflow = readWorkflow(projectPath);
    const result = {};
    // service-overview: manifest description wins; otherwise the first few
    // lines of CLAUDE.md, flattened to a single sentence-ish string.
    if (pkg?.description) {
        result['service-overview'] = pkg.description;
    }
    else if (claude) {
        result['service-overview'] = claude.split('\n').slice(0, 5).join(' ').trim();
    }
    // startup-procedure: surface the well-known npm lifecycle scripts.
    if (pkg?.scripts) {
        const startupEntries = [
            ['start', pkg.scripts.start],
            ['dev', pkg.scripts.dev],
            ['server:start', pkg.scripts['server:start']],
        ]
            .filter(([, command]) => Boolean(command))
            .map(([name, command]) => `npm ${name}: ${command}`);
        if (startupEntries.length > 0) {
            result['startup-procedure'] = startupEntries.join('\n\n');
        }
    }
    // health-checks: filename heuristics only — presence of a health-sounding file.
    const looksHealthRelated = (file) => /health|liveness|readiness|ping|status/.test(file.toLowerCase());
    if (state.artifactInventory.categories.sourceCode.files.some(looksHealthRelated)) {
        result['health-checks'] = 'Service includes health check endpoints. Verify via /health or /status endpoints.';
    }
    // common-issues: only worth suggesting once there is some git history.
    const { totalCommits } = state.artifactInventory.gitStats;
    if (totalCommits > 10) {
        result['common-issues'] =
            `Based on ${totalCommits} commits, check git log for frequent fixes: ` +
            `git log --oneline | grep -E "^(fix|hotfix):" | head -10`;
    }
    // rollback-procedure: CI revert support takes precedence over Docker/scripts.
    if (workflow && workflow.includes('revert')) {
        result['rollback-procedure'] = 'CI/CD pipeline includes revert capabilities. Check .github/workflows for rollback steps.';
    }
    else if (dockerfile || pkg?.scripts?.deploy) {
        result['rollback-procedure'] = 'Deployment handled via Docker/scripts. Manual rollback: revert deployment, restart service.';
    }
    // escalation-path: pull the "on-call: ..." line out of CLAUDE.md if present.
    if (claude.includes('on-call')) {
        const onCall = /on[- ]call[:\s]+([^\n]+)/i.exec(claude);
        if (onCall) {
            result['escalation-path'] = onCall[1];
        }
    }
    return result;
}
176
/**
 * Build ADR (Architecture Decision Record) section tokens.
 * @param projectPath - Project root directory.
 * @param state - Current vibe-doc state (scan inventory).
 * @returns Map of section token -> pre-populated content.
 */
function extractADR(projectPath, state) {
    const claude = readClaudeMd(projectPath);
    const result = {};
    // context: prefer a dedicated Architecture/Context/Background section.
    if (claude) {
        // FIX: the lookahead previously ended with a bare `$`; with the `m` flag
        // `$` matches at the end of the HEADING LINE itself, so the lazy capture
        // stopped immediately and 'context' came back empty (or one line) instead
        // of the section body. `$(?![\s\S])` anchors the alternative to the true
        // end of the document, so the capture runs to the next heading or EOF.
        const contextMatch = claude.match(/^#+\s+(?:Architecture|Context|Background)([\s\S]*?)(?=^#+|$(?![\s\S]))/m);
        if (contextMatch) {
            result['context'] = contextMatch[1].trim().slice(0, 500);
        }
        else {
            // No recognizable section: fall back to the opening of the file.
            result['context'] = claude.slice(0, 300);
        }
    }
    // decision: first explicit "decision/chose/selected: ..." phrase.
    if (claude && claude.includes('decision')) {
        const decisionMatch = claude.match(/(?:decision|chose|selected)[:\s]+([^\n]+)/i);
        if (decisionMatch) {
            result['decision'] = decisionMatch[1];
        }
    }
    // consequences: boilerplate derived from the detected language stack.
    const languages = state.artifactInventory.gitStats.mainLanguages;
    if (languages.length > 0) {
        result['consequences'] =
            `Technology stack: ${languages.join(', ')}. ` +
            `Positive: focused ecosystem. ` +
            `Negative: limited polyglot flexibility.`;
    }
    // alternatives: hint at the dependency landscape that was chosen.
    const pkg = readPackageJson(projectPath);
    if (pkg?.dependencies) {
        const keys = Object.keys(pkg.dependencies).slice(0, 3);
        result['alternatives'] =
            `Considered alternatives before selecting current stack. ` +
            `Current primary dependencies: ${keys.join(', ')}`;
    }
    return result;
}
217
/**
 * Build Deployment Procedure section tokens from the manifest, Dockerfile
 * and GitHub Actions workflow.
 * @param projectPath - Project root directory.
 * @param state - Current vibe-doc state (scan inventory).
 * @returns Map of section token -> pre-populated content.
 */
function extractDeploymentProcedure(projectPath, state) {
    const pkg = readPackageJson(projectPath);
    const dockerfile = readDockerfile(projectPath);
    const workflow = readWorkflow(projectPath);
    const result = {};
    // prerequisites: engine constraints from package.json plus the Docker base image.
    const prereqs = [];
    if (pkg?.engines?.node) {
        prereqs.push(`Node.js ${pkg.engines.node}`);
    }
    if (pkg?.engines?.npm) {
        prereqs.push(`npm ${pkg.engines.npm}`);
    }
    if (dockerfile && dockerfile.includes('FROM')) {
        // First FROM line only; multi-stage builds list additional bases.
        const fromMatch = dockerfile.match(/FROM\s+([^\n]+)/);
        if (fromMatch) {
            prereqs.push(`Docker base: ${fromMatch[1]}`);
        }
    }
    if (prereqs.length > 0) {
        result['prerequisites'] = prereqs.join('\n');
    }
    // build-process: enumerate known build-related npm scripts.
    // NOTE(review): the step numbering is inconsistent — "1." is used for both
    // build and compile, and "2."/"3." appear even when earlier steps are
    // absent. Confirm whether the numbers are intentional before relying on them.
    if (pkg?.scripts) {
        const buildScripts = [];
        if (pkg.scripts.build)
            buildScripts.push(`1. npm run build: ${pkg.scripts.build}`);
        if (pkg.scripts.compile)
            buildScripts.push(`1. npm run compile: ${pkg.scripts.compile}`);
        if (pkg.scripts['install'])
            buildScripts.push(`2. npm install dependencies`);
        if (pkg.scripts.test)
            buildScripts.push(`3. npm test: ${pkg.scripts.test}`);
        if (buildScripts.length > 0) {
            result['build-process'] = buildScripts.join('\n');
        }
    }
    // environment-setup: .env.example takes precedence over setup/configure scripts.
    if (fs.existsSync(path.join(projectPath, '.env.example'))) {
        result['environment-setup'] = 'See .env.example for required environment variables. Copy to .env and fill in values.';
    }
    else if (pkg?.scripts?.setup || pkg?.scripts?.configure) {
        result['environment-setup'] = `Run: npm run setup (or configure script defined in package.json)`;
    }
    // testing-before-deploy: prefer the CI-specific test script when present.
    if (pkg?.scripts?.test || pkg?.scripts?.['test:ci']) {
        const testCmd = pkg.scripts['test:ci'] || pkg.scripts.test;
        result['testing-before-deploy'] = `Run: ${testCmd}`;
    }
    // deployment-steps: only a pointer; the workflow file holds the real steps.
    if (workflow) {
        if (workflow.includes('deploy')) {
            result['deployment-steps'] = 'See .github/workflows for automated deployment steps. Manual: build, test, push Docker image, apply configuration.';
        }
    }
    // post-deployment-checks: filename heuristic for health endpoints.
    if (state.artifactInventory.categories.sourceCode.files.some(f => /health|status|ping/.test(f.toLowerCase()))) {
        result['post-deployment-checks'] = 'Verify: curl /health endpoint, check logs, monitor key metrics.';
    }
    // rollback-procedure: dedicated script wins over a CI revert mention.
    if (pkg?.scripts?.rollback) {
        result['rollback-procedure'] = `Run: npm run rollback`;
    }
    else if (workflow && workflow.includes('revert')) {
        result['rollback-procedure'] = 'Revert the deployment commit and redeploy previous version.';
    }
    return result;
}
288
/**
 * Build API Spec section tokens from the manifest and source-file heuristics.
 * @param projectPath - Project root directory.
 * @param state - Current vibe-doc state (scan inventory).
 * @returns Map of section token -> pre-populated content.
 */
function extractAPISpec(projectPath, state) {
    const pkg = readPackageJson(projectPath);
    const result = {};
    // base-url: guess a local port from the dev script; default to :3000.
    if (pkg?.description && /api|server/i.test(pkg.description)) {
        // FIX: the original evaluated `pkg.scripts.dev.match(/:\d+/)` twice
        // (once in a ternary condition, once for the value); a single match with
        // optional chaining + nullish coalescing is equivalent and evaluates once.
        const port = pkg.scripts?.dev?.match(/:\d+/)?.[0] ?? ':3000';
        result['base-url'] = `http://localhost${port}/api`;
    }
    else if (process.env.API_BASE_URL) {
        result['base-url'] = process.env.API_BASE_URL;
    }
    // endpoints: filename heuristic — routes/controllers/handlers present.
    const hasRoutes = state.artifactInventory.categories.sourceCode.files.some(f => /routes?|controllers?|handlers?/.test(f.toLowerCase()));
    if (hasRoutes) {
        result['endpoints'] =
            'API endpoints documented in routes/controllers. ' +
            'Common patterns: GET /users, POST /users, GET /users/:id, PUT /users/:id, DELETE /users/:id';
    }
    // authentication: filename heuristic — auth/jwt/passport/middleware present.
    // NOTE(review): the blurb asserts JWT + Bearer tokens from filenames alone;
    // confirm against the actual auth implementation.
    const hasAuth = state.artifactInventory.categories.sourceCode.files.some(f => /auth|middleware|jwt|passport/.test(f.toLowerCase()));
    if (hasAuth) {
        result['authentication'] =
            'API uses token-based authentication (JWT). ' +
            'Include Authorization header: Bearer <token>. ' +
            'Obtain token from /auth/login endpoint.';
    }
    return result;
}
319
/**
 * Build Test Plan section tokens from the manifest and test-file inventory.
 * @param projectPath - Project root directory.
 * @param state - Current vibe-doc state (scan inventory).
 * @returns Map of section token -> pre-populated content.
 */
function extractTestPlan(projectPath, state) {
    const pkg = readPackageJson(projectPath);
    const result = {};
    // test-strategy: detect the framework from devDependencies.
    // Priority order matters: jest > vitest > mocha > playwright.
    const frameworkByDep = [
        ['jest', 'Jest'],
        ['vitest', 'Vitest'],
        ['mocha', 'Mocha'],
        ['playwright', 'Playwright (E2E)'],
    ];
    let testFramework = 'unknown';
    if (pkg?.devDependencies) {
        const hit = frameworkByDep.find(([dep]) => pkg.devDependencies[dep]);
        if (hit) {
            testFramework = hit[1];
        }
    }
    if (testFramework !== 'unknown') {
        result['test-strategy'] =
            `Testing framework: ${testFramework}. ` +
            `Run: npm test. ` +
            `Strategy: Unit tests for business logic, integration tests for APIs, E2E for critical paths.`;
    }
    // unit-tests: report up to five discovered test files (count is capped at 5).
    const testFiles = state.artifactInventory.categories.tests.files.slice(0, 5);
    if (testFiles.length > 0) {
        result['unit-tests'] =
            `Test files found: ${testFiles.length} files. ` +
            `Pattern: *.test.ts or *.spec.ts. ` +
            `Run: npm test`;
    }
    // coverage-targets: use the jest threshold when configured, otherwise a
    // generic recommendation when there is no jest config at all.
    if (pkg && typeof pkg === 'object' && 'jest' in pkg) {
        const jestConfig = pkg.jest;
        if (jestConfig?.collectCoverageFrom) {
            result['coverage-targets'] =
                `Coverage target: ${jestConfig.coverageThreshold?.global?.lines || '80'}% lines. ` +
                `Critical paths: all API endpoints and core business logic.`;
        }
    }
    else {
        result['coverage-targets'] = 'Recommended: 80%+ line coverage for critical paths.';
    }
    return result;
}
365
/**
 * Build Data Model section tokens from source-file naming heuristics.
 * @param projectPath - Project root directory (unused; kept for signature parity).
 * @param state - Current vibe-doc state (scan inventory).
 * @returns Map of section token -> pre-populated content.
 */
function extractDataModel(projectPath, state) {
    const result = {};
    const sourceFiles = state.artifactInventory.categories.sourceCode.files;
    const anyFileMatches = (pattern) => sourceFiles.some((file) => pattern.test(file.toLowerCase()));
    // entity-overview: model/schema/entity files suggest a data layer exists.
    if (anyFileMatches(/models?|schema|entities?/)) {
        result['entity-overview'] =
            'Project includes data models/entities. Primary entities: User, Account, Transaction, etc. ' +
            'See models/ or schema/ directory for definitions.';
    }
    // table-schemas: migration/SQL files suggest a managed database schema.
    if (anyFileMatches(/migrations?|sql|schema/)) {
        result['table-schemas'] =
            'Database schema defined via migrations. ' +
            'Check migrations/ directory for CREATE TABLE statements and schema changes.';
    }
    return result;
}
386
/**
 * Build Threat Model section tokens from source-file naming heuristics.
 * @param projectPath - Project root directory (unused; kept for signature parity).
 * @param state - Current vibe-doc state (scan inventory).
 * @returns Map of section token -> pre-populated content.
 */
function extractThreatModel(projectPath, state) {
    // FIX: removed an unused `const claude = readClaudeMd(projectPath);` —
    // the value was never referenced, so dropping it only skips dead file reads.
    const result = {};
    const sourceFiles = state.artifactInventory.categories.sourceCode.files;
    // asset-scope: architecture artifacts or route/controller files imply an
    // API surface; db/model/schema files imply stored data.
    const hasAPI = state.artifactInventory.categories.architecture.files.length > 0 ||
        sourceFiles.some(f => /routes?|controllers?|endpoints?/.test(f.toLowerCase()));
    const hasData = sourceFiles.some(f => /database|db|sql|model|schema/.test(f.toLowerCase()));
    if (hasAPI || hasData) {
        const assets = [];
        if (hasAPI)
            assets.push('API endpoints');
        if (hasData)
            assets.push('user data in database');
        result['asset-scope'] =
            `Critical assets: ${assets.join(', ')}. ` +
            `Entry points: HTTP API endpoints, database connections.`;
    }
    // mitigations: filename heuristic for auth/validation code.
    // NOTE(review): the blurb asserts rate limiting / HTTPS from filenames
    // alone; verify against the actual middleware before publishing.
    const hasAuth = sourceFiles.some(f => /auth|middleware|jwt|passport|validate/.test(f.toLowerCase()));
    if (hasAuth) {
        result['mitigations'] =
            'Implemented: JWT authentication for API access, input validation, ' +
            'rate limiting, HTTPS enforced. ' +
            'Recommended: encryption at rest, audit logging, security headers.';
    }
    return result;
}
416
/**
 * Main extraction entry point — dispatches to the doc-type-specific extractor.
 * Unknown doc types log a warning and yield an empty map; any extractor error
 * is caught, logged, and also yields an empty map.
 * @param docType - Template doc type (e.g. 'runbook', 'adr').
 * @param state - Current vibe-doc state (scan inventory).
 * @param projectPath - Project root directory (defaults to the working directory).
 * @returns Map of section token -> pre-populated content.
 */
function extractDataForDocType(docType, state, projectPath = process.cwd()) {
    logger_1.logger.info('Extracting data for doc type', { docType });
    try {
        // Dispatch table replaces the original switch; hasOwnProperty guard
        // keeps inherited keys (e.g. 'constructor') out of the lookup.
        const extractors = {
            'runbook': extractRunbook,
            'adr': extractADR,
            'deployment-procedure': extractDeploymentProcedure,
            'api-spec': extractAPISpec,
            'test-plan': extractTestPlan,
            'data-model': extractDataModel,
            'threat-model': extractThreatModel,
        };
        let extracted = {};
        if (Object.prototype.hasOwnProperty.call(extractors, docType)) {
            extracted = extractors[docType](projectPath, state);
        }
        else {
            logger_1.logger.warn('Unknown doc type for extraction', { docType });
        }
        logger_1.logger.debug('Data extraction complete', {
            docType,
            sectionsExtracted: Object.keys(extracted).length,
        });
        return extracted;
    }
    catch (error) {
        logger_1.logger.error('Extraction failed', { docType, error });
        return {};
    }
}
@@ -0,0 +1,25 @@
1
/**
 * Generator Orchestrator Module
 * Orchestrates the document generation pipeline
 */
import { RenderData } from '../templates';
import { VibedocState } from '../state/schema';
/**
 * Result of a document generation
 */
export interface GenerationResult {
    paths: string[];           // Absolute/relative paths of every file written
    docType: string;           // Doc type that was generated (echoes the input)
    version: number;           // Version number assigned to this generation
}
/**
 * Generate a document in one or more formats
 * @param docType - Type of document (e.g., 'adr', 'runbook')
 * @param projectPath - Root path of the project
 * @param state - Current vibe-doc state
 * @param renderData - Data to render into the template
 * @param format - Format(s) to generate: 'md', 'docx', or 'both'
 * @returns GenerationResult with paths and version number
 * @throws Rethrows any error raised during template loading/rendering or
 *         file writing (the implementation logs then rethrows).
 */
export declare function generateDocument(docType: string, projectPath: string, state: VibedocState, renderData: RenderData, format?: 'md' | 'docx' | 'both'): Promise<GenerationResult>;
//# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/generator/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAgB,UAAU,EAAkB,MAAM,cAAc,CAAC;AAIxE,OAAO,EAAE,YAAY,EAAgB,MAAM,iBAAiB,CAAC;AAG7D;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED;;;;;;;;GAQG;AACH,wBAAsB,gBAAgB,CACpC,OAAO,EAAE,MAAM,EACf,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,YAAY,EACnB,UAAU,EAAE,UAAU,EACtB,MAAM,GAAE,IAAI,GAAG,MAAM,GAAG,MAAe,GACtC,OAAO,CAAC,gBAAgB,CAAC,CA+F3B"}
@@ -0,0 +1,106 @@
1
"use strict";
/**
 * Generator Orchestrator Module
 * Orchestrates the document generation pipeline
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.generateDocument = generateDocument;
const templates_1 = require("../templates");
const markdown_writer_1 = require("./markdown-writer");
const docx_writer_1 = require("./docx-writer");
const versioning_1 = require("../versioning");
const logger_1 = require("../utils/logger");
/**
 * Generate a document in one or more formats.
 * Pipeline: load template -> render -> archive previous version -> write
 * md/docx -> record the generated doc in state. Errors are logged and rethrown.
 * Side effects: mutates `state.history` and `state.generatedDocs`, and writes
 * files under `projectPath`.
 * @param docType - Type of document (e.g., 'adr', 'runbook')
 * @param projectPath - Root path of the project
 * @param state - Current vibe-doc state
 * @param renderData - Data to render into the template
 * @param format - Format(s) to generate: 'md', 'docx', or 'both'
 * @returns GenerationResult with paths and version number
 */
async function generateDocument(docType, projectPath, state, renderData, format = 'both') {
    logger_1.logger.info('Starting document generation', { docType, format });
    try {
        // Load template
        const template = (0, templates_1.loadTemplate)(docType);
        logger_1.logger.debug('Template loaded', { docType });
        // Render template
        const rendered = (0, templates_1.renderTemplate)(template, renderData);
        logger_1.logger.debug('Template rendered', { docType, length: rendered.length });
        // Archive current version if exists. The version number is computed
        // before archiving so it reflects the slot this generation will occupy.
        const nextVersion = (0, versioning_1.getNextVersion)(state, docType);
        const archived = (0, versioning_1.archiveCurrentVersion)(projectPath, docType, state);
        if (archived) {
            logger_1.logger.info('Previous version archived', { docType, version: archived.version });
            state.history.push(archived);
        }
        // Prepare metadata
        const metadata = {
            generatedAt: new Date().toISOString(),
            classification: renderData.metadata.classification,
            sourceArtifacts: renderData.metadata.sourceArtifacts,
            confidenceSummary: {
                high: 0,
                medium: 0,
                low: 0,
            },
        };
        // Count confidence levels from rendered output
        // Look for confidence tags in the rendered output
        // NOTE(review): these are bare substring matches — they count EVERY
        // occurrence of "high"/"medium"/"low" anywhere in the document
        // (including words like "highlight" or "lowercase"), not just confidence
        // tags. Confirm the intended tag format and tighten the patterns.
        const highMatch = rendered.match(/high/gi);
        const mediumMatch = rendered.match(/medium/gi);
        const lowMatch = rendered.match(/low/gi);
        metadata.confidenceSummary.high = highMatch ? highMatch.length : 0;
        metadata.confidenceSummary.medium = mediumMatch ? mediumMatch.length : 0;
        metadata.confidenceSummary.low = lowMatch ? lowMatch.length : 0;
        const paths = [];
        // Write markdown
        if (format === 'md' || format === 'both') {
            const mdPath = (0, markdown_writer_1.writeMarkdown)(rendered, docType, projectPath, metadata);
            paths.push(mdPath);
            logger_1.logger.info('Markdown document generated', { docType, path: mdPath });
        }
        // Write DOCX
        if (format === 'docx' || format === 'both') {
            const docxPath = await (0, docx_writer_1.writeDocx)(rendered, docType, projectPath, metadata);
            paths.push(docxPath);
            logger_1.logger.info('DOCX document generated', { docType, path: docxPath });
        }
        // Update state with generated doc entry
        const generatedDoc = {
            docType,
            generatedAt: new Date().toISOString(),
            paths,
            // NOTE(review): the DocMetadata declaration types sourceArtifacts as a
            // NUMBER, so toString().split(',') yields e.g. ['5'] — a single-element
            // array of the count, not a list of artifact names. Verify the intended
            // shape of sourceArtifacts before relying on this field.
            sourceArtifacts: renderData.metadata.sourceArtifacts.toString().split(','),
            confidenceSections: {
                high: metadata.confidenceSummary.high,
                medium: metadata.confidenceSummary.medium,
                low: metadata.confidenceSummary.low,
            },
            stateHash: '', // TODO: compute state hash if needed
        };
        // Replace or add generated doc entry (one entry per doc type).
        const existingIndex = state.generatedDocs.findIndex((doc) => doc.docType === docType);
        if (existingIndex >= 0) {
            state.generatedDocs[existingIndex] = generatedDoc;
        }
        else {
            state.generatedDocs.push(generatedDoc);
        }
        logger_1.logger.info('Document generation complete', {
            docType,
            version: nextVersion,
            formats: format,
        });
        return {
            paths,
            docType,
            version: nextVersion,
        };
    }
    catch (error) {
        logger_1.logger.error('Document generation failed', { docType, error });
        throw error;
    }
}
@@ -0,0 +1,27 @@
1
/**
 * Markdown Writer Module
 * Writes rendered content to .md files with metadata headers
 */
/**
 * Metadata for the generated document
 */
export interface DocMetadata {
    generatedAt: string;        // ISO-8601 timestamp of generation
    classification: string;     // Document classification label
    sourceArtifacts: number;    // Count of artifacts the document was derived from
    confidenceSummary: {
        high: number;           // Sections/tags counted as high confidence
        medium: number;         // Sections/tags counted as medium confidence
        low: number;            // Sections/tags counted as low confidence
    };
}
/**
 * Write rendered content to a markdown file with metadata header
 * @param renderedContent - The rendered markdown content
 * @param docType - Type of document (e.g., 'adr', 'runbook')
 * @param projectPath - Root path of the project
 * @param metadata - Document metadata
 * @returns Full path to the written file
 */
export declare function writeMarkdown(renderedContent: string, docType: string, projectPath: string, metadata: DocMetadata): string;
//# sourceMappingURL=markdown-writer.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"markdown-writer.d.ts","sourceRoot":"","sources":["../../src/generator/markdown-writer.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAMH;;GAEG;AACH,MAAM,WAAW,WAAW;IAC1B,WAAW,EAAE,MAAM,CAAC;IACpB,cAAc,EAAE,MAAM,CAAC;IACvB,eAAe,EAAE,MAAM,CAAC;IACxB,iBAAiB,EAAE;QACjB,IAAI,EAAE,MAAM,CAAC;QACb,MAAM,EAAE,MAAM,CAAC;QACf,GAAG,EAAE,MAAM,CAAC;KACb,CAAC;CACH;AAED;;;;;;;GAOG;AACH,wBAAgB,aAAa,CAC3B,eAAe,EAAE,MAAM,EACvB,OAAO,EAAE,MAAM,EACf,WAAW,EAAE,MAAM,EACnB,QAAQ,EAAE,WAAW,GACpB,MAAM,CAyBR"}