@esthernandez/vibe-doc 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/checker/index.d.ts +34 -0
- package/dist/checker/index.d.ts.map +1 -0
- package/dist/checker/index.js +154 -0
- package/dist/checker/staleness.d.ts +26 -0
- package/dist/checker/staleness.d.ts.map +1 -0
- package/dist/checker/staleness.js +56 -0
- package/dist/classifier/index.d.ts +26 -0
- package/dist/classifier/index.d.ts.map +1 -0
- package/dist/classifier/index.js +146 -0
- package/dist/classifier/llm-prompt.d.ts +12 -0
- package/dist/classifier/llm-prompt.d.ts.map +1 -0
- package/dist/classifier/llm-prompt.js +123 -0
- package/dist/classifier/scoring-engine.d.ts +41 -0
- package/dist/classifier/scoring-engine.d.ts.map +1 -0
- package/dist/classifier/scoring-engine.js +197 -0
- package/dist/classifier/signals.d.ts +16 -0
- package/dist/classifier/signals.d.ts.map +1 -0
- package/dist/classifier/signals.js +305 -0
- package/dist/gap-analyzer/breadcrumbs.d.ts +18 -0
- package/dist/gap-analyzer/breadcrumbs.d.ts.map +1 -0
- package/dist/gap-analyzer/breadcrumbs.js +314 -0
- package/dist/gap-analyzer/index.d.ts +13 -0
- package/dist/gap-analyzer/index.d.ts.map +1 -0
- package/dist/gap-analyzer/index.js +88 -0
- package/dist/gap-analyzer/matrix.d.ts +29 -0
- package/dist/gap-analyzer/matrix.d.ts.map +1 -0
- package/dist/gap-analyzer/matrix.js +137 -0
- package/dist/gap-analyzer/tier-assigner.d.ts +22 -0
- package/dist/gap-analyzer/tier-assigner.d.ts.map +1 -0
- package/dist/gap-analyzer/tier-assigner.js +112 -0
- package/dist/generator/docx-writer.d.ts +15 -0
- package/dist/generator/docx-writer.d.ts.map +1 -0
- package/dist/generator/docx-writer.js +271 -0
- package/dist/generator/extractor.d.ts +11 -0
- package/dist/generator/extractor.d.ts.map +1 -0
- package/dist/generator/extractor.js +459 -0
- package/dist/generator/index.d.ts +25 -0
- package/dist/generator/index.d.ts.map +1 -0
- package/dist/generator/index.js +106 -0
- package/dist/generator/markdown-writer.d.ts +27 -0
- package/dist/generator/markdown-writer.d.ts.map +1 -0
- package/dist/generator/markdown-writer.js +85 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +372 -0
- package/dist/scanner/artifact-scanner.d.ts +16 -0
- package/dist/scanner/artifact-scanner.d.ts.map +1 -0
- package/dist/scanner/artifact-scanner.js +189 -0
- package/dist/scanner/code-scanner.d.ts +17 -0
- package/dist/scanner/code-scanner.d.ts.map +1 -0
- package/dist/scanner/code-scanner.js +69 -0
- package/dist/scanner/file-scanner.d.ts +16 -0
- package/dist/scanner/file-scanner.d.ts.map +1 -0
- package/dist/scanner/file-scanner.js +119 -0
- package/dist/scanner/git-scanner.d.ts +10 -0
- package/dist/scanner/git-scanner.d.ts.map +1 -0
- package/dist/scanner/git-scanner.js +120 -0
- package/dist/scanner/index.d.ts +15 -0
- package/dist/scanner/index.d.ts.map +1 -0
- package/dist/scanner/index.js +106 -0
- package/dist/state/index.d.ts +20 -0
- package/dist/state/index.d.ts.map +1 -0
- package/dist/state/index.js +141 -0
- package/dist/state/schema.d.ts +101 -0
- package/dist/state/schema.d.ts.map +1 -0
- package/dist/state/schema.js +6 -0
- package/dist/templates/embedded/adr.md +45 -0
- package/dist/templates/embedded/api-spec.md +55 -0
- package/dist/templates/embedded/data-model.md +55 -0
- package/dist/templates/embedded/deployment-procedure.md +63 -0
- package/dist/templates/embedded/runbook.md +55 -0
- package/dist/templates/embedded/test-plan.md +55 -0
- package/dist/templates/embedded/threat-model.md +47 -0
- package/dist/templates/index.d.ts +20 -0
- package/dist/templates/index.d.ts.map +1 -0
- package/dist/templates/index.js +106 -0
- package/dist/templates/registry.d.ts +31 -0
- package/dist/templates/registry.d.ts.map +1 -0
- package/dist/templates/registry.js +172 -0
- package/dist/templates/renderer.d.ts +26 -0
- package/dist/templates/renderer.d.ts.map +1 -0
- package/dist/templates/renderer.js +145 -0
- package/dist/utils/language-detect.d.ts +14 -0
- package/dist/utils/language-detect.d.ts.map +1 -0
- package/dist/utils/language-detect.js +58 -0
- package/dist/utils/logger.d.ts +16 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/dist/utils/logger.js +35 -0
- package/dist/versioning/differ.d.ts +20 -0
- package/dist/versioning/differ.d.ts.map +1 -0
- package/dist/versioning/differ.js +160 -0
- package/dist/versioning/index.d.ts +44 -0
- package/dist/versioning/index.d.ts.map +1 -0
- package/dist/versioning/index.js +165 -0
- package/package.json +40 -0
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { DetectedLanguage } from '../utils/language-detect';
/**
 * Code layout discovered by scanCode.
 */
export interface CodeStructure {
    /** Languages detected from the project's package/manifest files. */
    languages: DetectedLanguage[];
    /** Entry-point files keyed by language name.
     *  NOTE(review): currently always emitted empty by scanCode — confirm
     *  whether population is planned or the field can be dropped. */
    entryPoints: {
        [language: string]: string[];
    };
    /** Route-handler files keyed by language name (currently emitted empty). */
    routeHandlers: {
        [language: string]: string[];
    };
    /** Data-model files keyed by language name (currently emitted empty). */
    models: {
        [language: string]: string[];
    };
    /** Test directories (__tests__/tests/test) resolved against the project path. */
    testDirectories: string[];
    /** Package manifests found (package.json, pyproject.toml, Cargo.toml, go.mod). */
    packageConfigs: string[];
}
/**
 * Scans a project directory for languages, package manifests and test
 * directories. Logs and rethrows on failure.
 */
export declare function scanCode(projectPath: string): Promise<CodeStructure>;
//# sourceMappingURL=code-scanner.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"code-scanner.d.ts","sourceRoot":"","sources":["../../src/scanner/code-scanner.ts"],"names":[],"mappings":"AAIA,OAAO,EAAmB,gBAAgB,EAAE,MAAM,0BAA0B,CAAC;AAE7E,MAAM,WAAW,aAAa;IAC5B,SAAS,EAAE,gBAAgB,EAAE,CAAC;IAC9B,WAAW,EAAE;QAAE,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,EAAE,CAAA;KAAE,CAAC;IAC9C,aAAa,EAAE;QAAE,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,EAAE,CAAA;KAAE,CAAC;IAChD,MAAM,EAAE;QAAE,CAAC,QAAQ,EAAE,MAAM,GAAG,MAAM,EAAE,CAAA;KAAE,CAAC;IACzC,eAAe,EAAE,MAAM,EAAE,CAAC;IAC1B,cAAc,EAAE,MAAM,EAAE,CAAC;CAC1B;AAYD,wBAAsB,QAAQ,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,aAAa,CAAC,CAoB1E"}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.scanCode = scanCode;
|
|
37
|
+
const fs = __importStar(require("fs"));
|
|
38
|
+
const path = __importStar(require("path"));
|
|
39
|
+
const fast_glob_1 = require("fast-glob");
|
|
40
|
+
const logger_1 = require("../utils/logger");
|
|
41
|
+
const language_detect_1 = require("../utils/language-detect");
|
|
42
|
+
/**
 * Locates well-known package manifests (npm, Python, Rust, Go) at the
 * project root, resolves them against projectPath, and keeps only paths
 * that actually exist on disk.
 */
async function findPackageConfigs(projectPath) {
    const manifestNames = ['package.json', 'pyproject.toml', 'Cargo.toml', 'go.mod'];
    const matches = await (0, fast_glob_1.glob)(manifestNames, { cwd: projectPath });
    const existing = [];
    for (const match of matches) {
        const resolved = path.join(projectPath, match);
        if (fs.existsSync(resolved)) {
            existing.push(resolved);
        }
    }
    return existing;
}
|
|
46
|
+
/**
 * Finds conventional test directories (__tests__, tests, test) under the
 * project, returning only those that exist on disk.
 */
async function findTestDirectories(projectPath) {
    const testDirNames = ['__tests__', 'tests', 'test'];
    const matches = await (0, fast_glob_1.glob)(testDirNames, { cwd: projectPath, onlyDirectories: true });
    const existing = [];
    for (const match of matches) {
        const resolved = path.join(projectPath, match);
        if (fs.existsSync(resolved)) {
            existing.push(resolved);
        }
    }
    return existing;
}
|
|
50
|
+
/**
 * Scans a project's code structure: detected languages, package
 * manifests and test directories. Entry points, route handlers and
 * models are returned empty (not derived here).
 * Logs and rethrows on failure.
 */
async function scanCode(projectPath) {
    try {
        const packageConfigs = await findPackageConfigs(projectPath);
        const testDirectories = await findTestDirectories(projectPath);
        // Language detection is driven purely by which manifests exist.
        const languages = (0, language_detect_1.detectLanguages)(packageConfigs);
        logger_1.logger.info('Code scan completed', { languages: languages.length });
        return {
            languages,
            entryPoints: {},
            routeHandlers: {},
            models: {},
            testDirectories,
            packageConfigs,
        };
    }
    catch (error) {
        logger_1.logger.error('Code scan failed', { error });
        throw error;
    }
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { ArtifactsByType } from '../state/schema';
/**
 * Result of a project file scan. Every file discovered by scanFiles is
 * assigned to exactly one of these buckets (see categorizeFile in the
 * corresponding implementation module).
 */
interface FileScanResult {
    /** CLAUDE.md files (tool configuration doubling as documentation). */
    configAsDocs: ArtifactsByType;
    /** Files under .ai/ or .agent/skills/ directories. */
    agentArtifacts: ArtifactsByType;
    /** Files under .claude/ directories. */
    sessionContext: ArtifactsByType;
    /** Markdown files located under a docs/ directory. */
    documentation: ArtifactsByType;
    /** Package manifests (package.json, Cargo.toml). */
    packageConfigs: ArtifactsByType;
    /** GitHub Actions workflow files (.github/workflows/). */
    cicdConfigs: ArtifactsByType;
    /** Dockerfiles and Terraform (.tf) files. */
    infrastructure: ArtifactsByType;
    /** *.test.* / *.spec.* JavaScript or TypeScript files. */
    testFiles: ArtifactsByType;
    /** swagger.json API specifications. */
    apiSpecs: ArtifactsByType;
    /** Everything not matched by the categories above. */
    sourceCode: ArtifactsByType;
}
/**
 * Walks the project tree (bounded depth, common build/dependency
 * directories excluded) and buckets every file found.
 */
export declare function scanFiles(projectPath: string): Promise<FileScanResult>;
export {};
//# sourceMappingURL=file-scanner.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"file-scanner.d.ts","sourceRoot":"","sources":["../../src/scanner/file-scanner.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,iBAAiB,CAAC;AAElD,UAAU,cAAc;IACtB,YAAY,EAAE,eAAe,CAAC;IAC9B,cAAc,EAAE,eAAe,CAAC;IAChC,cAAc,EAAE,eAAe,CAAC;IAChC,aAAa,EAAE,eAAe,CAAC;IAC/B,cAAc,EAAE,eAAe,CAAC;IAChC,WAAW,EAAE,eAAe,CAAC;IAC7B,cAAc,EAAE,eAAe,CAAC;IAChC,SAAS,EAAE,eAAe,CAAC;IAC3B,QAAQ,EAAE,eAAe,CAAC;IAC1B,UAAU,EAAE,eAAe,CAAC;CAC7B;AA8CD,wBAAsB,SAAS,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,cAAc,CAAC,CA2B5E"}
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.scanFiles = scanFiles;
|
|
37
|
+
const fs = __importStar(require("fs"));
|
|
38
|
+
const path = __importStar(require("path"));
|
|
39
|
+
const logger_1 = require("../utils/logger");
|
|
40
|
+
// Entry names skipped entirely during traversal: dependency trees, VCS
// metadata, build output, and tool caches. Checked against every
// directory entry (files with these names are skipped too).
const EXCLUDE_DIRS = new Set([
    'node_modules', '.git', 'dist', 'build', 'coverage', '.next', 'venv', '.venv',
    '__pycache__', 'target', '.cache', '.turbo', 'temp', 'tmp', '.pytest_cache',
    'jest_cache', '.eslintcache', '.nuxt', '.parcel-cache', 'bower_components'
]);
|
|
45
|
+
/**
 * Assigns a single category label to a file path.
 * Paths are normalized to forward slashes first so Windows paths match.
 *
 * FIX: suffix-only checks (endsWith) previously matched unrelated names —
 * e.g. "MYCLAUDE.md" as configAsDocs, "my-package.json" as packageConfigs,
 * "not-swagger.json" as apiSpecs — so exact-name checks now run against
 * the final path segment instead.
 */
function categorizeFile(filePath) {
    const n = filePath.replace(/\\/g, '/');
    // Final path segment; for a bare filename lastIndexOf returns -1 and
    // slice(0) yields the whole string, which is the intended basename.
    const base = n.slice(n.lastIndexOf('/') + 1);
    if (base === 'CLAUDE.md')
        return 'configAsDocs';
    if (n.includes('/.ai/') || n.includes('/.agent/skills/'))
        return 'agentArtifacts';
    if (n.includes('/.claude/'))
        return 'sessionContext';
    if (n.includes('/docs/') && n.endsWith('.md'))
        return 'documentation';
    if (base === 'package.json' || base === 'Cargo.toml')
        return 'packageConfigs';
    if (n.includes('/.github/workflows/'))
        return 'cicdConfigs';
    // "Dockerfile", named variants like "dev.Dockerfile", and Terraform files.
    if (base === 'Dockerfile' || base.endsWith('.Dockerfile') || n.endsWith('.tf'))
        return 'infrastructure';
    if (/\.(test|spec)\.[jt]sx?$/.test(base))
        return 'testFiles';
    if (base === 'swagger.json')
        return 'apiSpecs';
    return 'sourceCode';
}
|
|
67
|
+
/**
 * Recursively collects file paths under `dir`, descending at most
 * `maxDepth` directory levels. Entries in EXCLUDE_DIRS are skipped, as
 * are hidden entries other than .ai, .agent, .claude and .github.
 * Unreadable directories are logged and skipped rather than aborting
 * the walk.
 */
function walkDir(dir, maxDepth = 4, currentDepth = 0) {
    if (currentDepth > maxDepth) {
        return [];
    }
    const allowedHidden = ['.ai', '.agent', '.claude', '.github'];
    const collected = [];
    try {
        for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
            if (EXCLUDE_DIRS.has(entry.name)) {
                continue;
            }
            if (entry.name.startsWith('.') && !allowedHidden.includes(entry.name)) {
                continue;
            }
            const fullPath = path.join(dir, entry.name);
            if (entry.isFile()) {
                collected.push(fullPath);
            }
            else if (entry.isDirectory()) {
                collected.push(...walkDir(fullPath, maxDepth, currentDepth + 1));
            }
        }
    }
    catch (error) {
        logger_1.logger.warn('Failed to read directory', { dir, error });
    }
    return collected;
}
|
|
92
|
+
/**
 * Walks the project tree and buckets every discovered file by category
 * (see categorizeFile). Each bucket holds the matched file paths and
 * their count. Logs and rethrows on failure.
 */
async function scanFiles(projectPath) {
    const categories = [
        'configAsDocs', 'agentArtifacts', 'sessionContext', 'documentation',
        'packageConfigs', 'cicdConfigs', 'infrastructure', 'testFiles',
        'apiSpecs', 'sourceCode',
    ];
    const result = {};
    for (const category of categories) {
        result[category] = { files: [], count: 0 };
    }
    try {
        const files = walkDir(projectPath);
        for (const file of files) {
            const bucket = result[categorizeFile(file)];
            bucket.files.push(file);
            bucket.count++;
        }
        logger_1.logger.info('File scan completed', { total: files.length });
        return result;
    }
    catch (error) {
        logger_1.logger.error('File scan failed', { error });
        throw error;
    }
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
/**
 * Git Scanner
 * Extracts git statistics from a repository
 */
import { GitStats } from '../state/schema';
/**
 * Scans git repository for statistics
 *
 * Never rejects: returns zeroed GitStats when the directory is not a
 * git repository or when the scan fails (failures are logged).
 */
export declare function scanGit(projectPath: string): Promise<GitStats>;
//# sourceMappingURL=git-scanner.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"git-scanner.d.ts","sourceRoot":"","sources":["../../src/scanner/git-scanner.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAKH,OAAO,EAAE,QAAQ,EAAE,MAAM,iBAAiB,CAAC;AAiE3C;;GAEG;AACH,wBAAsB,OAAO,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,QAAQ,CAAC,CAuEpE"}
|
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Git Scanner
|
|
4
|
+
* Extracts git statistics from a repository
|
|
5
|
+
*/
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
exports.scanGit = scanGit;
|
|
8
|
+
const simple_git_1 = require("simple-git");
|
|
9
|
+
const logger_1 = require("../utils/logger");
|
|
10
|
+
/**
 * Analyzes commit messages for conventional commit format.
 *
 * Tallies feat/fix/refactor/docs/chore subject lines (with optional
 * scope, e.g. "fix(core): ...") and returns both the per-type counts
 * and the fraction of messages that followed the convention.
 * Replaces five copy-pasted regex branches with one data-driven match.
 *
 * @param {string[]} messages full commit messages; only the first line matters
 * @returns {{ rate: number, patterns: Record<string, number> }}
 */
function analyzeConventionalCommits(messages) {
    const patterns = { feat: 0, fix: 0, refactor: 0, docs: 0, chore: 0, other: 0 };
    // Matches "type:" or "type(scope):" at the start of the subject line.
    const conventional = /^(feat|fix|refactor|docs|chore)(\(.+\))?:/;
    let validConventional = 0;
    for (const message of messages) {
        const firstLine = message.split('\n')[0];
        const match = firstLine.match(conventional);
        if (match) {
            patterns[match[1]]++;
            validConventional++;
        }
        else {
            patterns.other++;
        }
    }
    const rate = messages.length > 0 ? validConventional / messages.length : 0;
    return { rate, patterns };
}
|
|
53
|
+
/**
 * Scans git repository for statistics
 *
 * Never rejects: returns zeroed stats when the path is not a git
 * repository or when any git operation fails (failures are logged).
 */
async function scanGit(projectPath) {
    logger_1.logger.debug('Starting git scan', { projectPath });
    const emptyStats = {
        totalCommits: 0,
        contributors: 0,
        lastCommitDate: '',
        mainLanguages: [],
    };
    try {
        const git = (0, simple_git_1.simpleGit)(projectPath);
        // Check if it's a git repository
        const isRepo = await git.checkIsRepo();
        if (!isRepo) {
            logger_1.logger.warn('Not a git repository', { projectPath });
            return emptyStats;
        }
        // Get commit count
        const log = await git.log();
        const totalCommits = log.total || 0;
        logger_1.logger.debug('Total commits', { count: totalCommits });
        // Get unique contributors.
        // FIX: previously this issued a second `git log --format=%an` call and
        // read each entry's `message` field; with a custom format simple-git's
        // field mapping is unreliable. Read `author_name` from the log we
        // already fetched instead (also saves a subprocess call).
        const uniqueAuthors = new Set(log.all
            .map((commit) => commit.author_name)
            .filter((author) => typeof author === 'string' && author.length > 0));
        const contributors = uniqueAuthors.size;
        logger_1.logger.debug('Unique contributors', { count: contributors });
        // Get last commit date
        const lastCommitDate = (log.latest && log.latest.date) || '';
        logger_1.logger.debug('Last commit date', { date: lastCommitDate });
        // Analyze conventional commits (sample capped at 100 for performance)
        const sampleSize = Math.min(100, totalCommits);
        const commitMessages = [];
        for (let i = 0; i < sampleSize; i++) {
            if (log.all[i]) {
                commitMessages.push(log.all[i].message);
            }
        }
        const { patterns } = analyzeConventionalCommits(commitMessages);
        logger_1.logger.info('Git scan completed', {
            totalCommits,
            contributors,
            lastCommitDate,
            commitPatterns: patterns,
        });
        return {
            totalCommits,
            contributors,
            lastCommitDate,
            mainLanguages: [],
        };
    }
    catch (error) {
        logger_1.logger.warn('Git scan failed', { error, projectPath });
        return emptyStats;
    }
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/**
 * Scanner Orchestrator
 * Runs all scanner modules in sequence and assembles complete ArtifactInventory
 */
import { ArtifactInventory } from '../state/schema';
/**
 * Main scanner function that orchestrates all scanners
 * Returns complete ArtifactInventory
 *
 * Runs four sequential phases — file scan, git history, code structure,
 * artifact enrichment — then assembles the inventory. Errors raised by
 * a phase are logged and rethrown.
 */
export declare function scan(projectPath: string): Promise<ArtifactInventory>;
// Individual scan phases, re-exported for direct use.
export { scanFiles } from './file-scanner';
export { scanGit } from './git-scanner';
export { scanCode } from './code-scanner';
export { enrichArtifacts } from './artifact-scanner';
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/scanner/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,OAAO,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAMpD;;;GAGG;AACH,wBAAsB,IAAI,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,iBAAiB,CAAC,CA8F1E;AAED,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAC3C,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAC;AACxC,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAC;AAC1C,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC"}
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Scanner Orchestrator
|
|
4
|
+
* Runs all scanner modules in sequence and assembles complete ArtifactInventory
|
|
5
|
+
*/
|
|
6
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
+
exports.enrichArtifacts = exports.scanCode = exports.scanGit = exports.scanFiles = void 0;
|
|
8
|
+
exports.scan = scan;
|
|
9
|
+
const logger_1 = require("../utils/logger");
|
|
10
|
+
const file_scanner_1 = require("./file-scanner");
|
|
11
|
+
const git_scanner_1 = require("./git-scanner");
|
|
12
|
+
const code_scanner_1 = require("./code-scanner");
|
|
13
|
+
const artifact_scanner_1 = require("./artifact-scanner");
|
|
14
|
+
/**
 * Main scanner function that orchestrates all scanners
 * Returns complete ArtifactInventory
 *
 * Phases run sequentially: (1) file scan buckets every file by category,
 * (2) git history stats, (3) code structure / language detection,
 * (4) artifact enrichment. Any error from a phase is logged and rethrown.
 */
async function scan(projectPath) {
    logger_1.logger.info('Starting comprehensive project scan', { projectPath });
    try {
        // Phase 1: File scanning
        logger_1.logger.info('Phase 1/4: Scanning file structure...');
        const fileResults = await (0, file_scanner_1.scanFiles)(projectPath);
        // Phase 2: Git scanning
        logger_1.logger.info('Phase 2/4: Analyzing git history...');
        const gitStats = await (0, git_scanner_1.scanGit)(projectPath);
        // Phase 3: Code scanning
        logger_1.logger.info('Phase 3/4: Analyzing code structure...');
        const codeStructure = await (0, code_scanner_1.scanCode)(projectPath);
        // Phase 4: Artifact enrichment
        logger_1.logger.info('Phase 4/4: Enriching artifacts with summaries...');
        // NOTE(review): the enrichment result is discarded here — presumably
        // enrichArtifacts caches or persists its output elsewhere; confirm.
        await (0, artifact_scanner_1.enrichArtifacts)(fileResults.documentation.files, fileResults.agentArtifacts.files, fileResults.configAsDocs.files);
        // Assemble inventory. Every file-scan bucket contributes to the total
        // exactly once (the buckets partition the walked files).
        const totalArtifacts = fileResults.configAsDocs.count +
            fileResults.agentArtifacts.count +
            fileResults.sessionContext.count +
            fileResults.documentation.count +
            fileResults.packageConfigs.count +
            fileResults.cicdConfigs.count +
            fileResults.infrastructure.count +
            fileResults.testFiles.count +
            fileResults.apiSpecs.count +
            fileResults.sourceCode.count;
        // Map file scanner categories to inventory categories:
        // package/CI/CLAUDE.md buckets merge into "configuration"; agent
        // artifacts, session context and API specs merge into "architecture".
        const inventory = {
            totalArtifacts,
            categories: {
                sourceCode: fileResults.sourceCode,
                configuration: {
                    files: [
                        ...fileResults.packageConfigs.files,
                        ...fileResults.cicdConfigs.files,
                        ...fileResults.configAsDocs.files,
                    ],
                    count: fileResults.packageConfigs.count +
                        fileResults.cicdConfigs.count +
                        fileResults.configAsDocs.count,
                },
                documentation: fileResults.documentation,
                tests: fileResults.testFiles,
                architecture: {
                    files: [
                        ...fileResults.agentArtifacts.files,
                        ...fileResults.sessionContext.files,
                        ...fileResults.apiSpecs.files,
                    ],
                    count: fileResults.agentArtifacts.count +
                        fileResults.sessionContext.count +
                        fileResults.apiSpecs.count,
                },
                infrastructure: fileResults.infrastructure,
            },
            // Git stats pass through, with languages supplied by the code scan.
            gitStats: {
                ...gitStats,
                mainLanguages: codeStructure.languages.map((l) => l.name),
            },
        };
        logger_1.logger.info('Scan completed successfully', {
            total: totalArtifacts,
            categories: {
                sourceCode: inventory.categories.sourceCode.count,
                configuration: inventory.categories.configuration.count,
                documentation: inventory.categories.documentation.count,
                tests: inventory.categories.tests.count,
                architecture: inventory.categories.architecture.count,
                infrastructure: inventory.categories.infrastructure.count,
            },
            languages: inventory.gitStats.mainLanguages,
            contributors: inventory.gitStats.contributors,
            commits: inventory.gitStats.totalCommits,
        });
        return inventory;
    }
    catch (error) {
        logger_1.logger.error('Scan failed', { error });
        throw error;
    }
}
|
|
99
|
+
var file_scanner_2 = require("./file-scanner");
|
|
100
|
+
Object.defineProperty(exports, "scanFiles", { enumerable: true, get: function () { return file_scanner_2.scanFiles; } });
|
|
101
|
+
var git_scanner_2 = require("./git-scanner");
|
|
102
|
+
Object.defineProperty(exports, "scanGit", { enumerable: true, get: function () { return git_scanner_2.scanGit; } });
|
|
103
|
+
var code_scanner_2 = require("./code-scanner");
|
|
104
|
+
Object.defineProperty(exports, "scanCode", { enumerable: true, get: function () { return code_scanner_2.scanCode; } });
|
|
105
|
+
var artifact_scanner_2 = require("./artifact-scanner");
|
|
106
|
+
Object.defineProperty(exports, "enrichArtifacts", { enumerable: true, get: function () { return artifact_scanner_2.enrichArtifacts; } });
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
 * State Management
 * Handles reading and writing .vibe-doc/state.json
 */
import { VibedocState } from './schema';
/**
 * Read the vibe-doc state from disk
 * Returns null if state file doesn't exist
 * (also returns null, after logging, when the file cannot be read or parsed)
 */
export declare function readState(projectPath: string): VibedocState | null;
/**
 * Write the vibe-doc state to disk
 * Creates .vibe-doc directory if it doesn't exist
 * @throws Error when the directory or file cannot be written
 */
export declare function writeState(projectPath: string, state: VibedocState): void;
/**
 * Initialize a fresh vibe-doc state with empty values
 * (lastScan is stamped with the current time)
 */
export declare function initState(): VibedocState;
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/state/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH,OAAO,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAKxC;;;GAGG;AACH,wBAAgB,SAAS,CAAC,WAAW,EAAE,MAAM,GAAG,YAAY,GAAG,IAAI,CAYlE;AAED;;;GAGG;AACH,wBAAgB,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,KAAK,EAAE,YAAY,GAAG,IAAI,CAWzE;AAED;;GAEG;AACH,wBAAgB,SAAS,IAAI,YAAY,CAwDxC"}
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* State Management
|
|
4
|
+
* Handles reading and writing .vibe-doc/state.json
|
|
5
|
+
*/
|
|
6
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
7
|
+
if (k2 === undefined) k2 = k;
|
|
8
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
9
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
10
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
11
|
+
}
|
|
12
|
+
Object.defineProperty(o, k2, desc);
|
|
13
|
+
}) : (function(o, m, k, k2) {
|
|
14
|
+
if (k2 === undefined) k2 = k;
|
|
15
|
+
o[k2] = m[k];
|
|
16
|
+
}));
|
|
17
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
18
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
19
|
+
}) : function(o, v) {
|
|
20
|
+
o["default"] = v;
|
|
21
|
+
});
|
|
22
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
23
|
+
var ownKeys = function(o) {
|
|
24
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
25
|
+
var ar = [];
|
|
26
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
27
|
+
return ar;
|
|
28
|
+
};
|
|
29
|
+
return ownKeys(o);
|
|
30
|
+
};
|
|
31
|
+
return function (mod) {
|
|
32
|
+
if (mod && mod.__esModule) return mod;
|
|
33
|
+
var result = {};
|
|
34
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
35
|
+
__setModuleDefault(result, mod);
|
|
36
|
+
return result;
|
|
37
|
+
};
|
|
38
|
+
})();
|
|
39
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
40
|
+
exports.readState = readState;
|
|
41
|
+
exports.writeState = writeState;
|
|
42
|
+
exports.initState = initState;
|
|
43
|
+
const fs = __importStar(require("fs"));
|
|
44
|
+
const path = __importStar(require("path"));
|
|
45
|
+
const STATE_DIR = '.vibe-doc';
|
|
46
|
+
const STATE_FILE = 'state.json';
|
|
47
|
+
/**
 * Read the vibe-doc state from disk
 * Returns null if the state file doesn't exist, and also (after logging
 * to stderr) when it cannot be read or parsed.
 */
function readState(projectPath) {
    try {
        const statePath = path.join(projectPath, STATE_DIR, STATE_FILE);
        return fs.existsSync(statePath)
            ? JSON.parse(fs.readFileSync(statePath, 'utf-8'))
            : null;
    }
    catch (error) {
        console.error(`Failed to read state: ${error}`);
        return null;
    }
}
|
|
65
|
+
/**
 * Write the vibe-doc state to disk as pretty-printed JSON.
 * Creates the .vibe-doc directory if it doesn't exist.
 * @throws Error wrapping the underlying filesystem failure.
 */
function writeState(projectPath, state) {
    try {
        const stateDir = path.join(projectPath, STATE_DIR);
        // recursive:true is a no-op when the directory already exists.
        fs.mkdirSync(stateDir, { recursive: true });
        const serialized = JSON.stringify(state, null, 2);
        fs.writeFileSync(path.join(stateDir, STATE_FILE), serialized, 'utf-8');
    }
    catch (error) {
        throw new Error(`Failed to write state: ${error}`);
    }
}
|
|
82
|
+
/**
 * Initialize a fresh vibe-doc state with empty values.
 * lastScan is stamped with the current time; everything else is zeroed.
 */
function initState() {
    const emptyBucket = () => ({ files: [], count: 0 });
    const emptyGitStats = {
        totalCommits: 0,
        contributors: 0,
        lastCommitDate: '',
        mainLanguages: [],
    };
    return {
        version: '1.0.0',
        lastScan: new Date().toISOString(),
        projectProfile: {
            interviewAnswers: {
                projectName: '',
                projectDescription: '',
                mainPurpose: '',
                primaryUsers: '',
                coreFeatures: [],
                technologies: [],
                deploymentModel: '',
                architectureStyle: '',
            },
            providedContext: '',
        },
        artifactInventory: {
            totalArtifacts: 0,
            categories: {
                sourceCode: emptyBucket(),
                configuration: emptyBucket(),
                documentation: emptyBucket(),
                tests: emptyBucket(),
                architecture: emptyBucket(),
                infrastructure: emptyBucket(),
            },
            gitStats: emptyGitStats,
        },
        classification: {
            primaryCategory: '',
            secondaryCategory: '',
            deploymentContext: [],
            contextModifiers: [],
            confidence: 0,
            rationale: '',
            userConfirmed: false,
        },
        gapReport: {
            summary: {
                totalArtifacts: 0,
                docsCovered: 0,
                docsPartial: 0,
                docsMissing: 0,
                coveragePercent: 0,
            },
            gaps: [],
        },
        generatedDocs: [],
        history: [],
    };
}
|