readme-gen-analyzer 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92) hide show
  1. package/.turbo/turbo-build.log +4 -0
  2. package/README.md +77 -0
  3. package/dist/analyzers/ast-feature.detector.d.ts +10 -0
  4. package/dist/analyzers/ast-feature.detector.js +151 -0
  5. package/dist/analyzers/definition.extractor.d.ts +9 -0
  6. package/dist/analyzers/definition.extractor.js +141 -0
  7. package/dist/analyzers/dependency.analyzer.d.ts +14 -0
  8. package/dist/analyzers/dependency.analyzer.js +30 -0
  9. package/dist/analyzers/devops.analyzer.d.ts +3 -0
  10. package/dist/analyzers/devops.analyzer.js +42 -0
  11. package/dist/analyzers/env.extractor.d.ts +7 -0
  12. package/dist/analyzers/env.extractor.js +46 -0
  13. package/dist/analyzers/example.analyzer.d.ts +6 -0
  14. package/dist/analyzers/example.analyzer.js +84 -0
  15. package/dist/analyzers/feature.detector.d.ts +4 -0
  16. package/dist/analyzers/feature.detector.js +68 -0
  17. package/dist/analyzers/package.parser.d.ts +28 -0
  18. package/dist/analyzers/package.parser.js +341 -0
  19. package/dist/analyzers/polyglot.extractors.d.ts +18 -0
  20. package/dist/analyzers/polyglot.extractors.js +153 -0
  21. package/dist/analyzers/route.extractor.d.ts +10 -0
  22. package/dist/analyzers/route.extractor.js +41 -0
  23. package/dist/analyzers/schema.analyzer.d.ts +3 -0
  24. package/dist/analyzers/schema.analyzer.js +48 -0
  25. package/dist/analyzers/semantic.refiner.d.ts +16 -0
  26. package/dist/analyzers/semantic.refiner.js +154 -0
  27. package/dist/analyzers/structure.analyzer.d.ts +18 -0
  28. package/dist/analyzers/structure.analyzer.js +150 -0
  29. package/dist/analyzers/trace.analyzer.d.ts +10 -0
  30. package/dist/analyzers/trace.analyzer.js +75 -0
  31. package/dist/index.d.ts +27 -0
  32. package/dist/index.js +44 -0
  33. package/dist/internal/analysis/chunker.d.ts +25 -0
  34. package/dist/internal/analysis/chunker.js +78 -0
  35. package/dist/internal/analysis/evidence.d.ts +17 -0
  36. package/dist/internal/analysis/evidence.js +130 -0
  37. package/dist/internal/analysis/techStack.d.ts +6 -0
  38. package/dist/internal/analysis/techStack.js +67 -0
  39. package/dist/internal/llm/llmClient.d.ts +69 -0
  40. package/dist/internal/llm/llmClient.js +204 -0
  41. package/dist/internal/pipeline/merge.d.ts +14 -0
  42. package/dist/internal/pipeline/merge.js +53 -0
  43. package/dist/internal/pipeline/persona.d.ts +7 -0
  44. package/dist/internal/pipeline/persona.js +28 -0
  45. package/dist/internal/pipeline/quality.d.ts +9 -0
  46. package/dist/internal/pipeline/quality.js +52 -0
  47. package/dist/internal/pipeline/readme.d.ts +3 -0
  48. package/dist/internal/pipeline/readme.js +80 -0
  49. package/dist/internal/pipeline/runPipeline.d.ts +57 -0
  50. package/dist/internal/pipeline/runPipeline.js +101 -0
  51. package/dist/internal/pipeline/stages.d.ts +5 -0
  52. package/dist/internal/pipeline/stages.js +85 -0
  53. package/dist/internal/pipeline/types.d.ts +98 -0
  54. package/dist/internal/pipeline/types.js +2 -0
  55. package/dist/types.d.ts +85 -0
  56. package/dist/types.js +2 -0
  57. package/dist/utils/scanner.d.ts +14 -0
  58. package/dist/utils/scanner.js +81 -0
  59. package/dist/utils/scriptsMarkdown.d.ts +9 -0
  60. package/dist/utils/scriptsMarkdown.js +131 -0
  61. package/package.json +19 -0
  62. package/src/analyzers/ast-feature.detector.ts +173 -0
  63. package/src/analyzers/definition.extractor.ts +156 -0
  64. package/src/analyzers/dependency.analyzer.ts +32 -0
  65. package/src/analyzers/devops.analyzer.ts +44 -0
  66. package/src/analyzers/env.extractor.ts +58 -0
  67. package/src/analyzers/example.analyzer.ts +96 -0
  68. package/src/analyzers/feature.detector.ts +65 -0
  69. package/src/analyzers/package.parser.ts +364 -0
  70. package/src/analyzers/polyglot.extractors.ts +169 -0
  71. package/src/analyzers/route.extractor.ts +54 -0
  72. package/src/analyzers/schema.analyzer.ts +50 -0
  73. package/src/analyzers/semantic.refiner.ts +163 -0
  74. package/src/analyzers/structure.analyzer.ts +156 -0
  75. package/src/analyzers/trace.analyzer.ts +75 -0
  76. package/src/index.ts +29 -0
  77. package/src/internal/analysis/chunker.ts +103 -0
  78. package/src/internal/analysis/evidence.ts +152 -0
  79. package/src/internal/analysis/techStack.ts +71 -0
  80. package/src/internal/llm/llmClient.ts +261 -0
  81. package/src/internal/pipeline/merge.ts +63 -0
  82. package/src/internal/pipeline/persona.ts +27 -0
  83. package/src/internal/pipeline/quality.ts +47 -0
  84. package/src/internal/pipeline/readme.ts +98 -0
  85. package/src/internal/pipeline/runPipeline.ts +153 -0
  86. package/src/internal/pipeline/stages.ts +89 -0
  87. package/src/internal/pipeline/types.ts +102 -0
  88. package/src/types.ts +100 -0
  89. package/src/utils/scanner.ts +48 -0
  90. package/src/utils/scriptsMarkdown.ts +140 -0
  91. package/test-local.ts +16 -0
  92. package/tsconfig.json +16 -0
@@ -0,0 +1,4 @@
1
+
2
+ > @readme-gen/analyzer@1.0.0 build G:\Web_Develpoment\main_Projects\readme-gen\packages\analyzer
3
+ > tsc
4
+
package/README.md ADDED
@@ -0,0 +1,77 @@
1
+ # 🧬 @readme-gen/analyzer
2
+
3
+ **The shared repository analysis engine and semantic README pipeline for the readme-gen platform.**
4
+
5
+ ---
6
+
7
+ ## 📖 Overview
8
+
9
+ The `analyzer` package is the core grounding layer for all `readme-gen` generation. It extracts project evidence and provides a multi-stage semantic pipeline to ensure generated documentation is tied to concrete code data instead of generic model guesses.
10
+
11
+ ### Key Responsibilities
12
+ - **Repository Structural Analysis**: Mapping file systems and identifying monorepo boundaries.
13
+ - **Evidence Extraction**: Parsing package manifests, API routes, environment variables, and source signatures.
14
+ - **Semantic Mapping**: Merging raw extraction data into a canonical, AI-ready JSON structure.
15
+ - **Generation Orchestration**: Running the multi-stage LLM pipeline.
16
+ - **Quality Evaluation**: Scoring generated READMEs for clarity, completeness, and accuracy.
17
+
18
+ ---
19
+
20
+ ## 🏗️ Semantic Pipeline
21
+
22
+ The `analyzer` uses a sophisticated 6-stage pipeline to build high-quality documentation:
23
+
24
+ 1. **🔍 Evidence Extraction**: Gathers raw project facts (Dependencies, Routes, Env Vars, AST Patterns).
25
+ 2. **🧠 Intent Inference**: Deduces "why" the project exists and its primary goal.
26
+ 3. **✨ Feature Extraction**: Distills user-facing capabilities from extracted signatures.
27
+ 4. **🏛️ Architecture Analysis**: Maps internal project structure and flow.
28
+ 5. **🧩 Semantic Merge**: Consolidates all discoveries into a unified `ProjectAnalysis` object.
29
+ 6. **✍️ Markdown Generation**: Orchestrates the final markdown construction from semantic JSON.
30
+
31
+ ---
32
+
33
+ ## 🛠️ Package Structure
34
+
35
+ ```text
36
+ src/
37
+ ├── analyzers/ # Specialized extractors for Files, Routes, Env, and AST
38
+ ├── internal/ # Semantic pipeline, Evidence builder, and LLM orchestration
39
+ ├── utils/ # Shared prompt engineering and markdown rendering helpers
40
+ ├── types.ts # Repository-wide project analysis and extraction types
41
+ └── constants/ # Persona definitions and prompt templates
42
+ ```
43
+
44
+ ---
45
+
46
+ ## 📝 Key Features
47
+
48
+ ### Grounded Generation
49
+ The analyzer ensures that everything in the final README is backed by code evidence.
50
+ - **Rewriting Mode**: Maintains custom sections while updating grounding facts.
51
+ - **Appending Mode**: Intelligently adds new sections to existing documentation.
52
+
53
+ ### Multi-Model Compatibility
54
+ The analyzer's prompts are optimized for:
55
+ - **OpenAI** (GPT-4o / GPT-3.5)
56
+ - **Gemini** (Pro 1.5 / Flash)
57
+ - **Groq** (Llama 3 / Mixtral)
58
+
59
+ ---
60
+
61
+ ## 🛠️ Development
62
+
63
+ From the workspace root:
64
+
65
+ ```bash
66
+ pnpm --filter @readme-gen/analyzer build
67
+ ```
68
+
69
+ ### Dependencies
70
+ - **ts-morph**: For high-fidelity AST analysis and source signature extraction.
71
+ - **ignore**: For respecting `.gitignore` rules during project analysis.
72
+
73
+ ---
74
+
75
+ <div align="center">
76
+ <sub>Part of the 🚀 <a href="../../README.md">readme-gen</a> ecosystem.</sub>
77
+ </div>
@@ -0,0 +1,10 @@
1
/**
 * A project capability detected from source code (e.g. "API Endpoints",
 * "Database Integration"), backed by concrete code snippets.
 */
export interface AstFeature {
    name: string;
    /** Supporting code locations; the detector caps this at 5 per feature. */
    evidence: {
        snippet: string;
        file: string;
    }[];
}
/**
 * AST-driven feature detection over a map of file paths to file contents.
 * TS/JS files are parsed with ts-morph; Python/Go files use regex heuristics.
 */
export declare class AstFeatureDetector {
    static detect(files: Record<string, string>): AstFeature[];
}
@@ -0,0 +1,151 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.AstFeatureDetector = void 0;
4
+ const ts_morph_1 = require("ts-morph");
5
class AstFeatureDetector {
    /**
     * Detects high-level project features (API endpoints, database usage,
     * authentication, environment configuration, CLI tooling) from a map of
     * file paths to file contents. TS/JS files are parsed with ts-morph;
     * Python and Go files are matched with regex heuristics.
     *
     * @param {Record<string, string>} files path -> source text
     * @returns {AstFeature[]} detected features, each with up to 5 evidence snippets
     */
    static detect(files) {
        const project = new ts_morph_1.Project({ useInMemoryFileSystem: true });
        const features = new Map();
        // Registers a snippet as evidence for a feature, creating the feature
        // entry on first sight.
        const addEvidence = (featureName, snippet, file) => {
            if (!features.has(featureName)) {
                features.set(featureName, { name: featureName, evidence: [] });
            }
            const feat = features.get(featureName);
            if (feat.evidence.length < 5) { // Limit evidence to 5 snippets per feature
                feat.evidence.push({ snippet, file });
            }
        };
        for (const [filePath, content] of Object.entries(files)) {
            if (!filePath.match(/\.(ts|js|tsx|jsx)$/))
                continue;
            const sourceFile = project.createSourceFile(filePath, content);
            // 1. Scan for Call Expressions (Mainly for app.get, mongoose.connect, etc.)
            const callExpressions = sourceFile.getDescendantsOfKind(ts_morph_1.SyntaxKind.CallExpression);
            for (const call of callExpressions) {
                const expression = call.getExpression();
                const text = call.getText();
                // Pattern: app.get, router.post, etc.
                if (expression.getKind() === ts_morph_1.SyntaxKind.PropertyAccessExpression) {
                    const pae = expression;
                    const name = pae.getName().toLowerCase();
                    const base = pae.getExpression().getText().toLowerCase();
                    // API Routes
                    if (['get', 'post', 'put', 'delete', 'patch', 'use'].includes(name)) {
                        if (['app', 'router', 'server', 'express'].includes(base)) {
                            addEvidence('API Endpoints', text.substring(0, 150), filePath);
                        }
                    }
                    // Database Integration (Mongoose, pg, MySQL)
                    if (name === 'connect' && (base === 'mongoose' || base === 'db')) {
                        addEvidence('Database Integration', text.substring(0, 150), filePath);
                    }
                    if (name === 'model' && base === 'mongoose') {
                        addEvidence('Database Integration', text.substring(0, 150), filePath);
                    }
                    if (name === 'createconnection' && (base === 'mysql' || base === 'db')) {
                        addEvidence('Database Integration', text.substring(0, 150), filePath);
                    }
                    // Authentication (JWT, bcrypt, passport)
                    if (['sign', 'verify'].includes(name) && base === 'jwt') {
                        addEvidence('Authentication', text.substring(0, 150), filePath);
                    }
                    if (['hash', 'compare'].includes(name) && base === 'bcrypt') {
                        addEvidence('Authentication', text.substring(0, 150), filePath);
                    }
                    if (['use', 'authenticate'].includes(name) && base === 'passport') {
                        addEvidence('Authentication', text.substring(0, 150), filePath);
                    }
                    // Environment Configuration
                    if (name === 'config' && base === 'dotenv') {
                        addEvidence('Environment Configuration', text.substring(0, 150), filePath);
                    }
                    if (name === 'get' && base === 'config') {
                        addEvidence('Environment Configuration', text.substring(0, 150), filePath);
                    }
                    // CLI Tool (Commander)
                    if (['command', 'option', 'parse', 'version'].includes(name) && ['program', 'cmd'].includes(base)) {
                        addEvidence('CLI Tool', text.substring(0, 150), filePath);
                    }
                }
            }
            // 2. Scan for New Expressions (Prisma, pg Pool)
            // FIX: scans 2-4 previously ran INSIDE the call-expression loop above,
            // re-scanning the entire file once per call expression (accidental
            // O(n^2) work and repeated addEvidence calls). They now run once per file.
            const newExpressions = sourceFile.getDescendantsOfKind(ts_morph_1.SyntaxKind.NewExpression);
            for (const newExpr of newExpressions) {
                const newText = newExpr.getText().toLowerCase();
                if (newText.includes('prismaclient')) {
                    addEvidence('Database Integration', newExpr.getText().substring(0, 150), filePath);
                }
                // NOTE(review): 'client' is broad (e.g. `new HttpClient()` also matches);
                // kept as-is to preserve existing detection behavior.
                if (newText.includes('pool') || newText.includes('client')) {
                    addEvidence('Database Integration', newExpr.getText().substring(0, 150), filePath);
                }
            }
            // 3. Scan for Property Access (process.env)
            const propAccess = sourceFile.getDescendantsOfKind(ts_morph_1.SyntaxKind.PropertyAccessExpression);
            for (const pae of propAccess) {
                if (pae.getText().startsWith('process.env.')) {
                    addEvidence('Environment Configuration', pae.getText(), filePath);
                }
            }
            // 4. Scan for Decorators (NestJS)
            const decorators = sourceFile.getDescendantsOfKind(ts_morph_1.SyntaxKind.Decorator);
            for (const dec of decorators) {
                const decName = dec.getName().toLowerCase();
                if (['get', 'post', 'put', 'delete', 'patch', 'controller'].includes(decName)) {
                    addEvidence('API Endpoints', dec.getText(), filePath);
                }
                if (['injectable'].includes(decName)) {
                    // Marker for service oriented architecture
                }
            }
            // 5. File-path based API Route Detection (Next.js, etc.)
            if (filePath.includes('pages/api/') || filePath.includes('app/api/')) {
                addEvidence('API Endpoints', `File-based route: ${filePath}`, filePath);
            }
            // 6. Direct variable usage for CLI
            if (content.includes('process.argv')) {
                addEvidence('CLI Tool', 'process.argv', filePath);
            }
        }
        // Regex heuristics for non-TS/JS ecosystems (Python, Go).
        for (const [filePath, content] of Object.entries(files)) {
            if (filePath.endsWith('.py')) {
                if (/@(?:app|router)\.(get|post|put|delete|patch)\s*\(/i.test(content)) {
                    addEvidence('API Endpoints', 'FastAPI/Starlette-style route decorators', filePath);
                }
                if (/flask|Flask\(__name__\)/.test(content) || /@app\.route\s*\(/i.test(content)) {
                    addEvidence('API Endpoints', 'Flask routes', filePath);
                }
                if (/django\.|from\s+django/.test(content)) {
                    addEvidence('Django application', 'Django imports', filePath);
                }
                if (/sqlalchemy|SQLAlchemy/.test(content)) {
                    addEvidence('Database Integration', 'SQLAlchemy', filePath);
                }
                if (/os\.getenv|os\.environ/.test(content)) {
                    addEvidence('Environment Configuration', 'os.getenv / os.environ', filePath);
                }
                if (/celery|Celery/.test(content)) {
                    addEvidence('Task queue', 'Celery', filePath);
                }
            }
            if (filePath.endsWith('.go')) {
                if (/gin-gonic\/gin|\.GET\s*\(|\.POST\s*\(/.test(content)) {
                    addEvidence('API Endpoints', 'Go HTTP handlers (gin / stdlib style)', filePath);
                }
                if (/gorm\.io|database\/sql|jackc\/pgx/.test(content)) {
                    addEvidence('Database Integration', 'Go database client', filePath);
                }
                if (/os\.Getenv/.test(content)) {
                    addEvidence('Environment Configuration', 'os.Getenv', filePath);
                }
                if (/spf13\/cobra/.test(content)) {
                    addEvidence('CLI Tool', 'Cobra CLI', filePath);
                }
                if (/grpc\.|google\.golang\.org\/grpc/.test(content)) {
                    addEvidence('gRPC', 'gRPC imports', filePath);
                }
            }
        }
        return Array.from(features.values());
    }
}
151
+ exports.AstFeatureDetector = AstFeatureDetector;
@@ -0,0 +1,9 @@
1
/**
 * Extracts a per-file list of human-readable definition signatures
 * (imports, classes, functions, interfaces, type aliases) for use as
 * README-generation evidence. Keys are repo-relative file paths.
 */
export declare class DefinitionExtractor {
    static extract(files: Record<string, string>): Record<string, string[]>;
    private static walkNode;
    private static formatSignature;
    private static formatParamsSnippet;
    private static formatParams;
    private static safeGetParamType;
    private static safeGetReturnType;
}
@@ -0,0 +1,141 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DefinitionExtractor = void 0;
4
+ const ts_morph_1 = require("ts-morph");
5
+ const polyglot_extractors_1 = require("./polyglot.extractors");
6
class DefinitionExtractor {
    /**
     * Builds a map of file path -> human-readable definition signatures
     * (imports, classes, methods, functions, interfaces, type aliases).
     * TS/JS files are parsed with ts-morph; other languages are delegated
     * to PolyglotExtractors and merged into the same map.
     *
     * @param {Record<string, string>} files path -> source text
     * @returns {Record<string, string[]>} path -> signature strings
     */
    static extract(files) {
        const project = new ts_morph_1.Project({ useInMemoryFileSystem: true });
        const result = {};
        for (const [filePath, content] of Object.entries(files)) {
            if (!filePath.match(/\.(ts|js|tsx|jsx)$/))
                continue;
            console.log(`[Analyzer] Parsing file for definitions: ${filePath}`);
            project.createSourceFile(filePath, content);
        }
        project.getSourceFiles().forEach(sourceFile => {
            const filePath = sourceFile.getFilePath();
            const definitions = [];
            // Extract Imports
            sourceFile.getImportDeclarations().forEach(imp => {
                const module = imp.getModuleSpecifierValue();
                const names = imp.getNamedImports().map(n => n.getName()).join(', ');
                if (names)
                    definitions.push(`Import: { ${names} } from "${module}"`);
                else if (imp.getDefaultImport())
                    definitions.push(`Import Default: ${imp.getDefaultImport()?.getText()} from "${module}"`);
            });
            // Recursive Extraction from all top-level nodes
            sourceFile.forEachChild(node => {
                this.walkNode(node, definitions);
            });
            // Interfaces/Types (Stay top-level mostly)
            sourceFile.getInterfaces().forEach(intf => {
                const name = intf.getName();
                const properties = intf.getProperties().map(p => `${p.getName()}: ${p.getType().getText()}`).join(', ');
                definitions.push(`Interface: ${name} { ${properties.substring(0, 200)} }`);
            });
            sourceFile.getTypeAliases().forEach(type => {
                definitions.push(`Type Alias: ${type.getName()} = ${type.getType().getText().substring(0, 200)}`);
            });
            if (definitions.length > 0) {
                // In-memory FS paths start with '/'; strip it so keys are repo-relative.
                const cleanPath = filePath.startsWith('/') ? filePath.substring(1) : filePath;
                result[cleanPath] = definitions;
            }
        });
        // Merge non-TS/JS definitions (Python, Go, ...) into the same map.
        const polyglot = polyglot_extractors_1.PolyglotExtractors.extractDefinitions(files);
        for (const [p, defs] of Object.entries(polyglot)) {
            if (!result[p])
                result[p] = defs;
            else
                result[p] = [...result[p], ...defs];
        }
        return result;
    }
    /**
     * Recursively records signatures for classes, functions, variable-assigned
     * functions, object-literal methods, and export assignments.
     */
    static walkNode(node, definitions) {
        // Classes
        if (ts_morph_1.Node.isClassDeclaration(node)) {
            const name = node.getName() || "AnonymousClass";
            definitions.push(`Class: ${name}`);
            node.getMethods().forEach(m => definitions.push(` ${this.formatSignature(m, 'Method')}`));
            node.getConstructors().forEach(c => definitions.push(` ${this.formatSignature(c, 'Constructor')}`));
        }
        // Functions
        if (ts_morph_1.Node.isFunctionDeclaration(node)) {
            const signature = this.formatSignature(node, 'Function');
            console.log(` [Extractor] Found Function: ${signature}`);
            definitions.push(signature);
        }
        // Variable-based Functions (Arrow or Function Expressions)
        if (ts_morph_1.Node.isVariableDeclaration(node)) {
            const initializer = node.getInitializer();
            if (initializer) {
                if (ts_morph_1.Node.isArrowFunction(initializer) || ts_morph_1.Node.isFunctionExpression(initializer)) {
                    const name = node.getName();
                    const params = this.formatSignature(initializer, 'Function (assigned)');
                    console.log(` [Extractor] Found Variable Function: ${name} ${params}`);
                    definitions.push(params);
                }
                else if (ts_morph_1.Node.isObjectLiteralExpression(initializer)) {
                    // If it's a small object literal with methods, extract them
                    initializer.getProperties().forEach(prop => {
                        if (ts_morph_1.Node.isMethodDeclaration(prop) || ts_morph_1.Node.isPropertyAssignment(prop)) {
                            const propInit = ts_morph_1.Node.isPropertyAssignment(prop) ? prop.getInitializer() : null;
                            if (ts_morph_1.Node.isMethodDeclaration(prop) || (propInit && (ts_morph_1.Node.isArrowFunction(propInit) || ts_morph_1.Node.isFunctionExpression(propInit)))) {
                                // FIX: was a ternary with two identical branches
                                // (`isPropertyAssignment(prop) ? prop.getName() : prop.getName()`);
                                // both node kinds expose getName(), so call it directly.
                                const name = prop.getName();
                                const fn = ts_morph_1.Node.isPropertyAssignment(prop) ? propInit : prop;
                                definitions.push(` Method (obj-prop): ${name}${this.formatParamsSnippet(fn)}`);
                            }
                        }
                    });
                }
            }
        }
        // Export Assignments
        if (ts_morph_1.Node.isExportAssignment(node)) {
            const expression = node.getExpression();
            if (ts_morph_1.Node.isArrowFunction(expression) || ts_morph_1.Node.isFunctionExpression(expression)) {
                definitions.push(`Export Default Function: ${this.formatSignature(expression, '')}`);
            }
        }
        // Recurse into children to find nested functions (but not too deep into implementations)
        if (!ts_morph_1.Node.isClassDeclaration(node) && !ts_morph_1.Node.isFunctionDeclaration(node) && !ts_morph_1.Node.isMethodDeclaration(node)) {
            node.forEachChild(child => this.walkNode(child, definitions));
        }
    }
    // Renders "async Kind: name(params) -> ReturnType" for a function-like node.
    static formatSignature(node, kind) {
        const name = node.getName?.() || "";
        const params = this.formatParamsSnippet(node);
        const asyncStr = node.isAsync?.() ? 'async ' : '';
        return `${asyncStr}${kind}${name ? ': ' + name : ''}${params}`;
    }
    // Renders "(params) -> ReturnType" for a function-like node.
    static formatParamsSnippet(node) {
        const params = this.formatParams(node.getParameters?.() || []);
        const returnType = this.safeGetReturnType(node);
        return `(${params}) -> ${returnType}`;
    }
    // Joins parameters as "name?: type" fragments.
    static formatParams(params) {
        return params.map(p => {
            const name = p.getName();
            const type = this.safeGetParamType(p);
            return `${name}${p.isOptional() ? '?' : ''}: ${type}`;
        }).join(', ');
    }
    // Type resolution can throw on malformed sources; fall back to 'any'.
    static safeGetParamType(p) {
        try {
            return p.getTypeNode()?.getText() || p.getType().getText() || 'any';
        }
        catch {
            return 'any';
        }
    }
    // Type resolution can throw on malformed sources; fall back to 'void'.
    static safeGetReturnType(fn) {
        try {
            return fn.getReturnTypeNode()?.getText() || fn.getReturnType().getText() || 'void';
        }
        catch {
            return 'void';
        }
    }
}
141
+ exports.DefinitionExtractor = DefinitionExtractor;
@@ -0,0 +1,14 @@
1
/** Dependency names grouped by their role in the project. */
interface DependencyGroups {
    core: string[];
    database: string[];
    testing: string[];
    deployment: string[];
}
/**
 * Classifies dependency names into core framework, database, testing,
 * and deployment groups via case-insensitive substring matching against
 * known package names. A dependency may appear in multiple groups.
 */
export declare class DependencyAnalyzer {
    private static CORE;
    private static DATABASE;
    private static TESTING;
    private static DEPLOYMENT;
    static analyze(dependencies: string[]): DependencyGroups;
}
export {};
@@ -0,0 +1,30 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DependencyAnalyzer = void 0;
4
class DependencyAnalyzer {
    static CORE = ['express', 'fastapi', 'flask', 'django', 'react', 'next', 'vue', 'nest', 'koa'];
    static DATABASE = ['mongoose', 'prisma', 'pg', 'mysql2', 'redis', 'sequelize', 'mongodb', 'psycopg2', 'sqlalchemy'];
    static TESTING = ['jest', 'mocha', 'chai', 'cypress', 'pytest', 'vitest', 'playwright'];
    static DEPLOYMENT = ['docker', 'aws-sdk', 'firebase', 'vercel', 'netlify', 'terraform'];
    /**
     * Sorts dependency names into core/database/testing/deployment buckets.
     * Matching is case-insensitive substring matching against the known-package
     * lists above, so a single dependency can land in more than one bucket.
     *
     * @param {string[]} dependencies package names to classify
     * @returns {DependencyGroups} the grouped names (original casing preserved)
     */
    static analyze(dependencies) {
        const groups = { core: [], database: [], testing: [], deployment: [] };
        // True when any known package name appears inside the candidate.
        const matchesAny = (known, candidate) => known.some(pkg => candidate.includes(pkg));
        for (const dep of dependencies) {
            const lowered = dep.toLowerCase();
            if (matchesAny(this.CORE, lowered))
                groups.core.push(dep);
            if (matchesAny(this.DATABASE, lowered))
                groups.database.push(dep);
            if (matchesAny(this.TESTING, lowered))
                groups.testing.push(dep);
            if (matchesAny(this.DEPLOYMENT, lowered))
                groups.deployment.push(dep);
        }
        return groups;
    }
}
30
+ exports.DependencyAnalyzer = DependencyAnalyzer;
@@ -0,0 +1,3 @@
1
/**
 * Detects DevOps configuration from a map of file paths to file contents:
 * Dockerfile (base image, exposed ports, CMD), docker-compose services,
 * and GitHub Actions workflow jobs.
 * Returns an object with optional `docker`, `compose`, and `pipeline`
 * sections, or `undefined` when nothing was detected.
 */
export declare class DevOpsAnalyzer {
    static analyze(files: Record<string, string>): any;
}
@@ -0,0 +1,42 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.DevOpsAnalyzer = void 0;
4
+ class DevOpsAnalyzer {
5
+ static analyze(files) {
6
+ const devOps = {};
7
+ // 1. Dockerfile
8
+ const dockerPath = Object.keys(files).find(f => f.toLowerCase().endsWith('dockerfile'));
9
+ if (dockerPath) {
10
+ const content = files[dockerPath];
11
+ devOps.docker = {
12
+ baseImage: content.match(/FROM\s+([^\s\n]+)/i)?.[1],
13
+ ports: [...content.matchAll(/EXPOSE\s+(\d+)/gi)].map(m => m[1]),
14
+ command: content.match(/CMD\s+\[?([^\]\n]+)\]?/i)?.[1]?.replace(/"/g, '')
15
+ };
16
+ }
17
+ // 2. Docker Compose
18
+ const composePath = Object.keys(files).find(f => f.toLowerCase().includes('docker-compose') && f.endsWith('.yml'));
19
+ if (composePath) {
20
+ const content = files[composePath];
21
+ const services = [...content.matchAll(/^\s+([a-z0-9_-]+):/gm)]
22
+ .map(m => m[1])
23
+ .filter(s => !['services', 'networks', 'volumes', 'version'].includes(s));
24
+ const networks = [...content.matchAll(/^\s+networks:\s*\n(\s+- [a-z0-9_-]+\n)+/gi)].length > 0 ? ['Enabled'] : [];
25
+ devOps.compose = { services, networks };
26
+ }
27
+ // 3. GitHub Actions
28
+ const workflowPath = Object.keys(files).find(f => f.includes('.github/workflows') && f.endsWith('.yml'));
29
+ if (workflowPath) {
30
+ const content = files[workflowPath];
31
+ const jobs = [...content.matchAll(/^\s+([a-z0-9_-]+):/gm)]
32
+ .map(m => m[1])
33
+ .filter(j => !['jobs', 'on', 'workflow_dispatch', 'name'].includes(j));
34
+ devOps.pipeline = {
35
+ provider: 'GitHub Actions',
36
+ jobs
37
+ };
38
+ }
39
+ return Object.keys(devOps).length > 0 ? devOps : undefined;
40
+ }
41
+ }
42
+ exports.DevOpsAnalyzer = DevOpsAnalyzer;
@@ -0,0 +1,7 @@
1
/** Name of a single environment variable discovered in the project. */
export interface EnvVar {
    name: string;
}
/**
 * Extracts environment-variable names from .env files (KEY=value lines) and
 * from source code: process.env.* in JS/TS, os.getenv / os.environ in Python,
 * os.Getenv in Go. Returns a de-duplicated list of names.
 */
export declare class EnvExtractor {
    private static ENV_REGEX;
    static extract(files: Record<string, string>): string[];
}
@@ -0,0 +1,46 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.EnvExtractor = void 0;
4
class EnvExtractor {
    static ENV_REGEX = /process\.env\.([a-zA-Z_][a-zA-Z0-9_]*)/g;
    /**
     * Collects the de-duplicated set of environment-variable names used by
     * a project, from:
     *  - .env / .env.example files (KEY=value lines, '#' comments skipped)
     *  - JS/TS source via process.env.VAR
     *  - Python source via os.getenv / os.environ
     *  - Go source via os.Getenv
     *
     * @param {Record<string, string>} files path -> file text
     * @returns {string[]} variable names
     */
    static extract(files) {
        const envVars = new Set();
        for (const [filePath, content] of Object.entries(files)) {
            // 1. From .env or .env.example
            if (filePath.endsWith('.env') || filePath.endsWith('.env.example')) {
                const lines = content.split('\n');
                for (const line of lines) {
                    const trimmed = line.trim();
                    if (trimmed && !trimmed.startsWith('#')) {
                        const match = trimmed.match(/^([^=]+)=/);
                        if (match) {
                            envVars.add(match[1].trim());
                        }
                    }
                }
            }
            // 2. From code usage (process.env.VAR).
            // FIX: include .tsx/.jsx files (previously only .ts/.js), consistent
            // with the other analyzers; use matchAll so the shared g-flag regex's
            // lastIndex state can never leak between files.
            if (/\.(ts|js|tsx|jsx)$/.test(filePath)) {
                for (const m of content.matchAll(EnvExtractor.ENV_REGEX)) {
                    envVars.add(m[1]);
                }
            }
            if (filePath.endsWith('.py')) {
                for (const m of content.matchAll(/os\.(?:getenv|environ\.get)\(\s*["']([a-zA-Z_][a-zA-Z0-9_]*)["']/g)) {
                    envVars.add(m[1]);
                }
                for (const m of content.matchAll(/os\.environ\[\s*["']([a-zA-Z_][a-zA-Z0-9_]*)["']\s*\]/g)) {
                    envVars.add(m[1]);
                }
            }
            if (filePath.endsWith('.go')) {
                for (const m of content.matchAll(/os\.Getenv\(\s*["']([a-zA-Z_][a-zA-Z0-9_]*)["']\s*\)/g)) {
                    envVars.add(m[1]);
                }
            }
        }
        return Array.from(envVars);
    }
}
46
+ exports.EnvExtractor = EnvExtractor;
@@ -0,0 +1,6 @@
1
/**
 * Mines test files for representative usage examples to ground README code
 * samples: JS/TS `.test`/`.spec` files (and `__tests__` dirs), Python
 * `test_*` functions, and Go `Test*` functions in `*_test.go` files.
 * Returns up to 15 { description, code, file } entries, or undefined when
 * no examples were found.
 */
export declare class ExampleAnalyzer {
    static analyze(files: Record<string, string>): any[] | undefined;
    private static isPythonTestPath;
    private static collectPythonTests;
    private static collectGoTests;
}
@@ -0,0 +1,84 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ExampleAnalyzer = void 0;
4
+ const ts_morph_1 = require("ts-morph");
5
class ExampleAnalyzer {
    /**
     * Mines test files for usage examples (max 15 overall).
     * JS/TS `it(...)` / `test(...)` bodies are parsed with ts-morph; Python and
     * Go tests are collected with lightweight regex slicing.
     * Returns undefined when no examples were found.
     *
     * @param {Record<string, string>} files path -> file text
     */
    static analyze(files) {
        const examples = [];
        const project = new ts_morph_1.Project({ useInMemoryFileSystem: true });
        for (const [filePath, content] of Object.entries(files)) {
            // Python test files are handled by the regex collector, not ts-morph.
            if (filePath.endsWith('.py') && this.isPythonTestPath(filePath)) {
                this.collectPythonTests(filePath, content, examples);
                continue;
            }
            // Go tests follow the *_test.go convention.
            if (filePath.endsWith('_test.go')) {
                this.collectGoTests(filePath, content, examples);
                continue;
            }
            // Only JS/TS test/spec files (or anything under __tests__) qualify.
            if (!filePath.match(/\.(test|spec)\.(ts|js|jsx|tsx)$/) && !filePath.includes('__tests__'))
                continue;
            const sourceFile = project.createSourceFile(filePath, content);
            const callExpressions = sourceFile.getDescendantsOfKind(ts_morph_1.SyntaxKind.CallExpression);
            for (const call of callExpressions) {
                const name = call.getExpression().getText().toLowerCase();
                if (['it', 'test'].includes(name)) {
                    const args = call.getArguments();
                    // args[0] is the test title, args[1] the test callback.
                    if (args.length >= 2) {
                        const description = args[0].getText().replace(/['"`]/g, '');
                        const body = args[1].getText();
                        // Heuristic for "Good" examples: contain multiple calls or interesting snippets
                        const isInteresting = body.includes('await') || body.includes('expect') || body.split('\n').length > 5;
                        if (isInteresting && examples.length < 15) {
                            // Strip the arrow-function wrapper so only the body text remains.
                            const cleanBody = body.replace(/^\(\) => \{|\}$|^\(async \(\) => \{|\}$/g, '').trim();
                            examples.push({
                                description,
                                code: cleanBody,
                                file: filePath
                            });
                        }
                    }
                }
            }
        }
        return examples.length > 0 ? examples : undefined;
    }
    // True for conventional Python test locations: a test_*.py basename, or
    // any file under a /tests/ or /test/ directory.
    static isPythonTestPath(filePath) {
        const base = filePath.split('/').pop() || '';
        if (base.startsWith('test_') && base.endsWith('.py'))
            return true;
        if (filePath.includes('/tests/') || filePath.includes('/test/'))
            return true;
        return false;
    }
    // Slices each `def test_*` / `async def test_*` block out of a Python test
    // file, capped at 800 characters per example.
    static collectPythonTests(filePath, content, examples) {
        const re = /^(async\s+)?def\s+(test_\w+)\s*\([^)]*\):/gm;
        let m;
        while ((m = re.exec(content)) !== null) {
            if (examples.length >= 15)
                break;
            const start = m.index;
            // Slice up to the next top-level `def` (or the 800-char cap).
            // NOTE(review): the '\ndef ' probe misses a following `async def`, so the
            // slice then falls back to the 800-char cap — confirm if that matters.
            const next = content.indexOf('\ndef ', start + 5);
            const block = content.slice(start, next === -1 ? Math.min(start + 800, content.length) : Math.min(next, start + 800));
            examples.push({
                description: m[2],
                code: block.trim(),
                file: filePath,
            });
        }
    }
    // Captures up to 700 characters from each Go `func TestXxx(` declaration.
    static collectGoTests(filePath, content, examples) {
        const re = /^func\s+(Test\w+)\s*\(/gm;
        let m;
        while ((m = re.exec(content)) !== null) {
            if (examples.length >= 15)
                break;
            const block = content.slice(m.index, m.index + 700);
            examples.push({
                description: m[1],
                code: block.trim(),
                file: filePath,
            });
        }
    }
}
84
+ exports.ExampleAnalyzer = ExampleAnalyzer;
@@ -0,0 +1,4 @@
1
/**
 * Maps known dependency names — together with Docker and route-presence
 * signals — to human-readable feature labels for the generated README.
 */
export declare class FeatureDetector {
    private static FEATURE_MAP;
    static detect(dependencies: string[], devDependencies: string[], hasDocker: boolean, hasRoutes: boolean): string[];
}