@chappibunny/repolens 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,139 @@
1
+ import { execSync } from "node:child_process";
2
+ import { log } from "../utils/logger.js";
3
+ import { fetchWithRetry } from "../utils/retry.js";
4
+
5
// Normalize any value to a trimmed string; falsy inputs collapse to "".
function safe(text) {
  const value = text ? String(text) : "";
  return value.trim();
}
8
+
9
/**
 * Render the markdown body for the RepoLens PR comment.
 *
 * @param {{added: Array, removed: Array, modified: Array,
 *          addedRoutes: string[], removedRoutes: string[],
 *          impactedModules: string[]}} diff - architecture diff summary
 * @returns {string} markdown document, lines joined with "\n"
 */
function buildBody(diff) {
  // Shared renderer for the three optional sections: heading, up to `limit`
  // bullet entries in backticks, then a blank separator line. Skips empty lists.
  const pushSection = (lines, heading, entries, limit) => {
    if (entries.length === 0) return;
    lines.push(`### ${heading}`, "");
    for (const entry of entries.slice(0, limit)) {
      lines.push(`- \`${entry}\``);
    }
    lines.push("");
  };

  const lines = [
    "## RepoLens — Architecture Summary",
    "",
    `**Added files:** ${diff.added.length}`,
    `**Removed files:** ${diff.removed.length}`,
    `**Modified files:** ${diff.modified.length}`,
    `**Added routes:** ${diff.addedRoutes.length}`,
    `**Removed routes:** ${diff.removedRoutes.length}`,
    `**Impacted modules:** ${diff.impactedModules.length}`,
    ""
  ];

  pushSection(lines, "Added routes", diff.addedRoutes, 10);
  pushSection(lines, "Removed routes", diff.removedRoutes, 10);
  pushSection(lines, "Impacted modules", diff.impactedModules, 12);

  lines.push("_Generated by RepoLens_");
  return lines.join("\n");
}
49
+
50
/**
 * Make an authenticated call to the GitHub REST API, with retry/backoff
 * delegated to fetchWithRetry. Resolves with the parsed JSON payload.
 *
 * @param {string} method - HTTP verb ("GET", "POST", "PATCH", ...)
 * @param {string} url - path relative to https://api.github.com
 * @param {Object} [body] - optional JSON request body
 * @throws {Error} if GITHUB_TOKEN / GITHUB_REPOSITORY are unset, or the
 *   response is not ok
 */
async function githubRequest(method, url, body) {
  const token = process.env.GITHUB_TOKEN;
  if (!token) throw new Error("Missing GITHUB_TOKEN");

  const repo = process.env.GITHUB_REPOSITORY;
  if (!repo) throw new Error("Missing GITHUB_REPOSITORY");

  const headers = {
    "Authorization": `Bearer ${token}`,
    "Accept": "application/vnd.github+json",
    "Content-Type": "application/json",
    "User-Agent": "RepoLens"
  };

  const retryOptions = {
    retries: 3,
    baseDelayMs: 500,
    maxDelayMs: 4000,
    label: `GitHub ${method} ${url}`
  };

  const res = await fetchWithRetry(
    `https://api.github.com${url}`,
    { method, headers, body: body ? JSON.stringify(body) : undefined },
    retryOptions
  );

  if (res.ok) {
    return await res.json();
  }

  const text = await res.text();
  throw new Error(`GitHub API error ${res.status}: ${text}`);
}
80
+
81
/**
 * Locate a previously posted RepoLens comment on the given PR/issue, so the
 * caller can update it instead of posting a duplicate.
 *
 * @returns {Promise<Object|undefined>} the matching comment, or undefined
 */
async function findExistingRepoLensComment(owner, repo, issueNumber) {
  const marker = "## RepoLens — Architecture Summary";
  const comments = await githubRequest(
    "GET",
    `/repos/${owner}/${repo}/issues/${issueNumber}/comments`
  );

  for (const comment of comments) {
    if (typeof comment.body === "string" && comment.body.includes(marker)) {
      return comment;
    }
  }
  return undefined;
}
91
+
92
/**
 * Create or update the RepoLens architecture-summary comment on the current
 * pull request. Intended to run inside GitHub Actions: reads the event name,
 * repository slug and event payload from the standard GITHUB_* env vars.
 * No-ops (with a log line) outside pull_request events or when no PR number
 * can be found in the event payload.
 *
 * @param {Object} diff - architecture diff consumed by buildBody()
 */
export async function upsertPrComment(diff) {
  const eventName = process.env.GITHUB_EVENT_NAME;
  if (eventName !== "pull_request") {
    log("GITHUB_EVENT_NAME is not 'pull_request', skipping PR comment.");
    return;
  }

  // Fail fast with a clear message instead of a TypeError on .split() below.
  const repoFull = process.env.GITHUB_REPOSITORY;
  if (!repoFull) throw new Error("Missing GITHUB_REPOSITORY");

  const eventPath = process.env.GITHUB_EVENT_PATH;
  if (!eventPath) throw new Error("Missing GITHUB_EVENT_PATH");

  // The PR number only lives in the event payload, not in an env var.
  const fs = await import("node:fs/promises");
  const raw = await fs.readFile(eventPath, "utf8");
  const event = JSON.parse(raw);

  const issueNumber = event?.pull_request?.number;
  if (!issueNumber) {
    log("No PR number found, skipping PR comment.");
    return;
  }

  const [owner, repo] = repoFull.split("/");
  const body = buildBody(diff);

  // Upsert: PATCH the existing RepoLens comment if one is found, else POST.
  const existing = await findExistingRepoLensComment(owner, repo, issueNumber);

  if (existing) {
    await githubRequest(
      "PATCH",
      `/repos/${owner}/${repo}/issues/comments/${existing.id}`,
      { body }
    );
    log(`Updated existing PR comment ${existing.id}`);
    return;
  }

  const created = await githubRequest(
    "POST",
    `/repos/${owner}/${repo}/issues/${issueNumber}/comments`,
    { body }
  );

  log(`Created PR comment ${created.id}`);
}
@@ -0,0 +1,123 @@
1
+ // Orchestrate generation of complete documentation set
2
+
3
+ import { buildAIContext, buildModuleContext } from "../analyzers/context-builder.js";
4
+ import { inferDataFlows } from "../analyzers/flow-inference.js";
5
+ import { getActiveDocuments } from "../ai/document-plan.js";
6
+ import {
7
+ generateExecutiveSummary,
8
+ generateSystemOverview,
9
+ generateBusinessDomains,
10
+ generateArchitectureOverview,
11
+ generateDataFlows,
12
+ generateDeveloperOnboarding
13
+ } from "../ai/generate-sections.js";
14
+ import { renderModuleCatalog as renderModuleCatalogOriginal } from "../renderers/render.js";
15
+ import { renderRouteMap as renderRouteMapOriginal } from "../renderers/render.js";
16
+ import { renderApiSurface as renderApiSurfaceOriginal } from "../renderers/render.js";
17
+ import { renderSystemMap } from "../renderers/renderMap.js";
18
+ import { renderArchitectureDiff } from "../renderers/renderDiff.js";
19
+ import { info } from "../utils/logger.js";
20
+
21
/**
 * Orchestrate the full documentation pipeline: build the AI/module/flow
 * contexts once, then render each active document in plan order. Generation
 * is best-effort — a single failed document is logged and skipped rather
 * than aborting the whole set.
 *
 * @param {Object} scanResult - repository scan output
 * @param {Object} config - RepoLens configuration
 * @param {Object|null} [diffData] - optional architecture diff
 * @returns {Promise<{documents: Array, artifacts: Object, config: Object}>}
 */
export async function generateDocumentSet(scanResult, config, diffData = null) {
  info("Building structured context for AI...");

  const aiContext = buildAIContext(scanResult, config);
  const moduleContext = buildModuleContext(scanResult.modules, config);
  const flows = inferDataFlows(scanResult, config);

  const activeDocuments = getActiveDocuments(config);
  info(`Generating ${activeDocuments.length} documentation files...`);

  // All generators share the same immutable inputs, so build the context once.
  const sharedContext = { scanResult, config, aiContext, moduleContext, flows, diffData };
  const documents = [];

  for (const docPlan of activeDocuments) {
    try {
      const content = await generateDocument(docPlan, sharedContext);
      documents.push({
        ...docPlan,
        content,
        generated: new Date().toISOString()
      });
      info(`✓ Generated ${docPlan.filename}`);
    } catch (error) {
      // Deliberate swallow: surface the failure in the log and continue.
      info(`✗ Failed to generate ${docPlan.filename}: ${error.message}`);
    }
  }

  return {
    documents,
    artifacts: {
      context: aiContext,
      modules: moduleContext,
      flows
    },
    config
  };
}
74
+
75
/**
 * Route a single document plan to its generator. AI-backed documents call
 * the generate-* helpers; "hybrid" documents currently use only their
 * deterministic renderers (AI enhancement planned).
 *
 * @param {{key: string}} docPlan - document plan entry
 * @param {Object} context - shared generation context
 * @returns {Promise<string>} rendered document content
 * @throws {Error} for an unrecognized docPlan.key
 */
async function generateDocument(docPlan, context) {
  const { key } = docPlan;
  const { scanResult, config, aiContext, flows, diffData } = context;

  const generators = {
    executive_summary: () => generateExecutiveSummary(aiContext),
    system_overview: () => generateSystemOverview(aiContext),
    business_domains: () => generateBusinessDomains(aiContext),
    architecture_overview: () => generateArchitectureOverview(aiContext),
    // Hybrid: deterministic skeleton + AI enhancement (for now, just deterministic)
    module_catalog: () => renderModuleCatalogOriginal(config, scanResult),
    route_map: () => renderRouteMapOriginal(config, scanResult),
    api_surface: () => renderApiSurfaceOriginal(config, scanResult),
    data_flows: () => generateDataFlows(flows, aiContext),
    // Without diff data there is nothing to report.
    arch_diff: () =>
      diffData ? renderArchitectureDiff(diffData) : "# Architecture Diff\n\nNo changes detected.",
    // Hybrid: deterministic diagram + AI explanation (for now, just diagram)
    system_map: () => renderSystemMap(scanResult, config),
    developer_onboarding: () => generateDeveloperOnboarding(aiContext)
  };

  // Object.hasOwn guards against prototype keys (e.g. "toString") matching.
  if (!Object.hasOwn(generators, key)) {
    throw new Error(`Unknown document type: ${key}`);
  }

  return await generators[key]();
}
@@ -0,0 +1,85 @@
1
+ // Write generated documentation set to disk
2
+
3
+ import fs from "node:fs/promises";
4
+ import path from "node:path";
5
+ import { info } from "../utils/logger.js";
6
+
7
/**
 * Write a generated documentation set (and optional JSON artifacts) to disk.
 *
 * @param {{documents: Array, artifacts: Object, config: Object}} docSet
 * @param {string} [targetDir] - repository root; defaults to process.cwd()
 * @returns {Promise<{outputDir: string, documentCount: number, files: string[]}>}
 */
export async function writeDocumentSet(docSet, targetDir = process.cwd()) {
  const { documents, artifacts, config } = docSet;

  const outputDir = path.join(
    targetDir,
    config.documentation?.output_dir || ".repolens"
  );

  await fs.mkdir(outputDir, { recursive: true });

  info(`Writing documentation to ${outputDir}`);

  // Sequential on purpose: keeps the "✓ Wrote" log lines in document order.
  for (const doc of documents) {
    const filePath = path.join(outputDir, doc.filename);
    await fs.writeFile(filePath, doc.content, "utf8");
    info(`✓ Wrote ${doc.filename}`);
  }

  // Artifacts are written unless explicitly disabled in config.
  if (config.documentation?.include_artifacts !== false) {
    const artifactsDir = path.join(outputDir, "artifacts");
    await fs.mkdir(artifactsDir, { recursive: true });

    const writeJson = (name, data) =>
      fs.writeFile(
        path.join(artifactsDir, name),
        JSON.stringify(data, null, 2),
        "utf8"
      );

    // The three artifact files are independent — write them concurrently.
    await Promise.all([
      writeJson("ai-context.json", artifacts.context),
      writeJson("modules.json", artifacts.modules),
      writeJson("flows.json", artifacts.flows)
    ]);

    info(`✓ Wrote artifacts to artifacts/`);
  }

  info(`Documentation written successfully to ${outputDir}`);

  return {
    outputDir,
    documentCount: documents.length,
    files: documents.map((d) => d.filename)
  };
}
65
+
66
/**
 * Load the previously written AI context artifact, if one exists.
 * Returns null when the artifact is missing, unreadable, or invalid JSON —
 * callers treat that as "no previous documentation set".
 *
 * @param {string} [targetDir] - repository root; defaults to process.cwd()
 * @param {Object} [config] - RepoLens configuration (may be omitted)
 * @returns {Promise<Object|null>} parsed ai-context.json, or null
 */
export async function readPreviousDocumentSet(targetDir = process.cwd(), config) {
  // Optional chaining on `config` itself: the original dereferenced
  // config.documentation outside the try block, so a missing config threw a
  // TypeError instead of returning null.
  const outputDir = path.join(
    targetDir,
    config?.documentation?.output_dir || ".repolens"
  );

  const contextPath = path.join(outputDir, "artifacts", "ai-context.json");

  try {
    const contextData = await fs.readFile(contextPath, "utf8");
    return JSON.parse(contextData);
  } catch {
    // Expected on first run or after a clean checkout.
    return null;
  }
}
package/src/doctor.js ADDED
@@ -0,0 +1,174 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import { loadConfig } from "./core/config.js";
4
+ import { info } from "./utils/logger.js";
5
+ import { forceCheckForUpdates } from "./utils/update-check.js";
6
+
7
// Conventional source directories probed by the doctor (relative to the repo
// root) to confirm the repository has a recognizable layout. Each entry is
// checked both at the top level and under src/.
const DETECTABLE_ROOTS = [
  "app",
  "src/app",
  "components",
  "src/components",
  "lib",
  "src/lib",
  "hooks",
  "src/hooks",
  "store",
  "src/store",
  "pages",
  "src/pages"
];
21
+
22
// True if the path is accessible on disk; any fs error (ENOENT, EACCES, ...)
// maps to false.
async function fileExists(filePath) {
  return fs.access(filePath).then(
    () => true,
    () => false
  );
}
30
+
31
// True only if the path exists AND is a directory; stat failures map to false.
async function dirExists(dirPath) {
  const stats = await fs.stat(dirPath).catch(() => null);
  return stats !== null && stats.isDirectory();
}
39
+
40
/**
 * Return the subset of DETECTABLE_ROOTS that exist as directories under
 * repoRoot, preserving DETECTABLE_ROOTS order.
 *
 * @param {string} repoRoot - absolute path of the repository root
 * @returns {Promise<string[]>} relative paths of the roots found
 */
async function detectRepoRoots(repoRoot) {
  // Probe all candidates concurrently (the original awaited them one by one);
  // Promise.all preserves input order, so the result order is unchanged.
  const probes = await Promise.all(
    DETECTABLE_ROOTS.map(async (relativePath) => ({
      relativePath,
      exists: await dirExists(path.join(repoRoot, relativePath))
    }))
  );

  return probes
    .filter((probe) => probe.exists)
    .map((probe) => probe.relativePath);
}
52
+
53
// Log a passing check with a green check mark.
function ok(message) {
  info(`✅ ${message}`);
}

// Log a failed check with a red cross; callers treat these as blocking.
function fail(message) {
  info(`❌ ${message}`);
}

// Log a non-blocking issue with a warning sign.
function warn(message) {
  info(`⚠️ ${message}`);
}
64
+
65
/**
 * Run environment and configuration sanity checks for a RepoLens setup.
 * Logs each check with ok/fail/warn; on any blocking failure sets
 * process.exitCode = 1.
 *
 * @param {string} [targetDir] - repository root to inspect; defaults to cwd
 * @returns {Promise<{ok: boolean}>} overall verdict
 */
export async function runDoctor(targetDir = process.cwd()) {
  const repoRoot = path.resolve(targetDir);

  const repolensConfigPath = path.join(repoRoot, ".repolens.yml");
  const workflowPath = path.join(repoRoot, ".github", "workflows", "repolens.yml");
  const envExamplePath = path.join(repoRoot, ".env.example");
  const readmePath = path.join(repoRoot, "README.repolens.md");

  let hasFailures = false;

  info(`Running doctor for ${repoRoot}`);
  info("");

  // Check for RepoLens version updates
  info("Version Check:");
  info("");
  await forceCheckForUpdates();
  info("");

  // Probe all expected files concurrently; results are reported in a fixed
  // order below. hasConfig is reused for config parsing (the original stat'd
  // .repolens.yml a second time, doing redundant I/O with a TOCTOU window).
  const [hasConfig, hasWorkflow, hasEnvExample, hasReadme] = await Promise.all([
    fileExists(repolensConfigPath),
    fileExists(workflowPath),
    fileExists(envExamplePath),
    fileExists(readmePath)
  ]);

  if (hasConfig) {
    ok("Found .repolens.yml");
  } else {
    fail("Missing .repolens.yml");
    hasFailures = true;
  }

  if (hasWorkflow) {
    ok("Found .github/workflows/repolens.yml");
  } else {
    fail("Missing .github/workflows/repolens.yml");
    hasFailures = true;
  }

  // .env.example and the RepoLens README are recommended, not blocking.
  if (hasEnvExample) {
    ok("Found .env.example");
  } else {
    warn("Missing .env.example");
  }

  if (hasReadme) {
    ok("Found README.repolens.md");
  } else {
    warn("Missing README.repolens.md");
  }

  info("");

  let cfg = null;

  if (hasConfig) {
    try {
      cfg = await loadConfig(repolensConfigPath);
      ok("RepoLens config parsed successfully");
    } catch (error) {
      fail(`RepoLens config is invalid: ${error.message}`);
      hasFailures = true;
    }
  }

  // Structural checks only run when the config parsed successfully.
  if (cfg) {
    if (Array.isArray(cfg.publishers) && cfg.publishers.length > 0) {
      ok(`Configured publishers: ${cfg.publishers.join(", ")}`);
    } else {
      fail("No publishers configured");
      hasFailures = true;
    }

    if (Array.isArray(cfg.scan?.include) && cfg.scan.include.length > 0) {
      ok(`scan.include has ${cfg.scan.include.length} pattern(s)`);
    } else {
      fail("scan.include is missing or empty");
      hasFailures = true;
    }

    if (Array.isArray(cfg.module_roots) && cfg.module_roots.length > 0) {
      ok(`module_roots has ${cfg.module_roots.length} item(s)`);
    } else {
      fail("module_roots is missing or empty");
      hasFailures = true;
    }

    if (Array.isArray(cfg.outputs?.pages) && cfg.outputs.pages.length > 0) {
      ok(`outputs.pages has ${cfg.outputs.pages.length} page definition(s)`);
    } else {
      fail("outputs.pages is missing or empty");
      hasFailures = true;
    }
  }

  info("");

  const detectedRoots = await detectRepoRoots(repoRoot);

  if (detectedRoots.length > 0) {
    ok(`Detected repo roots: ${detectedRoots.join(", ")}`);
  } else {
    warn("No known repo roots detected (app/src/components/lib/hooks/store/pages)");
  }

  info("");

  if (hasFailures) {
    fail("RepoLens doctor found blocking issues.");
    process.exitCode = 1;
    return { ok: false };
  }

  ok("RepoLens doctor passed.");
  return { ok: true };
}
+ }