@chappibunny/repolens 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/migrate.js ADDED
@@ -0,0 +1,251 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import { info, warn, error as logError } from "./utils/logger.js";
4
+
5
+ /**
6
+ * Detect legacy workflow patterns that need migration
7
+ */
8
/**
 * Scan workflow YAML text for pre-v0.4.0 patterns that the migration
 * must rewrite.
 *
 * @param {string} content - Raw workflow file content.
 * @returns {{isLegacy: boolean, patterns: string[]}} Whether migration is
 *   needed and human-readable labels for each legacy pattern found.
 */
function detectLegacyPatterns(content) {
  // Ordered [label, matched] pairs — order determines the reported list.
  const checks = [
    ["cd tools/repolens", /cd\s+tools\/repolens/i.test(content)],
    ["npm install without @latest", /npm\s+(?:ci|install)(?:\s|$)(?!.*repolens@)/m.test(content)],
    ["npx repolens without @latest", /npx\s+repolens\s+(?!@latest)/m.test(content)],
    ["missing Node.js setup", !content.includes("actions/setup-node@")],
    ["missing environment variables", !content.includes("NOTION_TOKEN") && !content.includes("env:")]
  ];

  const patterns = checks
    .filter(([, matched]) => matched)
    .map(([label]) => label);

  return {
    isLegacy: patterns.length > 0,
    patterns
  };
}
39
+
40
+ /**
41
+ * Migrate workflow to v0.4.0 format
42
+ */
43
/**
 * Rewrite a workflow file's content into the v0.4.0 format.
 *
 * Applies, in order: removal of the obsolete `cd tools/repolens` step,
 * removal of bare `npm ci`/`npm install` lines from the old local-install
 * flow, pinning `npx repolens` to `@latest`, and insertion of a Node.js
 * setup step and the Notion/AI environment variables when absent.
 *
 * NOTE(review): the indentation inside the multi-line template literals
 * below appears stripped by the diff rendering of this package — confirm
 * the YAML nesting against the published package before relying on it.
 *
 * @param {string} content - Original workflow YAML text.
 * @returns {string} Migrated workflow YAML text.
 */
function migrateWorkflowContent(content) {
  let migrated = content;

  // Remove cd tools/repolens commands
  migrated = migrated.replace(/cd\s+tools\/repolens\s*\n?/gi, "");

  // Remove standalone npm ci/install that's part of old setup
  migrated = migrated.replace(/npm\s+(?:ci|install)\s*\n/g, "");

  // Update npx repolens to npx repolens@latest
  migrated = migrated.replace(/npx\s+repolens\s+/g, "npx repolens@latest ");

  // Add Node.js setup if missing (insert after checkout step)
  if (!migrated.includes("actions/setup-node@")) {
    const checkoutPattern = /(- name: Checkout repository\s+uses: actions\/checkout@v\d+)/;
    if (checkoutPattern.test(migrated)) {
      // $1 preserves the matched checkout step; the new step is appended after it.
      migrated = migrated.replace(
        checkoutPattern,
        `$1

- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20`
      );
    }
  }

  // Add environment variables if missing
  if (!migrated.includes("NOTION_TOKEN") && !migrated.includes("env:")) {
    // Find the publish/generate documentation step
    const publishPattern = /(- name: .*(?:publish|generate).*documentation.*\n)/i;
    if (publishPattern.test(migrated)) {
      // \${{ ... }} keeps the literal GitHub Actions expression in the output.
      migrated = migrated.replace(
        publishPattern,
        `$1 env:
NOTION_TOKEN: \${{ secrets.NOTION_TOKEN }}
NOTION_PARENT_PAGE_ID: \${{ secrets.NOTION_PARENT_PAGE_ID }}
REPOLENS_AI_API_KEY: \${{ secrets.REPOLENS_AI_API_KEY }}
REPOLENS_AI_PROVIDER: openai
`
      );
    }
  }

  // Add AI environment variables if env section exists but missing AI vars
  if (migrated.includes("NOTION_TOKEN") && !migrated.includes("REPOLENS_AI_API_KEY")) {
    migrated = migrated.replace(
      /(NOTION_PARENT_PAGE_ID: \${{ secrets\.NOTION_PARENT_PAGE_ID }})/,
      `$1
REPOLENS_AI_API_KEY: \${{ secrets.REPOLENS_AI_API_KEY }}
REPOLENS_AI_PROVIDER: openai`
    );
  }

  return migrated;
}
100
+
101
+ /**
102
+ * Find all workflow files in .github/workflows
103
+ */
104
/**
 * Collect all GitHub Actions workflow files under <targetDir>/.github/workflows.
 *
 * @param {string} targetDir - Repository root to scan.
 * @returns {Promise<string[]>} Absolute paths of `.yml`/`.yaml` files;
 *   empty array when the workflows directory does not exist.
 */
async function findWorkflowFiles(targetDir) {
  const workflowDir = path.join(targetDir, ".github", "workflows");

  // Missing directory is a normal "nothing to migrate" case, not an error.
  try {
    await fs.access(workflowDir);
  } catch {
    return [];
  }

  const entries = await fs.readdir(workflowDir);
  return entries
    .filter((name) => name.endsWith(".yml") || name.endsWith(".yaml"))
    .map((name) => path.join(workflowDir, name));
}
118
+
119
+ /**
120
+ * Show diff between old and new content
121
+ */
122
/**
 * Print a line-by-line preview of the changes between the original and
 * migrated workflow content (capped at ~20 changed line pairs).
 *
 * @param {string} oldContent - Content before migration.
 * @param {string} newContent - Content after migration.
 */
function showDiff(oldContent, newContent) {
  const before = oldContent.split("\n");
  const after = newContent.split("\n");
  const total = Math.max(before.length, after.length);

  console.log("\n📋 Changes Preview:");
  console.log("─".repeat(60));

  let shown = 0;
  for (let i = 0; i < total; i++) {
    const left = before[i] || "";
    const right = after[i] || "";
    if (left === right) continue;

    if (left) {
      console.log(` - ${left}`);
    }
    if (right) {
      console.log(` + ${right}`);
    }

    shown++;
    // Truncate long previews so the console stays readable.
    if (shown > 20) {
      console.log(` ... (${total - i} more lines)`);
      break;
    }
  }

  console.log("─".repeat(60));
}
152
+
153
+ /**
154
+ * Main migration function
155
+ */
156
/**
 * Entry point for `repolens migrate`: scan the target repository for
 * GitHub Actions workflow files, detect legacy (pre-v0.4.0) patterns,
 * and rewrite each matching workflow (with a `.backup` copy).
 *
 * @param {string} [targetDir=process.cwd()] - Repository root to migrate.
 * @param {{dryRun?: boolean, force?: boolean}} [options] - `dryRun` previews
 *   without writing; `force` skips the diff preview.
 * @throws Re-throws any unexpected failure after logging it.
 */
export async function runMigrate(targetDir = process.cwd(), options = {}) {
  const { dryRun = false, force = false } = options;

  try {
    await printMigrationBanner();

    info(`🔍 Scanning for workflow files in: ${targetDir}`);

    const workflowFiles = await findWorkflowFiles(targetDir);

    if (workflowFiles.length === 0) {
      warn("⚠️ No workflow files found in .github/workflows/");
      info("\n💡 If you haven't set up GitHub Actions yet:");
      info(" Run: repolens init");
      return;
    }

    info(` Found ${workflowFiles.length} workflow file(s)`);

    let migratedCount = 0;
    let skippedCount = 0;

    for (const workflowPath of workflowFiles) {
      const filename = path.basename(workflowPath);
      // BUG FIX: was `$(unknown)` (shell-style interpolation typo) so the
      // filename never appeared in the output; use the computed name.
      console.log(`\n📄 Checking: ${filename}`);

      const content = await fs.readFile(workflowPath, "utf8");
      const detection = detectLegacyPatterns(content);

      if (!detection.isLegacy) {
        info(" ✅ Already up to date!");
        skippedCount++;
        continue;
      }

      info(" 🔧 Legacy patterns detected:");
      detection.patterns.forEach(p => info(` - ${p}`));

      const migratedContent = migrateWorkflowContent(content);

      // Show a preview unless the caller explicitly opted out.
      if (!force) {
        showDiff(content, migratedContent);
      }

      if (dryRun) {
        info("\n 🔍 DRY RUN: No changes written");
        migratedCount++;
        continue;
      }

      // Backup original file before overwriting it in place.
      const backupPath = `${workflowPath}.backup`;
      await fs.writeFile(backupPath, content, "utf8");
      info(` 💾 Backup saved: ${path.basename(backupPath)}`);

      // Write migrated content
      await fs.writeFile(workflowPath, migratedContent, "utf8");
      // BUG FIX: was `$(unknown)` — interpolate the real filename.
      info(` ✅ Migrated: ${filename}`);
      migratedCount++;
    }

    // Summary
    console.log("\n" + "─".repeat(60));
    console.log("📊 Migration Summary:");
    console.log(` ✅ Migrated: ${migratedCount}`);
    console.log(` ⏭️ Skipped: ${skippedCount}`);

    if (migratedCount > 0) {
      console.log("\n🎉 Migration complete!");

      if (!dryRun) {
        console.log("\n📝 Next steps:");
        console.log(" 1. Review the changes: git diff .github/workflows/");
        console.log(" 2. Test locally: npx repolens@latest publish");
        console.log(" 3. Commit: git add .github/workflows/ && git commit -m 'chore: migrate RepoLens workflow to v0.4.0'");
        console.log(" 4. Push: git push");
        console.log("\n💡 Tip: Backups saved as *.backup - delete them once verified");
      } else {
        console.log("\n💡 Run without --dry-run to apply changes");
      }
    } else {
      console.log("\n✨ All workflows are up to date!");
    }

  } catch (err) {
    logError(`Migration failed: ${err.message}`);
    throw err;
  }
}
245
+
246
/**
 * Print the four-line banner shown at the start of a migration run.
 * (Async only to match the awaited call site in runMigrate.)
 */
async function printMigrationBanner() {
  const rule = "=".repeat(60);
  console.log("\n" + rule);
  console.log("🔄 RepoLens Workflow Migration Tool");
  console.log(" Upgrading to v0.4.0 format");
  console.log(rule);
}
@@ -0,0 +1,33 @@
1
+ import { publishToNotion } from "./publish.js";
2
+ import { publishToMarkdown } from "./markdown.js";
3
+ import { shouldPublishToNotion, getCurrentBranch } from "../utils/branch.js";
4
+ import { info, warn } from "../utils/logger.js";
5
+
6
/**
 * Check whether both Notion credentials are present in the environment.
 *
 * @returns {boolean} True when NOTION_TOKEN and NOTION_PARENT_PAGE_ID are
 *   both set to non-empty values.
 */
function hasNotionSecrets() {
  return Boolean(process.env.NOTION_TOKEN && process.env.NOTION_PARENT_PAGE_ID);
}
9
+
10
/**
 * Fan rendered documentation pages out to the configured publishers.
 *
 * Notion publishing is attempted when "notion" is listed in cfg.publishers
 * OR when Notion secrets are present in the environment; within that, it
 * only runs when the current branch passes the branch allow-list check.
 * Markdown output runs when requested, or as the fallback when "notion"
 * is not among the configured publishers.
 *
 * @param {object} cfg - RepoLens configuration (publishers, notion settings).
 * @param {object} renderedPages - Map of page key -> rendered markdown.
 */
export async function publishDocs(cfg, renderedPages) {
  const publishers = cfg.publishers || ["markdown", "notion"];
  const currentBranch = getCurrentBranch();

  const notionWanted = publishers.includes("notion") || hasNotionSecrets();

  if (notionWanted) {
    if (!hasNotionSecrets()) {
      info("Skipping Notion publish: NOTION_TOKEN or NOTION_PARENT_PAGE_ID not configured");
      info("To enable Notion publishing, set these environment variables or GitHub Actions secrets");
    } else if (shouldPublishToNotion(cfg, currentBranch)) {
      info(`Publishing to Notion from branch: ${currentBranch}`);
      await publishToNotion(cfg, renderedPages);
    } else {
      const allowedBranches = cfg.notion?.branches?.join(", ") || "none configured";
      warn(`Skipping Notion publish: branch "${currentBranch}" not in allowed list (${allowedBranches})`);
      info("To publish from this branch, add it to notion.branches in .repolens.yml");
    }
  }

  const markdownWanted = publishers.includes("markdown") || !publishers.includes("notion");
  if (markdownWanted) {
    await publishToMarkdown(cfg, renderedPages);
  }
}
@@ -0,0 +1,32 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import { info } from "../utils/logger.js";
4
+
5
/**
 * Resolve the markdown output directory (<repoRoot>/.repolens).
 *
 * @param {object} cfg - Config carrying the resolved repo root in __repoRoot.
 * @returns {string} Path to the .repolens output directory.
 */
function outputDir(cfg) {
  const { __repoRoot } = cfg;
  return path.join(__repoRoot, ".repolens");
}
8
+
9
/**
 * Map a page key to its markdown output file name.
 *
 * Known keys get curated names (e.g. arch_diff -> architecture_diff.md);
 * anything else falls back to `<key>.md`.
 *
 * @param {string} key - Page key from the config's outputs.pages list.
 * @returns {string} File name ending in .md.
 */
function pageFileName(key) {
  const mapping = {
    system_overview: "system_overview.md",
    module_catalog: "module_catalog.md",
    api_surface: "api_surface.md",
    arch_diff: "architecture_diff.md",
    route_map: "route_map.md",
    system_map: "system_map.md"
  };

  // BUG FIX: `mapping[key] || ...` walked the prototype chain, so keys like
  // "toString" returned an inherited function instead of "<key>.md".
  return Object.hasOwn(mapping, key) ? mapping[key] : `${key}.md`;
}
21
+
22
/**
 * Write every rendered page to <repoRoot>/.repolens as a markdown file.
 *
 * @param {object} cfg - Config carrying __repoRoot.
 * @param {object} renderedPages - Map of page key -> markdown content.
 */
export async function publishToMarkdown(cfg, renderedPages) {
  const dir = outputDir(cfg);
  await fs.mkdir(dir, { recursive: true });

  // Sequential writes keep output deterministic and error locations obvious.
  for (const [key, markdown] of Object.entries(renderedPages)) {
    const target = path.join(dir, pageFileName(key));
    await fs.writeFile(target, markdown, "utf8");
  }

  info(`markdown docs written to ${dir}`);
}
@@ -0,0 +1,325 @@
1
+ import fetch from "node-fetch";
2
+ import fs from "node:fs/promises";
3
+ import path from "node:path";
4
+ import { log } from "../utils/logger.js";
5
+ import { fetchWithRetry } from "../utils/retry.js";
6
+
7
/**
 * Build the HTTP headers required by every Notion API call.
 *
 * @returns {{Authorization: string, "Notion-Version": string, "Content-Type": string}}
 * @throws {Error} When NOTION_TOKEN is not set.
 */
function notionHeaders() {
  const token = process.env.NOTION_TOKEN;
  if (!token) {
    throw new Error("Missing NOTION_TOKEN in tools/repolens/.env or GitHub Actions secrets");
  }

  // NOTION_VERSION can override the pinned API version (e.g. for testing).
  const version = process.env.NOTION_VERSION || "2022-06-28";

  return {
    Authorization: `Bearer ${token}`,
    "Notion-Version": version,
    "Content-Type": "application/json"
  };
}
21
+
22
/**
 * Perform one Notion API request with retry/backoff and JSON decoding.
 *
 * @param {string} method - HTTP method (GET/POST/PATCH).
 * @param {string} url - Path relative to https://api.notion.com/v1.
 * @param {object} [body] - Optional JSON payload.
 * @returns {Promise<object>} Decoded JSON response.
 * @throws {Error} With status and response text on any non-OK response.
 */
async function notionRequest(method, url, body) {
  const requestOptions = {
    method,
    headers: notionHeaders(),
    body: body ? JSON.stringify(body) : undefined
  };
  const retryPolicy = {
    retries: 3,
    baseDelayMs: 500,
    maxDelayMs: 4000,
    label: `Notion ${method} ${url}`
  };

  const res = await fetchWithRetry(`https://api.notion.com/v1${url}`, requestOptions, retryPolicy);

  if (res.ok) {
    return await res.json();
  }

  const text = await res.text();
  throw new Error(`Notion API error ${res.status}: ${text}`);
}
41
+
42
// On-disk cache mapping cache keys -> Notion page IDs so repeated publishes
// reuse existing pages instead of re-searching or re-creating them.
// NOTE(review): resolved from process.cwd(), so running the CLI from a
// different working directory uses a different cache — confirm intended.
const CACHE_DIR = path.join(process.cwd(), ".cache");
const CACHE_FILE = path.join(CACHE_DIR, "notion-pages.json");
44
+
45
/**
 * Load the page-ID cache from disk.
 *
 * @returns {Promise<object>} Parsed cache, or an empty object when the
 *   file is missing or unparseable.
 */
async function readCache() {
  try {
    return JSON.parse(await fs.readFile(CACHE_FILE, "utf8"));
  } catch {
    // Missing/corrupt cache is non-fatal — start fresh.
    return {};
  }
}
53
+
54
/**
 * Persist the page-ID cache to disk, creating the cache dir if needed.
 *
 * @param {object} cache - Cache key -> Notion page ID map.
 */
async function writeCache(cache) {
  const serialized = JSON.stringify(cache, null, 2);
  await fs.mkdir(CACHE_DIR, { recursive: true });
  await fs.writeFile(CACHE_FILE, serialized, "utf8");
}
58
+
59
/**
 * Fetch all child blocks of a Notion block, following pagination cursors.
 *
 * @param {string} blockId - Parent block/page ID.
 * @returns {Promise<object[]>} All child block objects.
 */
async function getChildBlocks(blockId) {
  const collected = [];
  let cursor;

  do {
    const query = cursor ? `?start_cursor=${cursor}` : "";
    const page = await notionRequest("GET", `/blocks/${blockId}/children${query}`);

    if (page.results?.length) {
      collected.push(...page.results);
    }

    // Continue only while the API reports more data AND supplies a cursor.
    cursor = page.has_more ? page.next_cursor : undefined;
  } while (cursor);

  return collected;
}
80
+
81
/**
 * Search a parent page's children for a child_page with an exact title.
 *
 * @param {string} parentPageId - Page whose children are scanned.
 * @param {string} title - Exact (trimmed) title to match.
 * @returns {Promise<string|null>} Matching page ID, or null when absent.
 */
async function findExistingChildPageByTitle(parentPageId, title) {
  const children = await getChildBlocks(parentPageId);

  log(`Looking for child page: "${title}"`);
  log(`Parent has ${children.length} child blocks`);

  for (const block of children) {
    if (block.type !== "child_page") continue;

    const blockTitle = block.child_page?.title?.trim();
    log(`Found child page block: "${blockTitle}" (${block.id})`);

    if (blockTitle === title) {
      log(`Reusing existing page: "${title}" (${block.id})`);
      return block.id;
    }
  }

  log(`No existing child page found for "${title}"`);
  return null;
}
102
+
103
/**
 * Resolve (or create) the Notion page for a given title, caching its ID.
 *
 * Resolution order: local cache -> existing child page with matching
 * title -> create a new page under the parent.
 *
 * @param {string} parentPageId - Parent Notion page ID.
 * @param {string} title - Title of the page to ensure.
 * @param {string} cacheKey - Key under which the page ID is cached.
 * @returns {Promise<string>} The resolved page ID.
 */
export async function ensurePage(parentPageId, title, cacheKey) {
  const cache = await readCache();

  const cachedId = cache[cacheKey];
  if (cachedId) {
    log(`Using cached page ID for ${cacheKey}: ${cachedId}`);
    return cachedId;
  }

  const existingId = await findExistingChildPageByTitle(parentPageId, title);
  if (existingId) {
    cache[cacheKey] = existingId;
    await writeCache(cache);
    return existingId;
  }

  log(`Creating NEW page for "${title}"`);

  const payload = {
    parent: { page_id: parentPageId },
    properties: {
      title: {
        title: [{ text: { content: title } }]
      }
    }
  };
  const created = await notionRequest("POST", "/pages", payload);

  cache[cacheKey] = created.id;
  await writeCache(cache);

  log(`Created page "${title}" with ID ${created.id}`);
  return created.id;
}
142
+
143
/**
 * Archive (soft-delete) a single Notion block.
 *
 * @param {string} blockId - Block to archive.
 * @returns {Promise<object>} Notion API response.
 */
async function archiveBlock(blockId) {
  const payload = { archived: true };
  return notionRequest("PATCH", `/blocks/${blockId}`, payload);
}
148
+
149
/**
 * Best-effort unarchive of a Notion page before editing it.
 * Failures (e.g. the page is already active) are logged and swallowed.
 *
 * @param {string} pageId - Page to unarchive.
 * @returns {Promise<object|undefined>} API response, or undefined on failure.
 */
async function unarchivePage(pageId) {
  try {
    return await notionRequest("PATCH", `/pages/${pageId}`, { archived: false });
  } catch (err) {
    // Deliberately non-fatal — the caller proceeds either way.
    log(`Note: Could not unarchive page ${pageId}: ${err.message}`);
  }
}
159
+
160
/**
 * Remove all visible content from a page by archiving each live child block.
 *
 * @param {string} pageId - Page to clear.
 */
export async function clearPage(pageId) {
  const children = await getChildBlocks(pageId);

  for (const child of children) {
    // Skip blocks that are already archived — no redundant API calls.
    if (child.archived) continue;
    await archiveBlock(child.id);
  }
}
170
+
171
// Build a Notion rich_text array from plain text, honoring the API's
// 2000-character-per-text-object limit.
function _notionRichText(content) {
  return [
    {
      type: "text",
      text: {
        content: content.slice(0, 2000)
      }
    }
  ];
}

// Build a single-rich-text Notion block of the given type (heading,
// bulleted_list_item, paragraph, ...).
function _simpleBlock(type, content) {
  return {
    object: "block",
    type,
    [type]: {
      rich_text: _notionRichText(content)
    }
  };
}

/**
 * Convert a markdown string into an array of Notion block objects.
 *
 * Supported constructs: fenced code blocks, `#`/`##`/`###` headings,
 * `- ` bullet items; everything else becomes a paragraph. Output is
 * capped at 100 blocks (Notion's per-append children limit) and every
 * rich_text payload is truncated to 2000 characters (Notion's limit).
 *
 * Improvements over the previous version:
 *  - `### ` headings now map to heading_3 (previously rendered as a
 *    paragraph containing the literal "### " prefix).
 *  - Headings/bullets/paragraphs are truncated to 2000 chars like code
 *    blocks already were, so oversized lines no longer fail at the API.
 *
 * @param {string} markdown - Markdown source.
 * @returns {object[]} Notion block objects (empty array for invalid input).
 */
function markdownToNotionBlocks(markdown) {
  // Safety check: handle undefined/null/non-string markdown.
  if (!markdown || typeof markdown !== 'string') {
    console.warn(`Warning: markdownToNotionBlocks received invalid markdown: ${typeof markdown}`);
    return [];
  }

  const lines = markdown.split("\n");
  const blocks = [];
  let i = 0;

  while (i < lines.length && blocks.length < 100) {
    const line = lines[i].trim();

    // Skip empty lines.
    if (!line) {
      i++;
      continue;
    }

    // Fenced code block: ```language ... ```
    if (line.startsWith("```")) {
      const language = line.slice(3).trim() || "plain text";
      const codeLines = [];
      i++; // move past opening fence

      while (i < lines.length && !lines[i].trim().startsWith("```")) {
        codeLines.push(lines[i]);
        i++;
      }

      const codeContent = codeLines.join("\n");
      if (codeContent.trim()) {
        blocks.push({
          object: "block",
          type: "code",
          code: {
            rich_text: _notionRichText(codeContent),
            language: language,
            caption: []
          }
        });
      }

      i++; // move past closing fence
      continue;
    }

    // Headings (longest prefix checked last is unnecessary here because
    // "### " does not start with "# " + space semantics: check ### first).
    if (line.startsWith("### ")) {
      blocks.push(_simpleBlock("heading_3", line.replace(/^### /, "")));
      i++;
      continue;
    }
    if (line.startsWith("## ")) {
      blocks.push(_simpleBlock("heading_2", line.replace(/^## /, "")));
      i++;
      continue;
    }
    if (line.startsWith("# ")) {
      blocks.push(_simpleBlock("heading_1", line.replace(/^# /, "")));
      i++;
      continue;
    }

    // Bullet list items.
    if (line.startsWith("- ")) {
      blocks.push(_simpleBlock("bulleted_list_item", line.replace(/^- /, "")));
      i++;
      continue;
    }

    // Everything else: plain paragraph.
    blocks.push(_simpleBlock("paragraph", line));
    i++;
  }

  return blocks;
}
307
+
308
/**
 * Replace a Notion page's entire content with blocks rendered from markdown.
 *
 * Unarchives the page (archived pages reject edits), clears existing
 * blocks, then appends the new blocks in chunks of 50 to stay under
 * Notion's per-request children limit.
 *
 * @param {string} pageId - Target Notion page.
 * @param {string} markdown - New content as markdown.
 */
export async function replacePageContent(pageId, markdown) {
  await unarchivePage(pageId);
  await clearPage(pageId);

  const blocks = markdownToNotionBlocks(markdown);
  if (blocks.length === 0) return;

  const CHUNK = 50;
  for (let start = 0; start < blocks.length; start += CHUNK) {
    await notionRequest("PATCH", `/blocks/${pageId}/children`, {
      children: blocks.slice(start, start + CHUNK)
    });
  }
}
@@ -0,0 +1,31 @@
1
+ import { ensurePage, replacePageContent } from "./notion.js";
2
+ import { getCurrentBranch, getBranchQualifiedTitle } from "../utils/branch.js";
3
+
4
/**
 * Publish every configured output page to Notion under the parent page.
 *
 * Page titles are "<prefix> — <page title>", optionally qualified with the
 * current branch; page IDs are cached per (page key, branch).
 *
 * @param {object} cfg - RepoLens config (project, outputs.pages, notion).
 * @param {object} renderedPages - Map of page key -> rendered markdown.
 * @throws {Error} When NOTION_PARENT_PAGE_ID is not set.
 */
export async function publishToNotion(cfg, renderedPages) {
  const parentPageId = process.env.NOTION_PARENT_PAGE_ID;

  if (!parentPageId) {
    throw new Error("Missing NOTION_PARENT_PAGE_ID in tools/repolens/.env");
  }

  const prefix = cfg.project.docs_title_prefix || "RepoLens";
  const currentBranch = getCurrentBranch();
  const includeBranchInTitle = cfg.notion?.includeBranchInTitle !== false; // Default true

  for (const page of cfg.outputs.pages) {
    const markdown = renderedPages[page.key];

    // BUG FIX: check for missing content BEFORE ensurePage — previously a
    // Notion page was created (and its ID cached) even when no content was
    // generated, leaving empty orphan pages.
    if (!markdown) {
      console.log(`⚠️ Skipping ${page.key}: No content generated`);
      continue;
    }

    const baseTitle = `${prefix} — ${page.title}`;
    const title = getBranchQualifiedTitle(baseTitle, currentBranch, includeBranchInTitle);
    const cacheKey = `${page.key}-${currentBranch}`; // Branch-scoped cache

    const pageId = await ensurePage(parentPageId, title, cacheKey);
    await replacePageContent(pageId, markdown);
  }
}