@apmantza/greedysearch-pi 1.8.2 → 1.8.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/github.mjs CHANGED
@@ -1,237 +1,237 @@
1
- // src/github.mjs - GitHub content fetching via REST API
2
-
3
- const GITHUB_API = "https://api.github.com";
4
- const DEFAULT_HEADERS = {
5
- "user-agent": "GreedySearch/1.0",
6
- accept: "application/vnd.github+json",
7
- "x-github-api-version": "2022-11-28",
8
- };
9
-
10
- /**
11
- * Parse a GitHub URL into components
12
- * @param {string} url
13
- * @returns {{owner: string, repo: string, type: 'blob'|'tree'|'root', ref?: string, path?: string} | null}
14
- */
15
- export function parseGitHubUrl(url) {
16
- try {
17
- const parsed = new URL(url);
18
- if (!parsed.hostname.endsWith("github.com")) {
19
- return null;
20
- }
21
-
22
- const parts = parsed.pathname.split("/").filter(Boolean);
23
- if (parts.length < 2) {
24
- return null;
25
- }
26
-
27
- const [owner, repo] = parts;
28
-
29
- // Root: github.com/owner/repo
30
- if (parts.length === 2) {
31
- return { owner, repo, type: "root" };
32
- }
33
-
34
- // With type: github.com/owner/repo/blob|tree/ref/path
35
- if (parts.length >= 4 && (parts[2] === "blob" || parts[2] === "tree")) {
36
- const type = parts[2];
37
- const ref = parts[3];
38
- const path = parts.slice(4).join("/");
39
- return { owner, repo, type, ref, path };
40
- }
41
-
42
- return null;
43
- } catch {
44
- return null;
45
- }
46
- }
47
-
48
- /**
49
- * Fetch JSON from GitHub API with timeout
50
- */
51
- async function apiGet(path, timeoutMs = 10000) {
52
- const controller = new AbortController();
53
- const tid = setTimeout(() => controller.abort(), timeoutMs);
54
- try {
55
- const res = await fetch(`${GITHUB_API}${path}`, {
56
- headers: DEFAULT_HEADERS,
57
- signal: controller.signal,
58
- });
59
- clearTimeout(tid);
60
- if (!res.ok) {
61
- throw new Error(`GitHub API ${res.status}: ${path}`);
62
- }
63
- return await res.json();
64
- } catch (err) {
65
- clearTimeout(tid);
66
- throw err;
67
- }
68
- }
69
-
70
- /**
71
- * Fetch the default branch README as plain text
72
- */
73
- async function fetchReadme(owner, repo) {
74
- try {
75
- const data = await apiGet(`/repos/${owner}/${repo}/readme`);
76
- if (data.content && data.encoding === "base64") {
77
- return Buffer.from(data.content, "base64").toString("utf8");
78
- }
79
- return "";
80
- } catch {
81
- return "";
82
- }
83
- }
84
-
85
- /**
86
- * Fetch top-level file tree (non-recursive)
87
- */
88
- async function fetchTree(owner, repo, ref = "HEAD", subPath = "") {
89
- try {
90
- // Resolve ref to a tree SHA first when using HEAD or a branch name
91
- const refData = await apiGet(`/repos/${owner}/${repo}/git/ref/heads/${ref === "HEAD" ? "main" : ref}`).catch(() =>
92
- apiGet(`/repos/${owner}/${repo}/git/ref/heads/master`).catch(() => null)
93
- );
94
-
95
- let treeSha;
96
- if (refData?.object?.sha) {
97
- // Get commit to get tree SHA
98
- const commit = await apiGet(`/repos/${owner}/${repo}/git/commits/${refData.object.sha}`);
99
- treeSha = commit.tree.sha;
100
- } else {
101
- // Fall back to repo default branch info
102
- const repoInfo = await apiGet(`/repos/${owner}/${repo}`);
103
- const branch = await apiGet(`/repos/${owner}/${repo}/branches/${repoInfo.default_branch}`);
104
- treeSha = branch.commit.commit.tree.sha;
105
- }
106
-
107
- const treeData = await apiGet(`/repos/${owner}/${repo}/git/trees/${treeSha}`);
108
- let items = treeData.tree || [];
109
-
110
- // Filter to subPath if requested
111
- if (subPath) {
112
- items = items.filter((item) => item.path.startsWith(subPath));
113
- }
114
-
115
- return items.slice(0, 50).map((item) => ({
116
- path: item.path,
117
- type: item.type === "tree" ? "dir" : "file",
118
- size: item.size,
119
- }));
120
- } catch {
121
- return [];
122
- }
123
- }
124
-
125
- /**
126
- * Fetch a specific file via raw.githubusercontent.com
127
- */
128
- async function fetchRawFile(owner, repo, ref, filePath, timeoutMs = 10000) {
129
- const ref_ = ref && ref !== "HEAD" ? ref : "main";
130
- const urls = [
131
- `https://raw.githubusercontent.com/${owner}/${repo}/${ref_}/${filePath}`,
132
- `https://raw.githubusercontent.com/${owner}/${repo}/master/${filePath}`,
133
- ];
134
-
135
- for (const url of urls) {
136
- const controller = new AbortController();
137
- const tid = setTimeout(() => controller.abort(), timeoutMs);
138
- try {
139
- const res = await fetch(url, {
140
- headers: { "user-agent": DEFAULT_HEADERS["user-agent"] },
141
- signal: controller.signal,
142
- });
143
- clearTimeout(tid);
144
- if (res.ok) {
145
- return await res.text();
146
- }
147
- } catch {
148
- clearTimeout(tid);
149
- }
150
- }
151
- return null;
152
- }
153
-
154
- /**
155
- * Fetch GitHub content via API
156
- * @param {string} url - GitHub URL (blob, tree, or root)
157
- * @returns {Promise<{ok: boolean, content?: string, title?: string, error?: string, tree?: Array}>}
158
- */
159
- export async function fetchGitHubContent(url) {
160
- const parsed = parseGitHubUrl(url);
161
- if (!parsed) {
162
- return { ok: false, error: "Not a valid GitHub URL" };
163
- }
164
-
165
- const { owner, repo, type, ref, path } = parsed;
166
-
167
- try {
168
- if (type === "root" || (type === "tree" && !path)) {
169
- // Fetch repo info + README + top-level tree in parallel
170
- const [repoInfo, readme, tree] = await Promise.allSettled([
171
- apiGet(`/repos/${owner}/${repo}`),
172
- fetchReadme(owner, repo),
173
- fetchTree(owner, repo, ref || "HEAD"),
174
- ]);
175
-
176
- // If repo info failed (e.g. 404 — repo doesn't exist), bail out
177
- if (repoInfo.status === "rejected") {
178
- return { ok: false, error: repoInfo.reason?.message || "Repo not found" };
179
- }
180
-
181
- const info = repoInfo.value;
182
- const readmeText = readme.status === "fulfilled" ? readme.value : "";
183
- const treeItems = tree.status === "fulfilled" ? tree.value : [];
184
-
185
- const description = info?.description ? `\n\n> ${info.description}` : "";
186
- const stars = info?.stargazers_count != null ? ` ⭐ ${info.stargazers_count}` : "";
187
- const language = info?.language ? ` · ${info.language}` : "";
188
-
189
- let content = `# ${owner}/${repo}${stars}${language}${description}\n\n`;
190
-
191
- if (readmeText) {
192
- content += readmeText.slice(0, 6000);
193
- } else {
194
- content += `[No README found]\n\nFiles:\n${treeItems.map((t) => ` ${t.type === "dir" ? "📁" : "📄"} ${t.path}`).join("\n")}`;
195
- }
196
-
197
- return {
198
- ok: true,
199
- title: `${owner}/${repo}`,
200
- content,
201
- tree: treeItems.slice(0, 30),
202
- };
203
- }
204
-
205
- if (type === "blob" && path) {
206
- // Fetch specific file via raw URL
207
- const content = await fetchRawFile(owner, repo, ref, path);
208
- if (content === null) {
209
- return { ok: false, error: `File not found: ${path}` };
210
- }
211
- return {
212
- ok: true,
213
- title: `${owner}/${repo}: ${path}`,
214
- content,
215
- };
216
- }
217
-
218
- if (type === "tree" && path) {
219
- // Directory listing via API tree
220
- const treeItems = await fetchTree(owner, repo, ref || "HEAD", path);
221
- const listing = treeItems
222
- .map((t) => ` ${t.type === "dir" ? "📁" : "📄"} ${t.path}`)
223
- .join("\n");
224
-
225
- return {
226
- ok: true,
227
- title: `${owner}/${repo}/${path}`,
228
- content: `[Directory: ${path}]\n\nFiles:\n${listing}`,
229
- tree: treeItems,
230
- };
231
- }
232
-
233
- return { ok: false, error: "Unsupported GitHub URL type" };
234
- } catch (err) {
235
- return { ok: false, error: err.message };
236
- }
237
- }
1
// src/github.mjs - GitHub content fetching via REST API

// Base endpoint for every REST API call.
const GITHUB_API = "https://api.github.com";

// Headers sent with each API request; the API version header pins the
// response schema to a known, stable revision.
const DEFAULT_HEADERS = {
  "user-agent": "GreedySearch/1.0",
  accept: "application/vnd.github+json",
  "x-github-api-version": "2022-11-28",
};
9
+
10
/**
 * Parse a GitHub URL into components.
 *
 * Recognizes repo roots (github.com/owner/repo) and blob/tree URLs
 * (github.com/owner/repo/blob|tree/ref/path...).
 *
 * @param {string} url
 * @returns {{owner: string, repo: string, type: 'blob'|'tree'|'root', ref?: string, path?: string} | null}
 *   Parsed components, or null when the URL is not a recognized GitHub URL.
 */
export function parseGitHubUrl(url) {
  try {
    const parsed = new URL(url);
    // BUG FIX: a bare endsWith("github.com") also accepted look-alike hosts
    // such as "evilgithub.com". Require the exact host or a true subdomain.
    const host = parsed.hostname;
    if (host !== "github.com" && !host.endsWith(".github.com")) {
      return null;
    }

    const parts = parsed.pathname.split("/").filter(Boolean);
    if (parts.length < 2) {
      return null;
    }

    const [owner, repo] = parts;

    // Root: github.com/owner/repo
    if (parts.length === 2) {
      return { owner, repo, type: "root" };
    }

    // With type: github.com/owner/repo/blob|tree/ref/path
    if (parts.length >= 4 && (parts[2] === "blob" || parts[2] === "tree")) {
      const type = parts[2];
      const ref = parts[3];
      const path = parts.slice(4).join("/");
      return { owner, repo, type, ref, path };
    }

    return null;
  } catch {
    // new URL() throws on malformed input — treat as "not a GitHub URL".
    return null;
  }
}
47
+
48
/**
 * Fetch JSON from the GitHub REST API with a timeout.
 *
 * @param {string} path - API path beginning with "/" (e.g. "/repos/o/r").
 * @param {number} [timeoutMs=10000] - Abort the request after this long.
 * @returns {Promise<any>} Parsed JSON response body.
 * @throws {Error} On non-2xx responses, network failure, or timeout
 *   (the latter surfaces as an AbortError from fetch).
 */
async function apiGet(path, timeoutMs = 10000) {
  const controller = new AbortController();
  const tid = setTimeout(() => controller.abort(), timeoutMs);
  try {
    const res = await fetch(`${GITHUB_API}${path}`, {
      headers: DEFAULT_HEADERS,
      signal: controller.signal,
    });
    if (!res.ok) {
      throw new Error(`GitHub API ${res.status}: ${path}`);
    }
    return await res.json();
  } finally {
    // Single cleanup point: the original cleared the timer separately on the
    // success and error paths; finally covers both, and also keeps the
    // timeout armed while the body is being read by res.json().
    clearTimeout(tid);
  }
}
69
+
70
/**
 * Fetch the default-branch README of a repo as plain UTF-8 text.
 *
 * Best effort: any failure (missing README, rate limit, network error)
 * resolves to an empty string instead of throwing.
 *
 * @param {string} owner
 * @param {string} repo
 * @returns {Promise<string>} README text, or "" when unavailable.
 */
async function fetchReadme(owner, repo) {
  try {
    const payload = await apiGet(`/repos/${owner}/${repo}/readme`);
    // The readme endpoint returns base64-encoded content; anything else
    // (or a missing body) yields an empty string.
    const decodable = payload.content && payload.encoding === "base64";
    return decodable
      ? Buffer.from(payload.content, "base64").toString("utf8")
      : "";
  } catch {
    return "";
  }
}
84
+
85
/**
 * Fetch the top-level file tree of a repo (non-recursive).
 *
 * Tree SHA resolution order:
 *   1. `git/ref/heads/<ref>` ("HEAD" is treated as "main"), falling back to
 *      the "master" head when that ref does not exist;
 *   2. otherwise the repo's reported default branch.
 *
 * Best effort: resolves to [] on any failure.
 *
 * @param {string} owner
 * @param {string} repo
 * @param {string} [ref="HEAD"] - Branch name, or "HEAD" for a default guess.
 * @param {string} [subPath=""] - Only keep entries at or under this path.
 *   NOTE: the fetched tree is non-recursive, so only top-level entries can
 *   ever match a nested subPath — TODO confirm whether callers rely on this.
 * @returns {Promise<Array<{path: string, type: 'dir'|'file', size?: number}>>}
 */
async function fetchTree(owner, repo, ref = "HEAD", subPath = "") {
  try {
    // Resolve ref to a tree SHA first when using HEAD or a branch name.
    const refData = await apiGet(`/repos/${owner}/${repo}/git/ref/heads/${ref === "HEAD" ? "main" : ref}`).catch(() =>
      apiGet(`/repos/${owner}/${repo}/git/ref/heads/master`).catch(() => null)
    );

    let treeSha;
    if (refData?.object?.sha) {
      // The ref points at a commit; the commit object carries the tree SHA.
      const commit = await apiGet(`/repos/${owner}/${repo}/git/commits/${refData.object.sha}`);
      treeSha = commit.tree.sha;
    } else {
      // Fall back to the repo's default-branch info.
      const repoInfo = await apiGet(`/repos/${owner}/${repo}`);
      const branch = await apiGet(`/repos/${owner}/${repo}/branches/${repoInfo.default_branch}`);
      treeSha = branch.commit.commit.tree.sha;
    }

    const treeData = await apiGet(`/repos/${owner}/${repo}/git/trees/${treeSha}`);
    let items = treeData.tree || [];

    // Filter to subPath if requested. BUG FIX: a bare startsWith(subPath)
    // also matched unrelated siblings ("src" matched "src2"); match the
    // entry itself or entries strictly under it.
    if (subPath) {
      const prefix = subPath.endsWith("/") ? subPath : `${subPath}/`;
      items = items.filter(
        (item) => item.path === subPath || item.path.startsWith(prefix)
      );
    }

    return items.slice(0, 50).map((item) => ({
      path: item.path,
      type: item.type === "tree" ? "dir" : "file",
      size: item.size,
    }));
  } catch {
    return [];
  }
}
124
+
125
/**
 * Fetch a specific file's raw contents via raw.githubusercontent.com.
 *
 * Tries the requested ref first ("HEAD"/empty maps to "main"), then falls
 * back to "master" for repos with the older default branch name.
 *
 * @param {string} owner
 * @param {string} repo
 * @param {string|undefined} ref - Branch/tag/SHA, or "HEAD"/undefined for "main".
 * @param {string} filePath - Path of the file within the repo.
 * @param {number} [timeoutMs=10000] - Per-attempt timeout.
 * @returns {Promise<string|null>} File text, or null when every attempt fails.
 */
async function fetchRawFile(owner, repo, ref, filePath, timeoutMs = 10000) {
  const ref_ = ref && ref !== "HEAD" ? ref : "main";
  // A Set dedupes the candidates so ref === "master" is not fetched twice.
  const urls = new Set([
    `https://raw.githubusercontent.com/${owner}/${repo}/${ref_}/${filePath}`,
    `https://raw.githubusercontent.com/${owner}/${repo}/master/${filePath}`,
  ]);

  for (const url of urls) {
    const controller = new AbortController();
    const tid = setTimeout(() => controller.abort(), timeoutMs);
    try {
      const res = await fetch(url, {
        headers: { "user-agent": DEFAULT_HEADERS["user-agent"] },
        signal: controller.signal,
      });
      if (res.ok) {
        return await res.text();
      }
      // Non-OK (e.g. 404 on the wrong branch): fall through to the next URL.
    } catch {
      // Network error / timeout on this candidate: try the next one.
    } finally {
      clearTimeout(tid);
    }
  }
  return null;
}
153
+
154
/**
 * Fetch GitHub content via the REST API.
 *
 * Dispatches on URL shape:
 *   - repo root / ref-less tree -> repo info + README + top-level file tree
 *   - blob with a path          -> raw file contents
 *   - tree with a path          -> directory listing
 *
 * @param {string} url - GitHub URL (blob, tree, or root)
 * @returns {Promise<{ok: boolean, content?: string, title?: string, error?: string, tree?: Array}>}
 */
export async function fetchGitHubContent(url) {
  const target = parseGitHubUrl(url);
  if (!target) {
    return { ok: false, error: "Not a valid GitHub URL" };
  }

  const { owner, repo, type, ref, path } = target;
  const slug = `${owner}/${repo}`;
  // One formatter for both file listings (root fallback + directory view).
  const entryLine = (t) => ` ${t.type === "dir" ? "📁" : "📄"} ${t.path}`;

  try {
    // Repo root (or a tree URL without a sub-path): overview page.
    if (type === "root" || (type === "tree" && !path)) {
      // The three requests are independent — run them in parallel and let
      // README/tree fail soft while repo info remains authoritative.
      const [infoResult, readmeResult, treeResult] = await Promise.allSettled([
        apiGet(`/repos/${owner}/${repo}`),
        fetchReadme(owner, repo),
        fetchTree(owner, repo, ref || "HEAD"),
      ]);

      // Repo info failing (e.g. 404 — repo doesn't exist) means bail out.
      if (infoResult.status === "rejected") {
        return { ok: false, error: infoResult.reason?.message || "Repo not found" };
      }

      const info = infoResult.value;
      const readmeText = readmeResult.status === "fulfilled" ? readmeResult.value : "";
      const treeItems = treeResult.status === "fulfilled" ? treeResult.value : [];

      const description = info?.description ? `\n\n> ${info.description}` : "";
      const stars = info?.stargazers_count != null ? ` ⭐ ${info.stargazers_count}` : "";
      const language = info?.language ? ` · ${info.language}` : "";

      let content = `# ${slug}${stars}${language}${description}\n\n`;
      content += readmeText
        ? readmeText.slice(0, 6000)
        : `[No README found]\n\nFiles:\n${treeItems.map(entryLine).join("\n")}`;

      return {
        ok: true,
        title: slug,
        content,
        tree: treeItems.slice(0, 30),
      };
    }

    // Single file: fetch via the raw host.
    if (type === "blob" && path) {
      const content = await fetchRawFile(owner, repo, ref, path);
      if (content === null) {
        return { ok: false, error: `File not found: ${path}` };
      }
      return { ok: true, title: `${slug}: ${path}`, content };
    }

    // Directory listing via the API tree.
    if (type === "tree" && path) {
      const treeItems = await fetchTree(owner, repo, ref || "HEAD", path);
      const listing = treeItems.map(entryLine).join("\n");
      return {
        ok: true,
        title: `${slug}/${path}`,
        content: `[Directory: ${path}]\n\nFiles:\n${listing}`,
        tree: treeItems,
      };
    }

    return { ok: false, error: "Unsupported GitHub URL type" };
  } catch (err) {
    return { ok: false, error: err.message };
  }
}
package/src/reddit.mjs ADDED
@@ -0,0 +1,210 @@
1
// src/reddit.mjs - Reddit content fetching via public JSON API
// Reddit exposes structured data by appending .json to any URL

// Headers for every Reddit request; a descriptive user-agent is what
// Reddit asks of automated clients.
const REDDIT_HEADERS = {
  "user-agent": "GreedySearch/1.0 (Research Bot)",
  accept: "application/json",
};
8
+
9
/**
 * Parse a Reddit URL to check if it's a post/comment thread or user profile.
 *
 * @param {string} url
 * @returns {{type: 'post'|'user', cleanUrl: string} | null}
 *   Classification plus a query/fragment-free URL, or null for non-Reddit
 *   URLs and unsupported Reddit pages. (The original JSDoc advertised an
 *   'other' type that was never actually returned.)
 */
export function parseRedditUrl(url) {
  try {
    const parsed = new URL(url);
    const hostname = parsed.hostname.toLowerCase();

    // Support reddit.com plus true subdomains (www/old/np.reddit.com).
    // BUG FIX: a bare endsWith("reddit.com") also accepted look-alike
    // hosts such as "fakereddit.com".
    if (hostname !== "reddit.com" && !hostname.endsWith(".reddit.com")) {
      return null;
    }

    const pathname = parsed.pathname;

    // User profile: /u/username or /user/username
    if (pathname.match(/^\/(u|user)\/[^/]+\/?$/i)) {
      return { type: "user", cleanUrl: normalizeRedditUrl(url) };
    }

    // Post: /r/subreddit/comments/xxxx/...
    if (pathname.match(/^\/r\/[^/]+\/comments\/[^/]+/i)) {
      return { type: "post", cleanUrl: normalizeRedditUrl(url) };
    }

    return null;
  } catch {
    // new URL() throws on malformed input.
    return null;
  }
}

/**
 * Normalize a Reddit URL by dropping query parameters and fragments.
 *
 * @param {string} url
 * @returns {string} Normalized URL, or the input unchanged if unparseable.
 */
function normalizeRedditUrl(url) {
  try {
    const parsed = new URL(url);
    // Reconstruct without query/fragment
    return `${parsed.protocol}//${parsed.hostname}${parsed.pathname}`;
  } catch {
    return url;
  }
}
56
+
57
/**
 * Fetch Reddit content via the public .json API.
 *
 * @param {string} url - Reddit URL (".json" is appended to form the API URL).
 * @param {number} [maxChars=8000] - Max characters for the markdown content.
 * @returns {Promise<FetchResult>} ok:true with post markdown on success,
 *   ok:false with an error message on any failure (never rejects).
 */
export async function fetchRedditContent(url, maxChars = 8000) {
  const start = Date.now();
  let timeoutId;

  try {
    // Append .json to get the API response (a trailing slash is replaced).
    const jsonUrl = url.replace(/\/?$/, ".json");

    const controller = new AbortController();
    timeoutId = setTimeout(() => controller.abort(), 15000);

    const response = await fetch(jsonUrl, {
      headers: REDDIT_HEADERS,
      signal: controller.signal,
    });

    if (!response.ok) {
      throw new Error(`Reddit API ${response.status}`);
    }

    const data = await response.json();

    // data[0] = post listing, data[1] = comments listing
    if (!Array.isArray(data) || data.length < 1) {
      throw new Error("Invalid Reddit API response structure");
    }

    const postListing = data[0];
    const commentsListing = data[1];

    // Extract post data
    const post = postListing?.data?.children?.[0]?.data;
    if (!post) {
      throw new Error("No post data in Reddit response");
    }

    // Format as markdown
    const markdown = formatRedditPost(post, commentsListing, maxChars);

    return {
      ok: true,
      url,
      finalUrl: url,
      status: 200,
      contentType: "text/markdown",
      lastModified: "",
      title: post.title || "Reddit Post",
      byline: `u/${post.author}`,
      siteName: `r/${post.subreddit}`,
      lang: "en",
      publishedTime: new Date(post.created_utc * 1000).toISOString(),
      excerpt: post.selftext?.slice(0, 300).replace(/\n/g, " ") || "",
      markdown,
      contentLength: markdown.length,
      needsBrowser: false,
      duration: Date.now() - start,
    };
  } catch (error) {
    return {
      ok: false,
      url,
      finalUrl: url,
      status: 0,
      error: `Reddit fetch failed: ${error.message}`,
      needsBrowser: false,
      duration: Date.now() - start,
    };
  } finally {
    // BUG FIX: the original only cleared the timer on the success path, so
    // every failed fetch left a 15s timer armed (which keeps the Node event
    // loop alive). finally clears it on all paths; clearTimeout(undefined)
    // is a no-op if we failed before the timer was set.
    clearTimeout(timeoutId);
  }
}
133
+
134
/**
 * Format a Reddit post and its comments as clean markdown.
 *
 * @param {object} post - Reddit post data (title, subreddit, author, score,
 *   selftext, url).
 * @param {object|null} commentsListing - Comments listing data, if any.
 * @param {number} maxChars - Maximum output length before truncation.
 * @returns {string} Markdown document.
 */
function formatRedditPost(post, commentsListing, maxChars) {
  const pieces = [];

  // Post header
  pieces.push(`# ${post.title}\n\n`);
  pieces.push(
    `**Subreddit:** r/${post.subreddit} | **Author:** u/${post.author} | **Score:** ${post.score}\n\n`
  );

  // Body text for self posts; the external target for link posts.
  if (post.selftext) {
    pieces.push(post.selftext, "\n\n");
  } else if (post.url && !post.url.includes("reddit.com")) {
    pieces.push(`**Link:** ${post.url}\n\n`);
  }

  // Comments section: top-level "t1" (comment) entries only, capped at 10.
  const children = commentsListing?.data?.children ?? [];
  if (children.length > 0) {
    pieces.push("---\n\n## Comments\n\n");
    const topComments = children.filter((c) => c.kind === "t1").slice(0, 10);
    for (const child of topComments) {
      pieces.push(formatComment(child.data, 0), "\n");
    }
  }

  let md = pieces.join("");

  // Trim to maxChars while keeping structure
  if (md.length > maxChars) {
    md = md.slice(0, maxChars).trim() + "\n\n... (truncated)";
  }

  return md;
}
177
+
178
/**
 * Format a single Reddit comment (plus up to 5 replies, nested at most
 * 3 levels deep) as blockquote-indented markdown.
 *
 * @param {object} comment - Reddit comment data (author, score, body, replies).
 * @param {number} depth - Nesting depth; each level adds one "> " marker.
 * @returns {string} Markdown fragment, or "" for missing/deleted/removed comments.
 */
function formatComment(comment, depth) {
  // Deleted/removed comments carry a placeholder body — skip them entirely.
  if (!comment) {
    return "";
  }
  if (comment.body === "[deleted]" || comment.body === "[removed]") {
    return "";
  }

  const quote = "> ".repeat(depth);
  const lines = [
    `${quote}**u/${comment.author}** (${comment.score} pts)\n`,
    // Re-apply the quote marker after every newline inside the body.
    `${quote}${comment.body.replace(/\n/g, "\n" + quote)}\n`,
  ];

  // Recurse into replies: "t1" children only, at most 5 per comment,
  // and never beyond depth 3.
  if (depth < 3 && comment.replies?.data?.children) {
    const nested = comment.replies.data.children
      .filter((r) => r.kind === "t1")
      .slice(0, 5);
    for (const reply of nested) {
      lines.push("\n" + formatComment(reply.data, depth + 1));
    }
  }

  return lines.join("");
}