mcp-researchpowerpack-http 3.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +124 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +227 -0
- package/dist/index.js.map +7 -0
- package/dist/mcp-use.json +7 -0
- package/dist/src/clients/github.d.ts +83 -0
- package/dist/src/clients/github.d.ts.map +1 -0
- package/dist/src/clients/github.js +370 -0
- package/dist/src/clients/github.js.map +7 -0
- package/dist/src/clients/reddit.d.ts +60 -0
- package/dist/src/clients/reddit.d.ts.map +1 -0
- package/dist/src/clients/reddit.js +287 -0
- package/dist/src/clients/reddit.js.map +7 -0
- package/dist/src/clients/research.d.ts +67 -0
- package/dist/src/clients/research.d.ts.map +1 -0
- package/dist/src/clients/research.js +282 -0
- package/dist/src/clients/research.js.map +7 -0
- package/dist/src/clients/scraper.d.ts +72 -0
- package/dist/src/clients/scraper.d.ts.map +1 -0
- package/dist/src/clients/scraper.js +327 -0
- package/dist/src/clients/scraper.js.map +7 -0
- package/dist/src/clients/search.d.ts +57 -0
- package/dist/src/clients/search.d.ts.map +1 -0
- package/dist/src/clients/search.js +218 -0
- package/dist/src/clients/search.js.map +7 -0
- package/dist/src/config/index.d.ts +93 -0
- package/dist/src/config/index.d.ts.map +1 -0
- package/dist/src/config/index.js +218 -0
- package/dist/src/config/index.js.map +7 -0
- package/dist/src/schemas/deep-research.d.ts +40 -0
- package/dist/src/schemas/deep-research.d.ts.map +1 -0
- package/dist/src/schemas/deep-research.js +216 -0
- package/dist/src/schemas/deep-research.js.map +7 -0
- package/dist/src/schemas/github-score.d.ts +50 -0
- package/dist/src/schemas/github-score.d.ts.map +1 -0
- package/dist/src/schemas/github-score.js +58 -0
- package/dist/src/schemas/github-score.js.map +7 -0
- package/dist/src/schemas/scrape-links.d.ts +23 -0
- package/dist/src/schemas/scrape-links.d.ts.map +1 -0
- package/dist/src/schemas/scrape-links.js +32 -0
- package/dist/src/schemas/scrape-links.js.map +7 -0
- package/dist/src/schemas/web-search.d.ts +18 -0
- package/dist/src/schemas/web-search.d.ts.map +1 -0
- package/dist/src/schemas/web-search.js +28 -0
- package/dist/src/schemas/web-search.js.map +7 -0
- package/dist/src/scoring/github-quality.d.ts +142 -0
- package/dist/src/scoring/github-quality.d.ts.map +1 -0
- package/dist/src/scoring/github-quality.js +202 -0
- package/dist/src/scoring/github-quality.js.map +7 -0
- package/dist/src/services/file-attachment.d.ts +30 -0
- package/dist/src/services/file-attachment.d.ts.map +1 -0
- package/dist/src/services/file-attachment.js +205 -0
- package/dist/src/services/file-attachment.js.map +7 -0
- package/dist/src/services/llm-processor.d.ts +29 -0
- package/dist/src/services/llm-processor.d.ts.map +1 -0
- package/dist/src/services/llm-processor.js +206 -0
- package/dist/src/services/llm-processor.js.map +7 -0
- package/dist/src/services/markdown-cleaner.d.ts +8 -0
- package/dist/src/services/markdown-cleaner.d.ts.map +1 -0
- package/dist/src/services/markdown-cleaner.js +63 -0
- package/dist/src/services/markdown-cleaner.js.map +7 -0
- package/dist/src/tools/github-score.d.ts +12 -0
- package/dist/src/tools/github-score.d.ts.map +1 -0
- package/dist/src/tools/github-score.js +306 -0
- package/dist/src/tools/github-score.js.map +7 -0
- package/dist/src/tools/mcp-helpers.d.ts +27 -0
- package/dist/src/tools/mcp-helpers.d.ts.map +1 -0
- package/dist/src/tools/mcp-helpers.js +47 -0
- package/dist/src/tools/mcp-helpers.js.map +7 -0
- package/dist/src/tools/reddit.d.ts +54 -0
- package/dist/src/tools/reddit.d.ts.map +1 -0
- package/dist/src/tools/reddit.js +498 -0
- package/dist/src/tools/reddit.js.map +7 -0
- package/dist/src/tools/registry.d.ts +3 -0
- package/dist/src/tools/registry.d.ts.map +1 -0
- package/dist/src/tools/registry.js +17 -0
- package/dist/src/tools/registry.js.map +7 -0
- package/dist/src/tools/research.d.ts +14 -0
- package/dist/src/tools/research.d.ts.map +1 -0
- package/dist/src/tools/research.js +250 -0
- package/dist/src/tools/research.js.map +7 -0
- package/dist/src/tools/scrape.d.ts +14 -0
- package/dist/src/tools/scrape.d.ts.map +1 -0
- package/dist/src/tools/scrape.js +290 -0
- package/dist/src/tools/scrape.js.map +7 -0
- package/dist/src/tools/search.d.ts +10 -0
- package/dist/src/tools/search.d.ts.map +1 -0
- package/dist/src/tools/search.js +197 -0
- package/dist/src/tools/search.js.map +7 -0
- package/dist/src/tools/utils.d.ts +105 -0
- package/dist/src/tools/utils.d.ts.map +1 -0
- package/dist/src/tools/utils.js +96 -0
- package/dist/src/tools/utils.js.map +7 -0
- package/dist/src/utils/concurrency.d.ts +28 -0
- package/dist/src/utils/concurrency.d.ts.map +1 -0
- package/dist/src/utils/concurrency.js +62 -0
- package/dist/src/utils/concurrency.js.map +7 -0
- package/dist/src/utils/errors.d.ts +95 -0
- package/dist/src/utils/errors.d.ts.map +1 -0
- package/dist/src/utils/errors.js +289 -0
- package/dist/src/utils/errors.js.map +7 -0
- package/dist/src/utils/logger.d.ts +33 -0
- package/dist/src/utils/logger.d.ts.map +1 -0
- package/dist/src/utils/logger.js +41 -0
- package/dist/src/utils/logger.js.map +7 -0
- package/dist/src/utils/markdown-formatter.d.ts +5 -0
- package/dist/src/utils/markdown-formatter.d.ts.map +1 -0
- package/dist/src/utils/markdown-formatter.js +15 -0
- package/dist/src/utils/markdown-formatter.js.map +7 -0
- package/dist/src/utils/response.d.ts +83 -0
- package/dist/src/utils/response.d.ts.map +1 -0
- package/dist/src/utils/response.js +109 -0
- package/dist/src/utils/response.js.map +7 -0
- package/dist/src/utils/retry.d.ts +43 -0
- package/dist/src/utils/retry.d.ts.map +1 -0
- package/dist/src/utils/retry.js +37 -0
- package/dist/src/utils/retry.js.map +7 -0
- package/dist/src/utils/url-aggregator.d.ts +92 -0
- package/dist/src/utils/url-aggregator.d.ts.map +1 -0
- package/dist/src/utils/url-aggregator.js +357 -0
- package/dist/src/utils/url-aggregator.js.map +7 -0
- package/dist/src/version.d.ts +28 -0
- package/dist/src/version.d.ts.map +1 -0
- package/dist/src/version.js +32 -0
- package/dist/src/version.js.map +7 -0
- package/package.json +73 -0
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
import { REDDIT } from "../config/index.js";
|
|
2
|
+
import { USER_AGENT_VERSION } from "../version.js";
|
|
3
|
+
import { calculateBackoff } from "../utils/retry.js";
|
|
4
|
+
import {
|
|
5
|
+
classifyError,
|
|
6
|
+
fetchWithTimeout,
|
|
7
|
+
sleep,
|
|
8
|
+
ErrorCode
|
|
9
|
+
} from "../utils/errors.js";
|
|
10
|
+
import { pMap, pMapSettled } from "../utils/concurrency.js";
|
|
11
|
+
import { mcpLog } from "../utils/logger.js";
|
|
12
|
+
// Reddit OAuth endpoints: token grant goes to www.reddit.com, data calls to oauth.reddit.com.
const REDDIT_TOKEN_URL = "https://www.reddit.com/api/v1/access_token";
const REDDIT_API_BASE = "https://oauth.reddit.com";
// 55s safety margin: auth() treats a cached token as stale this long before
// its actual expiry (`Date.now() < cachedTokenExpiry - TOKEN_EXPIRY_MS`).
const TOKEN_EXPIRY_MS = 55e3;
// Max comments to fetch per post, from shared config (hard-capped at 500 in fetchRedditJson).
const FETCH_LIMIT = REDDIT.FETCH_LIMIT_PER_POST;
// Module-level OAuth token cache, shared across all RedditClient instances.
let cachedToken = null;
let cachedTokenExpiry = 0;
// Token-cache diagnostics are emitted only when DEBUG_REDDIT=true.
const DEBUG_TOKEN_CACHE = process.env.DEBUG_REDDIT === "true";
// In-flight auth promise; dedupes concurrent auth() calls into a single token request.
let pendingAuthPromise = null;
|
|
20
|
+
/**
 * Fetch a post and its comment tree from the Reddit OAuth API.
 *
 * Requests top-sorted comments (capped at 500, depth 10, raw_json) for
 * /r/{sub}/comments/{id}. Throws on 429 / 404 / other non-OK statuses —
 * 429 and generic HTTP failures carry a `.status` property so callers
 * (getPost's retry loop) can distinguish rate limiting — and throws a
 * plain Error when the response body is not valid JSON.
 */
async function fetchRedditJson(sub, id, token, userAgent) {
  const commentLimit = Math.min(FETCH_LIMIT, 500);
  const endpoint = `${REDDIT_API_BASE}/r/${sub}/comments/${id}?sort=top&limit=${commentLimit}&depth=10&raw_json=1`;
  const response = await fetchWithTimeout(endpoint, {
    headers: {
      "Authorization": `Bearer ${token}`,
      "User-Agent": userAgent
    },
    timeoutMs: 3e4
  });
  // Build an Error carrying the HTTP status so retry logic can inspect it.
  const statusError = (message, status) => {
    const error = new Error(message);
    error.status = status;
    return error;
  };
  if (response.status === 429) {
    throw statusError("Rate limited by Reddit API", 429);
  }
  if (response.status === 404) {
    throw new Error(`Post not found: /r/${sub}/comments/${id}`);
  }
  if (!response.ok) {
    throw statusError(`Reddit API error: ${response.status}`, response.status);
  }
  try {
    return await response.json();
  } catch {
    throw new Error("Failed to parse Reddit API response");
  }
}
|
|
49
|
+
/**
 * Extract a normalized post object from the first child of a Reddit post listing.
 * Falls back to placeholder values for missing fields and throws when the
 * listing carries no post payload at all.
 */
function parsePostData(postListing, sub) {
  const raw = postListing?.data?.children?.[0]?.data;
  if (!raw) {
    throw new Error(`Post data not found in response for /r/${sub}`);
  }
  const permalink = raw.permalink || "";
  return {
    title: raw.title || "Untitled",
    author: raw.author || "[deleted]",
    subreddit: raw.subreddit || sub,
    body: formatBody(raw),
    score: raw.score || 0,
    commentCount: raw.num_comments || 0,
    url: `https://reddit.com${permalink}`,
    // created_utc is epoch seconds; Date wants milliseconds.
    created: new Date((raw.created_utc || 0) * 1e3),
    flair: raw.link_flair_text || undefined,
    isNsfw: raw.over_18 || false,
    isPinned: raw.stickied || false
  };
}
|
|
68
|
+
/**
 * Derive a post body string: the selftext when it has content, empty for
 * text-less self posts, and a markdown link line for link posts.
 */
function formatBody(p) {
  const text = p.selftext;
  if (text?.trim()) {
    return text;
  }
  // A self post with blank text genuinely has no body.
  if (p.is_self) {
    return "";
  }
  return p.url ? `**Link:** ${p.url}` : "";
}
|
|
74
|
+
const MAX_COMMENT_DEPTH = 15;
|
|
75
|
+
function parseCommentTree(commentListing, opAuthor) {
|
|
76
|
+
const result = [];
|
|
77
|
+
const extract = (items, depth = 0) => {
|
|
78
|
+
if (depth > MAX_COMMENT_DEPTH) return;
|
|
79
|
+
const sorted = [...items].sort((a, b) => (b.data?.score || 0) - (a.data?.score || 0));
|
|
80
|
+
for (const c of sorted) {
|
|
81
|
+
if (c.kind !== "t1" || !c.data?.author || c.data.author === "[deleted]") continue;
|
|
82
|
+
result.push({
|
|
83
|
+
author: c.data.author,
|
|
84
|
+
body: c.data.body || "",
|
|
85
|
+
score: c.data.score || 0,
|
|
86
|
+
depth,
|
|
87
|
+
isOP: c.data.author === opAuthor
|
|
88
|
+
});
|
|
89
|
+
if (typeof c.data.replies === "object" && c.data.replies?.data?.children) {
|
|
90
|
+
extract(c.data.replies.data.children, depth + 1);
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
};
|
|
94
|
+
extract(commentListing?.data?.children || []);
|
|
95
|
+
return result;
|
|
96
|
+
}
|
|
97
|
+
/**
 * Process one batch of Reddit post URLs with bounded concurrency (5 at a time).
 *
 * Every URL gets an entry in the returned map: a PostResult on success or an
 * Error on failure (so one bad URL never sinks the batch). Also counts how
 * many failures look like rate limiting so callers can throttle.
 */
async function processBatch(client, batchUrls) {
  const results = /* @__PURE__ */ new Map();
  let rateLimitHits = 0;
  const batchResults = await pMapSettled(
    batchUrls,
    (url) => client.getPost(url),
    5
  );
  for (let i = 0; i < batchResults.length; i++) {
    const result = batchResults[i];
    if (!result) continue;
    const url = batchUrls[i] ?? "";
    if (result.status === "fulfilled") {
      results.set(url, result.value);
    } else {
      const errorMsg = result.reason?.message || String(result.reason);
      // Match case-insensitively: fetchRedditJson throws "Rate limited by
      // Reddit API", which contains neither "429" nor lowercase "rate", so
      // the previous case-sensitive check never counted it.
      const normalized = errorMsg.toLowerCase();
      if (normalized.includes("429") || normalized.includes("rate")) rateLimitHits++;
      results.set(url, new Error(errorMsg));
    }
  }
  return { results, rateLimitHits };
}
|
|
119
|
+
/**
 * Reddit OAuth API client.
 *
 * Authenticates via the client-credentials grant, caches the access token at
 * module level (shared by all instances), and fetches posts with top-sorted
 * comments, retrying transient failures and 429s with backoff.
 */
class RedditClient {
  constructor(clientId, clientSecret) {
    this.clientId = clientId;
    this.clientSecret = clientSecret;
  }
  // Sent on every request; Reddit requires a descriptive User-Agent.
  userAgent = `script:${USER_AGENT_VERSION} (by /u/research-powerpack)`;
  /**
   * Authenticate with Reddit API with retry logic.
   * Uses module-level token cache and promise deduplication to prevent
   * concurrent auth calls from firing multiple token requests.
   * Returns null on failure instead of throwing.
   */
  async auth() {
    // Reuse the cached token only while it is at least TOKEN_EXPIRY_MS (55s)
    // away from its recorded expiry.
    if (cachedToken && Date.now() < cachedTokenExpiry - TOKEN_EXPIRY_MS) {
      if (DEBUG_TOKEN_CACHE) console.error("[RedditClient] Token cache HIT");
      return cachedToken;
    }
    // Another caller is already authenticating — piggyback on its promise.
    if (pendingAuthPromise) {
      if (DEBUG_TOKEN_CACHE) console.error("[RedditClient] Auth already in flight, awaiting...");
      return pendingAuthPromise;
    }
    pendingAuthPromise = this.performAuth();
    try {
      return await pendingAuthPromise;
    } finally {
      // Always clear the in-flight marker so a failed auth can be retried later.
      pendingAuthPromise = null;
    }
  }
  /**
   * Perform the actual client-credentials token request.
   * Up to 3 attempts with backoff on 5xx / retryable errors; 401/403 are
   * treated as bad credentials (cache cleared, no retry). Returns the token
   * string or null — never throws.
   */
  async performAuth() {
    if (DEBUG_TOKEN_CACHE) console.error("[RedditClient] Token cache MISS - authenticating");
    // HTTP Basic auth: base64("clientId:clientSecret").
    const credentials = Buffer.from(`${this.clientId}:${this.clientSecret}`).toString("base64");
    for (let attempt = 0; attempt < 3; attempt++) {
      try {
        const res = await fetchWithTimeout(REDDIT_TOKEN_URL, {
          method: "POST",
          headers: {
            "Authorization": `Basic ${credentials}`,
            "Content-Type": "application/x-www-form-urlencoded",
            "User-Agent": this.userAgent
          },
          body: "grant_type=client_credentials",
          timeoutMs: 15e3
        });
        if (!res.ok) {
          const text = await res.text().catch(() => "");
          mcpLog("error", `Auth failed (${res.status}): ${text}`, "reddit");
          // Bad credentials — invalidate the cache and give up immediately.
          if (res.status === 401 || res.status === 403) {
            cachedToken = null;
            cachedTokenExpiry = 0;
            return null;
          }
          // Server-side errors are worth retrying (except on the last attempt).
          if (res.status >= 500 && attempt < 2) {
            await sleep(calculateBackoff(attempt));
            continue;
          }
          return null;
        }
        const data = await res.json();
        if (!data.access_token) {
          mcpLog("error", "Auth response missing access_token", "reddit");
          return null;
        }
        // Cache token and absolute expiry (expires_in is seconds; default 1h).
        cachedToken = data.access_token;
        cachedTokenExpiry = Date.now() + (data.expires_in || 3600) * 1e3;
        return cachedToken;
      } catch (error) {
        const err = classifyError(error);
        mcpLog("error", `Auth error (attempt ${attempt + 1}): ${err.message}`, "reddit");
        if (err.code === ErrorCode.AUTH_ERROR) {
          cachedToken = null;
          cachedTokenExpiry = 0;
        }
        if (attempt < 2 && err.retryable) {
          await sleep(calculateBackoff(attempt));
          continue;
        }
        return null;
      }
    }
    return null;
  }
  // Extract { sub, id } from a reddit.com/r/<sub>/comments/<id>/... URL,
  // or null when the URL does not match that shape.
  parseUrl(url) {
    const m = url.match(/reddit\.com\/r\/([^\/]+)\/comments\/([a-z0-9]+)/i);
    return m ? { sub: m[1], id: m[2] } : null;
  }
  /**
   * Get a single Reddit post with comments.
   * Returns PostResult or throws Error (for use with Promise.allSettled).
   */
  async getPost(url) {
    const parsed = this.parseUrl(url);
    if (!parsed) {
      throw new Error(`Invalid Reddit URL format: ${url}`);
    }
    const token = await this.auth();
    if (!token) {
      throw new Error("Reddit authentication failed - check credentials");
    }
    let lastError = null;
    for (let attempt = 0; attempt < REDDIT.RETRY_COUNT; attempt++) {
      try {
        const data = await fetchRedditJson(parsed.sub, parsed.id, token, this.userAgent);
        // Reddit returns a 2-tuple: [post listing, comment listing].
        const [postListing, commentListing] = data;
        const post = parsePostData(postListing, parsed.sub);
        const comments = parseCommentTree(commentListing, post.author);
        return { post, comments, actualComments: post.commentCount };
      } catch (error) {
        lastError = classifyError(error);
        // 429 — always retry with the configured delay schedule.
        const status = error.status;
        if (status === 429) {
          const delay = REDDIT.RETRY_DELAYS[attempt] || 32e3;
          mcpLog("warning", `Rate limited. Retry ${attempt + 1}/${REDDIT.RETRY_COUNT} after ${delay}ms`, "reddit");
          await sleep(delay);
          continue;
        }
        // Non-retryable failures propagate immediately.
        if (!lastError.retryable) {
          throw error instanceof Error ? error : new Error(lastError.message);
        }
        if (attempt < REDDIT.RETRY_COUNT - 1) {
          const delay = REDDIT.RETRY_DELAYS[attempt] || 2e3;
          mcpLog("warning", `${lastError.code}: ${lastError.message}. Retry ${attempt + 1}/${REDDIT.RETRY_COUNT}`, "reddit");
          await sleep(delay);
        }
      }
    }
    throw new Error(lastError?.message || "Failed to fetch Reddit post after retries");
  }
  /**
   * Fetch several posts; the returned map holds a PostResult or an Error per
   * URL. Small sets (<= BATCH_SIZE) are fetched in a single concurrent pass;
   * larger sets are delegated to batchGetPosts.
   */
  async getPosts(urls) {
    if (urls.length <= REDDIT.BATCH_SIZE) {
      const results = await pMap(
        urls,
        (u) => this.getPost(u).catch((e) => e),
        5
      );
      return new Map(urls.map((u, i) => [u, results[i]]));
    }
    return (await this.batchGetPosts(urls)).results;
  }
  /**
   * Fetch many posts in sequential batches of REDDIT.BATCH_SIZE, pausing
   * 500ms between batches. onBatchComplete (if given) is invoked after each
   * batch; its errors are logged but never propagate.
   * NOTE(review): the fetchComments parameter is accepted but not read in
   * this body — presumably kept for interface compatibility; confirm.
   */
  async batchGetPosts(urls, fetchComments = true, onBatchComplete) {
    const allResults = /* @__PURE__ */ new Map();
    let rateLimitHits = 0;
    const totalBatches = Math.ceil(urls.length / REDDIT.BATCH_SIZE);
    mcpLog("info", `Fetching ${urls.length} posts in ${totalBatches} batch(es), up to ${FETCH_LIMIT} comments/post`, "reddit");
    for (let batchNum = 0; batchNum < totalBatches; batchNum++) {
      const startIdx = batchNum * REDDIT.BATCH_SIZE;
      const batchUrls = urls.slice(startIdx, startIdx + REDDIT.BATCH_SIZE);
      mcpLog("info", `Batch ${batchNum + 1}/${totalBatches} (${batchUrls.length} posts)`, "reddit");
      const batchResult = await processBatch(this, batchUrls);
      for (const [url, result] of batchResult.results) {
        allResults.set(url, result);
      }
      rateLimitHits += batchResult.rateLimitHits;
      // Shield the fetch loop from a faulty progress callback.
      try {
        onBatchComplete?.(batchNum + 1, totalBatches, allResults.size);
      } catch (callbackError) {
        mcpLog("error", `onBatchComplete callback error: ${callbackError}`, "reddit");
      }
      mcpLog("info", `Batch ${batchNum + 1} complete (${allResults.size}/${urls.length})`, "reddit");
      // Brief pause between batches to ease API pressure.
      if (batchNum < totalBatches - 1) {
        await sleep(500);
      }
    }
    return { results: allResults, batchesProcessed: totalBatches, totalPosts: urls.length, rateLimitHits };
  }
}
|
|
284
|
+
// Sole public export of this module; the typed surface lives in reddit.d.ts.
export {
  RedditClient
};
|
|
287
|
+
//# sourceMappingURL=reddit.js.map
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../../src/clients/reddit.ts"],
|
|
4
|
+
"sourcesContent": ["/**\n * Reddit OAuth API Client\n * Fetches posts and comments sorted by score (most upvoted first)\n * Implements robust error handling that NEVER crashes\n */\n\nimport { REDDIT } from '../config/index.js';\nimport { USER_AGENT_VERSION } from '../version.js';\nimport { calculateBackoff } from '../utils/retry.js';\nimport {\n classifyError,\n fetchWithTimeout,\n sleep,\n ErrorCode,\n type StructuredError,\n} from '../utils/errors.js';\nimport { pMap, pMapSettled } from '../utils/concurrency.js';\nimport { mcpLog } from '../utils/logger.js';\n\n// \u2500\u2500 Constants \u2500\u2500\n\nconst REDDIT_TOKEN_URL = 'https://www.reddit.com/api/v1/access_token' as const;\nconst REDDIT_API_BASE = 'https://oauth.reddit.com' as const;\nconst TOKEN_EXPIRY_MS = 55_000 as const; // 55 second expiry (conservative)\n\n// \u2500\u2500 Data Interfaces \u2500\u2500\n\ninterface Post {\n readonly title: string;\n readonly author: string;\n readonly subreddit: string;\n readonly body: string;\n readonly score: number;\n readonly commentCount: number;\n readonly url: string;\n readonly created: Date;\n readonly flair?: string;\n readonly isNsfw: boolean;\n readonly isPinned: boolean;\n}\n\nexport interface Comment {\n readonly author: string;\n readonly body: string;\n readonly score: number;\n readonly depth: number;\n readonly isOP: boolean;\n}\n\nexport interface PostResult {\n readonly post: Post;\n readonly comments: Comment[];\n readonly actualComments: number;\n}\n\ninterface BatchPostResult {\n readonly results: Map<string, PostResult | Error>;\n readonly batchesProcessed: number;\n readonly totalPosts: number;\n readonly rateLimitHits: number;\n}\n\n/** Reddit API \"Listing\" wrapper */\ninterface RedditListing<T> {\n readonly kind: string;\n readonly data: {\n readonly children: ReadonlyArray<{ readonly kind: string; readonly data: T }>;\n readonly after?: string;\n readonly before?: string;\n };\n}\n\n/** Reddit post data from API */\ninterface 
RedditPostData {\n readonly title: string;\n readonly selftext: string;\n readonly selftext_html?: string;\n readonly author: string;\n readonly subreddit: string;\n readonly score: number;\n readonly upvote_ratio: number;\n readonly num_comments: number;\n readonly created_utc: number;\n readonly url: string;\n readonly permalink: string;\n readonly is_self: boolean;\n readonly over_18: boolean;\n readonly stickied: boolean;\n readonly link_flair_text?: string;\n readonly [key: string]: unknown;\n}\n\n/** Reddit comment data from API */\ninterface RedditCommentData {\n readonly body?: string;\n readonly author?: string;\n readonly score?: number;\n readonly created_utc?: number;\n readonly replies?: RedditListing<RedditCommentData> | string;\n readonly [key: string]: unknown;\n}\n\ntype RedditPostResponse = [RedditListing<RedditPostData>, RedditListing<RedditCommentData>];\n\n/** Max comments to fetch per post from Reddit API */\nconst FETCH_LIMIT = REDDIT.FETCH_LIMIT_PER_POST;\n\n// ============================================================================\n// Module-Level Token Cache (shared across all RedditClient instances)\n// ============================================================================\nlet cachedToken: string | null = null;\nlet cachedTokenExpiry = 0;\n\n// Token cache logging only when DEBUG env is set\nconst DEBUG_TOKEN_CACHE = process.env.DEBUG_REDDIT === 'true';\n\n// Pending auth promise for deduplicating concurrent auth calls\nlet pendingAuthPromise: Promise<string | null> | null = null;\n\n// \u2500\u2500 Decomposed Helpers \u2500\u2500\n\n/**\n * Fetch a Reddit post's JSON from the API\n */\nasync function fetchRedditJson(\n sub: string,\n id: string,\n token: string,\n userAgent: string,\n): Promise<RedditPostResponse> {\n const limit = Math.min(FETCH_LIMIT, 500);\n const apiUrl = `${REDDIT_API_BASE}/r/${sub}/comments/${id}?sort=top&limit=${limit}&depth=10&raw_json=1`;\n\n const res = await fetchWithTimeout(apiUrl, {\n headers: 
{\n 'Authorization': `Bearer ${token}`,\n 'User-Agent': userAgent,\n },\n timeoutMs: 30000,\n });\n\n if (res.status === 429) {\n const err = new Error('Rate limited by Reddit API');\n (err as Error & { status: number }).status = 429;\n throw err;\n }\n\n if (res.status === 404) {\n throw new Error(`Post not found: /r/${sub}/comments/${id}`);\n }\n\n if (!res.ok) {\n const err = new Error(`Reddit API error: ${res.status}`);\n (err as Error & { status: number }).status = res.status;\n throw err;\n }\n\n try {\n return await res.json() as RedditPostResponse;\n } catch {\n throw new Error('Failed to parse Reddit API response');\n }\n}\n\n/**\n * Extract structured post data from a Reddit listing\n */\nfunction parsePostData(\n postListing: RedditListing<RedditPostData>,\n sub: string,\n): Post {\n const p = postListing?.data?.children?.[0]?.data;\n if (!p) {\n throw new Error(`Post data not found in response for /r/${sub}`);\n }\n\n return {\n title: p.title || 'Untitled',\n author: p.author || '[deleted]',\n subreddit: p.subreddit || sub,\n body: formatBody(p),\n score: p.score || 0,\n commentCount: p.num_comments || 0,\n url: `https://reddit.com${p.permalink || ''}`,\n created: new Date((p.created_utc || 0) * 1000),\n flair: p.link_flair_text || undefined,\n isNsfw: p.over_18 || false,\n isPinned: p.stickied || false,\n };\n}\n\nfunction formatBody(p: RedditPostData): string {\n if (p.selftext?.trim()) return p.selftext;\n if (p.is_self) return '';\n if (p.url) return `**Link:** ${p.url}`;\n return '';\n}\n\n/** Safety cap on comment tree recursion depth */\nconst MAX_COMMENT_DEPTH = 15 as const;\n\n/**\n * Extract and sort comments from a Reddit comment listing\n */\nfunction parseCommentTree(\n commentListing: RedditListing<RedditCommentData>,\n opAuthor: string,\n): Comment[] {\n const result: Comment[] = [];\n\n const extract = (items: ReadonlyArray<{ readonly kind: string; readonly data: RedditCommentData }>, depth = 0): void => {\n if (depth > 
MAX_COMMENT_DEPTH) return;\n const sorted = [...items].sort((a, b) => (b.data?.score || 0) - (a.data?.score || 0));\n\n for (const c of sorted) {\n if (c.kind !== 't1' || !c.data?.author || c.data.author === '[deleted]') continue;\n\n result.push({\n author: c.data.author,\n body: c.data.body || '',\n score: c.data.score || 0,\n depth,\n isOP: c.data.author === opAuthor,\n });\n\n if (typeof c.data.replies === 'object' && c.data.replies?.data?.children) {\n extract(c.data.replies.data.children, depth + 1);\n }\n }\n };\n\n extract(commentListing?.data?.children || []);\n return result;\n}\n\n// \u2500\u2500 Batch Helpers \u2500\u2500\n\n/**\n * Process a single batch of Reddit URLs, returning results keyed by URL\n */\nasync function processBatch(\n client: RedditClient,\n batchUrls: string[],\n): Promise<{ results: Map<string, PostResult | Error>; rateLimitHits: number }> {\n const results = new Map<string, PostResult | Error>();\n let rateLimitHits = 0;\n\n const batchResults = await pMapSettled(\n batchUrls,\n url => client.getPost(url),\n 5,\n );\n\n for (let i = 0; i < batchResults.length; i++) {\n const result = batchResults[i];\n if (!result) continue;\n const url = batchUrls[i] ?? 
'';\n\n if (result.status === 'fulfilled') {\n results.set(url, result.value);\n } else {\n const errorMsg = result.reason?.message || String(result.reason);\n if (errorMsg.includes('429') || errorMsg.includes('rate')) rateLimitHits++;\n results.set(url, new Error(errorMsg));\n }\n }\n\n return { results, rateLimitHits };\n}\n\n// \u2500\u2500 RedditClient Class \u2500\u2500\n\nexport class RedditClient {\n private userAgent = `script:${USER_AGENT_VERSION} (by /u/research-powerpack)`;\n\n constructor(private clientId: string, private clientSecret: string) {}\n\n /**\n * Authenticate with Reddit API with retry logic\n * Uses module-level token cache and promise deduplication to prevent\n * concurrent auth calls from firing multiple token requests\n * Returns null on failure instead of throwing\n */\n private async auth(): Promise<string | null> {\n if (cachedToken && Date.now() < cachedTokenExpiry - TOKEN_EXPIRY_MS) {\n if (DEBUG_TOKEN_CACHE) console.error('[RedditClient] Token cache HIT');\n return cachedToken;\n }\n\n if (pendingAuthPromise) {\n if (DEBUG_TOKEN_CACHE) console.error('[RedditClient] Auth already in flight, awaiting...');\n return pendingAuthPromise;\n }\n\n pendingAuthPromise = this.performAuth();\n try {\n return await pendingAuthPromise;\n } finally {\n pendingAuthPromise = null;\n }\n }\n\n private async performAuth(): Promise<string | null> {\n if (DEBUG_TOKEN_CACHE) console.error('[RedditClient] Token cache MISS - authenticating');\n\n const credentials = Buffer.from(`${this.clientId}:${this.clientSecret}`).toString('base64');\n\n for (let attempt = 0; attempt < 3; attempt++) {\n try {\n const res = await fetchWithTimeout(REDDIT_TOKEN_URL, {\n method: 'POST',\n headers: {\n 'Authorization': `Basic ${credentials}`,\n 'Content-Type': 'application/x-www-form-urlencoded',\n 'User-Agent': this.userAgent,\n },\n body: 'grant_type=client_credentials',\n timeoutMs: 15000,\n });\n\n if (!res.ok) {\n const text = await res.text().catch(() => '');\n 
mcpLog('error', `Auth failed (${res.status}): ${text}`, 'reddit');\n\n if (res.status === 401 || res.status === 403) {\n cachedToken = null;\n cachedTokenExpiry = 0;\n return null;\n }\n\n if (res.status >= 500 && attempt < 2) {\n await sleep(calculateBackoff(attempt));\n continue;\n }\n\n return null;\n }\n\n const data = await res.json() as { access_token?: string; expires_in?: number };\n if (!data.access_token) {\n mcpLog('error', 'Auth response missing access_token', 'reddit');\n return null;\n }\n\n cachedToken = data.access_token;\n cachedTokenExpiry = Date.now() + (data.expires_in || 3600) * 1000;\n return cachedToken;\n\n } catch (error) {\n const err = classifyError(error);\n mcpLog('error', `Auth error (attempt ${attempt + 1}): ${err.message}`, 'reddit');\n\n if (err.code === ErrorCode.AUTH_ERROR) {\n cachedToken = null;\n cachedTokenExpiry = 0;\n }\n\n if (attempt < 2 && err.retryable) {\n await sleep(calculateBackoff(attempt));\n continue;\n }\n\n return null;\n }\n }\n\n return null;\n }\n\n private parseUrl(url: string): { sub: string; id: string } | null {\n const m = url.match(/reddit\\.com\\/r\\/([^\\/]+)\\/comments\\/([a-z0-9]+)/i);\n return m ? { sub: m[1]!, id: m[2]! 
} : null;\n }\n\n /**\n * Get a single Reddit post with comments\n * Returns PostResult or throws Error (for use with Promise.allSettled)\n */\n async getPost(url: string): Promise<PostResult> {\n const parsed = this.parseUrl(url);\n if (!parsed) {\n throw new Error(`Invalid Reddit URL format: ${url}`);\n }\n\n const token = await this.auth();\n if (!token) {\n throw new Error('Reddit authentication failed - check credentials');\n }\n\n let lastError: StructuredError | null = null;\n\n for (let attempt = 0; attempt < REDDIT.RETRY_COUNT; attempt++) {\n try {\n const data = await fetchRedditJson(parsed.sub, parsed.id, token, this.userAgent);\n const [postListing, commentListing] = data;\n\n const post = parsePostData(postListing, parsed.sub);\n const comments = parseCommentTree(commentListing, post.author);\n\n return { post, comments, actualComments: post.commentCount };\n\n } catch (error) {\n lastError = classifyError(error);\n\n // Rate limited \u2014 always retry with backoff\n const status = (error as Error & { status?: number }).status;\n if (status === 429) {\n const delay = REDDIT.RETRY_DELAYS[attempt] || 32000;\n mcpLog('warning', `Rate limited. Retry ${attempt + 1}/${REDDIT.RETRY_COUNT} after ${delay}ms`, 'reddit');\n await sleep(delay);\n continue;\n }\n\n if (!lastError.retryable) {\n throw error instanceof Error ? error : new Error(lastError.message);\n }\n\n if (attempt < REDDIT.RETRY_COUNT - 1) {\n const delay = REDDIT.RETRY_DELAYS[attempt] || 2000;\n mcpLog('warning', `${lastError.code}: ${lastError.message}. 
Retry ${attempt + 1}/${REDDIT.RETRY_COUNT}`, 'reddit');\n await sleep(delay);\n }\n }\n }\n\n throw new Error(lastError?.message || 'Failed to fetch Reddit post after retries');\n }\n\n async getPosts(urls: string[]): Promise<Map<string, PostResult | Error>> {\n if (urls.length <= REDDIT.BATCH_SIZE) {\n const results = await pMap(\n urls,\n u => this.getPost(u).catch(e => e as Error),\n 5,\n );\n return new Map(urls.map((u, i) => [u, results[i]!]));\n }\n return (await this.batchGetPosts(urls)).results;\n }\n\n async batchGetPosts(\n urls: string[],\n fetchComments = true,\n onBatchComplete?: (batchNum: number, totalBatches: number, processed: number) => void,\n ): Promise<BatchPostResult> {\n const allResults = new Map<string, PostResult | Error>();\n let rateLimitHits = 0;\n\n const totalBatches = Math.ceil(urls.length / REDDIT.BATCH_SIZE);\n mcpLog('info', `Fetching ${urls.length} posts in ${totalBatches} batch(es), up to ${FETCH_LIMIT} comments/post`, 'reddit');\n\n for (let batchNum = 0; batchNum < totalBatches; batchNum++) {\n const startIdx = batchNum * REDDIT.BATCH_SIZE;\n const batchUrls = urls.slice(startIdx, startIdx + REDDIT.BATCH_SIZE);\n\n mcpLog('info', `Batch ${batchNum + 1}/${totalBatches} (${batchUrls.length} posts)`, 'reddit');\n\n const batchResult = await processBatch(this, batchUrls);\n for (const [url, result] of batchResult.results) {\n allResults.set(url, result);\n }\n rateLimitHits += batchResult.rateLimitHits;\n\n try {\n onBatchComplete?.(batchNum + 1, totalBatches, allResults.size);\n } catch (callbackError) {\n mcpLog('error', `onBatchComplete callback error: ${callbackError}`, 'reddit');\n }\n\n mcpLog('info', `Batch ${batchNum + 1} complete (${allResults.size}/${urls.length})`, 'reddit');\n\n if (batchNum < totalBatches - 1) {\n await sleep(500);\n }\n }\n\n return { results: allResults, batchesProcessed: totalBatches, totalPosts: urls.length, rateLimitHits };\n }\n}\n"],
|
|
5
|
+
"mappings": "AAMA,SAAS,cAAc;AACvB,SAAS,0BAA0B;AACnC,SAAS,wBAAwB;AACjC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,MAAM,mBAAmB;AAClC,SAAS,cAAc;AAIvB,MAAM,mBAAmB;AACzB,MAAM,kBAAkB;AACxB,MAAM,kBAAkB;AAkFxB,MAAM,cAAc,OAAO;AAK3B,IAAI,cAA6B;AACjC,IAAI,oBAAoB;AAGxB,MAAM,oBAAoB,QAAQ,IAAI,iBAAiB;AAGvD,IAAI,qBAAoD;AAOxD,eAAe,gBACb,KACA,IACA,OACA,WAC6B;AAC7B,QAAM,QAAQ,KAAK,IAAI,aAAa,GAAG;AACvC,QAAM,SAAS,GAAG,eAAe,MAAM,GAAG,aAAa,EAAE,mBAAmB,KAAK;AAEjF,QAAM,MAAM,MAAM,iBAAiB,QAAQ;AAAA,IACzC,SAAS;AAAA,MACP,iBAAiB,UAAU,KAAK;AAAA,MAChC,cAAc;AAAA,IAChB;AAAA,IACA,WAAW;AAAA,EACb,CAAC;AAED,MAAI,IAAI,WAAW,KAAK;AACtB,UAAM,MAAM,IAAI,MAAM,4BAA4B;AAClD,IAAC,IAAmC,SAAS;AAC7C,UAAM;AAAA,EACR;AAEA,MAAI,IAAI,WAAW,KAAK;AACtB,UAAM,IAAI,MAAM,sBAAsB,GAAG,aAAa,EAAE,EAAE;AAAA,EAC5D;AAEA,MAAI,CAAC,IAAI,IAAI;AACX,UAAM,MAAM,IAAI,MAAM,qBAAqB,IAAI,MAAM,EAAE;AACvD,IAAC,IAAmC,SAAS,IAAI;AACjD,UAAM;AAAA,EACR;AAEA,MAAI;AACF,WAAO,MAAM,IAAI,KAAK;AAAA,EACxB,QAAQ;AACN,UAAM,IAAI,MAAM,qCAAqC;AAAA,EACvD;AACF;AAKA,SAAS,cACP,aACA,KACM;AACN,QAAM,IAAI,aAAa,MAAM,WAAW,CAAC,GAAG;AAC5C,MAAI,CAAC,GAAG;AACN,UAAM,IAAI,MAAM,0CAA0C,GAAG,EAAE;AAAA,EACjE;AAEA,SAAO;AAAA,IACL,OAAO,EAAE,SAAS;AAAA,IAClB,QAAQ,EAAE,UAAU;AAAA,IACpB,WAAW,EAAE,aAAa;AAAA,IAC1B,MAAM,WAAW,CAAC;AAAA,IAClB,OAAO,EAAE,SAAS;AAAA,IAClB,cAAc,EAAE,gBAAgB;AAAA,IAChC,KAAK,qBAAqB,EAAE,aAAa,EAAE;AAAA,IAC3C,SAAS,IAAI,MAAM,EAAE,eAAe,KAAK,GAAI;AAAA,IAC7C,OAAO,EAAE,mBAAmB;AAAA,IAC5B,QAAQ,EAAE,WAAW;AAAA,IACrB,UAAU,EAAE,YAAY;AAAA,EAC1B;AACF;AAEA,SAAS,WAAW,GAA2B;AAC7C,MAAI,EAAE,UAAU,KAAK,EAAG,QAAO,EAAE;AACjC,MAAI,EAAE,QAAS,QAAO;AACtB,MAAI,EAAE,IAAK,QAAO,aAAa,EAAE,GAAG;AACpC,SAAO;AACT;AAGA,MAAM,oBAAoB;AAK1B,SAAS,iBACP,gBACA,UACW;AACX,QAAM,SAAoB,CAAC;AAE3B,QAAM,UAAU,CAAC,OAAmF,QAAQ,MAAY;AACtH,QAAI,QAAQ,kBAAmB;AAC/B,UAAM,SAAS,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,OAAO,EAAE,MAAM,SAAS,MAAM,EAAE,MAAM,SAAS,EAAE;AAEpF,eAAW,KAAK,QAAQ;AACtB,UAAI,EAAE,SAAS,QAAQ,CAAC,EAAE,MAAM,UAAU,EAAE,KAAK,WAAW,YAAa;AAEzE,aAAO,KAAK;AAAA,QACV,QAAQ,EAAE,KAAK;AAAA,QACf,MAAM,EAAE,KAAK,QAAQ;AAAA,QAC
rB,OAAO,EAAE,KAAK,SAAS;AAAA,QACvB;AAAA,QACA,MAAM,EAAE,KAAK,WAAW;AAAA,MAC1B,CAAC;AAED,UAAI,OAAO,EAAE,KAAK,YAAY,YAAY,EAAE,KAAK,SAAS,MAAM,UAAU;AACxE,gBAAQ,EAAE,KAAK,QAAQ,KAAK,UAAU,QAAQ,CAAC;AAAA,MACjD;AAAA,IACF;AAAA,EACF;AAEA,UAAQ,gBAAgB,MAAM,YAAY,CAAC,CAAC;AAC5C,SAAO;AACT;AAOA,eAAe,aACb,QACA,WAC8E;AAC9E,QAAM,UAAU,oBAAI,IAAgC;AACpD,MAAI,gBAAgB;AAEpB,QAAM,eAAe,MAAM;AAAA,IACzB;AAAA,IACA,SAAO,OAAO,QAAQ,GAAG;AAAA,IACzB;AAAA,EACF;AAEA,WAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,UAAM,SAAS,aAAa,CAAC;AAC7B,QAAI,CAAC,OAAQ;AACb,UAAM,MAAM,UAAU,CAAC,KAAK;AAE5B,QAAI,OAAO,WAAW,aAAa;AACjC,cAAQ,IAAI,KAAK,OAAO,KAAK;AAAA,IAC/B,OAAO;AACL,YAAM,WAAW,OAAO,QAAQ,WAAW,OAAO,OAAO,MAAM;AAC/D,UAAI,SAAS,SAAS,KAAK,KAAK,SAAS,SAAS,MAAM,EAAG;AAC3D,cAAQ,IAAI,KAAK,IAAI,MAAM,QAAQ,CAAC;AAAA,IACtC;AAAA,EACF;AAEA,SAAO,EAAE,SAAS,cAAc;AAClC;AAIO,MAAM,aAAa;AAAA,EAGxB,YAAoB,UAA0B,cAAsB;AAAhD;AAA0B;AAAA,EAAuB;AAAA,EAF7D,YAAY,UAAU,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUhD,MAAc,OAA+B;AAC3C,QAAI,eAAe,KAAK,IAAI,IAAI,oBAAoB,iBAAiB;AACnE,UAAI,kBAAmB,SAAQ,MAAM,gCAAgC;AACrE,aAAO;AAAA,IACT;AAEA,QAAI,oBAAoB;AACtB,UAAI,kBAAmB,SAAQ,MAAM,oDAAoD;AACzF,aAAO;AAAA,IACT;AAEA,yBAAqB,KAAK,YAAY;AACtC,QAAI;AACF,aAAO,MAAM;AAAA,IACf,UAAE;AACA,2BAAqB;AAAA,IACvB;AAAA,EACF;AAAA,EAEA,MAAc,cAAsC;AAClD,QAAI,kBAAmB,SAAQ,MAAM,kDAAkD;AAEvF,UAAM,cAAc,OAAO,KAAK,GAAG,KAAK,QAAQ,IAAI,KAAK,YAAY,EAAE,EAAE,SAAS,QAAQ;AAE1F,aAAS,UAAU,GAAG,UAAU,GAAG,WAAW;AAC5C,UAAI;AACF,cAAM,MAAM,MAAM,iBAAiB,kBAAkB;AAAA,UACnD,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,iBAAiB,SAAS,WAAW;AAAA,YACrC,gBAAgB;AAAA,YAChB,cAAc,KAAK;AAAA,UACrB;AAAA,UACA,MAAM;AAAA,UACN,WAAW;AAAA,QACb,CAAC;AAED,YAAI,CAAC,IAAI,IAAI;AACX,gBAAM,OAAO,MAAM,IAAI,KAAK,EAAE,MAAM,MAAM,EAAE;AAC5C,iBAAO,SAAS,gBAAgB,IAAI,MAAM,MAAM,IAAI,IAAI,QAAQ;AAEhE,cAAI,IAAI,WAAW,OAAO,IAAI,WAAW,KAAK;AAC5C,0BAAc;AACd,gCAAoB;AACpB,mBAAO;AAAA,UACT;AAEA,cAAI,IAAI,UAAU,OAAO,UAAU,GAAG;AACpC,kBAAM,MAAM,iBAAiB,OAAO,CAAC;AACrC;AAAA,UACF;AAEA,iBAAO;AAAA,QACT;AAEA,cAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,YAAI,CAAC,KAAK,cAAc;AACtB,iBAAO,SAAS,sCAAsC,QAAQ;AAC9
D,iBAAO;AAAA,QACT;AAEA,sBAAc,KAAK;AACnB,4BAAoB,KAAK,IAAI,KAAK,KAAK,cAAc,QAAQ;AAC7D,eAAO;AAAA,MAET,SAAS,OAAO;AACd,cAAM,MAAM,cAAc,KAAK;AAC/B,eAAO,SAAS,uBAAuB,UAAU,CAAC,MAAM,IAAI,OAAO,IAAI,QAAQ;AAE/E,YAAI,IAAI,SAAS,UAAU,YAAY;AACrC,wBAAc;AACd,8BAAoB;AAAA,QACtB;AAEA,YAAI,UAAU,KAAK,IAAI,WAAW;AAChC,gBAAM,MAAM,iBAAiB,OAAO,CAAC;AACrC;AAAA,QACF;AAEA,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,SAAS,KAAiD;AAChE,UAAM,IAAI,IAAI,MAAM,kDAAkD;AACtE,WAAO,IAAI,EAAE,KAAK,EAAE,CAAC,GAAI,IAAI,EAAE,CAAC,EAAG,IAAI;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,KAAkC;AAC9C,UAAM,SAAS,KAAK,SAAS,GAAG;AAChC,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI,MAAM,8BAA8B,GAAG,EAAE;AAAA,IACrD;AAEA,UAAM,QAAQ,MAAM,KAAK,KAAK;AAC9B,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,kDAAkD;AAAA,IACpE;AAEA,QAAI,YAAoC;AAExC,aAAS,UAAU,GAAG,UAAU,OAAO,aAAa,WAAW;AAC7D,UAAI;AACF,cAAM,OAAO,MAAM,gBAAgB,OAAO,KAAK,OAAO,IAAI,OAAO,KAAK,SAAS;AAC/E,cAAM,CAAC,aAAa,cAAc,IAAI;AAEtC,cAAM,OAAO,cAAc,aAAa,OAAO,GAAG;AAClD,cAAM,WAAW,iBAAiB,gBAAgB,KAAK,MAAM;AAE7D,eAAO,EAAE,MAAM,UAAU,gBAAgB,KAAK,aAAa;AAAA,MAE7D,SAAS,OAAO;AACd,oBAAY,cAAc,KAAK;AAG/B,cAAM,SAAU,MAAsC;AACtD,YAAI,WAAW,KAAK;AAClB,gBAAM,QAAQ,OAAO,aAAa,OAAO,KAAK;AAC9C,iBAAO,WAAW,uBAAuB,UAAU,CAAC,IAAI,OAAO,WAAW,UAAU,KAAK,MAAM,QAAQ;AACvG,gBAAM,MAAM,KAAK;AACjB;AAAA,QACF;AAEA,YAAI,CAAC,UAAU,WAAW;AACxB,gBAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,UAAU,OAAO;AAAA,QACpE;AAEA,YAAI,UAAU,OAAO,cAAc,GAAG;AACpC,gBAAM,QAAQ,OAAO,aAAa,OAAO,KAAK;AAC9C,iBAAO,WAAW,GAAG,UAAU,IAAI,KAAK,UAAU,OAAO,WAAW,UAAU,CAAC,IAAI,OAAO,WAAW,IAAI,QAAQ;AACjH,gBAAM,MAAM,KAAK;AAAA,QACnB;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI,MAAM,WAAW,WAAW,2CAA2C;AAAA,EACnF;AAAA,EAEA,MAAM,SAAS,MAA0D;AACvE,QAAI,KAAK,UAAU,OAAO,YAAY;AACpC,YAAM,UAAU,MAAM;AAAA,QACpB;AAAA,QACA,OAAK,KAAK,QAAQ,CAAC,EAAE,MAAM,OAAK,CAAU;AAAA,QAC1C;AAAA,MACF;AACA,aAAO,IAAI,IAAI,KAAK,IAAI,CAAC,GAAG,MAAM,CAAC,GAAG,QAAQ,CAAC,CAAE,CAAC,CAAC;AAAA,IACrD;AACA,YAAQ,MAAM,KAAK,cAAc,IAAI,GAAG;AAAA,EAC1C;AAAA,EAEA,MAAM,cACJ,MACA,gBAAgB,MAChB,iBAC0B;AAC1B,UAAM,aAAa,oBAAI,IAAgC;AACvD,QAAI,gBAAgB;AAEpB,UAAM
,eAAe,KAAK,KAAK,KAAK,SAAS,OAAO,UAAU;AAC9D,WAAO,QAAQ,YAAY,KAAK,MAAM,aAAa,YAAY,qBAAqB,WAAW,kBAAkB,QAAQ;AAEzH,aAAS,WAAW,GAAG,WAAW,cAAc,YAAY;AAC1D,YAAM,WAAW,WAAW,OAAO;AACnC,YAAM,YAAY,KAAK,MAAM,UAAU,WAAW,OAAO,UAAU;AAEnE,aAAO,QAAQ,SAAS,WAAW,CAAC,IAAI,YAAY,KAAK,UAAU,MAAM,WAAW,QAAQ;AAE5F,YAAM,cAAc,MAAM,aAAa,MAAM,SAAS;AACtD,iBAAW,CAAC,KAAK,MAAM,KAAK,YAAY,SAAS;AAC/C,mBAAW,IAAI,KAAK,MAAM;AAAA,MAC5B;AACA,uBAAiB,YAAY;AAE7B,UAAI;AACF,0BAAkB,WAAW,GAAG,cAAc,WAAW,IAAI;AAAA,MAC/D,SAAS,eAAe;AACtB,eAAO,SAAS,mCAAmC,aAAa,IAAI,QAAQ;AAAA,MAC9E;AAEA,aAAO,QAAQ,SAAS,WAAW,CAAC,cAAc,WAAW,IAAI,IAAI,KAAK,MAAM,KAAK,QAAQ;AAE7F,UAAI,WAAW,eAAe,GAAG;AAC/B,cAAM,MAAM,GAAG;AAAA,MACjB;AAAA,IACF;AAEA,WAAO,EAAE,SAAS,YAAY,kBAAkB,cAAc,YAAY,KAAK,QAAQ,cAAc;AAAA,EACvG;AACF;",
|
|
6
|
+
"names": []
|
|
7
|
+
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Deep Research Client
|
|
3
|
+
* Handles research API requests with web search capabilities
|
|
4
|
+
* Implements robust retry logic and NEVER crashes the server
|
|
5
|
+
*/
|
|
6
|
+
import { type StructuredError } from '../utils/errors.js';
|
|
7
|
+
declare const DEFAULT_RESEARCH_CONCURRENCY: 3;
|
|
8
|
+
declare const MAX_RESEARCH_RETRIES: 3;
|
|
9
|
+
declare const RESEARCH_TEMPERATURE: 0.3;
|
|
10
|
+
interface ResearchParams {
|
|
11
|
+
readonly question: string;
|
|
12
|
+
readonly systemPrompt?: string;
|
|
13
|
+
readonly reasoningEffort?: 'low' | 'medium' | 'high';
|
|
14
|
+
readonly maxSearchResults?: number;
|
|
15
|
+
readonly maxTokens?: number;
|
|
16
|
+
readonly temperature?: number;
|
|
17
|
+
readonly responseFormat?: {
|
|
18
|
+
readonly type: 'json_object' | 'text';
|
|
19
|
+
};
|
|
20
|
+
}
|
|
21
|
+
export interface ResearchResponse {
|
|
22
|
+
readonly id: string;
|
|
23
|
+
readonly model: string;
|
|
24
|
+
readonly created: number;
|
|
25
|
+
readonly content: string;
|
|
26
|
+
readonly finishReason?: string;
|
|
27
|
+
readonly usage?: {
|
|
28
|
+
readonly promptTokens: number;
|
|
29
|
+
readonly completionTokens: number;
|
|
30
|
+
readonly totalTokens: number;
|
|
31
|
+
readonly sourcesUsed?: number;
|
|
32
|
+
};
|
|
33
|
+
readonly annotations?: ReadonlyArray<{
|
|
34
|
+
readonly type: 'url_citation';
|
|
35
|
+
readonly url: string;
|
|
36
|
+
readonly title: string;
|
|
37
|
+
readonly startIndex: number;
|
|
38
|
+
readonly endIndex: number;
|
|
39
|
+
}>;
|
|
40
|
+
readonly error?: StructuredError;
|
|
41
|
+
}
|
|
42
|
+
export { DEFAULT_RESEARCH_CONCURRENCY, MAX_RESEARCH_RETRIES, RESEARCH_TEMPERATURE };
|
|
43
|
+
export declare class ResearchClient {
|
|
44
|
+
private client;
|
|
45
|
+
constructor();
|
|
46
|
+
/**
|
|
47
|
+
* Check if an error is retryable for research requests
|
|
48
|
+
*/
|
|
49
|
+
private isRetryableError;
|
|
50
|
+
/**
|
|
51
|
+
* Make the API call to OpenRouter with retry logic.
|
|
52
|
+
* Returns the raw response or null if all attempts fail.
|
|
53
|
+
*/
|
|
54
|
+
private callOpenRouter;
|
|
55
|
+
/**
|
|
56
|
+
* Execute a single research request with a specific model.
|
|
57
|
+
* Thin orchestrator: build payload → call API → parse response.
|
|
58
|
+
*/
|
|
59
|
+
private executeResearch;
|
|
60
|
+
/**
|
|
61
|
+
* Perform research with retry logic and fallback to secondary model
|
|
62
|
+
* Returns a ResearchResponse - may contain error field on failure
|
|
63
|
+
* NEVER throws - always returns a valid response object
|
|
64
|
+
*/
|
|
65
|
+
research(params: ResearchParams, signal?: AbortSignal): Promise<ResearchResponse>;
|
|
66
|
+
}
|
|
67
|
+
//# sourceMappingURL=research.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"research.d.ts","sourceRoot":"","sources":["../../../src/clients/research.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,EAML,KAAK,eAAe,EACrB,MAAM,oBAAoB,CAAC;AAK5B,QAAA,MAAM,4BAA4B,EAAG,CAAU,CAAC;AAChD,QAAA,MAAM,oBAAoB,EAAG,CAAU,CAAC;AACxC,QAAA,MAAM,oBAAoB,EAAG,GAAY,CAAC;AAyB1C,UAAU,cAAc;IACtB,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,YAAY,CAAC,EAAE,MAAM,CAAC;IAC/B,QAAQ,CAAC,eAAe,CAAC,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;IACrD,QAAQ,CAAC,gBAAgB,CAAC,EAAE,MAAM,CAAC;IACnC,QAAQ,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;IAC9B,QAAQ,CAAC,cAAc,CAAC,EAAE;QAAE,QAAQ,CAAC,IAAI,EAAE,aAAa,GAAG,MAAM,CAAA;KAAE,CAAC;CACrE;AAED,MAAM,WAAW,gBAAgB;IAC/B,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC;IACpB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,YAAY,CAAC,EAAE,MAAM,CAAC;IAC/B,QAAQ,CAAC,KAAK,CAAC,EAAE;QACf,QAAQ,CAAC,YAAY,EAAE,MAAM,CAAC;QAC9B,QAAQ,CAAC,gBAAgB,EAAE,MAAM,CAAC;QAClC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;QAC7B,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;KAC/B,CAAC;IACF,QAAQ,CAAC,WAAW,CAAC,EAAE,aAAa,CAAC;QACnC,QAAQ,CAAC,IAAI,EAAE,cAAc,CAAC;QAC9B,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;QACrB,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;QACvB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;QAC5B,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;KAC3B,CAAC,CAAC;IACH,QAAQ,CAAC,KAAK,CAAC,EAAE,eAAe,CAAC;CAClC;AA0ID,OAAO,EAAE,4BAA4B,EAAE,oBAAoB,EAAE,oBAAoB,EAAE,CAAC;AAEpF,qBAAa,cAAc;IACzB,OAAO,CAAC,MAAM,CAAS;;IAevB;;OAEG;IACH,OAAO,CAAC,gBAAgB;IAiCxB;;;OAGG;YACW,cAAc;IAsF5B;;;OAGG;YACW,eAAe;IAsB7B;;;;OAIG;IACG,QAAQ,CAAC,MAAM,EAAE,cAAc,EAAE,MAAM,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,gBAAgB,CAAC;CAkExF"}
|