mcp-researchpowerpack-http 3.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127) hide show
  1. package/README.md +124 -0
  2. package/dist/index.d.ts +3 -0
  3. package/dist/index.d.ts.map +1 -0
  4. package/dist/index.js +227 -0
  5. package/dist/index.js.map +7 -0
  6. package/dist/mcp-use.json +7 -0
  7. package/dist/src/clients/github.d.ts +83 -0
  8. package/dist/src/clients/github.d.ts.map +1 -0
  9. package/dist/src/clients/github.js +370 -0
  10. package/dist/src/clients/github.js.map +7 -0
  11. package/dist/src/clients/reddit.d.ts +60 -0
  12. package/dist/src/clients/reddit.d.ts.map +1 -0
  13. package/dist/src/clients/reddit.js +287 -0
  14. package/dist/src/clients/reddit.js.map +7 -0
  15. package/dist/src/clients/research.d.ts +67 -0
  16. package/dist/src/clients/research.d.ts.map +1 -0
  17. package/dist/src/clients/research.js +282 -0
  18. package/dist/src/clients/research.js.map +7 -0
  19. package/dist/src/clients/scraper.d.ts +72 -0
  20. package/dist/src/clients/scraper.d.ts.map +1 -0
  21. package/dist/src/clients/scraper.js +327 -0
  22. package/dist/src/clients/scraper.js.map +7 -0
  23. package/dist/src/clients/search.d.ts +57 -0
  24. package/dist/src/clients/search.d.ts.map +1 -0
  25. package/dist/src/clients/search.js +218 -0
  26. package/dist/src/clients/search.js.map +7 -0
  27. package/dist/src/config/index.d.ts +93 -0
  28. package/dist/src/config/index.d.ts.map +1 -0
  29. package/dist/src/config/index.js +218 -0
  30. package/dist/src/config/index.js.map +7 -0
  31. package/dist/src/schemas/deep-research.d.ts +40 -0
  32. package/dist/src/schemas/deep-research.d.ts.map +1 -0
  33. package/dist/src/schemas/deep-research.js +216 -0
  34. package/dist/src/schemas/deep-research.js.map +7 -0
  35. package/dist/src/schemas/github-score.d.ts +50 -0
  36. package/dist/src/schemas/github-score.d.ts.map +1 -0
  37. package/dist/src/schemas/github-score.js +58 -0
  38. package/dist/src/schemas/github-score.js.map +7 -0
  39. package/dist/src/schemas/scrape-links.d.ts +23 -0
  40. package/dist/src/schemas/scrape-links.d.ts.map +1 -0
  41. package/dist/src/schemas/scrape-links.js +32 -0
  42. package/dist/src/schemas/scrape-links.js.map +7 -0
  43. package/dist/src/schemas/web-search.d.ts +18 -0
  44. package/dist/src/schemas/web-search.d.ts.map +1 -0
  45. package/dist/src/schemas/web-search.js +28 -0
  46. package/dist/src/schemas/web-search.js.map +7 -0
  47. package/dist/src/scoring/github-quality.d.ts +142 -0
  48. package/dist/src/scoring/github-quality.d.ts.map +1 -0
  49. package/dist/src/scoring/github-quality.js +202 -0
  50. package/dist/src/scoring/github-quality.js.map +7 -0
  51. package/dist/src/services/file-attachment.d.ts +30 -0
  52. package/dist/src/services/file-attachment.d.ts.map +1 -0
  53. package/dist/src/services/file-attachment.js +205 -0
  54. package/dist/src/services/file-attachment.js.map +7 -0
  55. package/dist/src/services/llm-processor.d.ts +29 -0
  56. package/dist/src/services/llm-processor.d.ts.map +1 -0
  57. package/dist/src/services/llm-processor.js +206 -0
  58. package/dist/src/services/llm-processor.js.map +7 -0
  59. package/dist/src/services/markdown-cleaner.d.ts +8 -0
  60. package/dist/src/services/markdown-cleaner.d.ts.map +1 -0
  61. package/dist/src/services/markdown-cleaner.js +63 -0
  62. package/dist/src/services/markdown-cleaner.js.map +7 -0
  63. package/dist/src/tools/github-score.d.ts +12 -0
  64. package/dist/src/tools/github-score.d.ts.map +1 -0
  65. package/dist/src/tools/github-score.js +306 -0
  66. package/dist/src/tools/github-score.js.map +7 -0
  67. package/dist/src/tools/mcp-helpers.d.ts +27 -0
  68. package/dist/src/tools/mcp-helpers.d.ts.map +1 -0
  69. package/dist/src/tools/mcp-helpers.js +47 -0
  70. package/dist/src/tools/mcp-helpers.js.map +7 -0
  71. package/dist/src/tools/reddit.d.ts +54 -0
  72. package/dist/src/tools/reddit.d.ts.map +1 -0
  73. package/dist/src/tools/reddit.js +498 -0
  74. package/dist/src/tools/reddit.js.map +7 -0
  75. package/dist/src/tools/registry.d.ts +3 -0
  76. package/dist/src/tools/registry.d.ts.map +1 -0
  77. package/dist/src/tools/registry.js +17 -0
  78. package/dist/src/tools/registry.js.map +7 -0
  79. package/dist/src/tools/research.d.ts +14 -0
  80. package/dist/src/tools/research.d.ts.map +1 -0
  81. package/dist/src/tools/research.js +250 -0
  82. package/dist/src/tools/research.js.map +7 -0
  83. package/dist/src/tools/scrape.d.ts +14 -0
  84. package/dist/src/tools/scrape.d.ts.map +1 -0
  85. package/dist/src/tools/scrape.js +290 -0
  86. package/dist/src/tools/scrape.js.map +7 -0
  87. package/dist/src/tools/search.d.ts +10 -0
  88. package/dist/src/tools/search.d.ts.map +1 -0
  89. package/dist/src/tools/search.js +197 -0
  90. package/dist/src/tools/search.js.map +7 -0
  91. package/dist/src/tools/utils.d.ts +105 -0
  92. package/dist/src/tools/utils.d.ts.map +1 -0
  93. package/dist/src/tools/utils.js +96 -0
  94. package/dist/src/tools/utils.js.map +7 -0
  95. package/dist/src/utils/concurrency.d.ts +28 -0
  96. package/dist/src/utils/concurrency.d.ts.map +1 -0
  97. package/dist/src/utils/concurrency.js +62 -0
  98. package/dist/src/utils/concurrency.js.map +7 -0
  99. package/dist/src/utils/errors.d.ts +95 -0
  100. package/dist/src/utils/errors.d.ts.map +1 -0
  101. package/dist/src/utils/errors.js +289 -0
  102. package/dist/src/utils/errors.js.map +7 -0
  103. package/dist/src/utils/logger.d.ts +33 -0
  104. package/dist/src/utils/logger.d.ts.map +1 -0
  105. package/dist/src/utils/logger.js +41 -0
  106. package/dist/src/utils/logger.js.map +7 -0
  107. package/dist/src/utils/markdown-formatter.d.ts +5 -0
  108. package/dist/src/utils/markdown-formatter.d.ts.map +1 -0
  109. package/dist/src/utils/markdown-formatter.js +15 -0
  110. package/dist/src/utils/markdown-formatter.js.map +7 -0
  111. package/dist/src/utils/response.d.ts +83 -0
  112. package/dist/src/utils/response.d.ts.map +1 -0
  113. package/dist/src/utils/response.js +109 -0
  114. package/dist/src/utils/response.js.map +7 -0
  115. package/dist/src/utils/retry.d.ts +43 -0
  116. package/dist/src/utils/retry.d.ts.map +1 -0
  117. package/dist/src/utils/retry.js +37 -0
  118. package/dist/src/utils/retry.js.map +7 -0
  119. package/dist/src/utils/url-aggregator.d.ts +92 -0
  120. package/dist/src/utils/url-aggregator.d.ts.map +1 -0
  121. package/dist/src/utils/url-aggregator.js +357 -0
  122. package/dist/src/utils/url-aggregator.js.map +7 -0
  123. package/dist/src/version.d.ts +28 -0
  124. package/dist/src/version.d.ts.map +1 -0
  125. package/dist/src/version.js +32 -0
  126. package/dist/src/version.js.map +7 -0
  127. package/package.json +73 -0
@@ -0,0 +1,357 @@
1
+ import { CTR_WEIGHTS } from "../config/index.js";
2
/** Minimum frequency for web search consensus marking. */
const WEB_CONSENSUS_THRESHOLD = 3;
/**
 * Minimum frequency for Reddit consensus marking (lower due to fewer
 * overlapping results).
 * NOTE(review): not referenced anywhere in this compiled module — verify
 * against the TypeScript source whether it is dead or used elsewhere.
 */
const REDDIT_CONSENSUS_THRESHOLD = 2;
/** Minimum weight assigned to positions beyond top 10. */
const MIN_BEYOND_TOP10_WEIGHT = 0;
/** Weight decay per position beyond top 10. */
const BEYOND_TOP10_DECAY = 0.5;
/** Base position for beyond-top-10 weight calculation. */
const BEYOND_TOP10_BASE = 10;
/** Default minimum consensus URLs before lowering the threshold (web search). */
const DEFAULT_MIN_CONSENSUS_URLS = 5;
/** Default minimum consensus URLs before lowering the threshold (Reddit). */
const DEFAULT_REDDIT_MIN_CONSENSUS_URLS = 3;
/** High consensus frequency threshold for enhanced output labeling. */
const HIGH_CONSENSUS_THRESHOLD = 4;
10
/**
 * Returns the CTR (click-through-rate) weight for a 1-based search result
 * position.
 *
 * Positions 1-10 use the configured CTR_WEIGHTS table (0 when a slot is
 * missing). Positions beyond 10 decay linearly from BEYOND_TOP10_BASE by
 * BEYOND_TOP10_DECAY per extra position, floored at MIN_BEYOND_TOP10_WEIGHT.
 *
 * Fix: positions < 1 previously fell through to the beyond-top-10 formula
 * and received inflated weights (e.g. position 0 -> 15, higher than any
 * legitimate weight); invalid positions now score 0. All in-file callers
 * pass positions >= 1, so their behavior is unchanged.
 *
 * @param {number} position - 1-based result position.
 * @returns {number} Non-negative CTR weight.
 */
function getCtrWeight(position) {
  if (position < 1) {
    return 0;
  }
  if (position <= 10) {
    return CTR_WEIGHTS[position] ?? 0;
  }
  return Math.max(MIN_BEYOND_TOP10_WEIGHT, BEYOND_TOP10_BASE - (position - BEYOND_TOP10_BASE) * BEYOND_TOP10_DECAY);
}
16
/**
 * Merges raw results from multiple keyword searches into a map keyed by
 * normalized URL. Each entry accumulates frequency, every observed position,
 * the originating query keywords, and a CTR-weighted total score. The stored
 * title/snippet always come from the best (lowest) position seen so far.
 *
 * @param {Array<{keyword: string, results: Array<{link: string, title: string, snippet: string, position: number}>}>} searches
 * @returns {Map<string, object>} normalized URL -> aggregated entry
 */
function aggregateResults(searches) {
  const aggregated = new Map();
  for (const { keyword, results } of searches) {
    for (const item of results) {
      const key = normalizeUrl(item.link);
      const entry = aggregated.get(key);
      if (!entry) {
        // First sighting: seed a fresh aggregation record.
        aggregated.set(key, {
          url: item.link,
          title: item.title,
          snippet: item.snippet,
          frequency: 1,
          positions: [item.position],
          queries: [keyword],
          bestPosition: item.position,
          totalScore: getCtrWeight(item.position)
        });
        continue;
      }
      entry.frequency += 1;
      entry.positions.push(item.position);
      entry.queries.push(keyword);
      // Refresh display fields only when this hit beats the best position so far.
      if (item.position < entry.bestPosition) {
        entry.bestPosition = item.position;
        entry.title = item.title;
        entry.snippet = item.snippet;
      }
      entry.totalScore += getCtrWeight(item.position);
    }
  }
  return aggregated;
}
49
/**
 * Canonicalizes a URL for deduplication: drops the scheme and fragment,
 * strips a leading "www.", removes a trailing slash from the path (keeping
 * "/" for the root), keeps the query string, and lowercases the result.
 * Strings that fail to parse as URLs are lowercased with any trailing slash
 * removed.
 *
 * @param {string} url
 * @returns {string} normalization key (not itself a valid URL)
 */
function normalizeUrl(url) {
  let parsed;
  try {
    parsed = new URL(url);
  } catch {
    // Not parseable as a URL: best-effort textual normalization.
    return url.toLowerCase().replace(/\/$/, "");
  }
  const host = parsed.hostname.replace(/^www\./, "");
  const path = parsed.pathname.replace(/\/$/, "") || "/";
  return (host + path + parsed.search).toLowerCase();
}
59
/**
 * Counts aggregated entries whose frequency meets a minimum.
 *
 * @param {Map<string, {frequency: number}>} urlMap - aggregation map.
 * @param {number} minFrequency - inclusive lower bound.
 * @returns {number} number of entries at or above the bound.
 */
function countByFrequency(urlMap, minFrequency) {
  let matches = 0;
  urlMap.forEach((entry) => {
    if (entry.frequency >= minFrequency) {
      matches += 1;
    }
  });
  return matches;
}
66
/**
 * Ranks aggregated URLs by CTR-weighted total score. Scores are normalized
 * to a 0-100 scale relative to the highest total score; entries whose
 * frequency meets the consensus threshold are flagged.
 *
 * @param {Array<object>} urls - aggregated URL entries.
 * @param {number} consensusThreshold - frequency needed for isConsensus.
 * @returns {Array<object>} entries sorted by descending score, rank starting at 1.
 */
function calculateWeightedScores(urls, consensusThreshold) {
  if (!urls.length) {
    return [];
  }
  const byScoreDesc = urls.slice().sort((a, b) => b.totalScore - a.totalScore);
  const topScore = byScoreDesc[0].totalScore;
  return byScoreDesc.map((entry, i) => ({
    url: entry.url,
    title: entry.title,
    snippet: entry.snippet,
    rank: i + 1,
    // Guard against division by zero when every total score is 0.
    score: topScore > 0 ? entry.totalScore / topScore * 100 : 0,
    frequency: entry.frequency,
    positions: entry.positions,
    queries: entry.queries,
    bestPosition: entry.bestPosition,
    isConsensus: entry.frequency >= consensusThreshold
  }));
}
83
/**
 * Returns a check mark when frequency reaches the web consensus threshold,
 * otherwise a cross mark.
 *
 * @param {number} frequency - cross-query appearance count.
 * @returns {string} "\u2713" (check) or "\u2717" (cross).
 */
function markConsensus(frequency) {
  if (frequency >= WEB_CONSENSUS_THRESHOLD) {
    return "\u2713";
  }
  return "\u2717";
}
86
/**
 * Builds a one-sentence explanation of why a URL earned its rank, combining
 * a frequency tier (high consensus / consensus / plain count) with its best
 * observed position (#1 or top-3).
 *
 * @param {object} url - ranked entry; reads frequency and bestPosition.
 * @param {number} rank - display rank; accepted for interface compatibility
 *   but not currently used in the message.
 * @returns {string} sentence ending with a period.
 */
function generateJustification(url, rank) {
  const clauses = [];
  if (url.frequency >= HIGH_CONSENSUS_THRESHOLD) {
    clauses.push(`Appeared in ${url.frequency} different searches showing strong cross-query relevance`);
  } else if (url.frequency >= WEB_CONSENSUS_THRESHOLD) {
    clauses.push(`Found across ${url.frequency} searches indicating solid topical coverage`);
  } else {
    const plural = url.frequency > 1 ? "es" : "";
    clauses.push(`Appeared in ${url.frequency} search${plural}`);
  }
  if (url.bestPosition === 1) {
    clauses.push("ranked #1 in at least one search");
  } else if (url.bestPosition <= 3) {
    clauses.push(`best position was top-3 (#${url.bestPosition})`);
  }
  return `${clauses.join(", ")}.`;
}
102
/**
 * Renders aggregated web-search rankings as a markdown report: a summary
 * header, an optional threshold note, one section per ranked URL, and a
 * trailing metadata section.
 *
 * @param {Array<object>} rankedUrls - output of calculateWeightedScores.
 * @param {Array<string>} allKeywords - the query keywords that were searched.
 * @param {number} totalUniqueUrls - unique URL count before ranking.
 * @param {number} frequencyThreshold - consensus threshold actually used.
 * @param {string} [thresholdNote] - optional note when the threshold was lowered.
 * @returns {string} markdown document.
 */
function generateEnhancedOutput(rankedUrls, allKeywords, totalUniqueUrls, frequencyThreshold, thresholdNote) {
  const lines = [];
  const consensusCount = rankedUrls.filter((u) => u.isConsensus).length;
  lines.push(`## Aggregated Search Results (${allKeywords.length} Queries \u2192 ${rankedUrls.length} Unique URLs)`);
  lines.push("");
  lines.push(`Based on ${allKeywords.length} distinct searches, we found **${rankedUrls.length} unique resources** (${consensusCount} appear in multiple queries).`);
  lines.push("");
  // Surface the adaptive-threshold note (if any) as a blockquote.
  if (thresholdNote) {
    lines.push(`> ${thresholdNote}`);
    lines.push("");
  }
  lines.push("### \u{1F947} Ranked Resources");
  lines.push("");
  for (const url of rankedUrls) {
    // Label tiers: >= HIGH_CONSENSUS_THRESHOLD gets the star badge, mere consensus a check mark.
    const highConsensus = url.frequency >= HIGH_CONSENSUS_THRESHOLD ? " \u2B50 HIGHEST CONSENSUS" : url.isConsensus ? " \u2713 CONSENSUS" : "";
    lines.push(`#### #${url.rank}: ${url.title} (Score: ${url.score.toFixed(1)})${highConsensus}`);
    const queriesList = url.queries.map((q) => `"${q}"`).join(", ");
    lines.push(`- **Appeared in:** ${url.frequency} queries (${queriesList})`);
    lines.push(`- **Best ranking:** Position ${url.bestPosition}`);
    lines.push(`- **Description:** ${url.snippet}`);
    lines.push(`- **Why it's #${url.rank}:** ${generateJustification(url, url.rank)}`);
    lines.push(`- **URL:** ${url.url}`);
    lines.push("");
  }
  lines.push("---");
  lines.push("");
  lines.push("### \u{1F4C8} Metadata");
  lines.push("");
  lines.push(`- **Total Queries:** ${allKeywords.length} (${allKeywords.join(", ")})`);
  const sortedByFreq = [...rankedUrls].sort((a, b) => b.frequency - a.frequency);
  // NOTE(review): despite the "top by frequency" label, this lists EVERY
  // ranked URL (sorted by frequency), not a truncated top-N — confirm intent.
  const urlFreqList = sortedByFreq.map((u) => `${u.url} (${u.frequency}x)`).join(", ");
  lines.push(`- **Unique URLs Found:** ${totalUniqueUrls} \u2014 top by frequency: ${urlFreqList}`);
  lines.push(`- **Consensus Threshold:** \u2265${frequencyThreshold} appearances`);
  lines.push("");
  return lines.join("\n");
}
138
/**
 * Full aggregation pipeline for web searches: merge results across queries,
 * adaptively pick a consensus frequency threshold (tries 3, then 2, then 1 —
 * the first that yields at least minConsensusUrls matching URLs, with 1 as
 * the unconditional fallback), then produce CTR-weighted rankings.
 *
 * @param {Array} searches - keyword search result sets.
 * @param {number} [minConsensusUrls=DEFAULT_MIN_CONSENSUS_URLS] - minimum
 *   consensus URLs required before lowering the threshold.
 * @returns {{rankedUrls: Array, totalUniqueUrls: number, totalQueries: number,
 *   frequencyThreshold: number, thresholdNote: (string|undefined)}}
 */
function aggregateAndRank(searches, minConsensusUrls = DEFAULT_MIN_CONSENSUS_URLS) {
  const urlMap = aggregateResults(searches);
  let usedThreshold = 1;
  let thresholdNote;
  for (const candidate of [3, 2, 1]) {
    const matching = countByFrequency(urlMap, candidate);
    if (matching >= minConsensusUrls || candidate === 1) {
      usedThreshold = candidate;
      // Explain to the reader whenever we had to relax the default of 3.
      if (candidate < 3) {
        thresholdNote = `Note: Consensus threshold set to \u2265${candidate} due to result diversity.`;
      }
      break;
    }
  }
  const rankedUrls = calculateWeightedScores([...urlMap.values()], usedThreshold);
  return {
    rankedUrls,
    totalUniqueUrls: urlMap.size,
    totalQueries: searches.length,
    frequencyThreshold: usedThreshold,
    thresholdNote
  };
}
165
/**
 * Builds a lookup map that resolves both the normalized form and the
 * lowercased raw form of each ranked URL to its entry, so callers can
 * match URLs regardless of which shape they hold.
 *
 * @param {Array<object>} rankedUrls - ranked entries with a url property.
 * @returns {Map<string, object>} key (normalized or lowercased URL) -> entry
 */
function buildUrlLookup(rankedUrls) {
  const lookup = new Map();
  rankedUrls.forEach((entry) => {
    lookup.set(normalizeUrl(entry.url), entry);
    lookup.set(entry.url.toLowerCase(), entry);
  });
  return lookup;
}
174
/**
 * Resolves a URL against a lookup built by buildUrlLookup, trying the
 * normalized key first and falling back to the lowercased raw URL.
 *
 * @param {string} url - URL to resolve.
 * @param {Map<string, object>} lookup - map from buildUrlLookup.
 * @returns {object|undefined} matching ranked entry, if any.
 */
function lookupUrl(url, lookup) {
  const byNormalizedKey = lookup.get(normalizeUrl(url));
  return byNormalizedKey || lookup.get(url.toLowerCase());
}
178
/**
 * Merges Reddit search results (a Map of query -> ordered result list) into
 * a map keyed by normalized URL. Position is derived from each result's
 * 1-based index within its list. Title, snippet, and date always reflect the
 * best (lowest) position seen so far; falsy list slots are skipped.
 *
 * @param {Map<string, Array<{url: string, title: string, snippet: string, date?: string}>>} searches
 * @returns {Map<string, object>} normalized URL -> aggregated entry
 */
function aggregateRedditResults(searches) {
  const aggregated = new Map();
  for (const [query, results] of searches) {
    results.forEach((item, index) => {
      if (!item) return;
      const position = index + 1;
      const key = normalizeUrl(item.url);
      const entry = aggregated.get(key);
      if (!entry) {
        // First sighting: seed a fresh aggregation record.
        aggregated.set(key, {
          url: item.url,
          title: item.title,
          snippet: item.snippet,
          date: item.date,
          frequency: 1,
          positions: [position],
          queries: [query],
          bestPosition: position,
          totalScore: getCtrWeight(position)
        });
        return;
      }
      entry.frequency += 1;
      entry.positions.push(position);
      entry.queries.push(query);
      // Refresh display fields only when this hit beats the best position so far.
      if (position < entry.bestPosition) {
        entry.bestPosition = position;
        entry.title = item.title;
        entry.snippet = item.snippet;
        entry.date = item.date;
      }
      entry.totalScore += getCtrWeight(position);
    });
  }
  return aggregated;
}
216
/**
 * Counts aggregated Reddit entries whose frequency meets a minimum.
 *
 * @param {Map<string, {frequency: number}>} urlMap - aggregation map.
 * @param {number} minFrequency - inclusive lower bound.
 * @returns {number} number of entries at or above the bound.
 */
function countRedditByFrequency(urlMap, minFrequency) {
  return [...urlMap.values()].reduce(
    (count, entry) => entry.frequency >= minFrequency ? count + 1 : count,
    0
  );
}
223
/**
 * Ranks aggregated Reddit URLs by CTR-weighted total score. Mirrors
 * calculateWeightedScores but carries the post date through to the output.
 * Scores are normalized to 0-100 relative to the highest total score.
 *
 * @param {Array<object>} urls - aggregated Reddit URL entries.
 * @param {number} consensusThreshold - frequency needed for isConsensus.
 * @returns {Array<object>} entries sorted by descending score, rank starting at 1.
 */
function calculateRedditWeightedScores(urls, consensusThreshold) {
  if (urls.length === 0) {
    return [];
  }
  const ordered = urls.slice().sort((a, b) => b.totalScore - a.totalScore);
  const best = ordered[0].totalScore;
  return ordered.map((entry, idx) => ({
    url: entry.url,
    title: entry.title,
    snippet: entry.snippet,
    date: entry.date,
    rank: idx + 1,
    // Guard against division by zero when every total score is 0.
    score: best > 0 ? entry.totalScore / best * 100 : 0,
    frequency: entry.frequency,
    positions: entry.positions,
    queries: entry.queries,
    bestPosition: entry.bestPosition,
    isConsensus: entry.frequency >= consensusThreshold
  }));
}
241
/**
 * Full aggregation pipeline for Reddit searches: merge results across
 * queries, adaptively pick a consensus frequency threshold (tries 2, then 1
 * as the unconditional fallback) based on how many URLs meet it, then
 * produce CTR-weighted rankings.
 *
 * @param {Map<string, Array>} searches - query -> ordered Reddit results.
 * @param {number} [minConsensusUrls=DEFAULT_REDDIT_MIN_CONSENSUS_URLS] -
 *   minimum consensus URLs required before lowering the threshold.
 * @returns {{rankedUrls: Array, totalUniqueUrls: number, totalQueries: number,
 *   frequencyThreshold: number, thresholdNote: (string|undefined)}}
 */
function aggregateAndRankReddit(searches, minConsensusUrls = DEFAULT_REDDIT_MIN_CONSENSUS_URLS) {
  const urlMap = aggregateRedditResults(searches);
  const totalQueries = searches.size;
  let usedThreshold = 1;
  let thresholdNote;
  for (const candidate of [2, 1]) {
    const matching = countRedditByFrequency(urlMap, candidate);
    if (matching >= minConsensusUrls || candidate === 1) {
      usedThreshold = candidate;
      // Only worth explaining a relaxed threshold when several queries ran.
      if (candidate < 2 && totalQueries > 1) {
        thresholdNote = `Note: Consensus threshold set to \u2265${candidate} due to result diversity across queries.`;
      }
      break;
    }
  }
  return {
    rankedUrls: calculateRedditWeightedScores([...urlMap.values()], usedThreshold),
    totalUniqueUrls: urlMap.size,
    totalQueries,
    frequencyThreshold: usedThreshold,
    thresholdNote
  };
}
268
/**
 * Renders aggregated Reddit search results as a markdown report with four
 * sections: an optional high-consensus list (posts found by more than one
 * query), the full CTR-ranked list, optional per-query raw results, and a
 * trailing metadata block.
 *
 * @param {{rankedUrls: Array, totalUniqueUrls: number, frequencyThreshold: number, thresholdNote?: string}} aggregation
 *   - output of aggregateAndRankReddit.
 * @param {Array<string>} allQueries - the queries that were searched.
 * @param {Map<string, Array>} [rawResults] - optional query -> raw result
 *   lists, rendered verbatim when non-empty.
 * @returns {string} markdown document.
 */
function generateRedditEnhancedOutput(aggregation, allQueries, rawResults) {
  const { rankedUrls, totalUniqueUrls, frequencyThreshold, thresholdNote } = aggregation;
  const lines = [];
  lines.push(`# \u{1F50D} Reddit Search Results (Aggregated from ${allQueries.length} Queries)`);
  lines.push("");
  lines.push(`**Total Unique Posts:** ${totalUniqueUrls} | **Consensus Threshold:** \u2265${frequencyThreshold} appearances`);
  lines.push("");
  // Surface the adaptive-threshold note (if any) as a blockquote.
  if (thresholdNote) {
    lines.push(`> ${thresholdNote}`);
    lines.push("");
  }
  // "High consensus" here means: meets the threshold AND appeared in more
  // than one query (frequency > 1), so a threshold of 1 never promotes
  // single-query posts into this section.
  const consensusUrls = rankedUrls.filter((u) => u.frequency >= frequencyThreshold && u.frequency > 1);
  if (consensusUrls.length > 0) {
    lines.push("## \u2B50 High-Consensus Posts (Multiple Queries)");
    lines.push("");
    lines.push("*These posts appeared across multiple search queries, indicating high relevance:*");
    lines.push("");
    for (const url of consensusUrls) {
      const dateStr = url.date ? ` \u2022 \u{1F4C5} ${url.date}` : "";
      const queriesList = url.queries.map((q) => `"${q}"`).join(", ");
      lines.push(`### #${url.rank}: ${url.title}`);
      lines.push(`**Score:** ${url.score.toFixed(1)} | **Found in:** ${url.frequency} queries (${queriesList})${dateStr}`);
      lines.push(`${url.url}`);
      lines.push(`> ${url.snippet}`);
      lines.push("");
    }
    lines.push("---");
    lines.push("");
  }
  lines.push("## \u{1F4CA} All Results (CTR-Ranked)");
  lines.push("");
  for (const url of rankedUrls) {
    const dateStr = url.date ? ` \u2022 \u{1F4C5} ${url.date}` : "";
    const consensusMarker = url.frequency > 1 ? " \u2B50" : "";
    lines.push(`**${url.rank}. ${url.title}**${consensusMarker}${dateStr}`);
    lines.push(`${url.url}`);
    lines.push(`> ${url.snippet}`);
    if (url.frequency > 1) {
      lines.push(`_Found in ${url.frequency} queries: ${url.queries.map((q) => `"${q}"`).join(", ")}_`);
    }
    lines.push("");
  }
  // Optional appendix: untouched per-query results before aggregation.
  if (rawResults && rawResults.size > 0) {
    lines.push("---");
    lines.push("");
    lines.push("## \u{1F4CB} Per-Query Raw Results");
    lines.push("");
    lines.push("*Complete results for each individual query before aggregation:*");
    lines.push("");
    for (const [query, results] of rawResults) {
      lines.push(`### \u{1F50E} Query: "${query}"`);
      lines.push(`**Results:** ${results.length} posts`);
      lines.push("");
      if (results.length === 0) {
        lines.push("_No results found for this query._");
        lines.push("");
        continue;
      }
      for (let i = 0; i < results.length; i++) {
        const result = results[i];
        // Skip falsy slots, matching the aggregation pass.
        if (!result) continue;
        const position = i + 1;
        const dateStr = result.date ? ` \u2022 \u{1F4C5} ${result.date}` : "";
        lines.push(`${position}. **${result.title}**${dateStr}`);
        lines.push(`   ${result.url}`);
        lines.push(`   > ${result.snippet}`);
        lines.push("");
      }
    }
  }
  lines.push("---");
  lines.push("");
  lines.push("### \u{1F4C8} Search Metadata");
  lines.push("");
  lines.push(`- **Queries:** ${allQueries.map((q) => `"${q}"`).join(", ")}`);
  lines.push(`- **Unique Posts Found:** ${totalUniqueUrls}`);
  lines.push(`- **High-Consensus Posts:** ${consensusUrls.length}`);
  lines.push("");
  return lines.join("\n");
}
348
+ export {
349
+ aggregateAndRank,
350
+ aggregateAndRankReddit,
351
+ buildUrlLookup,
352
+ generateEnhancedOutput,
353
+ generateRedditEnhancedOutput,
354
+ lookupUrl,
355
+ markConsensus
356
+ };
357
+ //# sourceMappingURL=url-aggregator.js.map
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../../src/utils/url-aggregator.ts"],
4
+ "sourcesContent": ["/**\n * URL Aggregator Utility\n * Aggregates search results across multiple queries, calculates CTR-weighted scores,\n * and generates consensus-based rankings.\n */\n\nimport { CTR_WEIGHTS } from '../config/index.js';\nimport type { KeywordSearchResult, RedditSearchResult } from '../clients/search.js';\n\n/** Minimum frequency for web search consensus marking */\nconst WEB_CONSENSUS_THRESHOLD = 3 as const;\n\n/** Minimum frequency for Reddit consensus marking (lower due to fewer overlapping results) */\nconst REDDIT_CONSENSUS_THRESHOLD = 2 as const;\n\n/** Minimum weight assigned to positions beyond top 10 */\nconst MIN_BEYOND_TOP10_WEIGHT = 0 as const;\n\n/** Weight decay per position beyond top 10 */\nconst BEYOND_TOP10_DECAY = 0.5 as const;\n\n/** Base position for beyond-top-10 weight calculation */\nconst BEYOND_TOP10_BASE = 10 as const;\n\n/** Default minimum consensus URLs before lowering threshold (web search) */\nconst DEFAULT_MIN_CONSENSUS_URLS = 5 as const;\n\n/** Default minimum consensus URLs before lowering threshold (Reddit) */\nconst DEFAULT_REDDIT_MIN_CONSENSUS_URLS = 3 as const;\n\n/** High consensus frequency threshold for enhanced output labeling */\nconst HIGH_CONSENSUS_THRESHOLD = 4 as const;\n\n/**\n * Aggregated URL data structure\n */\ninterface AggregatedUrl {\n readonly url: string;\n title: string;\n snippet: string;\n frequency: number;\n readonly positions: number[];\n readonly queries: string[];\n bestPosition: number;\n totalScore: number;\n}\n\n/**\n * Ranked URL with normalized score\n */\ninterface RankedUrl {\n readonly url: string;\n readonly title: string;\n readonly snippet: string;\n readonly rank: number;\n readonly score: number;\n readonly frequency: number;\n readonly positions: number[];\n readonly queries: string[];\n readonly bestPosition: number;\n readonly isConsensus: boolean;\n}\n\n/**\n * Aggregation result containing all processed data\n */\ninterface AggregationResult {\n readonly 
rankedUrls: RankedUrl[];\n readonly totalUniqueUrls: number;\n readonly totalQueries: number;\n readonly frequencyThreshold: number;\n readonly thresholdNote?: string;\n}\n\n/**\n * Get CTR weight for a position (1-10)\n * Positions beyond 10 get minimal weight\n */\nfunction getCtrWeight(position: number): number {\n if (position >= 1 && position <= 10) {\n return CTR_WEIGHTS[position] ?? 0;\n }\n // Positions beyond 10 get diminishing returns\n return Math.max(MIN_BEYOND_TOP10_WEIGHT, BEYOND_TOP10_BASE - (position - BEYOND_TOP10_BASE) * BEYOND_TOP10_DECAY);\n}\n\n/**\n * Aggregate results from multiple searches\n * Flattens all results, deduplicates by URL, and tracks frequency/positions\n */\nfunction aggregateResults(searches: KeywordSearchResult[]): Map<string, AggregatedUrl> {\n const urlMap = new Map<string, AggregatedUrl>();\n\n for (const search of searches) {\n for (const result of search.results) {\n const normalizedUrl = normalizeUrl(result.link);\n const existing = urlMap.get(normalizedUrl);\n\n if (existing) {\n existing.frequency += 1;\n existing.positions.push(result.position);\n existing.queries.push(search.keyword);\n const prevBest = existing.bestPosition;\n existing.bestPosition = Math.min(existing.bestPosition, result.position);\n existing.totalScore += getCtrWeight(result.position);\n // Keep best title/snippet (from highest ranking position)\n if (result.position < prevBest) {\n existing.title = result.title;\n existing.snippet = result.snippet;\n }\n } else {\n urlMap.set(normalizedUrl, {\n url: result.link,\n title: result.title,\n snippet: result.snippet,\n frequency: 1,\n positions: [result.position],\n queries: [search.keyword],\n bestPosition: result.position,\n totalScore: getCtrWeight(result.position),\n });\n }\n }\n }\n\n return urlMap;\n}\n\n/**\n * Normalize URL for deduplication\n * Removes trailing slashes, www prefix, and normalizes protocol\n */\nfunction normalizeUrl(url: string): string {\n try {\n const parsed = new 
URL(url);\n let host = parsed.hostname.replace(/^www\\./, '');\n let path = parsed.pathname.replace(/\\/$/, '') || '/';\n return `${host}${path}${parsed.search}`.toLowerCase();\n } catch {\n return url.toLowerCase().replace(/\\/$/, '');\n }\n}\n\n/**\n * Count URLs meeting a frequency threshold\n */\nfunction countByFrequency(\n urlMap: Map<string, AggregatedUrl>,\n minFrequency: number\n): number {\n let count = 0;\n for (const url of urlMap.values()) {\n if (url.frequency >= minFrequency) count++;\n }\n return count;\n}\n\n/**\n * Calculate weighted scores and normalize to 100.0\n * Returns ALL URLs sorted by score with rank assignments and consensus marking\n */\nfunction calculateWeightedScores(urls: AggregatedUrl[], consensusThreshold: number): RankedUrl[] {\n if (urls.length === 0) return [];\n\n // Sort by total score descending\n const sorted = [...urls].sort((a, b) => b.totalScore - a.totalScore);\n\n // Find max score for normalization\n const maxScore = sorted[0]!.totalScore;\n\n // Map to ranked URLs with normalized scores\n return sorted.map((url, index) => ({\n url: url.url,\n title: url.title,\n snippet: url.snippet,\n rank: index + 1,\n score: maxScore > 0 ? (url.totalScore / maxScore) * 100 : 0,\n frequency: url.frequency,\n positions: url.positions,\n queries: url.queries,\n bestPosition: url.bestPosition,\n isConsensus: url.frequency >= consensusThreshold,\n }));\n}\n\n/**\n * Mark consensus status for a URL\n * Returns \"\u2713\" if frequency >= threshold, else \"\u2717\"\n */\nexport function markConsensus(frequency: number): string {\n return frequency >= WEB_CONSENSUS_THRESHOLD ? 
'\u2713' : '\u2717';\n}\n\n/**\n * Generate justification for why a URL is ranked at its position\n */\nfunction generateJustification(url: RankedUrl, rank: number): string {\n const parts: string[] = [];\n \n if (url.frequency >= HIGH_CONSENSUS_THRESHOLD) {\n parts.push(`Appeared in ${url.frequency} different searches showing strong cross-query relevance`);\n } else if (url.frequency >= WEB_CONSENSUS_THRESHOLD) {\n parts.push(`Found across ${url.frequency} searches indicating solid topical coverage`);\n } else {\n parts.push(`Appeared in ${url.frequency} search${url.frequency > 1 ? 'es' : ''}`);\n }\n \n if (url.bestPosition === 1) {\n parts.push('ranked #1 in at least one search');\n } else if (url.bestPosition <= 3) {\n parts.push(`best position was top-3 (#${url.bestPosition})`);\n }\n \n return parts.join(', ') + '.';\n}\n\n/**\n * Generate enhanced narrative output for consensus URLs\n */\nexport function generateEnhancedOutput(\n rankedUrls: RankedUrl[],\n allKeywords: string[],\n totalUniqueUrls: number,\n frequencyThreshold: number,\n thresholdNote?: string\n): string {\n const lines: string[] = [];\n \n // Header\n const consensusCount = rankedUrls.filter(u => u.isConsensus).length;\n lines.push(`## Aggregated Search Results (${allKeywords.length} Queries \u2192 ${rankedUrls.length} Unique URLs)`);\n lines.push('');\n lines.push(`Based on ${allKeywords.length} distinct searches, we found **${rankedUrls.length} unique resources** (${consensusCount} appear in multiple queries).`);\n lines.push('');\n\n if (thresholdNote) {\n lines.push(`> ${thresholdNote}`);\n lines.push('');\n }\n\n // All ranked resources\n lines.push('### \uD83E\uDD47 Ranked Resources');\n lines.push('');\n\n for (const url of rankedUrls) {\n const highConsensus = url.frequency >= HIGH_CONSENSUS_THRESHOLD ? ' \u2B50 HIGHEST CONSENSUS' : url.isConsensus ? 
' \u2713 CONSENSUS' : '';\n lines.push(`#### #${url.rank}: ${url.title} (Score: ${url.score.toFixed(1)})${highConsensus}`);\n \n // Appeared in queries\n const queriesList = url.queries.map(q => `\"${q}\"`).join(', ');\n lines.push(`- **Appeared in:** ${url.frequency} queries (${queriesList})`);\n \n // Best ranking\n lines.push(`- **Best ranking:** Position ${url.bestPosition}`);\n \n // Description\n lines.push(`- **Description:** ${url.snippet}`);\n \n // Justification\n lines.push(`- **Why it's #${url.rank}:** ${generateJustification(url, url.rank)}`);\n \n // URL\n lines.push(`- **URL:** ${url.url}`);\n lines.push('');\n }\n \n // Metadata section\n lines.push('---');\n lines.push('');\n lines.push('### \uD83D\uDCC8 Metadata');\n lines.push('');\n lines.push(`- **Total Queries:** ${allKeywords.length} (${allKeywords.join(', ')})`);\n \n // Sort all URLs by frequency for the unique URLs list\n const sortedByFreq = [...rankedUrls].sort((a, b) => b.frequency - a.frequency);\n const urlFreqList = sortedByFreq\n .map(u => `${u.url} (${u.frequency}x)`)\n .join(', ');\n \n lines.push(`- **Unique URLs Found:** ${totalUniqueUrls} \u2014 top by frequency: ${urlFreqList}`);\n lines.push(`- **Consensus Threshold:** \u2265${frequencyThreshold} appearances`);\n lines.push('');\n\n return lines.join('\\n');\n}\n\n/**\n * Full aggregation pipeline \u2014 returns ALL URLs ranked by CTR score.\n * Determines a consensus threshold (\u22653, \u22652, or \u22651) for labeling, but never\n * drops URLs below the threshold. 
Every collected URL appears in the output.\n */\nexport function aggregateAndRank(\n searches: KeywordSearchResult[],\n minConsensusUrls: number = DEFAULT_MIN_CONSENSUS_URLS\n): AggregationResult {\n const urlMap = aggregateResults(searches);\n const totalUniqueUrls = urlMap.size;\n const totalQueries = searches.length;\n\n // Determine consensus threshold for labeling (not filtering)\n const thresholds = [3, 2, 1];\n let usedThreshold = 1;\n let thresholdNote: string | undefined;\n\n for (const threshold of thresholds) {\n const count = countByFrequency(urlMap, threshold);\n if (count >= minConsensusUrls || threshold === 1) {\n usedThreshold = threshold;\n if (threshold < 3) {\n thresholdNote = `Note: Consensus threshold set to \u2265${threshold} due to result diversity.`;\n }\n break;\n }\n }\n\n // Rank ALL URLs, marking consensus based on determined threshold\n const allUrls = [...urlMap.values()];\n const rankedUrls = calculateWeightedScores(allUrls, usedThreshold);\n\n return {\n rankedUrls,\n totalUniqueUrls,\n totalQueries,\n frequencyThreshold: usedThreshold,\n thresholdNote,\n };\n}\n\n/**\n * Build URL lookup map for quick consensus checking during result formatting\n */\nexport function buildUrlLookup(rankedUrls: RankedUrl[]): Map<string, RankedUrl> {\n const lookup = new Map<string, RankedUrl>();\n \n for (const url of rankedUrls) {\n const normalized = normalizeUrl(url.url);\n lookup.set(normalized, url);\n // Also store original URL\n lookup.set(url.url.toLowerCase(), url);\n }\n\n return lookup;\n}\n\n/**\n * Look up a URL in the ranked results\n */\nexport function lookupUrl(url: string, lookup: Map<string, RankedUrl>): RankedUrl | undefined {\n const normalized = normalizeUrl(url);\n return lookup.get(normalized) || lookup.get(url.toLowerCase());\n}\n\n// ============================================================================\n// Reddit-Specific Aggregation\n// 
============================================================================\n\n/**\n * Aggregated Reddit URL data structure\n */\ninterface AggregatedRedditUrl {\n readonly url: string;\n title: string;\n snippet: string;\n date?: string;\n frequency: number;\n readonly positions: number[];\n readonly queries: string[];\n bestPosition: number;\n totalScore: number;\n}\n\n/**\n * Ranked Reddit URL with normalized score\n */\ninterface RankedRedditUrl {\n readonly url: string;\n readonly title: string;\n readonly snippet: string;\n readonly date?: string;\n readonly rank: number;\n readonly score: number;\n readonly frequency: number;\n readonly positions: number[];\n readonly queries: string[];\n readonly bestPosition: number;\n readonly isConsensus: boolean;\n}\n\n/**\n * Reddit aggregation result\n */\ninterface RedditAggregationResult {\n readonly rankedUrls: RankedRedditUrl[];\n readonly totalUniqueUrls: number;\n readonly totalQueries: number;\n readonly frequencyThreshold: number;\n readonly thresholdNote?: string;\n}\n\n/**\n * Aggregate Reddit search results from multiple queries\n */\nfunction aggregateRedditResults(\n searches: Map<string, RedditSearchResult[]>\n): Map<string, AggregatedRedditUrl> {\n const urlMap = new Map<string, AggregatedRedditUrl>();\n\n for (const [query, results] of searches) {\n for (let i = 0; i < results.length; i++) {\n const result = results[i];\n if (!result) continue;\n const position = i + 1;\n const normalizedUrl = normalizeUrl(result.url);\n const existing = urlMap.get(normalizedUrl);\n\n if (existing) {\n existing.frequency += 1;\n existing.positions.push(position);\n existing.queries.push(query);\n const prevBest = existing.bestPosition;\n existing.bestPosition = Math.min(existing.bestPosition, position);\n existing.totalScore += getCtrWeight(position);\n // Keep best title/snippet (from highest ranking position)\n if (position < prevBest) {\n existing.title = result.title;\n existing.snippet = result.snippet;\n 
existing.date = result.date;\n }\n } else {\n urlMap.set(normalizedUrl, {\n url: result.url,\n title: result.title,\n snippet: result.snippet,\n date: result.date,\n frequency: 1,\n positions: [position],\n queries: [query],\n bestPosition: position,\n totalScore: getCtrWeight(position),\n });\n }\n }\n }\n\n return urlMap;\n}\n\n/**\n * Count Reddit URLs meeting a frequency threshold\n */\nfunction countRedditByFrequency(\n urlMap: Map<string, AggregatedRedditUrl>,\n minFrequency: number\n): number {\n let count = 0;\n for (const url of urlMap.values()) {\n if (url.frequency >= minFrequency) count++;\n }\n return count;\n}\n\n/**\n * Calculate weighted scores for Reddit URLs\n * Returns ALL URLs sorted by score with consensus marking\n */\nfunction calculateRedditWeightedScores(urls: AggregatedRedditUrl[], consensusThreshold: number): RankedRedditUrl[] {\n if (urls.length === 0) return [];\n\n // Sort by total score descending\n const sorted = [...urls].sort((a, b) => b.totalScore - a.totalScore);\n\n // Find max score for normalization\n const maxScore = sorted[0]!.totalScore;\n\n // Map to ranked URLs with normalized scores\n return sorted.map((url, index) => ({\n url: url.url,\n title: url.title,\n snippet: url.snippet,\n date: url.date,\n rank: index + 1,\n score: maxScore > 0 ? 
(url.totalScore / maxScore) * 100 : 0,\n frequency: url.frequency,\n positions: url.positions,\n queries: url.queries,\n bestPosition: url.bestPosition,\n isConsensus: url.frequency >= consensusThreshold,\n }));\n}\n\n/**\n * Full Reddit aggregation pipeline \u2014 returns ALL URLs ranked by CTR score.\n * Determines a consensus threshold for labeling, never drops URLs.\n */\nexport function aggregateAndRankReddit(\n searches: Map<string, RedditSearchResult[]>,\n minConsensusUrls: number = DEFAULT_REDDIT_MIN_CONSENSUS_URLS\n): RedditAggregationResult {\n const urlMap = aggregateRedditResults(searches);\n const totalUniqueUrls = urlMap.size;\n const totalQueries = searches.size;\n\n // Determine consensus threshold for labeling (not filtering)\n const thresholds = [2, 1];\n let usedThreshold = 1;\n let thresholdNote: string | undefined;\n\n for (const threshold of thresholds) {\n const count = countRedditByFrequency(urlMap, threshold);\n if (count >= minConsensusUrls || threshold === 1) {\n usedThreshold = threshold;\n if (threshold < 2 && totalQueries > 1) {\n thresholdNote = `Note: Consensus threshold set to \u2265${threshold} due to result diversity across queries.`;\n }\n break;\n }\n }\n\n // Rank ALL URLs, marking consensus based on determined threshold\n const allUrls = [...urlMap.values()];\n const rankedUrls = calculateRedditWeightedScores(allUrls, usedThreshold);\n\n return {\n rankedUrls,\n totalUniqueUrls,\n totalQueries,\n frequencyThreshold: usedThreshold,\n thresholdNote,\n };\n}\n\n/**\n * Generate enhanced output for Reddit aggregated results\n * Now includes both aggregated view AND per-query raw results\n */\nexport function generateRedditEnhancedOutput(\n aggregation: RedditAggregationResult,\n allQueries: string[],\n rawResults?: Map<string, RedditSearchResult[]>\n): string {\n const { rankedUrls, totalUniqueUrls, frequencyThreshold, thresholdNote } = aggregation;\n const lines: string[] = [];\n\n // Header\n lines.push(`# \uD83D\uDD0D Reddit 
Search Results (Aggregated from ${allQueries.length} Queries)`);\n lines.push('');\n lines.push(`**Total Unique Posts:** ${totalUniqueUrls} | **Consensus Threshold:** \u2265${frequencyThreshold} appearances`);\n lines.push('');\n\n if (thresholdNote) {\n lines.push(`> ${thresholdNote}`);\n lines.push('');\n }\n\n // Consensus section (URLs appearing in multiple queries)\n const consensusUrls = rankedUrls.filter(u => u.frequency >= frequencyThreshold && u.frequency > 1);\n if (consensusUrls.length > 0) {\n lines.push('## \u2B50 High-Consensus Posts (Multiple Queries)');\n lines.push('');\n lines.push('*These posts appeared across multiple search queries, indicating high relevance:*');\n lines.push('');\n\n for (const url of consensusUrls) {\n const dateStr = url.date ? ` \u2022 \uD83D\uDCC5 ${url.date}` : '';\n const queriesList = url.queries.map(q => `\"${q}\"`).join(', ');\n lines.push(`### #${url.rank}: ${url.title}`);\n lines.push(`**Score:** ${url.score.toFixed(1)} | **Found in:** ${url.frequency} queries (${queriesList})${dateStr}`);\n lines.push(`${url.url}`);\n lines.push(`> ${url.snippet}`);\n lines.push('');\n }\n\n lines.push('---');\n lines.push('');\n }\n\n // All results ranked by CTR score\n lines.push('## \uD83D\uDCCA All Results (CTR-Ranked)');\n lines.push('');\n\n for (const url of rankedUrls) {\n const dateStr = url.date ? ` \u2022 \uD83D\uDCC5 ${url.date}` : '';\n const consensusMarker = url.frequency > 1 ? ' \u2B50' : '';\n lines.push(`**${url.rank}. 
${url.title}**${consensusMarker}${dateStr}`);\n lines.push(`${url.url}`);\n lines.push(`> ${url.snippet}`);\n if (url.frequency > 1) {\n lines.push(`_Found in ${url.frequency} queries: ${url.queries.map(q => `\"${q}\"`).join(', ')}_`);\n }\n lines.push('');\n }\n\n // Per-Query Raw Results Section (NEW)\n if (rawResults && rawResults.size > 0) {\n lines.push('---');\n lines.push('');\n lines.push('## \uD83D\uDCCB Per-Query Raw Results');\n lines.push('');\n lines.push('*Complete results for each individual query before aggregation:*');\n lines.push('');\n\n for (const [query, results] of rawResults) {\n lines.push(`### \uD83D\uDD0E Query: \"${query}\"`);\n lines.push(`**Results:** ${results.length} posts`);\n lines.push('');\n\n if (results.length === 0) {\n lines.push('_No results found for this query._');\n lines.push('');\n continue;\n }\n\n for (let i = 0; i < results.length; i++) {\n const result = results[i];\n if (!result) continue;\n const position = i + 1;\n const dateStr = result.date ? ` \u2022 \uD83D\uDCC5 ${result.date}` : '';\n lines.push(`${position}. **${result.title}**${dateStr}`);\n lines.push(` ${result.url}`);\n lines.push(` > ${result.snippet}`);\n lines.push('');\n }\n }\n }\n\n // Metadata\n lines.push('---');\n lines.push('');\n lines.push('### \uD83D\uDCC8 Search Metadata');\n lines.push('');\n lines.push(`- **Queries:** ${allQueries.map(q => `\"${q}\"`).join(', ')}`);\n lines.push(`- **Unique Posts Found:** ${totalUniqueUrls}`);\n lines.push(`- **High-Consensus Posts:** ${consensusUrls.length}`);\n lines.push('');\n\n return lines.join('\\n');\n}\n"],
5
+ "mappings": "AAMA,SAAS,mBAAmB;AAI5B,MAAM,0BAA0B;AAGhC,MAAM,6BAA6B;AAGnC,MAAM,0BAA0B;AAGhC,MAAM,qBAAqB;AAG3B,MAAM,oBAAoB;AAG1B,MAAM,6BAA6B;AAGnC,MAAM,oCAAoC;AAG1C,MAAM,2BAA2B;AA+CjC,SAAS,aAAa,UAA0B;AAC9C,MAAI,YAAY,KAAK,YAAY,IAAI;AACnC,WAAO,YAAY,QAAQ,KAAK;AAAA,EAClC;AAEA,SAAO,KAAK,IAAI,yBAAyB,qBAAqB,WAAW,qBAAqB,kBAAkB;AAClH;AAMA,SAAS,iBAAiB,UAA6D;AACrF,QAAM,SAAS,oBAAI,IAA2B;AAE9C,aAAW,UAAU,UAAU;AAC7B,eAAW,UAAU,OAAO,SAAS;AACnC,YAAM,gBAAgB,aAAa,OAAO,IAAI;AAC9C,YAAM,WAAW,OAAO,IAAI,aAAa;AAEzC,UAAI,UAAU;AACZ,iBAAS,aAAa;AACtB,iBAAS,UAAU,KAAK,OAAO,QAAQ;AACvC,iBAAS,QAAQ,KAAK,OAAO,OAAO;AACpC,cAAM,WAAW,SAAS;AAC1B,iBAAS,eAAe,KAAK,IAAI,SAAS,cAAc,OAAO,QAAQ;AACvE,iBAAS,cAAc,aAAa,OAAO,QAAQ;AAEnD,YAAI,OAAO,WAAW,UAAU;AAC9B,mBAAS,QAAQ,OAAO;AACxB,mBAAS,UAAU,OAAO;AAAA,QAC5B;AAAA,MACF,OAAO;AACL,eAAO,IAAI,eAAe;AAAA,UACxB,KAAK,OAAO;AAAA,UACZ,OAAO,OAAO;AAAA,UACd,SAAS,OAAO;AAAA,UAChB,WAAW;AAAA,UACX,WAAW,CAAC,OAAO,QAAQ;AAAA,UAC3B,SAAS,CAAC,OAAO,OAAO;AAAA,UACxB,cAAc,OAAO;AAAA,UACrB,YAAY,aAAa,OAAO,QAAQ;AAAA,QAC1C,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,aAAa,KAAqB;AACzC,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,QAAI,OAAO,OAAO,SAAS,QAAQ,UAAU,EAAE;AAC/C,QAAI,OAAO,OAAO,SAAS,QAAQ,OAAO,EAAE,KAAK;AACjD,WAAO,GAAG,IAAI,GAAG,IAAI,GAAG,OAAO,MAAM,GAAG,YAAY;AAAA,EACtD,QAAQ;AACN,WAAO,IAAI,YAAY,EAAE,QAAQ,OAAO,EAAE;AAAA,EAC5C;AACF;AAKA,SAAS,iBACP,QACA,cACQ;AACR,MAAI,QAAQ;AACZ,aAAW,OAAO,OAAO,OAAO,GAAG;AACjC,QAAI,IAAI,aAAa,aAAc;AAAA,EACrC;AACA,SAAO;AACT;AAMA,SAAS,wBAAwB,MAAuB,oBAAyC;AAC/F,MAAI,KAAK,WAAW,EAAG,QAAO,CAAC;AAG/B,QAAM,SAAS,CAAC,GAAG,IAAI,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAGnE,QAAM,WAAW,OAAO,CAAC,EAAG;AAG5B,SAAO,OAAO,IAAI,CAAC,KAAK,WAAW;AAAA,IACjC,KAAK,IAAI;AAAA,IACT,OAAO,IAAI;AAAA,IACX,SAAS,IAAI;AAAA,IACb,MAAM,QAAQ;AAAA,IACd,OAAO,WAAW,IAAK,IAAI,aAAa,WAAY,MAAM;AAAA,IAC1D,WAAW,IAAI;AAAA,IACf,WAAW,IAAI;AAAA,IACf,SAAS,IAAI;AAAA,IACb,cAAc,IAAI;AAAA,IAClB,aAAa,IAAI,aAAa;AAAA,EAChC,EAAE;AACJ;AAMO,SAAS,cAAc,WAA2B;AACvD,SAAO,aAAa,0BAA0B,WAAM;AACtD;AAKA,SAAS,sBAAsB,KAAgB,MAAsB;AA
CnE,QAAM,QAAkB,CAAC;AAEzB,MAAI,IAAI,aAAa,0BAA0B;AAC7C,UAAM,KAAK,eAAe,IAAI,SAAS,0DAA0D;AAAA,EACnG,WAAW,IAAI,aAAa,yBAAyB;AACnD,UAAM,KAAK,gBAAgB,IAAI,SAAS,6CAA6C;AAAA,EACvF,OAAO;AACL,UAAM,KAAK,eAAe,IAAI,SAAS,UAAU,IAAI,YAAY,IAAI,OAAO,EAAE,EAAE;AAAA,EAClF;AAEA,MAAI,IAAI,iBAAiB,GAAG;AAC1B,UAAM,KAAK,kCAAkC;AAAA,EAC/C,WAAW,IAAI,gBAAgB,GAAG;AAChC,UAAM,KAAK,6BAA6B,IAAI,YAAY,GAAG;AAAA,EAC7D;AAEA,SAAO,MAAM,KAAK,IAAI,IAAI;AAC5B;AAKO,SAAS,uBACd,YACA,aACA,iBACA,oBACA,eACQ;AACR,QAAM,QAAkB,CAAC;AAGzB,QAAM,iBAAiB,WAAW,OAAO,OAAK,EAAE,WAAW,EAAE;AAC7D,QAAM,KAAK,iCAAiC,YAAY,MAAM,mBAAc,WAAW,MAAM,eAAe;AAC5G,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,YAAY,YAAY,MAAM,kCAAkC,WAAW,MAAM,wBAAwB,cAAc,+BAA+B;AACjK,QAAM,KAAK,EAAE;AAEb,MAAI,eAAe;AACjB,UAAM,KAAK,KAAK,aAAa,EAAE;AAC/B,UAAM,KAAK,EAAE;AAAA,EACf;AAGA,QAAM,KAAK,gCAAyB;AACpC,QAAM,KAAK,EAAE;AAEb,aAAW,OAAO,YAAY;AAC5B,UAAM,gBAAgB,IAAI,aAAa,2BAA2B,8BAAyB,IAAI,cAAc,sBAAiB;AAC9H,UAAM,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,KAAK,YAAY,IAAI,MAAM,QAAQ,CAAC,CAAC,IAAI,aAAa,EAAE;AAG7F,UAAM,cAAc,IAAI,QAAQ,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI;AAC5D,UAAM,KAAK,sBAAsB,IAAI,SAAS,aAAa,WAAW,GAAG;AAGzE,UAAM,KAAK,gCAAgC,IAAI,YAAY,EAAE;AAG7D,UAAM,KAAK,sBAAsB,IAAI,OAAO,EAAE;AAG9C,UAAM,KAAK,iBAAiB,IAAI,IAAI,OAAO,sBAAsB,KAAK,IAAI,IAAI,CAAC,EAAE;AAGjF,UAAM,KAAK,cAAc,IAAI,GAAG,EAAE;AAClC,UAAM,KAAK,EAAE;AAAA,EACf;AAGA,QAAM,KAAK,KAAK;AAChB,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,wBAAiB;AAC5B,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,wBAAwB,YAAY,MAAM,KAAK,YAAY,KAAK,IAAI,CAAC,GAAG;AAGnF,QAAM,eAAe,CAAC,GAAG,UAAU,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS;AAC7E,QAAM,cAAc,aACjB,IAAI,OAAK,GAAG,EAAE,GAAG,KAAK,EAAE,SAAS,IAAI,EACrC,KAAK,IAAI;AAEZ,QAAM,KAAK,4BAA4B,eAAe,6BAAwB,WAAW,EAAE;AAC3F,QAAM,KAAK,oCAA+B,kBAAkB,cAAc;AAC1E,QAAM,KAAK,EAAE;AAEb,SAAO,MAAM,KAAK,IAAI;AACxB;AAOO,SAAS,iBACd,UACA,mBAA2B,4BACR;AACnB,QAAM,SAAS,iBAAiB,QAAQ;AACxC,QAAM,kBAAkB,OAAO;AAC/B,QAAM,eAAe,SAAS;AAG9B,QAAM,aAAa,CAAC,GAAG,GAAG,CAAC;AAC3B,MAAI,gBAAgB;AACpB,MAAI;AAEJ,aAAW,aAAa,YAAY;AAClC,UAAM,QAAQ,iBAAiB,QAAQ,SAAS;AAChD,QAAI,SAAS,oBAAoB,cAAc,GA
AG;AAChD,sBAAgB;AAChB,UAAI,YAAY,GAAG;AACjB,wBAAgB,0CAAqC,SAAS;AAAA,MAChE;AACA;AAAA,IACF;AAAA,EACF;AAGA,QAAM,UAAU,CAAC,GAAG,OAAO,OAAO,CAAC;AACnC,QAAM,aAAa,wBAAwB,SAAS,aAAa;AAEjE,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,oBAAoB;AAAA,IACpB;AAAA,EACF;AACF;AAKO,SAAS,eAAe,YAAiD;AAC9E,QAAM,SAAS,oBAAI,IAAuB;AAE1C,aAAW,OAAO,YAAY;AAC5B,UAAM,aAAa,aAAa,IAAI,GAAG;AACvC,WAAO,IAAI,YAAY,GAAG;AAE1B,WAAO,IAAI,IAAI,IAAI,YAAY,GAAG,GAAG;AAAA,EACvC;AAEA,SAAO;AACT;AAKO,SAAS,UAAU,KAAa,QAAuD;AAC5F,QAAM,aAAa,aAAa,GAAG;AACnC,SAAO,OAAO,IAAI,UAAU,KAAK,OAAO,IAAI,IAAI,YAAY,CAAC;AAC/D;AAoDA,SAAS,uBACP,UACkC;AAClC,QAAM,SAAS,oBAAI,IAAiC;AAEpD,aAAW,CAAC,OAAO,OAAO,KAAK,UAAU;AACvC,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,YAAM,SAAS,QAAQ,CAAC;AACxB,UAAI,CAAC,OAAQ;AACb,YAAM,WAAW,IAAI;AACrB,YAAM,gBAAgB,aAAa,OAAO,GAAG;AAC7C,YAAM,WAAW,OAAO,IAAI,aAAa;AAEzC,UAAI,UAAU;AACZ,iBAAS,aAAa;AACtB,iBAAS,UAAU,KAAK,QAAQ;AAChC,iBAAS,QAAQ,KAAK,KAAK;AAC3B,cAAM,WAAW,SAAS;AAC1B,iBAAS,eAAe,KAAK,IAAI,SAAS,cAAc,QAAQ;AAChE,iBAAS,cAAc,aAAa,QAAQ;AAE5C,YAAI,WAAW,UAAU;AACvB,mBAAS,QAAQ,OAAO;AACxB,mBAAS,UAAU,OAAO;AAC1B,mBAAS,OAAO,OAAO;AAAA,QACzB;AAAA,MACF,OAAO;AACL,eAAO,IAAI,eAAe;AAAA,UACxB,KAAK,OAAO;AAAA,UACZ,OAAO,OAAO;AAAA,UACd,SAAS,OAAO;AAAA,UAChB,MAAM,OAAO;AAAA,UACb,WAAW;AAAA,UACX,WAAW,CAAC,QAAQ;AAAA,UACpB,SAAS,CAAC,KAAK;AAAA,UACf,cAAc;AAAA,UACd,YAAY,aAAa,QAAQ;AAAA,QACnC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,uBACP,QACA,cACQ;AACR,MAAI,QAAQ;AACZ,aAAW,OAAO,OAAO,OAAO,GAAG;AACjC,QAAI,IAAI,aAAa,aAAc;AAAA,EACrC;AACA,SAAO;AACT;AAMA,SAAS,8BAA8B,MAA6B,oBAA+C;AACjH,MAAI,KAAK,WAAW,EAAG,QAAO,CAAC;AAG/B,QAAM,SAAS,CAAC,GAAG,IAAI,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAGnE,QAAM,WAAW,OAAO,CAAC,EAAG;AAG5B,SAAO,OAAO,IAAI,CAAC,KAAK,WAAW;AAAA,IACjC,KAAK,IAAI;AAAA,IACT,OAAO,IAAI;AAAA,IACX,SAAS,IAAI;AAAA,IACb,MAAM,IAAI;AAAA,IACV,MAAM,QAAQ;AAAA,IACd,OAAO,WAAW,IAAK,IAAI,aAAa,WAAY,MAAM;AAAA,IAC1D,WAAW,IAAI;AAAA,IACf,WAAW,IAAI;AAAA,IACf,SAAS,IAAI;AAAA,IACb,cAAc,IAAI;AAAA,IAClB,aAAa,IAAI,aAAa;AAAA,EAChC,EAAE;AACJ;AAMO,SAAS,uBAC
d,UACA,mBAA2B,mCACF;AACzB,QAAM,SAAS,uBAAuB,QAAQ;AAC9C,QAAM,kBAAkB,OAAO;AAC/B,QAAM,eAAe,SAAS;AAG9B,QAAM,aAAa,CAAC,GAAG,CAAC;AACxB,MAAI,gBAAgB;AACpB,MAAI;AAEJ,aAAW,aAAa,YAAY;AAClC,UAAM,QAAQ,uBAAuB,QAAQ,SAAS;AACtD,QAAI,SAAS,oBAAoB,cAAc,GAAG;AAChD,sBAAgB;AAChB,UAAI,YAAY,KAAK,eAAe,GAAG;AACrC,wBAAgB,0CAAqC,SAAS;AAAA,MAChE;AACA;AAAA,IACF;AAAA,EACF;AAGA,QAAM,UAAU,CAAC,GAAG,OAAO,OAAO,CAAC;AACnC,QAAM,aAAa,8BAA8B,SAAS,aAAa;AAEvE,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,oBAAoB;AAAA,IACpB;AAAA,EACF;AACF;AAMO,SAAS,6BACd,aACA,YACA,YACQ;AACR,QAAM,EAAE,YAAY,iBAAiB,oBAAoB,cAAc,IAAI;AAC3E,QAAM,QAAkB,CAAC;AAGzB,QAAM,KAAK,sDAA+C,WAAW,MAAM,WAAW;AACtF,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,2BAA2B,eAAe,qCAAgC,kBAAkB,cAAc;AACrH,QAAM,KAAK,EAAE;AAEb,MAAI,eAAe;AACjB,UAAM,KAAK,KAAK,aAAa,EAAE;AAC/B,UAAM,KAAK,EAAE;AAAA,EACf;AAGA,QAAM,gBAAgB,WAAW,OAAO,OAAK,EAAE,aAAa,sBAAsB,EAAE,YAAY,CAAC;AACjG,MAAI,cAAc,SAAS,GAAG;AAC5B,UAAM,KAAK,mDAA8C;AACzD,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,mFAAmF;AAC9F,UAAM,KAAK,EAAE;AAEb,eAAW,OAAO,eAAe;AAC/B,YAAM,UAAU,IAAI,OAAO,qBAAS,IAAI,IAAI,KAAK;AACjD,YAAM,cAAc,IAAI,QAAQ,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI;AAC5D,YAAM,KAAK,QAAQ,IAAI,IAAI,KAAK,IAAI,KAAK,EAAE;AAC3C,YAAM,KAAK,cAAc,IAAI,MAAM,QAAQ,CAAC,CAAC,oBAAoB,IAAI,SAAS,aAAa,WAAW,IAAI,OAAO,EAAE;AACnH,YAAM,KAAK,GAAG,IAAI,GAAG,EAAE;AACvB,YAAM,KAAK,KAAK,IAAI,OAAO,EAAE;AAC7B,YAAM,KAAK,EAAE;AAAA,IACf;AAEA,UAAM,KAAK,KAAK;AAChB,UAAM,KAAK,EAAE;AAAA,EACf;AAGA,QAAM,KAAK,uCAAgC;AAC3C,QAAM,KAAK,EAAE;AAEb,aAAW,OAAO,YAAY;AAC5B,UAAM,UAAU,IAAI,OAAO,qBAAS,IAAI,IAAI,KAAK;AACjD,UAAM,kBAAkB,IAAI,YAAY,IAAI,YAAO;AACnD,UAAM,KAAK,KAAK,IAAI,IAAI,KAAK,IAAI,KAAK,KAAK,eAAe,GAAG,OAAO,EAAE;AACtE,UAAM,KAAK,GAAG,IAAI,GAAG,EAAE;AACvB,UAAM,KAAK,KAAK,IAAI,OAAO,EAAE;AAC7B,QAAI,IAAI,YAAY,GAAG;AACrB,YAAM,KAAK,aAAa,IAAI,SAAS,aAAa,IAAI,QAAQ,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC,GAAG;AAAA,IAChG;AACA,UAAM,KAAK,EAAE;AAAA,EACf;AAGA,MAAI,cAAc,WAAW,OAAO,GAAG;AACrC,UAAM,KAAK,KAAK;AAChB,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,oCAA6B;AACxC,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,kEAAkE;AAC7E,
UAAM,KAAK,EAAE;AAEb,eAAW,CAAC,OAAO,OAAO,KAAK,YAAY;AACzC,YAAM,KAAK,yBAAkB,KAAK,GAAG;AACrC,YAAM,KAAK,gBAAgB,QAAQ,MAAM,QAAQ;AACjD,YAAM,KAAK,EAAE;AAEb,UAAI,QAAQ,WAAW,GAAG;AACxB,cAAM,KAAK,oCAAoC;AAC/C,cAAM,KAAK,EAAE;AACb;AAAA,MACF;AAEA,eAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,cAAM,SAAS,QAAQ,CAAC;AACxB,YAAI,CAAC,OAAQ;AACb,cAAM,WAAW,IAAI;AACrB,cAAM,UAAU,OAAO,OAAO,qBAAS,OAAO,IAAI,KAAK;AACvD,cAAM,KAAK,GAAG,QAAQ,OAAO,OAAO,KAAK,KAAK,OAAO,EAAE;AACvD,cAAM,KAAK,MAAM,OAAO,GAAG,EAAE;AAC7B,cAAM,KAAK,QAAQ,OAAO,OAAO,EAAE;AACnC,cAAM,KAAK,EAAE;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAGA,QAAM,KAAK,KAAK;AAChB,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,+BAAwB;AACnC,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,kBAAkB,WAAW,IAAI,OAAK,IAAI,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC,EAAE;AACvE,QAAM,KAAK,6BAA6B,eAAe,EAAE;AACzD,QAAM,KAAK,+BAA+B,cAAc,MAAM,EAAE;AAChE,QAAM,KAAK,EAAE;AAEb,SAAO,MAAM,KAAK,IAAI;AACxB;",
6
+ "names": []
7
+ }
@@ -0,0 +1,28 @@
1
+ /**
2
+ * Version Module - Single Source of Truth
3
+ *
4
+ * This module reads package metadata from package.json at runtime and keeps a
5
+ * fallback copy for environments where package.json cannot be resolved.
6
+ *
7
+ * Usage:
8
+ * import { VERSION, PACKAGE_NAME } from './version.js';
9
+ */
10
+ /**
11
+ * Package version from package.json
12
+ * This is the single source of truth for versioning
13
+ */
14
+ export declare const VERSION: string;
15
+ /**
16
+ * Package name from package.json
17
+ */
18
+ export declare const PACKAGE_NAME: string;
19
+ /**
20
+ * Package description from package.json
21
+ */
22
+ export declare const PACKAGE_DESCRIPTION: string;
23
+ /**
24
+ * Formatted version string for user agents and logging
25
+ * Example: "mcp-researchpowerpack-http/3.2.0"
26
+ */
27
+ export declare const USER_AGENT_VERSION: string;
28
+ //# sourceMappingURL=version.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"version.d.ts","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AA6BH;;;GAGG;AACH,eAAO,MAAM,OAAO,EAAE,MAA4B,CAAC;AAEnD;;GAEG;AACH,eAAO,MAAM,YAAY,EAAE,MAAyB,CAAC;AAErD;;GAEG;AACH,eAAO,MAAM,mBAAmB,EAAE,MAAgC,CAAC;AAEnE;;;GAGG;AACH,eAAO,MAAM,kBAAkB,EAAE,MAAqC,CAAC"}
@@ -0,0 +1,32 @@
1
+ import { createRequire } from "module";
2
+ import { fileURLToPath } from "url";
3
+ import { dirname, join } from "path";
4
+ const DEFAULT_PACKAGE_INFO = {
5
+ version: "3.9.5",
6
+ name: "mcp-researchpowerpack-http",
7
+ description: "Research Powerpack MCP Server"
8
+ };
9
+ let packageJson = { ...DEFAULT_PACKAGE_INFO };
10
+ try {
11
+ if (typeof import.meta.url === "string" && import.meta.url.startsWith("file:")) {
12
+ const _require = createRequire(import.meta.url);
13
+ const _dirname = dirname(fileURLToPath(import.meta.url));
14
+ try {
15
+ packageJson = _require(join(_dirname, "..", "package.json"));
16
+ } catch {
17
+ packageJson = _require(join(_dirname, "..", "..", "package.json"));
18
+ }
19
+ }
20
+ } catch {
21
+ }
22
+ const VERSION = packageJson.version;
23
+ const PACKAGE_NAME = packageJson.name;
24
+ const PACKAGE_DESCRIPTION = packageJson.description;
25
+ const USER_AGENT_VERSION = `${PACKAGE_NAME}/${VERSION}`;
26
+ export {
27
+ PACKAGE_DESCRIPTION,
28
+ PACKAGE_NAME,
29
+ USER_AGENT_VERSION,
30
+ VERSION
31
+ };
32
+ //# sourceMappingURL=version.js.map
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../src/version.ts"],
4
+ "sourcesContent": ["/**\n * Version Module - Single Source of Truth\n * \n * This module reads package metadata from package.json at runtime and keeps a\n * fallback copy for environments where package.json cannot be resolved.\n * \n * Usage:\n * import { VERSION, PACKAGE_NAME } from './version.js';\n */\n\nimport { createRequire } from 'module';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\n\n// Defaults used if package.json cannot be loaded at runtime.\nconst DEFAULT_PACKAGE_INFO = {\n version: '3.9.5',\n name: 'mcp-researchpowerpack-http',\n description: 'Research Powerpack MCP Server',\n} as const;\n\nlet packageJson: { version: string; name: string; description: string } = { ...DEFAULT_PACKAGE_INFO };\n\ntry {\n if (typeof import.meta.url === 'string' && import.meta.url.startsWith('file:')) {\n const _require = createRequire(import.meta.url);\n const _dirname = dirname(fileURLToPath(import.meta.url));\n try {\n packageJson = _require(join(_dirname, '..', 'package.json'));\n } catch {\n packageJson = _require(join(_dirname, '..', '..', 'package.json'));\n }\n }\n} catch {\n // Keep hardcoded defaults when package.json is unavailable\n}\n\n/**\n * Package version from package.json\n * This is the single source of truth for versioning\n */\nexport const VERSION: string = packageJson.version;\n\n/**\n * Package name from package.json\n */\nexport const PACKAGE_NAME: string = packageJson.name;\n\n/**\n * Package description from package.json\n */\nexport const PACKAGE_DESCRIPTION: string = packageJson.description;\n\n/**\n * Formatted version string for user agents and logging\n * Example: \"mcp-researchpowerpack-http/3.2.0\"\n */\nexport const USER_AGENT_VERSION: string = `${PACKAGE_NAME}/${VERSION}`;\n\n// VERSION_INFO removed - unused, individual exports sufficient\n"],
5
+ "mappings": "AAUA,SAAS,qBAAqB;AAC9B,SAAS,qBAAqB;AAC9B,SAAS,SAAS,YAAY;AAG9B,MAAM,uBAAuB;AAAA,EAC3B,SAAS;AAAA,EACT,MAAM;AAAA,EACN,aAAa;AACf;AAEA,IAAI,cAAsE,EAAE,GAAG,qBAAqB;AAEpG,IAAI;AACF,MAAI,OAAO,YAAY,QAAQ,YAAY,YAAY,IAAI,WAAW,OAAO,GAAG;AAC9E,UAAM,WAAW,cAAc,YAAY,GAAG;AAC9C,UAAM,WAAW,QAAQ,cAAc,YAAY,GAAG,CAAC;AACvD,QAAI;AACF,oBAAc,SAAS,KAAK,UAAU,MAAM,cAAc,CAAC;AAAA,IAC7D,QAAQ;AACN,oBAAc,SAAS,KAAK,UAAU,MAAM,MAAM,cAAc,CAAC;AAAA,IACnE;AAAA,EACF;AACF,QAAQ;AAER;AAMO,MAAM,UAAkB,YAAY;AAKpC,MAAM,eAAuB,YAAY;AAKzC,MAAM,sBAA8B,YAAY;AAMhD,MAAM,qBAA6B,GAAG,YAAY,IAAI,OAAO;",
6
+ "names": []
7
+ }
package/package.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "name": "mcp-researchpowerpack-http",
3
+ "version": "3.10.0",
4
+ "description": "The ultimate research MCP toolkit: Reddit mining, web search with CTR aggregation, AI-powered deep research, and intelligent web scraping - all in one modular package",
5
+ "type": "module",
6
+ "main": "dist/index.js",
7
+ "bin": {
8
+ "mcp-researchpowerpack-http": "dist/index.js",
9
+ "mcp-research-powerpack-http": "dist/index.js"
10
+ },
11
+ "files": [
12
+ "dist",
13
+ "!dist/tests"
14
+ ],
15
+ "scripts": {
16
+ "build": "mcp-use build",
17
+ "dev": "mcp-use dev",
18
+ "start": "mcp-use start",
19
+ "deploy": "mcp-use deploy",
20
+ "inspect": "npx @mcp-use/inspector --url http://localhost:3000/mcp",
21
+ "typecheck": "tsc --noEmit",
22
+ "test": "pnpm test:http",
23
+ "test:http": "tsx tests/http-server.ts",
24
+ "prepublishOnly": "pnpm build && pnpm typecheck"
25
+ },
26
+ "keywords": [
27
+ "mcp",
28
+ "research",
29
+ "reddit",
30
+ "serper",
31
+ "search",
32
+ "ai",
33
+ "model-context-protocol",
34
+ "deep-research",
35
+ "web-scraping",
36
+ "claude",
37
+ "anthropic",
38
+ "openrouter",
39
+ "scraping",
40
+ "research-powerpack",
41
+ "http-mcp"
42
+ ],
43
+ "author": "Yiğit Konur <yigit35@gmail.com>",
44
+ "license": "MIT",
45
+ "repository": {
46
+ "type": "git",
47
+ "url": "git+https://github.com/yigitkonur/mcp-researchpowerpack-http.git"
48
+ },
49
+ "homepage": "https://github.com/yigitkonur/mcp-researchpowerpack-http#readme",
50
+ "bugs": {
51
+ "url": "https://github.com/yigitkonur/mcp-researchpowerpack-http/issues"
52
+ },
53
+ "dependencies": {
54
+ "mcp-use": "1.22.0",
55
+ "openai": "^4.77.0",
56
+ "redis": "^5.11.0",
57
+ "turndown": "^7.2.2",
58
+ "zod": "^4.3.6"
59
+ },
60
+ "devDependencies": {
61
+ "@mcp-use/cli": "2.20.0",
62
+ "@types/node": "^22.0.0",
63
+ "@types/turndown": "^5.0.6",
64
+ "tsx": "^4.19.0",
65
+ "typescript": "^5.6.0"
66
+ },
67
+ "engines": {
68
+ "node": "^20.19.0 || >=22.12.0"
69
+ },
70
+ "publishConfig": {
71
+ "access": "public"
72
+ }
73
+ }