research-powerpack-mcp 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. package/README.md +486 -0
  2. package/dist/clients/reddit.d.ts +61 -0
  3. package/dist/clients/reddit.d.ts.map +1 -0
  4. package/dist/clients/reddit.js +179 -0
  5. package/dist/clients/reddit.js.map +1 -0
  6. package/dist/clients/research.d.ts +41 -0
  7. package/dist/clients/research.d.ts.map +1 -0
  8. package/dist/clients/research.js +77 -0
  9. package/dist/clients/research.js.map +1 -0
  10. package/dist/clients/scraper.d.ts +44 -0
  11. package/dist/clients/scraper.d.ts.map +1 -0
  12. package/dist/clients/scraper.js +171 -0
  13. package/dist/clients/scraper.js.map +1 -0
  14. package/dist/clients/search.d.ts +46 -0
  15. package/dist/clients/search.d.ts.map +1 -0
  16. package/dist/clients/search.js +91 -0
  17. package/dist/clients/search.js.map +1 -0
  18. package/dist/config/index.d.ts +59 -0
  19. package/dist/config/index.d.ts.map +1 -0
  20. package/dist/config/index.js +100 -0
  21. package/dist/config/index.js.map +1 -0
  22. package/dist/index.d.ts +3 -0
  23. package/dist/index.d.ts.map +1 -0
  24. package/dist/index.js +152 -0
  25. package/dist/index.js.map +1 -0
  26. package/dist/schemas/deep-research.d.ts +100 -0
  27. package/dist/schemas/deep-research.d.ts.map +1 -0
  28. package/dist/schemas/deep-research.js +57 -0
  29. package/dist/schemas/deep-research.js.map +1 -0
  30. package/dist/schemas/scrape-links.d.ts +38 -0
  31. package/dist/schemas/scrape-links.d.ts.map +1 -0
  32. package/dist/schemas/scrape-links.js +26 -0
  33. package/dist/schemas/scrape-links.js.map +1 -0
  34. package/dist/schemas/web-search.d.ts +24 -0
  35. package/dist/schemas/web-search.d.ts.map +1 -0
  36. package/dist/schemas/web-search.js +12 -0
  37. package/dist/schemas/web-search.js.map +1 -0
  38. package/dist/services/file-attachment.d.ts +30 -0
  39. package/dist/services/file-attachment.d.ts.map +1 -0
  40. package/dist/services/file-attachment.js +196 -0
  41. package/dist/services/file-attachment.js.map +1 -0
  42. package/dist/services/llm-processor.d.ts +19 -0
  43. package/dist/services/llm-processor.d.ts.map +1 -0
  44. package/dist/services/llm-processor.js +44 -0
  45. package/dist/services/llm-processor.js.map +1 -0
  46. package/dist/services/markdown-cleaner.d.ts +8 -0
  47. package/dist/services/markdown-cleaner.d.ts.map +1 -0
  48. package/dist/services/markdown-cleaner.js +56 -0
  49. package/dist/services/markdown-cleaner.js.map +1 -0
  50. package/dist/tools/definitions.d.ts +66 -0
  51. package/dist/tools/definitions.d.ts.map +1 -0
  52. package/dist/tools/definitions.js +125 -0
  53. package/dist/tools/definitions.js.map +1 -0
  54. package/dist/tools/reddit.d.ts +10 -0
  55. package/dist/tools/reddit.d.ts.map +1 -0
  56. package/dist/tools/reddit.js +105 -0
  57. package/dist/tools/reddit.js.map +1 -0
  58. package/dist/tools/research.d.ts +14 -0
  59. package/dist/tools/research.d.ts.map +1 -0
  60. package/dist/tools/research.js +126 -0
  61. package/dist/tools/research.js.map +1 -0
  62. package/dist/tools/scrape.d.ts +14 -0
  63. package/dist/tools/scrape.d.ts.map +1 -0
  64. package/dist/tools/scrape.js +111 -0
  65. package/dist/tools/scrape.js.map +1 -0
  66. package/dist/tools/search.d.ts +14 -0
  67. package/dist/tools/search.d.ts.map +1 -0
  68. package/dist/tools/search.js +121 -0
  69. package/dist/tools/search.js.map +1 -0
  70. package/dist/utils/errors.d.ts +8 -0
  71. package/dist/utils/errors.d.ts.map +1 -0
  72. package/dist/utils/errors.js +30 -0
  73. package/dist/utils/errors.js.map +1 -0
  74. package/dist/utils/markdown-formatter.d.ts +5 -0
  75. package/dist/utils/markdown-formatter.d.ts.map +1 -0
  76. package/dist/utils/markdown-formatter.js +15 -0
  77. package/dist/utils/markdown-formatter.js.map +1 -0
  78. package/dist/utils/url-aggregator.d.ts +55 -0
  79. package/dist/utils/url-aggregator.d.ts.map +1 -0
  80. package/dist/utils/url-aggregator.js +246 -0
  81. package/dist/utils/url-aggregator.js.map +1 -0
  82. package/package.json +56 -0
@@ -0,0 +1,125 @@
+ /**
+  * MCP Tool Definitions
+  * Extracted from index.ts for cleaner separation
+  */
+ import { zodToJsonSchema } from 'zod-to-json-schema';
+ import { deepResearchParamsSchema } from '../schemas/deep-research.js';
+ import { scrapeLinksParamsSchema } from '../schemas/scrape-links.js';
+ import { webSearchParamsSchema } from '../schemas/web-search.js';
+ export const TOOLS = [
+   // === REDDIT TOOLS ===
+   {
+     name: 'search_reddit',
+     description: `Search Reddit via Google (10 results/query). MUST call get_reddit_post afterwards. Supports: intitle:, "exact", OR, -exclude. Auto-adds site:reddit.com. Call this tool first to find relevant posts, then call get_reddit_post with the URLs you find. Provide as many distinct, relevant queries as possible (max 10) so the limit is used effectively, as in the example. Only use date_after if recent content is needed.`,
+     inputSchema: {
+       type: 'object',
+       properties: {
+         queries: {
+           type: 'array',
+           items: { type: 'string' },
+           description: 'Distinct queries (max 10). Maximize the count for multiple perspectives. e.g.: ["best IDE 2025", "best AI features on IDEs", "best IDE for Python", "top alternatives to vscode", "top alternatives to intitle:cursor -windsurf", "intitle:comparison of top IDEs", "new IDEs like intitle:zed"]',
+         },
+         date_after: {
+           type: 'string',
+           description: 'Filter results after date (YYYY-MM-DD). Optional.',
+         },
+       },
+       required: ['queries'],
+     },
+   },
+   {
+     name: 'get_reddit_post',
+     description: `**Fetch Reddit posts with smart comment allocation (2-50 posts supported).**
+
+ **SMART COMMENT BUDGET:** 1,000 comments distributed across all posts automatically.
+ - 2 posts: ~500 comments/post (deep dive)
+ - 10 posts: 100 comments/post
+ - 50 posts: 20 comments/post (quick scan)
+
+ **PARAMETERS:**
+ - \`urls\`: 2-50 Reddit post URLs. More posts = broader community perspective.
+ - \`fetch_comments\`: Set to false for post-only queries (faster). Default: true.
+ - \`max_comments\`: Override auto-allocation if needed.
+
+ **USE:** After search_reddit. Maximize post count for research breadth. Comment allocation is automatic and optimized.`,
+     inputSchema: {
+       type: 'object',
+       properties: {
+         urls: {
+           type: 'array',
+           items: { type: 'string' },
+           description: 'Reddit URLs (2-50). More posts = broader community perspective.',
+         },
+         fetch_comments: {
+           type: 'boolean',
+           description: 'Fetch comments? Set false for a quick post overview. Default: true',
+           default: true,
+         },
+         max_comments: {
+           type: 'number',
+           description: 'Override auto-allocation. Leave empty for smart allocation.',
+           default: 100,
+         },
+       },
+       required: ['urls'],
+     },
+   },
+   // === DEEP RESEARCH TOOL ===
+   {
+     name: 'deep_research',
+     description: `**Batch deep research (2-10 questions) with dynamic token allocation.**
+
+ **TOKEN BUDGET:** 32,000 tokens distributed across all questions:
+ - 2 questions: 16,000 tokens/question (deep dive)
+ - 5 questions: 6,400 tokens/question (balanced)
+ - 10 questions: 3,200 tokens/question (rapid multi-topic)
+
+ **WHEN TO USE:**
+ - Need multi-perspective analysis on related topics
+ - Researching a domain from multiple angles
+ - Validating understanding across different aspects
+ - Comparing approaches/technologies side-by-side
+
+ **EACH QUESTION SHOULD INCLUDE:**
+ - Topic & context (what decision it informs)
+ - Your current understanding (to fill gaps)
+ - Specific sub-questions (2-5 per topic)
+
+ **USE:** Maximize question count for comprehensive coverage. All questions run in parallel. Group related questions for coherent research.`,
+     inputSchema: zodToJsonSchema(deepResearchParamsSchema, { $refStrategy: 'none' }),
+   },
+   // === SCRAPE LINKS TOOL ===
+   {
+     name: 'scrape_links',
+     description: `**Universal URL content extraction (3-50 URLs) with dynamic token allocation.**
+
+ **TOKEN ALLOCATION:** 32,000 tokens distributed across all URLs automatically.
+ - 3 URLs: ~10,666 tokens/URL (deep extraction)
+ - 10 URLs: 3,200 tokens/URL (detailed)
+ - 50 URLs: 640 tokens/URL (high-level scan)
+
+ **AUTOMATIC FALLBACK:** Basic → JavaScript → JavaScript+US geo-targeting.
+
+ **AI EXTRACTION:** Set use_llm=true with what_to_extract for intelligent filtering. Extraction is concise + comprehensive (high info density).
+
+ **BATCHING:** Max 30 concurrent requests. 50 URLs = [30] then [20] batches.
+
+ **USE:** Provide 3-50 URLs. More URLs = broader coverage, fewer tokens per URL. Choose based on research scope. Maximize URL count for comprehensive research.`,
+     inputSchema: zodToJsonSchema(scrapeLinksParamsSchema, { $refStrategy: 'none' }),
+   },
+   // === WEB SEARCH TOOL ===
+   {
+     name: 'web_search',
+     description: `**Batch web search** using Google via the SERPER API. Search up to 100 keywords in parallel and get the top 10 results per keyword with snippets, links, and related searches.
+
+ **FEATURES:**
+ - Supports Google search operators (site:, -exclusion, "exact phrase", filetype:)
+ - Returns clickable markdown links with snippets
+ - Provides related search suggestions
+ - Identifies frequently appearing URLs across queries
+
+ **USE:** For research tasks requiring multiple perspectives. Use distinct keywords to maximize coverage. Follow up with scrape_links to extract full content from promising URLs.`,
+     inputSchema: zodToJsonSchema(webSearchParamsSchema, { $refStrategy: 'none' }),
+   },
+ ];
+ //# sourceMappingURL=definitions.js.map
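`TOOLS` is the array a Model Context Protocol server returns from a `tools/list` request. For context, here is a minimal wiring sketch assuming the standard `@modelcontextprotocol/sdk` server API; the package's actual server setup lives in `dist/index.js`, which is not shown in this excerpt and may differ:

```ts
// Sketch only: how a TOOLS array like the one above is typically exposed.
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js';
import { TOOLS } from './tools/definitions.js';

const server = new Server(
  { name: 'research-powerpack-mcp', version: '3.0.0' },
  { capabilities: { tools: {} } }
);

// Answer tools/list with the static definitions; the Zod schemas were already
// converted to JSON Schema by zodToJsonSchema when the module loaded.
server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools: TOOLS }));
```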
@@ -0,0 +1 @@
+ {"version":3,"file":"definitions.js","sourceRoot":"","sources":["../../src/tools/definitions.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,wBAAwB,EAAE,MAAM,6BAA6B,CAAC;AACvE,OAAO,EAAE,uBAAuB,EAAE,MAAM,4BAA4B,CAAC;AACrE,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,MAAM,CAAC,MAAM,KAAK,GAAG;IACnB,uBAAuB;IACvB;QACE,IAAI,EAAE,eAAe;QACrB,WAAW,EAAE,8aAA8a;QAC3b,WAAW,EAAE;YACX,IAAI,EAAE,QAAiB;YACvB,UAAU,EAAE;gBACV,OAAO,EAAE;oBACP,IAAI,EAAE,OAAO;oBACb,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;oBACzB,WAAW,EAAE,2RAA2R;iBACzS;gBACD,UAAU,EAAE;oBACV,IAAI,EAAE,QAAQ;oBACd,WAAW,EAAE,mDAAmD;iBACjE;aACF;YACD,QAAQ,EAAE,CAAC,SAAS,CAAC;SACtB;KACF;IACD;QACE,IAAI,EAAE,iBAAiB;QACvB,WAAW,EAAE;;;;;;;;;;;;uHAYsG;QACnH,WAAW,EAAE;YACX,IAAI,EAAE,QAAiB;YACvB,UAAU,EAAE;gBACV,IAAI,EAAE;oBACJ,IAAI,EAAE,OAAO;oBACb,KAAK,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;oBACzB,WAAW,EAAE,iEAAiE;iBAC/E;gBACD,cAAc,EAAE;oBACd,IAAI,EAAE,SAAS;oBACf,WAAW,EAAE,kEAAkE;oBAC/E,OAAO,EAAE,IAAI;iBACd;gBACD,YAAY,EAAE;oBACZ,IAAI,EAAE,QAAQ;oBACd,WAAW,EAAE,6DAA6D;oBAC1E,OAAO,EAAE,GAAG;iBACb;aACF;YACD,QAAQ,EAAE,CAAC,MAAM,CAAC;SACnB;KACF;IAED,6BAA6B;IAC7B;QACE,IAAI,EAAE,eAAe;QACrB,WAAW,EAAE;;;;;;;;;;;;;;;;;;2IAkB0H;QACvI,WAAW,EAAE,eAAe,CAAC,wBAAwB,EAAE,EAAE,YAAY,EAAE,MAAM,EAAE,CAAC;KACjF;IAED,4BAA4B;IAC5B;QACE,IAAI,EAAE,cAAc;QACpB,WAAW,EAAE;;;;;;;;;;;;;+JAa8I;QAC3J,WAAW,EAAE,eAAe,CAAC,uBAAuB,EAAE,EAAE,YAAY,EAAE,MAAM,EAAE,CAAC;KAChF;IAED,0BAA0B;IAC1B;QACE,IAAI,EAAE,YAAY;QAClB,WAAW,EAAE;;;;;;;;kLAQiK;QAC9K,WAAW,EAAE,eAAe,CAAC,qBAAqB,EAAE,EAAE,YAAY,EAAE,MAAM,EAAE,CAAC;KAC9E;CACF,CAAC"}
@@ -0,0 +1,10 @@
+ /**
+  * Reddit Tools - Search and Fetch
+  */
+ export declare function handleSearchReddit(queries: string[], apiKey: string, dateAfter?: string): Promise<string>;
+ export interface GetRedditPostsOptions {
+   fetchComments?: boolean;
+   maxCommentsOverride?: number;
+ }
+ export declare function handleGetRedditPosts(urls: string[], clientId: string, clientSecret: string, maxComments?: number, options?: GetRedditPostsOptions): Promise<string>;
+ //# sourceMappingURL=reddit.d.ts.map
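The declarations above show that credentials are passed in as plain arguments rather than read from the environment inside the handlers. A hypothetical call site (the environment variable names and URLs are illustrative placeholders, not taken from this package):

```ts
// Hypothetical usage of the declared Reddit handlers.
import { handleSearchReddit, handleGetRedditPosts } from './tools/reddit.js';

const searchMarkdown = await handleSearchReddit(
  ['best IDE 2025', 'top alternatives to vscode'],
  process.env.SERPER_API_KEY ?? '', // assumed Serper key, per the Google-backed search client
  '2025-01-01'                      // optional dateAfter filter (YYYY-MM-DD)
);

const postsMarkdown = await handleGetRedditPosts(
  [
    'https://www.reddit.com/r/example/comments/abc123/post_one/',
    'https://www.reddit.com/r/example/comments/def456/post_two/',
  ],
  process.env.REDDIT_CLIENT_ID ?? '',
  process.env.REDDIT_CLIENT_SECRET ?? '',
  undefined,              // maxComments: leave the default so smart allocation applies
  { fetchComments: true } // GetRedditPostsOptions
);
```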
@@ -0,0 +1 @@
+ {"version":3,"file":"reddit.d.ts","sourceRoot":"","sources":["../../src/tools/reddit.ts"],"names":[],"mappings":"AAAA;;GAEG;AA+CH,wBAAsB,kBAAkB,CACtC,OAAO,EAAE,MAAM,EAAE,EACjB,MAAM,EAAE,MAAM,EACd,SAAS,CAAC,EAAE,MAAM,GACjB,OAAO,CAAC,MAAM,CAAC,CAqBjB;AAMD,MAAM,WAAW,qBAAqB;IACpC,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,mBAAmB,CAAC,EAAE,MAAM,CAAC;CAC9B;AAED,wBAAsB,oBAAoB,CACxC,IAAI,EAAE,MAAM,EAAE,EACd,QAAQ,EAAE,MAAM,EAChB,YAAY,EAAE,MAAM,EACpB,WAAW,SAAM,EACjB,OAAO,GAAE,qBAA0B,GAClC,OAAO,CAAC,MAAM,CAAC,CAgDjB"}
@@ -0,0 +1,105 @@
+ /**
+  * Reddit Tools - Search and Fetch
+  */
+ import { SearchClient } from '../clients/search.js';
+ import { RedditClient, calculateCommentAllocation } from '../clients/reddit.js';
+ import { REDDIT } from '../config/index.js';
+ // ============================================================================
+ // Formatters
+ // ============================================================================
+ function formatComments(comments) {
+   let md = '';
+   for (const c of comments) {
+     const indent = ' '.repeat(c.depth);
+     const op = c.isOP ? ' **[OP]**' : '';
+     const score = c.score >= 0 ? `+${c.score}` : `${c.score}`;
+     md += `${indent}- **u/${c.author}**${op} _(${score})_\n`;
+     const bodyLines = c.body.split('\n').map(line => `${indent} ${line}`).join('\n');
+     md += `${bodyLines}\n\n`;
+   }
+   return md;
+ }
+ function formatPost(result, fetchComments) {
+   const { post, comments, allocatedComments } = result;
+   let md = `## ${post.title}\n\n`;
+   md += `**r/${post.subreddit}** • u/${post.author} • ⬆️ ${post.score} • 💬 ${post.commentCount} comments\n`;
+   md += `🔗 ${post.url}\n\n`;
+   if (post.body) {
+     md += `### Post Content\n\n${post.body}\n\n`;
+   }
+   if (fetchComments && comments.length > 0) {
+     md += `### Top Comments (${comments.length}/${post.commentCount} shown, allocated: ${allocatedComments})\n\n`;
+     md += formatComments(comments);
+   }
+   else if (!fetchComments) {
+     md += `_Comments not fetched (fetch_comments=false)_\n\n`;
+   }
+   return md;
+ }
+ // ============================================================================
+ // Search Reddit Handler
+ // ============================================================================
+ export async function handleSearchReddit(queries, apiKey, dateAfter) {
+   const limited = queries.slice(0, 10);
+   const client = new SearchClient(apiKey);
+   const results = await client.searchRedditMultiple(limited, dateAfter);
+   let md = '';
+   for (const [query, items] of results) {
+     md += `## 🔍 "${query}"${dateAfter ? ` (after ${dateAfter})` : ''}\n\n`;
+     if (items.length === 0) {
+       md += '_No results found_\n\n';
+       continue;
+     }
+     for (let i = 0; i < items.length; i++) {
+       const r = items[i];
+       const dateStr = r.date ? ` • 📅 ${r.date}` : '';
+       md += `**${i + 1}. ${r.title}**${dateStr}\n`;
+       md += `${r.url}\n`;
+       md += `> ${r.snippet}\n\n`;
+     }
+   }
+   return md.trim();
+ }
+ export async function handleGetRedditPosts(urls, clientId, clientSecret, maxComments = 100, options = {}) {
+   const { fetchComments = true, maxCommentsOverride } = options;
+   if (urls.length < REDDIT.MIN_POSTS) {
+     return `# ❌ Error\n\nMinimum ${REDDIT.MIN_POSTS} Reddit posts required. Received: ${urls.length}`;
+   }
+   if (urls.length > REDDIT.MAX_POSTS) {
+     return `# ❌ Error\n\nMaximum ${REDDIT.MAX_POSTS} Reddit posts allowed. Received: ${urls.length}. Please remove ${urls.length - REDDIT.MAX_POSTS} URL(s) and retry.`;
+   }
+   const allocation = calculateCommentAllocation(urls.length);
+   const commentsPerPost = fetchComments ? (maxCommentsOverride || allocation.perPostCapped) : 0;
+   const totalBatches = Math.ceil(urls.length / REDDIT.BATCH_SIZE);
+   const client = new RedditClient(clientId, clientSecret);
+   const batchResult = await client.batchGetPosts(urls, commentsPerPost, fetchComments);
+   const results = batchResult.results;
+   let md = `# Reddit Posts (${urls.length} posts)\n\n`;
+   if (fetchComments) {
+     md += `**Comment Allocation:** ${commentsPerPost} comments/post (${urls.length} posts, ${REDDIT.MAX_COMMENT_BUDGET} total budget)\n`;
+   }
+   else {
+     md += `**Comments:** Not fetched (fetch_comments=false)\n`;
+   }
+   md += `**Status:** 📦 ${totalBatches} batch(es) processed\n\n`;
+   md += `---\n\n`;
+   let successful = 0;
+   let failed = 0;
+   for (const [url, result] of results) {
+     if (result instanceof Error) {
+       failed++;
+       md += `## ❌ Failed: ${url}\n\n_${result.message}_\n\n---\n\n`;
+     }
+     else {
+       successful++;
+       md += formatPost(result, fetchComments);
+       md += '\n---\n\n';
+     }
+   }
+   md += `\n**Summary:** ✅ ${successful} successful | ❌ ${failed} failed`;
+   if (batchResult.rateLimitHits > 0) {
+     md += ` | ⚠️ ${batchResult.rateLimitHits} rate limit retries`;
+   }
+   return md.trim();
+ }
+ //# sourceMappingURL=reddit.js.map
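`calculateCommentAllocation` is imported from `clients/reddit.js`, which this excerpt does not include. A minimal sketch consistent with the `perPostCapped` field used above and with the numbers in the `get_reddit_post` description (1,000-comment budget; 2 posts: ~500/post); both constants are assumptions read off that description:

```ts
// Sketch only: the real implementation lives in clients/reddit.js.
const MAX_COMMENT_BUDGET = 1000; // assumed to mirror REDDIT.MAX_COMMENT_BUDGET
const PER_POST_CAP = 500;        // assumed cap implied by the 2-post example

export function calculateCommentAllocation(postCount: number): {
  perPost: number;
  perPostCapped: number;
} {
  // Even split of the budget, capped so a tiny batch cannot monopolize it.
  const perPost = Math.floor(MAX_COMMENT_BUDGET / postCount);
  return { perPost, perPostCapped: Math.min(perPost, PER_POST_CAP) };
}

// 2 posts -> 500/post, 10 -> 100, 50 -> 20, matching the description table.
```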
@@ -0,0 +1 @@
+ {"version":3,"file":"reddit.js","sourceRoot":"","sources":["../../src/tools/reddit.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,sBAAsB,CAAC;AACpD,OAAO,EAAE,YAAY,EAAE,0BAA0B,EAAiC,MAAM,sBAAsB,CAAC;AAC/G,OAAO,EAAE,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAE5C,+EAA+E;AAC/E,aAAa;AACb,+EAA+E;AAE/E,SAAS,cAAc,CAAC,QAAmB;IACzC,IAAI,EAAE,GAAG,EAAE,CAAC;IACZ,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE,CAAC;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;QACpC,MAAM,EAAE,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE,CAAC;QACrC,MAAM,KAAK,GAAG,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;QAC1D,EAAE,IAAI,GAAG,MAAM,SAAS,CAAC,CAAC,MAAM,KAAK,EAAE,MAAM,KAAK,MAAM,CAAC;QACzD,MAAM,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,GAAG,MAAM,KAAK,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAClF,EAAE,IAAI,GAAG,SAAS,MAAM,CAAC;IAC3B,CAAC;IACD,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,SAAS,UAAU,CAAC,MAAkB,EAAE,aAAsB;IAC5D,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,iBAAiB,EAAE,GAAG,MAAM,CAAC;IACrD,IAAI,EAAE,GAAG,MAAM,IAAI,CAAC,KAAK,MAAM,CAAC;IAChC,EAAE,IAAI,OAAO,IAAI,CAAC,SAAS,UAAU,IAAI,CAAC,MAAM,SAAS,IAAI,CAAC,KAAK,SAAS,IAAI,CAAC,YAAY,aAAa,CAAC;IAC3G,EAAE,IAAI,MAAM,IAAI,CAAC,GAAG,MAAM,CAAC;IAE3B,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;QACd,EAAE,IAAI,uBAAuB,IAAI,CAAC,IAAI,MAAM,CAAC;IAC/C,CAAC;IAED,IAAI,aAAa,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACzC,EAAE,IAAI,qBAAqB,QAAQ,CAAC,MAAM,IAAI,IAAI,CAAC,YAAY,sBAAsB,iBAAiB,OAAO,CAAC;QAC9G,EAAE,IAAI,cAAc,CAAC,QAAQ,CAAC,CAAC;IACjC,CAAC;SAAM,IAAI,CAAC,aAAa,EAAE,CAAC;QAC1B,EAAE,IAAI,mDAAmD,CAAC;IAC5D,CAAC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,+EAA+E;AAC/E,wBAAwB;AACxB,+EAA+E;AAE/E,MAAM,CAAC,KAAK,UAAU,kBAAkB,CACtC,OAAiB,EACjB,MAAc,EACd,SAAkB;IAElB,MAAM,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;IACrC,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,MAAM,CAAC,CAAC;IACxC,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,oBAAoB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;IAEtE,IAAI,EAAE,GAAG,EAAE,CAAC;IACZ,KAAK,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,IAAI,OAAO,EAAE,CAAC;QACrC,EAAE,IAAI,UAAU,KAAK,IAAI,SAAS,CAAC,CAAC,CAAC,WAAW,SAAS,GAAG,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC;QACxE,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACvB,EAAE,IAAI,wBAAwB,CAAC;YAC/B,SAAS;QACX,CAAC;QACD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACtC,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACnB,MAAM,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;YAChD,EAAE,IAAI,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK,KAAK,OAAO,IAAI,CAAC;YAC7C,EAAE,IAAI,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC;YACnB,EAAE,IAAI,KAAK,CAAC,CAAC,OAAO,MAAM,CAAC;QAC7B,CAAC;IACH,CAAC;IACD,OAAO,EAAE,CAAC,IAAI,EAAE,CAAC;AACnB,CAAC;AAWD,MAAM,CAAC,KAAK,UAAU,oBAAoB,CACxC,IAAc,EACd,QAAgB,EAChB,YAAoB,EACpB,WAAW,GAAG,GAAG,EACjB,UAAiC,EAAE;IAEnC,MAAM,EAAE,aAAa,GAAG,IAAI,EAAE,mBAAmB,EAAE,GAAG,OAAO,CAAC;IAE9D,IAAI,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,SAAS,EAAE,CAAC;QACnC,OAAO,wBAAwB,MAAM,CAAC,SAAS,qCAAqC,IAAI,CAAC,MAAM,EAAE,CAAC;IACpG,CAAC;IACD,IAAI,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,SAAS,EAAE,CAAC;QACnC,OAAO,wBAAwB,MAAM,CAAC,SAAS,oCAAoC,IAAI,CAAC,MAAM,mBAAmB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,SAAS,oBAAoB,CAAC;IACtK,CAAC;IAED,MAAM,UAAU,GAAG,0BAA0B,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC3D,MAAM,eAAe,GAAG,aAAa,CAAC,CAAC,CAAC,CAAC,mBAAmB,IAAI,UAAU,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC9F,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC;IAEhE,MAAM,MAAM,GAAG,IAAI,YAAY,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;IACxD,MAAM,WAAW,GAAG,MAAM,MAAM,CAAC,aAAa,CAAC,IAAI,EAAE,eAAe,EAAE,aAAa,CAAC,CAAC;IACrF,MAAM,OAAO,GAAG,WAAW,CAAC,OAAO,CAAC;IAEpC,IAAI,EAAE,GAAG,mBAAmB,IAAI,CAAC,MAAM,aAAa,CAAC;IAErD,IAAI,aAAa,EAAE,CAAC;QAClB,EAAE,IAAI,2BAA2B,eAAe,mBAAmB,IAAI,CAAC,MAAM,WAAW,MAAM,CAAC,kBAAkB,kBAAkB,CAAC;IACvI,CAAC;SAAM,CAAC;QACN,EAAE,IAAI,oDAAoD,CAAC;IAC7D,CAAC;IACD,EAAE,IAAI,kBAAkB,YAAY,0BAA0B,CAAC;IAC/D,EAAE,IAAI,SAAS,CAAC;IAEhB,IAAI,UAAU,GAAG,CAAC,CAAC;IACnB,IAAI,MAAM,GAAG,CAAC,CAAC;IAEf,KAAK,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,IAAI,OAAO,EAAE,CAAC;QACpC,IAAI,MAAM,YAAY,KAAK,EAAE,CAAC;YAC5B,MAAM,EAAE,CAAC;YACT,EAAE,IAAI,gBAAgB,GAAG,QAAQ,MAAM,CAAC,OAAO,cAAc,CAAC;QAChE,CAAC;aAAM,CAAC;YACN,UAAU,EAAE,CAAC;YACb,EAAE,IAAI,UAAU,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC;YACxC,EAAE,IAAI,WAAW,CAAC;QACpB,CAAC;IACH,CAAC;IAED,EAAE,IAAI,oBAAoB,UAAU,mBAAmB,MAAM,SAAS,CAAC;IACvE,IAAI,WAAW,CAAC,aAAa,GAAG,CAAC,EAAE,CAAC;QAClC,EAAE,IAAI,SAAS,WAAW,CAAC,aAAa,qBAAqB,CAAC;IAChE,CAAC;IAED,OAAO,EAAE,CAAC,IAAI,EAAE,CAAC;AACnB,CAAC"}
@@ -0,0 +1,14 @@
+ /**
+  * Deep Research Tool Handler - Batch processing with dynamic token allocation
+  */
+ import type { DeepResearchParams } from '../schemas/deep-research.js';
+ interface ResearchOptions {
+   sessionId?: string;
+   logger?: (level: 'info' | 'error' | 'debug', message: string, sessionId: string) => Promise<void>;
+ }
+ export declare function handleDeepResearch(params: DeepResearchParams, options?: ResearchOptions): Promise<{
+   content: string;
+   structuredContent: object;
+ }>;
+ export {};
+ //# sourceMappingURL=research.d.ts.map
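Nothing is logged unless both `sessionId` and `logger` are supplied. A minimal logger satisfying the declared signature; writing to stderr is an assumption (it keeps stdout free for protocol traffic in stdio-based MCP hosts), not something this package prescribes:

```ts
// Minimal logger matching the ResearchOptions['logger'] signature above.
const logger = async (
  level: 'info' | 'error' | 'debug',
  message: string,
  sessionId: string
): Promise<void> => {
  process.stderr.write(`[${new Date().toISOString()}] [${level}] [${sessionId}] ${message}\n`);
};
```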
@@ -0,0 +1 @@
+ {"version":3,"file":"research.d.ts","sourceRoot":"","sources":["../../src/tools/research.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAWtE,UAAU,eAAe;IACvB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;CACnG;AA+BD,wBAAsB,kBAAkB,CACtC,MAAM,EAAE,kBAAkB,EAC1B,OAAO,GAAE,eAAoB,GAC5B,OAAO,CAAC;IAAE,OAAO,EAAE,MAAM,CAAC;IAAC,iBAAiB,EAAE,MAAM,CAAA;CAAE,CAAC,CA2GzD"}
@@ -0,0 +1,126 @@
+ /**
+  * Deep Research Tool Handler - Batch processing with dynamic token allocation
+  */
+ import { ResearchClient } from '../clients/research.js';
+ import { FileAttachmentService } from '../services/file-attachment.js';
+ import { RESEARCH } from '../config/index.js';
+ import { createSimpleError } from '../utils/errors.js';
+ // Constants
+ const TOTAL_TOKEN_BUDGET = 32000;
+ const MIN_QUESTIONS = 2;
+ const MAX_QUESTIONS = 10;
+ function calculateTokenAllocation(questionCount) {
+   return Math.floor(TOTAL_TOKEN_BUDGET / questionCount);
+ }
+ const SYSTEM_PROMPT = `You are an expert research consultant. Provide evidence-based, multi-perspective analysis.
+
+ METHODOLOGY:
+ - SOURCE DIVERSITY: Official docs, papers, blogs, case studies
+ - CURRENT + HISTORICAL: Latest developments AND context
+ - MULTIPLE PERSPECTIVES: Different approaches with pros/cons
+ - EVIDENCE-BASED: Claims backed by citations
+
+ FORMAT (high info density):
+ - CURRENT STATE: Status quo, what we know
+ - KEY INSIGHTS: Most important findings with evidence
+ - TRADE-OFFS: Competing priorities honestly analyzed
+ - PRACTICAL IMPLICATIONS: Real-world application
+ - WHAT'S CHANGING: Recent developments
+
+ Be dense with insights, light on filler. Use examples and citations.`;
+ export async function handleDeepResearch(params, options = {}) {
+   const { sessionId, logger } = options;
+   const questions = params.questions;
+   // Validation
+   if (questions.length < MIN_QUESTIONS) {
+     return {
+       content: `# ❌ Error\n\nMinimum ${MIN_QUESTIONS} research questions required. Received: ${questions.length}`,
+       structuredContent: { error: true, message: `Minimum ${MIN_QUESTIONS} questions required` },
+     };
+   }
+   if (questions.length > MAX_QUESTIONS) {
+     return {
+       content: `# ❌ Error\n\nMaximum ${MAX_QUESTIONS} research questions allowed. Received: ${questions.length}`,
+       structuredContent: { error: true, message: `Maximum ${MAX_QUESTIONS} questions allowed` },
+     };
+   }
+   const tokensPerQuestion = calculateTokenAllocation(questions.length);
+   if (sessionId && logger) {
+     await logger('info', `Starting batch research: ${questions.length} questions, ${tokensPerQuestion.toLocaleString()} tokens/question`, sessionId);
+   }
+   const client = new ResearchClient();
+   const fileService = new FileAttachmentService();
+   const results = [];
+   // Process all questions in parallel
+   const researchPromises = questions.map(async (q, index) => {
+     try {
+       // Enhance question with file attachments if present
+       let enhancedQuestion = q.question;
+       if (q.file_attachments && q.file_attachments.length > 0) {
+         const attachmentsMarkdown = await fileService.formatAttachments(q.file_attachments);
+         enhancedQuestion = q.question + attachmentsMarkdown;
+       }
+       const response = await client.research({
+         question: enhancedQuestion,
+         systemPrompt: SYSTEM_PROMPT,
+         reasoningEffort: RESEARCH.REASONING_EFFORT,
+         maxSearchResults: Math.min(RESEARCH.MAX_URLS, 20),
+         maxTokens: tokensPerQuestion,
+       });
+       return {
+         question: q.question,
+         content: response.content,
+         success: true,
+         tokensUsed: response.usage?.totalTokens,
+       };
+     }
+     catch (error) {
+       const simpleError = createSimpleError(error);
+       return {
+         question: q.question,
+         content: '',
+         success: false,
+         error: simpleError.message,
+       };
+     }
+   });
+   const allResults = await Promise.all(researchPromises);
+   results.push(...allResults);
+   // Build markdown output
+   const successful = results.filter(r => r.success);
+   const failed = results.filter(r => !r.success);
+   const totalTokens = successful.reduce((sum, r) => sum + (r.tokensUsed || 0), 0);
+   let markdown = `# Deep Research Results (${questions.length} questions)\n\n`;
+   markdown += `**Token Allocation:** ${tokensPerQuestion.toLocaleString()} tokens/question (${questions.length} questions, ${TOTAL_TOKEN_BUDGET.toLocaleString()} total budget)\n`;
+   markdown += `**Status:** ✅ ${successful.length} successful | ❌ ${failed.length} failed | 📊 ${totalTokens.toLocaleString()} tokens used\n\n`;
+   markdown += `---\n\n`;
+   for (let i = 0; i < results.length; i++) {
+     const r = results[i];
+     markdown += `## Question ${i + 1}: ${r.question.substring(0, 100)}${r.question.length > 100 ? '...' : ''}\n\n`;
+     if (r.success) {
+       markdown += r.content + '\n\n';
+       if (r.tokensUsed) {
+         markdown += `_Tokens used: ${r.tokensUsed.toLocaleString()}_\n\n`;
+       }
+     }
+     else {
+       markdown += `**❌ Error:** ${r.error}\n\n`;
+     }
+     markdown += `---\n\n`;
+   }
+   if (sessionId && logger) {
+     await logger('info', `Research completed: ${successful.length}/${questions.length} successful, ${totalTokens.toLocaleString()} tokens`, sessionId);
+   }
+   return {
+     content: markdown.trim(),
+     structuredContent: {
+       totalQuestions: questions.length,
+       successful: successful.length,
+       failed: failed.length,
+       tokensPerQuestion,
+       totalTokensUsed: totalTokens,
+       results,
+     },
+   };
+ }
+ //# sourceMappingURL=research.js.map
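A hypothetical invocation of the handler above. The `questions` shape follows the fields the code reads (`q.question`, `q.file_attachments`); the authoritative shape is `deepResearchParamsSchema` in `schemas/deep-research.js`, which this excerpt does not show:

```ts
// Two questions -> Math.floor(32000 / 2) = 16,000 tokens per question.
import { handleDeepResearch } from './tools/research.js';

const { content, structuredContent } = await handleDeepResearch(
  {
    questions: [
      { question: 'Topic: MCP transports. Context: choosing one for a CLI host. Sub-questions: stdio vs HTTP? auth story?' },
      { question: 'Topic: JSON Schema generation. How does $refStrategy affect zod-to-json-schema output?' },
    ],
  },
  {
    sessionId: 'session-1',
    logger: async (level, message, sessionId) => {
      console.error(`[${level}] [${sessionId}] ${message}`);
    },
  }
);
```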
@@ -0,0 +1 @@
+ {"version":3,"file":"research.js","sourceRoot":"","sources":["../../src/tools/research.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,EAAE,cAAc,EAAyB,MAAM,wBAAwB,CAAC;AAC/E,OAAO,EAAE,qBAAqB,EAAE,MAAM,gCAAgC,CAAC;AACvE,OAAO,EAAE,QAAQ,EAAE,MAAM,oBAAoB,CAAC;AAC9C,OAAO,EAAE,iBAAiB,EAAE,MAAM,oBAAoB,CAAC;AAEvD,YAAY;AACZ,MAAM,kBAAkB,GAAG,KAAK,CAAC;AACjC,MAAM,aAAa,GAAG,CAAC,CAAC;AACxB,MAAM,aAAa,GAAG,EAAE,CAAC;AAezB,SAAS,wBAAwB,CAAC,aAAqB;IACrD,OAAO,IAAI,CAAC,KAAK,CAAC,kBAAkB,GAAG,aAAa,CAAC,CAAC;AACxD,CAAC;AAED,MAAM,aAAa,GAAG;;;;;;;;;;;;;;;qEAe+C,CAAC;AAEtE,MAAM,CAAC,KAAK,UAAU,kBAAkB,CACtC,MAA0B,EAC1B,UAA2B,EAAE;IAE7B,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC;IACtC,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,CAAC;IAEnC,aAAa;IACb,IAAI,SAAS,CAAC,MAAM,GAAG,aAAa,EAAE,CAAC;QACrC,OAAO;YACL,OAAO,EAAE,wBAAwB,aAAa,2CAA2C,SAAS,CAAC,MAAM,EAAE;YAC3G,iBAAiB,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,aAAa,qBAAqB,EAAE;SAC3F,CAAC;IACJ,CAAC;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,aAAa,EAAE,CAAC;QACrC,OAAO;YACL,OAAO,EAAE,wBAAwB,aAAa,0CAA0C,SAAS,CAAC,MAAM,EAAE;YAC1G,iBAAiB,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,aAAa,oBAAoB,EAAE;SAC1F,CAAC;IACJ,CAAC;IAED,MAAM,iBAAiB,GAAG,wBAAwB,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAErE,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;QACxB,MAAM,MAAM,CAAC,MAAM,EAAE,4BAA4B,SAAS,CAAC,MAAM,eAAe,iBAAiB,CAAC,cAAc,EAAE,kBAAkB,EAAE,SAAS,CAAC,CAAC;IACnJ,CAAC;IAED,MAAM,MAAM,GAAG,IAAI,cAAc,EAAE,CAAC;IACpC,MAAM,WAAW,GAAG,IAAI,qBAAqB,EAAE,CAAC;IAChD,MAAM,OAAO,GAAqB,EAAE,CAAC;IAErC,oCAAoC;IACpC,MAAM,gBAAgB,GAAG,SAAS,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,EAAE,KAAK,EAA2B,EAAE;QACjF,IAAI,CAAC;YACH,oDAAoD;YACpD,IAAI,gBAAgB,GAAG,CAAC,CAAC,QAAQ,CAAC;YAClC,IAAI,CAAC,CAAC,gBAAgB,IAAI,CAAC,CAAC,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACxD,MAAM,mBAAmB,GAAG,MAAM,WAAW,CAAC,iBAAiB,CAAC,CAAC,CAAC,gBAAgB,CAAC,CAAC;gBACpF,gBAAgB,GAAG,CAAC,CAAC,QAAQ,GAAG,mBAAmB,CAAC;YACtD,CAAC;YAED,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,QAAQ,CAAC;gBACrC,QAAQ,EAAE,gBAAgB;gBAC1B,YAAY,EAAE,aAAa;gBAC3B,eAAe,EAAE,QAAQ,CAAC,gBAAgB;gBAC1C,gBAAgB,EAAE,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,QAAQ,EAAE,EAAE,CAAC;gBACjD,SAAS,EAAE,iBAAiB;aAC7B,CAAC,CAAC;YAEH,OAAO;gBACL,QAAQ,EAAE,CAAC,CAAC,QAAQ;gBACpB,OAAO,EAAE,QAAQ,CAAC,OAAO;gBACzB,OAAO,EAAE,IAAI;gBACb,UAAU,EAAE,QAAQ,CAAC,KAAK,EAAE,WAAW;aACxC,CAAC;QACJ,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,WAAW,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;YAC7C,OAAO;gBACL,QAAQ,EAAE,CAAC,CAAC,QAAQ;gBACpB,OAAO,EAAE,EAAE;gBACX,OAAO,EAAE,KAAK;gBACd,KAAK,EAAE,WAAW,CAAC,OAAO;aAC3B,CAAC;QACJ,CAAC;IACH,CAAC,CAAC,CAAC;IAEH,MAAM,UAAU,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC;IACvD,OAAO,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,CAAC;IAE5B,wBAAwB;IACxB,MAAM,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;IAC/C,MAAM,WAAW,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,UAAU,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAEhF,IAAI,QAAQ,GAAG,4BAA4B,SAAS,CAAC,MAAM,iBAAiB,CAAC;IAC7E,QAAQ,IAAI,yBAAyB,iBAAiB,CAAC,cAAc,EAAE,qBAAqB,SAAS,CAAC,MAAM,eAAe,kBAAkB,CAAC,cAAc,EAAE,kBAAkB,CAAC;IACjL,QAAQ,IAAI,iBAAiB,UAAU,CAAC,MAAM,mBAAmB,MAAM,CAAC,MAAM,gBAAgB,WAAW,CAAC,cAAc,EAAE,kBAAkB,CAAC;IAC7I,QAAQ,IAAI,SAAS,CAAC;IAEtB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QACxC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACrB,QAAQ,IAAI,eAAe,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC;QAE/G,IAAI,CAAC,CAAC,OAAO,EAAE,CAAC;YACd,QAAQ,IAAI,CAAC,CAAC,OAAO,GAAG,MAAM,CAAC;YAC/B,IAAI,CAAC,CAAC,UAAU,EAAE,CAAC;gBACjB,QAAQ,IAAI,iBAAiB,CAAC,CAAC,UAAU,CAAC,cAAc,EAAE,OAAO,CAAC;YACpE,CAAC;QACH,CAAC;aAAM,CAAC;YACN,QAAQ,IAAI,gBAAgB,CAAC,CAAC,KAAK,MAAM,CAAC;QAC5C,CAAC;QAED,QAAQ,IAAI,SAAS,CAAC;IACxB,CAAC;IAED,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;QACxB,MAAM,MAAM,CAAC,MAAM,EAAE,uBAAuB,UAAU,CAAC,MAAM,IAAI,SAAS,CAAC,MAAM,gBAAgB,WAAW,CAAC,cAAc,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACrJ,CAAC;IAED,OAAO;QACL,OAAO,EAAE,QAAQ,CAAC,IAAI,EAAE;QACxB,iBAAiB,EAAE;YACjB,cAAc,EAAE,SAAS,CAAC,MAAM;YAChC,UAAU,EAAE,UAAU,CAAC,MAAM;YAC7B,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,iBAAiB;YACjB,eAAe,EAAE,WAAW;YAC5B,OAAO;SACR;KACF,CAAC;AACJ,CAAC"}
@@ -0,0 +1,14 @@
+ /**
+  * Scrape Links Tool Handler
+  */
+ import type { ScrapeLinksParams, ScrapeLinksOutput } from '../schemas/scrape-links.js';
+ interface ToolOptions {
+   sessionId?: string;
+   logger?: (level: 'info' | 'error' | 'debug', message: string, sessionId: string) => Promise<void>;
+ }
+ export declare function handleScrapeLinks(params: ScrapeLinksParams, options?: ToolOptions): Promise<{
+   content: string;
+   structuredContent: ScrapeLinksOutput;
+ }>;
+ export {};
+ //# sourceMappingURL=scrape.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"scrape.d.ts","sourceRoot":"","sources":["../../src/tools/scrape.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,4BAA4B,CAAC;AAOvF,UAAU,WAAW;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;CACnG;AAWD,wBAAsB,iBAAiB,CACrC,MAAM,EAAE,iBAAiB,EACzB,OAAO,GAAE,WAAgB,GACxB,OAAO,CAAC;IAAE,OAAO,EAAE,MAAM,CAAC;IAAC,iBAAiB,EAAE,iBAAiB,CAAA;CAAE,CAAC,CAuHpE"}
@@ -0,0 +1,111 @@
+ /**
+  * Scrape Links Tool Handler
+  */
+ import { ScraperClient } from '../clients/scraper.js';
+ import { MarkdownCleaner } from '../services/markdown-cleaner.js';
+ import { createLLMProcessor, processContentWithLLM } from '../services/llm-processor.js';
+ import { removeMetaTags } from '../utils/markdown-formatter.js';
+ import { SCRAPER } from '../config/index.js';
+ function calculateTokenAllocation(urlCount) {
+   return Math.floor(SCRAPER.MAX_TOKENS_BUDGET / urlCount);
+ }
+ function enhanceExtractionInstruction(instruction) {
+   const base = instruction || 'Extract the main content and key information from this page.';
+   return `${base}\n\n${SCRAPER.EXTRACTION_SUFFIX}`;
+ }
+ export async function handleScrapeLinks(params, options = {}) {
+   const { sessionId, logger } = options;
+   const startTime = Date.now();
+   try {
+     const tokensPerUrl = calculateTokenAllocation(params.urls.length);
+     const totalBatches = Math.ceil(params.urls.length / SCRAPER.BATCH_SIZE);
+     if (sessionId && logger) {
+       await logger('info', `Starting scrape: ${params.urls.length} URL(s), ${tokensPerUrl} tokens/URL, ${totalBatches} batch(es)`, sessionId);
+     }
+     const client = new ScraperClient();
+     const markdownCleaner = new MarkdownCleaner();
+     const llmProcessor = createLLMProcessor();
+     const enhancedInstruction = params.use_llm
+       ? enhanceExtractionInstruction(params.what_to_extract)
+       : undefined;
+     const results = await client.scrapeMultiple(params.urls, { timeout: params.timeout });
+     if (sessionId && logger) {
+       await logger('info', `Scraping complete. Processing ${results.length} results...`, sessionId);
+     }
+     let successful = 0;
+     let failed = 0;
+     let totalCredits = 0;
+     const contents = [];
+     for (let i = 0; i < results.length; i++) {
+       const result = results[i];
+       if (!result)
+         continue;
+       if (sessionId && logger) {
+         await logger('info', `[${i + 1}/${results.length}] Processing ${result.url}`, sessionId);
+       }
+       if (result.statusCode >= 200 && result.statusCode < 300) {
+         successful++;
+         totalCredits += result.credits;
+         let content = markdownCleaner.processContent(result.content);
+         if (params.use_llm && llmProcessor) {
+           if (sessionId && logger) {
+             await logger('info', `[${i + 1}/${results.length}] Applying LLM extraction (${tokensPerUrl} tokens)...`, sessionId);
+           }
+           const llmResult = await processContentWithLLM(content, { use_llm: params.use_llm, what_to_extract: enhancedInstruction, max_tokens: tokensPerUrl }, llmProcessor);
+           content = llmResult.content;
+           if (sessionId && logger) {
+             await logger('info', `[${i + 1}/${results.length}] LLM processing ${llmResult.processed ? 'complete' : 'skipped'}`, sessionId);
+           }
+         }
+         content = removeMetaTags(content);
+         contents.push(`## ${result.url}\n\n${content}`);
+       }
+       else {
+         failed++;
+         contents.push(`## ${result.url}\n\n❌ Failed to scrape: ${result.content}`);
+         if (sessionId && logger) {
+           await logger('error', `[${i + 1}/${results.length}] Failed: ${result.statusCode}`, sessionId);
+         }
+       }
+     }
+     const executionTime = Date.now() - startTime;
+     if (sessionId && logger) {
+       await logger('info', `Completed: ${successful} successful, ${failed} failed, ${totalCredits} credits used`, sessionId);
+     }
+     const allocationHeader = `**Token Allocation:** ${tokensPerUrl.toLocaleString()} tokens/URL (${params.urls.length} URLs, ${SCRAPER.MAX_TOKENS_BUDGET.toLocaleString()} total budget)`;
+     const statusHeader = `**Status:** ✅ ${successful} successful | ❌ ${failed} failed | 📦 ${totalBatches} batch(es)`;
+     const formattedContent = `# Scraped Content (${params.urls.length} URLs)\n\n${allocationHeader}\n${statusHeader}\n\n---\n\n${contents.join('\n\n---\n\n')}`;
+     const metadata = {
+       total_urls: params.urls.length,
+       successful,
+       failed,
+       total_credits: totalCredits,
+       execution_time_ms: executionTime,
+       tokens_per_url: tokensPerUrl,
+       total_token_budget: SCRAPER.MAX_TOKENS_BUDGET,
+       batches_processed: totalBatches,
+     };
+     return { content: formattedContent, structuredContent: { content: formattedContent, metadata } };
+   }
+   catch (error) {
+     const errorMessage = error instanceof Error ? error.message : String(error);
+     if (sessionId && logger) {
+       await logger('error', errorMessage, sessionId);
+     }
+     const executionTime = Date.now() - startTime;
+     return {
+       content: `# ❌ Scraping Failed\n\n${errorMessage}`,
+       structuredContent: {
+         content: `# ❌ Scraping Failed\n\n${errorMessage}`,
+         metadata: {
+           total_urls: params.urls.length,
+           successful: 0,
+           failed: params.urls.length,
+           total_credits: 0,
+           execution_time_ms: executionTime,
+         },
+       },
+     };
+   }
+ }
+ //# sourceMappingURL=scrape.js.map
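The handler above delegates concurrency to `ScraperClient.scrapeMultiple` (in `clients/scraper.js`, not shown), which enforces the "max 30 concurrent requests" rule from the tool description. A sketch of that batching rule, with the batch size of 30 assumed from the docs rather than read from `SCRAPER.BATCH_SIZE`:

```ts
// Sketch of the documented batching rule; the real logic lives in clients/scraper.js.
const BATCH_SIZE = 30; // assumption: matches the "max 30 concurrent requests" doc

function toBatches<T>(items: T[], size: number = BATCH_SIZE): T[][] {
  const batches: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    batches.push(items.slice(i, i + size));
  }
  return batches;
}

// toBatches(urls) on 50 URLs yields [30, 20]: each batch runs concurrently,
// and batches run one after another.
```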
@@ -0,0 +1 @@
+ {"version":3,"file":"scrape.js","sourceRoot":"","sources":["../../src/tools/scrape.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AACtD,OAAO,EAAE,eAAe,EAAE,MAAM,iCAAiC,CAAC;AAClE,OAAO,EAAE,kBAAkB,EAAE,qBAAqB,EAAE,MAAM,8BAA8B,CAAC;AACzF,OAAO,EAAE,cAAc,EAAE,MAAM,gCAAgC,CAAC;AAChE,OAAO,EAAE,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAO7C,SAAS,wBAAwB,CAAC,QAAgB;IAChD,OAAO,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,iBAAiB,GAAG,QAAQ,CAAC,CAAC;AAC1D,CAAC;AAED,SAAS,4BAA4B,CAAC,WAA+B;IACnE,MAAM,IAAI,GAAG,WAAW,IAAI,8DAA8D,CAAC;IAC3F,OAAO,GAAG,IAAI,OAAO,OAAO,CAAC,iBAAiB,EAAE,CAAC;AACnD,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,iBAAiB,CACrC,MAAyB,EACzB,UAAuB,EAAE;IAEzB,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC;IACtC,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IAE7B,IAAI,CAAC;QACH,MAAM,YAAY,GAAG,wBAAwB,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAClE,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAAC;QAExE,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;YACxB,MAAM,MAAM,CAAC,MAAM,EAAE,oBAAoB,MAAM,CAAC,IAAI,CAAC,MAAM,YAAY,YAAY,gBAAgB,YAAY,YAAY,EAAE,SAAS,CAAC,CAAC;QAC1I,CAAC;QAED,MAAM,MAAM,GAAG,IAAI,aAAa,EAAE,CAAC;QACnC,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;QAC9C,MAAM,YAAY,GAAG,kBAAkB,EAAE,CAAC;QAE1C,MAAM,mBAAmB,GAAG,MAAM,CAAC,OAAO;YACxC,CAAC,CAAC,4BAA4B,CAAC,MAAM,CAAC,eAAe,CAAC;YACtD,CAAC,CAAC,SAAS,CAAC;QAEd,MAAM,OAAO,GAAG,MAAM,MAAM,CAAC,cAAc,CAAC,MAAM,CAAC,IAAI,EAAE,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;QAEtF,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;YACxB,MAAM,MAAM,CAAC,MAAM,EAAE,iCAAiC,OAAO,CAAC,MAAM,aAAa,EAAE,SAAS,CAAC,CAAC;QAChG,CAAC;QAED,IAAI,UAAU,GAAG,CAAC,CAAC;QACnB,IAAI,MAAM,GAAG,CAAC,CAAC;QACf,IAAI,YAAY,GAAG,CAAC,CAAC;QACrB,MAAM,QAAQ,GAAa,EAAE,CAAC;QAE9B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACxC,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YAC1B,IAAI,CAAC,MAAM;gBAAE,SAAS;YAEtB,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;gBACxB,MAAM,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,IAAI,OAAO,CAAC,MAAM,gBAAgB,MAAM,CAAC,GAAG,EAAE,EAAE,SAAS,CAAC,CAAC;YAC3F,CAAC;YAED,IAAI,MAAM,CAAC,UAAU,IAAI,GAAG,IAAI,MAAM,CAAC,UAAU,GAAG,GAAG,EAAE,CAAC;gBACxD,UAAU,EAAE,CAAC;gBACb,YAAY,IAAI,MAAM,CAAC,OAAO,CAAC;gBAE/B,IAAI,OAAO,GAAG,eAAe,CAAC,cAAc,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;gBAE7D,IAAI,MAAM,CAAC,OAAO,IAAI,YAAY,EAAE,CAAC;oBACnC,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;wBACxB,MAAM,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,IAAI,OAAO,CAAC,MAAM,8BAA8B,YAAY,aAAa,EAAE,SAAS,CAAC,CAAC;oBACtH,CAAC;oBAED,MAAM,SAAS,GAAG,MAAM,qBAAqB,CAC3C,OAAO,EACP,EAAE,OAAO,EAAE,MAAM,CAAC,OAAO,EAAE,eAAe,EAAE,mBAAmB,EAAE,UAAU,EAAE,YAAY,EAAE,EAC3F,YAAY,CACb,CAAC;oBACF,OAAO,GAAG,SAAS,CAAC,OAAO,CAAC;oBAE5B,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;wBACxB,MAAM,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,GAAG,CAAC,IAAI,OAAO,CAAC,MAAM,oBAAoB,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,EAAE,EAAE,SAAS,CAAC,CAAC;oBACjI,CAAC;gBACH,CAAC;gBAED,OAAO,GAAG,cAAc,CAAC,OAAO,CAAC,CAAC;gBAClC,QAAQ,CAAC,IAAI,CAAC,MAAM,MAAM,CAAC,GAAG,OAAO,OAAO,EAAE,CAAC,CAAC;YAClD,CAAC;iBAAM,CAAC;gBACN,MAAM,EAAE,CAAC;gBACT,QAAQ,CAAC,IAAI,CAAC,MAAM,MAAM,CAAC,GAAG,2BAA2B,MAAM,CAAC,OAAO,EAAE,CAAC,CAAC;gBAE3E,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;oBACxB,MAAM,MAAM,CAAC,OAAO,EAAE,IAAI,CAAC,GAAG,CAAC,IAAI,OAAO,CAAC,MAAM,aAAa,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,CAAC,CAAC;gBAChG,CAAC;YACH,CAAC;QACH,CAAC;QAED,MAAM,aAAa,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QAE7C,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;YACxB,MAAM,MAAM,CAAC,MAAM,EAAE,cAAc,UAAU,gBAAgB,MAAM,YAAY,YAAY,eAAe,EAAE,SAAS,CAAC,CAAC;QACzH,CAAC;QAED,MAAM,gBAAgB,GAAG,yBAAyB,YAAY,CAAC,cAAc,EAAE,gBAAgB,MAAM,CAAC,IAAI,CAAC,MAAM,UAAU,OAAO,CAAC,iBAAiB,CAAC,cAAc,EAAE,gBAAgB,CAAC;QACtL,MAAM,YAAY,GAAG,iBAAiB,UAAU,mBAAmB,MAAM,gBAAgB,YAAY,YAAY,CAAC;QAClH,MAAM,gBAAgB,GAAG,sBAAsB,MAAM,CAAC,IAAI,CAAC,MAAM,aAAa,gBAAgB,KAAK,YAAY,cAAc,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE,CAAC;QAE5J,MAAM,QAAQ,GAAG;YACf,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM;YAC9B,UAAU;YACV,MAAM;YACN,aAAa,EAAE,YAAY;YAC3B,iBAAiB,EAAE,aAAa;YAChC,cAAc,EAAE,YAAY;YAC5B,kBAAkB,EAAE,OAAO,CAAC,iBAAiB;YAC7C,iBAAiB,EAAE,YAAY;SAChC,CAAC;QAEF,OAAO,EAAE,OAAO,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,EAAE,OAAO,EAAE,gBAAgB,EAAE,QAAQ,EAAE,EAAE,CAAC;IACnG,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QAE5E,IAAI,SAAS,IAAI,MAAM,EAAE,CAAC;YACxB,MAAM,MAAM,CAAC,OAAO,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC;QACjD,CAAC;QAED,MAAM,aAAa,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,SAAS,CAAC;QAE7C,OAAO;YACL,OAAO,EAAE,0BAA0B,YAAY,EAAE;YACjD,iBAAiB,EAAE;gBACjB,OAAO,EAAE,0BAA0B,YAAY,EAAE;gBACjD,QAAQ,EAAE;oBACR,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM;oBAC9B,UAAU,EAAE,CAAC;oBACb,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM;oBAC1B,aAAa,EAAE,CAAC;oBAChB,iBAAiB,EAAE,aAAa;iBACjC;aACF;SACF,CAAC;IACJ,CAAC;AACH,CAAC"}
@@ -0,0 +1,14 @@
+ /**
+  * Web Search Tool Handler
+  */
+ import type { WebSearchParams, WebSearchOutput } from '../schemas/web-search.js';
+ interface ToolOptions {
+   sessionId?: string;
+   logger?: (level: 'info' | 'error' | 'debug', message: string, sessionId: string) => Promise<void>;
+ }
+ export declare function handleWebSearch(params: WebSearchParams, options?: ToolOptions): Promise<{
+   content: string;
+   structuredContent: WebSearchOutput;
+ }>;
+ export {};
+ //# sourceMappingURL=search.d.ts.map
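`search.js` itself is cut off from this excerpt, so the exact `WebSearchParams` fields are defined by `webSearchParamsSchema` in `schemas/web-search.js`. A hypothetical call, assuming a `queries: string[]` field by analogy with the sibling tools:

```ts
// Hypothetical usage; the `queries` field name is an assumption.
import { handleWebSearch } from './tools/search.js';
import type { WebSearchParams } from './schemas/web-search.js';

const params = {
  queries: ['mcp server template site:github.com', '"model context protocol" tutorial'],
} as WebSearchParams;

const { content, structuredContent } = await handleWebSearch(params);
console.log(content); // markdown report with links, snippets, and related searches
```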
@@ -0,0 +1 @@
+ {"version":3,"file":"search.d.ts","sourceRoot":"","sources":["../../src/tools/search.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAWjF,UAAU,WAAW;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;CACnG;AASD,wBAAsB,eAAe,CACnC,MAAM,EAAE,eAAe,EACvB,OAAO,GAAE,WAAgB,GACxB,OAAO,CAAC;IAAE,OAAO,EAAE,MAAM,CAAC;IAAC,iBAAiB,EAAE,eAAe,CAAA;CAAE,CAAC,CA+IlE"}