research-powerpack-mcp 3.3.0 → 3.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/clients/reddit.d.ts +1 -2
- package/dist/clients/reddit.d.ts.map +1 -1
- package/dist/clients/reddit.js +29 -10
- package/dist/clients/reddit.js.map +1 -1
- package/dist/clients/research.js +1 -1
- package/dist/clients/research.js.map +1 -1
- package/dist/config/index.d.ts.map +1 -1
- package/dist/config/index.js +35 -3
- package/dist/config/index.js.map +1 -1
- package/dist/index.js +57 -22
- package/dist/index.js.map +1 -1
- package/dist/schemas/deep-research.d.ts +1 -0
- package/dist/schemas/deep-research.d.ts.map +1 -1
- package/dist/schemas/deep-research.js +204 -40
- package/dist/schemas/deep-research.js.map +1 -1
- package/dist/schemas/scrape-links.d.ts +2 -2
- package/dist/schemas/scrape-links.d.ts.map +1 -1
- package/dist/schemas/scrape-links.js +16 -8
- package/dist/schemas/scrape-links.js.map +1 -1
- package/dist/schemas/web-search.d.ts +3 -2
- package/dist/schemas/web-search.d.ts.map +1 -1
- package/dist/schemas/web-search.js +14 -5
- package/dist/schemas/web-search.js.map +1 -1
- package/dist/services/markdown-cleaner.d.ts +4 -0
- package/dist/services/markdown-cleaner.d.ts.map +1 -1
- package/dist/services/markdown-cleaner.js +26 -13
- package/dist/services/markdown-cleaner.js.map +1 -1
- package/dist/tools/reddit.d.ts +1 -0
- package/dist/tools/reddit.d.ts.map +1 -1
- package/dist/tools/reddit.js +68 -48
- package/dist/tools/reddit.js.map +1 -1
- package/dist/tools/scrape.js.map +1 -1
- package/dist/tools/search.d.ts +1 -0
- package/dist/tools/search.d.ts.map +1 -1
- package/dist/tools/search.js +11 -3
- package/dist/tools/search.js.map +1 -1
- package/dist/utils/errors.d.ts +43 -4
- package/dist/utils/errors.d.ts.map +1 -1
- package/dist/utils/errors.js +76 -5
- package/dist/utils/errors.js.map +1 -1
- package/dist/utils/url-aggregator.d.ts.map +1 -1
- package/dist/utils/url-aggregator.js +0 -2
- package/dist/utils/url-aggregator.js.map +1 -1
- package/dist/version.d.ts +22 -5
- package/dist/version.d.ts.map +1 -1
- package/dist/version.js +24 -7
- package/dist/version.js.map +1 -1
- package/package.json +1 -1
package/dist/schemas/deep-research.js
CHANGED

```diff
@@ -1,59 +1,223 @@
 /**
  * Deep research schema - batch research with dynamic token allocation
+ * Enhanced with comprehensive prompting for bugs, programming questions, and general research
  */
 import { z } from 'zod';
+// File attachment schema with comprehensive descriptions to guide LLM usage
+const fileAttachmentSchema = z.object({
+    path: z
+        .string({ required_error: 'deep_research: File path is required' })
+        .min(1, { message: 'deep_research: File path cannot be empty' })
+        .describe(`**[REQUIRED] Absolute file path to attach.**
+
+⚠️ **YOU MUST USE ABSOLUTE PATHS** - e.g., "/Users/john/project/src/utils/auth.ts" NOT "src/utils/auth.ts"
+
+The file will be read from the filesystem and included as context for the research question. This is CRITICAL for:
+- Bug investigations (attach the failing code)
+- Code reviews (attach the code to review)
+- Refactoring questions (attach current implementation)
+- Architecture decisions (attach relevant modules)
+- Performance issues (attach the slow code path)
+
+**IMPORTANT:** Always use the full absolute path as shown in your IDE or terminal.`),
+    start_line: z
+        .number({ invalid_type_error: 'deep_research: start_line must be a number' })
+        .int({ message: 'deep_research: start_line must be an integer' })
+        .positive({ message: 'deep_research: start_line must be a positive integer (1-indexed)' })
+        .optional()
+        .describe(`**[OPTIONAL] Start line number (1-indexed).**
+
+Use this to focus on a specific section of a large file. If omitted, reads from line 1.
+Example: start_line=50 with end_line=100 reads lines 50-100 only.`),
+    end_line: z
+        .number({ invalid_type_error: 'deep_research: end_line must be a number' })
+        .int({ message: 'deep_research: end_line must be an integer' })
+        .positive({ message: 'deep_research: end_line must be a positive integer (1-indexed)' })
+        .optional()
+        .describe(`**[OPTIONAL] End line number (1-indexed).**
+
+Use this to limit the scope to relevant code sections. If omitted, reads to end of file.
+For large files (>500 lines), consider specifying a range to focus the research.`),
+    description: z
+        .string()
+        .optional()
+        .describe(`**[HIGHLY RECOMMENDED] Comprehensive description of why this file is attached and what to focus on.**
+
+⚠️ **THIS IS CRITICAL FOR EFFECTIVE RESEARCH.** Write a detailed description explaining:
+
+1. **What this file is:** "This is the main authentication middleware that handles JWT validation"
+2. **Why it's relevant:** "The bug occurs when tokens expire during long-running requests"
+3. **What to focus on:** "Pay attention to the refreshToken() function on lines 45-80"
+4. **Known issues/context:** "We suspect the race condition happens in the async validation"
+5. **Related files:** "This interacts with /src/services/token-service.ts for token refresh"
+
+**GOOD EXAMPLE:**
+"This is our Redis caching layer (cache-service.ts). The bug manifests as stale data being returned after cache invalidation. Focus on the invalidatePattern() method (lines 120-150) and how it interacts with the pub/sub mechanism. We're using Redis Cluster and suspect the issue is related to cross-node invalidation timing."
+
+**BAD EXAMPLE:**
+"cache file" ← Too vague, research will be unfocused`),
+});
+// Research question schema with structured template guidance
 const researchQuestionSchema = z.object({
     question: z
-        .string()
-        .min(10, '
-        .describe(
+        .string({ required_error: 'deep_research: Question is required' })
+        .min(10, { message: 'deep_research: Question must be at least 10 characters' })
+        .describe(`**[REQUIRED] Your research question - MUST follow this structured template:**
+
+━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+📋 **STRUCTURED QUESTION TEMPLATE** (You MUST use this format)
+━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+**1. 🎯 WHAT I NEED:**
+[Clearly state what you're trying to achieve, solve, or understand]
+
+**2. 🤔 WHY I'M RESEARCHING THIS:**
+[Explain the context - what decision does this inform? What problem are you solving?]
+
+**3. 📚 WHAT I ALREADY KNOW:**
+[Share your current understanding so research fills gaps, not repeats basics]
+
+**4. 🔧 HOW I PLAN TO USE THIS:**
+[Describe the practical application - implementation, debugging, architecture, etc.]
+
+**5. ❓ SPECIFIC QUESTIONS (2-5):**
+- Question 1: [Specific, pointed question]
+- Question 2: [Another specific question]
+- Question 3: [etc.]
+
+**6. 🌐 PRIORITY SOURCES (optional):**
+[Sites/docs to prioritize: "Prefer official React docs, GitHub issues, Stack Overflow"]
+
+**7. ⚡ PRIORITY INFO (optional):**
+[What matters most: "Focus on performance implications" or "Prioritize security best practices"]
+
+━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+**EXAMPLE FOR BUG INVESTIGATION:**
+"🎯 WHAT I NEED: Debug why our WebSocket connections drop after exactly 60 seconds of inactivity.
+
+🤔 WHY: Production users are losing real-time updates, causing data sync issues and support tickets.
+
+📚 WHAT I KNOW: We use Socket.io v4.6 with Redis adapter. The 60s timeout suggests a proxy/load balancer issue, but we've checked nginx configs.
+
+🔧 HOW I'LL USE THIS: Implement the fix in our connection-manager.ts (attached) and update our deployment configs.
+
+❓ SPECIFIC QUESTIONS:
+1. What are common causes of exactly 60-second WebSocket timeouts?
+2. How should Socket.io heartbeat/ping intervals be configured to prevent this?
+3. Are there AWS ALB-specific settings we need to consider?
+4. How do other production apps handle WebSocket keep-alive?
+
+🌐 PRIORITY: Socket.io official docs, AWS documentation, GitHub issues with similar problems
+
+⚡ FOCUS: Production-ready solutions, not development workarounds"
+
+**EXAMPLE FOR ARCHITECTURE RESEARCH:**
+"🎯 WHAT I NEED: Best practices for implementing CQRS pattern with Event Sourcing in Node.js/TypeScript.
+
+🤔 WHY: Our monolithic API is hitting scaling limits. We need to separate read/write paths for our order processing system.
+
+📚 WHAT I KNOW: Familiar with basic event-driven architecture, used RabbitMQ before. New to full CQRS/ES implementation.
+
+🔧 HOW I'LL USE THIS: Design the new order-service architecture, select appropriate libraries, plan migration strategy.
+
+❓ SPECIFIC QUESTIONS:
+1. What are the recommended Node.js libraries for CQRS/ES? (Pros/cons of each)
+2. How should we handle eventual consistency in read models?
+3. What's the best event store for our scale (~10k events/day)?
+4. How do we handle schema evolution for events over time?
+5. What are common pitfalls teams encounter when adopting CQRS/ES?
+
+🌐 PRIORITY: Microsoft docs (they coined CQRS), Martin Fowler, real-world case studies
+
+⚡ FOCUS: Production patterns, not theoretical explanations. Include code examples."`),
     file_attachments: z
-        .array(
-            path: z.string().describe('File path (absolute or relative)'),
-            start_line: z.number().int().positive().optional().describe('Start line (1-indexed)'),
-            end_line: z.number().int().positive().optional().describe('End line (1-indexed)'),
-            description: z.string().optional().describe('What to focus on'),
-        }))
+        .array(fileAttachmentSchema)
         .optional()
-        .describe(
-});
-export const deepResearchParamsShape = {
-    questions: z
-        .array(researchQuestionSchema)
-        .min(1, 'At least one research question required, but you should keep it around 6-7 at each round')
-        .max(10, 'Maximum 10 research questions per batch')
-        .describe(`**BATCH RESEARCH (1-10 questions) with dynamic token allocation.**
+        .describe(`**[CRITICAL FOR BUGS/CODE QUESTIONS] File attachments to include as research context.**
 
-
-
-
-- 5-7 questions: ~4-6K tokens/question (balanced breadth, ideal for domain exploration)
-- 8-10 questions: ~3-4K tokens/question (quick multi-topic scan)
+━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+⚠️ **YOU MUST ATTACH FILES WHEN:**
+━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
 
-**
-
-
-
-
-
-**
--
--
-
-
--
-
-
+✅ **MANDATORY file attachment scenarios:**
+- 🐛 **Bug investigation** → Attach the buggy code file(s)
+- 🔍 **Code review** → Attach the code to be reviewed
+- ♻️ **Refactoring** → Attach current implementation
+- 🏗️ **Architecture questions about YOUR code** → Attach relevant modules
+- ⚡ **Performance issues** → Attach the slow code paths
+- 🔒 **Security review** → Attach the security-sensitive code
+- 🧪 **Testing questions** → Attach both the code AND test files
+- 🔗 **Integration issues** → Attach files from both sides of the integration
+
+❌ **File attachments NOT needed for:**
+- General concept questions ("What is CQRS?")
+- Technology comparisons ("React vs Vue")
+- Best practices research (unless about your specific code)
+- Documentation lookups
+
+━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+📎 **HOW TO ATTACH FILES:**
+━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+**Each attachment requires:**
+1. \`path\` (REQUIRED): **Absolute path** like "/Users/dev/project/src/auth.ts"
+2. \`start_line\` (optional): Focus on specific section
+3. \`end_line\` (optional): Limit scope for large files
+4. \`description\` (HIGHLY RECOMMENDED): Explain what this file is and why it matters
+
+**EXAMPLE - Bug with multiple related files:**
 \`\`\`json
 {
-  "
-
-    {
+  "question": "🎯 WHAT I NEED: Fix the race condition in our order processing...",
+  "file_attachments": [
+    {
+      "path": "/Users/dev/ecommerce/src/services/order-processor.ts",
+      "description": "Main order processing service. The race condition occurs in processOrder() when two requests hit simultaneously. Lines 45-120 contain the critical section."
+    },
+    {
+      "path": "/Users/dev/ecommerce/src/repositories/inventory-repo.ts",
+      "start_line": 30,
+      "end_line": 80,
+      "description": "Inventory repository - the decrementStock() method (lines 30-80) is called by order-processor and we suspect it's not properly locked."
+    },
+    {
+      "path": "/Users/dev/ecommerce/src/utils/db-transaction.ts",
+      "description": "Our transaction wrapper utility. Need to verify if it properly handles concurrent transactions."
+    }
   ]
 }
 \`\`\`
 
-
+**Attach as many files as needed for complete context - there is no limit!**`),
+});
+export const deepResearchParamsShape = {
+    questions: z
+        .array(researchQuestionSchema, {
+            required_error: 'deep_research: Questions array is required',
+            invalid_type_error: 'deep_research: Questions must be an array'
+        })
+        .min(1, { message: 'deep_research: At least 1 question is required (recommend 2-7 for optimal depth)' })
+        .max(10, { message: 'deep_research: Maximum 10 questions allowed per batch' })
+        .describe(`**Batch deep research (2-10 questions) with dynamic token allocation.**
+
+**TOKEN BUDGET:** 32,000 tokens distributed across all questions:
+- 2 questions: 16,000 tokens/question (deep dive)
+- 5 questions: 6,400 tokens/question (balanced)
+- 10 questions: 3,200 tokens/question (rapid multi-topic)
+
+**WHEN TO USE:**
+- Need multi-perspective analysis on related topics
+- Researching a domain from multiple angles
+- Validating understanding across different aspects
+- Comparing approaches/technologies side-by-side
+
+**EACH QUESTION SHOULD INCLUDE:**
+- Topic & context (what decision it informs)
+- Your current understanding (to fill gaps)
+- Specific sub-questions (2-5 per topic)
+
+**USE:** Maximize question count for comprehensive coverage. All questions run in parallel. Group related questions for coherent research.`),
 };
 export const deepResearchParamsSchema = z.object(deepResearchParamsShape);
 //# sourceMappingURL=deep-research.js.map
```
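All of the new validation messages are prefixed with the tool name (`deep_research:`), so a failed parse can be attributed without extra context. A minimal sketch of how they surface through Zod's `safeParse` (the import path and call site below are illustrative, not part of the package's documented API):

```typescript
import { deepResearchParamsSchema } from 'research-powerpack-mcp/dist/schemas/deep-research.js'; // path assumed

// An empty batch violates the .min(1) rule on `questions`.
const result = deepResearchParamsSchema.safeParse({ questions: [] });

if (!result.success) {
  for (const issue of result.error.issues) {
    // e.g. "questions: deep_research: At least 1 question is required (recommend 2-7 for optimal depth)"
    console.error(`${issue.path.join('.')}: ${issue.message}`);
  }
}
```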
package/dist/schemas/deep-research.js.map
CHANGED
package/dist/schemas/scrape-links.d.ts
CHANGED

```diff
@@ -1,12 +1,12 @@
 import { z } from 'zod';
 export declare const scrapeLinksParamsShape: {
-    urls: z.ZodArray<z.ZodString, "many">;
+    urls: z.ZodArray<z.ZodEffects<z.ZodString, string, string>, "many">;
     timeout: z.ZodDefault<z.ZodNumber>;
     use_llm: z.ZodDefault<z.ZodBoolean>;
     what_to_extract: z.ZodOptional<z.ZodString>;
 };
 export declare const scrapeLinksParamsSchema: z.ZodObject<{
-    urls: z.ZodArray<z.ZodString, "many">;
+    urls: z.ZodArray<z.ZodEffects<z.ZodString, string, string>, "many">;
     timeout: z.ZodDefault<z.ZodNumber>;
     use_llm: z.ZodDefault<z.ZodBoolean>;
     what_to_extract: z.ZodOptional<z.ZodString>;
```
package/dist/schemas/scrape-links.d.ts.map
CHANGED
package/dist/schemas/scrape-links.js
CHANGED

```diff
@@ -1,24 +1,32 @@
 import { z } from 'zod';
+// URL schema with protocol validation
+const urlSchema = z
+    .string({ required_error: 'scrape_links: URL is required' })
+    .url({ message: 'scrape_links: Invalid URL format' })
+    .refine(url => url.startsWith('http://') || url.startsWith('https://'), { message: 'scrape_links: URL must use http:// or https:// protocol' });
 // Input schema for scrape_links tool
 export const scrapeLinksParamsShape = {
     urls: z
-        .array(
-
-
+        .array(urlSchema, {
+            required_error: 'scrape_links: URLs array is required',
+            invalid_type_error: 'scrape_links: URLs must be an array'
+        })
+        .min(1, { message: 'scrape_links: At least 1 URL is required' })
+        .max(50, { message: 'scrape_links: Maximum 50 URLs allowed per request' })
         .describe('URLs to scrape (1-50). Recommend 3-5 URLs for balanced depth/breadth. More URLs = broader coverage but fewer tokens per URL. 3 URLs: ~10K tokens each (deep); 10 URLs: ~3K tokens each (balanced); 50 URLs: ~640 tokens each (scan).'),
     timeout: z
-        .number()
-        .min(5)
-        .max(120)
+        .number({ invalid_type_error: 'scrape_links: Timeout must be a number' })
+        .min(5, { message: 'scrape_links: Timeout must be at least 5 seconds' })
+        .max(120, { message: 'scrape_links: Timeout cannot exceed 120 seconds' })
         .default(30)
         .describe('Timeout in seconds for each URL'),
     use_llm: z
-        .boolean()
+        .boolean({ invalid_type_error: 'scrape_links: use_llm must be a boolean' })
         .default(false)
         .describe('Enable AI processing for content extraction (requires OPENROUTER_API_KEY)'),
     what_to_extract: z
         .string()
-        .max(1000)
+        .max(1000, { message: 'scrape_links: Extraction instructions too long (max 1000 characters)' })
         .optional()
         .describe('Specific content extraction instructions for AI. Will be enhanced with conciseness suffix automatically.'),
 };
```
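With `urlSchema` in place, a URL that is well-formed but not HTTP(S) is rejected at parse time instead of reaching the scraper. A rough illustration (import path assumed):

```typescript
import { scrapeLinksParamsSchema } from 'research-powerpack-mcp/dist/schemas/scrape-links.js'; // path assumed

// ftp:// parses as a valid URL, but fails the new protocol refinement:
// "scrape_links: URL must use http:// or https:// protocol"
const bad = scrapeLinksParamsSchema.safeParse({ urls: ['ftp://example.com/readme.txt'] });
console.log(bad.success); // false

// http(s) URLs pass; timeout and use_llm fall back to their defaults (30, false)
const ok = scrapeLinksParamsSchema.safeParse({ urls: ['https://example.com/docs'] });
console.log(ok.success); // true
```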
package/dist/schemas/scrape-links.js.map
CHANGED
package/dist/schemas/web-search.d.ts
CHANGED

```diff
@@ -1,9 +1,9 @@
 import { z } from 'zod';
 export declare const webSearchParamsShape: {
-    keywords: z.ZodArray<z.ZodString, "many">;
+    keywords: z.ZodArray<z.ZodEffects<z.ZodString, string, string>, "many">;
 };
 export declare const webSearchParamsSchema: z.ZodObject<{
-    keywords: z.ZodArray<z.ZodString, "many">;
+    keywords: z.ZodArray<z.ZodEffects<z.ZodString, string, string>, "many">;
 }, "strip", z.ZodTypeAny, {
     keywords: string[];
 }, {
@@ -19,6 +19,7 @@ export interface WebSearchOutput {
         total_unique_urls?: number;
         consensus_url_count?: number;
         frequency_threshold?: number;
+        errorCode?: string;
     };
 }
 //# sourceMappingURL=web-search.d.ts.map
```
package/dist/schemas/web-search.d.ts.map
CHANGED
package/dist/schemas/web-search.js
CHANGED

```diff
@@ -1,10 +1,19 @@
 import { z } from 'zod';
-//
+// Keyword schema with validation
+const keywordSchema = z
+    .string({ required_error: 'web_search: Keyword is required' })
+    .min(1, { message: 'web_search: Keyword cannot be empty' })
+    .max(500, { message: 'web_search: Keyword too long (max 500 characters)' })
+    .refine(k => k.trim().length > 0, { message: 'web_search: Keyword cannot be whitespace only' });
+// Input schema for web_search tool
 const keywordsSchema = z
-    .array(
-
-
-
+    .array(keywordSchema, {
+        required_error: 'web_search: Keywords array is required',
+        invalid_type_error: 'web_search: Keywords must be an array'
+    })
+    .min(1, { message: 'web_search: At least 1 keyword is required' })
+    .max(100, { message: 'web_search: Maximum 100 keywords allowed per request' })
+    .describe('Array of search keywords (1-100 keywords). Recommend 3-7 keywords for comprehensive research. Supports Google search operators (site:, -exclusion, "exact phrase", filetype:). More keywords = broader coverage and diverse perspectives.');
 export const webSearchParamsShape = {
     keywords: keywordsSchema,
 };
```
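The per-keyword checks mean an empty or whitespace-only entry now fails fast with a tool-prefixed message instead of producing a useless search. Sketch (import path assumed):

```typescript
import { webSearchParamsSchema } from 'research-powerpack-mcp/dist/schemas/web-search.js'; // path assumed

const result = webSearchParamsSchema.safeParse({ keywords: ['zod error maps', '   '] });
if (!result.success) {
  // "web_search: Keyword cannot be whitespace only" (reported for keywords[1])
  console.error(result.error.issues[0].message);
}
```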
package/dist/schemas/web-search.js.map
CHANGED
package/dist/services/markdown-cleaner.d.ts
CHANGED

```diff
@@ -1,4 +1,8 @@
 export declare class MarkdownCleaner {
+    /**
+     * Process HTML content and convert to clean Markdown
+     * NEVER throws - returns original content on any error for graceful degradation
+     */
     processContent(htmlContent: string): string;
 }
 //# sourceMappingURL=markdown-cleaner.d.ts.map
```
package/dist/services/markdown-cleaner.d.ts.map
CHANGED
package/dist/services/markdown-cleaner.js
CHANGED

```diff
@@ -10,22 +10,35 @@ const turndown = new TurndownService({
 // Remove script, style, nav, footer, aside elements
 turndown.remove(['script', 'style', 'nav', 'footer', 'aside', 'noscript']);
 export class MarkdownCleaner {
+    /**
+     * Process HTML content and convert to clean Markdown
+     * NEVER throws - returns original content on any error for graceful degradation
+     */
     processContent(htmlContent) {
-
-
+        try {
+            // Handle null/undefined/non-string inputs gracefully
+            if (!htmlContent || typeof htmlContent !== 'string') {
+                return htmlContent || '';
+            }
+            // If already markdown (no HTML tags), return as-is
+            if (!htmlContent.includes('<')) {
+                return htmlContent.trim();
+            }
+            // Remove HTML comments before conversion
+            let content = htmlContent.replace(/<!--[\s\S]*?-->/g, '');
+            // Convert HTML to Markdown using Turndown
+            content = turndown.turndown(content);
+            // Clean up whitespace
+            content = content.replace(/\n{3,}/g, '\n\n');
+            content = content.trim();
+            return content;
         }
-
-
-
+        catch (error) {
+            // Log error but don't crash - return original content for graceful degradation
+            console.error('[MarkdownCleaner] processContent failed:', error instanceof Error ? error.message : String(error), '| Content length:', htmlContent?.length ?? 0);
+            // Return original content if conversion fails
+            return htmlContent || '';
         }
-        // Remove HTML comments before conversion
-        let content = htmlContent.replace(/<!--[\s\S]*?-->/g, '');
-        // Convert HTML to Markdown using Turndown
-        content = turndown.turndown(content);
-        // Clean up whitespace
-        content = content.replace(/\n{3,}/g, '\n\n');
-        content = content.trim();
-        return content;
     }
 }
 //# sourceMappingURL=markdown-cleaner.js.map
```
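Because `processContent` now wraps the whole conversion in try/catch, callers get best-effort output rather than an exception. A small sketch of the observable behaviour (import path assumed):

```typescript
import { MarkdownCleaner } from 'research-powerpack-mcp/dist/services/markdown-cleaner.js'; // path assumed

const cleaner = new MarkdownCleaner();

// HTML input is converted to Markdown via Turndown (comments stripped, whitespace collapsed)
const md = cleaner.processContent('<h1>Title</h1><!-- hidden --><p>Body</p>');

// Input without '<' is treated as already-Markdown and returned trimmed
const passthrough = cleaner.processContent('  plain text  ');

// If Turndown throws for any reason, the error is logged via console.error
// and the original input string is returned unchanged instead of propagating.
console.log(md, passthrough);
```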
package/dist/services/markdown-cleaner.js.map
CHANGED
package/dist/tools/reddit.d.ts
CHANGED
```diff
@@ -1,5 +1,6 @@
 /**
  * Reddit Tools - Search and Fetch
+ * NEVER throws - always returns structured response for graceful degradation
  */
 export declare function handleSearchReddit(queries: string[], apiKey: string, dateAfter?: string): Promise<string>;
 export interface GetRedditPostsOptions {
```
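Per the updated declaration, `handleSearchReddit` resolves to a string payload even when the underlying request fails, rather than throwing. A hypothetical call matching the declared signature (the import path, API-key variable, and date format are illustrative):

```typescript
import { handleSearchReddit } from 'research-powerpack-mcp/dist/tools/reddit.js'; // path assumed

// queries, apiKey, optional dateAfter - as declared in reddit.d.ts
const text: string = await handleSearchReddit(
  ['best zod error handling patterns'],
  process.env.SEARCH_API_KEY ?? '', // env var name is illustrative
  '2024-01-01',                     // dateAfter format assumed
);
console.log(text); // structured text response, even on upstream failure
```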
package/dist/tools/reddit.d.ts.map
CHANGED