mcp-researchpowerpack-http 3.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127)
  1. package/README.md +124 -0
  2. package/dist/index.d.ts +3 -0
  3. package/dist/index.d.ts.map +1 -0
  4. package/dist/index.js +227 -0
  5. package/dist/index.js.map +7 -0
  6. package/dist/mcp-use.json +7 -0
  7. package/dist/src/clients/github.d.ts +83 -0
  8. package/dist/src/clients/github.d.ts.map +1 -0
  9. package/dist/src/clients/github.js +370 -0
  10. package/dist/src/clients/github.js.map +7 -0
  11. package/dist/src/clients/reddit.d.ts +60 -0
  12. package/dist/src/clients/reddit.d.ts.map +1 -0
  13. package/dist/src/clients/reddit.js +287 -0
  14. package/dist/src/clients/reddit.js.map +7 -0
  15. package/dist/src/clients/research.d.ts +67 -0
  16. package/dist/src/clients/research.d.ts.map +1 -0
  17. package/dist/src/clients/research.js +282 -0
  18. package/dist/src/clients/research.js.map +7 -0
  19. package/dist/src/clients/scraper.d.ts +72 -0
  20. package/dist/src/clients/scraper.d.ts.map +1 -0
  21. package/dist/src/clients/scraper.js +327 -0
  22. package/dist/src/clients/scraper.js.map +7 -0
  23. package/dist/src/clients/search.d.ts +57 -0
  24. package/dist/src/clients/search.d.ts.map +1 -0
  25. package/dist/src/clients/search.js +218 -0
  26. package/dist/src/clients/search.js.map +7 -0
  27. package/dist/src/config/index.d.ts +93 -0
  28. package/dist/src/config/index.d.ts.map +1 -0
  29. package/dist/src/config/index.js +218 -0
  30. package/dist/src/config/index.js.map +7 -0
  31. package/dist/src/schemas/deep-research.d.ts +40 -0
  32. package/dist/src/schemas/deep-research.d.ts.map +1 -0
  33. package/dist/src/schemas/deep-research.js +216 -0
  34. package/dist/src/schemas/deep-research.js.map +7 -0
  35. package/dist/src/schemas/github-score.d.ts +50 -0
  36. package/dist/src/schemas/github-score.d.ts.map +1 -0
  37. package/dist/src/schemas/github-score.js +58 -0
  38. package/dist/src/schemas/github-score.js.map +7 -0
  39. package/dist/src/schemas/scrape-links.d.ts +23 -0
  40. package/dist/src/schemas/scrape-links.d.ts.map +1 -0
  41. package/dist/src/schemas/scrape-links.js +32 -0
  42. package/dist/src/schemas/scrape-links.js.map +7 -0
  43. package/dist/src/schemas/web-search.d.ts +18 -0
  44. package/dist/src/schemas/web-search.d.ts.map +1 -0
  45. package/dist/src/schemas/web-search.js +28 -0
  46. package/dist/src/schemas/web-search.js.map +7 -0
  47. package/dist/src/scoring/github-quality.d.ts +142 -0
  48. package/dist/src/scoring/github-quality.d.ts.map +1 -0
  49. package/dist/src/scoring/github-quality.js +202 -0
  50. package/dist/src/scoring/github-quality.js.map +7 -0
  51. package/dist/src/services/file-attachment.d.ts +30 -0
  52. package/dist/src/services/file-attachment.d.ts.map +1 -0
  53. package/dist/src/services/file-attachment.js +205 -0
  54. package/dist/src/services/file-attachment.js.map +7 -0
  55. package/dist/src/services/llm-processor.d.ts +29 -0
  56. package/dist/src/services/llm-processor.d.ts.map +1 -0
  57. package/dist/src/services/llm-processor.js +206 -0
  58. package/dist/src/services/llm-processor.js.map +7 -0
  59. package/dist/src/services/markdown-cleaner.d.ts +8 -0
  60. package/dist/src/services/markdown-cleaner.d.ts.map +1 -0
  61. package/dist/src/services/markdown-cleaner.js +63 -0
  62. package/dist/src/services/markdown-cleaner.js.map +7 -0
  63. package/dist/src/tools/github-score.d.ts +12 -0
  64. package/dist/src/tools/github-score.d.ts.map +1 -0
  65. package/dist/src/tools/github-score.js +306 -0
  66. package/dist/src/tools/github-score.js.map +7 -0
  67. package/dist/src/tools/mcp-helpers.d.ts +27 -0
  68. package/dist/src/tools/mcp-helpers.d.ts.map +1 -0
  69. package/dist/src/tools/mcp-helpers.js +47 -0
  70. package/dist/src/tools/mcp-helpers.js.map +7 -0
  71. package/dist/src/tools/reddit.d.ts +54 -0
  72. package/dist/src/tools/reddit.d.ts.map +1 -0
  73. package/dist/src/tools/reddit.js +498 -0
  74. package/dist/src/tools/reddit.js.map +7 -0
  75. package/dist/src/tools/registry.d.ts +3 -0
  76. package/dist/src/tools/registry.d.ts.map +1 -0
  77. package/dist/src/tools/registry.js +17 -0
  78. package/dist/src/tools/registry.js.map +7 -0
  79. package/dist/src/tools/research.d.ts +14 -0
  80. package/dist/src/tools/research.d.ts.map +1 -0
  81. package/dist/src/tools/research.js +250 -0
  82. package/dist/src/tools/research.js.map +7 -0
  83. package/dist/src/tools/scrape.d.ts +14 -0
  84. package/dist/src/tools/scrape.d.ts.map +1 -0
  85. package/dist/src/tools/scrape.js +290 -0
  86. package/dist/src/tools/scrape.js.map +7 -0
  87. package/dist/src/tools/search.d.ts +10 -0
  88. package/dist/src/tools/search.d.ts.map +1 -0
  89. package/dist/src/tools/search.js +197 -0
  90. package/dist/src/tools/search.js.map +7 -0
  91. package/dist/src/tools/utils.d.ts +105 -0
  92. package/dist/src/tools/utils.d.ts.map +1 -0
  93. package/dist/src/tools/utils.js +96 -0
  94. package/dist/src/tools/utils.js.map +7 -0
  95. package/dist/src/utils/concurrency.d.ts +28 -0
  96. package/dist/src/utils/concurrency.d.ts.map +1 -0
  97. package/dist/src/utils/concurrency.js +62 -0
  98. package/dist/src/utils/concurrency.js.map +7 -0
  99. package/dist/src/utils/errors.d.ts +95 -0
  100. package/dist/src/utils/errors.d.ts.map +1 -0
  101. package/dist/src/utils/errors.js +289 -0
  102. package/dist/src/utils/errors.js.map +7 -0
  103. package/dist/src/utils/logger.d.ts +33 -0
  104. package/dist/src/utils/logger.d.ts.map +1 -0
  105. package/dist/src/utils/logger.js +41 -0
  106. package/dist/src/utils/logger.js.map +7 -0
  107. package/dist/src/utils/markdown-formatter.d.ts +5 -0
  108. package/dist/src/utils/markdown-formatter.d.ts.map +1 -0
  109. package/dist/src/utils/markdown-formatter.js +15 -0
  110. package/dist/src/utils/markdown-formatter.js.map +7 -0
  111. package/dist/src/utils/response.d.ts +83 -0
  112. package/dist/src/utils/response.d.ts.map +1 -0
  113. package/dist/src/utils/response.js +109 -0
  114. package/dist/src/utils/response.js.map +7 -0
  115. package/dist/src/utils/retry.d.ts +43 -0
  116. package/dist/src/utils/retry.d.ts.map +1 -0
  117. package/dist/src/utils/retry.js +37 -0
  118. package/dist/src/utils/retry.js.map +7 -0
  119. package/dist/src/utils/url-aggregator.d.ts +92 -0
  120. package/dist/src/utils/url-aggregator.d.ts.map +1 -0
  121. package/dist/src/utils/url-aggregator.js +357 -0
  122. package/dist/src/utils/url-aggregator.js.map +7 -0
  123. package/dist/src/version.d.ts +28 -0
  124. package/dist/src/version.d.ts.map +1 -0
  125. package/dist/src/version.js +32 -0
  126. package/dist/src/version.js.map +7 -0
  127. package/package.json +73 -0
@@ -0,0 +1,282 @@
1
+ import OpenAI from "openai";
2
+ import { RESEARCH } from "../config/index.js";
3
+ import { calculateBackoff } from "../utils/retry.js";
4
+ import {
5
+ classifyError,
6
+ sleep,
7
+ ErrorCode,
8
+ withRequestTimeout,
9
+ withStallProtection
10
+ } from "../utils/errors.js";
11
+ import { mcpLog } from "../utils/logger.js";
12
// ── Tuning constants ──

// How many research questions may run concurrently (exported for callers).
const DEFAULT_RESEARCH_CONCURRENCY = 3;
// Maximum retry attempts per model before giving up.
const MAX_RESEARCH_RETRIES = 3;
// Default sampling temperature for research completions.
const RESEARCH_TEMPERATURE = 0.3;
// Exponential-backoff bounds between retries (ms).
const RESEARCH_BASE_DELAY_MS = 5000;
const RESEARCH_MAX_DELAY_MS = 60000;
// Default completion-token budget.
const DEFAULT_MAX_TOKENS = 32000;
// Upper bound applied to caller-supplied max_search_results.
const MAX_SEARCH_RESULTS_CAP = 30;
// Hard deadline for a single research API call (ms).
const RESEARCH_REQUEST_DEADLINE_MS = 120000;
// Abort if no progress is observed for this long (ms).
const RESEARCH_STALL_TIMEOUT_MS = 90000;

// HTTP status codes considered transient and worth retrying.
const RETRYABLE_RESEARCH_CODES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);

// Models that take the Gemini-style `google_search` tool rather than
// the `search_parameters` request field.
const GEMINI_STYLE_MODELS = /* @__PURE__ */ new Set([
  "google/gemini-2.5-flash",
  "google/gemini-2.5-pro",
  "google/gemini-2.0-flash",
  "google/gemini-pro"
]);

/**
 * Decide whether a model uses the Gemini-style search tool.
 * Any `google/gemini*` id qualifies, as does the explicit allow-list.
 */
function isGeminiStyleModel(model) {
  if (model.startsWith("google/gemini")) {
    return true;
  }
  return GEMINI_STYLE_MODELS.has(model);
}
31
/**
 * Build the OpenRouter request payload for a research call.
 *
 * Gemini-family models receive a `google_search` tool entry; every other
 * model (Grok, Perplexity, etc.) gets a `search_parameters` block with
 * citations enabled and the search-result count capped at
 * MAX_SEARCH_RESULTS_CAP. `response_format` is attached only when the
 * caller provided one.
 */
function buildResearchPayload(model, messages, options) {
  const { temperature, reasoningEffort, maxTokens, maxSearchResults, responseFormat } = options;
  let payload;
  if (isGeminiStyleModel(model)) {
    // Gemini path: web search is expressed as a tool, not a parameter block.
    payload = {
      model,
      messages,
      temperature,
      max_tokens: maxTokens,
      tools: [
        { type: "google_search", googleSearch: {} }
      ]
    };
  } else {
    // Default path: search_parameters-style web search with citations.
    payload = {
      model,
      messages,
      temperature,
      reasoning_effort: reasoningEffort,
      max_completion_tokens: maxTokens,
      search_parameters: {
        mode: "on",
        max_search_results: Math.min(maxSearchResults, MAX_SEARCH_RESULTS_CAP),
        return_citations: true,
        sources: [{ type: "web" }]
      }
    };
  }
  if (responseFormat) {
    payload.response_format = responseFormat;
  }
  return payload;
}
69
/**
 * Convert a raw OpenRouter response into the structured research result.
 *
 * Pulls out content, finish reason, token usage, and URL-citation
 * annotations, falling back to safe defaults (empty strings, the caller's
 * model id, Date.now()) whenever a field is absent. `usage` and
 * `annotations` stay undefined when the response carried none.
 */
function parseResearchResponse(raw, model) {
  const { response, choice, message } = raw;
  // Map one OpenRouter annotation into the internal citation shape.
  const toCitation = (a) => ({
    type: "url_citation",
    url: a.url_citation?.url || "",
    title: a.url_citation?.title || "",
    startIndex: a.url_citation?.start_index || 0,
    endIndex: a.url_citation?.end_index || 0
  });
  const usage = response.usage;
  return {
    id: response.id || "",
    model: response.model || model,
    created: response.created || Date.now(),
    content: message?.content || "",
    finishReason: choice?.finish_reason ?? undefined,
    usage: usage
      ? {
          promptTokens: usage.prompt_tokens,
          completionTokens: usage.completion_tokens,
          totalTokens: usage.total_tokens,
          sourcesUsed: usage.num_sources_used
        }
      : undefined,
    annotations: message?.annotations?.map(toCitation)
  };
}
92
/**
 * Deep-research client over an OpenAI-compatible endpoint (OpenRouter).
 *
 * Owns the full retry policy for a research completion: exponential
 * backoff, a hard per-request deadline, stall detection, and a fallback
 * model. SDK-level retries are disabled (maxRetries: 0) because this
 * class implements its own. `research()` never throws — failures are
 * reported via the `error` field of the returned response object.
 */
class ResearchClient {
  // OpenAI SDK instance configured against the RESEARCH endpoint.
  client;
  /**
   * @throws {Error} when RESEARCH.API_KEY is not configured.
   */
  constructor() {
    if (!RESEARCH.API_KEY) {
      throw new Error("Research capability is not configured. Please set up the required API credentials.");
    }
    this.client = new OpenAI({
      baseURL: RESEARCH.BASE_URL,
      apiKey: RESEARCH.API_KEY,
      timeout: RESEARCH.TIMEOUT_MS,
      maxRetries: 0
      // We handle retries ourselves
    });
  }
  /**
   * Check if an error is retryable for research requests.
   *
   * Retryable when: the HTTP status is in RETRYABLE_RESEARCH_CODES; the
   * message mentions a transient condition (rate limit / timeout /
   * connection / stalled); or the error code is ESTALLED / ETIMEDOUT
   * (set by the stall/timeout wrappers).
   */
  isRetryableError(error) {
    if (!error) return false;
    const err = error;
    if (err.status && RETRYABLE_RESEARCH_CODES.has(err.status)) {
      return true;
    }
    // Fall back to substring matching on the (lowercased) message.
    const message = (err.message || "").toLowerCase();
    if (message.includes("rate limit") || message.includes("timeout") || message.includes("timed out") || message.includes("service unavailable") || message.includes("connection") || message.includes("stalled")) {
      return true;
    }
    if (err.code === "ESTALLED" || err.code === "ETIMEDOUT") {
      return true;
    }
    return false;
  }
  /**
   * Make the API call to OpenRouter with retry logic.
   *
   * Each attempt runs under both a hard deadline
   * (RESEARCH_REQUEST_DEADLINE_MS) and stall protection
   * (RESEARCH_STALL_TIMEOUT_MS); the external `signal`, the stall signal,
   * and the timeout signal are merged into one AbortController passed to
   * the SDK, with listeners removed in `finally` to avoid leaks.
   *
   * Returns `{ raw }` on success or `{ error }` if all attempts fail.
   * NOTE(review): if the FINAL attempt yields a response with no choice at
   * all, control falls through the empty-response branch and the empty
   * `{ raw }` is still returned (not `lastError`) — confirm intended.
   */
  async callOpenRouter(payload, model, signal) {
    let lastError;
    for (let attempt = 0; attempt <= MAX_RESEARCH_RETRIES; attempt++) {
      try {
        if (attempt > 0) {
          mcpLog("warning", `Retry attempt ${attempt}/${MAX_RESEARCH_RETRIES} for ${model}`, "research");
        }
        const response = await withStallProtection(
          (stallSignal) => withRequestTimeout(
            (timeoutSignal) => {
              // Merge external + stall + timeout aborts into one signal.
              const mergedController = new AbortController();
              const abortMerged = () => mergedController.abort();
              signal?.addEventListener("abort", abortMerged, { once: true });
              stallSignal.addEventListener("abort", abortMerged, { once: true });
              timeoutSignal.addEventListener("abort", abortMerged, { once: true });
              return this.client.chat.completions.create(
                payload,
                { signal: mergedController.signal }
              ).finally(() => {
                // Always detach listeners so settled requests don't leak.
                signal?.removeEventListener("abort", abortMerged);
                stallSignal.removeEventListener("abort", abortMerged);
                timeoutSignal.removeEventListener("abort", abortMerged);
              });
            },
            RESEARCH_REQUEST_DEADLINE_MS,
            `research (${model})`
          ),
          RESEARCH_STALL_TIMEOUT_MS,
          2,
          `research (${model})`
        );
        const choice = response.choices?.[0];
        const message = choice?.message;
        // Only a response with NO choice at all counts as empty here; a
        // present choice with empty content is returned as-is.
        if (!message?.content && !choice) {
          lastError = {
            code: ErrorCode.INTERNAL_ERROR,
            message: "Research API returned empty response",
            retryable: true
          };
          if (attempt < MAX_RESEARCH_RETRIES) {
            const delayMs = calculateBackoff(attempt, RESEARCH_BASE_DELAY_MS, RESEARCH_MAX_DELAY_MS);
            mcpLog("warning", `Empty response, retrying in ${delayMs}ms...`, "research");
            await sleep(delayMs, signal);
            continue;
          }
        }
        return { raw: { response, choice, message } };
      } catch (error) {
        lastError = classifyError(error);
        const err = error;
        mcpLog("error", `Error with ${model} (attempt ${attempt + 1}): ${lastError.message} (status: ${err.status})`, "research");
        if (this.isRetryableError(error) && attempt < MAX_RESEARCH_RETRIES) {
          const delayMs = calculateBackoff(attempt, RESEARCH_BASE_DELAY_MS, RESEARCH_MAX_DELAY_MS);
          mcpLog("warning", `Retrying in ${delayMs}ms...`, "research");
          try {
            // sleep() rejects if `signal` aborts — stop retrying then.
            await sleep(delayMs, signal);
          } catch {
            break;
          }
          continue;
        }
        break;
      }
    }
    return {
      error: lastError || {
        code: ErrorCode.UNKNOWN_ERROR,
        message: "Unknown research error",
        retryable: false
      }
    };
  }
  /**
   * Execute a single research request with a specific model.
   * Thin orchestrator: build payload → call API → parse response.
   * On API failure, returns a stub response carrying `result.error`.
   */
  async executeResearch(model, messages, options, signal) {
    const payload = buildResearchPayload(model, messages, options);
    const result = await this.callOpenRouter(payload, model, signal);
    if (result.raw) {
      return parseResearchResponse(result.raw, model);
    }
    return {
      id: "",
      model,
      created: Date.now(),
      content: "",
      error: result.error
    };
  }
  /**
   * Perform research with retry logic and fallback to secondary model.
   * Returns a ResearchResponse - may contain error field on failure.
   * NEVER throws - always returns a valid response object.
   *
   * Flow: validate the question → try RESEARCH.MODEL → on error, try
   * RESEARCH.FALLBACK_MODEL (when set and different) → if both fail,
   * return the fallback result with a combined failure message in
   * `content`.
   */
  async research(params, signal) {
    const {
      question,
      systemPrompt,
      reasoningEffort = RESEARCH.REASONING_EFFORT,
      maxSearchResults = RESEARCH.MAX_URLS,
      maxTokens = DEFAULT_MAX_TOKENS,
      temperature = RESEARCH_TEMPERATURE,
      responseFormat
    } = params;
    // Reject blank/whitespace-only questions up front.
    if (!question?.trim()) {
      return {
        id: "",
        model: RESEARCH.MODEL,
        created: Date.now(),
        content: "",
        error: {
          code: ErrorCode.INVALID_INPUT,
          message: "Research question cannot be empty",
          retryable: false
        }
      };
    }
    const messages = [];
    if (systemPrompt) {
      messages.push({ role: "system", content: systemPrompt });
    }
    messages.push({ role: "user", content: question });
    const options = { temperature, reasoningEffort, maxTokens, maxSearchResults, responseFormat };
    mcpLog("info", `Trying primary model: ${RESEARCH.MODEL}`, "research");
    const primaryResult = await this.executeResearch(RESEARCH.MODEL, messages, options, signal);
    if (!primaryResult.error) {
      return primaryResult;
    }
    // Primary failed — try the fallback model if it is distinct.
    if (RESEARCH.FALLBACK_MODEL && RESEARCH.FALLBACK_MODEL !== RESEARCH.MODEL) {
      mcpLog("warning", `Primary model failed, trying fallback: ${RESEARCH.FALLBACK_MODEL}`, "research");
      const fallbackResult = await this.executeResearch(RESEARCH.FALLBACK_MODEL, messages, options, signal);
      if (!fallbackResult.error) {
        return fallbackResult;
      }
      // Both failed — surface the fallback error, summarizing both in content.
      mcpLog("error", `Both models failed. Primary: ${primaryResult.error?.message}, Fallback: ${fallbackResult.error?.message}`, "research");
      return {
        ...fallbackResult,
        content: `Research failed with both models. Primary (${RESEARCH.MODEL}): ${primaryResult.error?.message}. Fallback (${RESEARCH.FALLBACK_MODEL}): ${fallbackResult.error?.message}`
      };
    }
    // No usable fallback — return the primary failure.
    mcpLog("error", `All attempts failed: ${primaryResult.error?.message}`, "research");
    return {
      ...primaryResult,
      content: `Research failed: ${primaryResult.error?.message}`
    };
  }
}
276
// Public module surface: concurrency/retry/temperature tuning constants
// consumed by callers, plus the research client class itself.
export {
  DEFAULT_RESEARCH_CONCURRENCY,
  MAX_RESEARCH_RETRIES,
  RESEARCH_TEMPERATURE,
  ResearchClient
};
//# sourceMappingURL=research.js.map
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../../src/clients/research.ts"],
4
+ "sourcesContent": ["/**\n * Deep Research Client\n * Handles research API requests with web search capabilities\n * Implements robust retry logic and NEVER crashes the server\n */\n\nimport OpenAI from 'openai';\nimport { RESEARCH } from '../config/index.js';\nimport { calculateBackoff } from '../utils/retry.js';\nimport {\n classifyError,\n sleep,\n ErrorCode,\n withRequestTimeout,\n withStallProtection,\n type StructuredError,\n} from '../utils/errors.js';\nimport { mcpLog } from '../utils/logger.js';\n\n// \u2500\u2500 Constants \u2500\u2500\n\nconst DEFAULT_RESEARCH_CONCURRENCY = 3 as const;\nconst MAX_RESEARCH_RETRIES = 3 as const;\nconst RESEARCH_TEMPERATURE = 0.3 as const;\nconst RESEARCH_BASE_DELAY_MS = 5_000 as const;\nconst RESEARCH_MAX_DELAY_MS = 60_000 as const;\nconst DEFAULT_MAX_TOKENS = 32_000 as const;\nconst MAX_SEARCH_RESULTS_CAP = 30 as const;\n\n/** Hard deadline for a single research API call */\nconst RESEARCH_REQUEST_DEADLINE_MS = 120_000 as const;\n\n/** Stall detection \u2014 abort if no response for this duration */\nconst RESEARCH_STALL_TIMEOUT_MS = 90_000 as const;\n\n// Retryable status codes for research API\nconst RETRYABLE_RESEARCH_CODES = new Set([429, 500, 502, 503, 504]);\n\n// Models that use Gemini-style google_search tool instead of search_parameters\nconst GEMINI_STYLE_MODELS = new Set([\n 'google/gemini-2.5-flash',\n 'google/gemini-2.5-pro',\n 'google/gemini-2.0-flash',\n 'google/gemini-pro',\n]);\n\n// \u2500\u2500 Interfaces \u2500\u2500\n\ninterface ResearchParams {\n readonly question: string;\n readonly systemPrompt?: string;\n readonly reasoningEffort?: 'low' | 'medium' | 'high';\n readonly maxSearchResults?: number;\n readonly maxTokens?: number;\n readonly temperature?: number;\n readonly responseFormat?: { readonly type: 'json_object' | 'text' };\n}\n\nexport interface ResearchResponse {\n readonly id: string;\n readonly model: string;\n readonly created: number;\n readonly content: string;\n readonly 
finishReason?: string;\n readonly usage?: {\n readonly promptTokens: number;\n readonly completionTokens: number;\n readonly totalTokens: number;\n readonly sourcesUsed?: number;\n };\n readonly annotations?: ReadonlyArray<{\n readonly type: 'url_citation';\n readonly url: string;\n readonly title: string;\n readonly startIndex: number;\n readonly endIndex: number;\n }>;\n readonly error?: StructuredError;\n}\n\n/** OpenRouter extension for response messages with annotations */\ninterface OpenRouterMessage {\n readonly role: string;\n readonly content: string | null;\n readonly annotations?: readonly OpenRouterAnnotation[];\n}\n\n/** Single annotation from OpenRouter response */\ninterface OpenRouterAnnotation {\n readonly type: string;\n readonly url_citation?: {\n readonly url: string;\n readonly title?: string;\n readonly start_index?: number;\n readonly end_index?: number;\n };\n readonly [key: string]: unknown;\n}\n\n/** OpenRouter extensions to usage stats */\ninterface OpenRouterUsage {\n readonly prompt_tokens: number;\n readonly completion_tokens: number;\n readonly total_tokens: number;\n readonly num_sources_used?: number;\n}\n\n/** Raw response shape from OpenRouter API call */\ninterface OpenRouterRawResponse {\n readonly response: OpenAI.ChatCompletion;\n readonly choice: OpenAI.ChatCompletion.Choice | undefined;\n readonly message: OpenRouterMessage | undefined;\n}\n\n/** Options passed through the research execution pipeline */\ninterface ResearchExecutionOptions {\n readonly temperature: number;\n readonly reasoningEffort: 'low' | 'medium' | 'high';\n readonly maxTokens: number;\n readonly maxSearchResults: number;\n readonly responseFormat?: { readonly type: 'json_object' | 'text' };\n}\n\n// \u2500\u2500 Helpers \u2500\u2500\n\n/**\n * Check if a model uses Gemini-style google_search tool\n */\nfunction isGeminiStyleModel(model: string): boolean {\n return GEMINI_STYLE_MODELS.has(model) || model.startsWith('google/gemini');\n}\n\n/**\n * Build 
the OpenRouter request payload based on model type.\n * Gemini models use tools with google_search, others use search_parameters.\n */\nfunction buildResearchPayload(\n model: string,\n messages: ReadonlyArray<{ readonly role: 'system' | 'user'; readonly content: string }>,\n options: ResearchExecutionOptions,\n): Record<string, unknown> {\n const { temperature, reasoningEffort, maxTokens, maxSearchResults, responseFormat } = options;\n\n if (isGeminiStyleModel(model)) {\n const payload: Record<string, unknown> = {\n model,\n messages,\n temperature,\n max_tokens: maxTokens,\n tools: [\n {\n type: 'google_search',\n googleSearch: {},\n },\n ],\n };\n if (responseFormat) {\n payload.response_format = responseFormat;\n }\n return payload;\n }\n\n // Default: use search_parameters (for Grok, Perplexity, etc.)\n const payload: Record<string, unknown> = {\n model,\n messages,\n temperature,\n reasoning_effort: reasoningEffort,\n max_completion_tokens: maxTokens,\n search_parameters: {\n mode: 'on',\n max_search_results: Math.min(maxSearchResults, MAX_SEARCH_RESULTS_CAP),\n return_citations: true,\n sources: [{ type: 'web' }],\n },\n };\n if (responseFormat) {\n payload.response_format = responseFormat;\n }\n return payload;\n}\n\n/**\n * Parse an OpenRouter raw response into a structured ResearchResponse.\n * Extracts content, token usage, and citation annotations.\n */\nfunction parseResearchResponse(\n raw: OpenRouterRawResponse,\n model: string,\n): ResearchResponse {\n const { response, choice, message } = raw;\n\n return {\n id: response.id || '',\n model: response.model || model,\n created: response.created || Date.now(),\n content: message?.content || '',\n finishReason: choice?.finish_reason ?? undefined,\n usage: response.usage ? 
{\n promptTokens: response.usage.prompt_tokens,\n completionTokens: response.usage.completion_tokens,\n totalTokens: response.usage.total_tokens,\n sourcesUsed: (response.usage as unknown as OpenRouterUsage).num_sources_used,\n } : undefined,\n annotations: message?.annotations?.map((a: OpenRouterAnnotation) => ({\n type: 'url_citation' as const,\n url: a.url_citation?.url || '',\n title: a.url_citation?.title || '',\n startIndex: a.url_citation?.start_index || 0,\n endIndex: a.url_citation?.end_index || 0,\n })),\n };\n}\n\n// \u2500\u2500 Client \u2500\u2500\n\nexport { DEFAULT_RESEARCH_CONCURRENCY, MAX_RESEARCH_RETRIES, RESEARCH_TEMPERATURE };\n\nexport class ResearchClient {\n private client: OpenAI;\n\n constructor() {\n if (!RESEARCH.API_KEY) {\n throw new Error('Research capability is not configured. Please set up the required API credentials.');\n }\n\n this.client = new OpenAI({\n baseURL: RESEARCH.BASE_URL,\n apiKey: RESEARCH.API_KEY,\n timeout: RESEARCH.TIMEOUT_MS,\n maxRetries: 0, // We handle retries ourselves\n });\n }\n\n /**\n * Check if an error is retryable for research requests\n */\n private isRetryableError(error: unknown): boolean {\n if (!error) return false;\n\n const err = error as {\n status?: number;\n code?: string;\n message?: string;\n };\n\n if (err.status && RETRYABLE_RESEARCH_CODES.has(err.status)) {\n return true;\n }\n\n const message = (err.message || '').toLowerCase();\n if (\n message.includes('rate limit') ||\n message.includes('timeout') ||\n message.includes('timed out') ||\n message.includes('service unavailable') ||\n message.includes('connection') ||\n message.includes('stalled')\n ) {\n return true;\n }\n\n // Check for stall protection errors\n if (err.code === 'ESTALLED' || err.code === 'ETIMEDOUT') {\n return true;\n }\n\n return false;\n }\n\n /**\n * Make the API call to OpenRouter with retry logic.\n * Returns the raw response or null if all attempts fail.\n */\n private async callOpenRouter(\n payload: 
Record<string, unknown>,\n model: string,\n signal?: AbortSignal,\n ): Promise<{ raw: OpenRouterRawResponse; error?: undefined } | { raw?: undefined; error: StructuredError }> {\n let lastError: StructuredError | undefined;\n\n for (let attempt = 0; attempt <= MAX_RESEARCH_RETRIES; attempt++) {\n try {\n if (attempt > 0) {\n mcpLog('warning', `Retry attempt ${attempt}/${MAX_RESEARCH_RETRIES} for ${model}`, 'research');\n }\n\n const response = await withStallProtection(\n (stallSignal) => withRequestTimeout(\n (timeoutSignal) => {\n // Merge all abort signals (external + stall + timeout)\n const mergedController = new AbortController();\n const abortMerged = () => mergedController.abort();\n signal?.addEventListener('abort', abortMerged, { once: true });\n stallSignal.addEventListener('abort', abortMerged, { once: true });\n timeoutSignal.addEventListener('abort', abortMerged, { once: true });\n\n return this.client.chat.completions.create(\n payload as unknown as OpenAI.ChatCompletionCreateParamsNonStreaming,\n { signal: mergedController.signal }\n ).finally(() => {\n signal?.removeEventListener('abort', abortMerged);\n stallSignal.removeEventListener('abort', abortMerged);\n timeoutSignal.removeEventListener('abort', abortMerged);\n });\n },\n RESEARCH_REQUEST_DEADLINE_MS,\n `research (${model})`,\n ),\n RESEARCH_STALL_TIMEOUT_MS,\n 2,\n `research (${model})`,\n );\n const choice = response.choices?.[0];\n const message = choice?.message as unknown as OpenRouterMessage;\n\n // Validate response \u2014 retry on empty\n if (!message?.content && !choice) {\n lastError = {\n code: ErrorCode.INTERNAL_ERROR,\n message: 'Research API returned empty response',\n retryable: true,\n };\n\n if (attempt < MAX_RESEARCH_RETRIES) {\n const delayMs = calculateBackoff(attempt, RESEARCH_BASE_DELAY_MS, RESEARCH_MAX_DELAY_MS);\n mcpLog('warning', `Empty response, retrying in ${delayMs}ms...`, 'research');\n await sleep(delayMs, signal);\n continue;\n }\n }\n\n return { raw: { 
response, choice, message } };\n\n } catch (error: unknown) {\n lastError = classifyError(error);\n\n const err = error as { status?: number; message?: string };\n mcpLog('error', `Error with ${model} (attempt ${attempt + 1}): ${lastError.message} (status: ${err.status})`, 'research');\n\n if (this.isRetryableError(error) && attempt < MAX_RESEARCH_RETRIES) {\n const delayMs = calculateBackoff(attempt, RESEARCH_BASE_DELAY_MS, RESEARCH_MAX_DELAY_MS);\n mcpLog('warning', `Retrying in ${delayMs}ms...`, 'research');\n try { await sleep(delayMs, signal); } catch { break; }\n continue;\n }\n\n break;\n }\n }\n\n return {\n error: lastError || {\n code: ErrorCode.UNKNOWN_ERROR,\n message: 'Unknown research error',\n retryable: false,\n },\n };\n }\n\n /**\n * Execute a single research request with a specific model.\n * Thin orchestrator: build payload \u2192 call API \u2192 parse response.\n */\n private async executeResearch(\n model: string,\n messages: ReadonlyArray<{ readonly role: 'system' | 'user'; readonly content: string }>,\n options: ResearchExecutionOptions,\n signal?: AbortSignal,\n ): Promise<ResearchResponse> {\n const payload = buildResearchPayload(model, messages, options);\n const result = await this.callOpenRouter(payload, model, signal);\n\n if (result.raw) {\n return parseResearchResponse(result.raw, model);\n }\n\n return {\n id: '',\n model,\n created: Date.now(),\n content: '',\n error: result.error,\n };\n }\n\n /**\n * Perform research with retry logic and fallback to secondary model\n * Returns a ResearchResponse - may contain error field on failure\n * NEVER throws - always returns a valid response object\n */\n async research(params: ResearchParams, signal?: AbortSignal): Promise<ResearchResponse> {\n const {\n question,\n systemPrompt,\n reasoningEffort = RESEARCH.REASONING_EFFORT,\n maxSearchResults = RESEARCH.MAX_URLS,\n maxTokens = DEFAULT_MAX_TOKENS,\n temperature = RESEARCH_TEMPERATURE,\n responseFormat,\n } = params;\n\n // Validate 
input\n if (!question?.trim()) {\n return {\n id: '',\n model: RESEARCH.MODEL,\n created: Date.now(),\n content: '',\n error: {\n code: ErrorCode.INVALID_INPUT,\n message: 'Research question cannot be empty',\n retryable: false,\n },\n };\n }\n\n const messages: Array<{ role: 'system' | 'user'; content: string }> = [];\n if (systemPrompt) {\n messages.push({ role: 'system', content: systemPrompt });\n }\n messages.push({ role: 'user', content: question });\n\n const options: ResearchExecutionOptions = { temperature, reasoningEffort, maxTokens, maxSearchResults, responseFormat };\n\n // Try primary model first\n mcpLog('info', `Trying primary model: ${RESEARCH.MODEL}`, 'research');\n const primaryResult = await this.executeResearch(RESEARCH.MODEL, messages, options, signal);\n\n if (!primaryResult.error) {\n return primaryResult;\n }\n\n // Primary failed - try fallback model if different\n if (RESEARCH.FALLBACK_MODEL && RESEARCH.FALLBACK_MODEL !== RESEARCH.MODEL) {\n mcpLog('warning', `Primary model failed, trying fallback: ${RESEARCH.FALLBACK_MODEL}`, 'research');\n const fallbackResult = await this.executeResearch(RESEARCH.FALLBACK_MODEL, messages, options, signal);\n\n if (!fallbackResult.error) {\n return fallbackResult;\n }\n\n // Both failed - return the fallback error (more recent)\n mcpLog('error', `Both models failed. Primary: ${primaryResult.error?.message}, Fallback: ${fallbackResult.error?.message}`, 'research');\n return {\n ...fallbackResult,\n content: `Research failed with both models. Primary (${RESEARCH.MODEL}): ${primaryResult.error?.message}. Fallback (${RESEARCH.FALLBACK_MODEL}): ${fallbackResult.error?.message}`,\n };\n }\n\n // No fallback or same model - return primary error\n mcpLog('error', `All attempts failed: ${primaryResult.error?.message}`, 'research');\n return {\n ...primaryResult,\n content: `Research failed: ${primaryResult.error?.message}`,\n };\n }\n}\n"],
5
+ "mappings": "AAMA,OAAO,YAAY;AACnB,SAAS,gBAAgB;AACzB,SAAS,wBAAwB;AACjC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,cAAc;AAIvB,MAAM,+BAA+B;AACrC,MAAM,uBAAuB;AAC7B,MAAM,uBAAuB;AAC7B,MAAM,yBAAyB;AAC/B,MAAM,wBAAwB;AAC9B,MAAM,qBAAqB;AAC3B,MAAM,yBAAyB;AAG/B,MAAM,+BAA+B;AAGrC,MAAM,4BAA4B;AAGlC,MAAM,2BAA2B,oBAAI,IAAI,CAAC,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC;AAGlE,MAAM,sBAAsB,oBAAI,IAAI;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAoFD,SAAS,mBAAmB,OAAwB;AAClD,SAAO,oBAAoB,IAAI,KAAK,KAAK,MAAM,WAAW,eAAe;AAC3E;AAMA,SAAS,qBACP,OACA,UACA,SACyB;AACzB,QAAM,EAAE,aAAa,iBAAiB,WAAW,kBAAkB,eAAe,IAAI;AAEtF,MAAI,mBAAmB,KAAK,GAAG;AAC7B,UAAMA,WAAmC;AAAA,MACvC;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ,OAAO;AAAA,QACL;AAAA,UACE,MAAM;AAAA,UACN,cAAc,CAAC;AAAA,QACjB;AAAA,MACF;AAAA,IACF;AACA,QAAI,gBAAgB;AAClB,MAAAA,SAAQ,kBAAkB;AAAA,IAC5B;AACA,WAAOA;AAAA,EACT;AAGA,QAAM,UAAmC;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IAClB,uBAAuB;AAAA,IACvB,mBAAmB;AAAA,MACjB,MAAM;AAAA,MACN,oBAAoB,KAAK,IAAI,kBAAkB,sBAAsB;AAAA,MACrE,kBAAkB;AAAA,MAClB,SAAS,CAAC,EAAE,MAAM,MAAM,CAAC;AAAA,IAC3B;AAAA,EACF;AACA,MAAI,gBAAgB;AAClB,YAAQ,kBAAkB;AAAA,EAC5B;AACA,SAAO;AACT;AAMA,SAAS,sBACP,KACA,OACkB;AAClB,QAAM,EAAE,UAAU,QAAQ,QAAQ,IAAI;AAEtC,SAAO;AAAA,IACL,IAAI,SAAS,MAAM;AAAA,IACnB,OAAO,SAAS,SAAS;AAAA,IACzB,SAAS,SAAS,WAAW,KAAK,IAAI;AAAA,IACtC,SAAS,SAAS,WAAW;AAAA,IAC7B,cAAc,QAAQ,iBAAiB;AAAA,IACvC,OAAO,SAAS,QAAQ;AAAA,MACtB,cAAc,SAAS,MAAM;AAAA,MAC7B,kBAAkB,SAAS,MAAM;AAAA,MACjC,aAAa,SAAS,MAAM;AAAA,MAC5B,aAAc,SAAS,MAAqC;AAAA,IAC9D,IAAI;AAAA,IACJ,aAAa,SAAS,aAAa,IAAI,CAAC,OAA6B;AAAA,MACnE,MAAM;AAAA,MACN,KAAK,EAAE,cAAc,OAAO;AAAA,MAC5B,OAAO,EAAE,cAAc,SAAS;AAAA,MAChC,YAAY,EAAE,cAAc,eAAe;AAAA,MAC3C,UAAU,EAAE,cAAc,aAAa;AAAA,IACzC,EAAE;AAAA,EACJ;AACF;AAMO,MAAM,eAAe;AAAA,EAClB;AAAA,EAER,cAAc;AACZ,QAAI,CAAC,SAAS,SAAS;AACrB,YAAM,IAAI,MAAM,oFAAoF;AAAA,IACtG;AAEA,SAAK,SAAS,IAAI,OAAO;AAAA,MACvB,SAAS,SAAS;AAAA,MAClB,QAAQ,SAAS;AAAA,MACjB,SAAS,SAAS;AAAA,MAClB,YAAY;AAAA;AAAA,IACd,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA
,EAKQ,iBAAiB,OAAyB;AAChD,QAAI,CAAC,MAAO,QAAO;AAEnB,UAAM,MAAM;AAMZ,QAAI,IAAI,UAAU,yBAAyB,IAAI,IAAI,MAAM,GAAG;AAC1D,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,IAAI,WAAW,IAAI,YAAY;AAChD,QACE,QAAQ,SAAS,YAAY,KAC7B,QAAQ,SAAS,SAAS,KAC1B,QAAQ,SAAS,WAAW,KAC5B,QAAQ,SAAS,qBAAqB,KACtC,QAAQ,SAAS,YAAY,KAC7B,QAAQ,SAAS,SAAS,GAC1B;AACA,aAAO;AAAA,IACT;AAGA,QAAI,IAAI,SAAS,cAAc,IAAI,SAAS,aAAa;AACvD,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,eACZ,SACA,OACA,QAC0G;AAC1G,QAAI;AAEJ,aAAS,UAAU,GAAG,WAAW,sBAAsB,WAAW;AAChE,UAAI;AACF,YAAI,UAAU,GAAG;AACf,iBAAO,WAAW,iBAAiB,OAAO,IAAI,oBAAoB,QAAQ,KAAK,IAAI,UAAU;AAAA,QAC/F;AAEA,cAAM,WAAW,MAAM;AAAA,UACrB,CAAC,gBAAgB;AAAA,YACf,CAAC,kBAAkB;AAEjB,oBAAM,mBAAmB,IAAI,gBAAgB;AAC7C,oBAAM,cAAc,MAAM,iBAAiB,MAAM;AACjD,sBAAQ,iBAAiB,SAAS,aAAa,EAAE,MAAM,KAAK,CAAC;AAC7D,0BAAY,iBAAiB,SAAS,aAAa,EAAE,MAAM,KAAK,CAAC;AACjE,4BAAc,iBAAiB,SAAS,aAAa,EAAE,MAAM,KAAK,CAAC;AAEnE,qBAAO,KAAK,OAAO,KAAK,YAAY;AAAA,gBAClC;AAAA,gBACA,EAAE,QAAQ,iBAAiB,OAAO;AAAA,cACpC,EAAE,QAAQ,MAAM;AACd,wBAAQ,oBAAoB,SAAS,WAAW;AAChD,4BAAY,oBAAoB,SAAS,WAAW;AACpD,8BAAc,oBAAoB,SAAS,WAAW;AAAA,cACxD,CAAC;AAAA,YACH;AAAA,YACA;AAAA,YACA,aAAa,KAAK;AAAA,UACpB;AAAA,UACA;AAAA,UACA;AAAA,UACA,aAAa,KAAK;AAAA,QACpB;AACA,cAAM,SAAS,SAAS,UAAU,CAAC;AACnC,cAAM,UAAU,QAAQ;AAGxB,YAAI,CAAC,SAAS,WAAW,CAAC,QAAQ;AAChC,sBAAY;AAAA,YACV,MAAM,UAAU;AAAA,YAChB,SAAS;AAAA,YACT,WAAW;AAAA,UACb;AAEA,cAAI,UAAU,sBAAsB;AAClC,kBAAM,UAAU,iBAAiB,SAAS,wBAAwB,qBAAqB;AACvF,mBAAO,WAAW,+BAA+B,OAAO,SAAS,UAAU;AAC3E,kBAAM,MAAM,SAAS,MAAM;AAC3B;AAAA,UACF;AAAA,QACF;AAEA,eAAO,EAAE,KAAK,EAAE,UAAU,QAAQ,QAAQ,EAAE;AAAA,MAE9C,SAAS,OAAgB;AACvB,oBAAY,cAAc,KAAK;AAE/B,cAAM,MAAM;AACZ,eAAO,SAAS,cAAc,KAAK,aAAa,UAAU,CAAC,MAAM,UAAU,OAAO,aAAa,IAAI,MAAM,KAAK,UAAU;AAExH,YAAI,KAAK,iBAAiB,KAAK,KAAK,UAAU,sBAAsB;AAClE,gBAAM,UAAU,iBAAiB,SAAS,wBAAwB,qBAAqB;AACvF,iBAAO,WAAW,eAAe,OAAO,SAAS,UAAU;AAC3D,cAAI;AAAE,kBAAM,MAAM,SAAS,MAAM;AAAA,UAAG,QAAQ;AAAE;AAAA,UAAO;AACrD;AAAA,QACF;AAEA;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,OAAO,aAAa;AAAA,QAClB,MAAM,UAAU;AAAA,QAChB,SAAS;AAAA
,QACT,WAAW;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,gBACZ,OACA,UACA,SACA,QAC2B;AAC3B,UAAM,UAAU,qBAAqB,OAAO,UAAU,OAAO;AAC7D,UAAM,SAAS,MAAM,KAAK,eAAe,SAAS,OAAO,MAAM;AAE/D,QAAI,OAAO,KAAK;AACd,aAAO,sBAAsB,OAAO,KAAK,KAAK;AAAA,IAChD;AAEA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ;AAAA,MACA,SAAS,KAAK,IAAI;AAAA,MAClB,SAAS;AAAA,MACT,OAAO,OAAO;AAAA,IAChB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,SAAS,QAAwB,QAAiD;AACtF,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA,kBAAkB,SAAS;AAAA,MAC3B,mBAAmB,SAAS;AAAA,MAC5B,YAAY;AAAA,MACZ,cAAc;AAAA,MACd;AAAA,IACF,IAAI;AAGJ,QAAI,CAAC,UAAU,KAAK,GAAG;AACrB,aAAO;AAAA,QACL,IAAI;AAAA,QACJ,OAAO,SAAS;AAAA,QAChB,SAAS,KAAK,IAAI;AAAA,QAClB,SAAS;AAAA,QACT,OAAO;AAAA,UACL,MAAM,UAAU;AAAA,UAChB,SAAS;AAAA,UACT,WAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAgE,CAAC;AACvE,QAAI,cAAc;AAChB,eAAS,KAAK,EAAE,MAAM,UAAU,SAAS,aAAa,CAAC;AAAA,IACzD;AACA,aAAS,KAAK,EAAE,MAAM,QAAQ,SAAS,SAAS,CAAC;AAEjD,UAAM,UAAoC,EAAE,aAAa,iBAAiB,WAAW,kBAAkB,eAAe;AAGtH,WAAO,QAAQ,yBAAyB,SAAS,KAAK,IAAI,UAAU;AACpE,UAAM,gBAAgB,MAAM,KAAK,gBAAgB,SAAS,OAAO,UAAU,SAAS,MAAM;AAE1F,QAAI,CAAC,cAAc,OAAO;AACxB,aAAO;AAAA,IACT;AAGA,QAAI,SAAS,kBAAkB,SAAS,mBAAmB,SAAS,OAAO;AACzE,aAAO,WAAW,0CAA0C,SAAS,cAAc,IAAI,UAAU;AACjG,YAAM,iBAAiB,MAAM,KAAK,gBAAgB,SAAS,gBAAgB,UAAU,SAAS,MAAM;AAEpG,UAAI,CAAC,eAAe,OAAO;AACzB,eAAO;AAAA,MACT;AAGA,aAAO,SAAS,gCAAgC,cAAc,OAAO,OAAO,eAAe,eAAe,OAAO,OAAO,IAAI,UAAU;AACtI,aAAO;AAAA,QACL,GAAG;AAAA,QACH,SAAS,8CAA8C,SAAS,KAAK,MAAM,cAAc,OAAO,OAAO,eAAe,SAAS,cAAc,MAAM,eAAe,OAAO,OAAO;AAAA,MAClL;AAAA,IACF;AAGA,WAAO,SAAS,wBAAwB,cAAc,OAAO,OAAO,IAAI,UAAU;AAClF,WAAO;AAAA,MACL,GAAG;AAAA,MACH,SAAS,oBAAoB,cAAc,OAAO,OAAO;AAAA,IAC3D;AAAA,EACF;AACF;",
6
+ "names": ["payload"]
7
+ }
@@ -0,0 +1,72 @@
1
+ /**
2
+ * Web Scraper Client
3
+ * Generic interface for URL scraping with automatic fallback modes
4
+ * Implements robust error handling that NEVER crashes
5
+ */
6
+ import { type StructuredError } from '../utils/errors.js';
7
+ interface ScrapeRequest {
8
+ readonly url: string;
9
+ readonly mode?: 'basic' | 'javascript';
10
+ readonly timeout?: number;
11
+ readonly country?: string;
12
+ }
13
+ interface ScrapeResponse {
14
+ readonly content: string;
15
+ readonly statusCode: number;
16
+ readonly credits: number;
17
+ readonly headers?: Record<string, string>;
18
+ readonly error?: StructuredError;
19
+ }
20
+ interface BatchScrapeResult {
21
+ readonly results: ReadonlyArray<ScrapeResponse & {
22
+ readonly url: string;
23
+ }>;
24
+ readonly batchesProcessed: number;
25
+ readonly totalAttempted: number;
26
+ readonly rateLimitHits: number;
27
+ }
28
+ export declare class ScraperClient {
29
+ private apiKey;
30
+ private baseURL;
31
+ constructor(apiKey?: string);
32
+ /**
33
+ * Scrape a single URL with retry logic
34
+ * NEVER throws - always returns a ScrapeResponse (possibly with error)
35
+ */
36
+ scrape(request: ScrapeRequest, maxRetries?: 1): Promise<ScrapeResponse>;
37
+ /**
38
+ * Scrape with automatic fallback through different modes
39
+ * NEVER throws - always returns a ScrapeResponse
40
+ */
41
+ scrapeWithFallback(url: string, options?: {
42
+ timeout?: number;
43
+ }): Promise<ScrapeResponse>;
44
+ /**
45
+ * Execute a single fallback attempt and determine whether to continue.
46
+ * Returns { done: true } on success/terminal or { done: false } to try the next mode.
47
+ */
48
+ private tryFallbackAttempt;
49
+ /**
50
+ * Scrape multiple URLs with batching
51
+ * NEVER throws - always returns results array
52
+ */
53
+ scrapeMultiple(urls: string[], options?: {
54
+ timeout?: number;
55
+ }): Promise<Array<ScrapeResponse & {
56
+ url: string;
57
+ }>>;
58
+ /**
59
+ * Batch scrape with progress callback
60
+ * NEVER throws - uses Promise.allSettled internally
61
+ */
62
+ batchScrape(urls: string[], options?: {
63
+ timeout?: number;
64
+ }, onBatchComplete?: (batchNum: number, totalBatches: number, processed: number) => void): Promise<BatchScrapeResult>;
65
+ /**
66
+ * Process a single batch of URLs
67
+ * NEVER throws
68
+ */
69
+ private processBatch;
70
+ }
71
+ export {};
72
+ //# sourceMappingURL=scraper.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scraper.d.ts","sourceRoot":"","sources":["../../../src/clients/scraper.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,EAKL,KAAK,eAAe,EACrB,MAAM,oBAAoB,CAAC;AAmB5B,UAAU,aAAa;IACrB,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,IAAI,CAAC,EAAE,OAAO,GAAG,YAAY,CAAC;IACvC,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED,UAAU,cAAc;IACtB,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC1C,QAAQ,CAAC,KAAK,CAAC,EAAE,eAAe,CAAC;CAClC;AAED,UAAU,iBAAiB;IACzB,QAAQ,CAAC,OAAO,EAAE,aAAa,CAAC,cAAc,GAAG;QAAE,QAAQ,CAAC,GAAG,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAC3E,QAAQ,CAAC,gBAAgB,EAAE,MAAM,CAAC;IAClC,QAAQ,CAAC,cAAc,EAAE,MAAM,CAAC;IAChC,QAAQ,CAAC,aAAa,EAAE,MAAM,CAAC;CAChC;AAuBD,qBAAa,aAAa;IACxB,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,OAAO,CAA2B;gBAE9B,MAAM,CAAC,EAAE,MAAM;IAS3B;;;OAGG;IACG,MAAM,CAAC,OAAO,EAAE,aAAa,EAAE,UAAU,IAAc,GAAG,OAAO,CAAC,cAAc,CAAC;IA0JvF;;;OAGG;IACG,kBAAkB,CAAC,GAAG,EAAE,MAAM,EAAE,OAAO,GAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAO,GAAG,OAAO,CAAC,cAAc,CAAC;IAwClG;;;OAGG;YACW,kBAAkB;IAkDhC;;;OAGG;IACG,cAAc,CAAC,IAAI,EAAE,MAAM,EAAE,EAAE,OAAO,GAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAO,GAAG,OAAO,CAAC,KAAK,CAAC,cAAc,GAAG;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;IAa1H;;;OAGG;IACG,WAAW,CACf,IAAI,EAAE,MAAM,EAAE,EACd,OAAO,GAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAA;KAAO,EAClC,eAAe,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,KAAK,IAAI,GACpF,OAAO,CAAC,iBAAiB,CAAC;IAoE7B;;;OAGG;YACW,YAAY;CAoB3B"}