mcp-researchpowerpack-http 3.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127)
  1. package/README.md +124 -0
  2. package/dist/index.d.ts +3 -0
  3. package/dist/index.d.ts.map +1 -0
  4. package/dist/index.js +227 -0
  5. package/dist/index.js.map +7 -0
  6. package/dist/mcp-use.json +7 -0
  7. package/dist/src/clients/github.d.ts +83 -0
  8. package/dist/src/clients/github.d.ts.map +1 -0
  9. package/dist/src/clients/github.js +370 -0
  10. package/dist/src/clients/github.js.map +7 -0
  11. package/dist/src/clients/reddit.d.ts +60 -0
  12. package/dist/src/clients/reddit.d.ts.map +1 -0
  13. package/dist/src/clients/reddit.js +287 -0
  14. package/dist/src/clients/reddit.js.map +7 -0
  15. package/dist/src/clients/research.d.ts +67 -0
  16. package/dist/src/clients/research.d.ts.map +1 -0
  17. package/dist/src/clients/research.js +282 -0
  18. package/dist/src/clients/research.js.map +7 -0
  19. package/dist/src/clients/scraper.d.ts +72 -0
  20. package/dist/src/clients/scraper.d.ts.map +1 -0
  21. package/dist/src/clients/scraper.js +327 -0
  22. package/dist/src/clients/scraper.js.map +7 -0
  23. package/dist/src/clients/search.d.ts +57 -0
  24. package/dist/src/clients/search.d.ts.map +1 -0
  25. package/dist/src/clients/search.js +218 -0
  26. package/dist/src/clients/search.js.map +7 -0
  27. package/dist/src/config/index.d.ts +93 -0
  28. package/dist/src/config/index.d.ts.map +1 -0
  29. package/dist/src/config/index.js +218 -0
  30. package/dist/src/config/index.js.map +7 -0
  31. package/dist/src/schemas/deep-research.d.ts +40 -0
  32. package/dist/src/schemas/deep-research.d.ts.map +1 -0
  33. package/dist/src/schemas/deep-research.js +216 -0
  34. package/dist/src/schemas/deep-research.js.map +7 -0
  35. package/dist/src/schemas/github-score.d.ts +50 -0
  36. package/dist/src/schemas/github-score.d.ts.map +1 -0
  37. package/dist/src/schemas/github-score.js +58 -0
  38. package/dist/src/schemas/github-score.js.map +7 -0
  39. package/dist/src/schemas/scrape-links.d.ts +23 -0
  40. package/dist/src/schemas/scrape-links.d.ts.map +1 -0
  41. package/dist/src/schemas/scrape-links.js +32 -0
  42. package/dist/src/schemas/scrape-links.js.map +7 -0
  43. package/dist/src/schemas/web-search.d.ts +18 -0
  44. package/dist/src/schemas/web-search.d.ts.map +1 -0
  45. package/dist/src/schemas/web-search.js +28 -0
  46. package/dist/src/schemas/web-search.js.map +7 -0
  47. package/dist/src/scoring/github-quality.d.ts +142 -0
  48. package/dist/src/scoring/github-quality.d.ts.map +1 -0
  49. package/dist/src/scoring/github-quality.js +202 -0
  50. package/dist/src/scoring/github-quality.js.map +7 -0
  51. package/dist/src/services/file-attachment.d.ts +30 -0
  52. package/dist/src/services/file-attachment.d.ts.map +1 -0
  53. package/dist/src/services/file-attachment.js +205 -0
  54. package/dist/src/services/file-attachment.js.map +7 -0
  55. package/dist/src/services/llm-processor.d.ts +29 -0
  56. package/dist/src/services/llm-processor.d.ts.map +1 -0
  57. package/dist/src/services/llm-processor.js +206 -0
  58. package/dist/src/services/llm-processor.js.map +7 -0
  59. package/dist/src/services/markdown-cleaner.d.ts +8 -0
  60. package/dist/src/services/markdown-cleaner.d.ts.map +1 -0
  61. package/dist/src/services/markdown-cleaner.js +63 -0
  62. package/dist/src/services/markdown-cleaner.js.map +7 -0
  63. package/dist/src/tools/github-score.d.ts +12 -0
  64. package/dist/src/tools/github-score.d.ts.map +1 -0
  65. package/dist/src/tools/github-score.js +306 -0
  66. package/dist/src/tools/github-score.js.map +7 -0
  67. package/dist/src/tools/mcp-helpers.d.ts +27 -0
  68. package/dist/src/tools/mcp-helpers.d.ts.map +1 -0
  69. package/dist/src/tools/mcp-helpers.js +47 -0
  70. package/dist/src/tools/mcp-helpers.js.map +7 -0
  71. package/dist/src/tools/reddit.d.ts +54 -0
  72. package/dist/src/tools/reddit.d.ts.map +1 -0
  73. package/dist/src/tools/reddit.js +498 -0
  74. package/dist/src/tools/reddit.js.map +7 -0
  75. package/dist/src/tools/registry.d.ts +3 -0
  76. package/dist/src/tools/registry.d.ts.map +1 -0
  77. package/dist/src/tools/registry.js +17 -0
  78. package/dist/src/tools/registry.js.map +7 -0
  79. package/dist/src/tools/research.d.ts +14 -0
  80. package/dist/src/tools/research.d.ts.map +1 -0
  81. package/dist/src/tools/research.js +250 -0
  82. package/dist/src/tools/research.js.map +7 -0
  83. package/dist/src/tools/scrape.d.ts +14 -0
  84. package/dist/src/tools/scrape.d.ts.map +1 -0
  85. package/dist/src/tools/scrape.js +290 -0
  86. package/dist/src/tools/scrape.js.map +7 -0
  87. package/dist/src/tools/search.d.ts +10 -0
  88. package/dist/src/tools/search.d.ts.map +1 -0
  89. package/dist/src/tools/search.js +197 -0
  90. package/dist/src/tools/search.js.map +7 -0
  91. package/dist/src/tools/utils.d.ts +105 -0
  92. package/dist/src/tools/utils.d.ts.map +1 -0
  93. package/dist/src/tools/utils.js +96 -0
  94. package/dist/src/tools/utils.js.map +7 -0
  95. package/dist/src/utils/concurrency.d.ts +28 -0
  96. package/dist/src/utils/concurrency.d.ts.map +1 -0
  97. package/dist/src/utils/concurrency.js +62 -0
  98. package/dist/src/utils/concurrency.js.map +7 -0
  99. package/dist/src/utils/errors.d.ts +95 -0
  100. package/dist/src/utils/errors.d.ts.map +1 -0
  101. package/dist/src/utils/errors.js +289 -0
  102. package/dist/src/utils/errors.js.map +7 -0
  103. package/dist/src/utils/logger.d.ts +33 -0
  104. package/dist/src/utils/logger.d.ts.map +1 -0
  105. package/dist/src/utils/logger.js +41 -0
  106. package/dist/src/utils/logger.js.map +7 -0
  107. package/dist/src/utils/markdown-formatter.d.ts +5 -0
  108. package/dist/src/utils/markdown-formatter.d.ts.map +1 -0
  109. package/dist/src/utils/markdown-formatter.js +15 -0
  110. package/dist/src/utils/markdown-formatter.js.map +7 -0
  111. package/dist/src/utils/response.d.ts +83 -0
  112. package/dist/src/utils/response.d.ts.map +1 -0
  113. package/dist/src/utils/response.js +109 -0
  114. package/dist/src/utils/response.js.map +7 -0
  115. package/dist/src/utils/retry.d.ts +43 -0
  116. package/dist/src/utils/retry.d.ts.map +1 -0
  117. package/dist/src/utils/retry.js +37 -0
  118. package/dist/src/utils/retry.js.map +7 -0
  119. package/dist/src/utils/url-aggregator.d.ts +92 -0
  120. package/dist/src/utils/url-aggregator.d.ts.map +1 -0
  121. package/dist/src/utils/url-aggregator.js +357 -0
  122. package/dist/src/utils/url-aggregator.js.map +7 -0
  123. package/dist/src/version.d.ts +28 -0
  124. package/dist/src/version.d.ts.map +1 -0
  125. package/dist/src/version.js +32 -0
  126. package/dist/src/version.js.map +7 -0
  127. package/package.json +73 -0
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../src/clients/search.ts"],
+ "sourcesContent": ["/**\n * Web Search Client\n * Generic interface for web search via Google (Serper implementation)\n * Implements robust error handling that NEVER crashes\n */\n\nimport { parseEnv } from '../config/index.js';\nimport {\n classifyError,\n fetchWithTimeout,\n sleep,\n ErrorCode,\n type StructuredError,\n} from '../utils/errors.js';\nimport { calculateBackoff } from '../utils/retry.js';\nimport { pMap } from '../utils/concurrency.js';\nimport { mcpLog } from '../utils/logger.js';\n\n// \u2500\u2500 Constants \u2500\u2500\n\nconst SERPER_API_URL = 'https://google.serper.dev/search' as const;\nconst DEFAULT_RESULTS_PER_KEYWORD = 10 as const;\nconst MAX_SEARCH_CONCURRENCY = 8 as const;\nconst MAX_RETRIES = 3 as const;\n\n// \u2500\u2500 Data Interfaces \u2500\u2500\n\ninterface SearchResult {\n readonly title: string;\n readonly link: string;\n readonly snippet: string;\n readonly date?: string;\n readonly position: number;\n}\n\nexport interface KeywordSearchResult {\n readonly keyword: string;\n readonly results: SearchResult[];\n readonly totalResults: number;\n readonly related: string[];\n readonly error?: StructuredError;\n}\n\ninterface MultipleSearchResponse {\n readonly searches: KeywordSearchResult[];\n readonly totalKeywords: number;\n readonly executionTime: number;\n readonly error?: StructuredError;\n}\n\nexport interface RedditSearchResult {\n readonly title: string;\n readonly url: string;\n readonly snippet: string;\n readonly date?: string;\n}\n\n// \u2500\u2500 Retry Configuration \u2500\u2500\n\nconst SEARCH_RETRY_CONFIG = {\n maxRetries: MAX_RETRIES,\n baseDelayMs: 1000,\n maxDelayMs: 10000,\n timeoutMs: 30000,\n} as const;\n\nconst RETRYABLE_SEARCH_CODES = new Set([429, 500, 502, 503, 504]);\n\n// Pre-compiled regex patterns for Reddit search\nconst REDDIT_SITE_REGEX = /site:\\s*reddit\\.com/i;\nconst REDDIT_SUBREDDIT_SUFFIX_REGEX = / : r\\/\\w+$/;\nconst REDDIT_SUFFIX_REGEX = / - Reddit$/;\n\n// \u2500\u2500 Helper: Parse Serper search responses into structured results \u2500\u2500\n\nfunction parseSearchResponses(\n responses: Array<Record<string, unknown>>,\n keywords: string[],\n): KeywordSearchResult[] {\n return responses.map((resp, index) => {\n try {\n const organic = (resp.organic || []) as Array<Record<string, unknown>>;\n const results: SearchResult[] = organic.map((item, idx) => ({\n title: (item.title as string) || 'No title',\n link: (item.link as string) || '#',\n snippet: (item.snippet as string) || '',\n date: item.date as string | undefined,\n position: (item.position as number) || idx + 1,\n }));\n\n const searchInfo = resp.searchInformation as Record<string, unknown> | undefined;\n const totalResults = searchInfo?.totalResults\n ? 
parseInt(String(searchInfo.totalResults).replace(/,/g, ''), 10)\n : results.length;\n\n const relatedSearches = (resp.relatedSearches || []) as Array<Record<string, unknown>>;\n const related = relatedSearches.map((r) => (r.query as string) || '');\n\n return { keyword: keywords[index] || '', results, totalResults, related };\n } catch {\n return { keyword: keywords[index] || '', results: [], totalResults: 0, related: [] };\n }\n });\n}\n\n// \u2500\u2500 Helper: Execute search API call with retry \u2500\u2500\n\nasync function executeSearchWithRetry(\n apiKey: string,\n body: unknown,\n isRetryable: (status?: number, error?: unknown) => boolean,\n): Promise<{ data: unknown; error?: StructuredError }> {\n let lastError: StructuredError | undefined;\n\n for (let attempt = 0; attempt <= SEARCH_RETRY_CONFIG.maxRetries; attempt++) {\n try {\n if (attempt > 0) {\n mcpLog('warning', `Retry attempt ${attempt}/${SEARCH_RETRY_CONFIG.maxRetries}`, 'search');\n }\n\n const response = await fetchWithTimeout(SERPER_API_URL, {\n method: 'POST',\n headers: {\n 'X-API-KEY': apiKey,\n 'Content-Type': 'application/json',\n },\n body: JSON.stringify(body),\n timeoutMs: SEARCH_RETRY_CONFIG.timeoutMs,\n });\n\n if (!response.ok) {\n const errorText = await response.text().catch(() => '');\n lastError = classifyError({ status: response.status, message: errorText });\n\n if (isRetryable(response.status) && attempt < SEARCH_RETRY_CONFIG.maxRetries) {\n const delayMs = calculateBackoff(attempt, SEARCH_RETRY_CONFIG.baseDelayMs, SEARCH_RETRY_CONFIG.maxDelayMs);\n mcpLog('warning', `API returned ${response.status}, retrying in ${delayMs}ms...`, 'search');\n await sleep(delayMs);\n continue;\n }\n\n return { data: undefined, error: lastError };\n }\n\n try {\n const data = await response.json();\n return { data };\n } catch {\n return {\n data: undefined,\n error: { code: ErrorCode.PARSE_ERROR, message: 'Failed to parse search response', retryable: false },\n };\n }\n } catch (error) {\n lastError = classifyError(error);\n\n if (isRetryable(undefined, error) && attempt < SEARCH_RETRY_CONFIG.maxRetries) {\n const delayMs = calculateBackoff(attempt, SEARCH_RETRY_CONFIG.baseDelayMs, SEARCH_RETRY_CONFIG.maxDelayMs);\n mcpLog('warning', `${lastError.code}: ${lastError.message}, retrying in ${delayMs}ms...`, 'search');\n await sleep(delayMs);\n continue;\n }\n\n return { data: undefined, error: lastError };\n }\n }\n\n return {\n data: undefined,\n error: lastError || { code: ErrorCode.UNKNOWN_ERROR, message: 'Search failed', retryable: false },\n };\n}\n\n// \u2500\u2500 SearchClient \u2500\u2500\n\nexport class SearchClient {\n private apiKey: string;\n\n constructor(apiKey?: string) {\n const env = parseEnv();\n this.apiKey = apiKey || env.SEARCH_API_KEY || '';\n\n if (!this.apiKey) {\n throw new Error('Web search capability is not configured. Please set up the required API credentials.');\n }\n }\n\n /**\n * Check if error is retryable\n */\n private isRetryable(status?: number, error?: unknown): boolean {\n if (status && RETRYABLE_SEARCH_CODES.has(status)) return true;\n\n if (error == null) return false;\n const message = (typeof error === 'object' && 'message' in error && typeof (error as { message?: string }).message === 'string')\n ? 
(error as { message: string }).message.toLowerCase()\n : '';\n return message.includes('timeout') || message.includes('rate limit') || message.includes('connection');\n }\n\n /**\n * Search multiple keywords in parallel\n * NEVER throws - always returns a valid response\n */\n async searchMultiple(keywords: string[]): Promise<MultipleSearchResponse> {\n const startTime = Date.now();\n\n if (keywords.length === 0) {\n return {\n searches: [],\n totalKeywords: 0,\n executionTime: 0,\n error: { code: ErrorCode.INVALID_INPUT, message: 'No keywords provided', retryable: false },\n };\n }\n\n const searchQueries = keywords.map(keyword => ({ q: keyword }));\n const { data, error } = await executeSearchWithRetry(\n this.apiKey,\n searchQueries,\n (status, err) => this.isRetryable(status, err),\n );\n\n if (error || data === undefined) {\n return {\n searches: [],\n totalKeywords: keywords.length,\n executionTime: Date.now() - startTime,\n error,\n };\n }\n\n const responses = Array.isArray(data) ? data : [data];\n const searches = parseSearchResponses(responses as Array<Record<string, unknown>>, keywords);\n\n return { searches, totalKeywords: keywords.length, executionTime: Date.now() - startTime };\n }\n\n /**\n * Search Reddit via Google (adds site:reddit.com automatically)\n * NEVER throws - returns empty array on failure\n */\n async searchReddit(query: string, dateAfter?: string): Promise<RedditSearchResult[]> {\n if (!query?.trim()) {\n return [];\n }\n\n let q = query.replace(REDDIT_SITE_REGEX, '').trim() + ' site:reddit.com';\n\n if (dateAfter) {\n q += ` after:${dateAfter}`;\n }\n\n for (let attempt = 0; attempt <= SEARCH_RETRY_CONFIG.maxRetries; attempt++) {\n try {\n const res = await fetchWithTimeout(SERPER_API_URL, {\n method: 'POST',\n headers: { 'X-API-KEY': this.apiKey, 'Content-Type': 'application/json' },\n body: JSON.stringify({ q, num: DEFAULT_RESULTS_PER_KEYWORD }),\n timeoutMs: SEARCH_RETRY_CONFIG.timeoutMs,\n });\n\n if (!res.ok) {\n if (this.isRetryable(res.status) && attempt < SEARCH_RETRY_CONFIG.maxRetries) {\n const delayMs = calculateBackoff(attempt, SEARCH_RETRY_CONFIG.baseDelayMs, SEARCH_RETRY_CONFIG.maxDelayMs);\n mcpLog('warning', `Reddit search ${res.status}, retrying in ${delayMs}ms...`, 'search');\n await sleep(delayMs);\n continue;\n }\n mcpLog('error', `Reddit search failed with status ${res.status}`, 'search');\n return [];\n }\n\n const data = await res.json() as { organic?: Array<{ title: string; link: string; snippet: string; date?: string }> };\n return (data.organic || []).map((r) => ({\n title: (r.title || '').replace(REDDIT_SUBREDDIT_SUFFIX_REGEX, '').replace(REDDIT_SUFFIX_REGEX, ''),\n url: r.link || '',\n snippet: r.snippet || '',\n date: r.date,\n }));\n\n } catch (error) {\n const err = classifyError(error);\n if (this.isRetryable(undefined, error) && attempt < SEARCH_RETRY_CONFIG.maxRetries) {\n const delayMs = calculateBackoff(attempt, SEARCH_RETRY_CONFIG.baseDelayMs, SEARCH_RETRY_CONFIG.maxDelayMs);\n mcpLog('warning', `Reddit search ${err.code}, retrying in ${delayMs}ms...`, 'search');\n await sleep(delayMs);\n continue;\n }\n mcpLog('error', `Reddit search failed: ${err.message}`, 'search');\n return [];\n }\n }\n\n return [];\n }\n\n /**\n * Search Reddit with multiple queries (bounded concurrency)\n * NEVER throws - searchReddit never throws, pMap preserves order\n */\n async searchRedditMultiple(queries: string[], dateAfter?: string): Promise<Map<string, RedditSearchResult[]>> {\n if (queries.length === 0) {\n return new Map();\n }\n\n 
const results = await pMap(\n queries,\n q => this.searchReddit(q, dateAfter),\n MAX_SEARCH_CONCURRENCY\n );\n\n return new Map(queries.map((q, i) => [q, results[i] || []]));\n }\n}\n"],
+ "mappings": "AAMA,SAAS,gBAAgB;AACzB;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,wBAAwB;AACjC,SAAS,YAAY;AACrB,SAAS,cAAc;AAIvB,MAAM,iBAAiB;AACvB,MAAM,8BAA8B;AACpC,MAAM,yBAAyB;AAC/B,MAAM,cAAc;AAoCpB,MAAM,sBAAsB;AAAA,EAC1B,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,YAAY;AAAA,EACZ,WAAW;AACb;AAEA,MAAM,yBAAyB,oBAAI,IAAI,CAAC,KAAK,KAAK,KAAK,KAAK,GAAG,CAAC;AAGhE,MAAM,oBAAoB;AAC1B,MAAM,gCAAgC;AACtC,MAAM,sBAAsB;AAI5B,SAAS,qBACP,WACA,UACuB;AACvB,SAAO,UAAU,IAAI,CAAC,MAAM,UAAU;AACpC,QAAI;AACF,YAAM,UAAW,KAAK,WAAW,CAAC;AAClC,YAAM,UAA0B,QAAQ,IAAI,CAAC,MAAM,SAAS;AAAA,QAC1D,OAAQ,KAAK,SAAoB;AAAA,QACjC,MAAO,KAAK,QAAmB;AAAA,QAC/B,SAAU,KAAK,WAAsB;AAAA,QACrC,MAAM,KAAK;AAAA,QACX,UAAW,KAAK,YAAuB,MAAM;AAAA,MAC/C,EAAE;AAEF,YAAM,aAAa,KAAK;AACxB,YAAM,eAAe,YAAY,eAC7B,SAAS,OAAO,WAAW,YAAY,EAAE,QAAQ,MAAM,EAAE,GAAG,EAAE,IAC9D,QAAQ;AAEZ,YAAM,kBAAmB,KAAK,mBAAmB,CAAC;AAClD,YAAM,UAAU,gBAAgB,IAAI,CAAC,MAAO,EAAE,SAAoB,EAAE;AAEpE,aAAO,EAAE,SAAS,SAAS,KAAK,KAAK,IAAI,SAAS,cAAc,QAAQ;AAAA,IAC1E,QAAQ;AACN,aAAO,EAAE,SAAS,SAAS,KAAK,KAAK,IAAI,SAAS,CAAC,GAAG,cAAc,GAAG,SAAS,CAAC,EAAE;AAAA,IACrF;AAAA,EACF,CAAC;AACH;AAIA,eAAe,uBACb,QACA,MACA,aACqD;AACrD,MAAI;AAEJ,WAAS,UAAU,GAAG,WAAW,oBAAoB,YAAY,WAAW;AAC1E,QAAI;AACF,UAAI,UAAU,GAAG;AACf,eAAO,WAAW,iBAAiB,OAAO,IAAI,oBAAoB,UAAU,IAAI,QAAQ;AAAA,MAC1F;AAEA,YAAM,WAAW,MAAM,iBAAiB,gBAAgB;AAAA,QACtD,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,aAAa;AAAA,UACb,gBAAgB;AAAA,QAClB;AAAA,QACA,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,WAAW,oBAAoB;AAAA,MACjC,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACtD,oBAAY,cAAc,EAAE,QAAQ,SAAS,QAAQ,SAAS,UAAU,CAAC;AAEzE,YAAI,YAAY,SAAS,MAAM,KAAK,UAAU,oBAAoB,YAAY;AAC5E,gBAAM,UAAU,iBAAiB,SAAS,oBAAoB,aAAa,oBAAoB,UAAU;AACzG,iBAAO,WAAW,gBAAgB,SAAS,MAAM,iBAAiB,OAAO,SAAS,QAAQ;AAC1F,gBAAM,MAAM,OAAO;AACnB;AAAA,QACF;AAEA,eAAO,EAAE,MAAM,QAAW,OAAO,UAAU;AAAA,MAC7C;AAEA,UAAI;AACF,cAAM,OAAO,MAAM,SAAS,KAAK;AACjC,eAAO,EAAE,KAAK;AAAA,MAChB,QAAQ;AACN,eAAO;AAAA,UACL,MAAM;AAAA,UACN,OAAO,EAAE,MAAM,UAAU,aAAa,SAAS,mCAAmC,WAAW,MAAM;AAAA,QACrG;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,kBAAY,cAAc,KAAK;AAE/B,UAAI,YAAY,QAAW,KAAK,KAAK,UAAU,oBAAoB,YAAY;AAC7E,cAAM,UAAU,iBAAiB,SAAS,oBAAoB,aAAa,oBAAoB,UAAU;AACzG,eAAO,WAAW,GAAG,UAAU,IAAI,KAAK,UAAU,OAAO,iBAAiB,OAAO,SAAS,QAAQ;AAClG,cAAM,MAAM,OAAO;AACnB;AAAA,MACF;AAEA,aAAO,EAAE,MAAM,QAAW,OAAO,UAAU;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,OAAO,aAAa,EAAE,MAAM,UAAU,eAAe,SAAS,iBAAiB,WAAW,MAAM;AAAA,EAClG;AACF;AAIO,MAAM,aAAa;AAAA,EAChB;AAAA,EAER,YAAY,QAAiB;AAC3B,UAAM,MAAM,SAAS;AACrB,SAAK,SAAS,UAAU,IAAI,kBAAkB;AAE9C,QAAI,CAAC,KAAK,QAAQ;AAChB,YAAM,IAAI,MAAM,sFAAsF;AAAA,IACxG;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,QAAiB,OAA0B;AAC7D,QAAI,UAAU,uBAAuB,IAAI,MAAM,EAAG,QAAO;AAEzD,QAAI,SAAS,KAAM,QAAO;AAC1B,UAAM,UAAW,OAAO,UAAU,YAAY,aAAa,SAAS,OAAQ,MAA+B,YAAY,WAClH,MAA8B,QAAQ,YAAY,IACnD;AACJ,WAAO,QAAQ,SAAS,SAAS,KAAK,QAAQ,SAAS,YAAY,KAAK,QAAQ,SAAS,YAAY;AAAA,EACvG;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eAAe,UAAqD;AACxE,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI,SAAS,WAAW,GAAG;AACzB,aAAO;AAAA,QACL,UAAU,CAAC;AAAA,QACX,eAAe;AAAA,QACf,eAAe;AAAA,QACf,OAAO,EAAE,MAAM,UAAU,eAAe,SAAS,wBAAwB,WAAW,MAAM;AAAA,MAC5F;AAAA,IACF;AAEA,UAAM,gBAAgB,SAAS,IAAI,cAAY,EAAE,GAAG,QAAQ,EAAE;AAC9D,UAAM,EAAE,MAAM,MAAM,IAAI,MAAM;AAAA,MAC5B,KAAK;AAAA,MACL;AAAA,MACA,CAAC,QAAQ,QAAQ,KAAK,YAAY,QAAQ,GAAG;AAAA,IAC/C;AAEA,QAAI,SAAS,SAAS,QAAW;AAC/B,aAAO;AAAA,QACL,UAAU,CAAC;AAAA,QACX,eAAe,SAAS;AAAA,QACxB,eAAe,KAAK,IAAI,IAAI;AAAA,QAC5B;AAAA,MACF;AAAA,IACF;AAEA,UAAM,YAAY,MAAM,QAAQ,IAAI,IAAI,OAAO,CAAC,IAAI;AACpD,UAAM,WAAW,qBAAqB,WAA6C,QAAQ;AAE3F,WAAO,EAAE,UAAU,eAAe,SAAS,QAAQ,eAAe,KAAK,IAAI,IAAI,UAAU;AAAA,EAC3F;AAAA;AAAA;
AAAA;AAAA;AAAA,EAMA,MAAM,aAAa,OAAe,WAAmD;AACnF,QAAI,CAAC,OAAO,KAAK,GAAG;AAClB,aAAO,CAAC;AAAA,IACV;AAEA,QAAI,IAAI,MAAM,QAAQ,mBAAmB,EAAE,EAAE,KAAK,IAAI;AAEtD,QAAI,WAAW;AACb,WAAK,UAAU,SAAS;AAAA,IAC1B;AAEA,aAAS,UAAU,GAAG,WAAW,oBAAoB,YAAY,WAAW;AAC1E,UAAI;AACF,cAAM,MAAM,MAAM,iBAAiB,gBAAgB;AAAA,UACjD,QAAQ;AAAA,UACR,SAAS,EAAE,aAAa,KAAK,QAAQ,gBAAgB,mBAAmB;AAAA,UACxE,MAAM,KAAK,UAAU,EAAE,GAAG,KAAK,4BAA4B,CAAC;AAAA,UAC5D,WAAW,oBAAoB;AAAA,QACjC,CAAC;AAED,YAAI,CAAC,IAAI,IAAI;AACX,cAAI,KAAK,YAAY,IAAI,MAAM,KAAK,UAAU,oBAAoB,YAAY;AAC5E,kBAAM,UAAU,iBAAiB,SAAS,oBAAoB,aAAa,oBAAoB,UAAU;AACzG,mBAAO,WAAW,iBAAiB,IAAI,MAAM,iBAAiB,OAAO,SAAS,QAAQ;AACtF,kBAAM,MAAM,OAAO;AACnB;AAAA,UACF;AACA,iBAAO,SAAS,oCAAoC,IAAI,MAAM,IAAI,QAAQ;AAC1E,iBAAO,CAAC;AAAA,QACV;AAEA,cAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,gBAAQ,KAAK,WAAW,CAAC,GAAG,IAAI,CAAC,OAAO;AAAA,UACtC,QAAQ,EAAE,SAAS,IAAI,QAAQ,+BAA+B,EAAE,EAAE,QAAQ,qBAAqB,EAAE;AAAA,UACjG,KAAK,EAAE,QAAQ;AAAA,UACf,SAAS,EAAE,WAAW;AAAA,UACtB,MAAM,EAAE;AAAA,QACV,EAAE;AAAA,MAEJ,SAAS,OAAO;AACd,cAAM,MAAM,cAAc,KAAK;AAC/B,YAAI,KAAK,YAAY,QAAW,KAAK,KAAK,UAAU,oBAAoB,YAAY;AAClF,gBAAM,UAAU,iBAAiB,SAAS,oBAAoB,aAAa,oBAAoB,UAAU;AACzG,iBAAO,WAAW,iBAAiB,IAAI,IAAI,iBAAiB,OAAO,SAAS,QAAQ;AACpF,gBAAM,MAAM,OAAO;AACnB;AAAA,QACF;AACA,eAAO,SAAS,yBAAyB,IAAI,OAAO,IAAI,QAAQ;AAChE,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAEA,WAAO,CAAC;AAAA,EACV;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,qBAAqB,SAAmB,WAAgE;AAC5G,QAAI,QAAQ,WAAW,GAAG;AACxB,aAAO,oBAAI,IAAI;AAAA,IACjB;AAEA,UAAM,UAAU,MAAM;AAAA,MACpB;AAAA,MACA,OAAK,KAAK,aAAa,GAAG,SAAS;AAAA,MACnC;AAAA,IACF;AAEA,WAAO,IAAI,IAAI,QAAQ,IAAI,CAAC,GAAG,MAAM,CAAC,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAAA,EAC7D;AACF;",
+ "names": []
+ }
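Note: the `sourcesContent` field above embeds the full TypeScript source of the `SearchClient`, which documents its public surface: the constructor takes an optional API key (falling back to the `SEARCH_API_KEY` environment value), and `searchMultiple()` never throws, reporting failures through a structured `error` field instead. A minimal consumer sketch in an ES module, assuming a configured Serper key; the deep `dist` import path is an assumption for illustration, not a documented entry point:

// Sketch only: consuming the SearchClient whose source is embedded above.
// The import path and SERPER_API_KEY setup are assumptions for illustration.
import { SearchClient } from 'mcp-researchpowerpack-http/dist/src/clients/search.js';

const client = new SearchClient(); // throws only when no API key is configured

// searchMultiple() is documented as never throwing; failures surface as a
// structured `error` field rather than an exception.
const { searches, executionTime, error } = await client.searchMultiple([
  'model context protocol',
  'mcp server examples',
]);

if (error) {
  console.error(`${error.code}: ${error.message}`);
} else {
  for (const s of searches) {
    console.log(`${s.keyword}: ${s.totalResults} results, top hit ${s.results[0]?.link}`);
  }
  console.log(`completed in ${executionTime}ms`);
}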
@@ -0,0 +1,93 @@
+ /**
+  * Consolidated configuration
+  * All environment variables, constants, and LLM config in one place
+  */
+ interface EnvConfig {
+   SCRAPER_API_KEY: string;
+   SEARCH_API_KEY: string | undefined;
+   REDDIT_CLIENT_ID: string | undefined;
+   REDDIT_CLIENT_SECRET: string | undefined;
+   CEREBRAS_API_KEY: string | undefined;
+   GITHUB_TOKEN: string | undefined;
+ }
+ export declare function resetEnvCache(): void;
+ export declare function parseEnv(): EnvConfig;
+ interface ResearchConfig {
+   readonly BASE_URL: string;
+   readonly MODEL: string;
+   readonly FALLBACK_MODEL: string;
+   readonly API_KEY: string;
+   readonly TIMEOUT_MS: number;
+   readonly REASONING_EFFORT: 'low' | 'medium' | 'high';
+   readonly MAX_URLS: number;
+ }
+ export declare const RESEARCH: ResearchConfig;
+ export declare const SERVER: {
+   readonly NAME: string;
+   readonly VERSION: string;
+   readonly DESCRIPTION: string;
+ };
+ export interface Capabilities {
+   reddit: boolean;
+   search: boolean;
+   scraping: boolean;
+   deepResearch: boolean;
+   llmExtraction: boolean;
+   cerebras: boolean;
+   github: boolean;
+ }
+ export declare function getCapabilities(): Capabilities;
+ export declare function getMissingEnvMessage(capability: keyof Capabilities): string;
+ export declare const GITHUB: {
+   readonly MAX_CONCURRENT_REPOS: 5;
+   readonly MAX_RESULTS: 50;
+   readonly DEFAULT_RESULTS: 20;
+   readonly RETRY_COUNT: 3;
+   readonly TIMEOUT_MS: 15000;
+   readonly PARTICIPATION_RETRY_DELAY_MS: 1500;
+   readonly PARTICIPATION_MAX_RETRIES: 3;
+   readonly GRAPHQL_URL: "https://api.github.com/graphql";
+   readonly REST_BASE_URL: "https://api.github.com";
+ };
+ export declare const SCRAPER: {
+   readonly MAX_CONCURRENT: 30;
+   readonly BATCH_SIZE: 30;
+   readonly MAX_TOKENS_BUDGET: 32000;
+   readonly MIN_URLS: 3;
+   readonly MAX_URLS: 50;
+   readonly RETRY_COUNT: 3;
+   readonly RETRY_DELAYS: readonly [2000, 4000, 8000];
+   readonly EXTRACTION_PREFIX: "Extract from document only — never hallucinate or add external knowledge.";
+   readonly EXTRACTION_SUFFIX: "First line = content, not preamble. No confirmation messages.";
+ };
+ export declare const RESEARCH_PROMPTS: {
+   readonly SUFFIX: "CONSTRAINTS: No restating the question. Cite sources inline [source]. NEVER hallucinate — only report what sources confirm.";
+ };
+ export declare const REDDIT: {
+   readonly MAX_CONCURRENT: 10;
+   readonly BATCH_SIZE: 10;
+   readonly MAX_WORDS_PER_POST: 20000;
+   readonly MAX_WORDS_TOTAL: 100000;
+   readonly FETCH_LIMIT_PER_POST: 500;
+   readonly MIN_POSTS: 2;
+   readonly MAX_POSTS: 50;
+   readonly RETRY_COUNT: 5;
+   readonly RETRY_DELAYS: readonly [2000, 4000, 8000, 16000, 32000];
+   readonly EXTRACTION_SUFFIX: "\n---\n\n⚠️ IMPORTANT: Extract and synthesize the key insights, opinions, and recommendations from these Reddit discussions. Focus on:\n- Common themes and consensus across posts\n- Specific recommendations with context\n- Contrasting viewpoints and debates\n- Real-world experiences and lessons learned\n- Technical details and implementation tips\n\nBe comprehensive but concise. Prioritize actionable insights.\n\n---";
+ };
+ export declare const CTR_WEIGHTS: Record<number, number>;
+ interface LlmExtractionConfig {
+   readonly MODEL: string;
+   readonly MAX_TOKENS: number;
+   readonly ENABLE_REASONING: boolean;
+ }
+ export declare const LLM_EXTRACTION: LlmExtractionConfig;
+ interface CerebrasConfig {
+   readonly ENABLED: boolean;
+   readonly API_KEY: string;
+   readonly BASE_URL: string;
+   readonly MODEL: string;
+ }
+ export declare const CEREBRAS: CerebrasConfig;
+ export {};
+ //# sourceMappingURL=index.d.ts.map
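These declarations describe a capability-gated server: every tool family maps to a boolean in `Capabilities`, and `getMissingEnvMessage()` returns a setup hint for whichever keys are absent. A short startup-check sketch against this API (the import path is assumed for illustration):

// Hypothetical startup check built on the declarations above.
import { getCapabilities, getMissingEnvMessage } from 'mcp-researchpowerpack-http/dist/src/config/index.js';

const caps = getCapabilities();
const needed = ['search', 'scraping', 'reddit', 'deepResearch'] as const;

for (const cap of needed) {
  if (!caps[cap]) {
    // Each message names the missing env var and where to obtain a key.
    console.warn(getMissingEnvMessage(cap));
  }
}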
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/config/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAsEH,UAAU,SAAS;IACjB,eAAe,EAAE,MAAM,CAAC;IACxB,cAAc,EAAE,MAAM,GAAG,SAAS,CAAC;IACnC,gBAAgB,EAAE,MAAM,GAAG,SAAS,CAAC;IACrC,oBAAoB,EAAE,MAAM,GAAG,SAAS,CAAC;IACzC,gBAAgB,EAAE,MAAM,GAAG,SAAS,CAAC;IACrC,YAAY,EAAE,MAAM,GAAG,SAAS,CAAC;CAClC;AAKD,wBAAgB,aAAa,IAAI,IAAI,CAMpC;AAED,wBAAgB,QAAQ,IAAI,SAAS,CAWpC;AAMD,UAAU,cAAc;IACtB,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,cAAc,EAAE,MAAM,CAAC;IAChC,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,gBAAgB,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;IACrD,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;CAC3B;AAmBD,eAAO,MAAM,QAAQ,EAAE,cAIrB,CAAC;AAQH,eAAO,MAAM,MAAM;;;;CAIT,CAAC;AAMX,MAAM,WAAW,YAAY;IAC3B,MAAM,EAAE,OAAO,CAAC;IAChB,MAAM,EAAE,OAAO,CAAC;IAChB,QAAQ,EAAE,OAAO,CAAC;IAClB,YAAY,EAAE,OAAO,CAAC;IACtB,aAAa,EAAE,OAAO,CAAC;IACvB,QAAQ,EAAE,OAAO,CAAC;IAClB,MAAM,EAAE,OAAO,CAAC;CACjB;AAcD,wBAAgB,eAAe,IAAI,YAAY,CAW9C;AAED,wBAAgB,oBAAoB,CAAC,UAAU,EAAE,MAAM,YAAY,GAAG,MAAM,CAW3E;AAMD,eAAO,MAAM,MAAM;;;;;;;;;;CAUT,CAAC;AAMX,eAAO,MAAM,OAAO;;;;;;;;;;CAUV,CAAC;AAMX,eAAO,MAAM,gBAAgB;;CAEnB,CAAC;AAMX,eAAO,MAAM,MAAM;;;;;;;;;;;CAuBT,CAAC;AAMX,eAAO,MAAM,WAAW,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAWrC,CAAC;AAMX,UAAU,mBAAmB;IAC3B,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,UAAU,EAAE,MAAM,CAAC;IAC5B,QAAQ,CAAC,gBAAgB,EAAE,OAAO,CAAC;CACpC;AAcD,eAAO,MAAM,cAAc,EAAE,mBAI3B,CAAC;AAMH,UAAU,cAAc;IACtB,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC;IACzB,QAAQ,CAAC,QAAQ,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,KAAK,EAAE,MAAM,CAAC;CACxB;AAgBD,eAAO,MAAM,QAAQ,EAAE,cAIrB,CAAC"}
@@ -0,0 +1,218 @@
+ import { Logger } from "mcp-use";
+ import { VERSION, PACKAGE_NAME, PACKAGE_DESCRIPTION } from "../version.js";
+ function safeParseInt(value, defaultVal, min, max) {
+   const logger = Logger.get("config");
+   if (!value) {
+     return defaultVal;
+   }
+   const parsed = parseInt(value, 10);
+   if (isNaN(parsed)) {
+     logger.warn(`Invalid number "${value}", using default ${defaultVal}`);
+     return defaultVal;
+   }
+   if (parsed < min) {
+     logger.warn(`Value ${parsed} below minimum ${min}, clamping to ${min}`);
+     return min;
+   }
+   if (parsed > max) {
+     logger.warn(`Value ${parsed} above maximum ${max}, clamping to ${max}`);
+     return max;
+   }
+   return parsed;
+ }
+ const VALID_REASONING_EFFORTS = ["low", "medium", "high"];
+ function parseReasoningEffort(value) {
+   if (value && VALID_REASONING_EFFORTS.includes(value)) {
+     return value;
+   }
+   return "high";
+ }
+ let cachedEnv = null;
+ let cachedHasGhCli = null;
+ function resetEnvCache() {
+   cachedEnv = null;
+   cachedResearch = null;
+   cachedLlmExtraction = null;
+   cachedCerebras = null;
+   cachedHasGhCli = null;
+ }
+ function parseEnv() {
+   if (cachedEnv) return cachedEnv;
+   cachedEnv = {
+     SCRAPER_API_KEY: process.env.SCRAPEDO_API_KEY || "",
+     SEARCH_API_KEY: process.env.SERPER_API_KEY || void 0,
+     REDDIT_CLIENT_ID: process.env.REDDIT_CLIENT_ID || void 0,
+     REDDIT_CLIENT_SECRET: process.env.REDDIT_CLIENT_SECRET || void 0,
+     CEREBRAS_API_KEY: process.env.CEREBRAS_API_KEY || void 0,
+     GITHUB_TOKEN: process.env.GITHUB_TOKEN || void 0
+   };
+   return cachedEnv;
+ }
+ let cachedResearch = null;
+ function getResearch() {
+   if (cachedResearch) return cachedResearch;
+   cachedResearch = {
+     BASE_URL: process.env.OPENROUTER_BASE_URL || "https://openrouter.ai/api/v1",
+     MODEL: process.env.RESEARCH_MODEL || "x-ai/grok-4-fast",
+     FALLBACK_MODEL: process.env.RESEARCH_FALLBACK_MODEL || "google/gemini-2.5-flash",
+     API_KEY: process.env.OPENROUTER_API_KEY || "",
+     TIMEOUT_MS: safeParseInt(process.env.API_TIMEOUT_MS, 18e5, 1e3, 36e5),
+     REASONING_EFFORT: parseReasoningEffort(process.env.DEFAULT_REASONING_EFFORT),
+     MAX_URLS: safeParseInt(process.env.DEFAULT_MAX_URLS, 100, 10, 200)
+   };
+   return cachedResearch;
+ }
+ const RESEARCH = new Proxy({}, {
+   get(_target, prop) {
+     return getResearch()[prop];
+   }
+ });
+ const SERVER = {
+   NAME: PACKAGE_NAME,
+   VERSION,
+   DESCRIPTION: PACKAGE_DESCRIPTION
+ };
+ function hasGhCli() {
+   if (cachedHasGhCli !== null) return cachedHasGhCli;
+   try {
+     const { execSync } = require("node:child_process");
+     const token = execSync("gh auth token", { timeout: 3e3, stdio: ["ignore", "pipe", "ignore"], encoding: "utf-8" }).trim();
+     cachedHasGhCli = token.length > 0;
+   } catch {
+     cachedHasGhCli = false;
+   }
+   return cachedHasGhCli;
+ }
+ function getCapabilities() {
+   const env = parseEnv();
+   return {
+     reddit: !!(env.REDDIT_CLIENT_ID && env.REDDIT_CLIENT_SECRET),
+     search: !!env.SEARCH_API_KEY,
+     scraping: !!env.SCRAPER_API_KEY,
+     deepResearch: !!RESEARCH.API_KEY,
+     llmExtraction: !!RESEARCH.API_KEY || CEREBRAS.ENABLED,
+     cerebras: CEREBRAS.ENABLED,
+     github: !!env.GITHUB_TOKEN || hasGhCli()
+   };
+ }
+ function getMissingEnvMessage(capability) {
+   const messages = {
+     reddit: '\u274C **Reddit tools unavailable.** Set `REDDIT_CLIENT_ID` and `REDDIT_CLIENT_SECRET` to enable `get-reddit-post`.\n\n\u{1F449} Create a Reddit app at: https://www.reddit.com/prefs/apps (select "script" type)',
+     search: "\u274C **Search unavailable.** Set `SERPER_API_KEY` to enable `web-search` and `search-reddit`.\n\n\u{1F449} Get your free API key at: https://serper.dev (2,500 free queries)",
+     scraping: "\u274C **Web scraping unavailable.** Set `SCRAPEDO_API_KEY` to enable `scrape-links`.\n\n\u{1F449} Sign up at: https://scrape.do (1,000 free credits)",
+     deepResearch: "\u274C **Deep research unavailable.** Set `OPENROUTER_API_KEY` to enable `deep-research`.\n\n\u{1F449} Get your API key at: https://openrouter.ai/keys",
+     llmExtraction: "\u26A0\uFE0F **AI extraction disabled.** The `use_llm` and `what_to_extract` features for `scrape-links` require `OPENROUTER_API_KEY`.\n\nScraping will work but without intelligent content filtering.",
+     cerebras: "\u26A0\uFE0F **Cerebras not configured.** Set `USE_CEREBRAS=true` and `CEREBRAS_API_KEY` to use Cerebras for LLM extraction.\n\n\u{1F449} Get your API key at: https://cloud.cerebras.ai",
+     github: "\u274C **GitHub Score unavailable.** Set `GITHUB_TOKEN` or run `gh auth login` to enable `github-score`.\n\n\u{1F449} Option 1: `gh auth login` (uses your existing GitHub CLI session)\n\u{1F449} Option 2: Create a personal access token at: https://github.com/settings/tokens (no special scopes needed for public repos)"
+   };
+   return messages[capability];
+ }
+ const GITHUB = {
+   MAX_CONCURRENT_REPOS: 5,
+   MAX_RESULTS: 50,
+   DEFAULT_RESULTS: 20,
+   RETRY_COUNT: 3,
+   TIMEOUT_MS: 15e3,
+   PARTICIPATION_RETRY_DELAY_MS: 1500,
+   PARTICIPATION_MAX_RETRIES: 3,
+   GRAPHQL_URL: "https://api.github.com/graphql",
+   REST_BASE_URL: "https://api.github.com"
+ };
+ const SCRAPER = {
+   MAX_CONCURRENT: 30,
+   BATCH_SIZE: 30,
+   MAX_TOKENS_BUDGET: 32e3,
+   MIN_URLS: 3,
+   MAX_URLS: 50,
+   RETRY_COUNT: 3,
+   RETRY_DELAYS: [2e3, 4e3, 8e3],
+   EXTRACTION_PREFIX: "Extract from document only \u2014 never hallucinate or add external knowledge.",
+   EXTRACTION_SUFFIX: "First line = content, not preamble. No confirmation messages."
+ };
+ const RESEARCH_PROMPTS = {
+   SUFFIX: `CONSTRAINTS: No restating the question. Cite sources inline [source]. NEVER hallucinate \u2014 only report what sources confirm.`
+ };
+ const REDDIT = {
+   MAX_CONCURRENT: 10,
+   BATCH_SIZE: 10,
+   MAX_WORDS_PER_POST: 2e4,
+   MAX_WORDS_TOTAL: 1e5,
+   FETCH_LIMIT_PER_POST: 500,
+   MIN_POSTS: 2,
+   MAX_POSTS: 50,
+   RETRY_COUNT: 5,
+   RETRY_DELAYS: [2e3, 4e3, 8e3, 16e3, 32e3],
+   EXTRACTION_SUFFIX: `
+ ---
+
+ \u26A0\uFE0F IMPORTANT: Extract and synthesize the key insights, opinions, and recommendations from these Reddit discussions. Focus on:
+ - Common themes and consensus across posts
+ - Specific recommendations with context
+ - Contrasting viewpoints and debates
+ - Real-world experiences and lessons learned
+ - Technical details and implementation tips
+
+ Be comprehensive but concise. Prioritize actionable insights.
+
+ ---`
+ };
+ const CTR_WEIGHTS = {
+   1: 100,
+   2: 60,
+   3: 48.89,
+   4: 33.33,
+   5: 28.89,
+   6: 26.44,
+   7: 24.44,
+   8: 17.78,
+   9: 13.33,
+   10: 12.56
+ };
+ let cachedLlmExtraction = null;
+ function getLlmExtraction() {
+   if (cachedLlmExtraction) return cachedLlmExtraction;
+   cachedLlmExtraction = {
+     MODEL: process.env.LLM_EXTRACTION_MODEL || "openai/gpt-oss-120b:nitro",
+     MAX_TOKENS: 8e3,
+     ENABLE_REASONING: process.env.LLM_ENABLE_REASONING !== "false"
+   };
+   return cachedLlmExtraction;
+ }
+ const LLM_EXTRACTION = new Proxy({}, {
+   get(_target, prop) {
+     return getLlmExtraction()[prop];
+   }
+ });
+ let cachedCerebras = null;
+ function getCerebras() {
+   if (cachedCerebras) return cachedCerebras;
+   const env = parseEnv();
+   cachedCerebras = {
+     ENABLED: process.env.USE_CEREBRAS === "true" && !!env.CEREBRAS_API_KEY,
+     API_KEY: env.CEREBRAS_API_KEY || "",
+     BASE_URL: "https://api.cerebras.ai/v1",
+     MODEL: "zai-glm-4.7"
+   };
+   return cachedCerebras;
+ }
+ const CEREBRAS = new Proxy({}, {
+   get(_target, prop) {
+     return getCerebras()[prop];
+   }
+ });
+ export {
+   CEREBRAS,
+   CTR_WEIGHTS,
+   GITHUB,
+   LLM_EXTRACTION,
+   REDDIT,
+   RESEARCH,
+   RESEARCH_PROMPTS,
+   SCRAPER,
+   SERVER,
+   getCapabilities,
+   getMissingEnvMessage,
+   parseEnv,
+   resetEnvCache
+ };
+ //# sourceMappingURL=index.js.map
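One detail worth calling out in the compiled module above: `RESEARCH`, `LLM_EXTRACTION`, and `CEREBRAS` are lazy `Proxy` objects, so `process.env` is read on first property access and cached until `resetEnvCache()` clears it. A behavioral sketch (illustrative only; the import path is an assumption):

import { RESEARCH, resetEnvCache } from 'mcp-researchpowerpack-http/dist/src/config/index.js';

// First property access runs getResearch(), which reads and caches the env.
process.env.RESEARCH_MODEL = 'x-ai/grok-4-fast';
console.log(RESEARCH.MODEL); // 'x-ai/grok-4-fast'

// Later env changes are invisible while the cache holds.
process.env.RESEARCH_MODEL = 'google/gemini-2.5-flash';
console.log(RESEARCH.MODEL); // still 'x-ai/grok-4-fast'

resetEnvCache();
console.log(RESEARCH.MODEL); // 'google/gemini-2.5-flash' after the reset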
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../src/config/index.ts"],
+ "sourcesContent": ["/**\n * Consolidated configuration\n * All environment variables, constants, and LLM config in one place\n */\n\nimport { Logger } from 'mcp-use';\n\nimport { VERSION, PACKAGE_NAME, PACKAGE_DESCRIPTION } from '../version.js';\n\n// Import version utilities (not re-exported - use directly from version.ts if needed externally)\n\n// ============================================================================\n// Safe Integer Parsing Helper\n// ============================================================================\n\n/**\n * Safely parse an integer from environment variable with bounds checking\n * @param value - The string value to parse (from process.env)\n * @param defaultVal - Default value if parsing fails or value is undefined\n * @param min - Minimum allowed value (clamped if below)\n * @param max - Maximum allowed value (clamped if above)\n * @returns Parsed integer within bounds, or default value\n */\nfunction safeParseInt(\n value: string | undefined,\n defaultVal: number,\n min: number,\n max: number\n): number {\n const logger = Logger.get('config');\n\n if (!value) {\n return defaultVal;\n }\n \n const parsed = parseInt(value, 10);\n \n if (isNaN(parsed)) {\n logger.warn(`Invalid number \"${value}\", using default ${defaultVal}`);\n return defaultVal;\n }\n \n if (parsed < min) {\n logger.warn(`Value ${parsed} below minimum ${min}, clamping to ${min}`);\n return min;\n }\n \n if (parsed > max) {\n logger.warn(`Value ${parsed} above maximum ${max}, clamping to ${max}`);\n return max;\n }\n \n return parsed;\n}\n\n// ============================================================================\n// Reasoning Effort Validation\n// ============================================================================\n\nconst VALID_REASONING_EFFORTS = ['low', 'medium', 'high'] as const;\ntype ReasoningEffort = typeof VALID_REASONING_EFFORTS[number];\n\nfunction parseReasoningEffort(value: string | undefined): ReasoningEffort {\n if (value && VALID_REASONING_EFFORTS.includes(value as ReasoningEffort)) {\n return value as ReasoningEffort;\n }\n return 'high';\n}\n\n// ============================================================================\n// Environment Parsing\n// ============================================================================\n\ninterface EnvConfig {\n SCRAPER_API_KEY: string;\n SEARCH_API_KEY: string | undefined;\n REDDIT_CLIENT_ID: string | undefined;\n REDDIT_CLIENT_SECRET: string | undefined;\n CEREBRAS_API_KEY: string | undefined;\n GITHUB_TOKEN: string | undefined;\n}\n\nlet cachedEnv: EnvConfig | null = null;\nlet cachedHasGhCli: boolean | null = null;\n\nexport function resetEnvCache(): void {\n cachedEnv = null;\n cachedResearch = null;\n cachedLlmExtraction = null;\n cachedCerebras = null;\n cachedHasGhCli = null;\n}\n\nexport function parseEnv(): EnvConfig {\n if (cachedEnv) return cachedEnv;\n cachedEnv = {\n SCRAPER_API_KEY: process.env.SCRAPEDO_API_KEY || '',\n SEARCH_API_KEY: process.env.SERPER_API_KEY || undefined,\n REDDIT_CLIENT_ID: process.env.REDDIT_CLIENT_ID || undefined,\n REDDIT_CLIENT_SECRET: process.env.REDDIT_CLIENT_SECRET || undefined,\n CEREBRAS_API_KEY: process.env.CEREBRAS_API_KEY || undefined,\n GITHUB_TOKEN: process.env.GITHUB_TOKEN || undefined,\n };\n return cachedEnv;\n}\n\n// ============================================================================\n// Research API Configuration\n// ============================================================================\n\ninterface ResearchConfig {\n readonly BASE_URL: 
string;\n readonly MODEL: string;\n readonly FALLBACK_MODEL: string;\n readonly API_KEY: string;\n readonly TIMEOUT_MS: number;\n readonly REASONING_EFFORT: 'low' | 'medium' | 'high';\n readonly MAX_URLS: number;\n}\n\nlet cachedResearch: ResearchConfig | null = null;\n\nfunction getResearch(): ResearchConfig {\n if (cachedResearch) return cachedResearch;\n cachedResearch = {\n BASE_URL: process.env.OPENROUTER_BASE_URL || 'https://openrouter.ai/api/v1',\n MODEL: process.env.RESEARCH_MODEL || 'x-ai/grok-4-fast',\n FALLBACK_MODEL: process.env.RESEARCH_FALLBACK_MODEL || 'google/gemini-2.5-flash',\n API_KEY: process.env.OPENROUTER_API_KEY || '',\n TIMEOUT_MS: safeParseInt(process.env.API_TIMEOUT_MS, 1800000, 1000, 3600000),\n REASONING_EFFORT: parseReasoningEffort(process.env.DEFAULT_REASONING_EFFORT),\n MAX_URLS: safeParseInt(process.env.DEFAULT_MAX_URLS, 100, 10, 200),\n };\n return cachedResearch;\n}\n\n// Lazy proxy so existing code using RESEARCH.X still works\nexport const RESEARCH: ResearchConfig = new Proxy({} as ResearchConfig, {\n get(_target, prop: string) {\n return getResearch()[prop as keyof ResearchConfig];\n },\n});\n\n// ============================================================================\n// MCP Server Configuration\n// ============================================================================\n\n// Version is now automatically read from package.json via version.ts\n// No need to manually update version strings anymore!\nexport const SERVER = {\n NAME: PACKAGE_NAME,\n VERSION: VERSION,\n DESCRIPTION: PACKAGE_DESCRIPTION,\n} as const;\n\n// ============================================================================\n// Capability Detection (which features are available based on ENV)\n// ============================================================================\n\nexport interface Capabilities {\n reddit: boolean; // REDDIT_CLIENT_ID + REDDIT_CLIENT_SECRET\n search: boolean; // SERPER_API_KEY\n scraping: boolean; // SCRAPEDO_API_KEY\n deepResearch: boolean; // OPENROUTER_API_KEY\n llmExtraction: boolean; // OPENROUTER_API_KEY (for what_to_extract in scraping)\n cerebras: boolean; // USE_CEREBRAS=true + CEREBRAS_API_KEY\n github: boolean; // GITHUB_TOKEN\n}\n\nfunction hasGhCli(): boolean {\n if (cachedHasGhCli !== null) return cachedHasGhCli;\n try {\n const { execSync } = require('node:child_process') as typeof import('node:child_process');\n const token = execSync('gh auth token', { timeout: 3000, stdio: ['ignore', 'pipe', 'ignore'], encoding: 'utf-8' }).trim();\n cachedHasGhCli = token.length > 0;\n } catch {\n cachedHasGhCli = false;\n }\n return cachedHasGhCli;\n}\n\nexport function getCapabilities(): Capabilities {\n const env = parseEnv();\n return {\n reddit: !!(env.REDDIT_CLIENT_ID && env.REDDIT_CLIENT_SECRET),\n search: !!env.SEARCH_API_KEY,\n scraping: !!env.SCRAPER_API_KEY,\n deepResearch: !!RESEARCH.API_KEY,\n llmExtraction: !!RESEARCH.API_KEY || CEREBRAS.ENABLED,\n cerebras: CEREBRAS.ENABLED,\n github: !!env.GITHUB_TOKEN || hasGhCli(),\n };\n}\n\nexport function getMissingEnvMessage(capability: keyof Capabilities): string {\n const messages: Record<keyof Capabilities, string> = {\n reddit: '\u274C **Reddit tools unavailable.** Set `REDDIT_CLIENT_ID` and `REDDIT_CLIENT_SECRET` to enable `get-reddit-post`.\\n\\n\uD83D\uDC49 Create a Reddit app at: https://www.reddit.com/prefs/apps (select \"script\" type)',\n search: '\u274C **Search unavailable.** Set `SERPER_API_KEY` to enable `web-search` and `search-reddit`.\\n\\n\uD83D\uDC49 Get your free API key 
at: https://serper.dev (2,500 free queries)',\n scraping: '\u274C **Web scraping unavailable.** Set `SCRAPEDO_API_KEY` to enable `scrape-links`.\\n\\n\uD83D\uDC49 Sign up at: https://scrape.do (1,000 free credits)',\n deepResearch: '\u274C **Deep research unavailable.** Set `OPENROUTER_API_KEY` to enable `deep-research`.\\n\\n\uD83D\uDC49 Get your API key at: https://openrouter.ai/keys',\n llmExtraction: '\u26A0\uFE0F **AI extraction disabled.** The `use_llm` and `what_to_extract` features for `scrape-links` require `OPENROUTER_API_KEY`.\\n\\nScraping will work but without intelligent content filtering.',\n cerebras: '\u26A0\uFE0F **Cerebras not configured.** Set `USE_CEREBRAS=true` and `CEREBRAS_API_KEY` to use Cerebras for LLM extraction.\\n\\n\uD83D\uDC49 Get your API key at: https://cloud.cerebras.ai',\n github: '\u274C **GitHub Score unavailable.** Set `GITHUB_TOKEN` or run `gh auth login` to enable `github-score`.\\n\\n\uD83D\uDC49 Option 1: `gh auth login` (uses your existing GitHub CLI session)\\n\uD83D\uDC49 Option 2: Create a personal access token at: https://github.com/settings/tokens (no special scopes needed for public repos)',\n };\n return messages[capability];\n}\n\n// ============================================================================\n// GitHub Configuration\n// ============================================================================\n\nexport const GITHUB = {\n MAX_CONCURRENT_REPOS: 5,\n MAX_RESULTS: 50,\n DEFAULT_RESULTS: 20,\n RETRY_COUNT: 3,\n TIMEOUT_MS: 15_000,\n PARTICIPATION_RETRY_DELAY_MS: 1_500,\n PARTICIPATION_MAX_RETRIES: 3,\n GRAPHQL_URL: 'https://api.github.com/graphql',\n REST_BASE_URL: 'https://api.github.com',\n} as const;\n\n// ============================================================================\n// Scraper Configuration (Scrape.do implementation)\n// ============================================================================\n\nexport const SCRAPER = {\n MAX_CONCURRENT: 30,\n BATCH_SIZE: 30,\n MAX_TOKENS_BUDGET: 32000,\n MIN_URLS: 3,\n MAX_URLS: 50,\n RETRY_COUNT: 3,\n RETRY_DELAYS: [2000, 4000, 8000] as const,\n EXTRACTION_PREFIX: 'Extract from document only \u2014 never hallucinate or add external knowledge.',\n EXTRACTION_SUFFIX: 'First line = content, not preamble. No confirmation messages.',\n} as const;\n\n// ============================================================================\n// Research Compression Prefix/Suffix\n// ============================================================================\n\nexport const RESEARCH_PROMPTS = {\n SUFFIX: `CONSTRAINTS: No restating the question. Cite sources inline [source]. NEVER hallucinate \u2014 only report what sources confirm.`,\n} as const;\n\n// ============================================================================\n// Reddit Configuration\n// ============================================================================\n\nexport const REDDIT = {\n MAX_CONCURRENT: 10,\n BATCH_SIZE: 10,\n MAX_WORDS_PER_POST: 20_000,\n MAX_WORDS_TOTAL: 100_000,\n FETCH_LIMIT_PER_POST: 500,\n MIN_POSTS: 2,\n MAX_POSTS: 50,\n RETRY_COUNT: 5,\n RETRY_DELAYS: [2000, 4000, 8000, 16000, 32000] as const,\n EXTRACTION_SUFFIX: `\n---\n\n\u26A0\uFE0F IMPORTANT: Extract and synthesize the key insights, opinions, and recommendations from these Reddit discussions. 
Focus on:\n- Common themes and consensus across posts\n- Specific recommendations with context\n- Contrasting viewpoints and debates\n- Real-world experiences and lessons learned\n- Technical details and implementation tips\n\nBe comprehensive but concise. Prioritize actionable insights.\n\n---`,\n} as const;\n\n// ============================================================================\n// CTR Weights for URL Ranking (inspired from CTR research)\n// ============================================================================\n\nexport const CTR_WEIGHTS: Record<number, number> = {\n 1: 100.00,\n 2: 60.00,\n 3: 48.89,\n 4: 33.33,\n 5: 28.89,\n 6: 26.44,\n 7: 24.44,\n 8: 17.78,\n 9: 13.33,\n 10: 12.56,\n} as const;\n\n// ============================================================================\n// LLM Extraction Model (uses OPENROUTER for scrape-links AI extraction)\n// ============================================================================\n\ninterface LlmExtractionConfig {\n readonly MODEL: string;\n readonly MAX_TOKENS: number;\n readonly ENABLE_REASONING: boolean;\n}\n\nlet cachedLlmExtraction: LlmExtractionConfig | null = null;\n\nfunction getLlmExtraction(): LlmExtractionConfig {\n if (cachedLlmExtraction) return cachedLlmExtraction;\n cachedLlmExtraction = {\n MODEL: process.env.LLM_EXTRACTION_MODEL || 'openai/gpt-oss-120b:nitro',\n MAX_TOKENS: 8000,\n ENABLE_REASONING: process.env.LLM_ENABLE_REASONING !== 'false',\n };\n return cachedLlmExtraction;\n}\n\nexport const LLM_EXTRACTION: LlmExtractionConfig = new Proxy({} as LlmExtractionConfig, {\n get(_target, prop: string) {\n return getLlmExtraction()[prop as keyof LlmExtractionConfig];\n },\n});\n\n// ============================================================================\n// Cerebras Configuration (optional \u2014 overrides LLM extraction when enabled)\n// ============================================================================\n\ninterface CerebrasConfig {\n readonly ENABLED: boolean;\n readonly API_KEY: string;\n readonly BASE_URL: string;\n readonly MODEL: string;\n}\n\nlet cachedCerebras: CerebrasConfig | null = null;\n\nfunction getCerebras(): CerebrasConfig {\n if (cachedCerebras) return cachedCerebras;\n const env = parseEnv();\n cachedCerebras = {\n ENABLED: process.env.USE_CEREBRAS === 'true' && !!env.CEREBRAS_API_KEY,\n API_KEY: env.CEREBRAS_API_KEY || '',\n BASE_URL: 'https://api.cerebras.ai/v1',\n MODEL: 'zai-glm-4.7',\n };\n return cachedCerebras;\n}\n\nexport const CEREBRAS: CerebrasConfig = new Proxy({} as CerebrasConfig, {\n get(_target, prop: string) {\n return getCerebras()[prop as keyof CerebrasConfig];\n },\n});\n"],
+ "mappings": "AAKA,SAAS,cAAc;AAEvB,SAAS,SAAS,cAAc,2BAA2B;AAgB3D,SAAS,aACP,OACA,YACA,KACA,KACQ;AACR,QAAM,SAAS,OAAO,IAAI,QAAQ;AAElC,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,SAAS,OAAO,EAAE;AAEjC,MAAI,MAAM,MAAM,GAAG;AACjB,WAAO,KAAK,mBAAmB,KAAK,oBAAoB,UAAU,EAAE;AACpE,WAAO;AAAA,EACT;AAEA,MAAI,SAAS,KAAK;AAChB,WAAO,KAAK,SAAS,MAAM,kBAAkB,GAAG,iBAAiB,GAAG,EAAE;AACtE,WAAO;AAAA,EACT;AAEA,MAAI,SAAS,KAAK;AAChB,WAAO,KAAK,SAAS,MAAM,kBAAkB,GAAG,iBAAiB,GAAG,EAAE;AACtE,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAMA,MAAM,0BAA0B,CAAC,OAAO,UAAU,MAAM;AAGxD,SAAS,qBAAqB,OAA4C;AACxE,MAAI,SAAS,wBAAwB,SAAS,KAAwB,GAAG;AACvE,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAeA,IAAI,YAA8B;AAClC,IAAI,iBAAiC;AAE9B,SAAS,gBAAsB;AACpC,cAAY;AACZ,mBAAiB;AACjB,wBAAsB;AACtB,mBAAiB;AACjB,mBAAiB;AACnB;AAEO,SAAS,WAAsB;AACpC,MAAI,UAAW,QAAO;AACtB,cAAY;AAAA,IACV,iBAAiB,QAAQ,IAAI,oBAAoB;AAAA,IACjD,gBAAgB,QAAQ,IAAI,kBAAkB;AAAA,IAC9C,kBAAkB,QAAQ,IAAI,oBAAoB;AAAA,IAClD,sBAAsB,QAAQ,IAAI,wBAAwB;AAAA,IAC1D,kBAAkB,QAAQ,IAAI,oBAAoB;AAAA,IAClD,cAAc,QAAQ,IAAI,gBAAgB;AAAA,EAC5C;AACA,SAAO;AACT;AAgBA,IAAI,iBAAwC;AAE5C,SAAS,cAA8B;AACrC,MAAI,eAAgB,QAAO;AAC3B,mBAAiB;AAAA,IACf,UAAU,QAAQ,IAAI,uBAAuB;AAAA,IAC7C,OAAO,QAAQ,IAAI,kBAAkB;AAAA,IACrC,gBAAgB,QAAQ,IAAI,2BAA2B;AAAA,IACvD,SAAS,QAAQ,IAAI,sBAAsB;AAAA,IAC3C,YAAY,aAAa,QAAQ,IAAI,gBAAgB,MAAS,KAAM,IAAO;AAAA,IAC3E,kBAAkB,qBAAqB,QAAQ,IAAI,wBAAwB;AAAA,IAC3E,UAAU,aAAa,QAAQ,IAAI,kBAAkB,KAAK,IAAI,GAAG;AAAA,EACnE;AACA,SAAO;AACT;AAGO,MAAM,WAA2B,IAAI,MAAM,CAAC,GAAqB;AAAA,EACtE,IAAI,SAAS,MAAc;AACzB,WAAO,YAAY,EAAE,IAA4B;AAAA,EACnD;AACF,CAAC;AAQM,MAAM,SAAS;AAAA,EACpB,MAAM;AAAA,EACN;AAAA,EACA,aAAa;AACf;AAgBA,SAAS,WAAoB;AAC3B,MAAI,mBAAmB,KAAM,QAAO;AACpC,MAAI;AACF,UAAM,EAAE,SAAS,IAAI,QAAQ,oBAAoB;AACjD,UAAM,QAAQ,SAAS,iBAAiB,EAAE,SAAS,KAAM,OAAO,CAAC,UAAU,QAAQ,QAAQ,GAAG,UAAU,QAAQ,CAAC,EAAE,KAAK;AACxH,qBAAiB,MAAM,SAAS;AAAA,EAClC,QAAQ;AACN,qBAAiB;AAAA,EACnB;AACA,SAAO;AACT;AAEO,SAAS,kBAAgC;AAC9C,QAAM,MAAM,SAAS;AACrB,SAAO;AAAA,IACL,QAAQ,CAAC,EAAE,IAAI,oBAAoB,IAAI;AAAA,IACvC,QAAQ,CAAC,CAAC,IAAI;AAAA,IACd,UAAU,CAAC,CAAC,IAAI;AAAA,IAChB,cAAc,CAAC,CAAC,SAAS;AAAA,IACzB,eAAe,CAAC,CAAC,SAAS,WAAW,SAAS;AAAA,IAC9C,UAAU,SAAS;AAAA,IACnB,QAAQ,CAAC,CAAC,IAAI,gBAAgB,SAAS;AAAA,EACzC;AACF;AAEO,SAAS,qBAAqB,YAAwC;AAC3E,QAAM,WAA+C;AAAA,IACnD,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,cAAc;AAAA,IACd,eAAe;AAAA,IACf,UAAU;AAAA,IACV,QAAQ;AAAA,EACV;AACA,SAAO,SAAS,UAAU;AAC5B;AAMO,MAAM,SAAS;AAAA,EACpB,sBAAsB;AAAA,EACtB,aAAa;AAAA,EACb,iBAAiB;AAAA,EACjB,aAAa;AAAA,EACb,YAAY;AAAA,EACZ,8BAA8B;AAAA,EAC9B,2BAA2B;AAAA,EAC3B,aAAa;AAAA,EACb,eAAe;AACjB;AAMO,MAAM,UAAU;AAAA,EACrB,gBAAgB;AAAA,EAChB,YAAY;AAAA,EACZ,mBAAmB;AAAA,EACnB,UAAU;AAAA,EACV,UAAU;AAAA,EACV,aAAa;AAAA,EACb,cAAc,CAAC,KAAM,KAAM,GAAI;AAAA,EAC/B,mBAAmB;AAAA,EACnB,mBAAmB;AACrB;AAMO,MAAM,mBAAmB;AAAA,EAC9B,QAAQ;AACV;AAMO,MAAM,SAAS;AAAA,EACpB,gBAAgB;AAAA,EAChB,YAAY;AAAA,EACZ,oBAAoB;AAAA,EACpB,iBAAiB;AAAA,EACjB,sBAAsB;AAAA,EACtB,WAAW;AAAA,EACX,WAAW;AAAA,EACX,aAAa;AAAA,EACb,cAAc,CAAC,KAAM,KAAM,KAAM,MAAO,IAAK;AAAA,EAC7C,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAarB;AAMO,MAAM,cAAsC;AAAA,EACjD,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,IAAI;AACN;AAYA,IAAI,sBAAkD;AAEtD,SAAS,mBAAwC;AAC/C,MAAI,oBAAqB,QAAO;AAChC,wBAAsB;AAAA,IACpB,OAAO,QAAQ,IAAI,wBAAwB;AAAA,IAC3C,YAAY;AAAA,IACZ,kBAAkB,QAAQ,IAAI,yBAAyB;AAAA,EACzD;AACA,SAAO;AACT;AAEO,MAAM,iBAAsC,IAAI,MAAM,CAAC,GAA0B;AAAA,EACtF,IAAI,SAAS,MAAc;AACzB,WAAO,iBAAiB,EAAE,IAAiC;AAAA,EAC7D;AACF,CAAC;AAaD,IAAI,iBAAwC;AAE5C,SAAS,cAA8B;AACrC,MAAI,eAAgB,QAAO;AAC3B,QAAM,MAAM,SAAS;AACr
B,mBAAiB;AAAA,IACf,SAAS,QAAQ,IAAI,iBAAiB,UAAU,CAAC,CAAC,IAAI;AAAA,IACtD,SAAS,IAAI,oBAAoB;AAAA,IACjC,UAAU;AAAA,IACV,OAAO;AAAA,EACT;AACA,SAAO;AACT;AAEO,MAAM,WAA2B,IAAI,MAAM,CAAC,GAAqB;AAAA,EACtE,IAAI,SAAS,MAAc;AACzB,WAAO,YAAY,EAAE,IAA4B;AAAA,EACnD;AACF,CAAC;",
+ "names": []
+ }
@@ -0,0 +1,40 @@
+ /**
+  * Deep research schema - batch research with dynamic token allocation
+  * Enhanced with comprehensive prompting for bugs, programming questions, and general research
+  */
+ import { z } from 'zod';
+ export declare const deepResearchParamsSchema: z.ZodObject<{
+   questions: z.ZodArray<z.ZodObject<{
+     question: z.ZodString;
+     file_attachments: z.ZodOptional<z.ZodArray<z.ZodObject<{
+       path: z.ZodString;
+       start_line: z.ZodOptional<z.ZodNumber>;
+       end_line: z.ZodOptional<z.ZodNumber>;
+       description: z.ZodOptional<z.ZodString>;
+     }, z.core.$strict>>>;
+   }, z.core.$strict>>;
+ }, z.core.$strict>;
+ export type DeepResearchParams = z.infer<typeof deepResearchParamsSchema>;
+ export declare const deepResearchQuestionResultSchema: z.ZodObject<{
+   question: z.ZodString;
+   content: z.ZodString;
+   success: z.ZodBoolean;
+   error: z.ZodOptional<z.ZodString>;
+   tokensUsed: z.ZodOptional<z.ZodNumber>;
+ }, z.core.$strict>;
+ export declare const deepResearchOutputSchema: z.ZodObject<{
+   totalQuestions: z.ZodNumber;
+   successful: z.ZodNumber;
+   failed: z.ZodNumber;
+   tokensPerQuestion: z.ZodNumber;
+   totalTokensUsed: z.ZodNumber;
+   results: z.ZodArray<z.ZodObject<{
+     question: z.ZodString;
+     content: z.ZodString;
+     success: z.ZodBoolean;
+     error: z.ZodOptional<z.ZodString>;
+     tokensUsed: z.ZodOptional<z.ZodNumber>;
+   }, z.core.$strict>>;
+ }, z.core.$strict>;
+ export type DeepResearchOutput = z.infer<typeof deepResearchOutputSchema>;
+ //# sourceMappingURL=deep-research.d.ts.map
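Because the schemas are strict Zod objects (note the `z.core.$strict` markers, which reject unknown keys), inputs can also be validated outside the MCP server. A hypothetical `safeParse` call against the runtime module (import path assumed):

import { deepResearchParamsSchema } from 'mcp-researchpowerpack-http/dist/src/schemas/deep-research.js';

const parsed = deepResearchParamsSchema.safeParse({
  questions: [
    {
      question: 'Why does my Node stream stall under backpressure?',
      file_attachments: [{ path: 'src/server.ts', start_line: 10, end_line: 42 }],
    },
  ],
});

if (!parsed.success) {
  console.error(parsed.error.issues); // strict objects reject unknown keys
}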
@@ -0,0 +1 @@
+ {"version":3,"file":"deep-research.d.ts","sourceRoot":"","sources":["../../../src/schemas/deep-research.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAmOxB,eAAO,MAAM,wBAAwB;;;;;;;;;;kBAA6C,CAAC;AACnF,MAAM,MAAM,kBAAkB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,wBAAwB,CAAC,CAAC;AAE1E,eAAO,MAAM,gCAAgC;;;;;;kBAoBlC,CAAC;AAEZ,eAAO,MAAM,wBAAwB;;;;;;;;;;;;;kBA6B1B,CAAC;AAEZ,MAAM,MAAM,kBAAkB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,wBAAwB,CAAC,CAAC"}