bluera-knowledge 0.9.26 → 0.9.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/.claude/commands/commit.md +4 -7
  2. package/.claude/hooks/post-edit-check.sh +21 -24
  3. package/.claude/skills/atomic-commits/SKILL.md +6 -0
  4. package/.claude-plugin/plugin.json +1 -1
  5. package/.env.example +4 -0
  6. package/.husky/pre-push +12 -2
  7. package/.versionrc.json +0 -4
  8. package/CHANGELOG.md +69 -0
  9. package/README.md +55 -20
  10. package/bun.lock +35 -1
  11. package/commands/crawl.md +2 -0
  12. package/dist/{chunk-BICFAWMN.js → chunk-DNOIM7BO.js} +73 -8
  13. package/dist/chunk-DNOIM7BO.js.map +1 -0
  14. package/dist/{chunk-5QMHZUC4.js → chunk-NJUMU4X2.js} +462 -105
  15. package/dist/chunk-NJUMU4X2.js.map +1 -0
  16. package/dist/{chunk-J7J6LXOJ.js → chunk-SZNTYLYT.js} +106 -41
  17. package/dist/chunk-SZNTYLYT.js.map +1 -0
  18. package/dist/index.js +65 -25
  19. package/dist/index.js.map +1 -1
  20. package/dist/mcp/server.js +2 -2
  21. package/dist/workers/background-worker-cli.js +2 -2
  22. package/eslint.config.js +1 -1
  23. package/package.json +3 -1
  24. package/src/analysis/ast-parser.test.ts +46 -0
  25. package/src/cli/commands/crawl.test.ts +99 -12
  26. package/src/cli/commands/crawl.ts +76 -24
  27. package/src/crawl/article-converter.ts +36 -1
  28. package/src/crawl/bridge.ts +18 -7
  29. package/src/crawl/intelligent-crawler.ts +45 -4
  30. package/src/db/embeddings.test.ts +16 -0
  31. package/src/logging/index.ts +29 -0
  32. package/src/logging/logger.test.ts +75 -0
  33. package/src/logging/logger.ts +147 -0
  34. package/src/logging/payload.test.ts +152 -0
  35. package/src/logging/payload.ts +121 -0
  36. package/src/mcp/handlers/search.handler.test.ts +28 -9
  37. package/src/mcp/handlers/search.handler.ts +69 -29
  38. package/src/mcp/handlers/store.handler.test.ts +1 -0
  39. package/src/mcp/server.ts +44 -16
  40. package/src/services/chunking.service.ts +23 -0
  41. package/src/services/index.service.test.ts +921 -1
  42. package/src/services/index.service.ts +76 -1
  43. package/src/services/index.ts +10 -1
  44. package/src/services/search.service.test.ts +573 -21
  45. package/src/services/search.service.ts +257 -105
  46. package/src/services/snippet.service.ts +28 -3
  47. package/src/services/token.service.test.ts +45 -0
  48. package/src/services/token.service.ts +33 -0
  49. package/src/types/result.test.ts +10 -0
  50. package/tests/integration/cli-consistency.test.ts +1 -4
  51. package/vitest.config.ts +4 -0
  52. package/dist/chunk-5QMHZUC4.js.map +0 -1
  53. package/dist/chunk-BICFAWMN.js.map +0 -1
  54. package/dist/chunk-J7J6LXOJ.js.map +0 -1
  55. package/scripts/readme-version-updater.cjs +0 -18
package/dist/{chunk-BICFAWMN.js → chunk-DNOIM7BO.js}
@@ -1,6 +1,9 @@
 import {
-  PythonBridge
-} from "./chunk-5QMHZUC4.js";
+  PythonBridge,
+  createLogger,
+  summarizePayload,
+  truncateForLog
+} from "./chunk-NJUMU4X2.js";
 
 // src/crawl/intelligent-crawler.ts
 import { EventEmitter } from "events";
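The createLogger, summarizePayload, and truncateForLog helpers come from the logging module added in this release (src/logging/), whose own source is not part of the hunks shown here. Every call site below uses a pino-style (context, message) signature, so a minimal stand-in consistent with those calls might look like the following sketch (hypothetical; not the package's actual implementation):

```typescript
// Hypothetical stand-in for createLogger from src/logging/logger.ts,
// inferred only from the (context, message) call sites in this diff.
type Level = "trace" | "debug" | "info" | "warn" | "error";
type LogFn = (context: Record<string, unknown>, message: string) => void;
type Logger = Record<Level, LogFn>;

const LEVELS: Level[] = ["trace", "debug", "info", "warn", "error"];

export function createLogger(component: string, minLevel: Level = "info"): Logger {
  const threshold = LEVELS.indexOf(minLevel);
  const make = (level: Level): LogFn => (context, message) => {
    if (LEVELS.indexOf(level) < threshold) return;
    // One structured JSON line per event, on stderr so stdout stays usable.
    process.stderr.write(
      JSON.stringify({ level, component, msg: message, ...context, time: Date.now() }) + "\n"
    );
  };
  return {
    trace: make("trace"),
    debug: make("debug"),
    info: make("info"),
    warn: make("warn"),
    error: make("error"),
  };
}
```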
@@ -291,7 +294,9 @@ function cleanupMarkdown(markdown) {
 }
 
 // src/crawl/article-converter.ts
+var logger = createLogger("article-converter");
 async function convertHtmlToMarkdown(html, url) {
+  logger.debug({ url, htmlLength: html.length }, "Starting HTML conversion");
   try {
     let articleHtml;
     let title;
@@ -300,11 +305,23 @@ async function convertHtmlToMarkdown(html, url) {
       if (article !== null && article.content !== void 0 && article.content !== "") {
         articleHtml = article.content;
         title = article.title !== void 0 && article.title !== "" ? article.title : void 0;
+        logger.debug({
+          url,
+          title,
+          extractedLength: articleHtml.length,
+          usedFullHtml: false
+        }, "Article content extracted");
       } else {
         articleHtml = html;
+        logger.debug({ url, usedFullHtml: true }, "Article extraction returned empty, using full HTML");
       }
-    } catch {
+    } catch (extractError) {
       articleHtml = html;
+      logger.debug({
+        url,
+        usedFullHtml: true,
+        error: extractError instanceof Error ? extractError.message : String(extractError)
+      }, "Article extraction failed, using full HTML");
     }
     const preprocessed = preprocessHtmlForCodeBlocks(articleHtml);
     const turndownService = new TurndownService({
@@ -333,12 +350,26 @@ ${hashes} ${cleanContent}
     });
     const rawMarkdown = turndownService.turndown(preprocessed);
     const markdown = cleanupMarkdown(rawMarkdown);
+    logger.debug({
+      url,
+      title,
+      rawMarkdownLength: rawMarkdown.length,
+      finalMarkdownLength: markdown.length
+    }, "HTML to markdown conversion complete");
+    logger.trace({
+      url,
+      markdownPreview: truncateForLog(markdown, 1e3)
+    }, "Markdown content preview");
     return {
       markdown,
       ...title !== void 0 && { title },
       success: true
     };
   } catch (error) {
+    logger.error({
+      url,
+      error: error instanceof Error ? error.message : String(error)
+    }, "HTML to markdown conversion failed");
     return {
       markdown: "",
       success: false,
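In the hunk above, truncateForLog(markdown, 1e3) gates a content preview behind trace level so full pages never land in logs by default. Its implementation lives in src/logging/payload.ts, which is not shown in this excerpt; a plausible one-function sketch, assuming it simply clamps the string and marks the cut:

```typescript
// Hypothetical sketch of truncateForLog; the real src/logging/payload.ts
// is not visible in this diff excerpt.
export function truncateForLog(text: string, maxLength: number): string {
  if (text.length <= maxLength) return text;
  const omitted = text.length - maxLength;
  return `${text.slice(0, maxLength)} [... ${omitted} chars truncated for log ...]`;
}
```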
@@ -348,6 +379,7 @@ ${hashes} ${cleanContent}
 }
 
 // src/crawl/intelligent-crawler.ts
+var logger2 = createLogger("crawler");
 var IntelligentCrawler = class extends EventEmitter {
   claudeClient;
   pythonBridge;
@@ -372,6 +404,12 @@ var IntelligentCrawler = class extends EventEmitter {
     } = options;
     this.visited.clear();
     this.stopped = false;
+    logger2.info({
+      seedUrl,
+      maxPages,
+      mode: simple ? "simple" : crawlInstruction !== void 0 && crawlInstruction !== "" ? "intelligent" : "simple",
+      hasExtractInstruction: extractInstruction !== void 0
+    }, "Starting crawl");
     const startProgress = {
       type: "start",
       pagesVisited: 0,
@@ -384,6 +422,10 @@ var IntelligentCrawler = class extends EventEmitter {
     } else {
       yield* this.crawlSimple(seedUrl, extractInstruction, maxPages, options.useHeadless ?? false);
     }
+    logger2.info({
+      seedUrl,
+      pagesVisited: this.visited.size
+    }, "Crawl complete");
     const completeProgress = {
       type: "complete",
       pagesVisited: this.visited.size,
@@ -484,9 +526,9 @@ var IntelligentCrawler = class extends EventEmitter {
           try {
             const links = await this.extractLinks(current.url, useHeadless);
             if (links.length === 0) {
-              console.warn(`No links found on ${current.url} - page may be a leaf node`);
+              logger2.debug({ url: current.url }, "No links found - page may be a leaf node");
             } else {
-              console.log(`Found ${String(links.length)} links on ${current.url}`);
+              logger2.debug({ url: current.url, linkCount: links.length }, "Links extracted from page");
             }
             for (const link of links) {
               if (!this.visited.has(link) && this.isSameDomain(seedUrl, link)) {
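The console.warn/console.log calls replaced in this hunk folded values into the message string; the structured calls keep the message constant and move the variables into fields, which is what makes the lines filterable downstream. With a logger like the sketch shown after the import hunk above, the replacement call would behave roughly like this (import path and output shape hypothetical):

```typescript
import { createLogger } from "./logging-sketch.js"; // hypothetical path to the sketch above

const logger = createLogger("crawler", "debug");
logger.debug({ url: "https://example.com/docs", linkCount: 42 }, "Links extracted from page");
// Emits a single JSON line to stderr, roughly:
// {"level":"debug","component":"crawler","msg":"Links extracted from page",
//  "url":"https://example.com/docs","linkCount":42,"time":1700000000000}
```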
@@ -532,8 +574,14 @@ var IntelligentCrawler = class extends EventEmitter {
     const html = await this.fetchHtml(url, useHeadless);
     const conversion = await convertHtmlToMarkdown(html, url);
     if (!conversion.success) {
+      logger2.error({ url, error: conversion.error }, "HTML to markdown conversion failed");
       throw new Error(`Failed to convert HTML: ${conversion.error ?? "Unknown error"}`);
     }
+    logger2.debug({
+      url,
+      title: conversion.title,
+      markdownLength: conversion.markdown.length
+    }, "Article converted to markdown");
     let extracted;
     if (extractInstruction !== void 0 && extractInstruction !== "") {
       if (!ClaudeClient.isAvailable()) {
@@ -583,12 +631,21 @@ var IntelligentCrawler = class extends EventEmitter {
    * Fetch HTML content from a URL
    */
   async fetchHtml(url, useHeadless = false) {
+    const startTime = Date.now();
+    logger2.debug({ url, useHeadless }, "Fetching HTML");
     if (useHeadless) {
       try {
         const result = await this.pythonBridge.fetchHeadless(url);
+        const durationMs = Date.now() - startTime;
+        logger2.info({
+          url,
+          useHeadless: true,
+          durationMs,
+          ...summarizePayload(result.html, "raw-html", url)
+        }, "Raw HTML fetched");
         return result.html;
       } catch (error) {
-        console.warn(`Headless fetch failed for ${url}, falling back to axios:`, error);
+        logger2.warn({ url, error: error instanceof Error ? error.message : String(error) }, "Headless fetch failed, falling back to axios");
       }
     }
     try {
@@ -598,8 +655,16 @@ var IntelligentCrawler = class extends EventEmitter {
           "User-Agent": "Mozilla/5.0 (compatible; bluera-knowledge-crawler/1.0)"
         }
       });
+      const durationMs = Date.now() - startTime;
+      logger2.info({
+        url,
+        useHeadless: false,
+        durationMs,
+        ...summarizePayload(response.data, "raw-html", url)
+      }, "Raw HTML fetched");
       return response.data;
     } catch (error) {
+      logger2.error({ url, error: error instanceof Error ? error.message : String(error) }, "Failed to fetch HTML");
       throw new Error(
         `Failed to fetch ${url}: ${error instanceof Error ? error.message : String(error)}`
       );
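In both fetch paths above, summarizePayload(…, "raw-html", url) is spread into the log context with ..., so whatever it returns must be a flat object of summary fields rather than the payload itself. The real implementation is in src/logging/payload.ts (not shown in this excerpt); a sketch under that constraint, with field names invented for illustration:

```typescript
import { createHash } from "node:crypto";

// Hypothetical: the field names below are invented; the only constraint
// visible in the diff is that the result is an object safe to spread into
// a log context (sizes and digests, never the raw payload).
export function summarizePayload(
  payload: string,
  kind: string,
  source: string
): Record<string, unknown> {
  return {
    payloadKind: kind,
    payloadSource: source,
    payloadBytes: Buffer.byteLength(payload, "utf8"),
    payloadSha256: createHash("sha256").update(payload).digest("hex").slice(0, 12),
  };
}
```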
@@ -625,7 +690,7 @@ var IntelligentCrawler = class extends EventEmitter {
       return firstPage.links;
     } catch (error) {
       const errorMessage = error instanceof Error ? error.message : String(error);
-      console.error(`Failed to extract links from ${url}:`, errorMessage);
+      logger2.error({ url, error: errorMessage }, "Failed to extract links");
       throw new Error(`Link extraction failed for ${url}: ${errorMessage}`);
     }
   }
@@ -653,4 +718,4 @@ var IntelligentCrawler = class extends EventEmitter {
 export {
   IntelligentCrawler
 };
-//# sourceMappingURL=chunk-BICFAWMN.js.map
+//# sourceMappingURL=chunk-DNOIM7BO.js.map
package/dist/chunk-DNOIM7BO.js.map
@@ -0,0 +1 @@
+[new file: single-line version-3 source map for chunk-DNOIM7BO.js, containing the encoded mappings plus embedded sourcesContent for src/crawl/intelligent-crawler.ts, src/crawl/claude-client.ts, src/crawl/article-converter.ts, and src/crawl/markdown-utils.ts; machine-generated contents omitted for readability]