llm-search-tools 1.1.0

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (126)
  1. package/LICENSE +21 -0
  2. package/README.md +244 -0
  3. package/dist/index.d.ts +18 -0
  4. package/dist/index.js +40 -0
  5. package/dist/index.js.map +1 -0
  6. package/dist/integration.test.d.ts +1 -0
  7. package/dist/integration.test.js +237 -0
  8. package/dist/modules/answerbox.test.d.ts +1 -0
  9. package/dist/modules/answerbox.test.js +105 -0
  10. package/dist/modules/autocomplete.d.ts +11 -0
  11. package/dist/modules/autocomplete.js +159 -0
  12. package/dist/modules/autocomplete.test.d.ts +1 -0
  13. package/dist/modules/autocomplete.test.js +188 -0
  14. package/dist/modules/common.d.ts +26 -0
  15. package/dist/modules/common.js +263 -0
  16. package/dist/modules/common.test.d.ts +1 -0
  17. package/dist/modules/common.test.js +87 -0
  18. package/dist/modules/crawl.d.ts +9 -0
  19. package/dist/modules/crawl.js +117 -0
  20. package/dist/modules/crawl.test.d.ts +1 -0
  21. package/dist/modules/crawl.test.js +48 -0
  22. package/dist/modules/events.d.ts +8 -0
  23. package/dist/modules/events.js +129 -0
  24. package/dist/modules/events.test.d.ts +1 -0
  25. package/dist/modules/events.test.js +104 -0
  26. package/dist/modules/finance.d.ts +10 -0
  27. package/dist/modules/finance.js +20 -0
  28. package/dist/modules/finance.test.d.ts +1 -0
  29. package/dist/modules/finance.test.js +77 -0
  30. package/dist/modules/flights.d.ts +8 -0
  31. package/dist/modules/flights.js +135 -0
  32. package/dist/modules/flights.test.d.ts +1 -0
  33. package/dist/modules/flights.test.js +128 -0
  34. package/dist/modules/hackernews.d.ts +8 -0
  35. package/dist/modules/hackernews.js +87 -0
  36. package/dist/modules/hackernews.js.map +1 -0
  37. package/dist/modules/images.test.d.ts +1 -0
  38. package/dist/modules/images.test.js +145 -0
  39. package/dist/modules/integrations.test.d.ts +1 -0
  40. package/dist/modules/integrations.test.js +93 -0
  41. package/dist/modules/media.d.ts +11 -0
  42. package/dist/modules/media.js +132 -0
  43. package/dist/modules/media.test.d.ts +1 -0
  44. package/dist/modules/media.test.js +186 -0
  45. package/dist/modules/news.d.ts +3 -0
  46. package/dist/modules/news.js +39 -0
  47. package/dist/modules/news.test.d.ts +1 -0
  48. package/dist/modules/news.test.js +88 -0
  49. package/dist/modules/parser.d.ts +19 -0
  50. package/dist/modules/parser.js +361 -0
  51. package/dist/modules/parser.test.d.ts +1 -0
  52. package/dist/modules/parser.test.js +151 -0
  53. package/dist/modules/reddit.d.ts +21 -0
  54. package/dist/modules/reddit.js +107 -0
  55. package/dist/modules/scrape.d.ts +16 -0
  56. package/dist/modules/scrape.js +272 -0
  57. package/dist/modules/scrape.test.d.ts +1 -0
  58. package/dist/modules/scrape.test.js +232 -0
  59. package/dist/modules/scraper.d.ts +12 -0
  60. package/dist/modules/scraper.js +640 -0
  61. package/dist/modules/scrapers/anidb.d.ts +8 -0
  62. package/dist/modules/scrapers/anidb.js +156 -0
  63. package/dist/modules/scrapers/duckduckgo.d.ts +6 -0
  64. package/dist/modules/scrapers/duckduckgo.js +284 -0
  65. package/dist/modules/scrapers/google-news.d.ts +2 -0
  66. package/dist/modules/scrapers/google-news.js +60 -0
  67. package/dist/modules/scrapers/google.d.ts +6 -0
  68. package/dist/modules/scrapers/google.js +211 -0
  69. package/dist/modules/scrapers/searxng.d.ts +2 -0
  70. package/dist/modules/scrapers/searxng.js +93 -0
  71. package/dist/modules/scrapers/thetvdb.d.ts +3 -0
  72. package/dist/modules/scrapers/thetvdb.js +147 -0
  73. package/dist/modules/scrapers/tmdb.d.ts +3 -0
  74. package/dist/modules/scrapers/tmdb.js +172 -0
  75. package/dist/modules/scrapers/yahoo-finance.d.ts +2 -0
  76. package/dist/modules/scrapers/yahoo-finance.js +33 -0
  77. package/dist/modules/search.d.ts +5 -0
  78. package/dist/modules/search.js +45 -0
  79. package/dist/modules/search.js.map +1 -0
  80. package/dist/modules/search.test.d.ts +1 -0
  81. package/dist/modules/search.test.js +219 -0
  82. package/dist/modules/urbandictionary.d.ts +12 -0
  83. package/dist/modules/urbandictionary.js +26 -0
  84. package/dist/modules/webpage.d.ts +4 -0
  85. package/dist/modules/webpage.js +150 -0
  86. package/dist/modules/webpage.js.map +1 -0
  87. package/dist/modules/wikipedia.d.ts +5 -0
  88. package/dist/modules/wikipedia.js +85 -0
  89. package/dist/modules/wikipedia.js.map +1 -0
  90. package/dist/scripts/interactive-search.d.ts +1 -0
  91. package/dist/scripts/interactive-search.js +98 -0
  92. package/dist/test.d.ts +1 -0
  93. package/dist/test.js +179 -0
  94. package/dist/test.js.map +1 -0
  95. package/dist/testBraveSearch.d.ts +1 -0
  96. package/dist/testBraveSearch.js +34 -0
  97. package/dist/testDuckDuckGo.d.ts +1 -0
  98. package/dist/testDuckDuckGo.js +52 -0
  99. package/dist/testEcosia.d.ts +1 -0
  100. package/dist/testEcosia.js +57 -0
  101. package/dist/testSearchModule.d.ts +1 -0
  102. package/dist/testSearchModule.js +95 -0
  103. package/dist/testwebpage.d.ts +1 -0
  104. package/dist/testwebpage.js +81 -0
  105. package/dist/types.d.ts +174 -0
  106. package/dist/types.js +3 -0
  107. package/dist/types.js.map +1 -0
  108. package/dist/utils/createTestDocx.d.ts +1 -0
  109. package/dist/utils/createTestDocx.js +58 -0
  110. package/dist/utils/htmlcleaner.d.ts +20 -0
  111. package/dist/utils/htmlcleaner.js +172 -0
  112. package/docs/README.md +275 -0
  113. package/docs/autocomplete.md +73 -0
  114. package/docs/crawling.md +88 -0
  115. package/docs/events.md +58 -0
  116. package/docs/examples.md +158 -0
  117. package/docs/finance.md +60 -0
  118. package/docs/flights.md +71 -0
  119. package/docs/hackernews.md +121 -0
  120. package/docs/media.md +87 -0
  121. package/docs/news.md +75 -0
  122. package/docs/parser.md +197 -0
  123. package/docs/scraper.md +347 -0
  124. package/docs/search.md +106 -0
  125. package/docs/wikipedia.md +91 -0
  126. package/package.json +97 -0
package/dist/modules/search.js
@@ -0,0 +1,45 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.searchSearxNG = exports.searchDuckDuckGo = exports.searchGoogle = void 0;
+ exports.search = search;
+ const google_1 = require("./scrapers/google");
+ const duckduckgo_1 = require("./scrapers/duckduckgo");
+ const searxng_1 = require("./scrapers/searxng");
+ // Re-export specific search functions
+ var google_2 = require("./scrapers/google");
+ Object.defineProperty(exports, "searchGoogle", { enumerable: true, get: function () { return google_2.searchGoogle; } });
+ var duckduckgo_2 = require("./scrapers/duckduckgo");
+ Object.defineProperty(exports, "searchDuckDuckGo", { enumerable: true, get: function () { return duckduckgo_2.searchDuckDuckGo; } });
+ var searxng_2 = require("./scrapers/searxng");
+ Object.defineProperty(exports, "searchSearxNG", { enumerable: true, get: function () { return searxng_2.searchSearxNG; } });
+ // Unified search that tries engines in sequence: DuckDuckGo -> Google -> SearxNG
+ async function search(query, options = {}) {
+ const errors = [];
+ // 1. Try DuckDuckGo first (most lenient)
+ try {
+ return await (0, duckduckgo_1.searchDuckDuckGo)(query, options);
+ }
+ catch (err) {
+ errors.push(err);
+ }
+ // 2. Try Google (best quality, but strict bot detection)
+ try {
+ return await (0, google_1.searchGoogle)(query, options);
+ }
+ catch (err) {
+ errors.push(err);
+ }
+ // 3. Try SearxNG (fallback to public instances)
+ try {
+ return await (0, searxng_1.searchSearxNG)(query, options);
+ }
+ catch (err) {
+ errors.push(err);
+ }
+ // If all failed, throw error with details
+ throw {
+ message: "All search engines failed",
+ code: "ALL_SEARCH_ENGINES_FAILED",
+ errors,
+ };
+ }
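
Note: the compiled module above re-exports searchGoogle, searchDuckDuckGo, and searchSearxNG, and adds a unified search() that tries DuckDuckGo, then Google, then SearxNG, throwing an aggregate error only when all three fail. A minimal usage sketch follows; the top-level import path is an assumption (the package's index re-exports are not shown in this hunk), while the result fields and error code match the code above.

import { search } from "llm-search-tools"; // assumed entry-point re-export

async function demo(): Promise<void> {
  try {
    const results = await search("typescript web scraping");
    for (const r of results) {
      // Result shape per the scrapers above: title, url, snippet, source
      console.log(`[${r.source}] ${r.title} -> ${r.url}`);
    }
  } catch (err) {
    // Thrown only when DuckDuckGo, Google, and SearxNG all fail
    const e = err as { message: string; code: string; errors: unknown[] };
    if (e.code === "ALL_SEARCH_ENGINES_FAILED") {
      console.error(e.message, e.errors);
    }
  }
}
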
package/dist/modules/search.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"search.js","sourceRoot":"","sources":["../../src/modules/search.ts"],"names":[],"mappings":";AAAA,8EAA8E;;AAY9E,oCA8BC;AAED,4CAwBC;AAGD,wBASC;AA9ED,yCAAyE;AACzE,uDAAwE;AAGxE,MAAM,cAAc,GAAkB;IACpC,KAAK,EAAE,EAAE;IACT,UAAU,EAAE,IAAI;IAChB,OAAO,EAAE,KAAK;CACf,CAAC;AAEK,KAAK,UAAU,YAAY,CAAC,KAAa,EAAE,UAAyB,EAAE;IAC3E,IAAI,CAAC;QACH,sBAAsB;QACtB,MAAM,IAAI,GAAG,EAAE,GAAG,cAAc,EAAE,GAAG,OAAO,EAAE,CAAC;QAE/C,uBAAuB;QACvB,MAAM,OAAO,GAAG,MAAM,IAAA,kBAAY,EAAC;YACjC,KAAK;YACL,aAAa,EAAE;gBACb,OAAO,EAAE,IAAI,CAAC,OAAO;gBACrB,MAAM,EAAE;oBACN,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK;iBACzC;aACF;SACF,CAAC,CAAC;QAEH,wBAAwB;QACxB,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,CAAuB,EAAE,EAAE,CAAC,CAAC;YAC/C,KAAK,EAAG,CAAS,CAAC,KAAK,IAAI,EAAE;YAC7B,GAAG,EAAG,CAAS,CAAC,IAAI,IAAI,EAAE,EAAE,0BAA0B;YACtD,OAAO,EAAG,CAAS,CAAC,WAAW,IAAI,EAAE;YACrC,MAAM,EAAE,QAAiB;SAC1B,CAAC,CAAC,CAAC;IACN,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM;YACJ,OAAO,EAAE,yBAAyB;YAClC,IAAI,EAAE,qBAAqB;YAC3B,aAAa,EAAE,GAAG;SACJ,CAAC;IACnB,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,gBAAgB,CAAC,KAAa,EAAE,UAAyB,EAAE;IAC/E,IAAI,CAAC;QACH,MAAM,IAAI,GAAG,EAAE,GAAG,cAAc,EAAE,GAAG,OAAO,EAAE,CAAC;QAE/C,aAAa;QACb,MAAM,OAAO,GAAG,MAAM,IAAA,yBAAU,EAAC,KAAK,EAAE;YACtC,UAAU,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,iCAAc,CAAC,MAAM,CAAC,CAAC,CAAC,iCAAc,CAAC,GAAG;SACzE,CAAC,CAAC;QAEH,OAAO,OAAO,CAAC,OAAO;aACnB,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC;aACpB,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;YACT,KAAK,EAAE,CAAC,CAAC,KAAK;YACd,GAAG,EAAE,CAAC,CAAC,GAAG;YACV,OAAO,EAAE,CAAC,CAAC,WAAW;YACtB,MAAM,EAAE,YAAqB;SAC9B,CAAC,CAAC,CAAC;IACR,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM;YACJ,OAAO,EAAE,6BAA6B;YACtC,IAAI,EAAE,kBAAkB;YACxB,aAAa,EAAE,GAAG;SACJ,CAAC;IACnB,CAAC;AACH,CAAC;AAED,+DAA+D;AACxD,KAAK,UAAU,MAAM,CAAC,KAAa,EAAE,UAAyB,EAAE;IACrE,IAAI,CAAC;QACH,uBAAuB;QACvB,OAAO,MAAM,gBAAgB,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAChD,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,kCAAkC;QAClC,OAAO,CAAC,IAAI,CAAC,qDAAqD,EAAE,GAAG,CAAC,CAAC;QACzE,OAAO,MAAM,YAAY,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IAC5C,CAAC;AACH,CAAC"}
package/dist/modules/search.test.d.ts
@@ -0,0 +1 @@
+ export {};
package/dist/modules/search.test.js
@@ -0,0 +1,219 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+ })();
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const vitest_1 = require("vitest");
+ const search_1 = require("./search");
+ const common = __importStar(require("./common"));
+ const google_sr_1 = require("google-sr");
+ // Mock dependencies
+ vitest_1.vi.mock("google-sr");
+ vitest_1.vi.mock("./common", async () => {
+ const actual = await vitest_1.vi.importActual("./common");
+ return {
+ ...actual,
+ createStealthBrowser: vitest_1.vi.fn(),
+ fetchWithDetection: vitest_1.vi.fn(),
+ parseProxyConfig: vitest_1.vi.fn(),
+ };
+ });
+ // Mock the scrapers modules to test orchestration separately if needed,
+ // but for now we are testing the full flow so we'll mock the low-level fetch/puppeteer.
+ // actually, let's mock the implementation details (fetch/puppeteer) to test the scraper logic.
+ (0, vitest_1.describe)("Search Module", () => {
+ (0, vitest_1.beforeEach)(() => {
+ vitest_1.vi.resetAllMocks();
+ common.parseProxyConfig.mockReturnValue(null);
+ });
+ (0, vitest_1.describe)("searchGoogle", () => {
+ (0, vitest_1.it)("should return results using google-sr library", async () => {
+ const mockResults = [{ title: "Test Title", link: "https://example.com", description: "Test Snippet" }];
+ google_sr_1.search.mockResolvedValue(mockResults);
+ common.fetchWithDetection.mockResolvedValue({ headers: {}, body: "html" });
+ const results = await (0, search_1.searchGoogle)("test query");
+ (0, vitest_1.expect)(results).toHaveLength(1);
+ (0, vitest_1.expect)(results[0]).toEqual({
+ title: "Test Title",
+ url: "https://example.com",
+ snippet: "Test Snippet",
+ source: "google",
+ });
+ });
+ (0, vitest_1.it)("should fallback to puppeteer if bot protection detected", async () => {
+ // Mock fetchWithDetection to throw bot protection error
+ const error = new Error("Bot protection detected");
+ common.fetchWithDetection.mockRejectedValue(error);
+ // Mock puppeteer execution via createStealthBrowser
+ const mockPage = {
+ setViewport: vitest_1.vi.fn(),
+ setExtraHTTPHeaders: vitest_1.vi.fn(),
+ goto: vitest_1.vi.fn(),
+ waitForSelector: vitest_1.vi.fn(),
+ evaluate: vitest_1.vi
+ .fn()
+ .mockResolvedValue([
+ { title: "Puppeteer Title", url: "https://pup.com", snippet: "Pup Snippet", source: "google" },
+ ]),
+ close: vitest_1.vi.fn(),
+ };
+ const mockBrowser = {
+ newPage: vitest_1.vi.fn().mockResolvedValue(mockPage),
+ close: vitest_1.vi.fn(),
+ };
+ common.createStealthBrowser.mockResolvedValue(mockBrowser);
+ const results = await (0, search_1.searchGoogle)("test query", { antiBot: { enabled: true } });
+ (0, vitest_1.expect)(results).toHaveLength(1);
+ (0, vitest_1.expect)(results[0].title).toBe("Puppeteer Title");
+ (0, vitest_1.expect)(common.createStealthBrowser).toHaveBeenCalled();
+ (0, vitest_1.expect)(mockBrowser.close).toHaveBeenCalled();
+ });
+ });
+ (0, vitest_1.describe)("searchDuckDuckGo", () => {
+ (0, vitest_1.it)("should return results by scraping HTML", async () => {
+ const mockHtml = `
+ <div class="result">
+ <h2 class="result__title">
+ <a href="/l/?uddg=https%3A%2F%2Fddg.com">DDG Title</a>
+ </h2>
+ <div class="result__snippet">DDG Snippet</div>
+ <div class="result__url">https://ddg.com</div>
+ </div>
+ `;
+ common.fetchWithDetection.mockResolvedValue({ headers: {}, body: mockHtml });
+ const results = await (0, search_1.searchDuckDuckGo)("test query");
+ (0, vitest_1.expect)(results).toHaveLength(1);
+ (0, vitest_1.expect)(results[0]).toEqual({
+ title: "DDG Title",
+ url: "https://ddg.com",
+ snippet: "DDG Snippet",
+ source: "duckduckgo",
+ });
+ });
+ (0, vitest_1.it)("should fallback to puppeteer if HTML scraping fails or returns no results", async () => {
+ // Mock HTML scrape returning empty results (e.g. strict bot protection that returned valid HTML but no results)
+ common.fetchWithDetection.mockResolvedValue({
+ headers: {},
+ body: "<html><body>No results</body></html>",
+ });
+ // Mock puppeteer
+ const mockPage = {
+ setViewport: vitest_1.vi.fn(),
+ setExtraHTTPHeaders: vitest_1.vi.fn(),
+ goto: vitest_1.vi.fn(),
+ waitForSelector: vitest_1.vi.fn(),
+ evaluate: vitest_1.vi
+ .fn()
+ .mockResolvedValue([
+ { title: "Puppeteer DDG", url: "https://ddg-pup.com", snippet: "Snippet", source: "duckduckgo" },
+ ]),
+ close: vitest_1.vi.fn(),
+ };
+ const mockBrowser = {
+ newPage: vitest_1.vi.fn().mockResolvedValue(mockPage),
+ close: vitest_1.vi.fn(),
+ };
+ common.createStealthBrowser.mockResolvedValue(mockBrowser);
+ // Use a unique query to avoid cache hit from previous test
+ const results = await (0, search_1.searchDuckDuckGo)("test query fallback");
+ (0, vitest_1.expect)(results).toHaveLength(1);
+ (0, vitest_1.expect)(results[0].title).toBe("Puppeteer DDG");
+ (0, vitest_1.expect)(common.createStealthBrowser).toHaveBeenCalled();
+ });
+ });
+ (0, vitest_1.describe)("searchSearxNG", () => {
+ (0, vitest_1.it)("should return results from JSON API", async () => {
+ const mockResponse = {
+ results: [{ title: "Searx Result", url: "https://searx.com", content: "Searx Snippet" }],
+ };
+ common.fetchWithDetection.mockResolvedValue({
+ headers: {},
+ body: JSON.stringify(mockResponse),
+ });
+ const results = await (0, search_1.searchSearxNG)("test query");
+ (0, vitest_1.expect)(results).toHaveLength(1);
+ (0, vitest_1.expect)(results[0]).toEqual({
+ title: "Searx Result",
+ url: "https://searx.com",
+ snippet: "Searx Snippet",
+ source: "searxng",
+ });
+ });
+ });
+ (0, vitest_1.describe)("Unified Search", () => {
+ (0, vitest_1.it)("should try DuckDuckGo first", async () => {
+ // Mock DDG success
+ const mockHtml = `
+ <div class="result">
+ <h2 class="result__title">
+ <a href="/l/?uddg=https%3A%2F%2Fddg.com">DDG Title</a>
+ </h2>
+ <div class="result__snippet">Desc</div>
+ </div>`;
+ common.fetchWithDetection.mockResolvedValue({ headers: {}, body: mockHtml });
+ const results = await (0, search_1.search)("unified");
+ (0, vitest_1.expect)(results[0].source).toBe("duckduckgo");
+ });
+ (0, vitest_1.it)("should fallback to Google if DuckDuckGo fails", async () => {
+ // Mock DDG failure (both HTML and Puppeteer)
+ // 1. Fetch HTML -> throws error
+ // 2. Fallback to Puppeteer -> throws error
+ common.fetchWithDetection
+ .mockRejectedValueOnce(new Error("DDG Fetch Fail")) // DDG HTML
+ .mockResolvedValueOnce({ headers: {}, body: "google html" }); // Google Fetch (for next step)
+ // Mock createStealthBrowser to throw for DDG puppeteer attempt to simulate full failure
+ common.createStealthBrowser.mockRejectedValueOnce(new Error("DDG Puppeteer Fail"));
+ // Mock Google Success
+ google_sr_1.search.mockResolvedValue([
+ { title: "Google", link: "https://google.com", description: "desc" },
+ ]);
+ const results = await (0, search_1.search)("fallback");
+ (0, vitest_1.expect)(results[0].source).toBe("google");
+ });
+ (0, vitest_1.it)("should fallback to SearxNG if Google fails", async () => {
+ // Fail DDG (Fetch + Puppeteer)
+ common.fetchWithDetection.mockRejectedValueOnce(new Error("DDG Fail"));
+ common.createStealthBrowser.mockRejectedValueOnce(new Error("DDG Puppeteer Fail"));
+ // Fail Google (Fetch + Puppeteer)
+ common.fetchWithDetection.mockRejectedValueOnce(new Error("Google Fail"));
+ common.createStealthBrowser.mockRejectedValueOnce(new Error("Google Puppeteer Fail"));
+ // SearxNG Success
+ common.fetchWithDetection.mockResolvedValueOnce({
+ headers: {},
+ body: JSON.stringify({ results: [{ title: "Searx", url: "https://s.com", content: "c" }] }),
+ });
+ const results = await (0, search_1.search)("deep fallback");
+ (0, vitest_1.expect)(results[0].source).toBe("searxng");
+ });
+ });
+ });
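
The suite above covers per-engine success and the DuckDuckGo -> Google -> SearxNG fallback chain, but not the terminal failure path of search(). A hedged sketch of such a case is below, written in TypeScript source form rather than the compiled output shown here; it mirrors the mocking setup in this file and assumes that a fresh query string avoids any scraper-level result cache.

import { describe, it, expect, vi } from "vitest";
import { search } from "./search";
import * as common from "./common";
import * as googleSr from "google-sr";

// Same mocks as the compiled suite above: stub google-sr and the low-level helpers in ./common
vi.mock("google-sr");
vi.mock("./common", async () => ({
  ...(await vi.importActual<typeof import("./common")>("./common")),
  createStealthBrowser: vi.fn(),
  fetchWithDetection: vi.fn(),
  parseProxyConfig: vi.fn(),
}));

describe("search() error contract (sketch)", () => {
  it("throws ALL_SEARCH_ENGINES_FAILED when every engine fails", async () => {
    vi.mocked(common.parseProxyConfig).mockReturnValue(null as never);
    // Every HTTP fetch and every Puppeteer launch fails, so all three engines should throw
    vi.mocked(common.fetchWithDetection).mockRejectedValue(new Error("network down"));
    vi.mocked(common.createStealthBrowser).mockRejectedValue(new Error("no browser"));
    vi.mocked(googleSr.search).mockRejectedValue(new Error("google-sr fail"));

    // A query not used elsewhere avoids any cached results
    await expect(search("unique failing query")).rejects.toMatchObject({
      code: "ALL_SEARCH_ENGINES_FAILED",
    });
  });
});
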
package/dist/modules/urbandictionary.d.ts
@@ -0,0 +1,12 @@
+ interface UrbanDictionaryDefinition {
+ definition: string;
+ permalink: string;
+ thumbs_up: number;
+ author: string;
+ word: string;
+ written_on: string;
+ example: string;
+ thumbs_down: number;
+ }
+ export declare const searchUrbanDictionary: (query: string) => Promise<UrbanDictionaryDefinition[]>;
+ export {};
package/dist/modules/urbandictionary.js
@@ -0,0 +1,26 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.searchUrbanDictionary = void 0;
+ // ughh another api to deal with... at least this one is simple
+ const node_fetch_1 = __importDefault(require("node-fetch"));
+ // look i just want this to work ok
+ const searchUrbanDictionary = async (query) => {
+ try {
+ // welp here goes nothing
+ const response = await (0, node_fetch_1.default)(`https://api.urbandictionary.com/v0/define?term=${encodeURIComponent(query)}`);
+ if (!response.ok) {
+ throw new Error('urban dictionary api is being weird again');
+ }
+ const data = await response.json();
+ return data.list;
+ }
+ catch (error) {
+ // oof something broke
+ console.error('urban dictionary search failed:', error);
+ return [];
+ }
+ };
+ exports.searchUrbanDictionary = searchUrbanDictionary;
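
For reference, a usage sketch of searchUrbanDictionary; the import path is an assumption, while the signature and the empty-array fallback on API errors come from the declaration and implementation above.

import { searchUrbanDictionary } from "llm-search-tools"; // assumed re-export

async function topDefinition(term: string): Promise<void> {
  const defs = await searchUrbanDictionary(term); // resolves to [] if the API call fails
  const top = [...defs].sort((a, b) => b.thumbs_up - a.thumbs_up)[0];
  if (top) {
    console.log(`${top.word}: ${top.definition}`);
    console.log(`example: ${top.example} (${top.thumbs_up} up / ${top.thumbs_down} down)`);
  } else {
    console.log(`No definitions found for "${term}"`);
  }
}
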
package/dist/modules/webpage.d.ts
@@ -0,0 +1,4 @@
+ import { WebpageContent } from '../types';
+ export declare function getWebpageContent(url: string, usePuppeteer?: boolean): Promise<WebpageContent>;
+ export declare function getWebpageText(url: string, usePuppeteer?: boolean): Promise<string>;
+ export declare function isUrlAccessible(url: string): Promise<boolean>;
package/dist/modules/webpage.js
@@ -0,0 +1,150 @@
+ "use strict";
+ // webpage.ts - extract readable content from webpages using readability n stuff
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.getWebpageContent = getWebpageContent;
+ exports.getWebpageText = getWebpageText;
+ exports.isUrlAccessible = isUrlAccessible;
+ const readability_1 = require("@mozilla/readability");
+ const jsdom_1 = require("jsdom");
+ const puppeteer_1 = __importDefault(require("puppeteer"));
+ const wikipedia_1 = require("./wikipedia");
+ const hackernews_1 = require("./hackernews");
+ // clean up text by removing excessive whitespace and making it more readable
+ function cleanText(text) {
+ return text
+ .replace(/[\n\s\r]+/g, ' ')
+ .replace(/([.!?])\s+/g, '$1\n\n')
+ .replace(/\n{3,}/g, '\n\n')
+ .replace(/\s+/g, ' ')
+ .trim();
+ }
+ // check url type and get appropriate handler
+ function getUrlType(url) {
+ try {
+ const urlObj = new URL(url);
+ const hostname = urlObj.hostname;
+ if (hostname.includes('wikipedia.org')) {
+ return 'wikipedia';
+ }
+ if (hostname === 'news.ycombinator.com' && url.includes('item?id=')) {
+ return 'hackernews';
+ }
+ // list of domains that don't work well with readability
+ const unsupported = [
+ 'youtube.com', 'youtu.be', 'vimeo.com',
+ 'twitter.com', 'x.com', 'instagram.com',
+ 'facebook.com', 'linkedin.com'
+ ];
+ if (unsupported.some(domain => hostname.includes(domain))) {
+ return 'unsupported';
+ }
+ return 'general';
+ }
+ catch {
+ return 'unsupported';
+ }
+ }
+ // get webpage content using readability
+ async function getWebpageContent(url, usePuppeteer = false) {
+ try {
+ const urlType = getUrlType(url);
+ // handle special cases
+ if (urlType === 'wikipedia') {
+ const title = url.split('/wiki/')[1]?.replace(/_/g, ' ') || url;
+ const content = await (0, wikipedia_1.wikiGetContent)(title);
+ return {
+ title,
+ content,
+ textContent: cleanText(content),
+ length: content.length,
+ excerpt: content.slice(0, 200) + '...',
+ siteName: 'Wikipedia'
+ };
+ }
+ if (urlType === 'hackernews') {
+ const id = parseInt(url.split('id=')[1]);
+ const story = await (0, hackernews_1.getStoryById)(id);
+ const content = story.snippet || story.title || 'No content available';
+ const cleanedContent = cleanText(content);
+ return {
+ title: story.title || url,
+ content: content,
+ textContent: cleanedContent,
+ length: cleanedContent.length,
+ excerpt: cleanedContent.slice(0, 200) + (cleanedContent.length > 200 ? '...' : ''),
+ siteName: 'Hacker News'
+ };
+ }
+ if (urlType === 'unsupported') {
+ return {
+ title: url,
+ content: '',
+ textContent: 'This URL type is not supported for content extraction.',
+ length: 0,
+ excerpt: 'Content not available - URL type not supported'
+ };
+ }
+ // handle general case with readability
+ let html;
+ if (usePuppeteer) {
+ const browser = await puppeteer_1.default.launch({
+ headless: true,
+ args: ['--no-sandbox', '--disable-setuid-sandbox']
+ });
+ const page = await browser.newPage();
+ await page.goto(url, { waitUntil: 'networkidle0' });
+ html = await page.content();
+ await browser.close();
+ }
+ else {
+ const response = await fetch(url);
+ html = await response.text();
+ }
+ const dom = new jsdom_1.JSDOM(html, { url });
+ const reader = new readability_1.Readability(dom.window.document);
+ const article = reader.parse();
+ if (!article) {
+ return {
+ title: url,
+ content: '',
+ textContent: 'Failed to extract readable content from this page.',
+ length: 0,
+ excerpt: 'Content extraction failed'
+ };
+ }
+ const cleanedText = cleanText(article.textContent || '');
+ return {
+ title: article.title || url,
+ content: article.content || '',
+ textContent: cleanedText,
+ length: cleanedText.length,
+ excerpt: article.excerpt || undefined,
+ siteName: article.siteName || undefined
+ };
+ }
+ catch (err) {
+ throw {
+ message: 'failed to get webpage content :/',
+ code: 'WEBPAGE_ERROR',
+ originalError: err
+ };
+ }
+ }
+ // get just the text content
+ async function getWebpageText(url, usePuppeteer = false) {
+ const content = await getWebpageContent(url, usePuppeteer);
+ return content.textContent;
+ }
+ // check if url is accessible
+ async function isUrlAccessible(url) {
+ try {
+ const response = await fetch(url, { method: 'HEAD' });
+ return response.ok;
+ }
+ catch {
+ return false;
+ }
+ }
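
A usage sketch for the webpage module above; the import path is assumed, while the function signatures and the returned fields (title, content, textContent, length, excerpt, siteName) match the declaration file and implementation shown here.

import { getWebpageContent, isUrlAccessible } from "llm-search-tools"; // assumed re-export

async function readPage(url: string): Promise<void> {
  if (!(await isUrlAccessible(url))) {
    console.warn("URL is not reachable:", url);
    return;
  }
  // Pass true as the second argument to render JS-heavy pages with Puppeteer
  const page = await getWebpageContent(url, false);
  console.log(`${page.title} (${page.length} chars${page.siteName ? `, ${page.siteName}` : ""})`);
  console.log(page.excerpt ?? page.textContent.slice(0, 200));
}
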
package/dist/modules/webpage.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"webpage.js","sourceRoot":"","sources":["../../src/modules/webpage.ts"],"names":[],"mappings":";AAAA,gFAAgF;;;;;AAmDhF,8CA4FC;AAGD,wCAGC;AAGD,0CAOC;AA7JD,sDAAmD;AACnD,iCAA8B;AAC9B,0DAAkC;AAElC,2CAA6C;AAC7C,6CAA4C;AAE5C,6EAA6E;AAC7E,SAAS,SAAS,CAAC,IAAY;IAC7B,OAAO,IAAI;SACR,OAAO,CAAC,YAAY,EAAE,GAAG,CAAC;SAC1B,OAAO,CAAC,aAAa,EAAE,QAAQ,CAAC;SAChC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC;SAC1B,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC;SACpB,IAAI,EAAE,CAAC;AACZ,CAAC;AAED,6CAA6C;AAC7C,SAAS,UAAU,CAAC,GAAW;IAC7B,IAAI,CAAC;QACH,MAAM,MAAM,GAAG,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC;QAC5B,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;QAEjC,IAAI,QAAQ,CAAC,QAAQ,CAAC,eAAe,CAAC,EAAE,CAAC;YACvC,OAAO,WAAW,CAAC;QACrB,CAAC;QAED,IAAI,QAAQ,KAAK,sBAAsB,IAAI,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE,CAAC;YACpE,OAAO,YAAY,CAAC;QACtB,CAAC;QAED,wDAAwD;QACxD,MAAM,WAAW,GAAG;YAClB,aAAa,EAAE,UAAU,EAAE,WAAW;YACtC,aAAa,EAAE,OAAO,EAAE,eAAe;YACvC,cAAc,EAAE,cAAc;SAC/B,CAAC;QAEF,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;YAC1D,OAAO,aAAa,CAAC;QACvB,CAAC;QAED,OAAO,SAAS,CAAC;IACnB,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,aAAa,CAAC;IACvB,CAAC;AACH,CAAC;AAED,wCAAwC;AACjC,KAAK,UAAU,iBAAiB,CAAC,GAAW,EAAE,eAAwB,KAAK;IAChF,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QAEhC,uBAAuB;QACvB,IAAI,OAAO,KAAK,WAAW,EAAE,CAAC;YAC5B,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,GAAG,CAAC;YAChE,MAAM,OAAO,GAAG,MAAM,IAAA,0BAAc,EAAC,KAAK,CAAC,CAAC;YAC5C,OAAO;gBACL,KAAK;gBACL,OAAO;gBACP,WAAW,EAAE,SAAS,CAAC,OAAO,CAAC;gBAC/B,MAAM,EAAE,OAAO,CAAC,MAAM;gBACtB,OAAO,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,KAAK;gBACtC,QAAQ,EAAE,WAAW;aACtB,CAAC;QACJ,CAAC;QAED,IAAI,OAAO,KAAK,YAAY,EAAE,CAAC;YAC7B,MAAM,EAAE,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YACzC,MAAM,KAAK,GAAG,MAAM,IAAA,yBAAY,EAAC,EAAE,CAAC,CAAC;YACrC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,IAAI,KAAK,CAAC,KAAK,IAAI,sBAAsB,CAAC;YACvE,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;YAC1C,OAAO;gBACL,KAAK,EAAE,KAAK,CAAC,KAAK,IAAI,GAAG;gBACzB,OAAO,EAAE,OAAO;gBAChB,WAAW,EAAE,cAAc;gBAC3B,MAAM,EAAE,cAAc,CAAC,MAAM;gBAC7B,OAAO,EAAE,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC,GAAG,CAAC,cAAc,CAAC,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;gBAClF,QAAQ,EAAE,aAAa;aACxB,CAAC;QACJ,CAAC;QAED,IAAI,OAAO,KAAK,aAAa,EAAE,CAAC;YAC9B,OAAO;gBACL,KAAK,EAAE,GAAG;gBACV,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,wDAAwD;gBACrE,MAAM,EAAE,CAAC;gBACT,OAAO,EAAE,gDAAgD;aAC1D,CAAC;QACJ,CAAC;QAED,uCAAuC;QACvC,IAAI,IAAY,CAAC;QAEjB,IAAI,YAAY,EAAE,CAAC;YACjB,MAAM,OAAO,GAAG,MAAM,mBAAS,CAAC,MAAM,CAAC;gBACrC,QAAQ,EAAE,IAAI;gBACd,IAAI,EAAE,CAAC,cAAc,EAAE,0BAA0B,CAAC;aACnD,CAAC,CAAC;YACH,MAAM,IAAI,GAAG,MAAM,OAAO,CAAC,OAAO,EAAE,CAAC;YACrC,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;YACpD,IAAI,GAAG,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;YAC5B,MAAM,OAAO,CAAC,KAAK,EAAE,CAAC;QACxB,CAAC;aAAM,CAAC;YACN,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,CAAC,CAAC;YAClC,IAAI,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/B,CAAC;QAED,MAAM,GAAG,GAAG,IAAI,aAAK,CAAC,IAAI,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC;QACrC,MAAM,MAAM,GAAG,IAAI,yBAAW,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;QACpD,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,EAAE,CAAC;QAE/B,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,OAAO;gBACL,KAAK,EAAE,GAAG;gBACV,OAAO,EAAE,EAAE;gBACX,WAAW,EAAE,oDAAoD;gBACjE,MAAM,EAAE,CAAC;gBACT,OAAO,EAAE,2BAA2B;aACrC,CAAC;QACJ,CAAC;QAED,MAAM,WAAW,GAAG,SAAS,CAAC,OAAO,CAAC,WAAW,IAAI,EAAE,CAAC,CAAC;QAEzD,OAAO;YACL,KAAK,EAAE,OAAO,CAAC,KAAK,IAAI,GAAG;YAC3B,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,EAAE;YAC9B,WAAW,EAAE,WAAW;YACxB,MAAM,EAAE,WAAW
,CAAC,MAAM;YAC1B,OAAO,EAAE,OAAO,CAAC,OAAO,IAAI,SAAS;YACrC,QAAQ,EAAE,OAAO,CAAC,QAAQ,IAAI,SAAS;SACxC,CAAC;IAEJ,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM;YACJ,OAAO,EAAE,kCAAkC;YAC3C,IAAI,EAAE,eAAe;YACrB,aAAa,EAAE,GAAG;SACJ,CAAC;IACnB,CAAC;AACH,CAAC;AAED,4BAA4B;AACrB,KAAK,UAAU,cAAc,CAAC,GAAW,EAAE,eAAwB,KAAK;IAC7E,MAAM,OAAO,GAAG,MAAM,iBAAiB,CAAC,GAAG,EAAE,YAAY,CAAC,CAAC;IAC3D,OAAO,OAAO,CAAC,WAAW,CAAC;AAC7B,CAAC;AAED,6BAA6B;AACtB,KAAK,UAAU,eAAe,CAAC,GAAW;IAC/C,IAAI,CAAC;QACH,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;QACtD,OAAO,QAAQ,CAAC,EAAE,CAAC;IACrB,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,KAAK,CAAC;IACf,CAAC;AACH,CAAC"}
package/dist/modules/wikipedia.d.ts
@@ -0,0 +1,5 @@
+ import { WikipediaResult } from "../types";
+ export declare function wikiSearch(query: string, limit?: number): Promise<WikipediaResult[]>;
+ export declare function wikiGetContent(title: string): Promise<string>;
+ export declare function wikiGetSummary(title: string): Promise<WikipediaResult>;
+ export declare function setWikiLang(language: string): void;
package/dist/modules/wikipedia.js
@@ -0,0 +1,85 @@
+ "use strict";
+ // wikipedia.ts - wikipedia stuff, pretty self explanatory tbh
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.wikiSearch = wikiSearch;
+ exports.wikiGetContent = wikiGetContent;
+ exports.wikiGetSummary = wikiGetSummary;
+ exports.setWikiLang = setWikiLang;
+ const wikipedia_1 = __importDefault(require("wikipedia"));
+ async function wikiSearch(query, limit = 10) {
+ try {
+ // search wikipedia
+ const results = await wikipedia_1.default.search(query, { limit });
+ // convert results to our format
+ return Promise.all(results.results.map(async (r) => {
+ try {
+ // get page summary for each result
+ const summary = await wikipedia_1.default.summary(r.title);
+ return {
+ title: r.title,
+ url: `https://wikipedia.org/wiki/${encodeURIComponent(r.title)}`,
+ snippet: summary.extract,
+ source: "wikipedia",
+ extract: summary.extract,
+ thumbnail: summary.thumbnail?.source,
+ };
+ }
+ catch {
+ // fallback if summary fails
+ return {
+ title: r.title,
+ url: `https://wikipedia.org/wiki/${encodeURIComponent(r.title)}`,
+ source: "wikipedia",
+ };
+ }
+ }));
+ }
+ catch (err) {
+ throw {
+ message: "wikipedia search failed :/",
+ code: "WIKI_SEARCH_ERROR",
+ originalError: err,
+ };
+ }
+ }
+ async function wikiGetContent(title) {
+ try {
+ const page = await wikipedia_1.default.page(title);
+ const content = await page.content();
+ return content;
+ }
+ catch (err) {
+ throw {
+ message: "failed to get wikipedia content :(",
+ code: "WIKI_CONTENT_ERROR",
+ originalError: err,
+ };
+ }
+ }
+ async function wikiGetSummary(title) {
+ try {
+ const summary = await wikipedia_1.default.summary(title);
+ return {
+ title: summary.title,
+ url: `https://wikipedia.org/wiki/${encodeURIComponent(summary.title)}`,
+ snippet: summary.extract,
+ source: "wikipedia",
+ extract: summary.extract,
+ thumbnail: summary.thumbnail?.source,
+ };
+ }
+ catch (err) {
+ throw {
+ message: "failed to get wikipedia summary :/",
+ code: "WIKI_SUMMARY_ERROR",
+ originalError: err,
+ };
+ }
+ }
+ // set language for wikipedia api
+ function setWikiLang(language) {
+ wikipedia_1.default.setLang(language);
+ }
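
A usage sketch for the wikipedia module; the import path is assumed, and the result fields (extract, thumbnail, url) follow the compiled code above, which wraps the `wikipedia` npm package.

import { wikiSearch, wikiGetSummary, setWikiLang } from "llm-search-tools"; // assumed re-export

async function lookup(topic: string): Promise<void> {
  setWikiLang("en"); // forwards to the underlying wikipedia client
  const hits = await wikiSearch(topic, 3);
  for (const hit of hits) {
    console.log(`${hit.title} -> ${hit.url}`);
  }
  if (hits.length > 0) {
    const summary = await wikiGetSummary(hits[0].title);
    console.log(summary.extract ?? summary.snippet ?? "(no summary available)");
  }
}
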
package/dist/modules/wikipedia.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"wikipedia.js","sourceRoot":"","sources":["../../src/modules/wikipedia.ts"],"names":[],"mappings":";AAAA,8DAA8D;;;;;AAK9D,gCAkCC;AAED,wCAYC;AAED,wCAkBC;AAGD,kCAEC;AA5ED,0DAA6B;AAGtB,KAAK,UAAU,UAAU,CAAC,KAAa,EAAE,QAAgB,EAAE;IAChE,IAAI,CAAC;QACH,mBAAmB;QACnB,MAAM,OAAO,GAAG,MAAM,mBAAI,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,KAAK,EAAE,CAAC,CAAC;QAEpD,gCAAgC;QAChC,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,EAAE,EAAE;YACjD,IAAI,CAAC;gBACH,mCAAmC;gBACnC,MAAM,OAAO,GAAG,MAAM,mBAAI,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;gBAC5C,OAAO;oBACL,KAAK,EAAE,CAAC,CAAC,KAAK;oBACd,GAAG,EAAE,8BAA8B,kBAAkB,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;oBAChE,OAAO,EAAE,OAAO,CAAC,OAAO;oBACxB,MAAM,EAAE,WAAoB;oBAC5B,OAAO,EAAE,OAAO,CAAC,OAAO;oBACxB,SAAS,EAAE,OAAO,CAAC,SAAS,EAAE,MAAM;iBACrC,CAAC;YACJ,CAAC;YAAC,MAAM,CAAC;gBACP,4BAA4B;gBAC5B,OAAO;oBACL,KAAK,EAAE,CAAC,CAAC,KAAK;oBACd,GAAG,EAAE,8BAA8B,kBAAkB,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE;oBAChE,MAAM,EAAE,WAAoB;iBAC7B,CAAC;YACJ,CAAC;QACH,CAAC,CAAC,CAAC,CAAC;IACN,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM;YACJ,OAAO,EAAE,4BAA4B;YACrC,IAAI,EAAE,mBAAmB;YACzB,aAAa,EAAE,GAAG;SACJ,CAAC;IACnB,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,cAAc,CAAC,KAAa;IAChD,IAAI,CAAC;QACH,MAAM,IAAI,GAAG,MAAM,mBAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACpC,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;QACrC,OAAO,OAAO,CAAC;IACjB,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM;YACJ,OAAO,EAAE,oCAAoC;YAC7C,IAAI,EAAE,oBAAoB;YAC1B,aAAa,EAAE,GAAG;SACJ,CAAC;IACnB,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,cAAc,CAAC,KAAa;IAChD,IAAI,CAAC;QACH,MAAM,OAAO,GAAG,MAAM,mBAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;QAC1C,OAAO;YACL,KAAK,EAAE,OAAO,CAAC,KAAK;YACpB,GAAG,EAAE,8BAA8B,kBAAkB,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;YACtE,OAAO,EAAE,OAAO,CAAC,OAAO;YACxB,MAAM,EAAE,WAAW;YACnB,OAAO,EAAE,OAAO,CAAC,OAAO;YACxB,SAAS,EAAE,OAAO,CAAC,SAAS,EAAE,MAAM;SACrC,CAAC;IACJ,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM;YACJ,OAAO,EAAE,oCAAoC;YAC7C,IAAI,EAAE,oBAAoB;YAC1B,aAAa,EAAE,GAAG;SACJ,CAAC;IACnB,CAAC;AACH,CAAC;AAED,iCAAiC;AACjC,SAAgB,WAAW,CAAC,QAAgB;IAC1C,mBAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACzB,CAAC"}
package/dist/scripts/interactive-search.d.ts
@@ -0,0 +1 @@
+ export {};