@crawlee/basic 4.0.0-beta.2 → 4.0.0-beta.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -1
- package/index.d.ts +1 -1
- package/index.d.ts.map +1 -1
- package/index.js +0 -1
- package/index.js.map +1 -1
- package/internals/basic-crawler.d.ts +153 -77
- package/internals/basic-crawler.d.ts.map +1 -1
- package/internals/basic-crawler.js +334 -151
- package/internals/basic-crawler.js.map +1 -1
- package/internals/send-request.d.ts +1 -3
- package/internals/send-request.d.ts.map +1 -1
- package/internals/send-request.js +3 -18
- package/internals/send-request.js.map +1 -1
- package/package.json +5 -5
- package/internals/constants.d.ts +0 -7
- package/internals/constants.d.ts.map +0 -1
- package/internals/constants.js +0 -7
- package/internals/constants.js.map +0 -1
- package/tsconfig.build.tsbuildinfo +0 -1
package/README.md
CHANGED
@@ -9,6 +9,10 @@
     <small>A web scraping and browser automation library</small>
 </h1>
 
+<p align=center>
+    <a href="https://trendshift.io/repositories/5179" target="_blank"><img src="https://trendshift.io/api/badge/repositories/5179" alt="apify%2Fcrawlee | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
+</p>
+
 <p align=center>
     <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/v/@crawlee/core.svg" alt="NPM latest version" data-canonical-src="https://img.shields.io/npm/v/@crawlee/core/next.svg" style="max-width: 100%;"></a>
     <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/dm/@crawlee/core.svg" alt="Downloads" data-canonical-src="https://img.shields.io/npm/dm/@crawlee/core.svg" style="max-width: 100%;"></a>
@@ -24,7 +28,7 @@ Crawlee is available as the [`crawlee`](https://www.npmjs.com/package/crawlee) N
 
 > 👉 **View full documentation, guides and examples on the [Crawlee project website](https://crawlee.dev)** 👈
 
->
+> Do you prefer 🐍 Python instead of JavaScript? [👉 Checkout Crawlee for Python 👈](https://github.com/apify/crawlee-python).
 
 ## Installation
 
package/index.d.ts
CHANGED
package/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,8BAA8B,CAAC;AAC7C,
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,8BAA8B,CAAC;AAC7C,OAAO,EAAE,WAAW,EAAE,UAAU,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,gBAAgB,CAAC"}
package/index.js
CHANGED
package/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,8BAA8B,CAAC
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,8BAA8B,CAAC"}
package/internals/basic-crawler.d.ts
CHANGED
@@ -1,38 +1,14 @@
-import type { AddRequestsBatchedOptions, AddRequestsBatchedResult, AutoscaledPoolOptions, BaseHttpClient, CrawlingContext, DatasetExportOptions, EnqueueLinksOptions, EventManager, FinalStatistics, GetUserDataFromRequest, IRequestList,
-import { AutoscaledPool, Configuration, Dataset, RequestProvider, SessionPool, Statistics } from '@crawlee/core';
+import type { AddRequestsBatchedOptions, AddRequestsBatchedResult, AutoscaledPoolOptions, BaseHttpClient, CrawlingContext, DatasetExportOptions, EnqueueLinksOptions, EventManager, FinalStatistics, GetUserDataFromRequest, IRequestList, IRequestManager, ProxyConfiguration, ProxyInfo, Request, RequestsLike, RouterHandler, RouterRoutes, Session, SessionPoolOptions, SkippedRequestCallback, Source, StatisticsOptions, StatisticState } from '@crawlee/core';
+import { AutoscaledPool, Configuration, ContextPipeline, Dataset, RequestProvider, SessionPool, Statistics } from '@crawlee/core';
 import type { Awaitable, BatchAddRequestsResult, Dictionary, SetStatusMessageOptions } from '@crawlee/types';
 import { RobotsTxtFile } from '@crawlee/utils';
-import type { SetRequired } from 'type-fest';
+import type { ReadonlyDeep, SetRequired } from 'type-fest';
 import type { Log } from '@apify/log';
 import { TimeoutError } from '@apify/timeout';
-export interface BasicCrawlingContext<UserData extends Dictionary = Dictionary> extends CrawlingContext<
-    /**
-     * This function automatically finds and enqueues links from the current page, adding them to the {@link RequestQueue}
-     * currently used by the crawler.
-     *
-     * Optionally, the function allows you to filter the target links' URLs using an array of globs or regular expressions
-     * and override settings of the enqueued {@link Request} objects.
-     *
-     * Check out the [Crawl a website with relative links](https://crawlee.dev/js/docs/examples/crawl-relative-links) example
-     * for more details regarding its usage.
-     *
-     * **Example usage**
-     *
-     * ```ts
-     * async requestHandler({ enqueueLinks }) {
-     *     await enqueueLinks({
-     *         urls: [...],
-     *     });
-     * },
-     * ```
-     *
-     * @param [options] All `enqueueLinks()` parameters are passed via an options object.
-     * @returns Promise that resolves to {@link BatchAddRequestsResult} object.
-     */
-    enqueueLinks(options?: SetRequired<EnqueueLinksOptions, 'urls'>): Promise<BatchAddRequestsResult>;
+export interface BasicCrawlingContext<UserData extends Dictionary = Dictionary> extends CrawlingContext<UserData> {
 }
-export type RequestHandler<Context extends CrawlingContext =
-export type ErrorHandler<Context extends CrawlingContext =
+export type RequestHandler<Context extends CrawlingContext = CrawlingContext> = (inputs: Context) => Awaitable<void>;
+export type ErrorHandler<Context extends CrawlingContext = CrawlingContext, ExtendedContext extends Context = Context> = (inputs: Context & Partial<ExtendedContext>, error: Error) => Awaitable<void>;
 export interface StatusMessageCallbackParams<Context extends CrawlingContext = BasicCrawlingContext, Crawler extends BasicCrawler<any> = BasicCrawler<Context>> {
     state: StatisticState;
     crawler: Crawler;
@@ -40,7 +16,10 @@ export interface StatusMessageCallbackParams<Context extends CrawlingContext = B
     message: string;
 }
 export type StatusMessageCallback<Context extends CrawlingContext = BasicCrawlingContext, Crawler extends BasicCrawler<any> = BasicCrawler<Context>> = (params: StatusMessageCallbackParams<Context, Crawler>) => Awaitable<void>;
-export
+export type RequireContextPipeline<DefaultContextType extends CrawlingContext, FinalContextType extends DefaultContextType> = DefaultContextType extends FinalContextType ? {} : {
+    contextPipelineBuilder: () => ContextPipeline<CrawlingContext, FinalContextType>;
+};
+export interface BasicCrawlerOptions<Context extends CrawlingContext = CrawlingContext, ContextExtension = Dictionary<never>, ExtendedContext extends Context = Context & ContextExtension> {
     /**
      * User-provided function that performs the logic of the crawler. It is called for each URL to crawl.
      *
@@ -58,7 +37,35 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
      * The exceptions are logged to the request using the
      * {@link Request.pushErrorMessage|`Request.pushErrorMessage()`} function.
      */
-    requestHandler?: RequestHandler<
+    requestHandler?: RequestHandler<ExtendedContext>;
+    /**
+     * Allows the user to extend the crawling context passed to the request handler with custom functionality.
+     *
+     * **Example usage:**
+     *
+     * ```javascript
+     * import { BasicCrawler } from 'crawlee';
+     *
+     * // Create a crawler instance
+     * const crawler = new BasicCrawler({
+     *     extendContext(context) => ({
+     *         async customHelper() {
+     *             await context.pushData({ url: context.request.url })
+     *         }
+     *     }),
+     *     async requestHandler(context) {
+     *         await context.customHelper();
+     *     },
+     * });
+     * ```
+     */
+    extendContext?: (context: Context) => Awaitable<ContextExtension>;
+    /**
+     * *Intended for BasicCrawler subclasses*. Prepares a context pipeline that transforms the initial crawling context into the shape given by the `Context` type parameter.
+     *
+     * The option is not required if your crawler subclass does not extend the crawling context with custom information or helpers.
+     */
+    contextPipelineBuilder?: () => ContextPipeline<CrawlingContext, Context>;
     /**
      * Static list of URLs to be processed.
      * If not provided, the crawler will open the default request queue when the {@link BasicCrawler.addRequests|`crawler.addRequests()`} function is called.
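Note: the `extendContext` example embedded in the doc comment above is not syntactically valid JavaScript (`extendContext(context) => ({ ... })` mixes method shorthand with an arrow function). Going by the declared option type, `(context: Context) => Awaitable<ContextExtension>`, the intended usage is presumably something like this sketch:

```ts
import { BasicCrawler } from 'crawlee';

const crawler = new BasicCrawler({
    // Returns an object whose members are merged into the crawling
    // context before the request handler runs.
    extendContext: (context) => ({
        async customHelper() {
            await context.pushData({ url: context.request.url });
        },
    }),
    async requestHandler(context) {
        // `customHelper` was added by extendContext above.
        await context.customHelper();
    },
});
```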
@@ -73,6 +80,13 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
      * it is a shortcut for running `crawler.addRequests()` before the `crawler.run()`.
      */
     requestQueue?: RequestProvider;
+    /**
+     * Allows explicitly configuring a request manager. Mutually exclusive with the `requestQueue` and `requestList` options.
+     *
+     * This enables explicitly configuring the crawler to use `RequestManagerTandem`, for instance.
+     * If using this, the type of `BasicCrawler.requestQueue` may not be fully compatible with the `RequestProvider` class.
+     */
+    requestManager?: IRequestManager;
     /**
      * Timeout in which the function passed as {@link BasicCrawlerOptions.requestHandler|`requestHandler`} needs to finish, in seconds.
      * @default 60
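A sketch of what the new `requestManager` option enables, under the assumption that an explicitly opened `RequestQueue` satisfies `IRequestManager` (the doc comment above names `RequestManagerTandem` as the motivating implementation):

```ts
import { BasicCrawler, RequestQueue } from 'crawlee';

// Open a named queue and hand it to the crawler directly instead of
// letting it create the default one. Any IRequestManager implementation
// (e.g. a RequestManagerTandem) could be passed the same way.
const requestManager = await RequestQueue.open('my-queue');

const crawler = new BasicCrawler({
    requestManager,
    async requestHandler({ request, log }) {
        log.info(`Processing ${request.url}`);
    },
});
```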
@@ -87,7 +101,7 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
      * Second argument is the `Error` instance that
      * represents the last error thrown during processing of the request.
      */
-    errorHandler?: ErrorHandler<
+    errorHandler?: ErrorHandler<CrawlingContext, ExtendedContext>;
     /**
      * A function to handle requests that failed more than {@link BasicCrawlerOptions.maxRequestRetries|`maxRequestRetries`} times.
      *
@@ -96,7 +110,7 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
      * Second argument is the `Error` instance that
      * represents the last error thrown during processing of the request.
      */
-    failedRequestHandler?: ErrorHandler<
+    failedRequestHandler?: ErrorHandler<CrawlingContext, ExtendedContext>;
     /**
      * Specifies the maximum number of retries allowed for a request if its processing fails.
      * This includes retries due to navigation errors or errors thrown from user-supplied functions
@@ -126,12 +140,18 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
      * > *NOTE:* In cases of parallel crawling, the actual number of pages visited might be slightly higher than this value.
      */
     maxRequestsPerCrawl?: number;
+    /**
+     * Maximum depth of the crawl. If not set, the crawl will continue until all requests are processed.
+     * Setting this to `0` will only process the initial requests, skipping all links enqueued by `crawlingContext.enqueueLinks` and `crawlingContext.addRequests`.
+     * Passing `1` will process the initial requests and all links enqueued by `crawlingContext.enqueueLinks` and `crawlingContext.addRequests` in the handler for initial requests.
+     */
+    maxCrawlDepth?: number;
     /**
      * Custom options passed to the underlying {@link AutoscaledPool} constructor.
      * > *NOTE:* The {@link AutoscaledPoolOptions.runTaskFunction|`runTaskFunction`}
-     * and
-     *
-     *
+     * option is provided by the crawler and cannot be overridden.
+     * However, we can provide custom implementations of {@link AutoscaledPoolOptions.isFinishedFunction|`isFinishedFunction`}
+     * and {@link AutoscaledPoolOptions.isTaskReadyFunction|`isTaskReadyFunction`}.
      */
     autoscaledPoolOptions?: AutoscaledPoolOptions;
     /**
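Per the `maxCrawlDepth` semantics documented above, a depth-limited crawl could look like this (a minimal sketch):

```ts
import { BasicCrawler } from 'crawlee';

const crawler = new BasicCrawler({
    // 0 = process only the start requests; 1 = also process links that
    // the start requests' handlers enqueue via enqueueLinks()/addRequests().
    maxCrawlDepth: 1,
    async requestHandler({ request, log }) {
        log.info(`Crawling ${request.url}`);
    },
});

await crawler.run(['https://example.com']);
```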
@@ -199,11 +219,20 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
     /**
      * If set to `true`, the crawler will automatically try to fetch the robots.txt file for each domain,
      * and skip those that are not allowed. This also prevents disallowed URLs to be added via `enqueueLinks`.
+     *
+     * If an object is provided, it may contain a `userAgent` property to specify which user-agent
+     * should be used when checking the robots.txt file. If not provided, the default user-agent `*` will be used.
      */
-    respectRobotsTxtFile?: boolean
+    respectRobotsTxtFile?: boolean | {
+        userAgent?: string;
+    };
     /**
      * When a request is skipped for some reason, you can use this callback to act on it.
-     * This is currently fired
+     * This is currently fired for requests skipped
+     * 1. based on robots.txt file,
+     * 2. because they don't match enqueueLinks filters,
+     * 3. because they are redirected to a URL that doesn't match the enqueueLinks strategy,
+     * 4. or because the {@link BasicCrawlerOptions.maxRequestsPerCrawl|`maxRequestsPerCrawl`} limit has been reached
      */
     onSkippedRequest?: SkippedRequestCallback;
     /** @internal */
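Combining the new object form of `respectRobotsTxtFile` with the skip callback (a sketch; the exact shape of the callback argument is defined by `SkippedRequestCallback` and is assumed here to carry the skipped `url` and a `reason`):

```ts
import { BasicCrawler } from 'crawlee';

const crawler = new BasicCrawler({
    // Evaluate robots.txt rules against a specific user-agent token
    // instead of the default `*`.
    respectRobotsTxtFile: { userAgent: 'MyCrawler' },
    // Fired for URLs skipped due to robots.txt, enqueueLinks filters,
    // off-strategy redirects, or the maxRequestsPerCrawl limit.
    async onSkippedRequest({ url, reason }) {
        console.warn(`Skipped ${url} (${reason})`);
    },
    async requestHandler({ request, log }) {
        log.info(`Processing ${request.url}`);
    },
});
```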
@@ -223,6 +252,11 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
      * Defaults to a new instance of {@link GotScrapingHttpClient}
      */
     httpClient?: BaseHttpClient;
+    /**
+     * If set, the crawler will be configured for all connections to use
+     * the Proxy URLs provided and rotated according to the configuration.
+     */
+    proxyConfiguration?: ProxyConfiguration;
 }
 /**
  * A set of options that you can toggle to enable experimental features in Crawlee.
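With `proxyConfiguration` accepted directly by `BasicCrawlerOptions`, a static proxy list can be rotated without a subclass (a sketch; the `proxyUrls` below are placeholders):

```ts
import { BasicCrawler, ProxyConfiguration } from 'crawlee';

const proxyConfiguration = new ProxyConfiguration({
    proxyUrls: ['http://proxy-a.example.com:8000', 'http://proxy-b.example.com:8000'],
});

const crawler = new BasicCrawler({
    // All connections made by the crawler will use (and rotate over)
    // the configured proxies.
    proxyConfiguration,
    async requestHandler({ request, proxyInfo, log }) {
        log.info(`Fetched ${request.url} via ${proxyInfo?.url}`);
    },
});
```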
@@ -303,7 +337,7 @@ export interface CrawlerExperiments {
  * ```
  * @category Crawlers
  */
-export declare class BasicCrawler<Context extends CrawlingContext =
+export declare class BasicCrawler<Context extends CrawlingContext = CrawlingContext, ContextExtension = Dictionary<never>, ExtendedContext extends Context = Context & ContextExtension> {
     readonly config: Configuration;
     protected static readonly CRAWLEE_STATE_KEY = "CRAWLEE_STATE";
     /**
@@ -321,6 +355,10 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
      * Only available if used by the crawler.
      */
     requestQueue?: RequestProvider;
+    /**
+     * The main request-handling component of the crawler. It's initialized during the crawler startup.
+     */
+    protected requestManager?: IRequestManager;
     /**
      * A reference to the underlying {@link SessionPool} class that manages the crawler's {@link Session|sessions}.
      * Only available if used by the crawler.
@@ -334,40 +372,57 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
      * or to abort it by calling {@link AutoscaledPool.abort|`autoscaledPool.abort()`}.
      */
     autoscaledPool?: AutoscaledPool;
+    /**
+     * A reference to the underlying {@link ProxyConfiguration} class that manages the crawler's proxies.
+     * Only available if used by the crawler.
+     */
+    proxyConfiguration?: ProxyConfiguration;
     /**
      * Default {@link Router} instance that will be used if we don't specify any {@link BasicCrawlerOptions.requestHandler|`requestHandler`}.
      * See {@link Router.addHandler|`router.addHandler()`} and {@link Router.addDefaultHandler|`router.addDefaultHandler()`}.
      */
-    readonly router: RouterHandler<
+    readonly router: RouterHandler<Context>;
+    private contextPipelineBuilder;
+    private _contextPipeline?;
+    get contextPipeline(): ContextPipeline<CrawlingContext, ExtendedContext>;
     running: boolean;
     hasFinishedBefore: boolean;
     readonly log: Log;
-    protected requestHandler: RequestHandler<
-    protected errorHandler?: ErrorHandler<
-    protected failedRequestHandler?: ErrorHandler<
+    protected requestHandler: RequestHandler<ExtendedContext>;
+    protected errorHandler?: ErrorHandler<CrawlingContext, ExtendedContext>;
+    protected failedRequestHandler?: ErrorHandler<CrawlingContext, ExtendedContext>;
     protected requestHandlerTimeoutMillis: number;
     protected internalTimeoutMillis: number;
     protected maxRequestRetries: number;
+    protected maxCrawlDepth?: number;
     protected sameDomainDelayMillis: number;
     protected domainAccessedTime: Map<string, number>;
     protected maxSessionRotations: number;
+    protected maxRequestsPerCrawl?: number;
     protected handledRequestsCount: number;
     protected statusMessageLoggingInterval: number;
     protected statusMessageCallback?: StatusMessageCallback;
     protected sessionPoolOptions: SessionPoolOptions;
     protected useSessionPool: boolean;
-    protected crawlingContexts: Map<string, Context>;
     protected autoscaledPoolOptions: AutoscaledPoolOptions;
     protected events: EventManager;
     protected httpClient: BaseHttpClient;
     protected retryOnBlocked: boolean;
-    protected respectRobotsTxtFile: boolean
+    protected respectRobotsTxtFile: boolean | {
+        userAgent?: string;
+    };
     protected onSkippedRequest?: SkippedRequestCallback;
     private _closeEvents?;
+    private shouldLogMaxProcessedRequestsExceeded;
+    private shouldLogMaxEnqueuedRequestsExceeded;
     private experiments;
     private readonly robotsTxtFileCache;
     private _experimentWarnings;
     protected static optionsShape: {
+        // @ts-ignore optional peer dependency or compatibility with es2022
+        contextPipelineBuilder: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
+        // @ts-ignore optional peer dependency or compatibility with es2022
+        extendContext: import("ow").Predicate<Function> & import("ow").BasePredicate<Function | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
         requestList: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
@@ -388,12 +443,16 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
         maxSessionRotations: import("ow").NumberPredicate & import("ow").BasePredicate<number | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
         maxRequestsPerCrawl: import("ow").NumberPredicate & import("ow").BasePredicate<number | undefined>;
+        // @ts-ignore optional peer dependency or compatibility with es2022
+        maxCrawlDepth: import("ow").NumberPredicate & import("ow").BasePredicate<number | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
         autoscaledPoolOptions: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
         sessionPoolOptions: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
         useSessionPool: import("ow").BooleanPredicate & import("ow").BasePredicate<boolean | undefined>;
+        // @ts-ignore optional peer dependency or compatibility with es2022
+        proxyConfiguration: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
         statusMessageLoggingInterval: import("ow").NumberPredicate & import("ow").BasePredicate<number | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
@@ -401,7 +460,7 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
         // @ts-ignore optional peer dependency or compatibility with es2022
         retryOnBlocked: import("ow").BooleanPredicate & import("ow").BasePredicate<boolean | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
-        respectRobotsTxtFile: import("ow").
+        respectRobotsTxtFile: import("ow").AnyPredicate<boolean | object>;
         // @ts-ignore optional peer dependency or compatibility with es2022
         onSkippedRequest: import("ow").Predicate<Function> & import("ow").BasePredicate<Function | undefined>;
         // @ts-ignore optional peer dependency or compatibility with es2022
@@ -424,7 +483,8 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     /**
      * All `BasicCrawler` parameters are passed via an options object.
      */
-    constructor(options?: BasicCrawlerOptions<Context>,
+    constructor(options?: BasicCrawlerOptions<Context, ContextExtension, ExtendedContext> & RequireContextPipeline<CrawlingContext, Context>, // cast because the constructor logic handles missing `contextPipelineBuilder` - the type is just for DX
+    config?: Configuration);
     /**
      * Checks if the given error is a proxy error by comparing its message to a list of known proxy error messages.
      * Used for retrying requests that failed due to proxy errors.
@@ -432,12 +492,6 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
      * @param error The error to check.
      */
     protected isProxyError(error: Error): boolean;
-    /**
-     * Checks whether the given crawling context is getting blocked by anti-bot protection using several heuristics.
-     * Returns `false` if the request is not blocked, otherwise returns a string with a description of the block reason.
-     * @param _crawlingContext The crawling context to check.
-     */
-    protected isRequestBlocked(_crawlingContext: Context): Promise<string | false>;
     /**
      * This method is periodically called by the crawler, every `statusMessageLoggingInterval` seconds.
      */
@@ -453,15 +507,20 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
      * @param [requests] The requests to add.
      * @param [options] Options for the request queue.
      */
-    run(requests?:
+    run(requests?: RequestsLike, options?: CrawlerRunOptions): Promise<FinalStatistics>;
     /**
      * Gracefully stops the current run of the crawler.
      *
      * All the tasks active at the time of calling this method will be allowed to finish.
+     *
+     * To stop the crawler immediately, use {@link BasicCrawler.teardown|`crawler.teardown()`} instead.
      */
     stop(message?: string): void;
     getRequestQueue(): Promise<RequestProvider>;
     useState<State extends Dictionary = Dictionary>(defaultValue?: State): Promise<State>;
+    protected get pendingRequestCountApproximation(): number;
+    protected calculateEnqueuedRequestLimit(explicitLimit?: number): number | undefined;
+    protected handleSkippedRequest(options: Parameters<SkippedRequestCallback>[0]): Promise<void>;
     /**
      * Adds requests to the queue in batches. By default, it will resolve after the initial batch is added, and continue
      * adding the rest in background. You can configure the batch size via `batchSize` option and the sleep time in between
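The `run()`/`stop()` signatures above, together with the `teardown()` doc further down, spell out the two shutdown modes; a sketch of the graceful variant:

```ts
import { BasicCrawler } from 'crawlee';

const crawler = new BasicCrawler({
    async requestHandler({ request, log }) {
        log.info(`Processing ${request.url}`);
    },
});

// Graceful: in-flight requests are allowed to finish.
setTimeout(() => crawler.stop('Time budget exhausted'), 60_000);
// Immediate: crawler.teardown() would halt without waiting.

await crawler.run(['https://example.com']);
```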
@@ -473,7 +532,7 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
      * @param requests The requests to add
      * @param options Options for the request queue
      */
-    addRequests(requests:
+    addRequests(requests: ReadonlyDeep<RequestsLike>, options?: CrawlerAddRequestsOptions): Promise<CrawlerAddRequestsResult>;
     /**
      * Pushes data to the specified {@link Dataset}, or the default crawler {@link Dataset} by calling {@link Dataset.pushData}.
      */
@@ -491,8 +550,11 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
      * Supported formats are currently 'json' and 'csv', and will be inferred from the `path` automatically.
      */
     exportData<Data>(path: string, format?: 'json' | 'csv', options?: DatasetExportOptions): Promise<Data[]>;
+    /**
+     * Initializes the crawler.
+     */
     protected _init(): Promise<void>;
-    protected
+    protected runRequestHandler(crawlingContext: CrawlingContext): Promise<void>;
     /**
      * Handles blocked request
      */
@@ -501,31 +563,42 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     protected getRobotsTxtFileForUrl(url: string): Promise<RobotsTxtFile | undefined>;
     protected _pauseOnMigration(): Promise<void>;
     /**
-     *
-     * and RequestQueue is present then enqueues it to the queue first.
+     * Initializes the RequestManager based on the configured requestList and requestQueue.
      */
-
+    private initializeRequestManager;
     /**
-     *
-     * Can be used to clean up orphaned browser pages.
+     * Fetches the next request to process from the underlying request provider.
      */
-    protected
+    protected _fetchNextRequest(): Promise<Request<Dictionary> | null>;
     /**
      * Delays processing of the request based on the `sameDomainDelaySecs` option,
      * adding it back to the queue after the timeout passes. Returns `true` if the request
      * should be ignored and will be reclaimed to the queue once ready.
      */
-    protected delayRequest(request: Request, source: IRequestList | RequestProvider): boolean;
+    protected delayRequest(request: Request, source: IRequestList | RequestProvider | IRequestManager): boolean;
     /**
      * Wrapper around requestHandler that fetches requests from RequestList/RequestQueue
      * then retries them in a case of an error, etc.
      */
     protected _runTaskFunction(): Promise<void>;
     /**
-     *
+     * Wrapper around the crawling context's `enqueueLinks` method:
+     * - Injects `crawlDepth` to each request being added based on the crawling context request.
+     * - Provides defaults for the `enqueueLinks` options based on the crawler configuration.
+     * - These options can be overridden by the user.
+     * @internal
+     */
+    protected enqueueLinksWithCrawlDepth(options: SetRequired<EnqueueLinksOptions, 'urls'>, request: Request<Dictionary>, requestQueue: RequestProvider): Promise<BatchAddRequestsResult>;
+    /**
+     * Generator function that yields requests injected with the given crawl depth.
+     * @internal
+     */
+    protected addCrawlDepthRequestGenerator(requests: RequestsLike, newRequestDepth: number): AsyncGenerator<Source, void, undefined>;
+    /**
+     * Run async callback with given timeout and retry. Returns the result of the callback.
      * @ignore
      */
-    protected _timeoutAndRetry(handler: () => Promise<
+    protected _timeoutAndRetry<T>(handler: () => Promise<T>, timeout: number, error: Error | string, maxRetries?: number, retried?: number): Promise<T>;
     /**
      * Returns true if either RequestList or RequestQueue have a request ready for processing.
      */
|
|
|
535
608
|
*/
|
|
536
609
|
protected _defaultIsFinishedFunction(): Promise<boolean>;
|
|
537
610
|
private _rotateSession;
|
|
611
|
+
/**
|
|
612
|
+
* Unwraps errors thrown by the context pipeline to get the actual user error.
|
|
613
|
+
* RequestHandlerError and ContextPipelineInitializationError wrap the actual error.
|
|
614
|
+
*/
|
|
615
|
+
private unwrapError;
|
|
538
616
|
/**
|
|
539
617
|
* Handles errors thrown by user provided requestHandler()
|
|
540
618
|
*/
|
|
541
|
-
protected _requestFunctionErrorHandler(error: Error, crawlingContext:
|
|
619
|
+
protected _requestFunctionErrorHandler(error: Error, crawlingContext: CrawlingContext, source: IRequestList | IRequestManager): Promise<void>;
|
|
542
620
|
protected _tagUserHandlerError<T>(cb: () => unknown): Promise<T>;
|
|
543
|
-
protected _handleFailedRequestHandler(crawlingContext:
|
|
621
|
+
protected _handleFailedRequestHandler(crawlingContext: CrawlingContext, error: Error): Promise<void>;
|
|
544
622
|
/**
|
|
545
623
|
* Resolves the most verbose error message from a thrown error
|
|
546
624
|
* @param error The error received
|
|
@@ -549,23 +627,21 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     protected _getMessageFromError(error: Error, forceStack?: boolean): string | TimeoutError | undefined;
     protected _canRequestBeRetried(request: Request, error: Error): boolean;
     /**
-     * Updates handledRequestsCount from possibly stored counts,
-     * usually after worker migration. Since one of the stores
-     * needs to have priority when both are present,
-     * it is the request queue, because generally, the request
-     * list will first be dumped into the queue and then left
-     * empty.
+     * Updates handledRequestsCount from possibly stored counts, usually after worker migration.
      */
     protected _loadHandledRequestCount(): Promise<void>;
     protected _executeHooks<HookLike extends (...args: any[]) => Awaitable<void>>(hooks: HookLike[], ...args: Parameters<HookLike>): Promise<void>;
     /**
-     *
-     *
+     * Stops the crawler immediately.
+     *
+     * This method doesn't wait for currently active requests to finish.
+     *
+     * To stop the crawler gracefully (waiting for all running requests to finish), use {@link BasicCrawler.stop|`crawler.stop()`} instead.
      */
     teardown(): Promise<void>;
     protected _getCookieHeaderFromRequest(request: Request): string;
     private _getRequestQueue;
-
+    private requestMatchesEnqueueStrategy;
 }
 export interface CreateContextOptions {
     request: Request;
package/internals/basic-crawler.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"basic-crawler.d.ts","sourceRoot":"","sources":["../../src/internals/basic-crawler.ts"],"names":[],"mappings":"…"}
+{"version":3,"file":"basic-crawler.d.ts","sourceRoot":"","sources":["../../src/internals/basic-crawler.ts"],"names":[],"mappings":"…"}