@crawlee/basic 4.0.0-beta.2 → 4.0.0-beta.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -1
- package/index.d.ts +1 -1
- package/index.d.ts.map +1 -1
- package/index.js +0 -1
- package/index.js.map +1 -1
- package/internals/basic-crawler.d.ts +175 -78
- package/internals/basic-crawler.d.ts.map +1 -1
- package/internals/basic-crawler.js +359 -152
- package/internals/basic-crawler.js.map +1 -1
- package/internals/send-request.d.ts +3 -4
- package/internals/send-request.d.ts.map +1 -1
- package/internals/send-request.js +3 -18
- package/internals/send-request.js.map +1 -1
- package/package.json +5 -5
- package/internals/constants.d.ts +0 -7
- package/internals/constants.d.ts.map +0 -1
- package/internals/constants.js +0 -7
- package/internals/constants.js.map +0 -1
- package/tsconfig.build.tsbuildinfo +0 -1
package/README.md
CHANGED
@@ -9,6 +9,10 @@
     <small>A web scraping and browser automation library</small>
 </h1>
 
+<p align=center>
+    <a href="https://trendshift.io/repositories/5179" target="_blank"><img src="https://trendshift.io/api/badge/repositories/5179" alt="apify%2Fcrawlee | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
+</p>
+
 <p align=center>
     <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/v/@crawlee/core.svg" alt="NPM latest version" data-canonical-src="https://img.shields.io/npm/v/@crawlee/core/next.svg" style="max-width: 100%;"></a>
     <a href="https://www.npmjs.com/package/@crawlee/core" rel="nofollow"><img src="https://img.shields.io/npm/dm/@crawlee/core.svg" alt="Downloads" data-canonical-src="https://img.shields.io/npm/dm/@crawlee/core.svg" style="max-width: 100%;"></a>
@@ -24,7 +28,7 @@ Crawlee is available as the [`crawlee`](https://www.npmjs.com/package/crawlee) N
 
 > 👉 **View full documentation, guides and examples on the [Crawlee project website](https://crawlee.dev)** 👈
 
->
+> Do you prefer 🐍 Python instead of JavaScript? [👉 Check out Crawlee for Python 👈](https://github.com/apify/crawlee-python).
 
 ## Installation
 
package/index.d.ts
CHANGED
package/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,8BAA8B,CAAC;AAC7C,
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,8BAA8B,CAAC;AAC7C,OAAO,EAAE,WAAW,EAAE,UAAU,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,gBAAgB,CAAC"}
package/index.js
CHANGED
package/index.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,8BAA8B,CAAC
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,8BAA8B,CAAC"}
package/internals/basic-crawler.d.ts
CHANGED
@@ -1,38 +1,14 @@
-import type { AddRequestsBatchedOptions, AddRequestsBatchedResult, AutoscaledPoolOptions,
-import { AutoscaledPool, Configuration, Dataset, RequestProvider, SessionPool, Statistics } from '@crawlee/core';
-import type { Awaitable, BatchAddRequestsResult, Dictionary, SetStatusMessageOptions } from '@crawlee/types';
+import type { AddRequestsBatchedOptions, AddRequestsBatchedResult, AutoscaledPoolOptions, CrawlingContext, DatasetExportOptions, EnqueueLinksOptions, EventManager, FinalStatistics, GetUserDataFromRequest, IRequestList, IRequestManager, ProxyConfiguration, Request, RequestsLike, RouterHandler, RouterRoutes, Session, SessionPoolOptions, SkippedRequestCallback, Source, StatisticsOptions, StatisticState } from '@crawlee/core';
+import { AutoscaledPool, Configuration, ContextPipeline, Dataset, RequestProvider, SessionPool, Statistics } from '@crawlee/core';
+import type { Awaitable, BaseHttpClient, BatchAddRequestsResult, Dictionary, ProxyInfo, SetStatusMessageOptions } from '@crawlee/types';
 import { RobotsTxtFile } from '@crawlee/utils';
-import type { SetRequired } from 'type-fest';
+import type { ReadonlyDeep, SetRequired } from 'type-fest';
 import type { Log } from '@apify/log';
 import { TimeoutError } from '@apify/timeout';
-export interface BasicCrawlingContext<UserData extends Dictionary = Dictionary> extends CrawlingContext<
-    /**
-     * This function automatically finds and enqueues links from the current page, adding them to the {@link RequestQueue}
-     * currently used by the crawler.
-     *
-     * Optionally, the function allows you to filter the target links' URLs using an array of globs or regular expressions
-     * and override settings of the enqueued {@link Request} objects.
-     *
-     * Check out the [Crawl a website with relative links](https://crawlee.dev/js/docs/examples/crawl-relative-links) example
-     * for more details regarding its usage.
-     *
-     * **Example usage**
-     *
-     * ```ts
-     * async requestHandler({ enqueueLinks }) {
-     *     await enqueueLinks({
-     *         urls: [...],
-     *     });
-     * },
-     * ```
-     *
-     * @param [options] All `enqueueLinks()` parameters are passed via an options object.
-     * @returns Promise that resolves to {@link BatchAddRequestsResult} object.
-     */
-    enqueueLinks(options?: SetRequired<EnqueueLinksOptions, 'urls'>): Promise<BatchAddRequestsResult>;
+export interface BasicCrawlingContext<UserData extends Dictionary = Dictionary> extends CrawlingContext<UserData> {
 }
-export type RequestHandler<Context extends CrawlingContext =
-export type ErrorHandler<Context extends CrawlingContext =
+export type RequestHandler<Context extends CrawlingContext = CrawlingContext> = (inputs: Context) => Awaitable<void>;
+export type ErrorHandler<Context extends CrawlingContext = CrawlingContext, ExtendedContext extends Context = Context> = (inputs: Context & Partial<ExtendedContext>, error: Error) => Awaitable<void>;
 export interface StatusMessageCallbackParams<Context extends CrawlingContext = BasicCrawlingContext, Crawler extends BasicCrawler<any> = BasicCrawler<Context>> {
     state: StatisticState;
     crawler: Crawler;
@@ -40,7 +16,10 @@ export interface StatusMessageCallbackParams<Context extends CrawlingContext = B
     message: string;
 }
 export type StatusMessageCallback<Context extends CrawlingContext = BasicCrawlingContext, Crawler extends BasicCrawler<any> = BasicCrawler<Context>> = (params: StatusMessageCallbackParams<Context, Crawler>) => Awaitable<void>;
-export
+export type RequireContextPipeline<DefaultContextType extends CrawlingContext, FinalContextType extends DefaultContextType> = DefaultContextType extends FinalContextType ? {} : {
+    contextPipelineBuilder: () => ContextPipeline<CrawlingContext, FinalContextType>;
+};
+export interface BasicCrawlerOptions<Context extends CrawlingContext = CrawlingContext, ContextExtension = Dictionary<never>, ExtendedContext extends Context = Context & ContextExtension> {
     /**
     * User-provided function that performs the logic of the crawler. It is called for each URL to crawl.
    *
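The new `RequireContextPipeline` helper type makes `contextPipelineBuilder` mandatory only when the crawler's final context type actually differs from the base one. A minimal sketch of how the conditional type resolves; it restates the definition from the diff locally, and the `MyContext` interface is a hypothetical example, not part of the package:

```ts
import type { ContextPipeline, CrawlingContext } from '@crawlee/core';

// Hypothetical extended context, for illustration only.
interface MyContext extends CrawlingContext {
    customHelper(): Promise<void>;
}

// Restated from the declaration above.
type RequireContextPipeline<DefaultContextType extends CrawlingContext, FinalContextType extends DefaultContextType> =
    DefaultContextType extends FinalContextType
        ? {}
        : { contextPipelineBuilder: () => ContextPipeline<CrawlingContext, FinalContextType> };

// Resolves to {}: no pipeline is needed when the context is not extended.
type NoPipelineNeeded = RequireContextPipeline<CrawlingContext, CrawlingContext>;

// Resolves to { contextPipelineBuilder: ... }: a pipeline must be supplied,
// because the base CrawlingContext does not extend MyContext.
type PipelineRequired = RequireContextPipeline<CrawlingContext, MyContext>;
```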
@@ -58,7 +37,35 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
     * The exceptions are logged to the request using the
     * {@link Request.pushErrorMessage|`Request.pushErrorMessage()`} function.
     */
-    requestHandler?: RequestHandler<
+    requestHandler?: RequestHandler<ExtendedContext>;
+    /**
+     * Allows the user to extend the crawling context passed to the request handler with custom functionality.
+     *
+     * **Example usage:**
+     *
+     * ```javascript
+     * import { BasicCrawler } from 'crawlee';
+     *
+     * // Create a crawler instance
+     * const crawler = new BasicCrawler({
+     *     extendContext: (context) => ({
+     *         async customHelper() {
+     *             await context.pushData({ url: context.request.url })
+     *         }
+     *     }),
+     *     async requestHandler(context) {
+     *         await context.customHelper();
+     *     },
+     * });
+     * ```
+     */
+    extendContext?: (context: Context) => Awaitable<ContextExtension>;
+    /**
+     * *Intended for BasicCrawler subclasses*. Prepares a context pipeline that transforms the initial crawling context into the shape given by the `Context` type parameter.
+     *
+     * The option is not required if your crawler subclass does not extend the crawling context with custom information or helpers.
+     */
+    contextPipelineBuilder?: () => ContextPipeline<CrawlingContext, Context>;
     /**
     * Static list of URLs to be processed.
     * If not provided, the crawler will open the default request queue when the {@link BasicCrawler.addRequests|`crawler.addRequests()`} function is called.
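The `extendContext` JSDoc above already carries a JavaScript example; the same pattern in TypeScript, where the returned helpers flow into the handler's context through the `ContextExtension` type parameter (`customHelper` is illustrative, as in the JSDoc):

```ts
import { BasicCrawler } from 'crawlee';

const crawler = new BasicCrawler({
    // The returned object is merged into the context seen by requestHandler.
    extendContext: (context) => ({
        async customHelper() {
            await context.pushData({ url: context.request.url });
        },
    }),
    async requestHandler(context) {
        // `customHelper` is now available on the context.
        await context.customHelper();
    },
});
```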
@@ -73,6 +80,13 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
     * it is a shortcut for running `crawler.addRequests()` before the `crawler.run()`.
     */
    requestQueue?: RequestProvider;
+    /**
+     * Allows explicitly configuring a request manager. Mutually exclusive with the `requestQueue` and `requestList` options.
+     *
+     * This enables explicitly configuring the crawler to use `RequestManagerTandem`, for instance.
+     * If using this, the type of `BasicCrawler.requestQueue` may not be fully compatible with the `RequestProvider` class.
+     */
+    requestManager?: IRequestManager;
    /**
     * Timeout in which the function passed as {@link BasicCrawlerOptions.requestHandler|`requestHandler`} needs to finish, in seconds.
     * @default 60
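The new `requestManager` option accepts any `IRequestManager` implementation. A hedged sketch of wiring one in; `myRequestManager` stands for whatever implementation you construct (for instance a `RequestManagerTandem`, as the JSDoc suggests, whose construction is not shown in this diff):

```ts
import { BasicCrawler } from 'crawlee';
import type { IRequestManager } from '@crawlee/core';

// Assumed to be built elsewhere, e.g. a RequestManagerTandem instance.
declare const myRequestManager: IRequestManager;

const crawler = new BasicCrawler({
    // Mutually exclusive with the `requestQueue` and `requestList` options.
    requestManager: myRequestManager,
    async requestHandler({ request, log }) {
        log.info(`Processing ${request.url}`);
    },
});
```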
@@ -87,7 +101,7 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
     * Second argument is the `Error` instance that
     * represents the last error thrown during processing of the request.
     */
-    errorHandler?: ErrorHandler<
+    errorHandler?: ErrorHandler<CrawlingContext, ExtendedContext>;
    /**
     * A function to handle requests that failed more than {@link BasicCrawlerOptions.maxRequestRetries|`maxRequestRetries`} times.
     *
@@ -96,7 +110,7 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
     * Second argument is the `Error` instance that
     * represents the last error thrown during processing of the request.
     */
-    failedRequestHandler?: ErrorHandler<
+    failedRequestHandler?: ErrorHandler<CrawlingContext, ExtendedContext>;
    /**
     * Specifies the maximum number of retries allowed for a request if its processing fails.
     * This includes retries due to navigation errors or errors thrown from user-supplied functions
@@ -126,12 +140,18 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
     * > *NOTE:* In cases of parallel crawling, the actual number of pages visited might be slightly higher than this value.
     */
    maxRequestsPerCrawl?: number;
+    /**
+     * Maximum depth of the crawl. If not set, the crawl will continue until all requests are processed.
+     * Setting this to `0` will only process the initial requests, skipping all links enqueued by `crawlingContext.enqueueLinks` and `crawlingContext.addRequests`.
+     * Passing `1` will process the initial requests and all links enqueued by `crawlingContext.enqueueLinks` and `crawlingContext.addRequests` in the handler for initial requests.
+     */
+    maxCrawlDepth?: number;
    /**
     * Custom options passed to the underlying {@link AutoscaledPool} constructor.
     * > *NOTE:* The {@link AutoscaledPoolOptions.runTaskFunction|`runTaskFunction`}
-     * and
-     *
-     *
+     * option is provided by the crawler and cannot be overridden.
+     * However, we can provide custom implementations of {@link AutoscaledPoolOptions.isFinishedFunction|`isFinishedFunction`}
+     * and {@link AutoscaledPoolOptions.isTaskReadyFunction|`isTaskReadyFunction`}.
     */
    autoscaledPoolOptions?: AutoscaledPoolOptions;
    /**
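A minimal sketch of the new `maxCrawlDepth` option, following the semantics documented in the hunk above (the URLs are illustrative):

```ts
import { BasicCrawler } from 'crawlee';

const crawler = new BasicCrawler({
    // Process the start URLs plus one level of links enqueued from them;
    // anything enqueued at depth 2 or deeper is skipped.
    maxCrawlDepth: 1,
    async requestHandler({ request, enqueueLinks, log }) {
        log.info(`Crawling ${request.url}`);
        await enqueueLinks({ urls: ['https://example.com/next'] });
    },
});

await crawler.run(['https://example.com']);
```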
@@ -199,11 +219,20 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
    /**
     * If set to `true`, the crawler will automatically try to fetch the robots.txt file for each domain,
     * and skip those that are not allowed. This also prevents disallowed URLs from being added via `enqueueLinks`.
+     *
+     * If an object is provided, it may contain a `userAgent` property to specify which user-agent
+     * should be used when checking the robots.txt file. If not provided, the default user-agent `*` will be used.
     */
-    respectRobotsTxtFile?: boolean
+    respectRobotsTxtFile?: boolean | {
+        userAgent?: string;
+    };
    /**
     * When a request is skipped for some reason, you can use this callback to act on it.
-     * This is currently fired
+     * This is currently fired for requests skipped
+     * 1. based on robots.txt file,
+     * 2. because they don't match enqueueLinks filters,
+     * 3. because they are redirected to a URL that doesn't match the enqueueLinks strategy,
+     * 4. or because the {@link BasicCrawlerOptions.maxRequestsPerCrawl|`maxRequestsPerCrawl`} limit has been reached
     */
    onSkippedRequest?: SkippedRequestCallback;
    /** @internal */
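Combining the two options above in one configuration; a sketch where the shape of the skipped-request callback argument (`url`, `reason`) is an assumption based on `SkippedRequestCallback` and is not spelled out in this diff:

```ts
import { BasicCrawler } from 'crawlee';

const crawler = new BasicCrawler({
    // Check robots.txt rules as 'GoogleBot' instead of the default `*` user-agent.
    respectRobotsTxtFile: { userAgent: 'GoogleBot' },
    // Fired for requests skipped due to robots.txt, enqueueLinks filters,
    // redirects outside the strategy, or the maxRequestsPerCrawl limit.
    onSkippedRequest: async ({ url, reason }) => {
        console.log(`Skipped ${url}: ${reason}`);
    },
    async requestHandler({ request, log }) {
        log.info(`Processing ${request.url}`);
    },
});
```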
@@ -223,6 +252,23 @@ export interface BasicCrawlerOptions<Context extends CrawlingContext = BasicCraw
     * Defaults to a new instance of {@link GotScrapingHttpClient}
     */
    httpClient?: BaseHttpClient;
+    /**
+     * If set, the crawler will be configured for all connections to use
+     * the Proxy URLs provided and rotated according to the configuration.
+     */
+    proxyConfiguration?: ProxyConfiguration;
+    /**
+     * A unique identifier for the crawler instance. This ID is used to isolate the state returned by
+     * {@link BasicCrawler.useState|`crawler.useState()`} from other crawler instances.
+     *
+     * When multiple crawler instances use `useState()` without an explicit `id`, they will share the same
+     * state object for backward compatibility. A warning will be logged in this case.
+     *
+     * To ensure each crawler has its own isolated state that also persists across script restarts
+     * (e.g., during Apify migrations), provide a stable, unique ID for each crawler instance.
+     *
+     */
+    id?: string;
 }
 /**
  * A set of options that you can toggle to enable experimental features in Crawlee.
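A sketch of the new `id` option isolating `useState()` between two crawler instances (the IDs and state shape are illustrative):

```ts
import { BasicCrawler } from 'crawlee';

// Two crawlers with distinct, stable IDs keep separate state objects,
// and each state also persists across restarts and migrations.
const productsCrawler = new BasicCrawler({ id: 'products-crawler', async requestHandler() {} });
const reviewsCrawler = new BasicCrawler({ id: 'reviews-crawler', async requestHandler() {} });

const productsState = await productsCrawler.useState({ processed: 0 });
const reviewsState = await reviewsCrawler.useState({ processed: 0 });

productsState.processed += 1; // does not affect reviewsState
```

Without explicit IDs, both instances would share one state object for backward compatibility, and a warning would be logged.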
@@ -303,9 +349,14 @@ export interface CrawlerExperiments {
     * ```
     * @category Crawlers
     */
-export declare class BasicCrawler<Context extends CrawlingContext =
+export declare class BasicCrawler<Context extends CrawlingContext = CrawlingContext, ContextExtension = Dictionary<never>, ExtendedContext extends Context = Context & ContextExtension> {
    readonly config: Configuration;
    protected static readonly CRAWLEE_STATE_KEY = "CRAWLEE_STATE";
+    /**
+     * Tracks crawler instances that accessed shared state without having an explicit id.
+     * Used to detect and warn about multiple crawlers sharing the same state.
+     */
+    private static useStateCrawlerIds;
    /**
     * A reference to the underlying {@link Statistics} class that collects and logs run statistics for requests.
     */
@@ -321,6 +372,10 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     * Only available if used by the crawler.
     */
    requestQueue?: RequestProvider;
+    /**
+     * The main request-handling component of the crawler. It's initialized during the crawler startup.
+     */
+    protected requestManager?: IRequestManager;
    /**
     * A reference to the underlying {@link SessionPool} class that manages the crawler's {@link Session|sessions}.
     * Only available if used by the crawler.
@@ -334,40 +389,59 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     * or to abort it by calling {@link AutoscaledPool.abort|`autoscaledPool.abort()`}.
     */
    autoscaledPool?: AutoscaledPool;
+    /**
+     * A reference to the underlying {@link ProxyConfiguration} class that manages the crawler's proxies.
+     * Only available if used by the crawler.
+     */
+    proxyConfiguration?: ProxyConfiguration;
    /**
     * Default {@link Router} instance that will be used if we don't specify any {@link BasicCrawlerOptions.requestHandler|`requestHandler`}.
     * See {@link Router.addHandler|`router.addHandler()`} and {@link Router.addDefaultHandler|`router.addDefaultHandler()`}.
     */
-    readonly router: RouterHandler<
+    readonly router: RouterHandler<Context>;
+    private contextPipelineBuilder;
+    private _contextPipeline?;
+    get contextPipeline(): ContextPipeline<CrawlingContext, ExtendedContext>;
    running: boolean;
    hasFinishedBefore: boolean;
    readonly log: Log;
-    protected requestHandler: RequestHandler<
-    protected errorHandler?: ErrorHandler<
-    protected failedRequestHandler?: ErrorHandler<
+    protected requestHandler: RequestHandler<ExtendedContext>;
+    protected errorHandler?: ErrorHandler<CrawlingContext, ExtendedContext>;
+    protected failedRequestHandler?: ErrorHandler<CrawlingContext, ExtendedContext>;
    protected requestHandlerTimeoutMillis: number;
    protected internalTimeoutMillis: number;
    protected maxRequestRetries: number;
+    protected maxCrawlDepth?: number;
    protected sameDomainDelayMillis: number;
    protected domainAccessedTime: Map<string, number>;
    protected maxSessionRotations: number;
+    protected maxRequestsPerCrawl?: number;
    protected handledRequestsCount: number;
    protected statusMessageLoggingInterval: number;
    protected statusMessageCallback?: StatusMessageCallback;
    protected sessionPoolOptions: SessionPoolOptions;
    protected useSessionPool: boolean;
-    protected crawlingContexts: Map<string, Context>;
    protected autoscaledPoolOptions: AutoscaledPoolOptions;
    protected events: EventManager;
    protected httpClient: BaseHttpClient;
    protected retryOnBlocked: boolean;
-    protected respectRobotsTxtFile: boolean
+    protected respectRobotsTxtFile: boolean | {
+        userAgent?: string;
+    };
    protected onSkippedRequest?: SkippedRequestCallback;
    private _closeEvents?;
+    private shouldLogMaxProcessedRequestsExceeded;
+    private shouldLogMaxEnqueuedRequestsExceeded;
    private experiments;
    private readonly robotsTxtFileCache;
    private _experimentWarnings;
+    private readonly crawlerId;
+    private readonly hasExplicitId;
    protected static optionsShape: {
+        // @ts-ignore optional peer dependency or compatibility with es2022
+        contextPipelineBuilder: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
+        // @ts-ignore optional peer dependency or compatibility with es2022
+        extendContext: import("ow").Predicate<Function> & import("ow").BasePredicate<Function | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
        requestList: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
@@ -388,12 +462,16 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
        maxSessionRotations: import("ow").NumberPredicate & import("ow").BasePredicate<number | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
        maxRequestsPerCrawl: import("ow").NumberPredicate & import("ow").BasePredicate<number | undefined>;
+        // @ts-ignore optional peer dependency or compatibility with es2022
+        maxCrawlDepth: import("ow").NumberPredicate & import("ow").BasePredicate<number | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
        autoscaledPoolOptions: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
        sessionPoolOptions: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
        useSessionPool: import("ow").BooleanPredicate & import("ow").BasePredicate<boolean | undefined>;
+        // @ts-ignore optional peer dependency or compatibility with es2022
+        proxyConfiguration: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
        statusMessageLoggingInterval: import("ow").NumberPredicate & import("ow").BasePredicate<number | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
@@ -401,7 +479,7 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
        // @ts-ignore optional peer dependency or compatibility with es2022
        retryOnBlocked: import("ow").BooleanPredicate & import("ow").BasePredicate<boolean | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
-        respectRobotsTxtFile: import("ow").
+        respectRobotsTxtFile: import("ow").AnyPredicate<boolean | object>;
        // @ts-ignore optional peer dependency or compatibility with es2022
        onSkippedRequest: import("ow").Predicate<Function> & import("ow").BasePredicate<Function | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
@@ -420,11 +498,14 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
        experiments: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
        // @ts-ignore optional peer dependency or compatibility with es2022
        statisticsOptions: import("ow").ObjectPredicate<object> & import("ow").BasePredicate<object | undefined>;
+        // @ts-ignore optional peer dependency or compatibility with es2022
+        id: import("ow").StringPredicate & import("ow").BasePredicate<string | undefined>;
    };
    /**
     * All `BasicCrawler` parameters are passed via an options object.
     */
-    constructor(options?: BasicCrawlerOptions<Context>,
+    constructor(options?: BasicCrawlerOptions<Context, ContextExtension, ExtendedContext> & RequireContextPipeline<CrawlingContext, Context>, // cast because the constructor logic handles missing `contextPipelineBuilder` - the type is just for DX
+    config?: Configuration);
    /**
     * Checks if the given error is a proxy error by comparing its message to a list of known proxy error messages.
     * Used for retrying requests that failed due to proxy errors.
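For subclass authors, the `RequireContextPipeline` constraint in the constructor signature means a subclass that narrows `Context` must pass `contextPipelineBuilder` through to `super()`. A rough sketch under that assumption; `MyContext` and `buildMyPipeline` are hypothetical, and the pipeline construction itself is left abstract because the `ContextPipeline` builder API is not shown in this diff:

```ts
import { BasicCrawler } from '@crawlee/basic';
import type { BasicCrawlerOptions } from '@crawlee/basic';
import type { ContextPipeline, CrawlingContext } from '@crawlee/core';

// Hypothetical context extension added by the subclass.
interface MyContext extends CrawlingContext {
    parseBody(): Promise<string>;
}

// Assumed to be implemented elsewhere with the ContextPipeline API.
declare function buildMyPipeline(): ContextPipeline<CrawlingContext, MyContext>;

class MyCrawler extends BasicCrawler<MyContext> {
    constructor(options: BasicCrawlerOptions<MyContext> = {}) {
        // A narrowed Context makes `contextPipelineBuilder` mandatory here.
        super({ ...options, contextPipelineBuilder: buildMyPipeline });
    }
}
```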
@@ -432,12 +513,6 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     * @param error The error to check.
     */
    protected isProxyError(error: Error): boolean;
-    /**
-     * Checks whether the given crawling context is getting blocked by anti-bot protection using several heuristics.
-     * Returns `false` if the request is not blocked, otherwise returns a string with a description of the block reason.
-     * @param _crawlingContext The crawling context to check.
-     */
-    protected isRequestBlocked(_crawlingContext: Context): Promise<string | false>;
    /**
     * This method is periodically called by the crawler, every `statusMessageLoggingInterval` seconds.
     */
@@ -453,15 +528,20 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     * @param [requests] The requests to add.
     * @param [options] Options for the request queue.
     */
-    run(requests?:
+    run(requests?: RequestsLike, options?: CrawlerRunOptions): Promise<FinalStatistics>;
    /**
     * Gracefully stops the current run of the crawler.
     *
     * All the tasks active at the time of calling this method will be allowed to finish.
+     *
+     * To stop the crawler immediately, use {@link BasicCrawler.teardown|`crawler.teardown()`} instead.
     */
    stop(message?: string): void;
    getRequestQueue(): Promise<RequestProvider>;
    useState<State extends Dictionary = Dictionary>(defaultValue?: State): Promise<State>;
+    protected get pendingRequestCountApproximation(): number;
+    protected calculateEnqueuedRequestLimit(explicitLimit?: number): number | undefined;
+    protected handleSkippedRequest(options: Parameters<SkippedRequestCallback>[0]): Promise<void>;
    /**
     * Adds requests to the queue in batches. By default, it will resolve after the initial batch is added, and continue
     * adding the rest in background. You can configure the batch size via `batchSize` option and the sleep time in between
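The clarified contract between graceful `stop()` and immediate `teardown()` in one small sketch (the signal handlers are illustrative triggers):

```ts
import { BasicCrawler } from 'crawlee';

const crawler = new BasicCrawler({
    async requestHandler({ request, log }) {
        log.info(`Processing ${request.url}`);
    },
});

// Graceful: tasks active at the time of the call are allowed to finish.
process.once('SIGINT', () => crawler.stop('Received SIGINT'));

// Immediate: does not wait for in-flight requests to finish.
process.once('SIGTERM', () => void crawler.teardown());

await crawler.run(['https://example.com']);
```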
@@ -473,7 +553,7 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     * @param requests The requests to add
     * @param options Options for the request queue
     */
-    addRequests(requests:
+    addRequests(requests: ReadonlyDeep<RequestsLike>, options?: CrawlerAddRequestsOptions): Promise<CrawlerAddRequestsResult>;
    /**
     * Pushes data to the specified {@link Dataset}, or the default crawler {@link Dataset} by calling {@link Dataset.pushData}.
     */
@@ -491,8 +571,11 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     * Supported formats are currently 'json' and 'csv', and will be inferred from the `path` automatically.
     */
    exportData<Data>(path: string, format?: 'json' | 'csv', options?: DatasetExportOptions): Promise<Data[]>;
+    /**
+     * Initializes the crawler.
+     */
    protected _init(): Promise<void>;
-    protected
+    protected runRequestHandler(crawlingContext: CrawlingContext): Promise<void>;
    /**
     * Handles blocked request
     */
@@ -501,31 +584,42 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
    protected getRobotsTxtFileForUrl(url: string): Promise<RobotsTxtFile | undefined>;
    protected _pauseOnMigration(): Promise<void>;
    /**
-     *
-     * and RequestQueue is present then enqueues it to the queue first.
+     * Initializes the RequestManager based on the configured requestList and requestQueue.
     */
-
+    private initializeRequestManager;
    /**
-     *
-     * Can be used to clean up orphaned browser pages.
+     * Fetches the next request to process from the underlying request provider.
     */
-    protected
+    protected _fetchNextRequest(): Promise<Request<Dictionary> | null>;
    /**
     * Delays processing of the request based on the `sameDomainDelaySecs` option,
     * adding it back to the queue after the timeout passes. Returns `true` if the request
     * should be ignored and will be reclaimed to the queue once ready.
     */
-    protected delayRequest(request: Request, source: IRequestList | RequestProvider): boolean;
+    protected delayRequest(request: Request, source: IRequestList | RequestProvider | IRequestManager): boolean;
    /**
     * Wrapper around requestHandler that fetches requests from RequestList/RequestQueue
     * then retries them in a case of an error, etc.
     */
    protected _runTaskFunction(): Promise<void>;
    /**
-     *
+     * Wrapper around the crawling context's `enqueueLinks` method:
+     * - Injects `crawlDepth` to each request being added based on the crawling context request.
+     * - Provides defaults for the `enqueueLinks` options based on the crawler configuration.
+     *     - These options can be overridden by the user.
+     * @internal
+     */
+    protected enqueueLinksWithCrawlDepth(options: SetRequired<EnqueueLinksOptions, 'urls'>, request: Request<Dictionary>, requestQueue: RequestProvider): Promise<BatchAddRequestsResult>;
+    /**
+     * Generator function that yields requests injected with the given crawl depth.
+     * @internal
+     */
+    protected addCrawlDepthRequestGenerator(requests: RequestsLike, newRequestDepth: number): AsyncGenerator<Source, void, undefined>;
+    /**
+     * Run async callback with given timeout and retry. Returns the result of the callback.
     * @ignore
     */
-    protected _timeoutAndRetry(handler: () => Promise<
+    protected _timeoutAndRetry<T>(handler: () => Promise<T>, timeout: number, error: Error | string, maxRetries?: number, retried?: number): Promise<T>;
    /**
     * Returns true if either RequestList or RequestQueue have a request ready for processing.
     */
@@ -535,12 +629,17 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
     */
    protected _defaultIsFinishedFunction(): Promise<boolean>;
    private _rotateSession;
+    /**
+     * Unwraps errors thrown by the context pipeline to get the actual user error.
+     * RequestHandlerError and ContextPipelineInitializationError wrap the actual error.
+     */
+    private unwrapError;
    /**
     * Handles errors thrown by user provided requestHandler()
     */
-    protected _requestFunctionErrorHandler(error: Error, crawlingContext:
+    protected _requestFunctionErrorHandler(error: Error, crawlingContext: CrawlingContext, source: IRequestList | IRequestManager): Promise<void>;
    protected _tagUserHandlerError<T>(cb: () => unknown): Promise<T>;
-    protected _handleFailedRequestHandler(crawlingContext:
+    protected _handleFailedRequestHandler(crawlingContext: CrawlingContext, error: Error): Promise<void>;
    /**
     * Resolves the most verbose error message from a thrown error
     * @param error The error received
@@ -549,23 +648,21 @@ export declare class BasicCrawler<Context extends CrawlingContext = BasicCrawlin
    protected _getMessageFromError(error: Error, forceStack?: boolean): string | TimeoutError | undefined;
    protected _canRequestBeRetried(request: Request, error: Error): boolean;
    /**
-     * Updates handledRequestsCount from possibly stored counts,
-     * usually after worker migration. Since one of the stores
-     * needs to have priority when both are present,
-     * it is the request queue, because generally, the request
-     * list will first be dumped into the queue and then left
-     * empty.
+     * Updates handledRequestsCount from possibly stored counts, usually after worker migration.
     */
    protected _loadHandledRequestCount(): Promise<void>;
    protected _executeHooks<HookLike extends (...args: any[]) => Awaitable<void>>(hooks: HookLike[], ...args: Parameters<HookLike>): Promise<void>;
    /**
-     *
-     *
+     * Stops the crawler immediately.
+     *
+     * This method doesn't wait for currently active requests to finish.
+     *
+     * To stop the crawler gracefully (waiting for all running requests to finish), use {@link BasicCrawler.stop|`crawler.stop()`} instead.
     */
    teardown(): Promise<void>;
    protected _getCookieHeaderFromRequest(request: Request): string;
    private _getRequestQueue;
-
+    private requestMatchesEnqueueStrategy;
 }
 export interface CreateContextOptions {
    request: Request;
package/internals/basic-crawler.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"basic-crawler.d.ts","sourceRoot":"","sources":["../../src/internals/basic-crawler.ts"],"names":[],"mappings":"…"}
+{"version":3,"file":"basic-crawler.d.ts","sourceRoot":"","sources":["../../src/internals/basic-crawler.ts"],"names":[],"mappings":"…"}
(regenerated source-map mappings omitted)