@crawlee/linkedom 3.7.1-beta.5 → 3.7.1-beta.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -1,5 +1,8 @@
|
|
|
1
|
+
// @ts-ignore optional peer dependency or compatibility with es2022
|
|
1
2
|
/// <reference types="node" />
|
|
3
|
+
// @ts-ignore optional peer dependency or compatibility with es2022
|
|
2
4
|
/// <reference types="node/http" />
|
|
5
|
+
// @ts-ignore optional peer dependency or compatibility with es2022
|
|
3
6
|
/// <reference types="node_modules/got/dist/source/core/timed-out" />
|
|
4
7
|
import type { IncomingMessage } from 'http';
|
|
5
8
|
import type { HttpCrawlerOptions, InternalHttpCrawlingContext, InternalHttpHook, ErrorHandler, RequestHandler, EnqueueLinksOptions, GetUserDataFromRequest, RouterRoutes, RequestProvider } from '@crawlee/http';
|
|
@@ -110,6 +113,7 @@ export declare class LinkeDOMCrawler extends HttpCrawler<LinkeDOMCrawlingContext
|
|
|
110
113
|
window: Window & typeof globalThis;
|
|
111
114
|
readonly body: string;
|
|
112
115
|
readonly document: Document;
|
|
116
|
+
// @ts-ignore optional peer dependency or compatibility with es2022
|
|
113
117
|
enqueueLinks: (enqueueOptions?: LinkeDOMCrawlerEnqueueLinksOptions) => Promise<import("@crawlee/types").BatchAddRequestsResult>;
|
|
114
118
|
}>;
|
|
115
119
|
}
|
|
@@ -121,6 +125,7 @@ interface EnqueueLinksInternalOptions {
|
|
|
121
125
|
finalRequestUrl?: string;
|
|
122
126
|
}
|
|
123
127
|
/** @internal */
|
|
128
|
+
// @ts-ignore optional peer dependency or compatibility with es2022
|
|
124
129
|
export declare function linkedomCrawlerEnqueueLinks({ options, window, requestQueue, originalRequestUrl, finalRequestUrl }: EnqueueLinksInternalOptions): Promise<import("@crawlee/types").BatchAddRequestsResult>;
|
|
125
130
|
/**
|
|
126
131
|
* Creates new {@apilink Router} instance that works based on request labels.
|
|
@@ -146,6 +151,7 @@ export declare function linkedomCrawlerEnqueueLinks({ options, window, requestQu
|
|
|
146
151
|
* await crawler.run();
|
|
147
152
|
* ```
|
|
148
153
|
*/
|
|
154
|
+
// @ts-ignore optional peer dependency or compatibility with es2022
|
|
149
155
|
export declare function createLinkeDOMRouter<Context extends LinkeDOMCrawlingContext = LinkeDOMCrawlingContext, UserData extends Dictionary = GetUserDataFromRequest<Context['request']>>(routes?: RouterRoutes<Context, UserData>): import("@crawlee/http").RouterHandler<Context>;
|
|
150
156
|
export {};
|
|
151
157
|
//# sourceMappingURL=linkedom-crawler.d.ts.map
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@crawlee/linkedom",
|
|
3
|
-
"version": "3.7.1-beta.5",
|
|
3
|
+
"version": "3.7.1-beta.6",
|
|
4
4
|
"description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.",
|
|
5
5
|
"engines": {
|
|
6
6
|
"node": ">=16.0.0"
|
|
@@ -55,8 +55,8 @@
|
|
|
55
55
|
"dependencies": {
|
|
56
56
|
"@apify/timeout": "^0.3.0",
|
|
57
57
|
"@apify/utilities": "^2.7.10",
|
|
58
|
-
"@crawlee/http": "3.7.1-beta.5",
|
|
59
|
-
"@crawlee/types": "3.7.1-beta.5",
|
|
58
|
+
"@crawlee/http": "3.7.1-beta.6",
|
|
59
|
+
"@crawlee/types": "3.7.1-beta.6",
|
|
60
60
|
"linkedom": "^0.16.0",
|
|
61
61
|
"ow": "^0.28.2",
|
|
62
62
|
"tslib": "^2.4.0"
|
|
@@ -68,5 +68,5 @@
|
|
|
68
68
|
}
|
|
69
69
|
}
|
|
70
70
|
},
|
|
71
|
-
"gitHead": "
|
|
71
|
+
"gitHead": "01664e58799374a6acc2aa9db397566c1acc69d4"
|
|
72
72
|
}
|