@crawlee/jsdom 3.16.1-beta.34 → 3.16.1-beta.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.mjs +6 -0
- package/package.json +5 -5
package/index.mjs
CHANGED
|
@@ -32,6 +32,7 @@ export const LoggerJson = mod.LoggerJson;
|
|
|
32
32
|
export const LoggerText = mod.LoggerText;
|
|
33
33
|
export const MAX_POOL_SIZE = mod.MAX_POOL_SIZE;
|
|
34
34
|
export const MAX_QUERIES_FOR_CONSISTENCY = mod.MAX_QUERIES_FOR_CONSISTENCY;
|
|
35
|
+
export const MemoryLoadSignal = mod.MemoryLoadSignal;
|
|
35
36
|
export const MinimumSpeedStream = mod.MinimumSpeedStream;
|
|
36
37
|
export const MissingRouteError = mod.MissingRouteError;
|
|
37
38
|
export const NonRetryableError = mod.NonRetryableError;
|
|
@@ -60,6 +61,7 @@ export const Session = mod.Session;
|
|
|
60
61
|
export const SessionError = mod.SessionError;
|
|
61
62
|
export const SessionPool = mod.SessionPool;
|
|
62
63
|
export const SitemapRequestList = mod.SitemapRequestList;
|
|
64
|
+
export const SnapshotStore = mod.SnapshotStore;
|
|
63
65
|
export const Snapshotter = mod.Snapshotter;
|
|
64
66
|
export const Statistics = mod.Statistics;
|
|
65
67
|
export const StorageManager = mod.StorageManager;
|
|
@@ -73,7 +75,10 @@ export const constructRegExpObjectsFromPseudoUrls = mod.constructRegExpObjectsFr
|
|
|
73
75
|
export const constructRegExpObjectsFromRegExps = mod.constructRegExpObjectsFromRegExps;
|
|
74
76
|
export const cookieStringToToughCookie = mod.cookieStringToToughCookie;
|
|
75
77
|
export const createBasicRouter = mod.createBasicRouter;
|
|
78
|
+
export const createClientLoadSignal = mod.createClientLoadSignal;
|
|
79
|
+
export const createCpuLoadSignal = mod.createCpuLoadSignal;
|
|
76
80
|
export const createDeserialize = mod.createDeserialize;
|
|
81
|
+
export const createEventLoopLoadSignal = mod.createEventLoopLoadSignal;
|
|
77
82
|
export const createFileRouter = mod.createFileRouter;
|
|
78
83
|
export const createHttpRouter = mod.createHttpRouter;
|
|
79
84
|
export const createJSDOMRouter = mod.createJSDOMRouter;
|
|
@@ -82,6 +87,7 @@ export const createRequests = mod.createRequests;
|
|
|
82
87
|
export const deserializeArray = mod.deserializeArray;
|
|
83
88
|
export const domCrawlerEnqueueLinks = mod.domCrawlerEnqueueLinks;
|
|
84
89
|
export const enqueueLinks = mod.enqueueLinks;
|
|
90
|
+
export const evaluateLoadSignalSample = mod.evaluateLoadSignalSample;
|
|
85
91
|
export const filterRequestsByPatterns = mod.filterRequestsByPatterns;
|
|
86
92
|
export const getCookiesFromResponse = mod.getCookiesFromResponse;
|
|
87
93
|
export const getDefaultCookieExpirationDate = mod.getDefaultCookieExpirationDate;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@crawlee/jsdom",
|
|
3
|
-
"version": "3.16.1-beta.34",
|
|
3
|
+
"version": "3.16.1-beta.35",
|
|
4
4
|
"description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.",
|
|
5
5
|
"engines": {
|
|
6
6
|
"node": ">=16.0.0"
|
|
@@ -55,9 +55,9 @@
|
|
|
55
55
|
"dependencies": {
|
|
56
56
|
"@apify/timeout": "^0.3.0",
|
|
57
57
|
"@apify/utilities": "^2.7.10",
|
|
58
|
-
"@crawlee/http": "3.16.1-beta.34",
|
|
59
|
-
"@crawlee/types": "3.16.1-beta.34",
|
|
60
|
-
"@crawlee/utils": "3.16.1-beta.34",
|
|
58
|
+
"@crawlee/http": "3.16.1-beta.35",
|
|
59
|
+
"@crawlee/types": "3.16.1-beta.35",
|
|
60
|
+
"@crawlee/utils": "3.16.1-beta.35",
|
|
61
61
|
"@types/jsdom": "^21.0.0",
|
|
62
62
|
"cheerio": "1.0.0-rc.12",
|
|
63
63
|
"jsdom": "^26.0.0",
|
|
@@ -71,5 +71,5 @@
|
|
|
71
71
|
}
|
|
72
72
|
}
|
|
73
73
|
},
|
|
74
|
-
"gitHead": "
|
|
74
|
+
"gitHead": "3d5fd8c11142549231b986efde65fe0cdd219776"
|
|
75
75
|
}
|