@arcblock/crawler 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/blocklet.d.ts +6 -0
- package/dist/blocklet.js +199 -0
- package/dist/cache.d.ts +10 -0
- package/dist/cache.js +119 -0
- package/dist/config.d.ts +10 -0
- package/dist/config.js +17 -0
- package/dist/crawler.d.ts +28 -0
- package/dist/crawler.js +314 -0
- package/dist/db/index.d.ts +1 -0
- package/dist/db/index.js +41 -0
- package/dist/db/job.d.ts +33 -0
- package/dist/db/job.js +54 -0
- package/dist/db/snapshot.d.ts +31 -0
- package/dist/db/snapshot.js +52 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.js +45 -0
- package/dist/middleware.d.ts +4 -0
- package/dist/middleware.js +44 -0
- package/dist/puppeteer.d.ts +16 -0
- package/dist/puppeteer.js +318 -0
- package/dist/utils.d.ts +15 -0
- package/dist/utils.js +239 -0
- package/esm/blocklet.d.ts +6 -0
- package/esm/blocklet.js +190 -0
- package/esm/cache.d.ts +10 -0
- package/esm/cache.js +114 -0
- package/esm/config.d.ts +10 -0
- package/esm/config.js +11 -0
- package/esm/crawler.d.ts +28 -0
- package/esm/crawler.js +301 -0
- package/esm/db/index.d.ts +1 -0
- package/esm/db/index.js +35 -0
- package/esm/db/job.d.ts +33 -0
- package/esm/db/job.js +50 -0
- package/esm/db/snapshot.d.ts +31 -0
- package/esm/db/snapshot.js +48 -0
- package/esm/index.d.ts +6 -0
- package/esm/index.js +26 -0
- package/esm/middleware.d.ts +4 -0
- package/esm/middleware.js +41 -0
- package/esm/puppeteer.d.ts +16 -0
- package/esm/puppeteer.js +272 -0
- package/esm/utils.d.ts +15 -0
- package/esm/utils.js +220 -0
- package/package.json +11 -3
- package/src/blocklet.ts +0 -223
- package/src/cache.ts +0 -117
- package/src/config.ts +0 -13
- package/src/crawler.ts +0 -364
- package/src/db/index.ts +0 -27
- package/src/db/job.ts +0 -93
- package/src/db/snapshot.ts +0 -89
- package/src/index.ts +0 -19
- package/src/middleware.ts +0 -46
- package/src/puppeteer.ts +0 -296
- package/src/utils.ts +0 -240
- package/third.d.ts +0 -1
- package/tsconfig.json +0 -9
package/dist/blocklet.js
ADDED
@@ -0,0 +1,199 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.cancelCronCrawlBlocklet = exports.initCronCrawlBlocklet = exports.crawlBlocklet = void 0;
+const cron_1 = __importDefault(require("@abtnode/cron"));
+const config_1 = require("@blocklet/sdk/lib/config");
+const debounce_1 = __importDefault(require("lodash/debounce"));
+const ufo_1 = require("ufo");
+const cache_1 = require("./cache");
+const config_2 = require("./config");
+const crawler_1 = require("./crawler");
+const puppeteer_1 = require("./puppeteer");
+const utils_1 = require("./utils");
+// record crawl blocklet running
+const crawlBlockletRunningMap = new Map();
+// crawl blocklet sitemap urls
+const crawlBlocklet = () => __awaiter(void 0, void 0, void 0, function* () {
+    // @ts-ignore
+    const { mountPoint, did } = (0, utils_1.getComponentInfo)();
+    if (crawlBlockletRunningMap.has(did) && crawlBlockletRunningMap.get(did)) {
+        config_2.logger.info(`Crawler blocklet ${did} is running, skip it`);
+        return;
+    }
+    // check has browser can use
+    try {
+        const browser = yield (0, puppeteer_1.getBrowser)();
+        if (!browser) {
+            throw new Error('No Browser can use');
+        }
+        config_2.logger.info('Crawler blocklet existing can use browser');
+    }
+    catch (error) {
+        config_2.logger.info(`Crawler blocklet abort by error: ${(error === null || error === void 0 ? void 0 : error.message) || (error === null || error === void 0 ? void 0 : error.reason) || error}`);
+        return;
+    }
+    const { appUrl } = config_2.config;
+    if (!appUrl) {
+        throw new Error('appUrl not found');
+    }
+    const sitemapList = yield (0, utils_1.getSitemapList)(appUrl);
+    const matchMountPoint = (0, ufo_1.joinURL)(appUrl, !mountPoint || mountPoint === '/' ? '' : mountPoint);
+    const otherMountPointList = config_1.components
+        .filter((item) => item.mountPoint && item.mountPoint !== mountPoint)
+        .map((item) => item.mountPoint);
+    // get can use loc
+    const blockletLocList = sitemapList.filter((item) => {
+        var _a;
+        if (mountPoint !== '/') {
+            return ((_a = item === null || item === void 0 ? void 0 : item.url) === null || _a === void 0 ? void 0 : _a.indexOf(matchMountPoint)) > -1;
+        }
+        // if mountPoint is /, skip other mountPoint
+        return otherMountPointList.every((mountPoint) => { var _a; return ((_a = item === null || item === void 0 ? void 0 : item.url) === null || _a === void 0 ? void 0 : _a.indexOf(mountPoint)) === -1; });
+    });
+    const canUseBlockletLocList = [];
+    const lastmodMap = new Map();
+    let skipBlockletLocTotal = 0;
+    let blockletLocTotal = 0;
+    yield Promise.all(blockletLocList.map((item) => __awaiter(void 0, void 0, void 0, function* () {
+        var _a;
+        let tempLocList = [];
+        if (item.url) {
+            tempLocList.push(item.url);
+        }
+        if (((_a = item === null || item === void 0 ? void 0 : item.links) === null || _a === void 0 ? void 0 : _a.length) > 0) {
+            tempLocList.push(...item.links.map((ytem) => ytem.url));
+        }
+        blockletLocTotal += tempLocList.length;
+        // @ts-ignore
+        tempLocList = (yield Promise.all(tempLocList.map((loc) => __awaiter(void 0, void 0, void 0, function* () {
+            try {
+                const { lastModified: cacheLastModified } = yield cache_1.useCache.get((0, utils_1.getRelativePath)(loc));
+                // sitemap item lastmod is same as cache lastModified, skip it
+                if (item.lastmod &&
+                    cacheLastModified &&
+                    new Date(cacheLastModified).getTime() === new Date(item.lastmod).getTime()) {
+                    skipBlockletLocTotal++;
+                    return false;
+                }
+                return loc;
+            }
+            catch (error) {
+                // ignore error
+            }
+            // if can not get cache, return loc
+            return loc;
+        })))).filter(Boolean);
+        tempLocList.forEach((loc) => {
+            if (item.lastmod)
+                lastmodMap.set(loc, item.lastmod);
+        });
+        canUseBlockletLocList.push(...tempLocList);
+    })));
+    const crawlerLogText = (step = '') => [
+        `Crawler sitemap.xml about ${did} ${step}: `,
+        {
+            blockletLocTotal,
+            canUseBlockletLocTotal: canUseBlockletLocList.length,
+            skipBlockletLocTotal,
+            lastmodMapTotal: lastmodMap.size,
+        },
+    ];
+    config_2.logger.info(...crawlerLogText('start'));
+    try {
+        // record crawl blocklet running
+        crawlBlockletRunningMap.set(did, true);
+        yield (0, crawler_1.createCrawlJob)({
+            // @ts-ignore
+            urls: canUseBlockletLocList,
+            saveToRedis: true,
+            lastmodMap,
+            // formatPageContent: async ({ page }: { page: any; url: string; lastmod?: string }) => {
+            //   const pageContent = await page.evaluate(() => {
+            //     const removeElements = (tagName: string) => {
+            //       const elements = document.querySelectorAll(tagName);
+            //       for (let i = elements.length - 1; i >= 0; i--) {
+            //         try {
+            //           elements[i]?.parentNode?.removeChild(elements[i] as Node);
+            //         } catch (error) {
+            //           // do noting
+            //         }
+            //       }
+            //     };
+            //     // remove script, style, link, noscript
+            //     // removeElements('script');
+            //     // removeElements('style');
+            //     // removeElements('link');
+            //     // removeElements('noscript');
+            //     // remove uploader
+            //     removeElements('[id="uploader-container"]');
+            //     removeElements('[class^="uppy-"]');
+            //     // remove point up component
+            //     removeElements('[id="point-up-component"]');
+            //     // add meta tag to record crawler
+            //     const meta = document.createElement('meta');
+            //     meta.name = 'blocklet-crawler';
+            //     meta.content = 'true';
+            //     document.head.appendChild(meta);
+            //     return document.documentElement.outerHTML;
+            //   });
+            //   return pageContent;
+            // },
+        });
+        config_2.logger.info(...crawlerLogText('success'));
+        yield (0, puppeteer_1.closeBrowser)({
+            trimCache: true,
+        });
+    }
+    catch (error) {
+        config_2.logger.info('Crawler blocklet abort by error', error);
+    }
+    finally {
+        // delete crawl blocklet running
+        crawlBlockletRunningMap.delete(did);
+    }
+});
+exports.crawlBlocklet = crawlBlocklet;
+const CRON_CRAWL_BLOCKLET_KEY = 'cron-crawl-blocklet';
+let cronCrawlBlockletJob = null;
+// init cron crawl blocklet
+const initCronCrawlBlocklet = ({ time = '0 0 */12 * * *', // every 12 hours
+options, } = {}) => {
+    if (!cronCrawlBlockletJob) {
+        cronCrawlBlockletJob = cron_1.default.init({
+            context: {},
+            jobs: [
+                {
+                    name: CRON_CRAWL_BLOCKLET_KEY,
+                    time,
+                    fn: (0, debounce_1.default)(exports.crawlBlocklet),
+                    options: Object.assign({ runOnInit: false }, options),
+                },
+            ],
+            onError: (err) => {
+                console.error('run job failed', err);
+            },
+        });
+    }
+    return cronCrawlBlockletJob;
+};
+exports.initCronCrawlBlocklet = initCronCrawlBlocklet;
+const cancelCronCrawlBlocklet = () => {
+    if (cronCrawlBlockletJob) {
+        cronCrawlBlockletJob.jobs[CRON_CRAWL_BLOCKLET_KEY].stop();
+        cronCrawlBlockletJob = null;
+        config_2.logger.info('Cron crawl blocklet stop, clear crawl queue');
+    }
+};
+exports.cancelCronCrawlBlocklet = cancelCronCrawlBlocklet;
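For orientation, the new `dist/blocklet.js` wires the sitemap crawl into a cron job (`initCronCrawlBlocklet`), exposes a manual trigger (`crawlBlocklet`), and provides a cancel hook. A minimal usage sketch, assuming these helpers are re-exported from the package root (the exact export surface depends on `dist/index.js`, which is not expanded in this diff):

```ts
import { initCronCrawlBlocklet, cancelCronCrawlBlocklet } from '@arcblock/crawler';

// Schedule the sitemap crawl; the compiled default above is '0 0 */12 * * *' (every 12 hours).
// The return value is the underlying @abtnode/cron instance.
const cronJob = initCronCrawlBlocklet({
  time: '0 0 */12 * * *',
  options: { runOnInit: false },
});

// Stop the cron job again, e.g. when the blocklet shuts down.
process.on('SIGTERM', () => {
  cancelCronCrawlBlocklet();
});
```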
package/dist/cache.d.ts
ADDED
@@ -0,0 +1,10 @@
+export declare const cachePool: import("generic-pool").Pool<any>;
+export declare const memoryPool: import("generic-pool").Pool<Map<string, any>>;
+export declare const withCache: (cb: Function) => Promise<any>;
+export declare const formatKey: (key: string) => string;
+export declare const useCache: {
+    get: (key: string) => Promise<any>;
+    set: (key: string, value: any, options?: any) => Promise<any>;
+    remove: (key: string) => Promise<any>;
+    list: (key?: string) => Promise<any>;
+};
package/dist/cache.js
ADDED
@@ -0,0 +1,119 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var _a;
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.useCache = exports.formatKey = exports.withCache = exports.memoryPool = exports.cachePool = void 0;
+const generic_pool_1 = require("generic-pool");
+const redis_1 = require("redis");
+const config_1 = require("./config");
+const cacheKeyPrefix = ((_a = process.env) === null || _a === void 0 ? void 0 : _a.BLOCKLET_REAL_DID) ? `${process.env.BLOCKLET_REAL_DID}:` : '';
+const MAX_REDIS_RETRY = 3;
+const ttl = 1000 * 60 * 60 * 24 * 7;
+exports.cachePool = (0, generic_pool_1.createPool)({
+    create: () => __awaiter(void 0, void 0, void 0, function* () {
+        try {
+            const { redisUrl } = config_1.config;
+            const redisClient = (0, redis_1.createClient)({
+                url: redisUrl,
+                socket: {
+                    // @ts-ignore
+                    reconnectStrategy: (retries) => {
+                        if (retries >= MAX_REDIS_RETRY) {
+                            return new Error('Retry Time Exhausted');
+                        }
+                        return Math.min(retries * 500, 1000 * 3);
+                    },
+                },
+            });
+            redisClient.on('error', (err) => config_1.logger.warn('Redis Client Error:', err));
+            yield redisClient.connect();
+            config_1.logger.info(`Successfully connected to Redis: ${redisUrl}`);
+            return redisClient;
+        }
+        catch (error) {
+            config_1.logger.warn('Redis connection failed', error);
+            return null;
+        }
+    }),
+    destroy: (client) => __awaiter(void 0, void 0, void 0, function* () {
+        // if is redis client
+        if (client.isReady) {
+            yield client.quit();
+        }
+    }),
+}, {
+    max: 2, // 2 clients
+    min: 0,
+    // evictionRunIntervalMillis: 0,
+});
+exports.memoryPool = (0, generic_pool_1.createPool)({
+    create: () => {
+        const map = new Map();
+        // @ts-ignore
+        map.del = map.delete;
+        return Promise.resolve(map);
+    },
+    destroy: (client) => {
+        client.clear();
+        return Promise.resolve();
+    },
+}, {
+    max: 10,
+    min: 0,
+});
+const withCache = (cb) => __awaiter(void 0, void 0, void 0, function* () {
+    const pool = config_1.config.redisUrl ? exports.cachePool : exports.memoryPool;
+    const client = yield pool.acquire();
+    if (client) {
+        try {
+            return cb(client);
+        }
+        finally {
+            // release client to pool, let other use
+            yield pool.release(client);
+        }
+    }
+});
+exports.withCache = withCache;
+const formatKey = (key) => {
+    return `${cacheKeyPrefix}${key}`;
+};
+exports.formatKey = formatKey;
+exports.useCache = {
+    get: (key) => {
+        return (0, exports.withCache)((client) => __awaiter(void 0, void 0, void 0, function* () {
+            const value = yield client.get((0, exports.formatKey)(key));
+            try {
+                return JSON.parse(value);
+            }
+            catch (error) {
+                // ignore error
+            }
+            return value;
+        }));
+    },
+    set: (key, value, options) => {
+        return (0, exports.withCache)((client) => {
+            const formatValue = typeof value === 'string' ? value : JSON.stringify(value);
+            return client.set((0, exports.formatKey)(key), formatValue, Object.assign({ PX: ttl }, options));
+        });
+    },
+    remove: (key) => {
+        return (0, exports.withCache)((client) => {
+            return client.del((0, exports.formatKey)(key));
+        });
+    },
+    list: (key = '*') => {
+        return (0, exports.withCache)((client) => {
+            return client.keys((0, exports.formatKey)(key));
+        });
+    },
+};
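`dist/cache.js` backs the crawler's page cache with a pooled Redis client when `REDIS_URL` is set and falls back to a pooled in-memory `Map` otherwise; entries default to a 7-day TTL via the `PX` option. A small sketch of the `useCache` API shown above (the deep import path is illustrative; the actual export surface depends on `package.json`):

```ts
import { useCache } from '@arcblock/crawler/dist/cache';

async function cacheExample() {
  // Keys are namespaced by formatKey with a BLOCKLET_REAL_DID prefix when that env var is set.
  await useCache.set('/docs/getting-started', { lastModified: '2024-01-01T00:00:00.000Z' });

  // get() JSON.parses stored values and falls back to the raw string when parsing fails.
  const cached = await useCache.get('/docs/getting-started');

  const keys = await useCache.list(); // defaults to the '*' key pattern
  await useCache.remove('/docs/getting-started');
  return { cached, keys };
}
```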
package/dist/config.d.ts
ADDED
package/dist/config.js
ADDED
@@ -0,0 +1,17 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.config = exports.logger = void 0;
+const logger_1 = __importDefault(require("@blocklet/logger"));
+exports.logger = (0, logger_1.default)('crawler', { level: process.env.LOG_LEVEL || 'info' });
+exports.config = {
+    redisUrl: process.env.REDIS_URL,
+    dataDir: process.env.BLOCKLET_DATA_DIR,
+    appDir: process.env.BLOCKLET_APP_DIR || process.cwd(),
+    appUrl: process.env.BLOCKLET_APP_URL,
+    puppeteerPath: process.env.PUPPETEER_EXECUTABLE_PATH,
+    cacheDir: process.env.BLOCKLET_CACHE_DIR,
+    testOnInitialize: process.env.NODE_ENV === 'production',
+};
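`dist/config.js` reads all of its settings from environment variables supplied by the blocklet runtime (`REDIS_URL`, `BLOCKLET_DATA_DIR`, `BLOCKLET_APP_DIR`, `BLOCKLET_APP_URL`, `PUPPETEER_EXECUTABLE_PATH`, `BLOCKLET_CACHE_DIR`). A brief sketch of how downstream code consumes it (import path illustrative):

```ts
import { config, logger } from '@arcblock/crawler/dist/config';

// Unset variables come through as undefined; cache.js uses config.redisUrl to decide
// between the Redis-backed pool and the in-memory fallback.
if (!config.redisUrl) {
  logger.info('REDIS_URL is not set; the crawler cache will use the in-memory pool');
}
```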
package/dist/crawler.d.ts
ADDED
@@ -0,0 +1,28 @@
+import { JobState } from './db/job';
+import { SnapshotModel } from './db/snapshot';
+export declare function createCrawlQueue(): void;
+export declare function getDataDir(): Promise<{
+    htmlDir: string;
+    screenshotDir: string;
+}>;
+export declare const getPageContent: ({ url, formatPageContent, includeScreenshot, includeHtml, width, height, quality, timeout, fullPage, }: {
+    url: string;
+    formatPageContent?: Function;
+    includeScreenshot?: boolean;
+    includeHtml?: boolean;
+    width?: number;
+    height?: number;
+    quality?: number;
+    timeout?: number;
+    fullPage?: boolean;
+}) => Promise<{
+    html: string;
+    screenshot: Uint8Array<ArrayBufferLike> | null;
+}>;
+export declare function createCrawlJob(params: JobState, callback?: (snapshot: SnapshotModel | null) => void): Promise<any>;
+export declare function getJob(condition: Partial<JobState>): Promise<any>;
+export declare function formatSnapshot(snapshot: SnapshotModel, columns?: Array<keyof SnapshotModel>): Promise<SnapshotModel>;
+/**
+ * get snapshot from db or crawl queue
+ */
+export declare function getSnapshot(jobId: string): Promise<SnapshotModel | null>;