bdy 1.14.6-dev → 1.14.7-beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -206,6 +206,10 @@ class AgentManagerClass {
             return;
         }
         logger_1.default.setDebug(!!data.debug);
+        logger_1.default.info(`MANAGER REQUEST: change debug to ${!!data.debug}`);
+        this.serverOutput(res, {
+            success: true,
+        });
     }
     async processTunnelStop(req, res) {
         if (!this.agent) {
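
The manager's debug handler previously flipped the log level without acknowledging the request; it now logs the change and replies with an explicit success payload. A minimal sketch of exercising it from a client, assuming the manager serves this handler over a local HTTP interface: the host, port, and route below are hypothetical, and only the debug request field and success response field appear in the diff.

    // Hypothetical client call; host, port, and path are assumptions,
    // not taken from the diff.
    const http = require('node:http');

    const body = JSON.stringify({ debug: true });
    const req = http.request({
        host: '127.0.0.1',
        port: 4040, // hypothetical manager port
        path: '/debug', // hypothetical route
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'Content-Length': Buffer.byteLength(body),
        },
    }, (res) => {
        let raw = '';
        res.on('data', (chunk) => (raw += chunk));
        // As of this version the manager answers with { success: true }
        // instead of ending the response without a body.
        res.on('end', () => console.log(JSON.parse(raw)));
    });
    req.end(body);
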
@@ -0,0 +1,193 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils_1 = require("../../utils");
+const commander_1 = require("commander");
+const texts_1 = require("../../texts");
+const validation_1 = require("../../visualTest/validation");
+const output_1 = __importDefault(require("../../output"));
+const requests_1 = require("../../visualTest/requests");
+const zod_1 = require("zod");
+const node_zlib_1 = require("node:zlib");
+const tar_stream_1 = __importDefault(require("tar-stream"));
+const promises_1 = require("node:stream/promises");
+const node_fs_1 = require("node:fs");
+const node_path_1 = __importDefault(require("node:path"));
+const promises_2 = require("node:fs/promises");
+const commandScrape = (0, utils_1.newCommand)('scrape', texts_1.DESC_COMMAND_VT_SCRAPE);
+commandScrape.argument('<url>', texts_1.OPTION_SCRAPE_URL);
+commandScrape.option('--follow', texts_1.OPTION_SCRAPE_FOLLOW, false);
+commandScrape.addOption(new commander_1.Option('--outputType <type>', texts_1.OPTION_SCRAPE_OUTPUT_TYPE)
+    .choices(['jpeg', 'png', 'md', 'html'])
+    .makeOptionMandatory());
+commandScrape.option('--quality <quality>', texts_1.OPTION_SCRAPE_QUALITY);
+commandScrape.option('--fullPage', texts_1.OPTION_SCRAPE_FULL_PAGE, false);
+commandScrape.option('--cssSelector <selector>', texts_1.OPTION_SCRAPE_CSS_SELECTOR);
+commandScrape.option('--xpathSelector <selector>', texts_1.OPTION_SCRAPE_XPATH_SELECTOR);
+commandScrape.addOption(new commander_1.Option('--browser <browser>', texts_1.OPTION_SCRAPE_BROWSER)
+    .choices(['chrome', 'firefox', 'safari'])
+    .default('chrome'));
+commandScrape.option('--viewport <viewport>', texts_1.OPTION_SCRAPE_VIEWPORT, '1920x1080');
+commandScrape.option('--devicePixelRatio <ratio>', texts_1.OPTION_SCRAPE_DEVICE_PIXEL_RATIO, '1');
+commandScrape.option('--waitForElement <selector>', texts_1.OPTION_SCRAPE_WAIT_FOR_ELEMENT);
+commandScrape.option('--darkMode', texts_1.OPTION_SCRAPE_DARK_MODE, false);
+commandScrape.option('--delay <delay>', texts_1.OPTION_SCRAPE_DELAY, '0');
+commandScrape.option('--outputDir <dir>', texts_1.OPTION_SCRAPE_OUTPUT_DIR, '.');
+commandScrape.action(async (inputUrl, options) => {
+    if (!(0, validation_1.checkToken)()) {
+        output_1.default.exitError(texts_1.ERR_MISSING_VT_TOKEN);
+    }
+    const { url, follow, outputType, quality, outputDir, fullPage, cssSelector, xpathSelector, browser, viewport, devicePixelRatio, darkMode, delay, waitForElement, } = validateInputAndOptions(inputUrl, options);
+    try {
+        const { buildId } = await (0, requests_1.sendScrap)(url, outputType, follow, quality, fullPage, cssSelector, xpathSelector, browser, viewport, devicePixelRatio, darkMode, delay, waitForElement);
+        output_1.default.normal('Starting scrape session');
+        const status = await watchSessionStatus(buildId);
+        if (!status.ok) {
+            output_1.default.exitError(`Unexpected error while watching session status: ${status.error}`);
+        }
+        output_1.default.normal('Downloading scrape package');
+        const scrapPackageStream = await (0, requests_1.downloadScrapPackage)(buildId);
+        const brotliDecompressor = (0, node_zlib_1.createBrotliDecompress)();
+        const unpack = tar_stream_1.default.extract();
+        unpack.on('entry', async (header, stream, next) => {
+            const currentDir = process.cwd();
+            const preparedOutputDir = outputDir.startsWith('.')
+                ? node_path_1.default.join(currentDir, outputDir)
+                : outputDir;
+            const newFilePath = node_path_1.default.join(preparedOutputDir, header.name);
+            try {
+                if (header.type === 'file') {
+                    await (0, promises_2.mkdir)(node_path_1.default.dirname(newFilePath), { recursive: true });
+                    const fileWriteStream = (0, node_fs_1.createWriteStream)(newFilePath);
+                    await (0, promises_1.pipeline)(stream, fileWriteStream);
+                    next();
+                }
+                else {
+                    stream.resume();
+                    next();
+                }
+            }
+            catch (entryError) {
+                output_1.default.error(`Error processing entry ${header.name}: ${entryError}`);
+                next(entryError);
+            }
+        });
+        await (0, promises_1.pipeline)(scrapPackageStream, brotliDecompressor, unpack);
+        output_1.default.exitSuccess('Downloading scrape package finished');
+    }
+    catch (error) {
+        output_1.default.exitError(`${error}`);
+    }
+});
+function validateInputAndOptions(input, options) {
+    const urlSchema = zod_1.z.string().url();
+    const optionsSchema = zod_1.z.object({
+        follow: zod_1.z.boolean(),
+        outputType: zod_1.z.enum(['jpeg', 'png', 'md', 'html']),
+        quality: zod_1.z.coerce.number().min(1).max(100).optional(),
+        outputDir: zod_1.z.string().default('.'),
+        fullPage: zod_1.z.boolean().optional(),
+        cssSelector: zod_1.z.string().optional(),
+        xpathSelector: zod_1.z.string().optional(),
+        browser: zod_1.z.enum(['chrome', 'firefox', 'safari']),
+        viewport: zod_1.z
+            .string()
+            .refine((value) => {
+            const [width, height] = value.split('x');
+            return (width &&
+                height &&
+                !isNaN(Number(width)) &&
+                !isNaN(Number(height)) &&
+                Number(width) > 0 &&
+                Number(height) > 0);
+        }, 'Invalid viewport format, example: 1920x1080')
+            .transform((value) => {
+            const [width, height] = value.split('x');
+            return {
+                width: Number(width),
+                height: Number(height),
+            };
+        }),
+        devicePixelRatio: zod_1.z.coerce.number().min(1).max(4),
+        darkMode: zod_1.z.boolean(),
+        delay: zod_1.z.coerce.number().min(0).max(10000),
+        waitForElement: zod_1.z.string().optional(),
+    });
+    try {
+        const url = urlSchema.parse(input);
+        const { follow, outputType, quality, outputDir, fullPage, cssSelector, xpathSelector, browser, viewport, devicePixelRatio, darkMode, delay, waitForElement, } = optionsSchema.parse(options);
+        if (typeof quality === 'number' && outputType !== 'jpeg') {
+            output_1.default.exitError('Quality is only supported for jpeg output type, use --outputType jpeg');
+        }
+        if (cssSelector && xpathSelector) {
+            output_1.default.exitError('Only one of --cssSelector or --xpathSelector can be used');
+        }
+        return {
+            url,
+            follow,
+            outputType,
+            quality,
+            outputDir,
+            fullPage,
+            cssSelector,
+            xpathSelector,
+            browser,
+            viewport,
+            devicePixelRatio,
+            darkMode,
+            delay,
+            waitForElement,
+        };
+    }
+    catch (error) {
+        if (error instanceof zod_1.ZodError) {
+            output_1.default.exitError(error.errors.map((e) => `${e.path}: ${e.message}`).join(', '));
+        }
+        else {
+            throw error;
+        }
+    }
+}
+async function watchSessionStatus(buildId) {
+    return new Promise((resolve) => {
+        const eventSource = (0, requests_1.connectToScrapSession)(buildId);
+        eventSource.addEventListener('SESSION_STATUS', (event) => {
+            const data = JSON.parse(event.data);
+            if (data.status === 'GATHER_URLS_COMPLETED') {
+                output_1.default.normal(`Gathering URLs completed, found ${data.text} URLs`);
+            }
+            else if (data.status === 'GATHER_URLS_FAILED') {
+                output_1.default.error('Gathering URLs failed');
+            }
+            else if (data.status === 'SCRAPE_URL_COMPLETED') {
+                output_1.default.normal(`Scraping ${data.text} completed`);
+            }
+            else if (data.status === 'SCRAPE_URL_FAILED') {
+                output_1.default.error(`Scraping ${data.text} failed`);
+            }
+            else if (data.status === 'CREATE_PACKAGE_COMPLETED') {
+                output_1.default.normal('Package created');
+            }
+            else if (data.status === 'CREATE_PACKAGE_FAILED') {
+                output_1.default.error('Package creation failed');
+            }
+            else if (data.status === 'FINISHED') {
+                eventSource.close();
+                output_1.default.normal('Scrape session finished');
+                resolve({ ok: true });
+            }
+        });
+        eventSource.addEventListener('error', (event) => {
+            if (event.code) {
+                eventSource.close();
+                if (event.code === 410) {
+                    output_1.default.normal('Scrape session finished');
+                }
+                resolve({ ok: event.code === 410, error: event.code });
+            }
+        });
+    });
+}
+exports.default = commandScrape;
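
Taken together, this new file registers a scrape subcommand that validates its input with zod, starts a remote scrape session, reports progress from server-sent events, and unpacks the returned brotli-compressed tar archive into the chosen output directory. A sketch of an invocation, assuming the command is mounted directly on the bdy binary (the diff shows only the subcommand registration, not its parent command):

    bdy scrape https://example.com \
        --outputType png \
        --follow \
        --browser firefox \
        --viewport 1366x768 \
        --darkMode \
        --outputDir ./scrapes

Note the constraints enforced by validateInputAndOptions: --quality is accepted only together with --outputType jpeg, --cssSelector and --xpathSelector are mutually exclusive, and --viewport must be a positive <width>x<height> pair, which the schema transforms into a { width, height } object before the request is sent.
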
@@ -35,6 +35,7 @@ class Logger {
     }
     setDebug(debug) {
         this.debugOn = debug;
+        this.getPino().level = this.debugOn ? 'debug' : 'info';
     }
     getPino() {
         if (!this.p) {
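
With this change, toggling debug no longer only flips the wrapper's debugOn flag; it also moves the level of the underlying pino logger, so debug records are filtered by pino itself. A minimal standalone sketch of the pattern, independent of this package's Logger wrapper (pino loggers expose a mutable level property):

    const pino = require('pino');

    const log = pino({ level: 'info' });
    log.debug('hidden');   // below the current level, so not emitted
    log.level = 'debug';   // what setDebug(true) now does via getPino()
    log.debug('visible');  // emitted once the level is lowered
    log.level = 'info';    // setDebug(false) restores the quieter default
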
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "bdy",
   "preferGlobal": false,
-  "version": "1.14.6-dev",
+  "version": "1.14.7-beta",
   "type": "commonjs",
   "license": "MIT",
   "scripts": {