bdy 1.18.30-dev → 1.18.31-dev-commands-changes-869c3r8yn

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (45)
  1. package/distTs/package.json +1 -1
  2. package/distTs/src/api/client.js +83 -0
  3. package/distTs/src/command/crawl/link.js +61 -0
  4. package/distTs/src/command/crawl/run.js +147 -0
  5. package/distTs/src/command/crawl/validation.js +154 -0
  6. package/distTs/src/command/crawl.js +13 -0
  7. package/distTs/src/command/login.js +22 -4
  8. package/distTs/src/command/pipeline/run/start.js +101 -0
  9. package/distTs/src/command/pipeline/run/status.js +34 -0
  10. package/distTs/src/command/tests/capture/validation.js +46 -0
  11. package/distTs/src/command/tests/capture.js +103 -0
  12. package/distTs/src/command/tests/unit/link.js +61 -0
  13. package/distTs/src/command/tests/unit/upload.js +91 -0
  14. package/distTs/src/command/tests/unit.js +13 -0
  15. package/distTs/src/command/tests/visual/link.js +61 -0
  16. package/distTs/src/command/tests/visual/session/close.js +32 -0
  17. package/distTs/src/command/tests/visual/session/create.js +86 -0
  18. package/distTs/src/command/tests/visual/session.js +13 -0
  19. package/distTs/src/command/tests/visual/setup.js +20 -0
  20. package/distTs/src/command/tests/visual/shared/validation.js +145 -0
  21. package/distTs/src/command/tests/visual/upload.js +141 -0
  22. package/distTs/src/command/tests/visual.js +17 -0
  23. package/distTs/src/command/tests.js +15 -0
  24. package/distTs/src/command/whoami.js +12 -0
  25. package/distTs/src/crawl/requests.js +141 -0
  26. package/distTs/src/index.js +4 -6
  27. package/distTs/src/input.js +78 -0
  28. package/distTs/src/output/pipeline.js +915 -0
  29. package/distTs/src/project/cfg.js +39 -0
  30. package/distTs/src/texts.js +71 -43
  31. package/distTs/src/types/crawl.js +2 -0
  32. package/distTs/src/types/pipeline.js +424 -0
  33. package/distTs/src/unitTest/context.js +26 -0
  34. package/distTs/src/unitTest/requests.js +23 -10
  35. package/distTs/src/visualTest/context.js +42 -31
  36. package/distTs/src/visualTest/requests.js +39 -139
  37. package/distTs/src/visualTest/resources.js +40 -38
  38. package/distTs/src/visualTest/server.js +2 -2
  39. package/distTs/src/visualTest/snapshots.js +18 -17
  40. package/distTs/src/visualTest/validation.js +2 -10
  41. package/package.json +1 -1
  42. package/distTs/src/command/project/get.js +0 -18
  43. package/distTs/src/command/project/set.js +0 -31
  44. package/distTs/src/command/sandbox/get/yaml.js +0 -30
  45. package/distTs/src/command/vt/scrape.js +0 -193
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "bdy",
3
3
  "preferGlobal": false,
4
- "version": "1.18.30-dev",
4
+ "version": "1.18.31-dev-commands-changes-869c3r8yn",
5
5
  "type": "commonjs",
6
6
  "license": "MIT",
7
7
  "scripts": {
@@ -241,6 +241,72 @@ class ApiClient {
241
241
  parseResponseBody: true,
242
242
  });
243
243
  }
244
+ async getVtSuites(workspace, project) {
245
+ return await this.request({
246
+ method: 'GET',
247
+ path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/visual-tests/suites`,
248
+ parseResponseBody: true,
249
+ });
250
+ }
251
+ async getCrawlSuites(workspace, project) {
252
+ return await this.request({
253
+ method: 'GET',
254
+ path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/crawl/suites`,
255
+ parseResponseBody: true,
256
+ });
257
+ }
258
+ async getUtSuites(workspace, project) {
259
+ return await this.request({
260
+ method: 'GET',
261
+ path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/unit-tests/suites`,
262
+ parseResponseBody: true,
263
+ });
264
+ }
265
+ async createVtSuite(workspace, project, body) {
266
+ return await this.request({
267
+ method: 'POST',
268
+ path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/visual-tests/suites`,
269
+ body,
270
+ parseResponseBody: true,
271
+ });
272
+ }
273
+ async createCrawlSuite(workspace, project, body) {
274
+ return await this.request({
275
+ method: 'POST',
276
+ path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/crawl/suites`,
277
+ body,
278
+ parseResponseBody: true,
279
+ });
280
+ }
281
+ async createUtSuite(workspace, project, body) {
282
+ return await this.request({
283
+ method: 'POST',
284
+ path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/unit-tests/suites`,
285
+ body,
286
+ parseResponseBody: true,
287
+ });
288
+ }
289
+ async getVtSuiteToken(workspace, project, suiteId) {
290
+ return await this.request({
291
+ method: 'GET',
292
+ path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/visual-tests/suites/${encodeURIComponent(suiteId)}/token`,
293
+ parseResponseBody: true,
294
+ });
295
+ }
296
+ async getCrawlSuiteToken(workspace, project, suiteId) {
297
+ return await this.request({
298
+ method: 'GET',
299
+ path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/crawl/suites/${encodeURIComponent(suiteId)}/token`,
300
+ parseResponseBody: true,
301
+ });
302
+ }
303
+ async getUtSuiteToken(workspace, project, suiteId) {
304
+ return await this.request({
305
+ method: 'GET',
306
+ path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/unit-tests/suites/${encodeURIComponent(suiteId)}/token`,
307
+ parseResponseBody: true,
308
+ });
309
+ }
244
310
  async getInvokerEmails() {
245
311
  return await this.request({
246
312
  method: 'GET',
@@ -688,6 +754,23 @@ class ApiClient {
688
754
  parseResponseBody: true,
689
755
  });
690
756
  }
757
+ async resolveIdentifiers(workspace, params) {
758
+ let query = '';
759
+ Object.entries(params).forEach(([key, value]) => {
760
+ if (value === undefined)
761
+ return;
762
+ if (!query)
763
+ query += '?';
764
+ else
765
+ query += '&';
766
+ query += encodeURIComponent(key) + '=' + encodeURIComponent(value);
767
+ });
768
+ return await this.request({
769
+ method: 'GET',
770
+ path: `/workspaces/${encodeURIComponent(workspace)}/identifiers${query}`,
771
+ parseResponseBody: true,
772
+ });
773
+ }
691
774
  async getPipelineByIdentifier(workspace, project, identifier) {
692
775
  return await this.getResourceByIdentifier(workspace, {
693
776
  project,
@@ -0,0 +1,61 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ const utils_1 = require("../../utils");
7
+ const texts_1 = require("../../texts");
8
+ const output_1 = __importDefault(require("../../output"));
9
+ const commandCrawlLink = (0, utils_1.newCommand)('link', texts_1.DESC_COMMAND_CRAWL_LINK);
10
+ commandCrawlLink.option('-w, --workspace <workspace>', texts_1.OPTION_REST_API_WORKSPACE);
11
+ commandCrawlLink.option('-p, --project <project>', texts_1.OPTION_REST_API_PROJECT);
12
+ commandCrawlLink.option('-s, --suite <suite>', texts_1.OPTION_SUITE_IDENTIFIER);
13
+ commandCrawlLink.action(async (options) => {
14
+ const Input = require('../../input').default;
15
+ const ProjectCfg = require('../../project/cfg').default;
16
+ output_1.default.handleSignals();
17
+ const workspace = Input.restApiWorkspace(options.workspace);
18
+ const project = Input.restApiProject(options.project);
19
+ const client = Input.restApiTokenClient(false, options.api, options.region);
20
+ let suiteIdentifier = options.suite;
21
+ if (!suiteIdentifier) {
22
+ const opt = await output_1.default.inputMenuAdv(texts_1.TXT_COMMAND_SUITE_SELECT, [
23
+ {
24
+ name: texts_1.TXT_COMMAND_SUITE_CREATE_NEW,
25
+ description: texts_1.TXT_COMMAND_SUITE_CREATE_NEW_CRAWL_DESC,
26
+ value: 'new',
27
+ },
28
+ {
29
+ name: texts_1.TXT_COMMAND_SUITE_LINK_EXISTING,
30
+ description: texts_1.TXT_COMMAND_SUITE_LINK_EXISTING_DESC,
31
+ value: 'existing',
32
+ },
33
+ ]);
34
+ if (opt === 'new') {
35
+ const name = await output_1.default.inputString(texts_1.TXT_COMMAND_SUITE_NAME);
36
+ const response = await client.createCrawlSuite(workspace, project, { name, identifier: name });
37
+ output_1.default.okSign();
38
+ output_1.default.normal(texts_1.TXT_COMMAND_SUITE_CREATED);
39
+ suiteIdentifier = response.identifier;
40
+ }
41
+ else {
42
+ const result = await client.getCrawlSuites(workspace, project);
43
+ const suites = result?.suites || [];
44
+ if (!suites.length) {
45
+ output_1.default.exitError(texts_1.ERR_NO_CRAWL_SUITES);
46
+ }
47
+ if (suites.length === 1) {
48
+ suiteIdentifier = suites[0].identifier;
49
+ }
50
+ else {
51
+ const items = suites.map((s) => s.name || s.identifier);
52
+ const index = await output_1.default.inputMenu(texts_1.TXT_COMMAND_CRAWL_LINK_SELECT, items);
53
+ suiteIdentifier = suites[index].identifier;
54
+ }
55
+ }
56
+ }
57
+ ProjectCfg.setSuite((0, utils_1.getWorkingDir)(), 'crawl', suiteIdentifier);
58
+ output_1.default.okSign();
59
+ output_1.default.exitSuccess(texts_1.TXT_COMMAND_CRAWL_LINK_SUCCESS);
60
+ });
61
+ exports.default = commandCrawlLink;
@@ -0,0 +1,147 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ const utils_1 = require("../../utils");
7
+ const commander_1 = require("commander");
8
+ const texts_1 = require("../../texts");
9
+ const output_1 = __importDefault(require("../../output"));
10
+ const node_zlib_1 = require("node:zlib");
11
+ const promises_1 = require("node:stream/promises");
12
+ const node_fs_1 = require("node:fs");
13
+ const node_path_1 = __importDefault(require("node:path"));
14
+ const promises_2 = require("node:fs/promises");
15
+ const commandCrawlRun = (0, utils_1.newCommand)('run', texts_1.DESC_COMMAND_CRAWL_RUN);
16
+ commandCrawlRun.argument('[url]', texts_1.OPTION_CRAWL_URL);
17
+ commandCrawlRun.option('--follow', texts_1.OPTION_CRAWL_FOLLOW, false);
18
+ commandCrawlRun.option('--respectRobots', texts_1.OPTION_COMPARE_RESPECT_ROBOTS, false);
19
+ commandCrawlRun.addOption(new commander_1.Option('--outputType <type>', texts_1.OPTION_CRAWL_OUTPUT_TYPE).choices([
20
+ 'jpeg',
21
+ 'png',
22
+ 'md',
23
+ 'html',
24
+ ]));
25
+ commandCrawlRun.option('--outputTypes <json>', texts_1.OPTION_CRAWL_OUTPUT_TYPES);
26
+ commandCrawlRun.option('--quality <quality>', texts_1.OPTION_CRAWL_QUALITY);
27
+ commandCrawlRun.option('--fullPage', texts_1.OPTION_CRAWL_FULL_PAGE, false);
28
+ commandCrawlRun.option('--cssSelector <selector>', texts_1.OPTION_CRAWL_CSS_SELECTOR);
29
+ commandCrawlRun.option('--xpathSelector <selector>', texts_1.OPTION_CRAWL_XPATH_SELECTOR);
30
+ commandCrawlRun.addOption(new commander_1.Option('--colorScheme <scheme>', texts_1.OPTION_CRAWL_COLOR_SCHEME).choices([
31
+ 'LIGHT',
32
+ 'DARK',
33
+ 'LIGHT_AND_DARK',
34
+ ]));
35
+ commandCrawlRun.option('--browsers <browsers>', texts_1.OPTION_CRAWL_BROWSERS);
36
+ commandCrawlRun.option('--devices <devices>', texts_1.OPTION_CRAWL_DEVICES);
37
+ commandCrawlRun.option('--waitFor <waitFors...>', texts_1.OPTION_COMPARE_WAIT_FOR);
38
+ commandCrawlRun.option('--cookie <cookies...>', texts_1.OPTION_COMPARE_COOKIE);
39
+ commandCrawlRun.option('--header <headers...>', texts_1.OPTION_COMPARE_HEADER);
40
+ commandCrawlRun.option('--localStorage <items...>', texts_1.OPTION_CRAWL_LOCAL_STORAGE);
41
+ commandCrawlRun.option('--delay <delays...>', texts_1.OPTION_CRAWL_DELAY);
42
+ commandCrawlRun.option('--outputDir <dir>', texts_1.OPTION_CRAWL_OUTPUT_DIR, '.');
43
+ commandCrawlRun.action(async (inputUrl, options) => {
44
+ const { downloadCrawlPackage, sendCrawl } = require('../../crawl/requests');
45
+ const { createCrawlContext, applyToken, applyCiAndCommitInfo } = require('../../visualTest/context');
46
+ const { validateInputAndOptions } = require('./validation');
47
+ const { getCiAndGitInfo } = require('@buddy-works/ci-info');
48
+ const tar = require('tar-stream');
49
+ const Input = require('../../input').default;
50
+ const token = await Input.crawlSuiteToken();
51
+ if (!token) {
52
+ output_1.default.exitError(texts_1.ERR_MISSING_CRAWL_TOKEN);
53
+ }
54
+ const ctx = createCrawlContext();
55
+ applyToken(ctx, token);
56
+ const { url, follow, respectRobots, outputTypes, outputDir, colorScheme, browsers, devices, cookies, requestHeaders, delays, waitForSelectors, localStorage, } = validateInputAndOptions(inputUrl, options);
57
+ try {
58
+ const ciAndGitInfo = await getCiAndGitInfo({});
59
+ applyCiAndCommitInfo(ctx, ciAndGitInfo);
60
+ const { buildId } = await sendCrawl(ctx, url, follow, respectRobots, outputTypes, colorScheme, browsers, devices, cookies, requestHeaders, delays, waitForSelectors, localStorage);
61
+ const status = await watchSessionStatus(ctx, buildId);
62
+ if (!status.ok) {
63
+ output_1.default.exitError(`Crawl session failed: ${status.error}`);
64
+ }
65
+ output_1.default.normal('Downloading crawl package');
66
+ const crawlPackageStream = await downloadCrawlPackage(ctx, buildId);
67
+ const brotliDecompressor = (0, node_zlib_1.createBrotliDecompress)();
68
+ const unpack = tar.extract();
69
+ unpack.on('entry', async (header, stream, next) => {
70
+ const currentDir = process.cwd();
71
+ const preparedOutputDir = outputDir.startsWith('.')
72
+ ? node_path_1.default.join(currentDir, outputDir)
73
+ : outputDir;
74
+ const newFilePath = node_path_1.default.join(preparedOutputDir, header.name);
75
+ try {
76
+ if (header.type === 'file') {
77
+ await (0, promises_2.mkdir)(node_path_1.default.dirname(newFilePath), { recursive: true });
78
+ const fileWriteStream = (0, node_fs_1.createWriteStream)(newFilePath);
79
+ await (0, promises_1.pipeline)(stream, fileWriteStream);
80
+ next();
81
+ }
82
+ else {
83
+ stream.resume();
84
+ next();
85
+ }
86
+ }
87
+ catch (entryError) {
88
+ output_1.default.error(`Error processing entry ${header.name}: ${entryError}`);
89
+ next(entryError);
90
+ }
91
+ });
92
+ await (0, promises_1.pipeline)(crawlPackageStream, brotliDecompressor, unpack);
93
+ output_1.default.exitSuccess('Downloading crawl package finished');
94
+ }
95
+ catch (error) {
96
+ output_1.default.exitError(`${error}`);
97
+ }
98
+ });
99
+ async function watchSessionStatus(ctx, buildId) {
100
+ const { connectToCrawlSession } = require('../../crawl/requests');
101
+ return new Promise((resolve) => {
102
+ const eventSource = connectToCrawlSession(ctx, buildId);
103
+ eventSource.addEventListener('SESSION_STATUS', (event) => {
104
+ const data = JSON.parse(event.data);
105
+ if (data.status === 'STARTED') {
106
+ output_1.default.normal('Crawl session started');
107
+ }
108
+ else if (data.status === 'GATHER_URLS_COMPLETED') {
109
+ output_1.default.normal(`Gathering URLs completed, found ${data.text} URLs`);
110
+ }
111
+ else if (data.status === 'GATHER_URLS_FAILED') {
112
+ output_1.default.error('Gathering URLs failed');
113
+ }
114
+ else if (data.status === 'CRAWL_URL_COMPLETED') {
115
+ output_1.default.normal(`Crawling ${data.text} completed`);
116
+ }
117
+ else if (data.status === 'CRAWL_URL_FAILED') {
118
+ output_1.default.error(`Crawling ${data.text} failed`);
119
+ }
120
+ else if (data.status === 'CREATE_PACKAGE_COMPLETED') {
121
+ output_1.default.normal('Package created');
122
+ }
123
+ else if (data.status === 'CREATE_PACKAGE_FAILED') {
124
+ output_1.default.error('Package creation failed');
125
+ }
126
+ else if (data.status === 'ERROR') {
127
+ eventSource.close();
128
+ resolve({ ok: false, error: data.text });
129
+ }
130
+ else if (data.status === 'FINISHED') {
131
+ eventSource.close();
132
+ output_1.default.normal('Crawl session finished');
133
+ resolve({ ok: true });
134
+ }
135
+ });
136
+ eventSource.addEventListener('error', (event) => {
137
+ if (event.code) {
138
+ eventSource.close();
139
+ if (event.code === 410) {
140
+ output_1.default.normal('Crawl session finished');
141
+ }
142
+ resolve({ ok: event.code === 410, error: event.code });
143
+ }
144
+ });
145
+ });
146
+ }
147
+ exports.default = commandCrawlRun;
@@ -0,0 +1,154 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.validateInputAndOptions = validateInputAndOptions;
7
+ const zod_1 = require("zod");
8
+ const output_1 = __importDefault(require("../../output"));
9
+ const validation_1 = require("../tests/visual/shared/validation");
10
+ const urlSchema = zod_1.z.string().url().optional();
11
+ const browserSchema = zod_1.z.enum(['chrome', 'firefox', 'safari']);
12
+ const browsersListSchema = zod_1.z
13
+ .string()
14
+ .transform((value) => value
15
+ .split(',')
16
+ .map((browser) => browser.trim().toLowerCase())
17
+ .filter((browser) => browser.length > 0))
18
+ .refine((browsers) => browsers.length > 0, {
19
+ message: 'Invalid browsers list. Supported values: chrome,firefox,safari',
20
+ })
21
+ .pipe(zod_1.z.array(browserSchema))
22
+ .transform((browsers) => Array.from(new Set(browsers.map((browser) => browser === 'chrome'
23
+ ? 'CHROMIUM'
24
+ : browser === 'firefox'
25
+ ? 'FIREFOX'
26
+ : 'WEBKIT'))));
27
+ const optionsSchema = zod_1.z.object({
28
+ follow: zod_1.z.boolean(),
29
+ respectRobots: zod_1.z.boolean().optional(),
30
+ outputType: zod_1.z.enum(['jpeg', 'png', 'md', 'html']).optional(),
31
+ outputTypes: zod_1.z.string().optional(),
32
+ quality: zod_1.z.coerce.number().min(1).max(100).optional(),
33
+ outputDir: zod_1.z.string().default('.'),
34
+ fullPage: zod_1.z.boolean().optional(),
35
+ cssSelector: zod_1.z.string().optional(),
36
+ xpathSelector: zod_1.z.string().optional(),
37
+ colorScheme: zod_1.z.enum(['LIGHT', 'DARK', 'LIGHT_AND_DARK']).optional(),
38
+ browsers: browsersListSchema.optional(),
39
+ devices: zod_1.z.string().optional(),
40
+ delay: validation_1.delaySchema,
41
+ waitFor: validation_1.waitForSchema,
42
+ cookie: validation_1.cookieSchema,
43
+ header: validation_1.headerSchema,
44
+ localStorage: zod_1.z
45
+ .array(zod_1.z.string().regex(/^(?:([^:]+)::)?([^=]+)=(.*)$/, {
46
+ message: "LocalStorage option must follow pattern '[scope::]key=value' (scope is optional)",
47
+ }))
48
+ .optional()
49
+ .transform((value) => value?.map((v) => {
50
+ const { scope, key, value } = (0, validation_1.parseScopedKeyValue)(v);
51
+ return { scope, key, value };
52
+ })),
53
+ });
54
+ function validateInputAndOptions(input, options) {
55
+ try {
56
+ const url = urlSchema.parse(input);
57
+ const { follow, respectRobots, outputType, outputTypes: rawOutputTypes, quality, outputDir, fullPage, cssSelector, xpathSelector, colorScheme, browsers: parsedBrowsers, devices: rawDevices, delay, waitFor, cookie, header, localStorage, } = optionsSchema.parse(options);
58
+ let parsedOutputTypes;
59
+ if (rawOutputTypes) {
60
+ try {
61
+ const outputTypeEntrySchema = zod_1.z.array(zod_1.z
62
+ .object({
63
+ type: zod_1.z.string().transform((v) => v.toUpperCase()),
64
+ selector: zod_1.z
65
+ .object({
66
+ type: zod_1.z.enum(['CSS', 'XPATH']).optional(),
67
+ value: zod_1.z.string().optional(),
68
+ })
69
+ .optional(),
70
+ quality: zod_1.z.number().min(1).max(100).optional(),
71
+ fullPage: zod_1.z.boolean().optional(),
72
+ })
73
+ .transform((data) => ({
74
+ ...data,
75
+ type: data.type,
76
+ })));
77
+ parsedOutputTypes = outputTypeEntrySchema.parse(JSON.parse(rawOutputTypes));
78
+ }
79
+ catch {
80
+ output_1.default.exitError("Invalid --outputTypes value. Use JSON array, e.g. --outputTypes '[{\"type\":\"png\"},{\"type\":\"jpeg\",\"quality\":80}]'");
81
+ }
82
+ }
83
+ else if (outputType) {
84
+ if (typeof quality === 'number' && outputType !== 'jpeg') {
85
+ output_1.default.exitError('Quality is only supported for jpeg output type, use --outputType jpeg');
86
+ }
87
+ if (cssSelector && xpathSelector) {
88
+ output_1.default.exitError('Only one of --cssSelector or --xpathSelector can be used');
89
+ }
90
+ const entry = {
91
+ type: outputType.toUpperCase(),
92
+ };
93
+ if (cssSelector) {
94
+ entry.selector = { type: 'CSS', value: cssSelector };
95
+ }
96
+ else if (xpathSelector) {
97
+ entry.selector = { type: 'XPATH', value: xpathSelector };
98
+ }
99
+ if (typeof quality === 'number') {
100
+ entry.quality = quality;
101
+ }
102
+ if (fullPage) {
103
+ entry.fullPage = fullPage;
104
+ }
105
+ parsedOutputTypes = [entry];
106
+ }
107
+ let parsedDevices;
108
+ if (rawDevices) {
109
+ try {
110
+ const dimensionSchema = zod_1.z
111
+ .string()
112
+ .regex(/^\d+x\d+$/, 'Must be in format "widthxheight"')
113
+ .transform((val) => {
114
+ const [width, height] = val.split('x').map(Number);
115
+ return { width, height };
116
+ });
117
+ const deviceSchema = zod_1.z.array(zod_1.z.object({
118
+ name: zod_1.z.string().optional(),
119
+ viewport: dimensionSchema,
120
+ screen: dimensionSchema,
121
+ devicePixelRatio: zod_1.z.number().positive(),
122
+ isMobile: zod_1.z.boolean(),
123
+ }));
124
+ parsedDevices = deviceSchema.parse(JSON.parse(rawDevices));
125
+ }
126
+ catch {
127
+ output_1.default.exitError('Invalid --devices value. Use JSON array, e.g. --devices \'[{"viewport":"1920x1080","screen":"1920x1080","devicePixelRatio":1,"isMobile":false}]\'');
128
+ }
129
+ }
130
+ return {
131
+ url,
132
+ follow,
133
+ respectRobots,
134
+ outputTypes: parsedOutputTypes,
135
+ outputDir,
136
+ colorScheme,
137
+ browsers: parsedBrowsers,
138
+ devices: parsedDevices,
139
+ cookies: cookie,
140
+ requestHeaders: header,
141
+ delays: delay ?? [],
142
+ waitForSelectors: waitFor,
143
+ localStorage,
144
+ };
145
+ }
146
+ catch (error) {
147
+ if (error instanceof zod_1.ZodError) {
148
+ output_1.default.exitError(error.errors.map((e) => `${e.path}: ${e.message}`).join(', '));
149
+ }
150
+ else {
151
+ throw error;
152
+ }
153
+ }
154
+ }
@@ -0,0 +1,13 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ const utils_1 = require("../utils");
7
+ const texts_1 = require("../texts");
8
+ const run_1 = __importDefault(require("./crawl/run"));
9
+ const link_1 = __importDefault(require("./crawl/link"));
10
+ const commandCrawl = (0, utils_1.newCommand)('crawl', texts_1.DESC_COMMAND_CRAWL);
11
+ commandCrawl.addCommand(run_1.default);
12
+ commandCrawl.addCommand(link_1.default);
13
+ exports.default = commandCrawl;
@@ -34,6 +34,20 @@ const OAUTH_CLIENT_APP_SCOPES = [
34
34
  function getRedirectUrl(api) {
35
35
  return `https://${api}/oauth2/cli`;
36
36
  }
37
+ async function waitForOpen(abortCode, seconds = 3) {
38
+ output_1.default.normal(texts_1.TXT_LOGIN_OPENING);
39
+ let star = true;
40
+ const now = Date.now();
41
+ for (;;) {
42
+ output_1.default.clearPreviousLine();
43
+ output_1.default.normal(star ? texts_1.TXT_LOGIN_OPENING_STAR : texts_1.TXT_LOGIN_OPENING);
44
+ star = !star;
45
+ await (0, utils_1.sleep)(500);
46
+ if (Date.now() > now + seconds * 1000 || abortCode.signal.aborted)
47
+ break;
48
+ }
49
+ output_1.default.clearPreviousLine();
50
+ }
37
51
  async function oauthServer(api, clientId, clientSecret) {
38
52
  const ApiClient = require('../api/client').default;
39
53
  const open = require('open').default;
@@ -48,7 +62,7 @@ async function oauthServer(api, clientId, clientSecret) {
48
62
  if (res)
49
63
  res.end(urlState);
50
64
  s.close();
51
- abortCode.abort('OK');
65
+ abortCode.abort();
52
66
  resolve({
53
67
  token: response.access_token,
54
68
  refreshToken: response.refresh_token,
@@ -84,8 +98,12 @@ async function oauthServer(api, clientId, clientSecret) {
84
98
  catch {
85
99
  // do nothing
86
100
  }
87
- output_1.default.normal(texts_1.TXT_LOGIN_OPEN_URL);
88
- output_1.default.normal(url.replace(/%/g, '%%'));
101
+ await waitForOpen(abortCode);
102
+ if (abortCode.signal.aborted)
103
+ return;
104
+ output_1.default.dim(texts_1.TXT_LOGIN_OPEN_URL);
105
+ output_1.default.normal('');
106
+ output_1.default.dim(url.replace(/%/g, '%%'));
89
107
  output_1.default.normal('');
90
108
  for (;;) {
91
109
  try {
@@ -96,6 +114,7 @@ async function oauthServer(api, clientId, clientSecret) {
96
114
  }
97
115
  catch (err) {
98
116
  if (err.name === 'AbortPromptError') {
117
+ output_1.default.clearPreviousLine();
99
118
  break;
100
119
  }
101
120
  else if (err.name === 'ExitPromptError') {
@@ -211,7 +230,6 @@ commandLogin.action(async (options) => {
211
230
  if (!output_1.default.isStdInTTY()) {
212
231
  output_1.default.exitError(texts_1.ERR_TOKEN_NOT_PROVIDED);
213
232
  }
214
- output_1.default.normal(texts_1.TXT_LOGIN_OAUTH);
215
233
  const { token, refreshToken, clientId, clientSecret, clientToken } = await authorizeOAuth(api);
216
234
  await authorizeToken(api, token, refreshToken, clientId, clientSecret, clientToken, workspace);
217
235
  }
@@ -0,0 +1,101 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ const utils_1 = require("../../../utils");
7
+ const texts_1 = require("../../../texts");
8
+ const output_1 = __importDefault(require("../../../output"));
9
+ const input_1 = __importDefault(require("../../../input"));
10
+ const commandPipelineRunStart = (0, utils_1.newCommand)('start', texts_1.DESC_COMMAND_PIPELINE_RUN_START);
11
+ commandPipelineRunStart.option('-w, --workspace <domain>', texts_1.OPTION_REST_API_WORKSPACE);
12
+ commandPipelineRunStart.option('-p, --project <name>', texts_1.OPTION_REST_API_PROJECT);
13
+ commandPipelineRunStart.option('-b, --branch <branch>', texts_1.OPTION_PIPELINE_RUN_BRANCH);
14
+ commandPipelineRunStart.option('-t, --tag <tag>', texts_1.OPTION_PIPELINE_RUN_TAG);
15
+ commandPipelineRunStart.option('-pr, --pull-request <pull request>', texts_1.OPTION_PIPELINE_RUN_PULL_REQUEST);
16
+ commandPipelineRunStart.option('-r, --revision <revision>', texts_1.OPTION_PIPELINE_RUN_REVISION);
17
+ commandPipelineRunStart.option('--comment <comment>', texts_1.OPTION_PIPELINE_RUN_COMMENT);
18
+ commandPipelineRunStart.option('-f, --refresh', texts_1.OPTION_PIPELINE_RUN_REFRESH);
19
+ commandPipelineRunStart.option('-c, --clear-cache', texts_1.OPTION_PIPELINE_RUN_CLEAR_CACHE);
20
+ commandPipelineRunStart.option('--priority <priority>', texts_1.OPTION_PIPELINE_RUN_PRIORITY);
21
+ commandPipelineRunStart.option('-v, --variable <variables...>', texts_1.OPTION_PIPELINE_RUN_VAR);
22
+ commandPipelineRunStart.option('-vm, --variable-masked <variables...>', texts_1.OPTION_PIPELINE_RUN_VAR);
23
+ commandPipelineRunStart.option('--schedule <date>', texts_1.OPTION_PIPELINE_RUN_DELAY);
24
+ commandPipelineRunStart.option('--action <actions...>', texts_1.OPTION_PIPELINE_RUN_ACTION);
25
+ commandPipelineRunStart.option('--no-wait', texts_1.OPTION_PIPELINE_RUN_NO_WAIT);
26
+ commandPipelineRunStart.argument('<identifier>', texts_1.OPTION_PIPELINE_RUN_ARGUMENT);
27
+ commandPipelineRunStart.usage('<identifier> [options]');
28
+ commandPipelineRunStart.addHelpText('after', texts_1.EXAMPLE_PIPELINE_RUN_START);
29
+ commandPipelineRunStart.action(async (identifier, options) => {
30
+ const workspace = input_1.default.restApiWorkspace(options.workspace);
31
+ const project = input_1.default.restApiProject(options.project);
32
+ const client = input_1.default.restApiTokenClient();
33
+ const data = await client.getPipelineByIdentifier(workspace, project, identifier);
34
+ if (!data || !data.domain) {
35
+ output_1.default.exitError(texts_1.ERR_WORKSPACE_NOT_FOUND);
36
+ }
37
+ if (!data.project_identifier) {
38
+ output_1.default.exitError(texts_1.ERR_PROJECT_NOT_FOUND);
39
+ }
40
+ if (!data.pipeline_id) {
41
+ output_1.default.exitError(texts_1.ERR_PIPELINE_NOT_FOUND);
42
+ }
43
+ const body = {};
44
+ if (options.branch) {
45
+ body.branch = {
46
+ name: options.branch,
47
+ };
48
+ }
49
+ if (options.tag) {
50
+ body.tag = {
51
+ name: options.tag,
52
+ };
53
+ }
54
+ if (options.pullRequest) {
55
+ body.pull_request = {
56
+ name: `pull/${options.pullRequest}`,
57
+ };
58
+ }
59
+ if (options.revision) {
60
+ body.to_revision = {
61
+ revision: options.revision,
62
+ };
63
+ }
64
+ if (options.comment) {
65
+ body.comment = options.comment;
66
+ }
67
+ if (options.refresh) {
68
+ body.refresh = true;
69
+ }
70
+ if (options.clearCache) {
71
+ body.clear_cache = true;
72
+ }
73
+ const priority = input_1.default.pipelineRunPriority(options.priority);
74
+ if (priority) {
75
+ body.priority = priority;
76
+ }
77
+ body.variables = [];
78
+ if (options.variable) {
79
+ body.variables = body.variables.concat(input_1.default.pipelineRunVariable(options.variable, false));
80
+ }
81
+ if (options.variableMasked) {
82
+ body.variables = body.variables.concat(input_1.default.pipelineRunVariable(options.variableMasked, true));
83
+ }
84
+ const delay = input_1.default.pipelineRunDelay(options.schedule);
85
+ if (delay) {
86
+ body.delay_until = delay;
87
+ }
88
+ const actions = input_1.default.pipelineRunAction(options.action);
89
+ if (actions) {
90
+ body.actions_to_run = actions;
91
+ }
92
+ const result = await client.pipelineRun(workspace, project, data.pipeline_id, body);
93
+ if (options.noWait) {
94
+ output_1.default.exitSuccess((0, texts_1.TXT_PIPELINE_RUN_SUCCESS)(result.id, result.html_url));
95
+ }
96
+ else {
97
+ const OutputPipeline = require('../../../output/pipeline').default;
98
+ await OutputPipeline.runStatus(client, workspace, project, data.pipeline_id, result.id, false);
99
+ }
100
+ });
101
+ exports.default = commandPipelineRunStart;