bdy 1.19.7-dev → 1.19.9-dev-pipeline
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/distTs/package.json +1 -1
- package/distTs/src/api/client.js +103 -2
- package/distTs/src/command/crawl/link.js +61 -0
- package/distTs/src/command/crawl/run.js +147 -0
- package/distTs/src/command/crawl/validation.js +154 -0
- package/distTs/src/command/crawl.js +13 -0
- package/distTs/src/command/pipeline/create.js +45 -0
- package/distTs/src/command/pipeline/get.js +42 -0
- package/distTs/src/command/pipeline/list.js +43 -0
- package/distTs/src/command/pipeline/run/apply.js +62 -0
- package/distTs/src/command/pipeline/run/approve.js +62 -0
- package/distTs/src/command/pipeline/run/cancel.js +36 -0
- package/distTs/src/command/pipeline/run/list.js +52 -0
- package/distTs/src/command/pipeline/run/logs.js +37 -0
- package/distTs/src/command/pipeline/run/retry.js +36 -0
- package/distTs/src/command/pipeline/run/start.js +96 -0
- package/distTs/src/command/pipeline/run/status.js +35 -0
- package/distTs/src/command/pipeline/run.js +22 -125
- package/distTs/src/command/pipeline/update.js +41 -0
- package/distTs/src/command/pipeline/yaml.js +38 -0
- package/distTs/src/command/pipeline.js +23 -0
- package/distTs/src/command/project/link.js +11 -11
- package/distTs/src/command/tests/capture/validation.js +46 -0
- package/distTs/src/command/tests/capture.js +103 -0
- package/distTs/src/command/tests/unit/link.js +61 -0
- package/distTs/src/command/tests/unit/upload.js +91 -0
- package/distTs/src/command/tests/unit.js +13 -0
- package/distTs/src/command/tests/visual/link.js +61 -0
- package/distTs/src/command/tests/visual/session/close.js +32 -0
- package/distTs/src/command/tests/visual/session/create.js +86 -0
- package/distTs/src/command/tests/visual/session.js +13 -0
- package/distTs/src/command/tests/visual/setup.js +20 -0
- package/distTs/src/command/tests/visual/shared/validation.js +145 -0
- package/distTs/src/command/tests/visual/upload.js +141 -0
- package/distTs/src/command/tests/visual.js +17 -0
- package/distTs/src/command/tests.js +15 -0
- package/distTs/src/crawl/requests.js +141 -0
- package/distTs/src/input.js +11 -10
- package/distTs/src/output/pipeline.js +1563 -0
- package/distTs/src/output.js +149 -31
- package/distTs/src/texts.js +84 -34
- package/distTs/src/tunnel/output/interactive/tunnel.js +2 -2
- package/distTs/src/types/crawl.js +2 -0
- package/distTs/src/types/pipeline.js +424 -0
- package/distTs/src/unitTest/context.js +26 -0
- package/package.json +1 -1
- package/distTs/src/command/project/get.js +0 -18
- package/distTs/src/command/project/set.js +0 -31
- package/distTs/src/command/sandbox/get/yaml.js +0 -30
- package/distTs/src/command/vt/scrape.js +0 -193
package/distTs/package.json
CHANGED
package/distTs/src/api/client.js
CHANGED
@@ -216,6 +216,79 @@ class ApiClient {
             throw new Error(texts_1.ERR_REST_API_GENERAL_ERROR);
         }
     }
+    async getPipelineRunActionLogs(workspace, project, pipelineId, executionId, actionExecutionId, offset, limit) {
+        const query = {
+            offset: String(offset),
+            limit: String(limit),
+        };
+        return await this.request({
+            method: 'GET',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines/${encodeURIComponent(pipelineId)}/executions/${encodeURIComponent(executionId)}/action_executions/${encodeURIComponent(actionExecutionId)}/logs`,
+            query,
+            parseResponseBody: true,
+        });
+    }
+    async getPipelineRunActionExecution(workspace, project, pipelineId, executionId, actionExecutionId) {
+        return await this.request({
+            method: 'GET',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines/${encodeURIComponent(pipelineId)}/executions/${encodeURIComponent(executionId)}/action_executions/${encodeURIComponent(actionExecutionId)}`,
+            parseResponseBody: true,
+        });
+    }
+    async updatePipelineYml(workspace, project, pipelineId, body) {
+        return await this.request({
+            method: 'PATCH',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines/${encodeURIComponent(pipelineId)}/yaml`,
+            body,
+            parseResponseBody: true
+        });
+    }
+    async createPipeline(workspace, project, body) {
+        return await this.request({
+            method: 'POST',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines`,
+            body,
+            parseResponseBody: true
+        });
+    }
+    async getPipelines(workspace, project, page = 1, perPage = 10) {
+        const query = {
+            page: String(page),
+            per_page: String(perPage),
+        };
+        return await this.request({
+            method: 'GET',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines`,
+            query,
+            parseResponseBody: true,
+        });
+    }
+    async getPipelineYml(workspace, project, pipelineId) {
+        return await this.request({
+            method: 'GET',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines/${encodeURIComponent(pipelineId)}/yaml`,
+            parseResponseBody: true,
+        });
+    }
+    async getPipeline(workspace, project, pipelineId) {
+        return await this.request({
+            method: 'GET',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines/${encodeURIComponent(pipelineId)}`,
+            parseResponseBody: true,
+        });
+    }
+    async getPipelineRuns(workspace, project, pipelineId, page = 1, perPage = 10) {
+        const query = {
+            page: String(page),
+            per_page: String(perPage),
+        };
+        return await this.request({
+            method: 'GET',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines/${encodeURIComponent(pipelineId)}/executions`,
+            query,
+            parseResponseBody: true,
+        });
+    }
     async getPipelineRun(workspace, project, pipelineId, executionId) {
         return await this.request({
             method: 'GET',
@@ -256,6 +329,34 @@ class ApiClient {
             parseResponseBody: true,
         });
     }
+    async pipelineRunRetry(workspace, project, pipelineId, runId) {
+        return await this.request({
+            method: 'PATCH',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines/${encodeURIComponent(pipelineId)}/executions/${encodeURIComponent(runId)}`,
+            body: {
+                operation: 'RETRY',
+            },
+            parseResponseBody: true,
+        });
+    }
+    async pipelineRunApply(workspace, project, pipelineId, runId, body) {
+        return await this.request({
+            method: 'PATCH',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines/${encodeURIComponent(pipelineId)}/executions/${encodeURIComponent(runId)}`,
+            body,
+            parseResponseBody: true,
+        });
+    }
+    async pipelineRunCancel(workspace, project, pipelineId, runId) {
+        return await this.request({
+            method: 'PATCH',
+            path: `/workspaces/${encodeURIComponent(workspace)}/projects/${encodeURIComponent(project)}/pipelines/${encodeURIComponent(pipelineId)}/executions/${encodeURIComponent(runId)}`,
+            body: {
+                operation: 'CANCEL',
+            },
+            parseResponseBody: true,
+        });
+    }
     async pipelineRun(workspace, project, pipelineId, body) {
         return await this.request({
             method: 'POST',
@@ -721,14 +822,14 @@ class ApiClient {
         return await this.request({
             method: 'POST',
             path: `/workspaces/${encodeURIComponent(workspace)}/sandboxes/${encodeURIComponent(sandboxId)}/apps/${encodeURIComponent(appId)}/stop`,
-            parseResponseBody: true
+            parseResponseBody: true,
         });
     }
     async startSandboxApp(workspace, sandboxId, appId) {
         return await this.request({
             method: 'POST',
             path: `/workspaces/${encodeURIComponent(workspace)}/sandboxes/${encodeURIComponent(sandboxId)}/apps/${encodeURIComponent(appId)}/start`,
-            parseResponseBody: true
+            parseResponseBody: true,
         });
     }
     async getSandboxAppLogs(workspace, sandboxId, appId, cursor) {
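A minimal usage sketch of the pipeline endpoints added to ApiClient above; `client` (an ApiClient instance), `showPipelines`, and `someRunId` are placeholders, and response shapes beyond what the diff shows (`pipelines`, `id`) are assumptions:

    // Hypothetical helper exercising the methods added in this diff.
    async function showPipelines(client, workspace, project, someRunId) {
        const { pipelines } = await client.getPipelines(workspace, project, 1, 25);     // GET .../pipelines?page=1&per_page=25
        const pipeline = await client.getPipeline(workspace, project, pipelines[0].id); // GET .../pipelines/{id}
        await client.getPipelineYml(workspace, project, pipeline.id);                   // GET .../pipelines/{id}/yaml
        await client.pipelineRunCancel(workspace, project, pipeline.id, someRunId);     // PATCH .../executions/{runId}, body { operation: 'CANCEL' }
    }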
package/distTs/src/command/crawl/link.js
ADDED
@@ -0,0 +1,61 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils_1 = require("../../utils");
+const texts_1 = require("../../texts");
+const output_1 = __importDefault(require("../../output"));
+const commandCrawlLink = (0, utils_1.newCommand)('link', texts_1.DESC_COMMAND_CRAWL_LINK);
+commandCrawlLink.option('-w, --workspace <workspace>', texts_1.OPTION_REST_API_WORKSPACE);
+commandCrawlLink.option('-p, --project <project>', texts_1.OPTION_REST_API_PROJECT);
+commandCrawlLink.option('-s, --suite <suite>', texts_1.OPTION_SUITE_IDENTIFIER);
+commandCrawlLink.action(async (options) => {
+    const Input = require('../../input').default;
+    const ProjectCfg = require('../../project/cfg').default;
+    output_1.default.handleSignals();
+    const workspace = Input.restApiWorkspace(options.workspace);
+    const project = Input.restApiProject(options.project);
+    const client = Input.restApiTokenClient(false, options.api, options.region);
+    let suiteIdentifier = options.suite;
+    if (!suiteIdentifier) {
+        const opt = await output_1.default.inputMenuAdv(texts_1.TXT_COMMAND_SUITE_SELECT, [
+            {
+                name: texts_1.TXT_COMMAND_SUITE_CREATE_NEW,
+                description: texts_1.TXT_COMMAND_SUITE_CREATE_NEW_CRAWL_DESC,
+                value: 'new',
+            },
+            {
+                name: texts_1.TXT_COMMAND_SUITE_LINK_EXISTING,
+                description: texts_1.TXT_COMMAND_SUITE_LINK_EXISTING_DESC,
+                value: 'existing',
+            },
+        ]);
+        if (opt === 'new') {
+            const name = await output_1.default.inputString(texts_1.TXT_COMMAND_SUITE_NAME);
+            const response = await client.createCrawlSuite(workspace, project, { name, identifier: name });
+            output_1.default.okSign();
+            output_1.default.normal(texts_1.TXT_COMMAND_SUITE_CREATED);
+            suiteIdentifier = response.identifier;
+        }
+        else {
+            const result = await client.getCrawlSuites(workspace, project);
+            const suites = result?.suites || [];
+            if (!suites.length) {
+                output_1.default.exitError(texts_1.ERR_NO_CRAWL_SUITES);
+            }
+            if (suites.length === 1) {
+                suiteIdentifier = suites[0].identifier;
+            }
+            else {
+                const items = suites.map((s) => s.name || s.identifier);
+                const index = await output_1.default.inputMenu(texts_1.TXT_COMMAND_CRAWL_LINK_SELECT, items);
+                suiteIdentifier = suites[index].identifier;
+            }
+        }
+    }
+    ProjectCfg.setSuite((0, utils_1.getWorkingDir)(), 'crawl', suiteIdentifier);
+    output_1.default.okSign();
+    output_1.default.exitSuccess(texts_1.TXT_COMMAND_CRAWL_LINK_SUCCESS);
+});
+exports.default = commandCrawlLink;
package/distTs/src/command/crawl/run.js
ADDED
@@ -0,0 +1,147 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils_1 = require("../../utils");
+const commander_1 = require("commander");
+const texts_1 = require("../../texts");
+const output_1 = __importDefault(require("../../output"));
+const node_zlib_1 = require("node:zlib");
+const promises_1 = require("node:stream/promises");
+const node_fs_1 = require("node:fs");
+const node_path_1 = __importDefault(require("node:path"));
+const promises_2 = require("node:fs/promises");
+const commandCrawlRun = (0, utils_1.newCommand)('run', texts_1.DESC_COMMAND_CRAWL_RUN);
+commandCrawlRun.argument('[url]', texts_1.OPTION_CRAWL_URL);
+commandCrawlRun.option('--follow', texts_1.OPTION_CRAWL_FOLLOW, false);
+commandCrawlRun.option('--respectRobots', texts_1.OPTION_COMPARE_RESPECT_ROBOTS, false);
+commandCrawlRun.addOption(new commander_1.Option('--outputType <type>', texts_1.OPTION_CRAWL_OUTPUT_TYPE).choices([
+    'jpeg',
+    'png',
+    'md',
+    'html',
+]));
+commandCrawlRun.option('--outputTypes <json>', texts_1.OPTION_CRAWL_OUTPUT_TYPES);
+commandCrawlRun.option('--quality <quality>', texts_1.OPTION_CRAWL_QUALITY);
+commandCrawlRun.option('--fullPage', texts_1.OPTION_CRAWL_FULL_PAGE, false);
+commandCrawlRun.option('--cssSelector <selector>', texts_1.OPTION_CRAWL_CSS_SELECTOR);
+commandCrawlRun.option('--xpathSelector <selector>', texts_1.OPTION_CRAWL_XPATH_SELECTOR);
+commandCrawlRun.addOption(new commander_1.Option('--colorScheme <scheme>', texts_1.OPTION_CRAWL_COLOR_SCHEME).choices([
+    'LIGHT',
+    'DARK',
+    'LIGHT_AND_DARK',
+]));
+commandCrawlRun.option('--browsers <browsers>', texts_1.OPTION_CRAWL_BROWSERS);
+commandCrawlRun.option('--devices <devices>', texts_1.OPTION_CRAWL_DEVICES);
+commandCrawlRun.option('--waitFor <waitFors...>', texts_1.OPTION_COMPARE_WAIT_FOR);
+commandCrawlRun.option('--cookie <cookies...>', texts_1.OPTION_COMPARE_COOKIE);
+commandCrawlRun.option('--header <headers...>', texts_1.OPTION_COMPARE_HEADER);
+commandCrawlRun.option('--localStorage <items...>', texts_1.OPTION_CRAWL_LOCAL_STORAGE);
+commandCrawlRun.option('--delay <delays...>', texts_1.OPTION_CRAWL_DELAY);
+commandCrawlRun.option('--outputDir <dir>', texts_1.OPTION_CRAWL_OUTPUT_DIR, '.');
+commandCrawlRun.action(async (inputUrl, options) => {
+    const { downloadCrawlPackage, sendCrawl } = require('../../crawl/requests');
+    const { createCrawlContext, applyToken, applyCiAndCommitInfo } = require('../../visualTest/context');
+    const { validateInputAndOptions } = require('./validation');
+    const { getCiAndGitInfo } = require('@buddy-works/ci-info');
+    const tar = require('tar-stream');
+    const Input = require('../../input').default;
+    const token = await Input.crawlSuiteToken();
+    if (!token) {
+        output_1.default.exitError(texts_1.ERR_MISSING_CRAWL_TOKEN);
+    }
+    const ctx = createCrawlContext();
+    applyToken(ctx, token);
+    const { url, follow, respectRobots, outputTypes, outputDir, colorScheme, browsers, devices, cookies, requestHeaders, delays, waitForSelectors, localStorage, } = validateInputAndOptions(inputUrl, options);
+    try {
+        const ciAndGitInfo = await getCiAndGitInfo({});
+        applyCiAndCommitInfo(ctx, ciAndGitInfo);
+        const { buildId } = await sendCrawl(ctx, url, follow, respectRobots, outputTypes, colorScheme, browsers, devices, cookies, requestHeaders, delays, waitForSelectors, localStorage);
+        const status = await watchSessionStatus(ctx, buildId);
+        if (!status.ok) {
+            output_1.default.exitError(`Crawl session failed: ${status.error}`);
+        }
+        output_1.default.normal('Downloading crawl package');
+        const crawlPackageStream = await downloadCrawlPackage(ctx, buildId);
+        const brotliDecompressor = (0, node_zlib_1.createBrotliDecompress)();
+        const unpack = tar.extract();
+        unpack.on('entry', async (header, stream, next) => {
+            const currentDir = process.cwd();
+            const preparedOutputDir = outputDir.startsWith('.')
+                ? node_path_1.default.join(currentDir, outputDir)
+                : outputDir;
+            const newFilePath = node_path_1.default.join(preparedOutputDir, header.name);
+            try {
+                if (header.type === 'file') {
+                    await (0, promises_2.mkdir)(node_path_1.default.dirname(newFilePath), { recursive: true });
+                    const fileWriteStream = (0, node_fs_1.createWriteStream)(newFilePath);
+                    await (0, promises_1.pipeline)(stream, fileWriteStream);
+                    next();
+                }
+                else {
+                    stream.resume();
+                    next();
+                }
+            }
+            catch (entryError) {
+                output_1.default.error(`Error processing entry ${header.name}: ${entryError}`);
+                next(entryError);
+            }
+        });
+        await (0, promises_1.pipeline)(crawlPackageStream, brotliDecompressor, unpack);
+        output_1.default.exitSuccess('Downloading crawl package finished');
+    }
+    catch (error) {
+        output_1.default.exitError(`${error}`);
+    }
+});
+async function watchSessionStatus(ctx, buildId) {
+    const { connectToCrawlSession } = require('../../crawl/requests');
+    return new Promise((resolve) => {
+        const eventSource = connectToCrawlSession(ctx, buildId);
+        eventSource.addEventListener('SESSION_STATUS', (event) => {
+            const data = JSON.parse(event.data);
+            if (data.status === 'STARTED') {
+                output_1.default.normal('Crawl session started');
+            }
+            else if (data.status === 'GATHER_URLS_COMPLETED') {
+                output_1.default.normal(`Gathering URLs completed, found ${data.text} URLs`);
+            }
+            else if (data.status === 'GATHER_URLS_FAILED') {
+                output_1.default.error('Gathering URLs failed');
+            }
+            else if (data.status === 'CRAWL_URL_COMPLETED') {
+                output_1.default.normal(`Crawling ${data.text} completed`);
+            }
+            else if (data.status === 'CRAWL_URL_FAILED') {
+                output_1.default.error(`Crawling ${data.text} failed`);
+            }
+            else if (data.status === 'CREATE_PACKAGE_COMPLETED') {
+                output_1.default.normal('Package created');
+            }
+            else if (data.status === 'CREATE_PACKAGE_FAILED') {
+                output_1.default.error('Package creation failed');
+            }
+            else if (data.status === 'ERROR') {
+                eventSource.close();
+                resolve({ ok: false, error: data.text });
+            }
+            else if (data.status === 'FINISHED') {
+                eventSource.close();
+                output_1.default.normal('Crawl session finished');
+                resolve({ ok: true });
+            }
+        });
+        eventSource.addEventListener('error', (event) => {
+            if (event.code) {
+                eventSource.close();
+                if (event.code === 410) {
+                    output_1.default.normal('Crawl session finished');
+                }
+                resolve({ ok: event.code === 410, error: event.code });
+            }
+        });
+    });
+}
+exports.default = commandCrawlRun;
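The download step in crawl/run.js above streams a Brotli-compressed tarball through node:zlib and tar-stream. A self-contained sketch of the same pattern against a local archive; `unpackBrotliTar`, the file name, and the destination directory are placeholders, not part of the package:

    const { createReadStream, createWriteStream } = require('node:fs');
    const { mkdir } = require('node:fs/promises');
    const { createBrotliDecompress } = require('node:zlib');
    const { pipeline } = require('node:stream/promises');
    const path = require('node:path');
    const tar = require('tar-stream');

    async function unpackBrotliTar(archivePath, outputDir) {
        const extract = tar.extract();
        extract.on('entry', async (header, stream, next) => {
            const target = path.join(outputDir, header.name);
            if (header.type === 'file') {
                await mkdir(path.dirname(target), { recursive: true }); // ensure parent dirs exist
                await pipeline(stream, createWriteStream(target));      // write entry to disk
            }
            else {
                stream.resume();                                        // skip non-file entries
            }
            next();
        });
        // source -> brotli decompress -> tar unpack
        await pipeline(createReadStream(archivePath), createBrotliDecompress(), extract);
    }

    unpackBrotliTar('./crawl-package.tar.br', './out').catch(console.error);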
package/distTs/src/command/crawl/validation.js
ADDED
@@ -0,0 +1,154 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.validateInputAndOptions = validateInputAndOptions;
+const zod_1 = require("zod");
+const output_1 = __importDefault(require("../../output"));
+const validation_1 = require("../tests/visual/shared/validation");
+const urlSchema = zod_1.z.string().url().optional();
+const browserSchema = zod_1.z.enum(['chrome', 'firefox', 'safari']);
+const browsersListSchema = zod_1.z
+    .string()
+    .transform((value) => value
+        .split(',')
+        .map((browser) => browser.trim().toLowerCase())
+        .filter((browser) => browser.length > 0))
+    .refine((browsers) => browsers.length > 0, {
+        message: 'Invalid browsers list. Supported values: chrome,firefox,safari',
+    })
+    .pipe(zod_1.z.array(browserSchema))
+    .transform((browsers) => Array.from(new Set(browsers.map((browser) => browser === 'chrome'
+        ? 'CHROMIUM'
+        : browser === 'firefox'
+            ? 'FIREFOX'
+            : 'WEBKIT'))));
+const optionsSchema = zod_1.z.object({
+    follow: zod_1.z.boolean(),
+    respectRobots: zod_1.z.boolean().optional(),
+    outputType: zod_1.z.enum(['jpeg', 'png', 'md', 'html']).optional(),
+    outputTypes: zod_1.z.string().optional(),
+    quality: zod_1.z.coerce.number().min(1).max(100).optional(),
+    outputDir: zod_1.z.string().default('.'),
+    fullPage: zod_1.z.boolean().optional(),
+    cssSelector: zod_1.z.string().optional(),
+    xpathSelector: zod_1.z.string().optional(),
+    colorScheme: zod_1.z.enum(['LIGHT', 'DARK', 'LIGHT_AND_DARK']).optional(),
+    browsers: browsersListSchema.optional(),
+    devices: zod_1.z.string().optional(),
+    delay: validation_1.delaySchema,
+    waitFor: validation_1.waitForSchema,
+    cookie: validation_1.cookieSchema,
+    header: validation_1.headerSchema,
+    localStorage: zod_1.z
+        .array(zod_1.z.string().regex(/^(?:([^:]+)::)?([^=]+)=(.*)$/, {
+            message: "LocalStorage option must follow pattern '[scope::]key=value' (scope is optional)",
+        }))
+        .optional()
+        .transform((value) => value?.map((v) => {
+            const { scope, key, value } = (0, validation_1.parseScopedKeyValue)(v);
+            return { scope, key, value };
+        })),
+});
+function validateInputAndOptions(input, options) {
+    try {
+        const url = urlSchema.parse(input);
+        const { follow, respectRobots, outputType, outputTypes: rawOutputTypes, quality, outputDir, fullPage, cssSelector, xpathSelector, colorScheme, browsers: parsedBrowsers, devices: rawDevices, delay, waitFor, cookie, header, localStorage, } = optionsSchema.parse(options);
+        let parsedOutputTypes;
+        if (rawOutputTypes) {
+            try {
+                const outputTypeEntrySchema = zod_1.z.array(zod_1.z
+                    .object({
+                        type: zod_1.z.string().transform((v) => v.toUpperCase()),
+                        selector: zod_1.z
+                            .object({
+                                type: zod_1.z.enum(['CSS', 'XPATH']).optional(),
+                                value: zod_1.z.string().optional(),
+                            })
+                            .optional(),
+                        quality: zod_1.z.number().min(1).max(100).optional(),
+                        fullPage: zod_1.z.boolean().optional(),
+                    })
+                    .transform((data) => ({
+                        ...data,
+                        type: data.type,
+                    })));
+                parsedOutputTypes = outputTypeEntrySchema.parse(JSON.parse(rawOutputTypes));
+            }
+            catch {
+                output_1.default.exitError("Invalid --outputTypes value. Use JSON array, e.g. --outputTypes '[{\"type\":\"png\"},{\"type\":\"jpeg\",\"quality\":80}]'");
+            }
+        }
+        else if (outputType) {
+            if (typeof quality === 'number' && outputType !== 'jpeg') {
+                output_1.default.exitError('Quality is only supported for jpeg output type, use --outputType jpeg');
+            }
+            if (cssSelector && xpathSelector) {
+                output_1.default.exitError('Only one of --cssSelector or --xpathSelector can be used');
+            }
+            const entry = {
+                type: outputType.toUpperCase(),
+            };
+            if (cssSelector) {
+                entry.selector = { type: 'CSS', value: cssSelector };
+            }
+            else if (xpathSelector) {
+                entry.selector = { type: 'XPATH', value: xpathSelector };
+            }
+            if (typeof quality === 'number') {
+                entry.quality = quality;
+            }
+            if (fullPage) {
+                entry.fullPage = fullPage;
+            }
+            parsedOutputTypes = [entry];
+        }
+        let parsedDevices;
+        if (rawDevices) {
+            try {
+                const dimensionSchema = zod_1.z
+                    .string()
+                    .regex(/^\d+x\d+$/, 'Must be in format "widthxheight"')
+                    .transform((val) => {
+                        const [width, height] = val.split('x').map(Number);
+                        return { width, height };
+                    });
+                const deviceSchema = zod_1.z.array(zod_1.z.object({
+                    name: zod_1.z.string().optional(),
+                    viewport: dimensionSchema,
+                    screen: dimensionSchema,
+                    devicePixelRatio: zod_1.z.number().positive(),
+                    isMobile: zod_1.z.boolean(),
+                }));
+                parsedDevices = deviceSchema.parse(JSON.parse(rawDevices));
+            }
+            catch {
+                output_1.default.exitError('Invalid --devices value. Use JSON array, e.g. --devices \'[{"viewport":"1920x1080","screen":"1920x1080","devicePixelRatio":1,"isMobile":false}]\'');
+            }
+        }
+        return {
+            url,
+            follow,
+            respectRobots,
+            outputTypes: parsedOutputTypes,
+            outputDir,
+            colorScheme,
+            browsers: parsedBrowsers,
+            devices: parsedDevices,
+            cookies: cookie,
+            requestHeaders: header,
+            delays: delay ?? [],
+            waitForSelectors: waitFor,
+            localStorage,
+        };
+    }
+    catch (error) {
+        if (error instanceof zod_1.ZodError) {
+            output_1.default.exitError(error.errors.map((e) => `${e.path}: ${e.message}`).join(', '));
+        }
+        else {
+            throw error;
+        }
+    }
+}
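The browsersListSchema above turns a comma-separated flag value into engine names. A self-contained sketch of the same Zod pattern, independent of this package (the `.refine` step is omitted for brevity):

    const { z } = require('zod');

    // Comma list -> trimmed lowercase tokens -> validated enum -> engine names.
    const browsers = z
        .string()
        .transform((value) => value.split(',').map((b) => b.trim().toLowerCase()).filter(Boolean))
        .pipe(z.array(z.enum(['chrome', 'firefox', 'safari'])))
        .transform((list) => Array.from(new Set(list.map((b) =>
            b === 'chrome' ? 'CHROMIUM' : b === 'firefox' ? 'FIREFOX' : 'WEBKIT'))));

    console.log(browsers.parse('Chrome, firefox')); // [ 'CHROMIUM', 'FIREFOX' ]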
package/distTs/src/command/crawl.js
ADDED
@@ -0,0 +1,13 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils_1 = require("../utils");
+const texts_1 = require("../texts");
+const run_1 = __importDefault(require("./crawl/run"));
+const link_1 = __importDefault(require("./crawl/link"));
+const commandCrawl = (0, utils_1.newCommand)('crawl', texts_1.DESC_COMMAND_CRAWL);
+commandCrawl.addCommand(run_1.default);
+commandCrawl.addCommand(link_1.default);
+exports.default = commandCrawl;
package/distTs/src/command/pipeline/create.js
ADDED
@@ -0,0 +1,45 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils_1 = require("../../utils");
+const texts_1 = require("../../texts");
+const input_1 = __importDefault(require("../../input"));
+const output_1 = __importDefault(require("../../output"));
+const commandPipelineCreate = (0, utils_1.newCommand)('create', texts_1.DESC_COMMAND_PIPELINE_CREATE);
+commandPipelineCreate.alias('add');
+commandPipelineCreate.option('-w, --workspace <domain>', texts_1.OPTION_REST_API_WORKSPACE);
+commandPipelineCreate.option('-p, --project <name>', texts_1.OPTION_REST_API_PROJECT);
+commandPipelineCreate.option('-n, --name <name>', texts_1.OPTION_PIPELINE_NAME);
+commandPipelineCreate.option('-i, --identifier <identifier>', texts_1.OPTION_PIPELINE_IDENTIFIER);
+commandPipelineCreate.option('--yaml <content|@path>', texts_1.OPTION_PIPELINE_YAML);
+commandPipelineCreate.addHelpText('after', `\nEXAMPLES:${texts_1.EXAMPLE_PIPELINE_CREATE}`);
+commandPipelineCreate.action(async (options) => {
+    const workspace = input_1.default.restApiWorkspace(options.workspace);
+    const project = input_1.default.restApiProject(options.project);
+    const client = input_1.default.restApiTokenClient();
+    const humanId = require('human-id').default;
+    const timestamp = Date.now().toString(36);
+    const baseName = humanId({ separator: '-', capitalize: false });
+    const defaultIdentifier = `${baseName}-${timestamp}`;
+    let body = {
+        name: options.name || options.identifier || baseName,
+        identifier: options.identifier || defaultIdentifier,
+    };
+    let result = await client.createPipeline(workspace, project, body);
+    if (options.yaml) {
+        const yaml = input_1.default.restApiYaml(options.yaml);
+        body = {
+            yaml: Buffer.from(yaml, 'utf8').toString('base64'),
+        };
+        await client.updatePipelineYml(workspace, project, result.id, body);
+    }
+    result = await client.getPipeline(workspace, project, result.id);
+    output_1.default.dim('Identifier: ', false);
+    output_1.default.cyan(result.identifier);
+    output_1.default.cyan(result.html_url);
+    output_1.default.green(texts_1.TXT_PIPELINE_CREATED);
+    output_1.default.exitNormal();
+});
+exports.default = commandPipelineCreate;
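pipeline/create.js above sends the pipeline definition to updatePipelineYml as base64-encoded YAML. A small sketch of that encoding step; the YAML content is a placeholder for illustration, not taken from this package:

    const yaml = [
        '- pipeline: "build"',
        '  on: "CLICK"',
    ].join('\n'); // placeholder pipeline definition

    const body = { yaml: Buffer.from(yaml, 'utf8').toString('base64') };
    // Decode back to verify the round trip:
    console.log(Buffer.from(body.yaml, 'base64').toString('utf8') === yaml); // true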
package/distTs/src/command/pipeline/get.js
ADDED
@@ -0,0 +1,42 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils_1 = require("../../utils");
+const texts_1 = require("../../texts");
+const input_1 = __importDefault(require("../../input"));
+const output_1 = __importDefault(require("../../output"));
+const commandPipelineGet = (0, utils_1.newCommand)('get', texts_1.DESC_COMMAND_PIPELINE_GET);
+commandPipelineGet.option('-w, --workspace <domain>', texts_1.OPTION_REST_API_WORKSPACE);
+commandPipelineGet.option('-p, --project <name>', texts_1.OPTION_REST_API_PROJECT);
+commandPipelineGet.argument('<identifier>', texts_1.OPTION_PIPELINE_IDENTIFIER);
+commandPipelineGet.addHelpText('after', `\nEXAMPLES:${texts_1.EXAMPLE_PIPELINE_GET}`);
+commandPipelineGet.action(async (identifier, options) => {
+    const workspace = input_1.default.restApiWorkspace(options.workspace);
+    const project = input_1.default.restApiProject(options.project);
+    const client = input_1.default.restApiTokenClient();
+    const data = await client.getPipelineByIdentifier(workspace, project, identifier);
+    if (!data || !data.domain) {
+        output_1.default.exitError(texts_1.ERR_WORKSPACE_NOT_FOUND);
+    }
+    if (!data.project_identifier) {
+        output_1.default.exitError(texts_1.ERR_PROJECT_NOT_FOUND);
+    }
+    if (!data.pipeline_id) {
+        output_1.default.exitError(texts_1.ERR_PIPELINE_NOT_FOUND);
+    }
+    const pipeline = await client.getPipeline(workspace, project, data.pipeline_id);
+    const OutputPipeline = require('../../output/pipeline').default;
+    const table = [
+        ['Field', 'Value'],
+        ['Name', pipeline.name || '-'],
+        ['Status', OutputPipeline.runStatusText(pipeline.last_execution_status || '-')],
+        ['ID', String(pipeline.id || '-')],
+        ['Identifier', pipeline.identifier || '-'],
+        ['URL', pipeline.html_url || '-'],
+    ];
+    output_1.default.table(table);
+    output_1.default.exitNormal();
+});
+exports.default = commandPipelineGet;
package/distTs/src/command/pipeline/list.js
ADDED
@@ -0,0 +1,43 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const utils_1 = require("../../utils");
+const texts_1 = require("../../texts");
+const input_1 = __importDefault(require("../../input"));
+const output_1 = __importDefault(require("../../output"));
+const commandPipelineList = (0, utils_1.newCommand)('list', texts_1.DESC_COMMAND_PIPELINE_LIST);
+commandPipelineList.alias('ls');
+commandPipelineList.option('-w, --workspace <domain>', texts_1.OPTION_REST_API_WORKSPACE);
+commandPipelineList.option('-p, --project <name>', texts_1.OPTION_REST_API_PROJECT);
+commandPipelineList.option('--page <number>', texts_1.OPTION_REST_API_PAGE, '1');
+commandPipelineList.option('--per-page <number>', texts_1.OPTION_REST_API_PER_PAGE, '10');
+commandPipelineList.addHelpText('after', `\nEXAMPLES:${texts_1.EXAMPLE_PIPELINE_LIST}`);
+commandPipelineList.action(async (options) => {
+    const page = input_1.default.restApiPage(options.page);
+    const perPage = input_1.default.restApiPerPage(options.perPage);
+    const workspace = input_1.default.restApiWorkspace(options.workspace);
+    const project = input_1.default.restApiProject(options.project);
+    const client = input_1.default.restApiTokenClient();
+    const r = await client.getPipelines(workspace, project, page, perPage);
+    if (!r.pipelines.length) {
+        output_1.default.exitError(texts_1.ERR_PIPELINES_NOT_FOUND);
+    }
+    const data = [
+        ['NAME', 'STATUS', 'ID', 'IDENTIFIER', 'URL']
+    ];
+    const OutputPipeline = require('../../output/pipeline').default;
+    for (const pipeline of r.pipelines) {
+        data.push([
+            pipeline.name || '-',
+            OutputPipeline.runStatusText(pipeline.last_execution_status || '-'),
+            String(pipeline.id || '-'),
+            pipeline.identifier || '-',
+            pipeline.html_url || '-',
+        ]);
+    }
+    output_1.default.table(data);
+    output_1.default.exitNormal();
+});
+exports.default = commandPipelineList;