bdy 1.8.16-dev → 1.8.18-dev
package/distTs/package.json (CHANGED)
package/distTs/src/output.js (CHANGED)
@@ -38,7 +38,12 @@ class Output {
     static error(txt) {
         terminal.red(`${txt}\n`);
     }
-    static
+    static debug(txt) {
+        if (context_1.debug) {
+            this.normal(txt);
+        }
+    }
+    static async debugAction(text, action) {
         if (context_1.debug) {
             const t1 = performance.now();
             const result = await action;
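The new `Output.debug` gates a message behind the package's debug flag, and `Output.debugAction` (whose body continues into the context lines above) awaits a promise and, in debug mode, times it. A minimal TypeScript sketch of how these helpers plausibly read before compilation; `debug` here stands in for the flag the compiled code reads as `context_1.debug`, and everything after `const result = await action;` is an assumed completion, since the diff cuts off there:

import { performance } from 'node:perf_hooks';

// Sketch only, not the package's confirmed source.
// `debug` stands in for the context module's flag (context_1.debug in the build).
const debug = process.env.BDY_DEBUG === '1'; // hypothetical flag source

class Output {
    static normal(txt: string): void {
        console.log(txt);
    }
    // Added in this diff: print only when debug mode is on.
    static debug(txt: string): void {
        if (debug) {
            this.normal(txt);
        }
    }
    // The declaration line replaces a removed line the extract truncates; the
    // visible body matches the diff's context lines, and the report after
    // `await action` is an assumption.
    static async debugAction<T>(text: string, action: Promise<T>): Promise<T> {
        if (debug) {
            const t1 = performance.now();
            const result = await action;
            this.normal(`${text} (${Math.round(performance.now() - t1)}ms)`);
            return result;
        }
        return action;
    }
}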
@@ -173,7 +173,7 @@ async function sendStorybook(snapshots, filePaths) {
     return message;
 }
 async function sendRequest({ url, payload, multipart, }) {
-    output_1.default.
+    output_1.default.debug((0, texts_1.LOG_SENDING_REQUEST)(`${serviceUrl}${url}`));
     const init = {
         method: 'GET',
         redirect: 'follow',
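The `(0, texts_1.LOG_SENDING_REQUEST)(...)` shape is just TypeScript's CommonJS output for calling an imported function with an undefined `this`; in source form the added line is a one-line debug log before the request options are built. A hedged source-form sketch using the `Output` sketch above, where the message string is illustrative and only the `LOG_SENDING_REQUEST` name and the `${serviceUrl}${url}` argument come from the diff:

// Hypothetical reconstruction; the actual message text is not visible in the diff.
const LOG_SENDING_REQUEST = (url: string): string => `Sending request to ${url}`;

declare const serviceUrl: string; // defined elsewhere in the module

// Source-form equivalent of the compiled line added to sendRequest:
Output.debug(LOG_SENDING_REQUEST(`${serviceUrl}${url}`));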
@@ -21,7 +21,7 @@ async function collectResources({ url, widths = [1280], resourceDiscoveryTimeout
         });
     }
     else if (scrapedUrl && missingWidths.length > 0) {
-        const { scrapedResourcesUrls, duplicatedResourcesUrls } = await output_1.default.
+        const { scrapedResourcesUrls, duplicatedResourcesUrls } = await output_1.default.debugAction((0, texts_1.DEBUG_RESOURCE_SCRAPPING_URL)(url), scrapeResources({
             url,
             widths: missingWidths,
             height,
@@ -51,7 +51,7 @@ async function collectResources({ url, widths = [1280], resourceDiscoveryTimeout
         });
     }
     else {
-        const { scrapedResourcesUrls, duplicatedResourcesUrls } = await output_1.default.
+        const { scrapedResourcesUrls, duplicatedResourcesUrls } = await output_1.default.debugAction((0, texts_1.DEBUG_RESOURCE_SCRAPPING_URL)(url), scrapeResources({
             url,
             widths,
             height,
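In both branches above, the call that the removed lines truncate is now routed through `debugAction`, so `scrapeResources` still resolves to the same `{ scrapedResourcesUrls, duplicatedResourcesUrls }` object while debug mode gets a labelled timing. In source form the wrapped call reads roughly as follows (the `declare` lines are stand-ins so the sketch stands alone; `DEBUG_RESOURCE_SCRAPPING_URL` is the package's own label factory, and `Output` is the sketch from the first hunk):

// Stand-ins so the sketch type-checks; the real definitions live in the package.
declare const DEBUG_RESOURCE_SCRAPPING_URL: (url: string) => string;
declare function scrapeResources(opts: {
    url: string;
    widths: number[];
    height: number;
}): Promise<{ scrapedResourcesUrls: string[]; duplicatedResourcesUrls: string[] }>;
declare const url: string, missingWidths: number[], height: number;

// debugAction resolves to whatever the wrapped promise resolves to, so the
// destructuring at the call site is unchanged by this diff.
const { scrapedResourcesUrls, duplicatedResourcesUrls } = await Output.debugAction(
    DEBUG_RESOURCE_SCRAPPING_URL(url),
    scrapeResources({ url, widths: missingWidths, height }),
);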
@@ -154,15 +154,15 @@ async function scrapeResources({ url, widths, height, resourceDiscoveryTimeout,
         }
     });
     await page.goto(url);
-    await output_1.default.
-    await output_1.default.
+    await output_1.default.debugAction(texts_1.DEBUG_AUTO_SCROLL, autoScroll(page));
+    await output_1.default.debugAction(texts_1.DEBUG_AUTO_WIDTH, autoWidths(page, widths, height));
     if (resourceDiscoveryTimeout > 0) {
         await new Promise((resolve) => setTimeout(resolve, resourceDiscoveryTimeout));
         if (context_js_1.debug) {
             output_1.default.warning((0, texts_1.DEBUG_RESOURCE_DISCOVERY_TIMEOUT)(resourceDiscoveryTimeout));
         }
     }
-    await output_1.default.
+    await output_1.default.debugAction(texts_1.DEBUG_WAIT_FOR_IDLE, page.waitForNetworkIdle());
     await browser.close();
     return { scrapedResourcesUrls, duplicatedResourcesUrls };
 }
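One subtlety of the `debugAction(label, promise)` API: it takes an already-created promise, not a thunk, so `autoScroll(page)` or `page.waitForNetworkIdle()` starts executing at the call site and the helper measures from its own await to settlement; with debug off it degrades to a plain await. A minimal usage sketch with Puppeteer (the label string is illustrative, and `Output` is the sketch from the first hunk):

import puppeteer from 'puppeteer';

const browser = await puppeteer.launch();
const page = await browser.newPage();
await page.goto('https://example.com');

// waitForNetworkIdle() begins observing immediately; debugAction only awaits
// it and, in debug mode, reports how long it took to settle.
await Output.debugAction('wait for network idle', page.waitForNetworkIdle());
await browser.close();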