acpilot 2.1.0 → 2.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +12 -10
- package/package.json +1 -1
package/dist/cli.js
CHANGED
|
@@ -44468,7 +44468,11 @@ function extractLinks(html3, pageUrl, baseUrl, allowedHostnames) {
|
|
|
44468
44468
|
const pathname = resolved.pathname.toLowerCase();
|
|
44469
44469
|
const lastDot = pathname.lastIndexOf(".");
|
|
44470
44470
|
if (lastDot !== -1 && REJECTED_EXTENSIONS.has(pathname.slice(lastDot))) return;
|
|
44471
|
-
|
|
44471
|
+
const rh = resolved.hostname.toLowerCase();
|
|
44472
|
+
const rIsLocal = rh === "localhost" || rh === "127.0.0.1" || rh === "::1" || rh === "0.0.0.0" || rh.startsWith("192.168.") || rh.startsWith("10.") || /^172\.(1[6-9]|2\d|3[01])\./.test(rh) || rh.endsWith(".local") || rh.endsWith(".localhost");
|
|
44473
|
+
if (!rIsLocal) {
|
|
44474
|
+
resolved.protocol = "https:";
|
|
44475
|
+
}
|
|
44472
44476
|
resolved.hash = "";
|
|
44473
44477
|
let normalised = resolved.origin.toLowerCase() + resolved.pathname;
|
|
44474
44478
|
if (normalised.endsWith("/") && resolved.pathname !== "/") {
|
|
@@ -44579,7 +44583,11 @@ async function extractLinksWithBrowser(page, url, baseUrl, allowedHostnames) {
|
|
|
44579
44583
|
const pathname = parsed.pathname.toLowerCase();
|
|
44580
44584
|
const lastDot = pathname.lastIndexOf(".");
|
|
44581
44585
|
if (lastDot !== -1 && REJECTED_EXTENSIONS.has(pathname.slice(lastDot))) continue;
|
|
44582
|
-
|
|
44586
|
+
const ph = parsed.hostname.toLowerCase();
|
|
44587
|
+
const pIsLocal = ph === "localhost" || ph === "127.0.0.1" || ph === "::1" || ph === "0.0.0.0" || ph.startsWith("192.168.") || ph.startsWith("10.") || /^172\.(1[6-9]|2\d|3[01])\./.test(ph) || ph.endsWith(".local") || ph.endsWith(".localhost");
|
|
44588
|
+
if (!pIsLocal) {
|
|
44589
|
+
parsed.protocol = "https:";
|
|
44590
|
+
}
|
|
44583
44591
|
parsed.hash = "";
|
|
44584
44592
|
let norm = parsed.origin.toLowerCase() + parsed.pathname;
|
|
44585
44593
|
if (norm.endsWith("/") && parsed.pathname !== "/") {
|
|
@@ -60568,7 +60576,7 @@ var init_scanner = __esm({
|
|
|
60568
60576
|
var readline = __toESM(require("readline"));
|
|
60569
60577
|
|
|
60570
60578
|
// package.json
|
|
60571
|
-
var version = "2.1.0";
|
|
60579
|
+
var version = "2.1.3";
|
|
60572
60580
|
|
|
60573
60581
|
// src/cli.ts
|
|
60574
60582
|
var PKG_VERSION = version;
|
|
@@ -60812,12 +60820,6 @@ async function main() {
|
|
|
60812
60820
|
args.scanMode = "network";
|
|
60813
60821
|
args.noCrawl = true;
|
|
60814
60822
|
}
|
|
60815
|
-
if (args.scanMode === "full") {
|
|
60816
|
-
const pagesStr = await ask(rl, "Max. Seitenanzahl", "50");
|
|
60817
|
-
maxPages = parseInt(pagesStr, 10) || 50;
|
|
60818
|
-
const depthStr = await ask(rl, "Max. Crawl-Tiefe", "3");
|
|
60819
|
-
maxDepth = parseInt(depthStr, 10) || 3;
|
|
60820
|
-
}
|
|
60821
60823
|
if ((args.scanMode === "full" || args.scanMode === "single") && puppeteerAvailable) {
|
|
60822
60824
|
const netChoice = await ask(rl, `Netzwerk-Resilienz Test? ${DIM}(j/n)${RESET}`, "n");
|
|
60823
60825
|
args.networkTest = netChoice.toLowerCase().startsWith("j") || netChoice.toLowerCase() === "y";
|
|
@@ -60896,7 +60898,7 @@ async function runScanInner(args, isJson, puppeteerAvailable) {
|
|
|
60896
60898
|
} else {
|
|
60897
60899
|
if (!isJson) {
|
|
60898
60900
|
printDivider("URL Discovery");
|
|
60899
|
-
console.log(` ${CYAN}\u27F3${RESET}
|
|
60901
|
+
console.log(` ${CYAN}\u27F3${RESET} Suche Seiten auf ${BOLD}${args.url}${RESET} ${DIM}(Sitemap, robots.txt, Link-Crawling)${RESET}
|
|
60900
60902
|
`);
|
|
60901
60903
|
}
|
|
60902
60904
|
const { discoverUrls: discoverUrls2 } = await Promise.resolve().then(() => (init_crawler(), crawler_exports));
|