sentinel-scanner 1.1.0-alpha.1 → 1.1.0

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../src/bin.ts", "../src/commands/spider.ts", "../src/modules/spider/index.ts", "../src/utils/index.ts"],
+ "sourcesContent": ["#!/usr/bin/env node --no-warnings\n\nimport yargs from \"yargs\";\nimport { hideBin } from \"yargs/helpers\";\nimport { spiderCommand } from \"./commands/spider\";\n\nconst commandHandler = yargs(hideBin(process.argv));\n\ncommandHandler.demandCommand();\ncommandHandler.scriptName(\"sentinel-scanner\");\ncommandHandler.usage(\"Usage: $0 <command> [options]\");\ncommandHandler.help().alias(\"help\", \"h\");\ncommandHandler.version().alias(\"version\", \"v\");\ncommandHandler.strict();\ncommandHandler.showHelpOnFail(true);\n\n// Handle Commands\ncommandHandler.command(spiderCommand);\n\ncommandHandler.parse();\n", "import fs from \"node:fs\";\nimport path from \"node:path\";\nimport type { ArgumentsCamelCase, CommandModule } from \"yargs\";\nimport SpiderScanner from \"../modules/spider\";\nimport { createLogger } from \"../utils\";\n\nexport type SpiderScannerCLIOptions = {\n\turl: string;\n\tdepth?: number;\n\toutput?: string;\n\tconcurrency?: number;\n\ttimeout?: number;\n\tretries?: number;\n};\n\nconst cliLogger = createLogger(\"CLI\");\n\nexport const spiderCommand: CommandModule = {\n\tcommand: \"spider\",\n\tdescribe:\n\t\t\"Crawl a website and get an array of URLs which are internal to the website\",\n\tbuilder: (yargs) => {\n\t\treturn yargs\n\t\t\t.option(\"url\", {\n\t\t\t\talias: \"u\",\n\t\t\t\ttype: \"string\",\n\t\t\t\tdescription: \"The URL of the website to scan\",\n\t\t\t\tdemandOption: true,\n\t\t\t\tcoerce: (url) => {\n\t\t\t\t\ttry {\n\t\t\t\t\t\tnew URL(url);\n\n\t\t\t\t\t\treturn url;\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\tthrow new Error(`Invalid URL: ${url}`);\n\t\t\t\t\t}\n\t\t\t\t},\n\t\t\t})\n\t\t\t.option(\"depth\", {\n\t\t\t\talias: \"d\",\n\t\t\t\ttype: \"number\",\n\t\t\t\tdescription: \"The maximum depth to crawl\",\n\t\t\t\tdefault: 250,\n\t\t\t\tcoerce: (depth) => {\n\t\t\t\t\tif (depth < 0) {\n\t\t\t\t\t\tthrow new Error(\"Depth must be a positive number\");\n\t\t\t\t\t}\n\n\t\t\t\t\tif (depth > 250) {\n\t\t\t\t\t\tthrow new Error(\"Depth must be less than 250\");\n\t\t\t\t\t}\n\n\t\t\t\t\treturn depth;\n\t\t\t\t},\n\t\t\t})\n\t\t\t.option(\"output\", {\n\t\t\t\talias: \"o\",\n\t\t\t\ttype: \"string\",\n\t\t\t\tdescription:\n\t\t\t\t\t\"The output file to write the results to. 
Must be a JSON file\",\n\t\t\t\tcoerce: (output) => {\n\t\t\t\t\ttry {\n\t\t\t\t\t\t// Should throw an error if the path is invalid\n\t\t\t\t\t\t// Should Be A JSON File\n\t\t\t\t\t\tconst resolvedPath = path.resolve(output);\n\t\t\t\t\t\tconst parsedPath = path.parse(resolvedPath);\n\n\t\t\t\t\t\tif (parsedPath.ext !== \".json\") {\n\t\t\t\t\t\t\tthrow new Error(\"Output file must be a JSON file\");\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tif (fs.existsSync(resolvedPath)) {\n\t\t\t\t\t\t\tthrow new Error(\"Output file already exists\");\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn resolvedPath;\n\t\t\t\t\t} catch (error) {\n\t\t\t\t\t\tthrow new Error(`Invalid output file: ${output}`);\n\t\t\t\t\t}\n\t\t\t\t},\n\t\t\t\tdefault: getDefaultFilePath(),\n\t\t\t})\n\t\t\t.option(\"concurrency\", {\n\t\t\t\talias: \"c\",\n\t\t\t\ttype: \"number\",\n\t\t\t\tdescription: \"The number of concurrent requests to make\",\n\t\t\t\tdefault: 10,\n\t\t\t\tcoerce: (concurrency) => {\n\t\t\t\t\tif (concurrency < 1) {\n\t\t\t\t\t\tthrow new Error(\"Concurrency must be a positive number\");\n\t\t\t\t\t}\n\n\t\t\t\t\tif (concurrency > 20) {\n\t\t\t\t\t\tthrow new Error(\"Concurrency must be less than 20\");\n\t\t\t\t\t}\n\n\t\t\t\t\treturn concurrency;\n\t\t\t\t},\n\t\t\t})\n\t\t\t.option(\"timeout\", {\n\t\t\t\talias: \"t\",\n\t\t\t\ttype: \"number\",\n\t\t\t\tdescription: \"The timeout for each request in milliseconds\",\n\t\t\t\tdefault: 5000,\n\t\t\t\tcoerce: (timeout) => {\n\t\t\t\t\tif (timeout < 0) {\n\t\t\t\t\t\tthrow new Error(\"Timeout must be a positive number\");\n\t\t\t\t\t}\n\n\t\t\t\t\tif (timeout > 25_000) {\n\t\t\t\t\t\tthrow new Error(\"Timeout must be less than 25,000\");\n\t\t\t\t\t}\n\n\t\t\t\t\treturn timeout;\n\t\t\t\t},\n\t\t\t})\n\t\t\t.option(\"retries\", {\n\t\t\t\talias: \"r\",\n\t\t\t\ttype: \"number\",\n\t\t\t\tdescription: \"The number of retries for each request\",\n\t\t\t\tdefault: 3,\n\t\t\t\tcoerce: (retries) => {\n\t\t\t\t\tif (retries < 0) {\n\t\t\t\t\t\tthrow new Error(\"Retries must be a positive number\");\n\t\t\t\t\t}\n\n\t\t\t\t\tif (retries > 10) {\n\t\t\t\t\t\tthrow new Error(\"Retries must be less than 10\");\n\t\t\t\t\t}\n\n\t\t\t\t\treturn retries;\n\t\t\t\t},\n\t\t\t});\n\t},\n\thandler: async (args) => {\n\t\ttry {\n\t\t\tconst argData = args as ArgumentsCamelCase<SpiderScannerCLIOptions>;\n\n\t\t\tconst scanner = new SpiderScanner(argData.url, {\n\t\t\t\tdepth: argData.depth ?? 250,\n\t\t\t\tconcurrency: argData.concurrency ?? 10,\n\t\t\t\ttimeout: argData.timeout ?? 5000,\n\t\t\t\tretries: argData.retries ?? 
3,\n\t\t\t});\n\n\t\t\tcliLogger.info(\"Starting to crawl website\");\n\n\t\t\tconst results = await scanner.crawl();\n\n\t\t\tif (argData.output) {\n\t\t\t\tfs.writeFileSync(argData.output, JSON.stringify(results, null, 2));\n\t\t\t\tcliLogger.info(`Results written to ${argData.output}`);\n\t\t\t} else {\n\t\t\t\tconst resolvedPath = getDefaultFilePath();\n\t\t\t\tfs.writeFileSync(resolvedPath, JSON.stringify(results, null, 2));\n\t\t\t\tcliLogger.info(`Results written to ${resolvedPath}`);\n\t\t\t}\n\t\t} catch (error) {\n\t\t\tif (error instanceof Error) {\n\t\t\t\tcliLogger.error(error.message);\n\t\t\t}\n\t\t\tcliLogger.error(\"Failed to run spider command\");\n\t\t\tprocess.exit(1);\n\t\t}\n\t},\n};\n\nconst getDefaultFilePath = () => {\n\ttry {\n\t\tconst resolvedDir = path.resolve(\"sentinel_output\");\n\t\t// Check If Directory Exists\n\t\tif (!fs.existsSync(resolvedDir)) {\n\t\t\tfs.mkdirSync(resolvedDir);\n\t\t}\n\n\t\tconst resolvedPath = path.resolve(\n\t\t\t`sentinel_output/spider_${Date.now()}.json`,\n\t\t);\n\t\t// Check If File Exists\n\t\tif (fs.existsSync(resolvedPath)) {\n\t\t\tthrow new Error(\"Output file already exists\");\n\t\t}\n\t\tconst parsedPath = path.parse(resolvedPath);\n\n\t\tif (parsedPath.ext !== \".json\") {\n\t\t\tthrow new Error(\"Output file must be a JSON file\");\n\t\t}\n\n\t\treturn resolvedPath;\n\t} catch (error) {\n\t\tthrow new Error(\"Invalid output file\");\n\t}\n};\n", "import fetch from \"isomorphic-fetch\";\nimport jsdom from \"jsdom\";\nimport UserAgent from \"user-agents\";\nimport { createLogger } from \"../../utils\";\n\nexport interface SpiderScannerOptions {\n\tdepth?: number;\n\tconcurrency?: number;\n\tretries?: number;\n\ttimeout?: number;\n}\n\nexport default class SpiderScanner {\n\tprivate header: Record<string, string> = {\n\t\t\"User-Agent\": new UserAgent().toString(),\n\t};\n\tprivate url: URL;\n\tprivate logger = createLogger(\"SpiderScanner\");\n\n\tprivate depth: number;\n\tprivate concurrency: number;\n\tprivate retries: number;\n\tprivate timeout: number;\n\n\tconstructor(url: string, options: SpiderScannerOptions = {}) {\n\t\tconst {\n\t\t\tdepth = 250,\n\t\t\tconcurrency = 5,\n\t\t\tretries = 3,\n\t\t\ttimeout = 5000,\n\t\t} = options;\n\t\tthis.depth = depth;\n\t\tthis.concurrency = concurrency;\n\t\tthis.retries = retries;\n\t\tthis.timeout = timeout;\n\n\t\ttry {\n\t\t\tthis.url = new URL(url);\n\t\t\tthis.logger.info(\n\t\t\t\t`Initialized with URL: ${url}, User-Agent: ${this.header[\"User-Agent\"]}`,\n\t\t\t);\n\t\t} catch (error) {\n\t\t\tif (error instanceof TypeError) {\n\t\t\t\tthis.logger.error(\"Invalid URL\");\n\t\t\t\tthrow new Error(\"Invalid URL\");\n\t\t\t}\n\t\t\tthis.logger.error(`Unexpected error in constructor: ${error}`);\n\t\t\tthrow error;\n\t\t}\n\t}\n\n\tprivate normalizeDomain(domain: string): string {\n\t\treturn domain.startsWith(\"www.\") ? 
domain.slice(4) : domain;\n\t}\n\n\tprivate convertRelativeUrlToAbsolute(url: string): string {\n\t\treturn new URL(url, this.url.toString()).toString();\n\t}\n\n\tprivate isInternalLink(url: string): boolean {\n\t\ttry {\n\t\t\tconst parsedUrl = new URL(url, this.url.href);\n\t\t\tif (![\"http:\", \"https:\"].includes(parsedUrl.protocol)) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t\tconst baseDomain = this.normalizeDomain(this.url.hostname);\n\t\t\tconst parsedDomain = this.normalizeDomain(parsedUrl.hostname);\n\t\t\treturn parsedDomain === baseDomain;\n\t\t} catch (error) {\n\t\t\tthis.logger.warn(`Error parsing URL: ${url} - ${error}`);\n\t\t\treturn false;\n\t\t}\n\t}\n\n\tprivate async fetchWithRetries(\n\t\turl: string,\n\t\tretries: number,\n\t): Promise<string | null> {\n\t\tfor (let attempt = 1; attempt <= retries; attempt++) {\n\t\t\tconst controller = new AbortController();\n\t\t\tconst timeoutId = setTimeout(() => controller.abort(), this.timeout);\n\n\t\t\ttry {\n\t\t\t\tthis.logger.debug(`Fetching URL (Attempt ${attempt}): ${url}`);\n\t\t\t\tconst randomUserAgent = new UserAgent().toString();\n\t\t\t\tthis.logger.info(`Changing User-Agent to: ${randomUserAgent}`);\n\t\t\t\tthis.header[\"User-Agent\"] = randomUserAgent;\n\t\t\t\tconst response = await fetch(url, {\n\t\t\t\t\theaders: this.header,\n\t\t\t\t\tsignal: controller.signal,\n\t\t\t\t\tredirect: \"follow\",\n\t\t\t\t});\n\n\t\t\t\tclearTimeout(timeoutId);\n\n\t\t\t\tif (response.ok) {\n\t\t\t\t\tthis.logger.info(`Successfully fetched URL: ${url}`);\n\t\t\t\t\treturn await response.text();\n\t\t\t\t}\n\n\t\t\t\tthis.logger.warn(`Failed to fetch URL (${response.status}): ${url}`);\n\t\t\t} catch (error) {\n\t\t\t\tif ((error as Error).name === \"AbortError\") {\n\t\t\t\t\tthis.logger.warn(`Fetch timed out: ${url}`);\n\t\t\t\t} else {\n\t\t\t\t\tthis.logger.error(`Error fetching URL: ${url} - ${error}`);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n\n\tprivate extractLinks(html: string): string[] {\n\t\tconst { JSDOM } = jsdom;\n\t\tconst dom = new JSDOM(html);\n\t\tconst links = Array.from(dom.window.document.querySelectorAll(\"a\"));\n\t\tconst hrefs = links.map((link) => link.href);\n\t\tconst internalLinks = hrefs.filter((href) => this.isInternalLink(href));\n\t\tthis.logger.debug(\n\t\t\t`Extracted ${internalLinks.length} internal links from HTML content`,\n\t\t);\n\t\treturn internalLinks.map((link) => this.convertRelativeUrlToAbsolute(link));\n\t}\n\n\tpublic async crawl(): Promise<Array<string>> {\n\t\tconst visited = new Set<string>();\n\t\tconst queue = new Set<string>([this.url.href]);\n\t\tconst resultLinks = new Set<string>();\n\n\t\t// Assets to ignore\n\t\tconst assetExtensions = [\n\t\t\t\".css\",\n\t\t\t\".js\",\n\t\t\t\".png\",\n\t\t\t\".jpg\",\n\t\t\t\".jpeg\",\n\t\t\t\".gif\",\n\t\t\t\".svg\",\n\t\t\t\".ico\",\n\t\t\t\".webp\",\n\t\t\t\".mp4\",\n\t\t\t\".mp3\",\n\t\t\t\".wav\",\n\t\t\t\".avi\",\n\t\t\t\".mov\",\n\t\t\t\".webm\",\n\t\t\t\".pdf\",\n\t\t\t\".doc\",\n\t\t\t\".docx\",\n\t\t\t\".xls\",\n\t\t\t\".xlsx\",\n\t\t\t\".ppt\",\n\t\t\t\".pptx\",\n\t\t\t\".zip\",\n\t\t\t\".rar\",\n\t\t\t\".tar\",\n\t\t\t\".gz\",\n\t\t];\n\n\t\tconst fetchAndExtract = async (currentUrl: string) => {\n\t\t\tif (visited.has(currentUrl)) {\n\t\t\t\tthis.logger.debug(`Skipping already visited URL: ${currentUrl}`);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tvisited.add(currentUrl);\n\t\t\tthis.logger.info(`Visiting URL: ${currentUrl}`);\n\n\t\t\tconst html = await this.fetchWithRetries(currentUrl, this.retries);\n\t\t\tif (!html) 
return;\n\n\t\t\tconst links = this.extractLinks(html);\n\n\t\t\t// Filter out asset links\n\t\t\tfor (const link of links) {\n\t\t\t\tif (assetExtensions.some((ext) => link.endsWith(ext))) {\n\t\t\t\t\tthis.logger.debug(`Ignoring asset link: ${link}`);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tthis.logger.debug(`Found link: ${link}`);\n\t\t\t}\n\n\t\t\tfor (const link of links) {\n\t\t\t\tif (!visited.has(link) && queue.size < this.depth) {\n\t\t\t\t\tqueue.add(link);\n\t\t\t\t\tthis.logger.debug(`Added to queue: ${link}`);\n\t\t\t\t}\n\t\t\t}\n\t\t\tresultLinks.add(currentUrl);\n\t\t};\n\n\t\tconst processBatch = async () => {\n\t\t\tconst batch = Array.from(queue).slice(0, this.concurrency);\n\t\t\tfor (const url of batch) {\n\t\t\t\tqueue.delete(url);\n\t\t\t}\n\t\t\tawait Promise.allSettled(batch.map((url) => fetchAndExtract(url)));\n\t\t};\n\n\t\tthis.logger.info(\n\t\t\t`Starting crawl with depth: ${this.depth}, concurrency: ${this.concurrency}`,\n\t\t);\n\t\twhile (queue.size > 0 && visited.size < this.depth) {\n\t\t\tawait processBatch();\n\t\t}\n\n\t\tthis.logger.info(\n\t\t\t`Crawling completed. Total pages visited: ${resultLinks.size}`,\n\t\t);\n\n\t\treturn Array.from(resultLinks);\n\t}\n}\n", "import winston from \"winston\";\n\nexport const createLogger = (label: string) =>\n\twinston.createLogger({\n\t\tlevels: {\n\t\t\terror: 0,\n\t\t\twarn: 1,\n\t\t\tinfo: 2,\n\t\t\thttp: 3,\n\t\t\tverbose: 4,\n\t\t\tdebug: 5,\n\t\t\tsilly: 6,\n\t\t},\n\t\tformat: winston.format.combine(\n\t\t\twinston.format.label({ label }),\n\t\t\twinston.format.colorize(),\n\t\t\twinston.format.timestamp({\n\t\t\t\tformat: () => {\n\t\t\t\t\treturn new Date().toLocaleString(\"en-US\");\n\t\t\t\t},\n\t\t\t}),\n\t\t\twinston.format.align(),\n\t\t\twinston.format.printf(\n\t\t\t\t(info) =>\n\t\t\t\t\t`\\x1b[34m(${info.label})\\x1b[0m \\x1b[33m${info.timestamp}\\x1b[0m [${info.level}]: ${info.message}`,\n\t\t\t),\n\t\t),\n\t\ttransports: [new winston.transports.Console()],\n\t});\n"],
+ "mappings": ";;;AAEA,OAAO,WAAW;AAClB,SAAS,eAAe;;;ACHxB,OAAO,QAAQ;AACf,OAAO,UAAU;;;ACDjB,OAAO,WAAW;AAClB,OAAO,WAAW;AAClB,OAAO,eAAe;;;ACFtB,OAAO,aAAa;AAEb,IAAM,eAAe,CAAC,UAC5B,QAAQ,aAAa;AAAA,EACpB,QAAQ;AAAA,IACP,OAAO;AAAA,IACP,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,SAAS;AAAA,IACT,OAAO;AAAA,IACP,OAAO;AAAA,EACR;AAAA,EACA,QAAQ,QAAQ,OAAO;AAAA,IACtB,QAAQ,OAAO,MAAM,EAAE,MAAM,CAAC;AAAA,IAC9B,QAAQ,OAAO,SAAS;AAAA,IACxB,QAAQ,OAAO,UAAU;AAAA,MACxB,QAAQ,MAAM;AACb,gBAAO,oBAAI,KAAK,GAAE,eAAe,OAAO;AAAA,MACzC;AAAA,IACD,CAAC;AAAA,IACD,QAAQ,OAAO,MAAM;AAAA,IACrB,QAAQ,OAAO;AAAA,MACd,CAAC,SACA,YAAY,KAAK,KAAK,oBAAoB,KAAK,SAAS,YAAY,KAAK,KAAK,MAAM,KAAK,OAAO;AAAA,IAClG;AAAA,EACD;AAAA,EACA,YAAY,CAAC,IAAI,QAAQ,WAAW,QAAQ,CAAC;AAC9C,CAAC;;;ADhBF,IAAqB,gBAArB,MAAmC;AAAA,EAC1B,SAAiC;AAAA,IACxC,cAAc,IAAI,UAAU,EAAE,SAAS;AAAA,EACxC;AAAA,EACQ;AAAA,EACA,SAAS,aAAa,eAAe;AAAA,EAErC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,KAAa,UAAgC,CAAC,GAAG;AAC5D,UAAM;AAAA,MACL,QAAQ;AAAA,MACR,cAAc;AAAA,MACd,UAAU;AAAA,MACV,UAAU;AAAA,IACX,IAAI;AACJ,SAAK,QAAQ;AACb,SAAK,cAAc;AACnB,SAAK,UAAU;AACf,SAAK,UAAU;AAEf,QAAI;AACH,WAAK,MAAM,IAAI,IAAI,GAAG;AACtB,WAAK,OAAO;AAAA,QACX,yBAAyB,GAAG,iBAAiB,KAAK,OAAO,YAAY,CAAC;AAAA,MACvE;AAAA,IACD,SAAS,OAAO;AACf,UAAI,iBAAiB,WAAW;AAC/B,aAAK,OAAO,MAAM,aAAa;AAC/B,cAAM,IAAI,MAAM,aAAa;AAAA,MAC9B;AACA,WAAK,OAAO,MAAM,oCAAoC,KAAK,EAAE;AAC7D,YAAM;AAAA,IACP;AAAA,EACD;AAAA,EAEQ,gBAAgB,QAAwB;AAC/C,WAAO,OAAO,WAAW,MAAM,IAAI,OAAO,MAAM,CAAC,IAAI;AAAA,EACtD;AAAA,EAEQ,6BAA6B,KAAqB;AACzD,WAAO,IAAI,IAAI,KAAK,KAAK,IAAI,SAAS,CAAC,EAAE,SAAS;AAAA,EACnD;AAAA,EAEQ,eAAe,KAAsB;AAC5C,QAAI;AACH,YAAM,YAAY,IAAI,IAAI,KAAK,KAAK,IAAI,IAAI;AAC5C,UAAI,CAAC,CAAC,SAAS,QAAQ,EAAE,SAAS,UAAU,QAAQ,GAAG;AACtD,eAAO;AAAA,MACR;AACA,YAAM,aAAa,KAAK,gBAAgB,KAAK,IAAI,QAAQ;AACzD,YAAM,eAAe,KAAK,gBAAgB,UAAU,QAAQ;AAC5D,aAAO,iBAAiB;AAAA,IACzB,SAAS,OAAO;AACf,WAAK,OAAO,KAAK,sBAAsB,GAAG,MAAM,KAAK,EAAE;AACvD,aAAO;AAAA,IACR;AAAA,EACD;AAAA,EAEA,MAAc,iBACb,KACA,SACyB;AACzB,aAAS,UAAU,GAAG,WAAW,SAAS,WAAW;AACpD,YAAM,aAAa,IAAI,gBAAgB;AACvC,YAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,KAAK,OAAO;AAEnE,UAAI;AACH,aAAK,OAAO,MAAM,yBAAyB,OAAO,MAAM,GAAG,EAAE;AAC7D,cAAM,kBAAkB,IAAI,UAAU,EAAE,SAAS;AACjD,aAAK,OAAO,KAAK,2BAA2B,eAAe,EAAE;AAC7D,aAAK,OAAO,YAAY,IAAI;AAC5B,cAAM,WAAW,MAAM,MAAM,KAAK;AAAA,UACjC,SAAS,KAAK;AAAA,UACd,QAAQ,WAAW;AAAA,UACnB,UAAU;AAAA,QACX,CAAC;AAED,qBAAa,SAAS;AAEtB,YAAI,SAAS,IAAI;AAChB,eAAK,OAAO,KAAK,6BAA6B,GAAG,EAAE;AACnD,iBAAO,MAAM,SAAS,KAAK;AAAA,QAC5B;AAEA,aAAK,OAAO,KAAK,wBAAwB,SAAS,MAAM,MAAM,GAAG,EAAE;AAAA,MACpE,SAAS,OAAO;AACf,YAAK,MAAgB,SAAS,cAAc;AAC3C,eAAK,OAAO,KAAK,oBAAoB,GAAG,EAAE;AAAA,QAC3C,OAAO;AACN,eAAK,OAAO,MAAM,uBAAuB,GAAG,MAAM,KAAK,EAAE;AAAA,QAC1D;AAAA,MACD;AAAA,IACD;AACA,WAAO;AAAA,EACR;AAAA,EAEQ,aAAa,MAAwB;AAC5C,UAAM,EAAE,MAAM,IAAI;AAClB,UAAM,MAAM,IAAI,MAAM,IAAI;AAC1B,UAAM,QAAQ,MAAM,KAAK,IAAI,OAAO,SAAS,iBAAiB,GAAG,CAAC;AAClE,UAAM,QAAQ,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI;AAC3C,UAAM,gBAAgB,MAAM,OAAO,CAAC,SAAS,KAAK,eAAe,IAAI,CAAC;AACtE,SAAK,OAAO;AAAA,MACX,aAAa,cAAc,MAAM;AAAA,IAClC;AACA,WAAO,cAAc,IAAI,CAAC,SAAS,KAAK,6BAA6B,IAAI,CAAC;AAAA,EAC3E;AAAA,EAEA,MAAa,QAAgC;AAC5C,UAAM,UAAU,oBAAI,IAAY;AAChC,UAAM,QAAQ,oBAAI,IAAY,CAAC,KAAK,IAAI,IAAI,CAAC;AAC7C,UAAM,cAAc,oBAAI,IAAY;AAGpC,UAAM,kBAAkB;AAAA,MACvB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAEA,UAAM,kBAAkB,OAAO,eAAuB;AACrD,UAAI,QAAQ,IAAI,UAAU,GAAG;AAC5B,aAAK,OAAO,MAAM,iCAAiC,UAAU,EAAE;AAC/D;AAAA,MACD;AACA,cAA
Q,IAAI,UAAU;AACtB,WAAK,OAAO,KAAK,iBAAiB,UAAU,EAAE;AAE9C,YAAM,OAAO,MAAM,KAAK,iBAAiB,YAAY,KAAK,OAAO;AACjE,UAAI,CAAC,KAAM;AAEX,YAAM,QAAQ,KAAK,aAAa,IAAI;AAGpC,iBAAW,QAAQ,OAAO;AACzB,YAAI,gBAAgB,KAAK,CAAC,QAAQ,KAAK,SAAS,GAAG,CAAC,GAAG;AACtD,eAAK,OAAO,MAAM,wBAAwB,IAAI,EAAE;AAChD;AAAA,QACD;AACA,aAAK,OAAO,MAAM,eAAe,IAAI,EAAE;AAAA,MACxC;AAEA,iBAAW,QAAQ,OAAO;AACzB,YAAI,CAAC,QAAQ,IAAI,IAAI,KAAK,MAAM,OAAO,KAAK,OAAO;AAClD,gBAAM,IAAI,IAAI;AACd,eAAK,OAAO,MAAM,mBAAmB,IAAI,EAAE;AAAA,QAC5C;AAAA,MACD;AACA,kBAAY,IAAI,UAAU;AAAA,IAC3B;AAEA,UAAM,eAAe,YAAY;AAChC,YAAM,QAAQ,MAAM,KAAK,KAAK,EAAE,MAAM,GAAG,KAAK,WAAW;AACzD,iBAAW,OAAO,OAAO;AACxB,cAAM,OAAO,GAAG;AAAA,MACjB;AACA,YAAM,QAAQ,WAAW,MAAM,IAAI,CAAC,QAAQ,gBAAgB,GAAG,CAAC,CAAC;AAAA,IAClE;AAEA,SAAK,OAAO;AAAA,MACX,8BAA8B,KAAK,KAAK,kBAAkB,KAAK,WAAW;AAAA,IAC3E;AACA,WAAO,MAAM,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO;AACnD,YAAM,aAAa;AAAA,IACpB;AAEA,SAAK,OAAO;AAAA,MACX,4CAA4C,YAAY,IAAI;AAAA,IAC7D;AAEA,WAAO,MAAM,KAAK,WAAW;AAAA,EAC9B;AACD;;;ADpMA,IAAM,YAAY,aAAa,KAAK;AAE7B,IAAM,gBAA+B;AAAA,EAC3C,SAAS;AAAA,EACT,UACC;AAAA,EACD,SAAS,CAACA,WAAU;AACnB,WAAOA,OACL,OAAO,OAAO;AAAA,MACd,OAAO;AAAA,MACP,MAAM;AAAA,MACN,aAAa;AAAA,MACb,cAAc;AAAA,MACd,QAAQ,CAAC,QAAQ;AAChB,YAAI;AACH,cAAI,IAAI,GAAG;AAEX,iBAAO;AAAA,QACR,SAAS,OAAO;AACf,gBAAM,IAAI,MAAM,gBAAgB,GAAG,EAAE;AAAA,QACtC;AAAA,MACD;AAAA,IACD,CAAC,EACA,OAAO,SAAS;AAAA,MAChB,OAAO;AAAA,MACP,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,MACT,QAAQ,CAAC,UAAU;AAClB,YAAI,QAAQ,GAAG;AACd,gBAAM,IAAI,MAAM,iCAAiC;AAAA,QAClD;AAEA,YAAI,QAAQ,KAAK;AAChB,gBAAM,IAAI,MAAM,6BAA6B;AAAA,QAC9C;AAEA,eAAO;AAAA,MACR;AAAA,IACD,CAAC,EACA,OAAO,UAAU;AAAA,MACjB,OAAO;AAAA,MACP,MAAM;AAAA,MACN,aACC;AAAA,MACD,QAAQ,CAAC,WAAW;AACnB,YAAI;AAGH,gBAAM,eAAe,KAAK,QAAQ,MAAM;AACxC,gBAAM,aAAa,KAAK,MAAM,YAAY;AAE1C,cAAI,WAAW,QAAQ,SAAS;AAC/B,kBAAM,IAAI,MAAM,iCAAiC;AAAA,UAClD;AAEA,cAAI,GAAG,WAAW,YAAY,GAAG;AAChC,kBAAM,IAAI,MAAM,4BAA4B;AAAA,UAC7C;AAEA,iBAAO;AAAA,QACR,SAAS,OAAO;AACf,gBAAM,IAAI,MAAM,wBAAwB,MAAM,EAAE;AAAA,QACjD;AAAA,MACD;AAAA,MACA,SAAS,mBAAmB;AAAA,IAC7B,CAAC,EACA,OAAO,eAAe;AAAA,MACtB,OAAO;AAAA,MACP,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,MACT,QAAQ,CAAC,gBAAgB;AACxB,YAAI,cAAc,GAAG;AACpB,gBAAM,IAAI,MAAM,uCAAuC;AAAA,QACxD;AAEA,YAAI,cAAc,IAAI;AACrB,gBAAM,IAAI,MAAM,kCAAkC;AAAA,QACnD;AAEA,eAAO;AAAA,MACR;AAAA,IACD,CAAC,EACA,OAAO,WAAW;AAAA,MAClB,OAAO;AAAA,MACP,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,MACT,QAAQ,CAAC,YAAY;AACpB,YAAI,UAAU,GAAG;AAChB,gBAAM,IAAI,MAAM,mCAAmC;AAAA,QACpD;AAEA,YAAI,UAAU,MAAQ;AACrB,gBAAM,IAAI,MAAM,kCAAkC;AAAA,QACnD;AAEA,eAAO;AAAA,MACR;AAAA,IACD,CAAC,EACA,OAAO,WAAW;AAAA,MAClB,OAAO;AAAA,MACP,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,MACT,QAAQ,CAAC,YAAY;AACpB,YAAI,UAAU,GAAG;AAChB,gBAAM,IAAI,MAAM,mCAAmC;AAAA,QACpD;AAEA,YAAI,UAAU,IAAI;AACjB,gBAAM,IAAI,MAAM,8BAA8B;AAAA,QAC/C;AAEA,eAAO;AAAA,MACR;AAAA,IACD,CAAC;AAAA,EACH;AAAA,EACA,SAAS,OAAO,SAAS;AACxB,QAAI;AACH,YAAM,UAAU;AAEhB,YAAM,UAAU,IAAI,cAAc,QAAQ,KAAK;AAAA,QAC9C,OAAO,QAAQ,SAAS;AAAA,QACxB,aAAa,QAAQ,eAAe;AAAA,QACpC,SAAS,QAAQ,WAAW;AAAA,QAC5B,SAAS,QAAQ,WAAW;AAAA,MAC7B,CAAC;AAED,gBAAU,KAAK,2BAA2B;AAE1C,YAAM,UAAU,MAAM,QAAQ,MAAM;AAEpC,UAAI,QAAQ,QAAQ;AACnB,WAAG,cAAc,QAAQ,QAAQ,KAAK,UAAU,SAAS,MAAM,CAAC,CAAC;AACjE,kBAAU,KAAK,sBAAsB,QAAQ,MAAM,EAAE;AAAA,MACtD,OAAO;AACN,cAAM,eAAe,mBAAmB;AACxC,WAAG,cAAc,cAAc,KAAK,UAAU,SAAS,MAAM,CAAC,CAAC;AAC/D,kBAAU,KAAK,sBAAsB,YAAY,EAAE;AAAA,MACpD;AAAA,IACD,SAAS,OAAO;AACf,UAAI,iBAAiB,OAAO;AAC3B,kBAAU,MAAM,MAAM,OAAO;AAAA,MAC9B;AACA,gBAAU,MAAM,8BAA8B;AAC9C,cAAQ,KAAK,CAAC;AAAA,IACf;AAAA,EACD;AACD;AAEA,IAAM,qBAAqB,MAAM;AAChC,MAAI;AACH,UAAM,cAAc,KAAK,QAAQ,iBAAiB;AAElD,QAAI,CAAC,GAAG,WAAW,WAAW,GAAG;AAChC,SAAG,UAAU,WAAW;AA
AA,IACzB;AAEA,UAAM,eAAe,KAAK;AAAA,MACzB,0BAA0B,KAAK,IAAI,CAAC;AAAA,IACrC;AAEA,QAAI,GAAG,WAAW,YAAY,GAAG;AAChC,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC7C;AACA,UAAM,aAAa,KAAK,MAAM,YAAY;AAE1C,QAAI,WAAW,QAAQ,SAAS;AAC/B,YAAM,IAAI,MAAM,iCAAiC;AAAA,IAClD;AAEA,WAAO;AAAA,EACR,SAAS,OAAO;AACf,UAAM,IAAI,MAAM,qBAAqB;AAAA,EACtC;AACD;;;AD1LA,IAAM,iBAAiB,MAAM,QAAQ,QAAQ,IAAI,CAAC;AAElD,eAAe,cAAc;AAC7B,eAAe,WAAW,kBAAkB;AAC5C,eAAe,MAAM,+BAA+B;AACpD,eAAe,KAAK,EAAE,MAAM,QAAQ,GAAG;AACvC,eAAe,QAAQ,EAAE,MAAM,WAAW,GAAG;AAC7C,eAAe,OAAO;AACtB,eAAe,eAAe,IAAI;AAGlC,eAAe,QAAQ,aAAa;AAEpC,eAAe,MAAM;",
+ "names": ["yargs"]
+ }
package/build/index.d.ts CHANGED
@@ -1 +1,25 @@
+ export declare class SpiderScanner {
+     private header;
+     private url;
+     private logger;
+     private depth;
+     private concurrency;
+     private retries;
+     private timeout;
+     constructor(url: string, options?: SpiderScannerOptions);
+     private normalizeDomain;
+     private convertRelativeUrlToAbsolute;
+     private isInternalLink;
+     private fetchWithRetries;
+     private extractLinks;
+     crawl(): Promise<Array<string>>;
+ }
+
+ export declare interface SpiderScannerOptions {
+     depth?: number;
+     concurrency?: number;
+     retries?: number;
+     timeout?: number;
+ }
+
  export { }
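
With these declarations now published, the package can be consumed as a library. A minimal sketch of using the new public API (the package name matches the "name" field in package.json below; the option values are illustrative, and crawl() now resolves to a flat array of internal URLs instead of the old { links, forms } object, as the index.js diff below shows):

import { SpiderScanner } from "sentinel-scanner";

const scanner = new SpiderScanner("https://example.com", {
	depth: 50, // maximum crawl depth, default 250
	concurrency: 5, // parallel requests per batch, default 5
	retries: 3, // fetch attempts per URL, default 3
	timeout: 5000, // per-request abort timeout in ms, default 5000
});

// Resolves to Promise<Array<string>> per the declaration above.
const links = await scanner.crawl();
console.log(JSON.stringify(links, null, 2));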
package/build/index.js CHANGED
@@ -1,48 +1,35 @@
- #!/usr/bin/env node
-
- // src/index.ts
- import yargs from "yargs";
- import { hideBin } from "yargs/helpers";
-
  // src/modules/spider/index.ts
  import fetch from "isomorphic-fetch";
  import jsdom from "jsdom";
  import UserAgent from "user-agents";
 
- // src/lib/logger.ts
- var Logger = class {
- moduleName;
- colors = {
- error: "\x1B[31m",
- info: "\x1B[32m",
- warn: "\x1B[33m",
- debug: "\x1B[35m",
- reset: "\x1B[0m",
- module: "\x1B[46m"
- };
- constructor(moduleName) {
- this.moduleName = moduleName;
- }
- formatMessage(level, ...message) {
- const timestamp = (/* @__PURE__ */ new Date()).toTimeString().split(" ")[0];
- return `[${level}] ${this.colors[level]}${this.colors.reset}${this.colors[level]}[${timestamp}]${this.colors.reset} ${this.colors.module}[${this.moduleName}]${this.colors.reset} ${this.colors[level]}${message}${this.colors.reset}`;
- }
- error(...message) {
- console.error(this.formatMessage("error", ...message));
- }
- info(...message) {
- console.info(this.formatMessage("info", ...message));
- }
- warn(...message) {
- console.warn(this.formatMessage("warn", ...message));
- }
- log(...message) {
- console.log(this.formatMessage("info", ...message));
- }
- debug(...message) {
- console.debug(this.formatMessage("debug", ...message));
- }
- };
+ // src/utils/index.ts
+ import winston from "winston";
+ var createLogger = (label) => winston.createLogger({
+ levels: {
+ error: 0,
+ warn: 1,
+ info: 2,
+ http: 3,
+ verbose: 4,
+ debug: 5,
+ silly: 6
+ },
+ format: winston.format.combine(
+ winston.format.label({ label }),
+ winston.format.colorize(),
+ winston.format.timestamp({
+ format: () => {
+ return (/* @__PURE__ */ new Date()).toLocaleString("en-US");
+ }
+ }),
+ winston.format.align(),
+ winston.format.printf(
+ (info) => `\x1B[34m(${info.label})\x1B[0m \x1B[33m${info.timestamp}\x1B[0m [${info.level}]: ${info.message}`
+ )
+ ),
+ transports: [new winston.transports.Console()]
+ });
 
  // src/modules/spider/index.ts
  var SpiderScanner = class {
@@ -50,12 +37,26 @@ var SpiderScanner = class {
  "User-Agent": new UserAgent().toString()
  };
  url;
- logger = new Logger("Spider");
- constructor(url) {
+ logger = createLogger("SpiderScanner");
+ depth;
+ concurrency;
+ retries;
+ timeout;
+ constructor(url, options = {}) {
+ const {
+ depth = 250,
+ concurrency = 5,
+ retries = 3,
+ timeout = 5e3
+ } = options;
+ this.depth = depth;
+ this.concurrency = concurrency;
+ this.retries = retries;
+ this.timeout = timeout;
  try {
  this.url = new URL(url);
  this.logger.info(
- `Initialized with URL: ${url} & User-Agent: ${this.header["User-Agent"]}`
+ `Initialized with URL: ${url}, User-Agent: ${this.header["User-Agent"]}`
  );
  } catch (error) {
  if (error instanceof TypeError) {
@@ -66,7 +67,6 @@ var SpiderScanner = class {
  throw error;
  }
  }
- // Normalize domains (removes 'www.')
  normalizeDomain(domain) {
  return domain.startsWith("www.") ? domain.slice(4) : domain;
  }
@@ -87,20 +87,35 @@ var SpiderScanner = class {
  return false;
  }
  }
- async fetchUrl(url) {
- try {
- this.logger.debug(`Fetching URL: ${url}`);
- const response = await fetch(url, { headers: this.header });
- if (!response.ok) {
+ async fetchWithRetries(url, retries) {
+ for (let attempt = 1; attempt <= retries; attempt++) {
+ const controller = new AbortController();
+ const timeoutId = setTimeout(() => controller.abort(), this.timeout);
+ try {
+ this.logger.debug(`Fetching URL (Attempt ${attempt}): ${url}`);
+ const randomUserAgent = new UserAgent().toString();
+ this.logger.info(`Changing User-Agent to: ${randomUserAgent}`);
+ this.header["User-Agent"] = randomUserAgent;
+ const response = await fetch(url, {
+ headers: this.header,
+ signal: controller.signal,
+ redirect: "follow"
+ });
+ clearTimeout(timeoutId);
+ if (response.ok) {
+ this.logger.info(`Successfully fetched URL: ${url}`);
+ return await response.text();
+ }
  this.logger.warn(`Failed to fetch URL (${response.status}): ${url}`);
- return null;
+ } catch (error) {
+ if (error.name === "AbortError") {
+ this.logger.warn(`Fetch timed out: ${url}`);
+ } else {
+ this.logger.error(`Error fetching URL: ${url} - ${error}`);
+ }
  }
- this.logger.info(`Successfully fetched URL: ${url}`);
- return await response.text();
- } catch (error) {
- this.logger.error(`Error fetching URL: ${url} - ${error}`);
- return null;
  }
+ return null;
  }
  extractLinks(html) {
  const { JSDOM } = jsdom;
@@ -113,33 +128,38 @@ var SpiderScanner = class {
  );
  return internalLinks.map((link) => this.convertRelativeUrlToAbsolute(link));
  }
- extractForms(html) {
- const { JSDOM } = jsdom;
- const dom = new JSDOM(html);
- const forms = Array.from(dom.window.document.querySelectorAll("form"));
- this.logger.debug(`Extracted ${forms.length} forms from HTML content`);
- return forms.map((form, index) => {
- const fields = Array.from(form.querySelectorAll("input")).map(
- (input) => ({
- name: input.name,
- id: input.id,
- class: input.className,
- type: input.type
- })
- );
- return {
- id: index,
- url: this.url.href,
- fields
- };
- });
- }
- // Main function to scan the website with concurrency support and return both links and forms
- async crawl(depth = 250, concurrency = 5) {
+ async crawl() {
  const visited = /* @__PURE__ */ new Set();
  const queue = /* @__PURE__ */ new Set([this.url.href]);
  const resultLinks = /* @__PURE__ */ new Set();
- const resultForms = /* @__PURE__ */ new Set();
+ const assetExtensions = [
+ ".css",
+ ".js",
+ ".png",
+ ".jpg",
+ ".jpeg",
+ ".gif",
+ ".svg",
+ ".ico",
+ ".webp",
+ ".mp4",
+ ".mp3",
+ ".wav",
+ ".avi",
+ ".mov",
+ ".webm",
+ ".pdf",
+ ".doc",
+ ".docx",
+ ".xls",
+ ".xlsx",
+ ".ppt",
+ ".pptx",
+ ".zip",
+ ".rar",
+ ".tar",
+ ".gz"
+ ];
  const fetchAndExtract = async (currentUrl) => {
  if (visited.has(currentUrl)) {
  this.logger.debug(`Skipping already visited URL: ${currentUrl}`);
@@ -147,15 +167,18 @@ var SpiderScanner = class {
  }
  visited.add(currentUrl);
  this.logger.info(`Visiting URL: ${currentUrl}`);
- const html = await this.fetchUrl(currentUrl);
+ const html = await this.fetchWithRetries(currentUrl, this.retries);
  if (!html) return;
  const links = this.extractLinks(html);
- const forms = this.extractForms(html);
- for (const form of forms) {
- resultForms.add(form);
+ for (const link of links) {
+ if (assetExtensions.some((ext) => link.endsWith(ext))) {
+ this.logger.debug(`Ignoring asset link: ${link}`);
+ continue;
+ }
+ this.logger.debug(`Found link: ${link}`);
  }
  for (const link of links) {
- if (!visited.has(link) && queue.size < depth) {
+ if (!visited.has(link) && queue.size < this.depth) {
  queue.add(link);
  this.logger.debug(`Added to queue: ${link}`);
  }
  }
@@ -163,91 +186,25 @@ var SpiderScanner = class {
  resultLinks.add(currentUrl);
  };
  const processBatch = async () => {
- const batch = Array.from(queue).slice(0, concurrency);
+ const batch = Array.from(queue).slice(0, this.concurrency);
  for (const url of batch) {
  queue.delete(url);
  }
  await Promise.allSettled(batch.map((url) => fetchAndExtract(url)));
  };
  this.logger.info(
- `Starting crawl with depth: ${depth}, concurrency: ${concurrency}`
+ `Starting crawl with depth: ${this.depth}, concurrency: ${this.concurrency}`
  );
- while (queue.size > 0 && visited.size < depth) {
+ while (queue.size > 0 && visited.size < this.depth) {
  await processBatch();
  }
  this.logger.info(
- `Crawling completed. Total pages visited: ${resultLinks.size}, Total forms found: ${resultForms.size}`
+ `Crawling completed. Total pages visited: ${resultLinks.size}`
  );
- return {
- links: Array.from(resultLinks),
- forms: Array.from(resultForms)
- };
+ return Array.from(resultLinks);
  }
  };
-
- // src/index.ts
- var commandHandler = yargs(hideBin(process.argv));
- commandHandler.command(
- "xss",
- "Scan for XSS vulnerabilities",
- {
- url: {
- describe: "URL to scan",
- demandOption: true,
- type: "string",
- coerce: (value) => {
- try {
- new URL(value);
- return value;
- } catch (err) {
- throw new Error("Invalid URL format");
- }
- }
- },
- wordlist: {
- describe: "Path to wordlist file",
- type: "string"
- }
- },
- (argv) => {
- console.log("Scanning for XSS vulnerabilities...");
- console.log(`URL: ${argv.url}`);
- console.log(`Wordlist: ${argv.wordlist || "Default"}`);
- }
- );
- commandHandler.command(
- "spider",
- "Scan a website for vulnerabilities",
- {
- url: {
- describe: "URL to scan",
- demandOption: true,
- type: "string",
- coerce: (value) => {
- try {
- new URL(value);
- return value;
- } catch (err) {
- throw new Error("Invalid URL format");
- }
- }
- }
- },
- (argv) => {
- const spider = new SpiderScanner(argv.url);
- spider.crawl().then((output) => {
- console.log(
- JSON.stringify(
- {
- forms: output.forms,
- links: output.links
- },
- null,
- 2
- )
- );
- });
- }
- );
- commandHandler.parse();
+ export {
+ SpiderScanner
+ };
  //# sourceMappingURL=index.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
- "sources": ["../src/index.ts", "../src/modules/spider/index.ts", "../src/lib/logger.ts"],
- "sourcesContent": ["#!/usr/bin/env node\n\nimport yargs from \"yargs\";\nimport { hideBin } from \"yargs/helpers\";\nimport { SpiderScanner } from \"./modules\";\n\nconst commandHandler = yargs(hideBin(process.argv));\n\n/**\n * Command to scan for XSS vulnerabilities\n *\n * @param {string} url - URL to scan\n * @param {string} wordlist - Path to wordlist file\n * @returns {void}\n *\n * @example\n * npx sentinel-scanner xss --url https://example.com\n */\ncommandHandler.command(\n\t\"xss\",\n\t\"Scan for XSS vulnerabilities\",\n\t{\n\t\turl: {\n\t\t\tdescribe: \"URL to scan\",\n\t\t\tdemandOption: true,\n\t\t\ttype: \"string\",\n\t\t\tcoerce: (value) => {\n\t\t\t\ttry {\n\t\t\t\t\tnew URL(value);\n\t\t\t\t\treturn value;\n\t\t\t\t} catch (err) {\n\t\t\t\t\tthrow new Error(\"Invalid URL format\");\n\t\t\t\t}\n\t\t\t},\n\t\t},\n\t\twordlist: {\n\t\t\tdescribe: \"Path to wordlist file\",\n\t\t\ttype: \"string\",\n\t\t},\n\t},\n\t(argv) => {\n\t\tconsole.log(\"Scanning for XSS vulnerabilities...\");\n\t\tconsole.log(`URL: ${argv.url}`);\n\t\tconsole.log(`Wordlist: ${argv.wordlist || \"Default\"}`);\n\t},\n);\n\n// Command to Spider a website\ncommandHandler.command(\n\t\"spider\",\n\t\"Scan a website for vulnerabilities\",\n\t{\n\t\turl: {\n\t\t\tdescribe: \"URL to scan\",\n\t\t\tdemandOption: true,\n\t\t\ttype: \"string\",\n\t\t\tcoerce: (value) => {\n\t\t\t\ttry {\n\t\t\t\t\tnew URL(value);\n\t\t\t\t\treturn value;\n\t\t\t\t} catch (err) {\n\t\t\t\t\tthrow new Error(\"Invalid URL format\");\n\t\t\t\t}\n\t\t\t},\n\t\t},\n\t},\n\t(argv) => {\n\t\tconst spider = new SpiderScanner(argv.url);\n\n\t\tspider.crawl().then((output) => {\n\t\t\tconsole.log(\n\t\t\t\tJSON.stringify(\n\t\t\t\t\t{\n\t\t\t\t\t\tforms: output.forms,\n\t\t\t\t\t\tlinks: output.links,\n\t\t\t\t\t},\n\t\t\t\t\tnull,\n\t\t\t\t\t2,\n\t\t\t\t),\n\t\t\t);\n\t\t});\n\t},\n);\n\n// Parse arguments and handle commands\ncommandHandler.parse();\n", "import fetch from \"isomorphic-fetch\";\nimport jsdom from \"jsdom\";\nimport UserAgent from \"user-agents\";\nimport Logger from \"../../lib/logger\";\n\nexport type FormOutput = {\n\tid: number;\n\turl: string;\n\tfields: Array<{ name: string; id: string; class: string; type: string }>;\n};\n\nexport type CrawlOutput = {\n\tlinks: string[];\n\tforms: FormOutput[];\n};\n\nexport default class SpiderScanner {\n\tprivate header: Record<string, string> = {\n\t\t\"User-Agent\": new UserAgent().toString(),\n\t};\n\tprivate url: URL;\n\tprivate logger = new Logger(\"Spider\");\n\n\tconstructor(url: string) {\n\t\ttry {\n\t\t\tthis.url = new URL(url);\n\t\t\tthis.logger.info(\n\t\t\t\t`Initialized with URL: ${url} & User-Agent: ${this.header[\"User-Agent\"]}`,\n\t\t\t);\n\t\t} catch (error) {\n\t\t\tif (error instanceof TypeError) {\n\t\t\t\tthis.logger.error(\"Invalid URL\");\n\t\t\t\tthrow new Error(\"Invalid URL\");\n\t\t\t}\n\t\t\tthis.logger.error(`Unexpected error in constructor: ${error}`);\n\t\t\tthrow error;\n\t\t}\n\t}\n\n\t// Normalize domains (removes 'www.')\n\tprivate normalizeDomain(domain: string): string {\n\t\treturn domain.startsWith(\"www.\") ? 
domain.slice(4) : domain;\n\t}\n\n\tprivate convertRelativeUrlToAbsolute(url: string): string {\n\t\treturn new URL(url, this.url.toString()).toString();\n\t}\n\n\tprivate isInternalLink(url: string): boolean {\n\t\ttry {\n\t\t\tconst parsedUrl = new URL(url, this.url.href);\n\t\t\tif (![\"http:\", \"https:\"].includes(parsedUrl.protocol)) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t\tconst baseDomain = this.normalizeDomain(this.url.hostname);\n\t\t\tconst parsedDomain = this.normalizeDomain(parsedUrl.hostname);\n\t\t\treturn parsedDomain === baseDomain;\n\t\t} catch (error) {\n\t\t\tthis.logger.warn(`Error parsing URL: ${url} - ${error}`);\n\t\t\treturn false;\n\t\t}\n\t}\n\n\tprivate async fetchUrl(url: string): Promise<string | null> {\n\t\ttry {\n\t\t\tthis.logger.debug(`Fetching URL: ${url}`);\n\t\t\tconst response = await fetch(url, { headers: this.header });\n\t\t\tif (!response.ok) {\n\t\t\t\tthis.logger.warn(`Failed to fetch URL (${response.status}): ${url}`);\n\t\t\t\treturn null;\n\t\t\t}\n\t\t\tthis.logger.info(`Successfully fetched URL: ${url}`);\n\t\t\treturn await response.text();\n\t\t} catch (error) {\n\t\t\tthis.logger.error(`Error fetching URL: ${url} - ${error}`);\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tprivate extractLinks(html: string): string[] {\n\t\tconst { JSDOM } = jsdom;\n\t\tconst dom = new JSDOM(html);\n\t\tconst links = Array.from(dom.window.document.querySelectorAll(\"a\"));\n\t\tconst hrefs = links.map((link) => link.href);\n\t\tconst internalLinks = hrefs.filter((href) => this.isInternalLink(href));\n\t\tthis.logger.debug(\n\t\t\t`Extracted ${internalLinks.length} internal links from HTML content`,\n\t\t);\n\t\treturn internalLinks.map((link) => this.convertRelativeUrlToAbsolute(link));\n\t}\n\n\tprivate extractForms(html: string): FormOutput[] {\n\t\tconst { JSDOM } = jsdom;\n\t\tconst dom = new JSDOM(html);\n\t\tconst forms = Array.from(dom.window.document.querySelectorAll(\"form\"));\n\t\tthis.logger.debug(`Extracted ${forms.length} forms from HTML content`);\n\n\t\treturn forms.map((form, index) => {\n\t\t\tconst fields = Array.from(form.querySelectorAll(\"input\")).map(\n\t\t\t\t(input) => ({\n\t\t\t\t\tname: input.name,\n\t\t\t\t\tid: input.id,\n\t\t\t\t\tclass: input.className,\n\t\t\t\t\ttype: input.type,\n\t\t\t\t}),\n\t\t\t);\n\n\t\t\treturn {\n\t\t\t\tid: index,\n\t\t\t\turl: this.url.href,\n\t\t\t\tfields,\n\t\t\t};\n\t\t});\n\t}\n\n\t// Main function to scan the website with concurrency support and return both links and forms\n\tpublic async crawl(depth = 250, concurrency = 5): Promise<CrawlOutput> {\n\t\tconst visited = new Set<string>();\n\t\tconst queue = new Set<string>([this.url.href]);\n\t\tconst resultLinks = new Set<string>();\n\t\tconst resultForms = new Set<FormOutput>();\n\n\t\tconst fetchAndExtract = async (currentUrl: string) => {\n\t\t\tif (visited.has(currentUrl)) {\n\t\t\t\tthis.logger.debug(`Skipping already visited URL: ${currentUrl}`);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tvisited.add(currentUrl);\n\t\t\tthis.logger.info(`Visiting URL: ${currentUrl}`);\n\n\t\t\tconst html = await this.fetchUrl(currentUrl);\n\t\t\tif (!html) return;\n\n\t\t\t// Extract links and forms\n\t\t\tconst links = this.extractLinks(html);\n\t\t\tconst forms = this.extractForms(html);\n\n\t\t\tfor (const form of forms) {\n\t\t\t\tresultForms.add(form);\n\t\t\t}\n\n\t\t\tfor (const link of links) {\n\t\t\t\tif (!visited.has(link) && queue.size < depth) {\n\t\t\t\t\tqueue.add(link);\n\t\t\t\t\tthis.logger.debug(`Added to queue: 
${link}`);\n\t\t\t\t}\n\t\t\t}\n\t\t\tresultLinks.add(currentUrl);\n\t\t};\n\n\t\tconst processBatch = async () => {\n\t\t\tconst batch = Array.from(queue).slice(0, concurrency);\n\t\t\tfor (const url of batch) {\n\t\t\t\tqueue.delete(url);\n\t\t\t}\n\t\t\tawait Promise.allSettled(batch.map((url) => fetchAndExtract(url)));\n\t\t};\n\n\t\tthis.logger.info(\n\t\t\t`Starting crawl with depth: ${depth}, concurrency: ${concurrency}`,\n\t\t);\n\t\twhile (queue.size > 0 && visited.size < depth) {\n\t\t\tawait processBatch();\n\t\t}\n\n\t\tthis.logger.info(\n\t\t\t`Crawling completed. Total pages visited: ${resultLinks.size}, Total forms found: ${resultForms.size}`,\n\t\t);\n\n\t\treturn {\n\t\t\tlinks: Array.from(resultLinks),\n\t\t\tforms: Array.from(resultForms),\n\t\t};\n\t}\n}\n", "export default class Logger {\n\tprivate moduleName: string;\n\tprivate colors = {\n\t\terror: \"\\x1b[31m\",\n\t\tinfo: \"\\x1b[32m\",\n\t\twarn: \"\\x1b[33m\",\n\t\tdebug: \"\\x1b[35m\",\n\t\treset: \"\\x1b[0m\",\n\t\tmodule: \"\\x1b[46m\",\n\t};\n\n\tconstructor(moduleName: string) {\n\t\tthis.moduleName = moduleName;\n\t}\n\n\tprivate formatMessage(\n\t\tlevel: keyof typeof this.colors,\n\t\t...message: string[]\n\t): string {\n\t\tconst timestamp = new Date().toTimeString().split(\" \")[0];\n\t\treturn `[${level}] ${this.colors[level]}${this.colors.reset}${this.colors[level]}[${timestamp}]${this.colors.reset} ${this.colors.module}[${this.moduleName}]${this.colors.reset} ${this.colors[level]}${message}${this.colors.reset}`;\n\t}\n\n\tpublic error(...message: string[]): void {\n\t\tconsole.error(this.formatMessage(\"error\", ...message));\n\t}\n\n\tpublic info(...message: string[]): void {\n\t\tconsole.info(this.formatMessage(\"info\", ...message));\n\t}\n\n\tpublic warn(...message: string[]): void {\n\t\tconsole.warn(this.formatMessage(\"warn\", ...message));\n\t}\n\n\tpublic log(...message: string[]): void {\n\t\tconsole.log(this.formatMessage(\"info\", ...message));\n\t}\n\n\tpublic debug(...message: string[]): void {\n\t\tconsole.debug(this.formatMessage(\"debug\", ...message));\n\t}\n}\n"],
- "mappings": ";;;AAEA,OAAO,WAAW;AAClB,SAAS,eAAe;;;ACHxB,OAAO,WAAW;AAClB,OAAO,WAAW;AAClB,OAAO,eAAe;;;ACFtB,IAAqB,SAArB,MAA4B;AAAA,EACnB;AAAA,EACA,SAAS;AAAA,IAChB,OAAO;AAAA,IACP,MAAM;AAAA,IACN,MAAM;AAAA,IACN,OAAO;AAAA,IACP,OAAO;AAAA,IACP,QAAQ;AAAA,EACT;AAAA,EAEA,YAAY,YAAoB;AAC/B,SAAK,aAAa;AAAA,EACnB;AAAA,EAEQ,cACP,UACG,SACM;AACT,UAAM,aAAY,oBAAI,KAAK,GAAE,aAAa,EAAE,MAAM,GAAG,EAAE,CAAC;AACxD,WAAO,IAAI,KAAK,KAAK,KAAK,OAAO,KAAK,CAAC,GAAG,KAAK,OAAO,KAAK,GAAG,KAAK,OAAO,KAAK,CAAC,IAAI,SAAS,IAAI,KAAK,OAAO,KAAK,IAAI,KAAK,OAAO,MAAM,IAAI,KAAK,UAAU,IAAI,KAAK,OAAO,KAAK,IAAI,KAAK,OAAO,KAAK,CAAC,GAAG,OAAO,GAAG,KAAK,OAAO,KAAK;AAAA,EACrO;AAAA,EAEO,SAAS,SAAyB;AACxC,YAAQ,MAAM,KAAK,cAAc,SAAS,GAAG,OAAO,CAAC;AAAA,EACtD;AAAA,EAEO,QAAQ,SAAyB;AACvC,YAAQ,KAAK,KAAK,cAAc,QAAQ,GAAG,OAAO,CAAC;AAAA,EACpD;AAAA,EAEO,QAAQ,SAAyB;AACvC,YAAQ,KAAK,KAAK,cAAc,QAAQ,GAAG,OAAO,CAAC;AAAA,EACpD;AAAA,EAEO,OAAO,SAAyB;AACtC,YAAQ,IAAI,KAAK,cAAc,QAAQ,GAAG,OAAO,CAAC;AAAA,EACnD;AAAA,EAEO,SAAS,SAAyB;AACxC,YAAQ,MAAM,KAAK,cAAc,SAAS,GAAG,OAAO,CAAC;AAAA,EACtD;AACD;;;AD1BA,IAAqB,gBAArB,MAAmC;AAAA,EAC1B,SAAiC;AAAA,IACxC,cAAc,IAAI,UAAU,EAAE,SAAS;AAAA,EACxC;AAAA,EACQ;AAAA,EACA,SAAS,IAAI,OAAO,QAAQ;AAAA,EAEpC,YAAY,KAAa;AACxB,QAAI;AACH,WAAK,MAAM,IAAI,IAAI,GAAG;AACtB,WAAK,OAAO;AAAA,QACX,yBAAyB,GAAG,kBAAkB,KAAK,OAAO,YAAY,CAAC;AAAA,MACxE;AAAA,IACD,SAAS,OAAO;AACf,UAAI,iBAAiB,WAAW;AAC/B,aAAK,OAAO,MAAM,aAAa;AAC/B,cAAM,IAAI,MAAM,aAAa;AAAA,MAC9B;AACA,WAAK,OAAO,MAAM,oCAAoC,KAAK,EAAE;AAC7D,YAAM;AAAA,IACP;AAAA,EACD;AAAA;AAAA,EAGQ,gBAAgB,QAAwB;AAC/C,WAAO,OAAO,WAAW,MAAM,IAAI,OAAO,MAAM,CAAC,IAAI;AAAA,EACtD;AAAA,EAEQ,6BAA6B,KAAqB;AACzD,WAAO,IAAI,IAAI,KAAK,KAAK,IAAI,SAAS,CAAC,EAAE,SAAS;AAAA,EACnD;AAAA,EAEQ,eAAe,KAAsB;AAC5C,QAAI;AACH,YAAM,YAAY,IAAI,IAAI,KAAK,KAAK,IAAI,IAAI;AAC5C,UAAI,CAAC,CAAC,SAAS,QAAQ,EAAE,SAAS,UAAU,QAAQ,GAAG;AACtD,eAAO;AAAA,MACR;AACA,YAAM,aAAa,KAAK,gBAAgB,KAAK,IAAI,QAAQ;AACzD,YAAM,eAAe,KAAK,gBAAgB,UAAU,QAAQ;AAC5D,aAAO,iBAAiB;AAAA,IACzB,SAAS,OAAO;AACf,WAAK,OAAO,KAAK,sBAAsB,GAAG,MAAM,KAAK,EAAE;AACvD,aAAO;AAAA,IACR;AAAA,EACD;AAAA,EAEA,MAAc,SAAS,KAAqC;AAC3D,QAAI;AACH,WAAK,OAAO,MAAM,iBAAiB,GAAG,EAAE;AACxC,YAAM,WAAW,MAAM,MAAM,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;AAC1D,UAAI,CAAC,SAAS,IAAI;AACjB,aAAK,OAAO,KAAK,wBAAwB,SAAS,MAAM,MAAM,GAAG,EAAE;AACnE,eAAO;AAAA,MACR;AACA,WAAK,OAAO,KAAK,6BAA6B,GAAG,EAAE;AACnD,aAAO,MAAM,SAAS,KAAK;AAAA,IAC5B,SAAS,OAAO;AACf,WAAK,OAAO,MAAM,uBAAuB,GAAG,MAAM,KAAK,EAAE;AACzD,aAAO;AAAA,IACR;AAAA,EACD;AAAA,EAEQ,aAAa,MAAwB;AAC5C,UAAM,EAAE,MAAM,IAAI;AAClB,UAAM,MAAM,IAAI,MAAM,IAAI;AAC1B,UAAM,QAAQ,MAAM,KAAK,IAAI,OAAO,SAAS,iBAAiB,GAAG,CAAC;AAClE,UAAM,QAAQ,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI;AAC3C,UAAM,gBAAgB,MAAM,OAAO,CAAC,SAAS,KAAK,eAAe,IAAI,CAAC;AACtE,SAAK,OAAO;AAAA,MACX,aAAa,cAAc,MAAM;AAAA,IAClC;AACA,WAAO,cAAc,IAAI,CAAC,SAAS,KAAK,6BAA6B,IAAI,CAAC;AAAA,EAC3E;AAAA,EAEQ,aAAa,MAA4B;AAChD,UAAM,EAAE,MAAM,IAAI;AAClB,UAAM,MAAM,IAAI,MAAM,IAAI;AAC1B,UAAM,QAAQ,MAAM,KAAK,IAAI,OAAO,SAAS,iBAAiB,MAAM,CAAC;AACrE,SAAK,OAAO,MAAM,aAAa,MAAM,MAAM,0BAA0B;AAErE,WAAO,MAAM,IAAI,CAAC,MAAM,UAAU;AACjC,YAAM,SAAS,MAAM,KAAK,KAAK,iBAAiB,OAAO,CAAC,EAAE;AAAA,QACzD,CAAC,WAAW;AAAA,UACX,MAAM,MAAM;AAAA,UACZ,IAAI,MAAM;AAAA,UACV,OAAO,MAAM;AAAA,UACb,MAAM,MAAM;AAAA,QACb;AAAA,MACD;AAEA,aAAO;AAAA,QACN,IAAI;AAAA,QACJ,KAAK,KAAK,IAAI;AAAA,QACd;AAAA,MACD;AAAA,IACD,CAAC;AAAA,EACF;AAAA;AAAA,EAGA,MAAa,MAAM,QAAQ,KAAK,cAAc,GAAyB;AACtE,UAAM,UAAU,oBAAI,IAAY;AAChC,UAAM,QAAQ,oBAAI,IAAY,CAAC,KAAK,IAAI,IAAI,CAAC;AAC7C,UAAM,cAAc,oBAAI,IAAY;AACpC,UAAM,cAAc,oBAAI,IAAgB;AAExC,UAAM,kBAAkB,OAAO,eAAuB;AACrD,UAAI,QAAQ,IAAI,UAAU,GAAG;AAC5B,aAAK,OAAO,MAAM,iCAAiC,UAAU,EAAE;AAC/D;AAAA,MACD;AACA,cAAQ,IAAI,UAAU;AACtB,WAAK,OAA
O,KAAK,iBAAiB,UAAU,EAAE;AAE9C,YAAM,OAAO,MAAM,KAAK,SAAS,UAAU;AAC3C,UAAI,CAAC,KAAM;AAGX,YAAM,QAAQ,KAAK,aAAa,IAAI;AACpC,YAAM,QAAQ,KAAK,aAAa,IAAI;AAEpC,iBAAW,QAAQ,OAAO;AACzB,oBAAY,IAAI,IAAI;AAAA,MACrB;AAEA,iBAAW,QAAQ,OAAO;AACzB,YAAI,CAAC,QAAQ,IAAI,IAAI,KAAK,MAAM,OAAO,OAAO;AAC7C,gBAAM,IAAI,IAAI;AACd,eAAK,OAAO,MAAM,mBAAmB,IAAI,EAAE;AAAA,QAC5C;AAAA,MACD;AACA,kBAAY,IAAI,UAAU;AAAA,IAC3B;AAEA,UAAM,eAAe,YAAY;AAChC,YAAM,QAAQ,MAAM,KAAK,KAAK,EAAE,MAAM,GAAG,WAAW;AACpD,iBAAW,OAAO,OAAO;AACxB,cAAM,OAAO,GAAG;AAAA,MACjB;AACA,YAAM,QAAQ,WAAW,MAAM,IAAI,CAAC,QAAQ,gBAAgB,GAAG,CAAC,CAAC;AAAA,IAClE;AAEA,SAAK,OAAO;AAAA,MACX,8BAA8B,KAAK,kBAAkB,WAAW;AAAA,IACjE;AACA,WAAO,MAAM,OAAO,KAAK,QAAQ,OAAO,OAAO;AAC9C,YAAM,aAAa;AAAA,IACpB;AAEA,SAAK,OAAO;AAAA,MACX,4CAA4C,YAAY,IAAI,wBAAwB,YAAY,IAAI;AAAA,IACrG;AAEA,WAAO;AAAA,MACN,OAAO,MAAM,KAAK,WAAW;AAAA,MAC7B,OAAO,MAAM,KAAK,WAAW;AAAA,IAC9B;AAAA,EACD;AACD;;;ADxKA,IAAM,iBAAiB,MAAM,QAAQ,QAAQ,IAAI,CAAC;AAYlD,eAAe;AAAA,EACd;AAAA,EACA;AAAA,EACA;AAAA,IACC,KAAK;AAAA,MACJ,UAAU;AAAA,MACV,cAAc;AAAA,MACd,MAAM;AAAA,MACN,QAAQ,CAAC,UAAU;AAClB,YAAI;AACH,cAAI,IAAI,KAAK;AACb,iBAAO;AAAA,QACR,SAAS,KAAK;AACb,gBAAM,IAAI,MAAM,oBAAoB;AAAA,QACrC;AAAA,MACD;AAAA,IACD;AAAA,IACA,UAAU;AAAA,MACT,UAAU;AAAA,MACV,MAAM;AAAA,IACP;AAAA,EACD;AAAA,EACA,CAAC,SAAS;AACT,YAAQ,IAAI,qCAAqC;AACjD,YAAQ,IAAI,QAAQ,KAAK,GAAG,EAAE;AAC9B,YAAQ,IAAI,aAAa,KAAK,YAAY,SAAS,EAAE;AAAA,EACtD;AACD;AAGA,eAAe;AAAA,EACd;AAAA,EACA;AAAA,EACA;AAAA,IACC,KAAK;AAAA,MACJ,UAAU;AAAA,MACV,cAAc;AAAA,MACd,MAAM;AAAA,MACN,QAAQ,CAAC,UAAU;AAClB,YAAI;AACH,cAAI,IAAI,KAAK;AACb,iBAAO;AAAA,QACR,SAAS,KAAK;AACb,gBAAM,IAAI,MAAM,oBAAoB;AAAA,QACrC;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAAA,EACA,CAAC,SAAS;AACT,UAAM,SAAS,IAAI,cAAc,KAAK,GAAG;AAEzC,WAAO,MAAM,EAAE,KAAK,CAAC,WAAW;AAC/B,cAAQ;AAAA,QACP,KAAK;AAAA,UACJ;AAAA,YACC,OAAO,OAAO;AAAA,YACd,OAAO,OAAO;AAAA,UACf;AAAA,UACA;AAAA,UACA;AAAA,QACD;AAAA,MACD;AAAA,IACD,CAAC;AAAA,EACF;AACD;AAGA,eAAe,MAAM;",
+ "sources": ["../src/modules/spider/index.ts", "../src/utils/index.ts"],
+ "sourcesContent": ["import fetch from \"isomorphic-fetch\";\nimport jsdom from \"jsdom\";\nimport UserAgent from \"user-agents\";\nimport { createLogger } from \"../../utils\";\n\nexport interface SpiderScannerOptions {\n\tdepth?: number;\n\tconcurrency?: number;\n\tretries?: number;\n\ttimeout?: number;\n}\n\nexport default class SpiderScanner {\n\tprivate header: Record<string, string> = {\n\t\t\"User-Agent\": new UserAgent().toString(),\n\t};\n\tprivate url: URL;\n\tprivate logger = createLogger(\"SpiderScanner\");\n\n\tprivate depth: number;\n\tprivate concurrency: number;\n\tprivate retries: number;\n\tprivate timeout: number;\n\n\tconstructor(url: string, options: SpiderScannerOptions = {}) {\n\t\tconst {\n\t\t\tdepth = 250,\n\t\t\tconcurrency = 5,\n\t\t\tretries = 3,\n\t\t\ttimeout = 5000,\n\t\t} = options;\n\t\tthis.depth = depth;\n\t\tthis.concurrency = concurrency;\n\t\tthis.retries = retries;\n\t\tthis.timeout = timeout;\n\n\t\ttry {\n\t\t\tthis.url = new URL(url);\n\t\t\tthis.logger.info(\n\t\t\t\t`Initialized with URL: ${url}, User-Agent: ${this.header[\"User-Agent\"]}`,\n\t\t\t);\n\t\t} catch (error) {\n\t\t\tif (error instanceof TypeError) {\n\t\t\t\tthis.logger.error(\"Invalid URL\");\n\t\t\t\tthrow new Error(\"Invalid URL\");\n\t\t\t}\n\t\t\tthis.logger.error(`Unexpected error in constructor: ${error}`);\n\t\t\tthrow error;\n\t\t}\n\t}\n\n\tprivate normalizeDomain(domain: string): string {\n\t\treturn domain.startsWith(\"www.\") ? domain.slice(4) : domain;\n\t}\n\n\tprivate convertRelativeUrlToAbsolute(url: string): string {\n\t\treturn new URL(url, this.url.toString()).toString();\n\t}\n\n\tprivate isInternalLink(url: string): boolean {\n\t\ttry {\n\t\t\tconst parsedUrl = new URL(url, this.url.href);\n\t\t\tif (![\"http:\", \"https:\"].includes(parsedUrl.protocol)) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t\tconst baseDomain = this.normalizeDomain(this.url.hostname);\n\t\t\tconst parsedDomain = this.normalizeDomain(parsedUrl.hostname);\n\t\t\treturn parsedDomain === baseDomain;\n\t\t} catch (error) {\n\t\t\tthis.logger.warn(`Error parsing URL: ${url} - ${error}`);\n\t\t\treturn false;\n\t\t}\n\t}\n\n\tprivate async fetchWithRetries(\n\t\turl: string,\n\t\tretries: number,\n\t): Promise<string | null> {\n\t\tfor (let attempt = 1; attempt <= retries; attempt++) {\n\t\t\tconst controller = new AbortController();\n\t\t\tconst timeoutId = setTimeout(() => controller.abort(), this.timeout);\n\n\t\t\ttry {\n\t\t\t\tthis.logger.debug(`Fetching URL (Attempt ${attempt}): ${url}`);\n\t\t\t\tconst randomUserAgent = new UserAgent().toString();\n\t\t\t\tthis.logger.info(`Changing User-Agent to: ${randomUserAgent}`);\n\t\t\t\tthis.header[\"User-Agent\"] = randomUserAgent;\n\t\t\t\tconst response = await fetch(url, {\n\t\t\t\t\theaders: this.header,\n\t\t\t\t\tsignal: controller.signal,\n\t\t\t\t\tredirect: \"follow\",\n\t\t\t\t});\n\n\t\t\t\tclearTimeout(timeoutId);\n\n\t\t\t\tif (response.ok) {\n\t\t\t\t\tthis.logger.info(`Successfully fetched URL: ${url}`);\n\t\t\t\t\treturn await response.text();\n\t\t\t\t}\n\n\t\t\t\tthis.logger.warn(`Failed to fetch URL (${response.status}): ${url}`);\n\t\t\t} catch (error) {\n\t\t\t\tif ((error as Error).name === \"AbortError\") {\n\t\t\t\t\tthis.logger.warn(`Fetch timed out: ${url}`);\n\t\t\t\t} else {\n\t\t\t\t\tthis.logger.error(`Error fetching URL: ${url} - ${error}`);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n\n\tprivate extractLinks(html: string): string[] {\n\t\tconst { JSDOM } = jsdom;\n\t\tconst dom = new JSDOM(html);\n\t\tconst 
links = Array.from(dom.window.document.querySelectorAll(\"a\"));\n\t\tconst hrefs = links.map((link) => link.href);\n\t\tconst internalLinks = hrefs.filter((href) => this.isInternalLink(href));\n\t\tthis.logger.debug(\n\t\t\t`Extracted ${internalLinks.length} internal links from HTML content`,\n\t\t);\n\t\treturn internalLinks.map((link) => this.convertRelativeUrlToAbsolute(link));\n\t}\n\n\tpublic async crawl(): Promise<Array<string>> {\n\t\tconst visited = new Set<string>();\n\t\tconst queue = new Set<string>([this.url.href]);\n\t\tconst resultLinks = new Set<string>();\n\n\t\t// Assets to ignore\n\t\tconst assetExtensions = [\n\t\t\t\".css\",\n\t\t\t\".js\",\n\t\t\t\".png\",\n\t\t\t\".jpg\",\n\t\t\t\".jpeg\",\n\t\t\t\".gif\",\n\t\t\t\".svg\",\n\t\t\t\".ico\",\n\t\t\t\".webp\",\n\t\t\t\".mp4\",\n\t\t\t\".mp3\",\n\t\t\t\".wav\",\n\t\t\t\".avi\",\n\t\t\t\".mov\",\n\t\t\t\".webm\",\n\t\t\t\".pdf\",\n\t\t\t\".doc\",\n\t\t\t\".docx\",\n\t\t\t\".xls\",\n\t\t\t\".xlsx\",\n\t\t\t\".ppt\",\n\t\t\t\".pptx\",\n\t\t\t\".zip\",\n\t\t\t\".rar\",\n\t\t\t\".tar\",\n\t\t\t\".gz\",\n\t\t];\n\n\t\tconst fetchAndExtract = async (currentUrl: string) => {\n\t\t\tif (visited.has(currentUrl)) {\n\t\t\t\tthis.logger.debug(`Skipping already visited URL: ${currentUrl}`);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tvisited.add(currentUrl);\n\t\t\tthis.logger.info(`Visiting URL: ${currentUrl}`);\n\n\t\t\tconst html = await this.fetchWithRetries(currentUrl, this.retries);\n\t\t\tif (!html) return;\n\n\t\t\tconst links = this.extractLinks(html);\n\n\t\t\t// Filter out asset links\n\t\t\tfor (const link of links) {\n\t\t\t\tif (assetExtensions.some((ext) => link.endsWith(ext))) {\n\t\t\t\t\tthis.logger.debug(`Ignoring asset link: ${link}`);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tthis.logger.debug(`Found link: ${link}`);\n\t\t\t}\n\n\t\t\tfor (const link of links) {\n\t\t\t\tif (!visited.has(link) && queue.size < this.depth) {\n\t\t\t\t\tqueue.add(link);\n\t\t\t\t\tthis.logger.debug(`Added to queue: ${link}`);\n\t\t\t\t}\n\t\t\t}\n\t\t\tresultLinks.add(currentUrl);\n\t\t};\n\n\t\tconst processBatch = async () => {\n\t\t\tconst batch = Array.from(queue).slice(0, this.concurrency);\n\t\t\tfor (const url of batch) {\n\t\t\t\tqueue.delete(url);\n\t\t\t}\n\t\t\tawait Promise.allSettled(batch.map((url) => fetchAndExtract(url)));\n\t\t};\n\n\t\tthis.logger.info(\n\t\t\t`Starting crawl with depth: ${this.depth}, concurrency: ${this.concurrency}`,\n\t\t);\n\t\twhile (queue.size > 0 && visited.size < this.depth) {\n\t\t\tawait processBatch();\n\t\t}\n\n\t\tthis.logger.info(\n\t\t\t`Crawling completed. Total pages visited: ${resultLinks.size}`,\n\t\t);\n\n\t\treturn Array.from(resultLinks);\n\t}\n}\n", "import winston from \"winston\";\n\nexport const createLogger = (label: string) =>\n\twinston.createLogger({\n\t\tlevels: {\n\t\t\terror: 0,\n\t\t\twarn: 1,\n\t\t\tinfo: 2,\n\t\t\thttp: 3,\n\t\t\tverbose: 4,\n\t\t\tdebug: 5,\n\t\t\tsilly: 6,\n\t\t},\n\t\tformat: winston.format.combine(\n\t\t\twinston.format.label({ label }),\n\t\t\twinston.format.colorize(),\n\t\t\twinston.format.timestamp({\n\t\t\t\tformat: () => {\n\t\t\t\t\treturn new Date().toLocaleString(\"en-US\");\n\t\t\t\t},\n\t\t\t}),\n\t\t\twinston.format.align(),\n\t\t\twinston.format.printf(\n\t\t\t\t(info) =>\n\t\t\t\t\t`\\x1b[34m(${info.label})\\x1b[0m \\x1b[33m${info.timestamp}\\x1b[0m [${info.level}]: ${info.message}`,\n\t\t\t),\n\t\t),\n\t\ttransports: [new winston.transports.Console()],\n\t});\n"],
+ "mappings": ";AAAA,OAAO,WAAW;AAClB,OAAO,WAAW;AAClB,OAAO,eAAe;;;ACFtB,OAAO,aAAa;AAEb,IAAM,eAAe,CAAC,UAC5B,QAAQ,aAAa;AAAA,EACpB,QAAQ;AAAA,IACP,OAAO;AAAA,IACP,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,SAAS;AAAA,IACT,OAAO;AAAA,IACP,OAAO;AAAA,EACR;AAAA,EACA,QAAQ,QAAQ,OAAO;AAAA,IACtB,QAAQ,OAAO,MAAM,EAAE,MAAM,CAAC;AAAA,IAC9B,QAAQ,OAAO,SAAS;AAAA,IACxB,QAAQ,OAAO,UAAU;AAAA,MACxB,QAAQ,MAAM;AACb,gBAAO,oBAAI,KAAK,GAAE,eAAe,OAAO;AAAA,MACzC;AAAA,IACD,CAAC;AAAA,IACD,QAAQ,OAAO,MAAM;AAAA,IACrB,QAAQ,OAAO;AAAA,MACd,CAAC,SACA,YAAY,KAAK,KAAK,oBAAoB,KAAK,SAAS,YAAY,KAAK,KAAK,MAAM,KAAK,OAAO;AAAA,IAClG;AAAA,EACD;AAAA,EACA,YAAY,CAAC,IAAI,QAAQ,WAAW,QAAQ,CAAC;AAC9C,CAAC;;;ADhBF,IAAqB,gBAArB,MAAmC;AAAA,EAC1B,SAAiC;AAAA,IACxC,cAAc,IAAI,UAAU,EAAE,SAAS;AAAA,EACxC;AAAA,EACQ;AAAA,EACA,SAAS,aAAa,eAAe;AAAA,EAErC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAER,YAAY,KAAa,UAAgC,CAAC,GAAG;AAC5D,UAAM;AAAA,MACL,QAAQ;AAAA,MACR,cAAc;AAAA,MACd,UAAU;AAAA,MACV,UAAU;AAAA,IACX,IAAI;AACJ,SAAK,QAAQ;AACb,SAAK,cAAc;AACnB,SAAK,UAAU;AACf,SAAK,UAAU;AAEf,QAAI;AACH,WAAK,MAAM,IAAI,IAAI,GAAG;AACtB,WAAK,OAAO;AAAA,QACX,yBAAyB,GAAG,iBAAiB,KAAK,OAAO,YAAY,CAAC;AAAA,MACvE;AAAA,IACD,SAAS,OAAO;AACf,UAAI,iBAAiB,WAAW;AAC/B,aAAK,OAAO,MAAM,aAAa;AAC/B,cAAM,IAAI,MAAM,aAAa;AAAA,MAC9B;AACA,WAAK,OAAO,MAAM,oCAAoC,KAAK,EAAE;AAC7D,YAAM;AAAA,IACP;AAAA,EACD;AAAA,EAEQ,gBAAgB,QAAwB;AAC/C,WAAO,OAAO,WAAW,MAAM,IAAI,OAAO,MAAM,CAAC,IAAI;AAAA,EACtD;AAAA,EAEQ,6BAA6B,KAAqB;AACzD,WAAO,IAAI,IAAI,KAAK,KAAK,IAAI,SAAS,CAAC,EAAE,SAAS;AAAA,EACnD;AAAA,EAEQ,eAAe,KAAsB;AAC5C,QAAI;AACH,YAAM,YAAY,IAAI,IAAI,KAAK,KAAK,IAAI,IAAI;AAC5C,UAAI,CAAC,CAAC,SAAS,QAAQ,EAAE,SAAS,UAAU,QAAQ,GAAG;AACtD,eAAO;AAAA,MACR;AACA,YAAM,aAAa,KAAK,gBAAgB,KAAK,IAAI,QAAQ;AACzD,YAAM,eAAe,KAAK,gBAAgB,UAAU,QAAQ;AAC5D,aAAO,iBAAiB;AAAA,IACzB,SAAS,OAAO;AACf,WAAK,OAAO,KAAK,sBAAsB,GAAG,MAAM,KAAK,EAAE;AACvD,aAAO;AAAA,IACR;AAAA,EACD;AAAA,EAEA,MAAc,iBACb,KACA,SACyB;AACzB,aAAS,UAAU,GAAG,WAAW,SAAS,WAAW;AACpD,YAAM,aAAa,IAAI,gBAAgB;AACvC,YAAM,YAAY,WAAW,MAAM,WAAW,MAAM,GAAG,KAAK,OAAO;AAEnE,UAAI;AACH,aAAK,OAAO,MAAM,yBAAyB,OAAO,MAAM,GAAG,EAAE;AAC7D,cAAM,kBAAkB,IAAI,UAAU,EAAE,SAAS;AACjD,aAAK,OAAO,KAAK,2BAA2B,eAAe,EAAE;AAC7D,aAAK,OAAO,YAAY,IAAI;AAC5B,cAAM,WAAW,MAAM,MAAM,KAAK;AAAA,UACjC,SAAS,KAAK;AAAA,UACd,QAAQ,WAAW;AAAA,UACnB,UAAU;AAAA,QACX,CAAC;AAED,qBAAa,SAAS;AAEtB,YAAI,SAAS,IAAI;AAChB,eAAK,OAAO,KAAK,6BAA6B,GAAG,EAAE;AACnD,iBAAO,MAAM,SAAS,KAAK;AAAA,QAC5B;AAEA,aAAK,OAAO,KAAK,wBAAwB,SAAS,MAAM,MAAM,GAAG,EAAE;AAAA,MACpE,SAAS,OAAO;AACf,YAAK,MAAgB,SAAS,cAAc;AAC3C,eAAK,OAAO,KAAK,oBAAoB,GAAG,EAAE;AAAA,QAC3C,OAAO;AACN,eAAK,OAAO,MAAM,uBAAuB,GAAG,MAAM,KAAK,EAAE;AAAA,QAC1D;AAAA,MACD;AAAA,IACD;AACA,WAAO;AAAA,EACR;AAAA,EAEQ,aAAa,MAAwB;AAC5C,UAAM,EAAE,MAAM,IAAI;AAClB,UAAM,MAAM,IAAI,MAAM,IAAI;AAC1B,UAAM,QAAQ,MAAM,KAAK,IAAI,OAAO,SAAS,iBAAiB,GAAG,CAAC;AAClE,UAAM,QAAQ,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI;AAC3C,UAAM,gBAAgB,MAAM,OAAO,CAAC,SAAS,KAAK,eAAe,IAAI,CAAC;AACtE,SAAK,OAAO;AAAA,MACX,aAAa,cAAc,MAAM;AAAA,IAClC;AACA,WAAO,cAAc,IAAI,CAAC,SAAS,KAAK,6BAA6B,IAAI,CAAC;AAAA,EAC3E;AAAA,EAEA,MAAa,QAAgC;AAC5C,UAAM,UAAU,oBAAI,IAAY;AAChC,UAAM,QAAQ,oBAAI,IAAY,CAAC,KAAK,IAAI,IAAI,CAAC;AAC7C,UAAM,cAAc,oBAAI,IAAY;AAGpC,UAAM,kBAAkB;AAAA,MACvB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAEA,UAAM,kBAAkB,OAAO,eAAuB;AACrD,UAAI,QAAQ,IAAI,UAAU,GAAG;AAC5B,aAAK,OAAO,MAAM,iCAAiC,UAAU,EAAE;AAC/D;AAAA,MACD;AACA,cAAQ,IAAI,UAAU;AACtB,WAAK,OAAO,KAAK,iBAAiB,UAAU,EAAE;AAE9C,YAAM,OAAO,MAA
M,KAAK,iBAAiB,YAAY,KAAK,OAAO;AACjE,UAAI,CAAC,KAAM;AAEX,YAAM,QAAQ,KAAK,aAAa,IAAI;AAGpC,iBAAW,QAAQ,OAAO;AACzB,YAAI,gBAAgB,KAAK,CAAC,QAAQ,KAAK,SAAS,GAAG,CAAC,GAAG;AACtD,eAAK,OAAO,MAAM,wBAAwB,IAAI,EAAE;AAChD;AAAA,QACD;AACA,aAAK,OAAO,MAAM,eAAe,IAAI,EAAE;AAAA,MACxC;AAEA,iBAAW,QAAQ,OAAO;AACzB,YAAI,CAAC,QAAQ,IAAI,IAAI,KAAK,MAAM,OAAO,KAAK,OAAO;AAClD,gBAAM,IAAI,IAAI;AACd,eAAK,OAAO,MAAM,mBAAmB,IAAI,EAAE;AAAA,QAC5C;AAAA,MACD;AACA,kBAAY,IAAI,UAAU;AAAA,IAC3B;AAEA,UAAM,eAAe,YAAY;AAChC,YAAM,QAAQ,MAAM,KAAK,KAAK,EAAE,MAAM,GAAG,KAAK,WAAW;AACzD,iBAAW,OAAO,OAAO;AACxB,cAAM,OAAO,GAAG;AAAA,MACjB;AACA,YAAM,QAAQ,WAAW,MAAM,IAAI,CAAC,QAAQ,gBAAgB,GAAG,CAAC,CAAC;AAAA,IAClE;AAEA,SAAK,OAAO;AAAA,MACX,8BAA8B,KAAK,KAAK,kBAAkB,KAAK,WAAW;AAAA,IAC3E;AACA,WAAO,MAAM,OAAO,KAAK,QAAQ,OAAO,KAAK,OAAO;AACnD,YAAM,aAAa;AAAA,IACpB;AAEA,SAAK,OAAO;AAAA,MACX,4CAA4C,YAAY,IAAI;AAAA,IAC7D;AAEA,WAAO,MAAM,KAAK,WAAW;AAAA,EAC9B;AACD;",
  "names": []
  }
package/package.json CHANGED
@@ -1,10 +1,10 @@
  {
  "name": "sentinel-scanner",
  "description": "[WIP] An open-source web app vulnerability scanner developed by Rebackk.",
- "version": "1.1.0-alpha.1",
+ "version": "1.1.0",
  "exports": "./build/index.js",
  "types": "./build/index.d.ts",
- "bin": "./build/index.js",
+ "bin": "./build/bin.js",
  "type": "module",
  "main": "./build/index.js",
  "license": "Apache-2.0",
@@ -28,10 +28,9 @@
  "test": "node --disable-warning=ExperimentalWarning --experimental-strip-types ./scripts/test.ts test",
  "test:watch": "node --disable-warning=ExperimentalWarning --experimental-strip-types ./scripts/test.ts test:watch",
  "test:coverage": "node --disable-warning=ExperimentalWarning --experimental-strip-types ./scripts/test.ts test:coverage",
- "spell:check": "cspell \"{README.md,CODE_OF_CONDUCT.md,CONTRIBUTING.md,.github/*.md,src/**/*.ts}\"",
+ "spell:check": "cspell \"{DISCLAIMER.md,README.md,CODE_OF_CONDUCT.md,CONTRIBUTING.md,.github/*.md,src/**/*.ts}\"",
  "cz": "cz",
  "semantic-release": "semantic-release",
- "postinstall": "prisma generate",
  "prepare": "husky"
  },
  "devDependencies": {
@@ -55,10 +54,10 @@
  "cz-conventional-changelog": "^3.3.0",
  "esbuild": "^0.23.1",
  "husky": "^9.1.6",
- "prisma": "^5.22.0",
  "rimraf": "^6.0.1",
  "semantic-release": "^24.2.0",
- "typescript": "^5.6.3"
+ "typescript": "^5.4.2",
+ "winston": "^3.17.0"
  },
  "config": {
  "commitizen": {
@@ -66,10 +65,9 @@
  }
  },
  "dependencies": {
- "@prisma/client": "^5.22.0",
  "isomorphic-fetch": "^3.0.0",
  "jsdom": "^25.0.1",
- "user-agents": "^1.1.353",
+ "user-agents": "^1.1.359",
  "yargs": "^17.7.2"
  }
  }
package/scripts/build.ts CHANGED
@@ -62,7 +62,10 @@ async function build(): Promise<void> {
  sourcemap: true,
  external: [],
  bundle: true,
- entryPoints: [path.join(srcPath, "index.ts")],
+ entryPoints: [
+ path.join(srcPath, "index.ts"),
+ path.join(srcPath, "bin.ts"),
+ ],
  outdir: buildPath,
  packages: "external",
  });
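
For context, this is the whole esbuild invocation the hunk above modifies, reconstructed as a self-contained sketch; srcPath and buildPath are assumptions standing in for the script's actual path variables, and only the entryPoints array changed in this release.

import path from "node:path";
import * as esbuild from "esbuild";

const srcPath = path.resolve("src"); // assumed source directory
const buildPath = path.resolve("build"); // assumed output directory

await esbuild.build({
	sourcemap: true,
	external: [],
	bundle: true,
	entryPoints: [
		path.join(srcPath, "index.ts"), // library entry, exports SpiderScanner
		path.join(srcPath, "bin.ts"), // new CLI entry referenced by the "bin" field
	],
	outdir: buildPath,
	packages: "external", // keep node_modules imports unbundled
});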
package/src/bin.ts ADDED
@@ -0,0 +1,20 @@
+ #!/usr/bin/env node --no-warnings
+
+ import yargs from "yargs";
+ import { hideBin } from "yargs/helpers";
+ import { spiderCommand } from "./commands/spider";
+
+ const commandHandler = yargs(hideBin(process.argv));
+
+ commandHandler.demandCommand();
+ commandHandler.scriptName("sentinel-scanner");
+ commandHandler.usage("Usage: $0 <command> [options]");
+ commandHandler.help().alias("help", "h");
+ commandHandler.version().alias("version", "v");
+ commandHandler.strict();
+ commandHandler.showHelpOnFail(true);
+
+ // Handle Commands
+ commandHandler.command(spiderCommand);
+
+ commandHandler.parse();
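
Since package.json's "bin" field now points at build/bin.js, this file becomes the installed CLI. A hypothetical invocation of the new spider command (flags per the coerce checks in src/commands/spider.ts: depth at most 250, concurrency between 1 and 20, timeout at most 25,000 ms, retries at most 10, and the output path must be a .json file that does not already exist):

npx sentinel-scanner spider --url https://example.com --depth 50 --concurrency 5 --output scan.json

When --output is omitted, the default from getDefaultFilePath() applies and results land in sentinel_output/spider_<timestamp>.json. One caveat: the "#!/usr/bin/env node --no-warnings" shebang may not work on Linux, where the kernel passes everything after env as a single argument; "env -S" would be needed to split the flag.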