coomer-downloader 3.4.2 → 3.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +146 -142
- package/package.json +1 -1
- package/src/api/providers/reddit.ts +48 -42
- package/src/cli/{args-handler.ts → parse-args.ts} +1 -1
- package/src/core/downloader.ts +6 -1
- package/src/index.ts +6 -50
- package/src/main.ts +42 -0
- package/src/utils/error.ts +30 -0
package/dist/index.js
CHANGED

@@ -421,142 +421,37 @@ var PlainFileAPI = class {
 };
 
 // src/api/providers/reddit.ts
-import * as cheerio2 from "cheerio";
 import { fetch as fetch4 } from "undici";
 
-// src/utils/logger.ts
-import pino from "pino";
-var logger = pino(
-  {
-    level: "debug"
-  },
-  pino.destination({
-    dest: "./debug.log",
-    append: false,
-    sync: true
-  })
-);
-var logger_default = logger;
-
-// src/api/providers/reddit.ts
-async function getUserPage(user, offset) {
-  const url = `https://nsfw.xxx/page/${offset}?nsfw[]=0&types[]=image&types[]=video&types[]=gallery&slider=1&jsload=1&user=${user}&_=${Date.now()}`;
-  return fetch4(url).then((r) => r.text());
-}
-async function getUserPosts(user) {
-  const posts = [];
-  for (let i = 1; i < 1e5; i++) {
-    const page = await getUserPage(user, i);
-    if (page.length < 1) break;
-    const $ = cheerio2.load(page);
-    const newPosts = $("a").map((_, a) => $(a).attr("href")).get().filter((href) => href?.startsWith("https://nsfw.xxx/post"));
-    logger_default.debug({ count: posts.length });
-    posts.push(...newPosts);
-  }
-  return posts;
-}
-async function getPostsData(posts) {
-  const filelist = new CoomerFileList();
-  for (const post of posts) {
-    const page = await fetch4(post).then((r) => r.text());
-    const $ = cheerio2.load(page);
-    const src = $(".sh-section .sh-section__image img").attr("src") || $(".sh-section .sh-section__image video source").attr("src") || null;
-    if (!src) continue;
-    const slug = post.split("post/")[1].split("?")[0];
-    const date = $(".sh-section .sh-section__passed").first().text().replace(/ /g, "-") || "";
-    const ext = src.split(".").pop();
-    const name = `${slug}-${date}.${ext}`;
-    logger_default.debug({ hehe: filelist.files.length, src });
-    filelist.files.push(CoomerFile.from({ name, url: src }));
-  }
-  return filelist;
-}
-var RedditAPI = class {
-  testURL(url) {
-    return /^\/user\/[\w-]+$/.test(url.pathname);
-  }
-  async getData(url) {
-    const user = url.match(/^\/user\/([\w-]+)/)?.[1];
-    const posts = await getUserPosts(user);
-    const filelist = await getPostsData(posts);
-    filelist.dirName = `${user}-reddit`;
-    return filelist;
-  }
-};
-
-// src/api/resolver.ts
-var providers = [RedditAPI, CoomerAPI, BunkrAPI, GofileAPI, PlainFileAPI];
-async function resolveAPI(url_) {
-  const url = new URL(url_);
-  for (const p of providers) {
-    const provider = new p();
-    if (provider.testURL(url)) {
-      const filelist = await provider.getData(url.toString());
-      filelist.provider = provider;
-      return filelist;
-    }
-  }
-  throw Error("Invalid URL");
-}
-
-// src/cli/args-handler.ts
-import yargs from "yargs";
-import { hideBin } from "yargs/helpers";
-function argumentHander() {
-  return yargs(hideBin(process.argv)).option("url", {
-    alias: "u",
-    type: "string",
-    description: "A URL from Coomer/Kemono/Bunkr/GoFile, a Reddit user (u/<username>), or a direct file link",
-    demandOption: true
-  }).option("dir", {
-    type: "string",
-    description: "The directory where files will be downloaded",
-    default: "./"
-  }).option("media", {
-    type: "string",
-    choices: ["video", "image"],
-    description: "The type of media to download: 'video', 'image', or 'all'. 'all' is the default."
-  }).option("include", {
-    type: "string",
-    default: "",
-    description: "Filter file names by a comma-separated list of keywords to include"
-  }).option("exclude", {
-    type: "string",
-    default: "",
-    description: "Filter file names by a comma-separated list of keywords to exclude"
-  }).option("min-size", {
-    type: "string",
-    default: "",
-    description: 'Minimum file size to download. Example: "1mb" or "500kb"'
-  }).option("max-size", {
-    type: "string",
-    default: "",
-    description: 'Maximum file size to download. Example: "1mb" or "500kb"'
-  }).option("skip", {
-    type: "number",
-    default: 0,
-    description: "Skips the first N files in the download queue"
-  }).option("remove-dupilicates", {
-    type: "boolean",
-    default: true,
-    description: "removes duplicates by url and file hash"
-  }).help().alias("help", "h").parseSync();
-}
-
-// src/cli/ui/index.tsx
-import { render } from "ink";
-import React10 from "react";
-
-// src/cli/ui/app.tsx
-import { Box as Box7 } from "ink";
-import React9 from "react";
-
 // src/core/downloader.ts
 import fs2 from "node:fs";
 import { Readable, Transform } from "node:stream";
 import { pipeline as pipeline2 } from "node:stream/promises";
 import { Subject } from "rxjs";
 
+// src/utils/error.ts
+function printError(err, options = {}) {
+  const e = err;
+  const status = Number(
+    e?.response?.status || e?.status || e?.message?.match(/\d{3}/)?.[0] || 500
+  );
+  const type = e?.code || e?.name || "Error";
+  const message = e?.message || "No details";
+  const quietList = options.quiet ?? [403, 404];
+  const isQuiet = quietList.includes(status);
+  console.error(
+    `\x1B[31m[ERROR]\x1B[0m \x1B[33m${status}\x1B[0m | \x1B[36m${type}\x1B[0m: ${message}`
+  );
+  if (options.context) {
+    console.error("\x1B[90mContext:\x1B[0m", options.context);
+  }
+  if (!isQuiet && e?.stack) {
+    console.error(`
+\x1B[90mStack Trace:
+${e.stack}\x1B[0m`);
+  }
+}
+
 // src/utils/promise.ts
 async function sleep(time) {
   return new Promise((resolve) => setTimeout(resolve, time));

@@ -708,7 +603,11 @@ var Downloader = class {
     for (const file of this.filelist.files) {
       file.active = true;
       this.subject.next({ type: "FILE_DOWNLOADING_START" });
-      await this.downloadFile(file);
+      try {
+        await this.downloadFile(file);
+      } catch (e) {
+        printError(e, { quiet: [403], context: file.url });
+      }
       file.active = false;
       this.subject.next({ type: "FILE_DOWNLOADING_END" });
     }

@@ -716,6 +615,114 @@ var Downloader = class {
   }
 };
 
+// src/api/providers/reddit.ts
+async function getUserPage(user, offset) {
+  const url = `https://nsfw.xxx/api/v1/user/${user}/newest?page=${offset}&types[]=image&types[]=video&types[]=gallery&nsfw[]=0&nsfw[]=1&nsfw[]=2&nsfw[]=3&nsfw[]=4`;
+  const res = await fetch4(url).then((r) => r.json());
+  return res;
+}
+async function getUserPostsData(user) {
+  const filelist = new CoomerFileList();
+  for (let i = 1; i < 1e4; i++) {
+    const { data } = await getUserPage(user, i);
+    if (data.posts.length < 1) break;
+    data.posts.forEach((post) => {
+      const date = post.publishedAt;
+      const title = post.content.title;
+      const name = `${date} ${title}`;
+      const preview = post.data.url;
+      const files = (post.data.videos_v2 || []).filter((f) => !f.url.includes("imgur"));
+      if (files?.length === 0 && preview) {
+        files.push({ format: "jpg", url: preview });
+      }
+      files.forEach(({ format, url }, i2) => {
+        const index = i2 > 0 ? ` ${i2}` : "";
+        const _name = `${name}${index}.${format}`;
+        filelist.files.push(CoomerFile.from({ name: _name, url }));
+      });
+    });
+  }
+  return filelist;
+}
+var RedditAPI = class {
+  testURL(url) {
+    return /^\/user\/[\w-]+$/.test(url.pathname);
+  }
+  async getData(url) {
+    const user = url.match(/\/user\/([\w-]+)/)?.[1];
+    const filelist = await getUserPostsData(user);
+    filelist.dirName = `${user}-reddit`;
+    return filelist;
+  }
+};
+
+// src/api/resolver.ts
+var providers = [RedditAPI, CoomerAPI, BunkrAPI, GofileAPI, PlainFileAPI];
+async function resolveAPI(url_) {
+  const url = new URL(url_);
+  for (const p of providers) {
+    const provider = new p();
+    if (provider.testURL(url)) {
+      const filelist = await provider.getData(url.toString());
+      filelist.provider = provider;
+      return filelist;
+    }
+  }
+  throw Error("Invalid URL");
+}
+
+// src/cli/parse-args.ts
+import yargs from "yargs";
+import { hideBin } from "yargs/helpers";
+function parseArgs() {
+  return yargs(hideBin(process.argv)).option("url", {
+    alias: "u",
+    type: "string",
+    description: "A URL from Coomer/Kemono/Bunkr/GoFile, a Reddit user (u/<username>), or a direct file link",
+    demandOption: true
+  }).option("dir", {
+    type: "string",
+    description: "The directory where files will be downloaded",
+    default: "./"
+  }).option("media", {
+    type: "string",
+    choices: ["video", "image"],
+    description: "The type of media to download: 'video', 'image', or 'all'. 'all' is the default."
+  }).option("include", {
+    type: "string",
+    default: "",
+    description: "Filter file names by a comma-separated list of keywords to include"
+  }).option("exclude", {
+    type: "string",
+    default: "",
+    description: "Filter file names by a comma-separated list of keywords to exclude"
+  }).option("min-size", {
+    type: "string",
+    default: "",
+    description: 'Minimum file size to download. Example: "1mb" or "500kb"'
+  }).option("max-size", {
+    type: "string",
+    default: "",
+    description: 'Maximum file size to download. Example: "1mb" or "500kb"'
+  }).option("skip", {
+    type: "number",
+    default: 0,
+    description: "Skips the first N files in the download queue"
+  }).option("remove-dupilicates", {
+    type: "boolean",
+    default: true,
+    description: "removes duplicates by url and file hash"
+  }).help().alias("help", "h").parseSync();
+}
+
+// src/cli/ui/index.tsx
+import { render } from "ink";
+import React10 from "react";
+
+// src/cli/ui/app.tsx
+import { Box as Box7 } from "ink";
+import React9 from "react";
+
 // src/cli/ui/components/file.tsx
 import { Box as Box2, Spacer, Text as Text2 } from "ink";
 import React3 from "react";

@@ -881,7 +888,7 @@ import { Box as Box6, Spacer as Spacer2, Text as Text6 } from "ink";
 import React8 from "react";
 
 // package.json
-var version = "3.4.2";
+var version = "3.4.3";
 
 // src/cli/ui/components/titlebar.tsx
 function TitleBar() {

@@ -915,10 +922,10 @@ function createReactInk() {
   return render(/* @__PURE__ */ React10.createElement(App, null));
 }
 
-// src/index.ts
-async function run() {
+// src/main.ts
+async function main() {
   createReactInk();
-  const { url, dir, media, include, exclude, minSize, maxSize, skip, removeDupilicates } = argumentHander();
+  const { url, dir, media, include, exclude, minSize, maxSize, skip, removeDupilicates } = parseArgs();
   const filelist = await resolveAPI(url);
   filelist.setDirPath(dir).skip(skip).filterByText(include, exclude).filterByMediaType(media);
   if (removeDupilicates) {

@@ -935,12 +942,9 @@ async function run() {
     await filelist.removeDuplicatesByHash();
   }
 }
-(async () => {
-  try {
-    await run();
-    process2.exit(0);
-  } catch (err) {
-    console.error("Fatal error:", err);
-    process2.exit(1);
-  }
-})();
+
+// src/index.ts
+main().then(() => process2.exit(0)).catch((err) => {
+  console.error(err);
+  process2.exit(1);
+});
package/package.json
CHANGED

-  "version": "3.4.2",
+  "version": "3.4.3",

package/src/api/providers/reddit.ts
CHANGED

@@ -1,55 +1,62 @@
-import * as cheerio from 'cheerio';
 import { fetch } from 'undici';
-import { CoomerFile } from '../../core
+import { CoomerFile } from '../../core';
 import { CoomerFileList } from '../../core/filelist';
-import logger from '../../utils/logger';
 import type { ProviderAPI } from '../provider';
 
-async function getUserPage(user: string, offset: number) {
-  const url = `https://nsfw.xxx/page/${offset}?nsfw[]=0&types[]=image&types[]=video&types[]=gallery&slider=1&jsload=1&user=${user}&_=${Date.now()}`;
-  return fetch(url).then((r) => r.text());
-}
-
-async function getUserPosts(user: string) {
-  const posts: string[] = [];
-  for (let i = 1; i < 100_000; i++) {
-    const page = await getUserPage(user, i);
-    if (page.length < 1) break;
+type RedditAPIPosts = {
+  data: {
+    posts: Array<{
+      id: number;
+      content: {
+        title: string;
+        description: string;
+      };
+      data: {
+        url: string;
+        videos: {
+          mp4: string;
+        };
+        videos_v2: Array<{
+          format: string;
+          url: string;
+        }>;
+      };
+      publishedAt: string;
+    }>;
+  };
+};
 
-    const $ = cheerio.load(page);
-    const newPosts = $('a')
-      .map((_, a) => $(a).attr('href'))
-      .get()
-      .filter((href) => href?.startsWith('https://nsfw.xxx/post'));
-
-    logger.debug({ count: posts.length });
-    posts.push(...newPosts);
-  }
-  return posts;
+async function getUserPage(user: string, offset: number): Promise<RedditAPIPosts> {
+  const url = `https://nsfw.xxx/api/v1/user/${user}/newest?page=${offset}&types[]=image&types[]=video&types[]=gallery&nsfw[]=0&nsfw[]=1&nsfw[]=2&nsfw[]=3&nsfw[]=4`;
+  const res = await fetch(url).then((r) => r.json());
+  return res as RedditAPIPosts;
 }
 
-async function getPostsData(posts: string[]) {
+async function getUserPostsData(user: string): Promise<CoomerFileList> {
   const filelist = new CoomerFileList();
-  for (const post of posts) {
-    const page = await fetch(post).then((r) => r.text());
-    const $ = cheerio.load(page);
 
-    const src =
-      $('.sh-section .sh-section__image img').attr('src') ||
-      $('.sh-section .sh-section__image video source').attr('src') ||
-      null;
+  for (let i = 1; i < 10_000; i++) {
+    const { data } = await getUserPage(user, i);
+    if (data.posts.length < 1) break;
 
-    if (!src) continue;
+    data.posts.forEach((post) => {
+      const date = post.publishedAt;
+      const title = post.content.title;
+      const name = `${date} ${title}`;
 
-    const slug = post.split('post/')[1].split('?')[0];
-    const date =
-      $('.sh-section .sh-section__passed').first().text().replace(/ /g, '-') || '';
+      const preview = post.data.url;
 
-    const ext = src.split('.').pop();
-    const name = `${slug}-${date}.${ext}`;
+      const files = (post.data.videos_v2 || []).filter((f) => !f.url.includes('imgur'));
+      if (files?.length === 0 && preview) {
+        files.push({ format: 'jpg', url: preview });
+      }
 
-    logger.debug({ hehe: filelist.files.length, src });
-    filelist.files.push(CoomerFile.from({ name, url: src }));
+      files.forEach(({ format, url }, i) => {
+        const index = i > 0 ? ` ${i}` : '';
+        const _name = `${name}${index}.${format}`;
+        filelist.files.push(CoomerFile.from({ name: _name, url }));
+      });
+    });
   }
 
   return filelist;

@@ -61,9 +68,8 @@ export class RedditAPI implements ProviderAPI {
   }
 
   public async getData(url: string): Promise<CoomerFileList> {
-    const user = url.match(/^\/user\/([\w-]+)/)?.[1] as string;
-    const posts = await getUserPosts(user);
-    const filelist = await getPostsData(posts);
+    const user = url.match(/\/user\/([\w-]+)/)?.[1] as string;
+    const filelist = await getUserPostsData(user);
     filelist.dirName = `${user}-reddit`;
     return filelist;
   }
package/src/core/downloader.ts
CHANGED

@@ -3,6 +3,7 @@ import { Readable, Transform } from 'node:stream';
 import { pipeline } from 'node:stream/promises';
 import { Subject } from 'rxjs';
 import type { AbortControllerSubject, DownloaderSubject } from '../types';
+import { printError } from '../utils/error';
 import { deleteFile, getFileSize, mkdir } from '../utils/io';
 import { sleep } from '../utils/promise';
 import { fetchByteRange } from '../utils/requests';

@@ -152,7 +153,11 @@ export class Downloader {
 
       this.subject.next({ type: 'FILE_DOWNLOADING_START' });
 
-      await this.downloadFile(file);
+      try {
+        await this.downloadFile(file);
+      } catch (e) {
+        printError(e, { quiet: [403], context: file.url });
+      }
 
       file.active = false;
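
This change is small but behaviorally significant: previously a single rejected `downloadFile` promise propagated out of the loop and aborted the rest of the queue; now each file is wrapped in try/catch and reported through `printError`, with 403s kept quiet. The same log-and-continue pattern in isolation, with hypothetical `urls`/`downloadOne` stand-ins rather than the package's own classes:

```ts
// Log-and-continue instead of fail-fast; `downloadOne` is a hypothetical
// stand-in for the class's private downloadFile().
async function drainQueue(
  urls: string[],
  downloadOne: (url: string) => Promise<void>,
): Promise<void> {
  for (const url of urls) {
    try {
      await downloadOne(url);
    } catch (e) {
      // One bad URL no longer rejects the whole run.
      console.error(`skipped ${url}:`, (e as Error).message);
    }
  }
}
```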
package/src/index.ts
CHANGED

@@ -1,55 +1,11 @@
 #!/usr/bin/env -S node --no-warnings=ExperimentalWarning
 
 import process from 'node:process';
-import { resolveAPI } from './api';
-import { argumentHander } from './cli/args-handler';
-import { createReactInk } from './cli/ui';
-import { useInkStore } from './cli/ui/store';
-import { Downloader } from './core';
-import { parseSizeValue } from './utils/filters';
-import { setGlobalHeaders } from './utils/requests';
+import { main } from './main';
 
-async function run() {
-  createReactInk();
-
-  const { url, dir, media, include, exclude, minSize, maxSize, skip, removeDupilicates } =
-    argumentHander();
-
-  const filelist = await resolveAPI(url);
-
-  filelist
-    .setDirPath(dir)
-    .skip(skip)
-    .filterByText(include, exclude)
-    .filterByMediaType(media);
-
-  if (removeDupilicates) {
-    filelist.removeURLDuplicates();
-  }
-
-  const minSizeBytes = minSize ? parseSizeValue(minSize) : undefined;
-  const maxSizeBytes = maxSize ? parseSizeValue(maxSize) : undefined;
-
-  await filelist.calculateFileSizes();
-
-  setGlobalHeaders({ Referer: url });
-
-  const downloader = new Downloader(filelist, minSizeBytes, maxSizeBytes);
-  useInkStore.getState().setDownloader(downloader);
-
-  await downloader.downloadFiles();
-
-  if (removeDupilicates) {
-    await filelist.removeDuplicatesByHash();
-  }
-}
-
-(async () => {
-  try {
-    await run();
-    process.exit(0);
-  } catch (err) {
-    console.error('Fatal error:', err);
+main()
+  .then(() => process.exit(0))
+  .catch((err) => {
+    console.error(err);
     process.exit(1);
-  }
-})();
+  });
package/src/main.ts
ADDED

@@ -0,0 +1,42 @@
+import { resolveAPI } from './api';
+import { parseArgs } from './cli/parse-args';
+import { createReactInk } from './cli/ui';
+import { useInkStore } from './cli/ui/store';
+import { Downloader } from './core';
+import { parseSizeValue } from './utils/filters';
+import { setGlobalHeaders } from './utils/requests';
+
+export async function main() {
+  createReactInk();
+
+  const { url, dir, media, include, exclude, minSize, maxSize, skip, removeDupilicates } =
+    parseArgs();
+
+  const filelist = await resolveAPI(url);
+
+  filelist
+    .setDirPath(dir)
+    .skip(skip)
+    .filterByText(include, exclude)
+    .filterByMediaType(media);
+
+  if (removeDupilicates) {
+    filelist.removeURLDuplicates();
+  }
+
+  const minSizeBytes = minSize ? parseSizeValue(minSize) : undefined;
+  const maxSizeBytes = maxSize ? parseSizeValue(maxSize) : undefined;
+
+  await filelist.calculateFileSizes();
+
+  setGlobalHeaders({ Referer: url });
+
+  const downloader = new Downloader(filelist, minSizeBytes, maxSizeBytes);
+  useInkStore.getState().setDownloader(downloader);
+
+  await downloader.downloadFiles();
+
+  if (removeDupilicates) {
+    await filelist.removeDuplicatesByHash();
+  }
+}
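
Extracting the old IIFE into an exported `main()`, with `index.ts` reduced to a thin bin wrapper that owns the exit codes, also makes the flow drivable from other code. A hypothetical smoke-test sketch, assuming Node ESM with top-level await; the argv injection is illustrative and works only because `parseArgs()` reads `process.argv` through yargs' `hideBin`:

```ts
import { main } from './src/main';

// Inject CLI arguments before main() runs parseArgs() (illustrative,
// not a test that ships with the package).
process.argv.push('--url', 'https://example.com/video.mp4', '--dir', './tmp');

await main();
```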
package/src/utils/error.ts
ADDED

@@ -0,0 +1,30 @@
+interface PrintOptions {
+  quiet?: number[];
+  context?: any;
+}
+
+export function printError(err: unknown, options: PrintOptions = {}): void {
+  const e = err as any;
+
+  const status = Number(
+    e?.response?.status || e?.status || e?.message?.match(/\d{3}/)?.[0] || 500,
+  );
+
+  const type = e?.code || e?.name || 'Error';
+  const message = e?.message || 'No details';
+
+  const quietList = options.quiet ?? [403, 404];
+  const isQuiet = quietList.includes(status);
+
+  console.error(
+    `\x1b[31m[ERROR]\x1b[0m \x1b[33m${status}\x1b[0m | \x1b[36m${type}\x1b[0m: ${message}`,
+  );
+
+  if (options.context) {
+    console.error('\x1b[90mContext:\x1b[0m', options.context);
+  }
+
+  if (!isQuiet && e?.stack) {
+    console.error(`\n\x1b[90mStack Trace:\n${e.stack}\x1b[0m`);
+  }
+}
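
A usage sketch of the new helper's two verbosity levels; the import path and error messages are illustrative:

```ts
import { printError } from './src/utils/error';

// The status is pulled from err.response.status, err.status, or the first
// three-digit number in the message. 403 is in the quiet list here, so this
// prints the one-line summary and the context but no stack trace.
printError(new Error('Request failed with status 403'), {
  quiet: [403],
  context: 'https://example.com/file.mp4',
});

// No three-digit number in the message, so status falls back to 500, which
// is not in the default quiet list [403, 404]; the stack trace is printed too.
printError(new TypeError('x is not a function'));
```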