coomer-downloader 3.3.2 → 3.4.1

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/README.md CHANGED
@@ -2,9 +2,12 @@
 
  [![NPM](https://nodei.co/npm/coomer-downloader.svg?color=lightgrey)](https://www.npmjs.com/package/coomer-downloader)
 
+ <img src="https://github.com/smartacephale/coomer-downloader/blob/main/docs/images/Screenshot%2001.jpg?raw=true" width="500"></img>
 
  ### Features
- * script keeps track of downloaded files and resume downloading if it's crashed.
+ * Script keeps track of downloaded files and resume downloading if it's crashed.
+ * Skip files, filter by text and media type
+ * Preview images in console. For now partial downloads not supported.
 
  ### Install:
  ```
@@ -42,12 +45,20 @@ npx coomer-downloader --u https://random.com/file.mp4
  ### Options:
  ```
  --dir <directory>
- --media <video|image|all>
+ --media <video|image>
  --skip <number>
 
- # include/exclude filters files by name
+ # Removes duplicates by url and file hash
+ --remove-dupilicates <true|false>
+
+ # Filter by file name
  --include <keyword1>
  --exclude <keyword2>
+
+ # Filter by min/max file size. Example: "1mb" or "500kb"
+ --min-size <keyword>
+ --max-size <keyword>
+
  ```
 
  ```
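For reference, a hypothetical invocation that exercises the new flags together (the URL is a placeholder, the `--u` form follows the README's own example, and `--remove-dupilicates` is spelled exactly as the code defines it):

```
npx coomer-downloader --u <url> --media video --min-size 500kb --max-size 1gb --exclude preview --remove-dupilicates false
```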
package/biome.json CHANGED
@@ -1,5 +1,5 @@
  {
- "$schema": "https://biomejs.dev/schemas/2.2.6/schema.json",
+ "$schema": "https://biomejs.dev/schemas/2.3.10/schema.json",
  "assist": {
  "actions": {
  "source": {
@@ -14,13 +14,14 @@
  "indentStyle": "space",
  "indentWidth": 2,
  "lineEnding": "lf",
- "lineWidth": 100
+ "lineWidth": 90
  },
  "javascript": {
  "formatter": {
  "quoteStyle": "single",
  "semicolons": "always"
- }
+ },
+ "jsxRuntime": "reactClassic"
  },
  "linter": {
  "enabled": true,
@@ -37,7 +38,8 @@
  "useNumberNamespace": "off"
  },
  "suspicious": {
- "noRedundantUseStrict": "off"
+ "noRedundantUseStrict": "off",
+ "noControlCharactersInRegex": "info"
  }
  }
  }
package/dist/index.js CHANGED
@@ -7,37 +7,11 @@ import process2 from "node:process";
  import * as cheerio from "cheerio";
  import { fetch } from "undici";
 
- // src/services/file.ts
- import os from "node:os";
- import path from "node:path";
-
- // src/utils/filters.ts
- function isImage(name) {
- return /\.(jpg|jpeg|png|gif|bmp|tiff|webp|avif)$/i.test(name);
- }
- function isVideo(name) {
- return /\.(mp4|m4v|avi|mov|mkv|webm|flv|wmv|mpeg|mpg|3gp)$/i.test(name);
- }
- function testMediaType(name, type) {
- return type === "all" ? true : type === "image" ? isImage(name) : isVideo(name);
- }
- function includesAllWords(str, words) {
- if (!words.length) return true;
- return words.every((w) => str.includes(w));
- }
- function includesNoWords(str, words) {
- if (!words.length) return true;
- return words.every((w) => !str.includes(w));
- }
- function parseQuery(query) {
- return query.split(",").map((x) => x.toLowerCase().trim()).filter((_) => _);
- }
- function filterString(text, include, exclude) {
- return includesAllWords(text, parseQuery(include)) && includesNoWords(text, parseQuery(exclude));
- }
-
  // src/utils/io.ts
+ import { createHash } from "node:crypto";
  import fs from "node:fs";
+ import { access, constants, unlink } from "node:fs/promises";
+ import { pipeline } from "node:stream/promises";
  async function getFileSize(filepath) {
  let size = 0;
  if (fs.existsSync(filepath)) {
@@ -45,15 +19,29 @@ async function getFileSize(filepath) {
  }
  return size;
  }
+ async function getFileHash(filepath) {
+ const hash = createHash("sha256");
+ const filestream = fs.createReadStream(filepath);
+ await pipeline(filestream, hash);
+ return hash.digest("hex");
+ }
  function mkdir(filepath) {
  if (!fs.existsSync(filepath)) {
  fs.mkdirSync(filepath, { recursive: true });
  }
  }
+ async function deleteFile(path2) {
+ await access(path2, constants.F_OK);
+ await unlink(path2);
+ }
+ function sanitizeFilename(name) {
+ if (!name) return name;
+ return name.replace(/[<>:"/\\|?*\x00-\x1F]/g, "-").replace(/\s+/g, " ").trim().replace(/[.]+$/, "");
+ }
 
  // src/services/file.ts
  var CoomerFile = class _CoomerFile {
- constructor(name, url, filepath, size, downloaded = 0, content) {
+ constructor(name, url, filepath = "", size, downloaded = 0, content) {
  this.name = name;
  this.url = url;
  this.filepath = filepath;
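Two of the new io helpers are worth illustrating. Below is a minimal standalone sketch of their behavior; it mirrors the bundled logic above, except the hash variant iterates the read stream with update/digest instead of piping into the Hash object:

```ts
import { createHash } from "node:crypto";
import fs from "node:fs";

// Mirrors sanitizeFilename: strip characters illegal in file names,
// collapse runs of whitespace, and drop trailing dots.
function sanitizeFilename(name: string): string {
  return name
    .replace(/[<>:"/\\|?*\x00-\x1F]/g, "-")
    .replace(/\s+/g, " ")
    .trim()
    .replace(/[.]+$/, "");
}

// Streaming SHA-256 without buffering the whole file in memory,
// equivalent in effect to getFileHash above.
async function sha256(filepath: string): Promise<string> {
  const hash = createHash("sha256");
  for await (const chunk of fs.createReadStream(filepath)) {
    hash.update(chunk as Buffer);
  }
  return hash.digest("hex");
}

sanitizeFilename('2024-01-01 a/b:c?.jpg.'); // -> "2024-01-01 a-b-c-.jpg"
```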
@@ -62,7 +50,8 @@ var CoomerFile = class _CoomerFile {
  this.content = content;
  }
  active = false;
- async getDownloadedSize() {
+ hash;
+ async calcDownloadedSize() {
  this.downloaded = await getFileSize(this.filepath);
  return this;
  }
@@ -74,6 +63,68 @@ var CoomerFile = class _CoomerFile {
  return new _CoomerFile(f.name, f.url, f.filepath, f.size, f.downloaded, f.content);
  }
  };
+
+ // src/services/filelist.ts
+ import os from "node:os";
+ import path from "node:path";
+
+ // src/utils/duplicates.ts
+ function collectUniquesAndDuplicatesBy(xs, k) {
+ const seen = /* @__PURE__ */ new Set();
+ return xs.reduce(
+ (acc, item) => {
+ if (seen.has(item[k])) {
+ acc.duplicates.push(item);
+ } else {
+ seen.add(item[k]);
+ acc.uniques.push(item);
+ }
+ return acc;
+ },
+ { uniques: [], duplicates: [] }
+ );
+ }
+ function removeDuplicatesBy(xs, k) {
+ return [...new Map(xs.map((x) => [x[k], x])).values()];
+ }
+
+ // src/utils/filters.ts
+ function includesAllWords(str, words) {
+ if (!words.length) return true;
+ return words.every((w) => str.includes(w));
+ }
+ function includesNoWords(str, words) {
+ if (!words.length) return true;
+ return words.every((w) => !str.includes(w));
+ }
+ function parseQuery(query) {
+ return query.split(",").map((x) => x.toLowerCase().trim()).filter((_) => _);
+ }
+ function filterString(text, include, exclude) {
+ return includesAllWords(text, parseQuery(include)) && includesNoWords(text, parseQuery(exclude));
+ }
+ function parseSizeValue(s) {
+ if (!s) return NaN;
+ const m = s.match(/^([0-9]+(?:\.[0-9]+)?)(b|kb|mb|gb)?$/i);
+ if (!m) return NaN;
+ const val = parseFloat(m[1]);
+ const unit = (m[2] || "b").toLowerCase();
+ const mult = unit === "kb" ? 1024 : unit === "mb" ? 1024 ** 2 : unit === "gb" ? 1024 ** 3 : 1;
+ return Math.floor(val * mult);
+ }
+
+ // src/utils/mediatypes.ts
+ function isImage(name) {
+ return /\.(jpg|jpeg|png|gif|bmp|tiff|webp|avif)$/i.test(name);
+ }
+ function isVideo(name) {
+ return /\.(mp4|m4v|avi|mov|mkv|webm|flv|wmv|mpeg|mpg|3gp)$/i.test(name);
+ }
+ function testMediaType(name, type) {
+ return type === "image" ? isImage(name) : isVideo(name);
+ }
+
+ // src/services/filelist.ts
  var CoomerFileList = class {
  constructor(files = []) {
  this.files = files;
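For clarity, the behavior of parseSizeValue as defined above: binary units, an optional suffix defaulting to bytes, the result floored to whole bytes, and NaN for anything the pattern rejects:

```ts
parseSizeValue("500kb"); // 512000       (500 * 1024)
parseSizeValue("1.5mb"); // 1572864      (1.5 * 1024 ** 2, floored)
parseSizeValue("2GB");   // 2147483648   (suffix is case-insensitive)
parseSizeValue("1234");  // 1234         (no suffix means bytes)
parseSizeValue("10 mb"); // NaN          (whitespace is not accepted)
```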
@@ -88,7 +139,8 @@ var CoomerFileList = class {
  this.dirPath = path.join(os.homedir(), path.join(dir, dirName));
  }
  this.files.forEach((file) => {
- file.filepath = path.join(this.dirPath, file.name);
+ const safeName = sanitizeFilename(file.name) || file.name;
+ file.filepath = path.join(this.dirPath, safeName);
  });
  return this;
  }
@@ -108,8 +160,9 @@ var CoomerFileList = class {
  }
  async calculateFileSizes() {
  for (const file of this.files) {
- await file.getDownloadedSize();
+ await file.calcDownloadedSize();
  }
+ return this;
  }
  getActiveFiles() {
  return this.files.filter((f) => f.active);
@@ -117,6 +170,19 @@ var CoomerFileList = class {
  getDownloaded() {
  return this.files.filter((f) => f.size && f.size <= f.downloaded);
  }
+ async removeDuplicatesByHash() {
+ for (const file of this.files) {
+ file.hash = await getFileHash(file.filepath);
+ }
+ const { duplicates } = collectUniquesAndDuplicatesBy(this.files, "hash");
+ duplicates.forEach((f) => {
+ deleteFile(f.filepath);
+ });
+ }
+ removeURLDuplicates() {
+ this.files = removeDuplicatesBy(this.files, "url");
+ return this;
+ }
  };
 
  // src/api/bunkr.ts
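A quick illustration of the two dedup helpers the new methods rely on (sample data is hypothetical). Note that removeDuplicatesBy keeps the last entry seen per key, while collectUniquesAndDuplicatesBy keeps the first and reports the rest as duplicates, which removeDuplicatesByHash then deletes from disk:

```ts
const files = [
  { url: "https://a/1.jpg", hash: "aaa" },
  { url: "https://a/1.jpg", hash: "bbb" }, // duplicate url
  { url: "https://a/2.jpg", hash: "aaa" }, // duplicate hash
];

removeDuplicatesBy(files, "url");
// -> [{ url: "https://a/1.jpg", hash: "bbb" }, { url: "https://a/2.jpg", hash: "aaa" }]
//    (the Map overwrites earlier values, so the last entry per url survives)

collectUniquesAndDuplicatesBy(files, "hash");
// -> { uniques: [first "aaa" entry, the "bbb" entry], duplicates: [second "aaa" entry] }
```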
@@ -132,7 +198,9 @@ function decryptEncryptedUrl(encryptionData) {
  const secretKey = `SECRET_KEY_${Math.floor(encryptionData.timestamp / 3600)}`;
  const encryptedUrlBuffer = Buffer.from(encryptionData.url, "base64");
  const secretKeyBuffer = Buffer.from(secretKey, "utf-8");
- return Array.from(encryptedUrlBuffer).map((byte, i) => String.fromCharCode(byte ^ secretKeyBuffer[i % secretKeyBuffer.length])).join("");
+ return Array.from(encryptedUrlBuffer).map(
+ (byte, i) => String.fromCharCode(byte ^ secretKeyBuffer[i % secretKeyBuffer.length])
+ ).join("");
  }
  async function getFileData(url, name) {
  const slug = url.split("/").pop();
@@ -225,10 +293,10 @@ async function getUserPostsAPI(user, offset) {
  async function getUserFiles(user) {
  const userPosts = [];
  const offset = 50;
- for (let i = 0; i < 1e3; i++) {
+ for (let i = 0; i < 1e4; i++) {
  const posts = await getUserPostsAPI(user, i * offset);
  userPosts.push(...posts);
- if (posts.length < 50) break;
+ if (posts.length < offset) break;
  }
  const filelist = new CoomerFileList();
  for (const p of userPosts) {
@@ -239,13 +307,23 @@ async function getUserFiles(user) {
  const postFiles = [...p.attachments, p.file].filter((f) => f.path).map((f, i) => {
  const ext = f.name.split(".").pop();
  const name = `${datentitle} ${i + 1}.${ext}`;
- const url = `${user.domain}/${f.path}`;
+ const url = getUrl(f, user);
  return CoomerFile.from({ name, url, content });
  });
  filelist.files.push(...postFiles);
  }
  return filelist;
  }
+ function getUrl(f, user) {
+ const normalizedPath = f.path.replace(/^\/+/, "/");
+ let url = "";
+ try {
+ url = new URL(normalizedPath, user.domain).toString();
+ } catch (_) {
+ url = `${user.domain}/${normalizedPath.replace(/^\//, "")}`;
+ }
+ return url;
+ }
  async function parseUser(url) {
  const [_, domain, service, id] = url.match(
  /(https:\/\/\w+\.\w+)\/(\w+)\/user\/([\w|.|-]+)/
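The reason getUrl collapses leading slashes before resolving: under WHATWG URL semantics a path starting with // is protocol-relative, so the first path segment would be parsed as the host. A small demonstration (inputs hypothetical):

```ts
const base = "https://coomer.su";

new URL("/data/a.jpg", base).toString();  // "https://coomer.su/data/a.jpg"
new URL("//data/a.jpg", base).toString(); // "https://data/a.jpg" (wrong host!)

// getUrl's normalization avoids that failure mode:
"//data/a.jpg".replace(/^\/+/, "/");      // "/data/a.jpg", which resolves correctly
```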
@@ -320,7 +398,6 @@ async function getUserPage(user, offset) {
  return fetch4(url).then((r) => r.text());
  }
  async function getUserPosts(user) {
- console.log("Fetching user posts...");
  const posts = [];
  for (let i = 1; i < 1e5; i++) {
  const page = await getUserPage(user, i);
@@ -332,7 +409,6 @@ async function getUserPosts(user) {
  return posts;
  }
  async function getPostsData(posts) {
- console.log("Fetching posts data...");
  const filelist = new CoomerFileList();
  for (const post of posts) {
  const page = await fetch4(post).then((r) => r.text());
@@ -349,7 +425,9 @@ async function getPostsData(posts) {
  }
  async function getRedditData(url) {
  const user = url.match(/u\/(\w+)/)?.[1];
+ console.log("Fetching user posts...");
  const posts = await getUserPosts(user);
+ console.log("Fetching posts data...");
  const filelist = await getPostsData(posts);
  filelist.dirName = `${user}-reddit`;
  return filelist;
@@ -400,8 +478,7 @@ function argumentHander() {
  default: "./"
  }).option("media", {
  type: "string",
- choices: ["video", "image", "all"],
- default: "all",
+ choices: ["video", "image"],
  description: "The type of media to download: 'video', 'image', or 'all'. 'all' is the default."
  }).option("include", {
  type: "string",
@@ -411,10 +488,22 @@ function argumentHander() {
  type: "string",
  default: "",
  description: "Filter file names by a comma-separated list of keywords to exclude"
+ }).option("min-size", {
+ type: "string",
+ default: "",
+ description: 'Minimum file size to download. Example: "1mb" or "500kb"'
+ }).option("max-size", {
+ type: "string",
+ default: "",
+ description: 'Maximum file size to download. Example: "1mb" or "500kb"'
  }).option("skip", {
  type: "number",
  default: 0,
  description: "Skips the first N files in the download queue"
+ }).option("remove-dupilicates", {
+ type: "boolean",
+ default: true,
+ description: "removes duplicates by url and file hash"
  }).help().alias("help", "h").parseSync();
  }
 
@@ -558,7 +647,7 @@ import { Box as Box6, Spacer as Spacer2, Text as Text6 } from "ink";
  import React7 from "react";
 
  // package.json
- var version = "3.3.2";
+ var version = "3.4.1";
 
  // src/cli/ui/components/titlebar.tsx
  function TitleBar() {
@@ -566,18 +655,30 @@ function TitleBar() {
  }
 
  // src/cli/ui/hooks/downloader.ts
- import { useEffect as useEffect2, useState as useState2 } from "react";
+ import { useRef, useSyncExternalStore } from "react";
  var useDownloaderHook = () => {
  const downloader = useInkStore((state) => state.downloader);
- const filelist = downloader?.filelist;
- const [_, setHelper] = useState2(0);
- useEffect2(() => {
- downloader?.subject.subscribe(({ type }) => {
- if (type === "FILE_DOWNLOADING_START" || type === "FILE_DOWNLOADING_END" || type === "CHUNK_DOWNLOADING_UPDATE") {
- setHelper(Date.now());
- }
- });
- });
+ const versionRef = useRef(0);
+ useSyncExternalStore(
+ (onStoreChange) => {
+ if (!downloader) return () => {
+ };
+ const sub = downloader.subject.subscribe(({ type }) => {
+ const targets = [
+ "FILE_DOWNLOADING_START",
+ "FILE_DOWNLOADING_END",
+ "CHUNK_DOWNLOADING_UPDATE"
+ ];
+ if (targets.includes(type)) {
+ versionRef.current++;
+ onStoreChange();
+ }
+ });
+ return () => sub.unsubscribe();
+ },
+ () => versionRef.current
+ );
+ return downloader?.filelist;
  };
 
  // src/cli/ui/hooks/input.ts
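The hook above replaces a setState-in-useEffect re-render trick with React's external-store pattern: the subscribe callback returns an unsubscribe function, and a monotonically increasing counter acts as the snapshot, so each matching event yields a new snapshot and a re-render. A minimal standalone sketch of the same pattern (the event-source shape is hypothetical):

```ts
import { useRef, useSyncExternalStore } from "react";

// Anything with RxJS-like subscribe/unsubscribe semantics.
type EventSource = { subscribe(cb: () => void): { unsubscribe(): void } };

function useEventVersion(source?: EventSource): number {
  const versionRef = useRef(0);
  return useSyncExternalStore(
    (onStoreChange) => {
      if (!source) return () => {}; // nothing to subscribe to yet
      const sub = source.subscribe(() => {
        versionRef.current++; // advance the snapshot...
        onStoreChange();      // ...and ask React to re-read it
      });
      return () => sub.unsubscribe();
    },
    () => versionRef.current, // getSnapshot must change after every update
  );
}
```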
@@ -598,11 +699,8 @@ var useInputHook = () => {
  // src/cli/ui/app.tsx
  function App() {
  useInputHook();
- useDownloaderHook();
- const downloader = useInkStore((state) => state.downloader);
- const filelist = downloader?.filelist;
- const isFilelist = filelist instanceof CoomerFileList;
- return /* @__PURE__ */ React8.createElement(Box7, { borderStyle: "single", flexDirection: "column", borderColor: "blue", width: 80 }, /* @__PURE__ */ React8.createElement(TitleBar, null), !isFilelist ? /* @__PURE__ */ React8.createElement(Loading, null) : /* @__PURE__ */ React8.createElement(React8.Fragment, null, /* @__PURE__ */ React8.createElement(Box7, null, /* @__PURE__ */ React8.createElement(Box7, null, /* @__PURE__ */ React8.createElement(FileListStateBox, { filelist })), /* @__PURE__ */ React8.createElement(Box7, { flexBasis: 29 }, /* @__PURE__ */ React8.createElement(KeyboardControlsInfo, null))), filelist.getActiveFiles().map((file) => {
+ const filelist = useDownloaderHook();
+ return /* @__PURE__ */ React8.createElement(Box7, { borderStyle: "single", flexDirection: "column", borderColor: "blue", width: 80 }, /* @__PURE__ */ React8.createElement(TitleBar, null), !(filelist instanceof CoomerFileList) ? /* @__PURE__ */ React8.createElement(Loading, null) : /* @__PURE__ */ React8.createElement(React8.Fragment, null, /* @__PURE__ */ React8.createElement(Box7, null, /* @__PURE__ */ React8.createElement(Box7, null, /* @__PURE__ */ React8.createElement(FileListStateBox, { filelist })), /* @__PURE__ */ React8.createElement(Box7, { flexBasis: 30 }, /* @__PURE__ */ React8.createElement(KeyboardControlsInfo, null))), filelist?.getActiveFiles().map((file) => {
  return /* @__PURE__ */ React8.createElement(FileBox, { file, key: file.name });
  })));
  }
@@ -615,7 +713,7 @@ function createReactInk() {
  // src/services/downloader.ts
  import fs2 from "node:fs";
  import { Readable, Transform } from "node:stream";
- import { pipeline } from "node:stream/promises";
+ import { pipeline as pipeline2 } from "node:stream/promises";
  import { Subject } from "rxjs";
 
  // src/utils/promise.ts
@@ -650,7 +748,7 @@ var Timer = class _Timer {
  this.start();
  return this;
  }
- static withAbortController(timeout, abortControllerSubject, message = "Timeout") {
+ static withAbortController(timeout, abortControllerSubject, message = "TIMEOUT") {
  const callback = () => {
  abortControllerSubject.next(message);
  };
@@ -661,8 +759,10 @@ var Timer = class _Timer {
 
  // src/services/downloader.ts
  var Downloader = class {
- constructor(filelist, chunkTimeout = 3e4, chunkFetchRetries = 5, fetchRetries = 7) {
+ constructor(filelist, minSize, maxSize, chunkTimeout = 3e4, chunkFetchRetries = 5, fetchRetries = 7) {
  this.filelist = filelist;
+ this.minSize = minSize;
+ this.maxSize = maxSize;
  this.chunkTimeout = chunkTimeout;
  this.chunkFetchRetries = chunkFetchRetries;
  this.fetchRetries = fetchRetries;
@@ -680,8 +780,10 @@ var Downloader = class {
  async fetchStream(file, stream, sizeOld = 0, retries = this.chunkFetchRetries) {
  const signal = this.abortController.signal;
  const subject = this.subject;
- const { timer } = Timer.withAbortController(this.chunkTimeout, this.abortControllerSubject);
- let i;
+ const { timer } = Timer.withAbortController(
+ this.chunkTimeout,
+ this.abortControllerSubject
+ );
  try {
  const fileStream = fs2.createWriteStream(file.filepath, { flags: "a" });
  const progressStream = new Transform({
@@ -694,7 +796,7 @@ var Downloader = class {
  }
  });
  subject.next({ type: "CHUNK_DOWNLOADING_START" });
- await pipeline(stream, progressStream, fileStream, { signal });
+ await pipeline2(stream, progressStream, fileStream, { signal });
  } catch (error) {
  if (signal.aborted) {
  if (signal.reason === "FILE_SKIP") return;
@@ -711,12 +813,22 @@ var Downloader = class {
  } finally {
  subject.next({ type: "CHUNK_DOWNLOADING_END" });
  timer.stop();
- clearInterval(i);
  }
  }
  skip() {
  this.abortControllerSubject.next("FILE_SKIP");
  }
+ filterFileSize(file) {
+ if (!file.size) return;
+ if (this.minSize && file.size < this.minSize || this.maxSize && file.size > this.maxSize) {
+ try {
+ deleteFile(file.filepath);
+ } catch {
+ }
+ this.skip();
+ return;
+ }
+ }
  async downloadFile(file, retries = this.fetchRetries) {
  const signal = this.abortController.signal;
  try {
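To tie the pieces together, roughly how the size bounds travel from CLI strings into the Downloader (a sketch reusing names from the bundle above; filelist comes from apiHandler(url) as in run() below):

```ts
// CLI strings become byte counts; an empty flag leaves the bound undefined.
const minSizeBytes = parseSizeValue("500kb"); // 512000
const maxSizeBytes = parseSizeValue("1gb");   // 1073741824

// The two bounds now precede the timeout/retry parameters.
const downloader = new Downloader(filelist, minSizeBytes, maxSizeBytes);

// After the Content-Length header fixes file.size, filterFileSize(file)
// deletes the partial file and skips it when the size falls outside the bounds.
```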
@@ -729,6 +841,7 @@ var Downloader = class {
  if (!contentLength && file.downloaded > 0) return;
  const restFileSize = parseInt(contentLength);
  file.size = restFileSize + file.downloaded;
+ this.filterFileSize(file);
  if (file.size > file.downloaded && response.body) {
  const stream = Readable.fromWeb(response.body);
  stream.setMaxListeners(20);
@@ -765,14 +878,29 @@ var Downloader = class {
  // src/index.ts
  async function run() {
  createReactInk();
- const { url, dir, media, include, exclude, skip } = argumentHander();
+ const { url, dir, media, include, exclude, minSize, maxSize, skip, removeDupilicates } = argumentHander();
  const filelist = await apiHandler(url);
  filelist.setDirPath(dir).skip(skip).filterByText(include, exclude).filterByMediaType(media);
+ if (removeDupilicates) {
+ filelist.removeURLDuplicates();
+ }
+ const minSizeBytes = minSize ? parseSizeValue(minSize) : void 0;
+ const maxSizeBytes = maxSize ? parseSizeValue(maxSize) : void 0;
  await filelist.calculateFileSizes();
  setGlobalHeaders({ Referer: url });
- const downloader = new Downloader(filelist);
+ const downloader = new Downloader(filelist, minSizeBytes, maxSizeBytes);
  useInkStore.getState().setDownloader(downloader);
  await downloader.downloadFiles();
- process2.kill(process2.pid, "SIGINT");
+ if (removeDupilicates) {
+ await filelist.removeDuplicatesByHash();
+ }
  }
- run();
+ (async () => {
+ try {
+ await run();
+ process2.exit(0);
+ } catch (err) {
+ console.error("Fatal error:", err);
+ process2.exit(1);
+ }
+ })();
Binary file (contents not shown)
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "coomer-downloader",
- "version": "3.3.2",
+ "version": "3.4.1",
  "author": "smartacephal",
  "license": "MIT",
  "description": "Downloads images/videos from Coomer/Kemono, Bunkr, GoFile, Reddit-NSFW user posts",
@@ -58,6 +58,7 @@
  "eslint-config-xo-react": "^0.27.0",
  "eslint-plugin-react": "^7.32.2",
  "eslint-plugin-react-hooks": "^4.6.0",
+ "pino": "^10.1.0",
  "tsx": "^4.20.6"
  }
  }
package/src/api/bunkr.ts CHANGED
@@ -1,6 +1,7 @@
  import * as cheerio from 'cheerio';
  import { fetch } from 'undici';
- import { CoomerFile, CoomerFileList } from '../services/file';
+ import { CoomerFile } from '../services/file';
+ import { CoomerFileList } from '../services/filelist';
 
  type EncData = { url: string; timestamp: number };
 
@@ -18,7 +19,9 @@ function decryptEncryptedUrl(encryptionData: EncData) {
  const encryptedUrlBuffer = Buffer.from(encryptionData.url, 'base64');
  const secretKeyBuffer = Buffer.from(secretKey, 'utf-8');
  return Array.from(encryptedUrlBuffer)
- .map((byte, i) => String.fromCharCode(byte ^ secretKeyBuffer[i % secretKeyBuffer.length]))
+ .map((byte, i) =>
+ String.fromCharCode(byte ^ secretKeyBuffer[i % secretKeyBuffer.length]),
+ )
  .join('');
  }
 
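decryptEncryptedUrl is a repeating-key XOR, so applying the same key twice restores the input. A self-contained sketch (key and plaintext are hypothetical; the real key is derived from the response timestamp):

```ts
// XOR each byte against a repeating key; the operation is its own inverse.
function xorWithKey(data: Buffer, key: Buffer): Buffer {
  return Buffer.from(data.map((byte, i) => byte ^ key[i % key.length]));
}

const key = Buffer.from("SECRET_KEY_480000", "utf-8");
const plain = Buffer.from("https://example.com/file.mp4", "utf-8");

const encrypted = xorWithKey(plain, key);
xorWithKey(encrypted, key).toString("utf-8"); // "https://example.com/file.mp4"
```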
package/src/api/coomer-api.ts CHANGED
@@ -1,5 +1,6 @@
- import { CoomerFile, CoomerFileList } from '../services/file';
- import { isImage } from '../utils/filters';
+ import { CoomerFile } from '../services/file';
+ import { CoomerFileList } from '../services/filelist';
+ import { isImage } from '../utils/mediatypes';
  import { fetchWithGlobalHeader, setGlobalHeaders } from '../utils/requests';
 
  type CoomerAPIUser = { domain: string; service: string; id: string; name?: string };
@@ -34,7 +35,10 @@ async function getUserProfileData(user: CoomerAPIUser): Promise<CoomerAPIUserDat
  return result as CoomerAPIUserData;
  }
 
- async function getUserPostsAPI(user: CoomerAPIUser, offset: number): Promise<CoomerAPIPost[]> {
+ async function getUserPostsAPI(
+ user: CoomerAPIUser,
+ offset: number,
+ ): Promise<CoomerAPIPost[]> {
  const url = `${user.domain}/api/v1/${user.service}/user/${user.id}/posts?o=${offset}`;
  const posts = await fetchWithGlobalHeader(url).then((r) => r.json());
  return posts as CoomerAPIPost[];
@@ -44,10 +48,10 @@ export async function getUserFiles(user: CoomerAPIUser): Promise<CoomerFileList>
  const userPosts = [];
 
  const offset = 50;
- for (let i = 0; i < 1000; i++) {
+ for (let i = 0; i < 10_000; i++) {
  const posts = await getUserPostsAPI(user, i * offset);
  userPosts.push(...posts);
- if (posts.length < 50) break;
+ if (posts.length < offset) break;
  }
 
  const filelist = new CoomerFileList();
@@ -63,7 +67,7 @@ export async function getUserFiles(user: CoomerAPIUser): Promise<CoomerFileList>
  .map((f, i) => {
  const ext = f.name.split('.').pop();
  const name = `${datentitle} ${i + 1}.${ext}`;
- const url = `${user.domain}/${f.path}`;
+ const url = getUrl(f, user);
  return CoomerFile.from({ name, url, content });
  });
 
@@ -73,6 +77,19 @@ export async function getUserFiles(user: CoomerAPIUser): Promise<CoomerFileList>
  return filelist;
  }
 
+ function getUrl(f: CoomerAPIFile, user: CoomerAPIUser) {
+ // Normalize f.path to avoid protocol-relative or multiple-leading-slash paths
+ const normalizedPath = f.path.replace(/^\/+/, '/');
+ let url = '';
+ try {
+ url = new URL(normalizedPath, user.domain).toString();
+ } catch (_) {
+ // Fallback: join with a single slash
+ url = `${user.domain}/${normalizedPath.replace(/^\//, '')}`;
+ }
+ return url;
+ }
+
  async function parseUser(url: string): Promise<CoomerAPIUser> {
  const [_, domain, service, id] = url.match(
  /(https:\/\/\w+\.\w+)\/(\w+)\/user\/([\w|.|-]+)/,
package/src/api/gofile.ts CHANGED
@@ -1,5 +1,6 @@
  import { fetch } from 'undici';
- import { CoomerFile, CoomerFileList } from '../services/file';
+ import { CoomerFile } from '../services/file';
+ import { CoomerFileList } from '../services/filelist';
  import { setGlobalHeaders } from '../utils/requests';
 
  type GoFileAPIToken = { status: string; data: { token: string } };
package/src/api/index.ts CHANGED
@@ -1,4 +1,4 @@
- import type { CoomerFileList } from '../services/file';
+ import type { CoomerFileList } from '../services/filelist';
  import { getBunkrData } from './bunkr';
  import { getCoomerData } from './coomer-api';
  import { getGofileData } from './gofile';