coomer-downloader 3.1.0 → 3.2.0

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,86 +1,151 @@
  #!/usr/bin/env node

  // src/index.ts
- import os from "node:os";
- import path2 from "node:path";
  import process2 from "node:process";

  // src/api/bunkr.ts
  import * as cheerio from "cheerio";
- import { fetch as fetch2 } from "undici";
+ import { fetch } from "undici";

- // src/utils/downloader.ts
- import fs2 from "node:fs";
+ // src/utils/file.ts
+ import os from "node:os";
  import path from "node:path";
- import { Readable, Transform } from "node:stream";
- import { pipeline } from "node:stream/promises";
- import { Subject } from "rxjs";

- // src/api/coomer-api.ts
- var SERVERS = ["n1", "n2", "n3", "n4"];
- function tryFixCoomerUrl(url, attempts) {
-   if (attempts < 2 && isImage(url)) {
-     return url.replace(/\/data\//, "/thumbnail/data/").replace(/n\d\./, "img.");
-   }
-   const server = url.match(/n\d\./)?.[0].slice(0, 2);
-   const i = SERVERS.indexOf(server);
-   if (i !== -1) {
-     const newServer = SERVERS[(i + 1) % SERVERS.length];
-     return url.replace(/n\d./, `${newServer}.`);
-   }
-   return url;
+ // src/utils/filters.ts
+ function isImage(name) {
+   return /\.(jpg|jpeg|png|gif|bmp|tiff|webp|avif)$/i.test(name);
  }
- async function getUserProfileAPI(user) {
-   const url = `${user.domain}/api/v1/${user.service}/user/${user.id}/profile`;
-   const result = await fetchWithGlobalHeader(url).then((r) => r.json());
-   return result;
+ function isVideo(name) {
+   return /\.(mp4|m4v|avi|mov|mkv|webm|flv|wmv|mpeg|mpg|3gp)$/i.test(name);
  }
- async function getUserPostsAPI(user, offset) {
-   const url = `${user.domain}/api/v1/${user.service}/user/${user.id}/posts?o=${offset}`;
-   const posts = await fetchWithGlobalHeader(url).then((r) => r.json());
-   return posts;
+ function testMediaType(name, type) {
+   return type === "all" ? true : type === "image" ? isImage(name) : isVideo(name);
  }
- async function getUserFiles(user, mediaType) {
-   const userPosts = [];
-   const offset = 50;
-   for (let i = 0; i < 1e3; i++) {
-     const posts = await getUserPostsAPI(user, i * offset);
-     userPosts.push(...posts);
-     if (posts.length < 50) break;
+ function includesAllWords(str, words) {
+   if (!words.length) return true;
+   return words.every((w) => str.includes(w));
+ }
+ function includesNoWords(str, words) {
+   if (!words.length) return true;
+   return words.every((w) => !str.includes(w));
+ }
+ function parseQuery(query) {
+   return query.split(",").map((x) => x.toLowerCase().trim()).filter((_) => _);
+ }
+ function filterString(text, include, exclude) {
+   return includesAllWords(text, parseQuery(include)) && includesNoWords(text, parseQuery(exclude));
+ }
+
+ // src/utils/file.ts
+ var CoomerFile = class _CoomerFile {
+   constructor(name, url, filepath, size, downloaded, content) {
+     this.name = name;
+     this.url = url;
+     this.filepath = filepath;
+     this.size = size;
+     this.downloaded = downloaded;
+     this.content = content;
    }
-   const files = [];
-   for (const p of userPosts) {
-     const title = p.title.match(/\w+/g)?.join(" ") || "";
-     const content = p.content;
-     const date = p.published.replace(/T/, " ");
-     const datentitle = `${date} ${title}`.trim();
-     const postFiles = [...p.attachments, p.file].filter((f) => f.path).filter((f) => testMediaType(f.name, mediaType)).map((f, i) => {
-       const ext = f.name.split(".").pop();
-       const name = `${datentitle} ${i + 1}.${ext}`;
-       const url = `${user.domain}/${f.path}`;
-       return { name, url, content };
+   state = "pause";
+   get textContent() {
+     const text = `${this.name || ""} ${this.content || ""}`.toLowerCase();
+     return text;
+   }
+   static from(f) {
+     return new _CoomerFile(f.name, f.url, f.filepath, f.size, f.downloaded, f.content);
+   }
+ };
+ var CoomerFileList = class {
+   constructor(files = []) {
+     this.files = files;
+   }
+   dirPath;
+   dirName;
+   setDirPath(dir, dirName) {
+     dirName = dirName || this.dirName;
+     if (dir === "./") {
+       this.dirPath = path.resolve(dir, dirName);
+     } else {
+       this.dirPath = path.join(os.homedir(), path.join(dir, dirName));
+     }
+     this.files.forEach((file) => {
+       file.filepath = path.join(this.dirPath, file.name);
      });
-     files.push(...postFiles);
+     return this;
+   }
+   filterByText(include, exclude) {
+     this.files = this.files.filter((f) => filterString(f.textContent, include, exclude));
+     return this;
+   }
+   filterByMediaType(media) {
+     if (media) {
+       this.files = this.files.filter((f) => testMediaType(f.name, media));
+     }
+     return this;
+   }
+   skip(n) {
+     this.files = this.files.slice(n);
+     return this;
    }
-   return files;
+ };
+
+ // src/api/bunkr.ts
+ async function getEncryptionData(slug) {
+   const response = await fetch("https://bunkr.cr/api/vs", {
+     method: "POST",
+     headers: { "Content-Type": "application/json" },
+     body: JSON.stringify({ slug })
+   });
+   return await response.json();
  }
- async function parseUser(url) {
-   const [_, domain, service, id] = url.match(
-     /(https:\/\/\w+\.\w+)\/(\w+)\/user\/([\w|.|-]+)/
-   );
-   if (!domain || !service || !id) console.error("Invalid URL", url);
-   const { name } = await getUserProfileAPI({ domain, service, id });
-   return { domain, service, id, name };
+ function decryptEncryptedUrl(encryptionData) {
+   const secretKey = `SECRET_KEY_${Math.floor(encryptionData.timestamp / 3600)}`;
+   const encryptedUrlBuffer = Buffer.from(encryptionData.url, "base64");
+   const secretKeyBuffer = Buffer.from(secretKey, "utf-8");
+   return Array.from(encryptedUrlBuffer).map((byte, i) => String.fromCharCode(byte ^ secretKeyBuffer[i % secretKeyBuffer.length])).join("");
  }
- async function getCoomerData(url, mediaType) {
-   setGlobalHeaders({ accept: "text/css" });
-   const user = await parseUser(url);
-   const dirName = `${user.name}-${user.service}`;
-   const files = await getUserFiles(user, mediaType);
-   return { dirName, files };
+ async function getFileData(url, name) {
+   const slug = url.split("/").pop();
+   const encryptionData = await getEncryptionData(slug);
+   const src = decryptEncryptedUrl(encryptionData);
+   return CoomerFile.from({ name, url: src });
+ }
+ async function getGalleryFiles(url) {
+   const filelist = new CoomerFileList();
+   const page = await fetch(url).then((r) => r.text());
+   const $ = cheerio.load(page);
+   const dirName = $("title").text();
+   filelist.dirName = `${dirName.split("|")[0].trim()}-bunkr`;
+   const url_ = new URL(url);
+   if (url_.pathname.startsWith("/f/")) {
+     const fileName = $("h1").text();
+     const singleFile = await getFileData(url, fileName);
+     filelist.files.push(singleFile);
+     return filelist;
+   }
+   const fileNames = Array.from($("div[title]").map((_, e) => $(e).attr("title")));
+   const data = Array.from($("a").map((_, e) => $(e).attr("href"))).filter((a) => /\/f\/\w+/.test(a)).map((a, i) => ({
+     url: `${url_.origin}${a}`,
+     name: fileNames[i] || url.split("/").pop()
+   }));
+   for (const { name, url: url2 } of data) {
+     const res = await getFileData(url2, name);
+     filelist.files.push(res);
+   }
+   return filelist;
+ }
+ async function getBunkrData(url) {
+   const filelist = await getGalleryFiles(url);
+   return filelist;
  }

- // src/utils/files.ts
+ // src/utils/downloader.ts
+ import fs2 from "node:fs";
+ import { Readable, Transform } from "node:stream";
+ import { pipeline } from "node:stream/promises";
+ import { Subject } from "rxjs";
+
+ // src/utils/io.ts
  import fs from "node:fs";
  async function getFileSize(filepath) {
    let size = 0;
@@ -138,7 +203,7 @@ var PromiseRetry = class _PromiseRetry {
  // src/utils/requests.ts
  import { CookieAgent } from "http-cookie-agent/undici";
  import { CookieJar } from "tough-cookie";
- import { fetch, interceptors, setGlobalDispatcher } from "undici";
+ import { fetch as fetch2, interceptors, setGlobalDispatcher } from "undici";
  function setCookieJarDispatcher() {
    const jar = new CookieJar();
    const agent = new CookieAgent({ cookies: { jar } }).compose(interceptors.retry()).compose(interceptors.redirect({ maxRedirections: 3 }));
@@ -156,12 +221,12 @@ function setGlobalHeaders(headers) {
  }
  function fetchWithGlobalHeader(url) {
    const requestHeaders = new Headers(HeadersDefault);
-   return fetch(url, { headers: requestHeaders });
+   return fetch2(url, { headers: requestHeaders });
  }
  function fetchByteRange(url, downloadedSize) {
    const requestHeaders = new Headers(HeadersDefault);
    requestHeaders.set("Range", `bytes=${downloadedSize}-`);
-   return fetch(url, { headers: requestHeaders });
+   return fetch2(url, { headers: requestHeaders });
  }

  // src/utils/timer.ts
@@ -205,100 +270,77 @@ var Timer = class _Timer {
  };

  // src/utils/downloader.ts
- var subject = new Subject();
- var CHUNK_TIMEOUT = 3e4;
- var CHUNK_FETCH_RETRIES = 5;
- var FETCH_RETRIES = 7;
- async function fetchStream(file, stream) {
-   const { timer, signal } = Timer.withSignal(CHUNK_TIMEOUT, "CHUNK_TIMEOUT");
-   const fileStream = fs2.createWriteStream(file.filepath, { flags: "a" });
-   const progressStream = new Transform({
-     transform(chunk, _encoding, callback) {
-       this.push(chunk);
-       file.downloaded += chunk.length;
-       timer.reset();
-       subject.next({ type: "CHUNK_DOWNLOADING_UPDATE", file });
-       callback();
+ var Downloader = class {
+   constructor(chunkTimeout = 3e4, chunkFetchRetries = 5, fetchRetries = 7) {
+     this.chunkTimeout = chunkTimeout;
+     this.chunkFetchRetries = chunkFetchRetries;
+     this.fetchRetries = fetchRetries;
+   }
+   subject = new Subject();
+   async fetchStream(file, stream) {
+     const { subject, chunkTimeout } = this;
+     const { timer, signal } = Timer.withSignal(chunkTimeout, "chunkTimeout");
+     const fileStream = fs2.createWriteStream(file.filepath, { flags: "a" });
+     const progressStream = new Transform({
+       transform(chunk, _encoding, callback) {
+         this.push(chunk);
+         file.downloaded += chunk.length;
+         timer.reset();
+         subject.next({ type: "CHUNK_DOWNLOADING_UPDATE", file });
+         callback();
+       }
+     });
+     try {
+       subject.next({ type: "CHUNK_DOWNLOADING_START", file });
+       await pipeline(stream, progressStream, fileStream, { signal });
+     } catch (error) {
+       console.error(error.name === "AbortError" ? signal.reason : error);
+     } finally {
+       subject.next({ type: "CHUNK_DOWNLOADING_END", file });
      }
-   });
-   try {
-     subject.next({ type: "CHUNK_DOWNLOADING_START", file });
-     await pipeline(stream, progressStream, fileStream, { signal });
-   } catch (error) {
-     console.error(error.name === "AbortError" ? signal.reason : error);
-   } finally {
-     subject.next({ type: "CHUNK_DOWNLOADING_END", file });
-   }
- }
- async function downloadFile(file) {
-   file.downloaded = await getFileSize(file.filepath);
-   const response = await fetchByteRange(file.url, file.downloaded);
-   if (!response?.ok && response?.status !== 416) {
-     throw new Error(`HTTP error! status: ${response?.status}`);
-   }
-   const contentLength = response.headers.get("Content-Length");
-   if (!contentLength && file.downloaded > 0) {
-     return;
-   }
-   const restFileSize = parseInt(contentLength);
-   file.size = restFileSize + file.downloaded;
-   if (file.size > file.downloaded && response.body) {
-     const stream = Readable.fromWeb(response.body);
-     const sizeOld = file.downloaded;
-     await PromiseRetry.create({
-       retries: CHUNK_FETCH_RETRIES,
-       callback: () => {
-         if (sizeOld !== file.downloaded) {
-           return { newRetries: 5 };
+   }
+   async downloadFile(file) {
+     file.downloaded = await getFileSize(file.filepath);
+     const response = await fetchByteRange(file.url, file.downloaded);
+     if (!response?.ok && response?.status !== 416) {
+       throw new Error(`HTTP error! status: ${response?.status}`);
+     }
+     const contentLength = response.headers.get("Content-Length");
+     if (!contentLength && file.downloaded > 0) return;
+     const restFileSize = parseInt(contentLength);
+     file.size = restFileSize + file.downloaded;
+     if (file.size > file.downloaded && response.body) {
+       const stream = Readable.fromWeb(response.body);
+       const sizeOld = file.downloaded;
+       await PromiseRetry.create({
+         retries: this.chunkFetchRetries,
+         callback: () => {
+           if (sizeOld !== file.downloaded) {
+             return { newRetries: 5 };
+           }
          }
-       }
-     }).execute(async () => await fetchStream(file, stream));
-   }
-   subject.next({ type: "FILE_DOWNLOADING_END" });
- }
- async function downloadFiles(data, downloadDir) {
-   mkdir(downloadDir);
-   subject.next({ type: "FILES_DOWNLOADING_START", filesCount: data.length });
-   for (const [_, file] of data.entries()) {
-     file.filepath = path.join(downloadDir, file.name);
-     subject.next({ type: "FILE_DOWNLOADING_START" });
-     await PromiseRetry.create({
-       retries: FETCH_RETRIES,
-       callback: (retries) => {
-         if (/coomer|kemono/.test(file.url)) {
-           file.url = tryFixCoomerUrl(file.url, retries);
+       }).execute(async () => await this.fetchStream(file, stream));
+     }
+     this.subject.next({ type: "FILE_DOWNLOADING_END" });
+   }
+   async downloadFiles(filelist) {
+     mkdir(filelist.dirPath);
+     this.subject.next({ type: "FILES_DOWNLOADING_START", filesCount: filelist.files.length });
+     for (const file of filelist.files) {
+       this.subject.next({ type: "FILE_DOWNLOADING_START" });
+       await PromiseRetry.create({
+         retries: this.fetchRetries,
+         callback: (retries) => {
+           if (/coomer|kemono/.test(file.url)) {
+             file.url = tryFixCoomerUrl(file.url, retries);
+           }
          }
-       }
-     }).execute(async () => await downloadFile(file));
-     subject.next({ type: "FILE_DOWNLOADING_END" });
+       }).execute(async () => await this.downloadFile(file));
+       this.subject.next({ type: "FILE_DOWNLOADING_END" });
+     }
+     this.subject.next({ type: "FILES_DOWNLOADING_END" });
    }
-   subject.next({ type: "FILES_DOWNLOADING_END" });
- }
-
- // src/utils/filters.ts
- var isImage = (name) => /\.(jpg|jpeg|png|gif|bmp|tiff|webp|avif)$/i.test(name);
- var isVideo = (name) => /\.(mp4|m4v|avi|mov|mkv|webm|flv|wmv|mpeg|mpg|3gp)$/i.test(name);
- var testMediaType = (name, type) => type === "all" ? true : type === "image" ? isImage(name) : isVideo(name);
- function includesAllWords(str, words) {
-   if (!words.length) return true;
-   return words.every((w) => str.includes(w));
- }
- function includesNoWords(str, words) {
-   if (!words.length) return true;
-   return words.every((w) => !str.includes(w));
- }
- function parseQuery(query) {
-   return query.split(",").map((x) => x.toLowerCase().trim()).filter((_) => _);
- }
- function filterString(text, include, exclude) {
-   return includesAllWords(text, parseQuery(include)) && includesNoWords(text, parseQuery(exclude));
- }
- function filterKeywords(files, include, exclude) {
-   return files.filter((f) => {
-     const text = `${f.name || ""} ${f.content || ""}`.toLowerCase();
-     return filterString(text, include, exclude);
-   });
- }
+ };

  // src/utils/multibar.ts
  import { MultiBar } from "cli-progress";
@@ -324,13 +366,13 @@ var config = {
    hideCursor: true,
    format: "{percentage}% | {filename} | {value}/{total}{size}"
  };
- function createMultibar() {
+ function createMultibar(downloader) {
    const multibar = new MultiBar(config);
    let bar;
    let minibar;
    let filename;
    let index = 0;
-   subject.subscribe({
+   downloader.subject.subscribe({
      next: ({ type, filesCount, file }) => {
        switch (type) {
          case "FILES_DOWNLOADING_START":
@@ -367,54 +409,68 @@ function createMultibar() {
    });
  }

- // src/api/bunkr.ts
- async function getEncryptionData(slug) {
-   const response = await fetch2("https://bunkr.cr/api/vs", {
-     method: "POST",
-     headers: { "Content-Type": "application/json" },
-     body: JSON.stringify({ slug })
-   });
-   return await response.json();
+ // src/api/coomer-api.ts
+ var SERVERS = ["n1", "n2", "n3", "n4"];
+ function tryFixCoomerUrl(url, attempts) {
+   if (attempts < 2 && isImage(url)) {
+     return url.replace(/\/data\//, "/thumbnail/data/").replace(/n\d\./, "img.");
+   }
+   const server = url.match(/n\d\./)?.[0].slice(0, 2);
+   const i = SERVERS.indexOf(server);
+   if (i !== -1) {
+     const newServer = SERVERS[(i + 1) % SERVERS.length];
+     return url.replace(/n\d./, `${newServer}.`);
+   }
+   return url;
  }
- function decryptEncryptedUrl(encryptionData) {
-   const secretKey = `SECRET_KEY_${Math.floor(encryptionData.timestamp / 3600)}`;
-   const encryptedUrlBuffer = Buffer.from(encryptionData.url, "base64");
-   const secretKeyBuffer = Buffer.from(secretKey, "utf-8");
-   return Array.from(encryptedUrlBuffer).map((byte, i) => String.fromCharCode(byte ^ secretKeyBuffer[i % secretKeyBuffer.length])).join("");
+ async function getUserProfileData(user) {
+   const url = `${user.domain}/api/v1/${user.service}/user/${user.id}/profile`;
+   const result = await fetchWithGlobalHeader(url).then((r) => r.json());
+   return result;
  }
- async function getFileData(url, name) {
-   const slug = url.split("/").pop();
-   const encryptionData = await getEncryptionData(slug);
-   const src = decryptEncryptedUrl(encryptionData);
-   return { name, url: src };
+ async function getUserPostsAPI(user, offset) {
+   const url = `${user.domain}/api/v1/${user.service}/user/${user.id}/posts?o=${offset}`;
+   const posts = await fetchWithGlobalHeader(url).then((r) => r.json());
+   return posts;
  }
- async function getGalleryFiles(url, mediaType) {
-   const data = [];
-   const page = await fetch2(url).then((r) => r.text());
-   const $ = cheerio.load(page);
-   const title = $("title").text();
-   const url_ = new URL(url);
-   if (url_.pathname.startsWith("/f/")) {
-     const fileName = $("h1").text();
-     const singleFile = await getFileData(url, fileName);
-     data.push(singleFile);
-     return { title, files: data.filter((f) => testMediaType(f.name, mediaType)) };
+ async function getUserFiles(user) {
+   const userPosts = [];
+   const offset = 50;
+   for (let i = 0; i < 1e3; i++) {
+     const posts = await getUserPostsAPI(user, i * offset);
+     userPosts.push(...posts);
+     if (posts.length < 50) break;
    }
-   const fileNames = Array.from($("div[title]").map((_, e) => $(e).attr("title")));
-   const files = Array.from($("a").map((_, e) => $(e).attr("href"))).filter((a) => /\/f\/\w+/.test(a)).map((a, i) => ({
-     url: `${url_.origin}${a}`,
-     name: fileNames[i] || url.split("/").pop()
-   }));
-   for (const { name, url: url2 } of files) {
-     const res = await getFileData(url2, name);
-     data.push(res);
+   const filelist = new CoomerFileList();
+   for (const p of userPosts) {
+     const title = p.title.match(/\w+/g)?.join(" ") || "";
+     const content = p.content;
+     const date = p.published.replace(/T/, " ");
+     const datentitle = `${date} ${title}`.trim();
+     const postFiles = [...p.attachments, p.file].filter((f) => f.path).map((f, i) => {
+       const ext = f.name.split(".").pop();
+       const name = `${datentitle} ${i + 1}.${ext}`;
+       const url = `${user.domain}/${f.path}`;
+       return CoomerFile.from({ name, url, content });
+     });
+     filelist.files.push(...postFiles);
    }
-   return { title, files: data.filter((f) => testMediaType(f.name, mediaType)) };
+   return filelist;
  }
- async function getBunkrData(url, mediaType) {
-   const { files, title } = await getGalleryFiles(url, mediaType);
-   const dirName = `${title.split("|")[0].trim()}-bunkr`;
-   return { dirName, files };
+ async function parseUser(url) {
+   const [_, domain, service, id] = url.match(
+     /(https:\/\/\w+\.\w+)\/(\w+)\/user\/([\w|.|-]+)/
+   );
+   if (!domain || !service || !id) console.error("Invalid URL", url);
+   const { name } = await getUserProfileData({ domain, service, id });
+   return { domain, service, id, name };
+ }
+ async function getCoomerData(url) {
+   setGlobalHeaders({ accept: "text/css" });
+   const user = await parseUser(url);
+   const filelist = await getUserFiles(user);
+   filelist.dirName = `${user.name}-${user.service}`;
+   return filelist;
  }

  // src/api/gofile.ts
@@ -449,22 +505,22 @@ async function getFolderFiles(id, token, websiteToken) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    const data = await response.json();
-   const files = Object.values(data.data.children).map((f) => ({
-     url: f.link,
-     name: f.name
-   }));
-   return files;
+   const files = Object.values(data.data.children).map(
+     (f) => CoomerFile.from({
+       url: f.link,
+       name: f.name
+     })
+   );
+   return new CoomerFileList(files);
  }
- async function getGofileData(url, mediaType) {
+ async function getGofileData(url) {
    const id = url.match(/gofile.io\/d\/(\w+)/)?.[1];
-   const dirName = `gofile-${id}`;
    const token = await getToken();
    const websiteToken = await getWebsiteToken();
-   const files = (await getFolderFiles(id, token, websiteToken)).filter(
-     (f) => testMediaType(f.name, mediaType)
-   );
+   const filelist = await getFolderFiles(id, token, websiteToken);
+   filelist.dirName = `gofile-${id}`;
    setGlobalHeaders({ Cookie: `accountToken=${token}` });
-   return { dirName, files };
+   return filelist;
  }

  // src/api/nsfw.xxx.ts
@@ -486,9 +542,9 @@ async function getUserPosts(user) {
    }
    return posts;
  }
- async function getPostsData(posts, mediaType) {
+ async function getPostsData(posts) {
    console.log("Fetching posts data...");
-   const data = [];
+   const filelist = new CoomerFileList();
    for (const post of posts) {
      const page = await fetch4(post).then((r) => r.text());
      const $ = cheerio2.load(page);
@@ -498,49 +554,46 @@ async function getPostsData(posts, mediaType) {
      const date = $(".sh-section .sh-section__passed").first().text().replace(/ /g, "-") || "";
      const ext = src.split(".").pop();
      const name = `${slug}-${date}.${ext}`;
-     data.push({ name, url: src });
+     filelist.files.push(CoomerFile.from({ name, url: src }));
    }
-   return data.filter((f) => testMediaType(f.name, mediaType));
+   return filelist;
  }
- async function getRedditData(url, mediaType) {
+ async function getRedditData(url) {
    const user = url.match(/u\/(\w+)/)?.[1];
    const posts = await getUserPosts(user);
-   const files = await getPostsData(posts, mediaType);
-   const dirName = `${user}-reddit`;
-   return { dirName, files };
+   const filelist = await getPostsData(posts);
+   filelist.dirName = `${user}-reddit`;
+   return filelist;
  }

  // src/api/plain-curl.ts
  async function getPlainFileData(url) {
-   return {
-     dirName: "",
-     files: [
-       {
-         name: url.split("/").pop(),
-         url
-       }
-     ]
-   };
+   const name = url.split("/").pop();
+   const file = CoomerFile.from({ name, url });
+   const filelist = new CoomerFileList([file]);
+   filelist.dirName = "";
+   return filelist;
  }

  // src/api/index.ts
- async function apiHandler(url, mediaType) {
-   if (/^u\/\w+$/.test(url.trim())) {
-     return getRedditData(url, mediaType);
+ async function apiHandler(url_) {
+   const url = new URL(url_);
+   if (/^u\/\w+$/.test(url.origin)) {
+     return getRedditData(url.href);
    }
-   if (/coomer|kemono/.test(url)) {
-     return getCoomerData(url, mediaType);
+   if (/coomer|kemono/.test(url.origin)) {
+     return getCoomerData(url.href);
    }
-   if (/bunkr/.test(url)) {
-     return getBunkrData(url, mediaType);
+   if (/bunkr/.test(url.origin)) {
+     return getBunkrData(url.href);
    }
-   if (/gofile\.io/.test(url)) {
-     return getGofileData(url, mediaType);
+   if (/gofile\.io/.test(url.origin)) {
+     return getGofileData(url.href);
    }
-   if (/\.\w+/.test(url.split("/").pop())) {
-     return getPlainFileData(url);
+   if (/\.\w+/.test(url.pathname)) {
+     return getPlainFileData(url.href);
    }
-   console.error("Wrong URL.");
+   throw Error("Invalid URL");
  }

  // src/args-handler.ts
@@ -579,20 +632,24 @@ function argumentHander() {
  // src/index.ts
  async function run() {
    const { url, dir, media, include, exclude, skip } = argumentHander();
-   const { dirName, files } = await apiHandler(url, media);
-   const downloadDir = dir === "./" ? path2.resolve(dir, dirName) : path2.join(os.homedir(), path2.join(dir, dirName));
-   const filteredFiles = filterKeywords(files.slice(skip), include, exclude);
+   const filelist = await apiHandler(url);
+   const found = filelist.files.length;
+   filelist.setDirPath(dir);
+   filelist.skip(skip);
+   filelist.filterByText(include, exclude);
+   filelist.filterByMediaType(media);
    console.table([
      {
-       found: files.length,
+       found,
        skip,
-       filtered: files.length - filteredFiles.length - skip,
-       folder: downloadDir
+       filtered: found - filelist.files.length,
+       folder: filelist.dirPath
      }
    ]);
    setGlobalHeaders({ Referer: url });
-   createMultibar();
-   await downloadFiles(filteredFiles, downloadDir);
+   const downloader = new Downloader();
+   createMultibar(downloader);
+   await downloader.downloadFiles(filelist);
    process2.kill(process2.pid, "SIGINT");
  }
  run();
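
For orientation, the net shape of the refactor: 3.1.0 passed plain { dirName, files } objects between the API modules, filtered them with free functions (filterKeywords, testMediaType), and drove downloads through module-level state (subject, CHUNK_TIMEOUT, downloadFiles(files, downloadDir)). 3.2.0 wraps results in CoomerFile/CoomerFileList with chainable filtering and scopes the download pipeline to a Downloader instance whose RxJS Subject is handed to createMultibar explicitly. A minimal sketch of the new flow, mirroring run() above; every name comes from the diff, but the classes are private to dist/index.js, so the call surface shown here is illustrative only:

// Sketch only: nothing below is exported from the published bundle,
// and the URL is a hypothetical example.
const filelist = await apiHandler("https://coomer.su/onlyfans/user/example");
filelist
  .setDirPath("./")             // "./" resolves locally; anything else is joined under os.homedir()
  .skip(0)                      // drop the first n files
  .filterByText("", "")         // comma-separated include/exclude keyword lists
  .filterByMediaType("image");  // "all" | "image" | "video"

const downloader = new Downloader(); // defaults: 30 s chunk timeout, 5 chunk retries, 7 fetch retries
createMultibar(downloader);          // the progress bars subscribe to downloader.subject
await downloader.downloadFiles(filelist); // creates filelist.dirPath, then streams each file with Range-header resume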