coomer-downloader 2.6.5 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,11 @@
+ export { downloadFiles } from './downloader';
+ export { getFileSize, mkdir } from './files';
+ export { filterKeywords, isImage, isVideo, testMediaType } from './filters';
+ export { createMultibar } from './multibar';
+ export {
+   fetchByteRange,
+   fetchWithGlobalHeader,
+   HeadersDefault,
+   setGlobalHeaders,
+ } from './requests';
+ export { b2mb } from './strings';
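
For orientation, a minimal sketch of how a consumer would use the new barrel exports. The import specifier assumes the package root re-exports this file, and the `downloadFiles` call is left commented because its argument shapes are not shown in this diff:

```ts
import { createMultibar, setGlobalHeaders } from 'coomer-downloader';

// Attach the progress-bar renderer before any download events fire.
createMultibar();

// Headers set here are carried by every later fetch in this package.
setGlobalHeaders({ Referer: 'https://example.com' });

// await downloadFiles(files, downloadDir); // shapes not shown in this diff
```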
@@ -0,0 +1,62 @@
+ import { MultiBar, type Options, type SingleBar } from 'cli-progress';
+ import { subject } from './downloader';
+ import { b2mb, formatNameStdout } from './strings';
+
+ const config: Options = {
+   clearOnComplete: true,
+   gracefulExit: true,
+   autopadding: true,
+   hideCursor: true,
+   format: '{percentage}% | {filename} | {value}/{total}{size}',
+ };
+
+ export function createMultibar() {
+   const multibar = new MultiBar(config);
+   let bar: SingleBar;
+   let minibar: SingleBar;
+   let filename: string;
+   let index = 0;
+
+   subject.subscribe({
+     next: ({ type, filesCount, file }) => {
+       switch (type) {
+         case 'FILES_DOWNLOADING_START':
+           bar?.stop();
+           bar = multibar.create(filesCount as number, 0);
+           break;
+
+         case 'FILES_DOWNLOADING_END':
+           bar?.stop();
+           break;
+
+         case 'FILE_DOWNLOADING_START':
+           bar?.update(++index, { filename: 'Downloaded files', size: '' });
+           break;
+
+         case 'FILE_DOWNLOADING_END':
+           multibar.remove(minibar);
+           break;
+
+         case 'CHUNK_DOWNLOADING_START':
+           multibar?.remove(minibar);
+           filename = formatNameStdout(file?.filepath as string);
+           minibar = multibar.create(b2mb(file?.size as number), b2mb(file?.downloaded as number));
+           break;
+
+         case 'CHUNK_DOWNLOADING_UPDATE':
+           minibar?.update(b2mb(file?.downloaded as number), {
+             filename: filename as string,
+             size: 'mb',
+           });
+           break;
+
+         case 'CHUNK_DOWNLOADING_END':
+           multibar?.remove(minibar);
+           break;
+
+         default:
+           break;
+       }
+     },
+   });
+ }
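
`createMultibar` renders whatever the downloader publishes on `subject`; nothing here initiates work. A sketch of the event sequence it expects, assuming `subject` is an RxJS-style `Subject` and the payload fields mirror the cases above (the shapes are inferred, not confirmed by this diff):

```ts
import { subject } from './downloader';

// One outer bar for the whole batch...
subject.next({ type: 'FILES_DOWNLOADING_START', filesCount: 2 });
subject.next({ type: 'FILE_DOWNLOADING_START' });
// ...and one inner bar per file, driven by chunk progress in MB.
subject.next({
  type: 'CHUNK_DOWNLOADING_START',
  file: { filepath: 'out/clip 1.mp4', size: 10_485_760, downloaded: 0 },
});
subject.next({
  type: 'CHUNK_DOWNLOADING_UPDATE',
  file: { filepath: 'out/clip 1.mp4', size: 10_485_760, downloaded: 2_097_152 },
});
subject.next({ type: 'CHUNK_DOWNLOADING_END' });
subject.next({ type: 'FILE_DOWNLOADING_END' });
subject.next({ type: 'FILES_DOWNLOADING_END' });
```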
@@ -0,0 +1,53 @@
+ export async function sleep(time: number) {
+   return new Promise((resolve) => setTimeout(resolve, time));
+ }
+
+ type PromiseRetryCallback = (retries: number, error: Error) => void | { newRetries?: number };
+
+ interface PromiseRetryOptions {
+   retries?: number;
+   callback?: PromiseRetryCallback;
+   delay?: number;
+ }
+
+ export class PromiseRetry {
+   private retries: number;
+   private delay: number;
+   private callback?: PromiseRetryCallback;
+
+   constructor(options: PromiseRetryOptions) {
+     this.retries = options.retries || 3;
+     this.delay = options.delay || 1000;
+     this.callback = options.callback;
+   }
+
+   async execute(fn: () => Promise<void>) {
+     let retries = this.retries;
+
+     while (true) {
+       try {
+         return await fn();
+       } catch (error) {
+         if (retries <= 0) {
+           throw error;
+         }
+
+         if (this.callback) {
+           const res = this.callback(retries, error as Error);
+           if (res) {
+             const { newRetries } = res;
+             if (newRetries === 0) throw error;
+             this.retries = newRetries || retries;
+           }
+         }
+
+         await sleep(this.delay);
+         retries--;
+       }
+     }
+   }
+
+   static create(options: PromiseRetryOptions) {
+     return new PromiseRetry(options);
+   }
+ }
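
A usage sketch for `PromiseRetry`. The callback sees each failure before the next attempt; returning `{ newRetries: 0 }` rethrows immediately, which is the one return value with an unambiguous effect here. The URL and the 404 heuristic are placeholders:

```ts
const retry = PromiseRetry.create({
  retries: 5,
  delay: 2_000,
  callback: (retriesLeft, error) => {
    console.error(`attempt failed, ${retriesLeft} left:`, error.message);
    // Placeholder heuristic: no point retrying a missing file.
    if (error.message.includes('404')) return { newRetries: 0 };
  },
});

await retry.execute(async () => {
  const res = await fetchWithGlobalHeader('https://example.com/file.mp4');
  if (!res.ok) throw new Error(`HTTP ${res.status}`);
});
```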
@@ -0,0 +1,36 @@
+ import { CookieAgent } from 'http-cookie-agent/undici';
+ import { CookieJar } from 'tough-cookie';
+ import { fetch, interceptors, setGlobalDispatcher } from 'undici';
+
+ function setCookieJarDispatcher() {
+   const jar = new CookieJar();
+   const agent = new CookieAgent({ cookies: { jar } })
+     .compose(interceptors.retry())
+     .compose(interceptors.redirect({ maxRedirections: 3 }));
+   setGlobalDispatcher(agent);
+ }
+
+ setCookieJarDispatcher();
+
+ export const HeadersDefault = new Headers({
+   accept: 'application/json, text/css',
+   'User-Agent':
+     'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
+ });
+
+ export function setGlobalHeaders(headers: Record<string, string>) {
+   Object.keys(headers).forEach((k) => {
+     HeadersDefault.set(k, headers[k]);
+   });
+ }
+
+ export function fetchWithGlobalHeader(url: string) {
+   const requestHeaders = new Headers(HeadersDefault);
+   return fetch(url, { headers: requestHeaders });
+ }
+
+ export function fetchByteRange(url: string, downloadedSize: number) {
+   const requestHeaders = new Headers(HeadersDefault);
+   requestHeaders.set('Range', `bytes=${downloadedSize}-`);
+   return fetch(url, { headers: requestHeaders });
+ }
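
How these pieces combine for a resumable request, as a sketch (URL and byte offset are placeholders). The cookie jar is global, so session cookies from earlier responses ride along automatically:

```ts
setGlobalHeaders({ Referer: 'https://example.com/post/1' });

// Request everything after the bytes already on disk.
const res = await fetchByteRange('https://example.com/file.mp4', 1_048_576);

// 206 Partial Content: the server honored the Range header.
// 200 OK: it ignored the header and is resending the whole file.
console.log(res.status, res.headers.get('Content-Range'));
```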
@@ -0,0 +1,21 @@
+ export function b2mb(bytes: number) {
+   return Number.parseFloat((bytes / 1048576).toFixed(2));
+ }
+
+ export function sanitizeString(str: string) {
+   return (
+     str
+       .match(/(\w| |-)/g)
+       ?.join('')
+       .replace(/ +/g, ' ') || ''
+   );
+ }
+
+ export function formatNameStdout(pathname: string) {
+   const name = pathname.split('/').pop() || '';
+   const consoleWidth = process.stdout.columns;
+   const width = Math.max((consoleWidth / 2) | 0, 40);
+   if (name.length < width) return name.trim();
+   const result = `${name.slice(0, width - 15)} ... ${name.slice(-10)}`.replace(/ +/g, ' ');
+   return result;
+ }
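
Expected behavior of these helpers, traced from the code above (the last example assumes an 80-column terminal, so `width` resolves to 40):

```ts
b2mb(5_242_880); // => 5  (bytes to MB, at most two decimals)

sanitizeString('a/b:c  d'); // => 'abc d'  (keeps word chars, spaces, hyphens)

formatNameStdout('dir/2024-01-01 a-very-long-title-for-a-downloaded-video-file.mp4');
// => '2024-01-01 a-very-long-ti ... o-file.mp4'  (head ... tail, capped at width)
```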
@@ -0,0 +1,47 @@
+ export class Timer {
+   private timer: NodeJS.Timeout | undefined = undefined;
+
+   constructor(
+     private timeout = 10_000,
+     private timeoutCallback: () => void,
+   ) {
+     this.timeout = timeout;
+   }
+
+   start() {
+     this.timer = setTimeout(() => {
+       this.stop();
+       this.timeoutCallback();
+     }, this.timeout);
+     return this;
+   }
+
+   stop() {
+     if (this.timer) {
+       clearTimeout(this.timer);
+       this.timer = undefined;
+     }
+     return this;
+   }
+
+   reset() {
+     this.stop();
+     this.start();
+     return this;
+   }
+
+   static withSignal(timeout?: number, message?: string) {
+     const controller = new AbortController();
+
+     const callback = () => {
+       controller.abort(message);
+     };
+
+     const timer = new Timer(timeout, callback).start();
+
+     return {
+       timer,
+       signal: controller.signal,
+     };
+   }
+ }
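
`Timer.withSignal` pairs the watchdog with an `AbortController`, so a stalled request can be cancelled through the standard `signal` option. A sketch, assuming undici's `fetch` as configured in the requests module (URL is a placeholder):

```ts
const { timer, signal } = Timer.withSignal(10_000, 'download stalled');

try {
  const res = await fetch('https://example.com/file.mp4', { signal });
  console.log(res.status);
  // While streaming the body, call timer.reset() as chunks arrive to keep it alive.
} finally {
  timer.stop(); // disarm, otherwise the pending timeout keeps the process alive
}
```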
@@ -0,0 +1 @@
+ /// <reference types="vite/client" />
package/tsconfig.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "compilerOptions": {
+     "target": "ESNext",
+     "useDefineForClassFields": true,
+     "module": "esnext",
+     "lib": ["ESNext", "DOM", "DOM.Iterable"],
+     "skipLibCheck": true,
+
+     "moduleResolution": "bundler",
+     "isolatedModules": true,
+     "moduleDetection": "force",
+
+     "declaration": true,
+     "typeRoots": ["./dist/index.d.ts", "./src/types", "./node_modules/@types"],
+     "outDir": "./dist",
+     "rootDir": "./src",
+
+     "strict": true,
+     "noUnusedLocals": true,
+     "noUnusedParameters": true,
+     "noFallthroughCasesInSwitch": true
+   },
+   "include": ["src", "temp"]
+ }
package/index.js DELETED
@@ -1,34 +0,0 @@
- #!/usr/bin/env node
- import os from 'node:os';
- import path from 'node:path';
- import process from 'node:process';
- import { downloadFiles } from './src/downloader.js';
- import { apiHandler } from './src/api/index.js';
- import { argumentHander } from './src/args-handler.js';
- import { filterKeywords } from './src/utils/index.js';
-
- async function run() {
-   const { url, dir, media, include, exclude, skip = 0 } = argumentHander();
-
-   const { dirName, files, headerData = {} } = await apiHandler(url, media);
-
-   console.log(' ', files.length, 'files found');
-
-   const downloadDir =
-     dir === './'
-       ? path.resolve(dir, dirName)
-       : path.join(os.homedir(), path.join(dir, dirName));
-
-   const filteredFiles = filterKeywords(files, include, exclude).slice(skip);
-
-   if (skip) console.log(skip, 'files skipped');
-
-   await downloadFiles(filteredFiles, downloadDir, {
-     Referer: url,
-     ...headerData,
-   });
-
-   process.kill(process.pid, 'SIGINT');
- }
-
- run();
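
One subtlety in the removed entry point: the default `--dir './'` resolves against the current working directory, while any other value is rooted at the user's home directory. Traced with a hypothetical `dirName`:

```ts
import os from 'node:os';
import path from 'node:path';

const dirName = 'artist-onlyfans'; // hypothetical value returned by apiHandler

path.resolve('./', dirName);            // '<cwd>/artist-onlyfans'  (dir === './')
path.join(os.homedir(), 'dl', dirName); // '~/dl/artist-onlyfans'   (dir === 'dl')
```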
package/src/api/coomer-api.js DELETED
@@ -1,78 +0,0 @@
- import { fetch_, isImage, testMediaType } from '../utils/index.js';
-
- const SERVERS = ['n1', 'n2', 'n3', 'n4'];
-
- export function tryFixCoomerUrl(url, attempts) {
-   if (attempts < 2 && isImage(url)) {
-     return url.replace(/\/data\//, '/thumbnail/data/').replace(/n\d\./, 'img.');
-   }
-   const server = url.match(/n\d\./)[0].slice(0, 2);
-   const i = SERVERS.indexOf(server);
-   if (i !== -1) {
-     const newServer = SERVERS[(i + 1) % SERVERS.length];
-     return url.replace(/n\d./, `${newServer}.`);
-   }
-   return url;
- }
-
- const userProfileAPI = ({ domain, service, id }) =>
-   `${domain}/api/v1/${service}/user/${id}/profile`;
-
- const userPostsAPI = (user, offset) =>
-   `${user.domain}/api/v1/${user.service}/user/${user.id}/posts?o=${offset}`;
-
- export async function getUserFiles(user, typeFilter) {
-   const userPosts = [];
-
-   const offset = 50;
-   for (let i = 0; i < 1000; i++) {
-     const posts = await fetch_(userPostsAPI(user, offset * i)).then((r) =>
-       r.json(),
-     );
-     userPosts.push(...posts);
-     if (posts.length < 50) break;
-   }
-
-   const files = [];
-
-   for (const p of userPosts) {
-     const title = p.title.match(/\w+/g)?.join(' ') || '';
-     const content = p.content;
-     const date = p.published.replace(/T/, ' ');
-     const datentitle = `${date} ${title}`.trim();
-
-     const postFiles = [...p.attachments, p.file]
-       .filter((f) => f.path)
-       .filter((f) => testMediaType(f.name, typeFilter))
-       .map((f, i) => {
-         const ext = f.name.split('.').pop();
-         const name = `${datentitle} ${i + 1}.${ext}`;
-         const src = `${user.domain}/${f.path}`;
-         return { name, src, content };
-       });
-
-     files.push(...postFiles);
-   }
-
-   return files;
- }
-
- async function parseUser(url) {
-   const [_, domain, service, id] = url.match(
-     /(https:\/\/\w+\.\w+)\/(\w+)\/user\/([\w|\.|-]+)/,
-   );
-   if (!domain || !service || !id) console.error('Invalid URL', url);
-
-   const { name } = await fetch_(userProfileAPI({ domain, service, id })).then(
-     (r) => r.json(),
-   );
-
-   return { domain, service, id, name };
- }
-
- export async function getCoomerData(url, mediaType) {
-   const user = await parseUser(url);
-   const dirName = `${user.name}-${user.service}`;
-   const files = await getUserFiles(user, mediaType);
-   return { dirName, files };
- }
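
For the record, the fallback strategy this removed helper implemented: the last two attempts on an image dropped to the thumbnail mirror, and all other attempts rotated through the n1–n4 file servers. Traced with illustrative URLs:

```ts
// attempts >= 2: rotate to the next nN server.
tryFixCoomerUrl('https://n2.coomer.example/data/abc.mp4', 5);
// => 'https://n3.coomer.example/data/abc.mp4'

// attempts < 2 on an image: fall back to the thumbnail mirror.
tryFixCoomerUrl('https://n1.coomer.example/data/abc.jpg', 1);
// => 'https://img.coomer.example/thumbnail/data/abc.jpg'
```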
package/src/api/index.js DELETED
@@ -1,24 +0,0 @@
- import { getBunkrData } from './bunkr.js';
- import { getCoomerData } from './coomer-api.js';
- import { getGofileData } from './gofile.js';
- import { getRedditData } from './nsfw.xxx.js';
- import { getPlainFileData } from './plain-curl.js';
-
- export async function apiHandler(url, mediaType) {
-   if (/^u\/\w+$/.test(url.trim())) {
-     return getRedditData(url, mediaType);
-   }
-   if (/coomer|kemono/.test(url)) {
-     return getCoomerData(url, mediaType);
-   }
-   if (/bunkr/.test(url)) {
-     return getBunkrData(url, mediaType);
-   }
-   if (/gofile\.io/.test(url)) {
-     return getGofileData(url, mediaType);
-   }
-   if (/\.\w+/.test(url.split('/').pop())) {
-     return getPlainFileData(url);
-   }
-   console.error('Wrong URL.');
- }
package/src/api/plain-curl.js DELETED
@@ -1,11 +0,0 @@
- export async function getPlainFileData(url) {
-   return {
-     dirName: '',
-     files: [
-       {
-         name: url.split('/').pop(),
-         src: url,
-       },
-     ],
-   };
- }
package/src/args-handler.js DELETED
@@ -1,42 +0,0 @@
- import yargs from 'yargs';
- import { hideBin } from 'yargs/helpers';
-
- export function argumentHander() {
-   return yargs(hideBin(process.argv))
-     .option('url', {
-       alias: 'u',
-       type: 'string',
-       description:
-         'Coomer/Kemono/Bunkr/GoFile, u/<reddit-username> or plain file url',
-       demandOption: true,
-     })
-     .option('dir', {
-       type: 'string',
-       description: 'Directory to download files to',
-       default: './',
-     })
-     .option('media', {
-       type: 'string',
-       choices: ['video', 'image', 'all'],
-       default: 'all',
-       description:
-         "Download media type: 'video', 'image', or 'all', 'all' is default",
-     })
-     .option('include', {
-       type: 'string',
-       default: '',
-       description: 'filter files with names which includes keywords',
-     })
-     .option('exclude', {
-       type: 'string',
-       default: '',
-       description: 'filter files with names which excludes keywords',
-     })
-     .option('skip', {
-       type: 'string',
-       default: '',
-       description: 'skips N files in download queue',
-     })
-     .help()
-     .alias('help', 'h').argv;
- }
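
The removed parser produced an argv object shaped roughly as below (a sketch; yargs also attaches its own bookkeeping keys such as `_` and `$0`):

```ts
interface ParsedArgs {
  url: string;                      // required, alias -u
  dir: string;                      // default './'
  media: 'video' | 'image' | 'all'; // default 'all'
  include: string;                  // comma-separated keywords, default ''
  exclude: string;                  // comma-separated keywords, default ''
  skip: string;                     // declared as a string, later used as a count
}
```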
package/src/downloader.js DELETED
@@ -1,91 +0,0 @@
- import fs from 'node:fs';
- import path from 'node:path';
- import { Transform } from 'node:stream';
- import { pipeline } from 'node:stream/promises';
- import { b2mb, fetchByteRange, DEFAULT_HEADERS, getFileSize } from './utils/index.js';
- import { MultiBar } from 'cli-progress';
- import { tryFixCoomerUrl } from './api/coomer-api.js';
-
- const multibar = new MultiBar({
-   clearOnComplete: true,
-   gracefulExit: true,
-   autopadding: true,
-   hideCursor: true,
-   format: '{percentage}% | {filename} | {value}/{total}{size}',
- });
-
- async function downloadFile(url, outputFile, attempts = 7) {
-   let response;
-
-   try {
-     let existingFileSize = await getFileSize(outputFile);
-
-     response = await fetchByteRange(url, existingFileSize);
-
-     if (!response.ok && response.status !== 416) {
-       throw new Error(`HTTP error! status: ${response.status}`);
-     }
-
-     const contentLength = parseInt(response.headers.get('Content-Length'));
-
-     if (!response.headers.get('Content-Range') && existingFileSize > 0) {
-       return;
-     }
-
-     const fileStream = fs.createWriteStream(outputFile, { flags: 'a' });
-
-     const restFileSize = contentLength;
-     const totalFileSize = restFileSize + existingFileSize;
-
-     if (totalFileSize > existingFileSize) {
-       const bar = multibar.create(b2mb(totalFileSize), b2mb(existingFileSize));
-       const filename = outputFile.slice(-40);
-
-       const progressStream = new Transform({
-         transform(chunk, _encoding, callback) {
-           this.push(chunk);
-           existingFileSize += chunk.length;
-           bar.update(b2mb(existingFileSize), { filename, size: 'mb' });
-           callback();
-         },
-       });
-
-       await pipeline(response.body, progressStream, fileStream);
-       multibar.remove(bar);
-     }
-   } catch (error) {
-     if (attempts < 1) {
-       console.error(url);
-       console.error(error);
-     } else {
-       let newUrl = url;
-       if (/coomer|kemono/.test(response.url)) {
-         newUrl = tryFixCoomerUrl(response.url, attempts);
-       }
-       await downloadFile(newUrl, outputFile, attempts - 1);
-     }
-   }
- }
-
- export async function downloadFiles(data, downloadDir, headers) {
-   if (!fs.existsSync(downloadDir)) {
-     fs.mkdirSync(downloadDir, { recursive: true });
-   }
-
-   Object.keys(headers).forEach((k) => DEFAULT_HEADERS.set(k, headers[k]));
-
-   const bar = multibar.create(data.length, 0);
-
-   for (const [index, { name, src }] of data.entries()) {
-     const filePath = path.join(downloadDir, name);
-     try {
-       bar.update(index + 1, { filename: 'Downloaded files', size: '' });
-       await downloadFile(src.replace(/[\/]+/g, '/'), filePath);
-     } catch (error) {
-       console.error(`\nError downloading ${name}:`, error.message);
-       console.error(src);
-     }
-   }
-
-   bar.stop();
- }
package/src/utils/index.js DELETED
@@ -1,62 +0,0 @@
- import fs from 'node:fs';
- import nodeFetch from 'node-fetch';
- import makeFetchCookie from 'fetch-cookie';
- export { TransformWithTimeout } from './streams.js';
-
- export const fetch = makeFetchCookie(nodeFetch);
-
- export const DEFAULT_HEADERS = new Headers({
-   'accept': 'application/json',
-   'accept': 'text/css',
-   'User-Agent':
-     'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
- });
-
- export function fetch_(url) {
-   const requestHeaders = new Headers(DEFAULT_HEADERS);
-   const headers = Object.fromEntries(requestHeaders.entries());
-   return fetch(url, { headers });
- }
-
- export function fetchByteRange(url, existingFileSize) {
-   const requestHeaders = new Headers(DEFAULT_HEADERS);
-
-   requestHeaders.set('Range', `bytes=${existingFileSize}-`);
-
-   const headers = Object.fromEntries(requestHeaders.entries());
-
-   return fetch(url, { headers });
- }
-
- export const isImage = (name) =>
-   /\.(jpg|jpeg|png|gif|bmp|tiff|webp|avif)$/i.test(name);
-
- export const isVideo = (name) =>
-   /\.(mp4|m4v|avi|mov|mkv|webm|flv|wmv|mpeg|mpg|3gp)$/i.test(name);
-
- export const testMediaType = (name, type) =>
-   type === 'all' ? true : type === 'image' ? isImage(name) : isVideo(name);
-
- export const b2mb = (b) => Number.parseFloat((b / 1048576).toFixed(2));
-
- export function filterKeywords(files, include, exclude) {
-   const incl = include.split(',').map((x) => x.toLowerCase().trim());
-   const excl = exclude.split(',').map((x) => x.toLowerCase().trim());
-
-   const isValid = text => incl.some(e => text.includes(e)) &&
-     (!exclude.trim().length || excl.every((e) => !text.includes(e)));
-
-   return files
-     .filter(f => {
-       const text = `${f.name || ""} ${f.content || ""}`.toLowerCase();
-       return isValid(text);
-     });
- }
-
- export async function getFileSize(file) {
-   let size = 0;
-   if (fs.existsSync(file)) {
-     size = (await fs.promises.stat(file)).size || 0;
-   }
-   return size;
- }
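
Note how the removed `filterKeywords` behaved: it matched keywords against the filename and the post content together, and an empty `include` passed everything, because `''.split(',')` yields `['']` and every string includes `''`. Traced with made-up data:

```ts
const files = [
  { name: '2024 beach trip 1.mp4', content: 'sunny day' },
  { name: '2024 studio shoot 2.jpg', content: 'set photos' },
];

filterKeywords(files, 'beach,studio', 'set');
// => [files[0]]  — the second file matches 'studio' but is dropped for containing 'set'
```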
package/src/utils/streams.js DELETED
@@ -1,40 +0,0 @@
- import { Transform } from 'node:stream';
- import { setInterval, clearInterval } from 'node:timers';
-
- export class TransformWithTimeout extends Transform {
-   lastChunkReceived;
-   timeoutMilliseconds;
-   timeoutInterval;
-
-   constructor(timeoutSeconds, options) {
-     super(options);
-     if (timeoutSeconds === undefined) {
-       throw new Error(
-         'new Timeout(timeoutSeconds): timeoutSeconds is a required parameter.',
-       );
-     }
-     this.timeoutMilliseconds = timeoutSeconds * 1000;
-     this.timeoutInterval = setInterval(
-       () => this._check(),
-       this.timeoutMilliseconds,
-     );
-   }
-
-   _check() {
-     const millisecondsSinceLast = new Date().getTime() - this.lastChunkReceived;
-     if (millisecondsSinceLast > this.timeoutMilliseconds) {
-       this.emit('error', new Error(`Timed out: ${millisecondsSinceLast}ms`));
-     }
-   }
-
-   _transform(chunk, encoding, callback) {
-     this.lastChunkReceived = new Date().getTime();
-     this.push(chunk);
-     callback();
-   }
-
-   _flush(callback) {
-     clearInterval(this.timeoutInterval);
-     callback();
-   }
- }
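
The removed `TransformWithTimeout` was a pass-through stream that emitted an error when no chunk arrived within the window, which is how a `pipeline`-driven download could abort instead of hanging. A usage sketch, assuming `response` is a node-fetch response whose body is a Node readable stream:

```ts
import fs from 'node:fs';
import { pipeline } from 'node:stream/promises';

// Error out (and reject the pipeline) if the body is silent for 30 seconds.
const watchdog = new TransformWithTimeout(30);

await pipeline(response.body, watchdog, fs.createWriteStream('out.bin'));
```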