coomer-downloader 2.6.5 → 3.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -0
- package/biome.json +18 -9
- package/build.js +15 -0
- package/dist/index.js +598 -0
- package/package.json +21 -10
- package/src/api/{bunkr.js → bunkr.ts} +19 -19
- package/src/api/coomer-api.ts +93 -0
- package/src/api/{gofile.js → gofile.ts} +19 -14
- package/src/api/index.ts +28 -0
- package/src/api/{nsfw.xxx.js → nsfw.xxx.ts} +14 -13
- package/src/api/plain-curl.ts +13 -0
- package/src/args-handler.ts +55 -0
- package/src/index.ts +38 -0
- package/src/types/index.ts +23 -0
- package/src/utils/downloader.ts +102 -0
- package/src/utils/files.ts +15 -0
- package/src/utils/filters.ts +37 -0
- package/src/utils/index.ts +11 -0
- package/src/utils/multibar.ts +62 -0
- package/src/utils/promise.ts +53 -0
- package/src/utils/requests.ts +36 -0
- package/src/utils/strings.ts +21 -0
- package/src/utils/timer.ts +47 -0
- package/src/vite-env.d.ts +1 -0
- package/tsconfig.json +24 -0
- package/index.js +0 -34
- package/src/api/coomer-api.js +0 -78
- package/src/api/index.js +0 -24
- package/src/api/plain-curl.js +0 -11
- package/src/args-handler.js +0 -42
- package/src/downloader.js +0 -91
- package/src/utils/index.js +0 -62
- package/src/utils/streams.js +0 -40
package/README.md
CHANGED
@@ -1,5 +1,8 @@
 ## Coomer / Kemono / Bunkr / GoFile / Reddit-NSFW Gallery Downloader
 
+[](https://www.npmjs.com/package/coomer-downloader)
+
+
 ### Features
 * script keeps track of downloaded files and resume downloading if it's crashed.
 
package/biome.json
CHANGED
@@ -1,14 +1,20 @@
 {
-  "$schema": "https://biomejs.dev/schemas/
-  "
+  "$schema": "https://biomejs.dev/schemas/2.2.6/schema.json",
+  "assist": {
+    "actions": {
+      "source": {
+        "organizeImports": "on",
+        "useSortedKeys": "off"
+      }
+    },
     "enabled": true
   },
   "formatter": {
     "enabled": true,
-    "indentWidth": 2,
     "indentStyle": "space",
-    "
-    "lineEnding": "lf"
+    "indentWidth": 2,
+    "lineEnding": "lf",
+    "lineWidth": 100
   },
   "javascript": {
     "formatter": {
@@ -19,14 +25,17 @@
   "linter": {
     "enabled": true,
     "rules": {
+      "complexity": {
+        "noForEach": "off",
+        "noStaticOnlyClass": "off"
+      },
+      "correctness": {
+        "useParseIntRadix": "off"
+      },
       "recommended": true,
       "style": {
         "useNumberNamespace": "off"
       },
-      "complexity": {
-        "noStaticOnlyClass": "off",
-        "noForEach": "off"
-      },
       "suspicious": {
         "noRedundantUseStrict": "off"
       }
package/build.js
ADDED
@@ -0,0 +1,15 @@
+import esbuild from 'esbuild';
+
+esbuild.build({
+  entryPoints: ['src/index.ts'],
+  bundle: true,
+  platform: 'node',
+  format: 'esm',
+  packages: 'external',
+  outfile: 'dist/index.js',
+  // minify: true,
+  target: ['esnext']
+}).catch(() => process.exit(1))
+
+// "build": "esbuild src/index.ts --bundle --platform=node --format=esm --packages=external --outfile=dist/index.js"
+
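The script above leaves runtime dependencies out of the bundle (packages: 'external'), so dist/index.js still imports undici, cheerio, rxjs, cli-progress, tough-cookie, http-cookie-agent and yargs at run time. The package.json changes (+21 -10) are not expanded in this diff; a hypothetical scripts entry wiring the build file in could look like this (illustrative only, not taken from the package):

{
  "scripts": {
    "build": "node build.js"
  }
}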
package/dist/index.js
ADDED
@@ -0,0 +1,598 @@
+#!/usr/bin/env node
+
+// src/index.ts
+import os from "node:os";
+import path2 from "node:path";
+import process2 from "node:process";
+
+// src/api/bunkr.ts
+import * as cheerio from "cheerio";
+import { fetch as fetch2 } from "undici";
+
+// src/utils/downloader.ts
+import fs2 from "node:fs";
+import path from "node:path";
+import { Readable, Transform } from "node:stream";
+import { pipeline } from "node:stream/promises";
+import { Subject } from "rxjs";
+
+// src/api/coomer-api.ts
+var SERVERS = ["n1", "n2", "n3", "n4"];
+function tryFixCoomerUrl(url, attempts) {
+  if (attempts < 2 && isImage(url)) {
+    return url.replace(/\/data\//, "/thumbnail/data/").replace(/n\d\./, "img.");
+  }
+  const server = url.match(/n\d\./)?.[0].slice(0, 2);
+  const i = SERVERS.indexOf(server);
+  if (i !== -1) {
+    const newServer = SERVERS[(i + 1) % SERVERS.length];
+    return url.replace(/n\d./, `${newServer}.`);
+  }
+  return url;
+}
+async function getUserProfileAPI(user) {
+  const url = `${user.domain}/api/v1/${user.service}/user/${user.id}/profile`;
+  const result = await fetchWithGlobalHeader(url).then((r) => r.json());
+  return result;
+}
+async function getUserPostsAPI(user, offset) {
+  const url = `${user.domain}/api/v1/${user.service}/user/${user.id}/posts?o=${offset}`;
+  const posts = await fetchWithGlobalHeader(url).then((r) => r.json());
+  return posts;
+}
+async function getUserFiles(user, mediaType) {
+  const userPosts = [];
+  const offset = 50;
+  for (let i = 0; i < 1e3; i++) {
+    const posts = await getUserPostsAPI(user, i * offset);
+    userPosts.push(...posts);
+    if (posts.length < 50) break;
+  }
+  const files = [];
+  for (const p of userPosts) {
+    const title = p.title.match(/\w+/g)?.join(" ") || "";
+    const content = p.content;
+    const date = p.published.replace(/T/, " ");
+    const datentitle = `${date} ${title}`.trim();
+    const postFiles = [...p.attachments, p.file].filter((f) => f.path).filter((f) => testMediaType(f.name, mediaType)).map((f, i) => {
+      const ext = f.name.split(".").pop();
+      const name = `${datentitle} ${i + 1}.${ext}`;
+      const url = `${user.domain}/${f.path}`;
+      return { name, url, content };
+    });
+    files.push(...postFiles);
+  }
+  return files;
+}
+async function parseUser(url) {
+  const [_, domain, service, id] = url.match(
+    /(https:\/\/\w+\.\w+)\/(\w+)\/user\/([\w|.|-]+)/
+  );
+  if (!domain || !service || !id) console.error("Invalid URL", url);
+  const { name } = await getUserProfileAPI({ domain, service, id });
+  return { domain, service, id, name };
+}
+async function getCoomerData(url, mediaType) {
+  setGlobalHeaders({ accept: "text/css" });
+  const user = await parseUser(url);
+  const dirName = `${user.name}-${user.service}`;
+  const files = await getUserFiles(user, mediaType);
+  return { dirName, files };
+}
+
+// src/utils/files.ts
+import fs from "node:fs";
+async function getFileSize(filepath) {
+  let size = 0;
+  if (fs.existsSync(filepath)) {
+    size = (await fs.promises.stat(filepath)).size || 0;
+  }
+  return size;
+}
+function mkdir(filepath) {
+  if (!fs.existsSync(filepath)) {
+    fs.mkdirSync(filepath, { recursive: true });
+  }
+}
+
+// src/utils/promise.ts
+async function sleep(time) {
+  return new Promise((resolve) => setTimeout(resolve, time));
+}
+var PromiseRetry = class _PromiseRetry {
+  retries;
+  delay;
+  callback;
+  constructor(options) {
+    this.retries = options.retries || 3;
+    this.delay = options.delay || 1e3;
+    this.callback = options.callback;
+  }
+  async execute(fn) {
+    let retries = this.retries;
+    while (true) {
+      try {
+        return await fn();
+      } catch (error) {
+        if (retries <= 0) {
+          throw error;
+        }
+        if (this.callback) {
+          const res = this.callback(retries, error);
+          if (res) {
+            const { newRetries } = res;
+            if (newRetries === 0) throw error;
+            this.retries = newRetries || retries;
+          }
+        }
+        await sleep(this.delay);
+        retries--;
+      }
+    }
+  }
+  static create(options) {
+    return new _PromiseRetry(options);
+  }
+};
+
+// src/utils/requests.ts
+import { CookieAgent } from "http-cookie-agent/undici";
+import { CookieJar } from "tough-cookie";
+import { fetch, interceptors, setGlobalDispatcher } from "undici";
+function setCookieJarDispatcher() {
+  const jar = new CookieJar();
+  const agent = new CookieAgent({ cookies: { jar } }).compose(interceptors.retry()).compose(interceptors.redirect({ maxRedirections: 3 }));
+  setGlobalDispatcher(agent);
+}
+setCookieJarDispatcher();
+var HeadersDefault = new Headers({
+  accept: "application/json, text/css",
+  "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+});
+function setGlobalHeaders(headers) {
+  Object.keys(headers).forEach((k) => {
+    HeadersDefault.set(k, headers[k]);
+  });
+}
+function fetchWithGlobalHeader(url) {
+  const requestHeaders = new Headers(HeadersDefault);
+  return fetch(url, { headers: requestHeaders });
+}
+function fetchByteRange(url, downloadedSize) {
+  const requestHeaders = new Headers(HeadersDefault);
+  requestHeaders.set("Range", `bytes=${downloadedSize}-`);
+  return fetch(url, { headers: requestHeaders });
+}
+
+// src/utils/timer.ts
+var Timer = class _Timer {
+  constructor(timeout = 1e4, timeoutCallback) {
+    this.timeout = timeout;
+    this.timeoutCallback = timeoutCallback;
+    this.timeout = timeout;
+  }
+  timer = void 0;
+  start() {
+    this.timer = setTimeout(() => {
+      this.stop();
+      this.timeoutCallback();
+    }, this.timeout);
+    return this;
+  }
+  stop() {
+    if (this.timer) {
+      clearTimeout(this.timer);
+      this.timer = void 0;
+    }
+    return this;
+  }
+  reset() {
+    this.stop();
+    this.start();
+    return this;
+  }
+  static withSignal(timeout, message) {
+    const controller = new AbortController();
+    const callback = () => {
+      controller.abort(message);
+    };
+    const timer = new _Timer(timeout, callback).start();
+    return {
+      timer,
+      signal: controller.signal
+    };
+  }
+};
+
+// src/utils/downloader.ts
+var subject = new Subject();
+var CHUNK_TIMEOUT = 3e4;
+var CHUNK_FETCH_RETRIES = 5;
+var FETCH_RETRIES = 7;
+async function fetchStream(file, stream) {
+  const { timer, signal } = Timer.withSignal(CHUNK_TIMEOUT, "CHUNK_TIMEOUT");
+  const fileStream = fs2.createWriteStream(file.filepath, { flags: "a" });
+  const progressStream = new Transform({
+    transform(chunk, _encoding, callback) {
+      this.push(chunk);
+      file.downloaded += chunk.length;
+      timer.reset();
+      subject.next({ type: "CHUNK_DOWNLOADING_UPDATE", file });
+      callback();
+    }
+  });
+  try {
+    subject.next({ type: "CHUNK_DOWNLOADING_START", file });
+    await pipeline(stream, progressStream, fileStream, { signal });
+  } catch (error) {
+    console.error(error.name === "AbortError" ? signal.reason : error);
+  } finally {
+    subject.next({ type: "CHUNK_DOWNLOADING_END", file });
+  }
+}
+async function downloadFile(file) {
+  file.downloaded = await getFileSize(file.filepath);
+  const response = await fetchByteRange(file.url, file.downloaded);
+  if (!response?.ok && response?.status !== 416) {
+    throw new Error(`HTTP error! status: ${response?.status}`);
+  }
+  const contentLength = response.headers.get("Content-Length");
+  if (!contentLength && file.downloaded > 0) {
+    return;
+  }
+  const restFileSize = parseInt(contentLength);
+  file.size = restFileSize + file.downloaded;
+  if (file.size > file.downloaded && response.body) {
+    const stream = Readable.fromWeb(response.body);
+    const sizeOld = file.downloaded;
+    await PromiseRetry.create({
+      retries: CHUNK_FETCH_RETRIES,
+      callback: () => {
+        if (sizeOld !== file.downloaded) {
+          return { newRetries: 5 };
+        }
+      }
+    }).execute(async () => await fetchStream(file, stream));
+  }
+  subject.next({ type: "FILE_DOWNLOADING_END" });
+}
+async function downloadFiles(data, downloadDir) {
+  mkdir(downloadDir);
+  subject.next({ type: "FILES_DOWNLOADING_START", filesCount: data.length });
+  for (const [_, file] of data.entries()) {
+    file.filepath = path.join(downloadDir, file.name);
+    subject.next({ type: "FILE_DOWNLOADING_START" });
+    await PromiseRetry.create({
+      retries: FETCH_RETRIES,
+      callback: (retries) => {
+        if (/coomer|kemono/.test(file.url)) {
+          file.url = tryFixCoomerUrl(file.url, retries);
+        }
+      }
+    }).execute(async () => await downloadFile(file));
+    subject.next({ type: "FILE_DOWNLOADING_END" });
+  }
+  subject.next({ type: "FILES_DOWNLOADING_END" });
+}
+
+// src/utils/filters.ts
+var isImage = (name) => /\.(jpg|jpeg|png|gif|bmp|tiff|webp|avif)$/i.test(name);
+var isVideo = (name) => /\.(mp4|m4v|avi|mov|mkv|webm|flv|wmv|mpeg|mpg|3gp)$/i.test(name);
+var testMediaType = (name, type) => type === "all" ? true : type === "image" ? isImage(name) : isVideo(name);
+function includesAllWords(str, words) {
+  if (!words.length) return true;
+  return words.every((w) => str.includes(w));
+}
+function includesNoWords(str, words) {
+  if (!words.length) return true;
+  return words.every((w) => !str.includes(w));
+}
+function parseQuery(query) {
+  return query.split(",").map((x) => x.toLowerCase().trim()).filter((_) => _);
+}
+function filterString(text, include, exclude) {
+  return includesAllWords(text, parseQuery(include)) && includesNoWords(text, parseQuery(exclude));
+}
+function filterKeywords(files, include, exclude) {
+  return files.filter((f) => {
+    const text = `${f.name || ""} ${f.content || ""}`.toLowerCase();
+    return filterString(text, include, exclude);
+  });
+}
+
+// src/utils/multibar.ts
+import { MultiBar } from "cli-progress";
+
+// src/utils/strings.ts
+function b2mb(bytes) {
+  return Number.parseFloat((bytes / 1048576).toFixed(2));
+}
+function formatNameStdout(pathname) {
+  const name = pathname.split("/").pop() || "";
+  const consoleWidth = process.stdout.columns;
+  const width = Math.max(consoleWidth / 2 | 0, 40);
+  if (name.length < width) return name.trim();
+  const result = `${name.slice(0, width - 15)} ... ${name.slice(-10)}`.replace(/ +/g, " ");
+  return result;
+}
+
+// src/utils/multibar.ts
+var config = {
+  clearOnComplete: true,
+  gracefulExit: true,
+  autopadding: true,
+  hideCursor: true,
+  format: "{percentage}% | {filename} | {value}/{total}{size}"
+};
+function createMultibar() {
+  const multibar = new MultiBar(config);
+  let bar;
+  let minibar;
+  let filename;
+  let index = 0;
+  subject.subscribe({
+    next: ({ type, filesCount, file }) => {
+      switch (type) {
+        case "FILES_DOWNLOADING_START":
+          bar?.stop();
+          bar = multibar.create(filesCount, 0);
+          break;
+        case "FILES_DOWNLOADING_END":
+          bar?.stop();
+          break;
+        case "FILE_DOWNLOADING_START":
+          bar?.update(++index, { filename: "Downloaded files", size: "" });
+          break;
+        case "FILE_DOWNLOADING_END":
+          multibar.remove(minibar);
+          break;
+        case "CHUNK_DOWNLOADING_START":
+          multibar?.remove(minibar);
+          filename = formatNameStdout(file?.filepath);
+          minibar = multibar.create(b2mb(file?.size), b2mb(file?.downloaded));
+          break;
+        case "CHUNK_DOWNLOADING_UPDATE":
+          minibar?.update(b2mb(file?.downloaded), {
+            filename,
+            size: "mb"
+          });
+          break;
+        case "CHUNK_DOWNLOADING_END":
+          multibar?.remove(minibar);
+          break;
+        default:
+          break;
+      }
+    }
+  });
+}
+
+// src/api/bunkr.ts
+async function getEncryptionData(slug) {
+  const response = await fetch2("https://bunkr.cr/api/vs", {
+    method: "POST",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify({ slug })
+  });
+  return await response.json();
+}
+function decryptEncryptedUrl(encryptionData) {
+  const secretKey = `SECRET_KEY_${Math.floor(encryptionData.timestamp / 3600)}`;
+  const encryptedUrlBuffer = Buffer.from(encryptionData.url, "base64");
+  const secretKeyBuffer = Buffer.from(secretKey, "utf-8");
+  return Array.from(encryptedUrlBuffer).map((byte, i) => String.fromCharCode(byte ^ secretKeyBuffer[i % secretKeyBuffer.length])).join("");
+}
+async function getFileData(url, name) {
+  const slug = url.split("/").pop();
+  const encryptionData = await getEncryptionData(slug);
+  const src = decryptEncryptedUrl(encryptionData);
+  return { name, url: src };
+}
+async function getGalleryFiles(url, mediaType) {
+  const data = [];
+  const page = await fetch2(url).then((r) => r.text());
+  const $ = cheerio.load(page);
+  const title = $("title").text();
+  const url_ = new URL(url);
+  if (url_.pathname.startsWith("/f/")) {
+    const fileName = $("h1").text();
+    const singleFile = await getFileData(url, fileName);
+    data.push(singleFile);
+    return { title, files: data.filter((f) => testMediaType(f.name, mediaType)) };
+  }
+  const fileNames = Array.from($("div[title]").map((_, e) => $(e).attr("title")));
+  const files = Array.from($("a").map((_, e) => $(e).attr("href"))).filter((a) => /\/f\/\w+/.test(a)).map((a, i) => ({
+    url: `${url_.origin}${a}`,
+    name: fileNames[i] || url.split("/").pop()
+  }));
+  for (const { name, url: url2 } of files) {
+    const res = await getFileData(url2, name);
+    data.push(res);
+  }
+  return { title, files: data.filter((f) => testMediaType(f.name, mediaType)) };
+}
+async function getBunkrData(url, mediaType) {
+  const { files, title } = await getGalleryFiles(url, mediaType);
+  const dirName = `${title.split("|")[0].trim()}-bunkr`;
+  return { dirName, files };
+}
+
+// src/api/gofile.ts
+import { fetch as fetch3 } from "undici";
+async function getToken() {
+  const response = await fetch3("https://api.gofile.io/accounts", {
+    method: "POST"
+  });
+  const data = await response.json();
+  if (data.status === "ok") {
+    return data.data.token;
+  }
+  throw new Error("cannot get token");
+}
+async function getWebsiteToken() {
+  const response = await fetch3("https://gofile.io/dist/js/global.js");
+  const alljs = await response.text();
+  const match = alljs.match(/appdata\.wt = "([^"]+)"/);
+  if (match?.[1]) {
+    return match[1];
+  }
+  throw new Error("cannot get wt");
+}
+async function getFolderFiles(id, token, websiteToken) {
+  const url = `https://api.gofile.io/contents/${id}?wt=${websiteToken}&cache=true}`;
+  const response = await fetch3(url, {
+    headers: {
+      Authorization: `Bearer ${token}`
+    }
+  });
+  if (!response.ok) {
+    throw new Error(`HTTP error! status: ${response.status}`);
+  }
+  const data = await response.json();
+  const files = Object.values(data.data.children).map((f) => ({
+    url: f.link,
+    name: f.name
+  }));
+  return files;
+}
+async function getGofileData(url, mediaType) {
+  const id = url.match(/gofile.io\/d\/(\w+)/)?.[1];
+  const dirName = `gofile-${id}`;
+  const token = await getToken();
+  const websiteToken = await getWebsiteToken();
+  const files = (await getFolderFiles(id, token, websiteToken)).filter(
+    (f) => testMediaType(f.name, mediaType)
+  );
+  setGlobalHeaders({ Cookie: `accountToken=${token}` });
+  return { dirName, files };
+}
+
+// src/api/nsfw.xxx.ts
+import * as cheerio2 from "cheerio";
+import { fetch as fetch4 } from "undici";
+async function getUserPage(user, offset) {
+  const url = `https://nsfw.xxx/page/${offset}?nsfw[]=0&types[]=image&types[]=video&types[]=gallery&slider=1&jsload=1&user=${user}&_=${Date.now()}`;
+  return fetch4(url).then((r) => r.text());
+}
+async function getUserPosts(user) {
+  console.log("Fetching user posts...");
+  const posts = [];
+  for (let i = 1; i < 1e5; i++) {
+    const page = await getUserPage(user, i);
+    if (page.length < 1) break;
+    const $ = cheerio2.load(page);
+    const newPosts = $("a").map((_, a) => $(a).attr("href")).get().filter((href) => href?.startsWith("https://nsfw.xxx/post"));
+    posts.push(...newPosts);
+  }
+  return posts;
+}
+async function getPostsData(posts, mediaType) {
+  console.log("Fetching posts data...");
+  const data = [];
+  for (const post of posts) {
+    const page = await fetch4(post).then((r) => r.text());
+    const $ = cheerio2.load(page);
+    const src = $(".sh-section .sh-section__image img").attr("src") || $(".sh-section .sh-section__image video source").attr("src") || null;
+    if (!src) continue;
+    const slug = post.split("post/")[1].split("?")[0];
+    const date = $(".sh-section .sh-section__passed").first().text().replace(/ /g, "-") || "";
+    const ext = src.split(".").pop();
+    const name = `${slug}-${date}.${ext}`;
+    data.push({ name, url: src });
+  }
+  return data.filter((f) => testMediaType(f.name, mediaType));
+}
+async function getRedditData(url, mediaType) {
+  const user = url.match(/u\/(\w+)/)?.[1];
+  const posts = await getUserPosts(user);
+  const files = await getPostsData(posts, mediaType);
+  const dirName = `${user}-reddit`;
+  return { dirName, files };
+}
+
+// src/api/plain-curl.ts
+async function getPlainFileData(url) {
+  return {
+    dirName: "",
+    files: [
+      {
+        name: url.split("/").pop(),
+        url
+      }
+    ]
+  };
+}
+
+// src/api/index.ts
+async function apiHandler(url, mediaType) {
+  if (/^u\/\w+$/.test(url.trim())) {
+    return getRedditData(url, mediaType);
+  }
+  if (/coomer|kemono/.test(url)) {
+    return getCoomerData(url, mediaType);
+  }
+  if (/bunkr/.test(url)) {
+    return getBunkrData(url, mediaType);
+  }
+  if (/gofile\.io/.test(url)) {
+    return getGofileData(url, mediaType);
+  }
+  if (/\.\w+/.test(url.split("/").pop())) {
+    return getPlainFileData(url);
+  }
+  console.error("Wrong URL.");
+}
+
+// src/args-handler.ts
+import yargs from "yargs";
+import { hideBin } from "yargs/helpers";
+function argumentHander() {
+  return yargs(hideBin(process.argv)).option("url", {
+    alias: "u",
+    type: "string",
+    description: "A URL from Coomer/Kemono/Bunkr/GoFile, a Reddit user (u/<username>), or a direct file link",
+    demandOption: true
+  }).option("dir", {
+    type: "string",
+    description: "The directory where files will be downloaded",
+    default: "./"
+  }).option("media", {
+    type: "string",
+    choices: ["video", "image", "all"],
+    default: "all",
+    description: "The type of media to download: 'video', 'image', or 'all'. 'all' is the default."
+  }).option("include", {
+    type: "string",
+    default: "",
+    description: "Filter file names by a comma-separated list of keywords to include"
+  }).option("exclude", {
+    type: "string",
+    default: "",
+    description: "Filter file names by a comma-separated list of keywords to exclude"
+  }).option("skip", {
+    type: "number",
+    default: 0,
+    description: "Skips the first N files in the download queue"
+  }).help().alias("help", "h").parseSync();
+}
+
+// src/index.ts
+async function run() {
+  const { url, dir, media, include, exclude, skip } = argumentHander();
+  const { dirName, files } = await apiHandler(url, media);
+  const downloadDir = dir === "./" ? path2.resolve(dir, dirName) : path2.join(os.homedir(), path2.join(dir, dirName));
+  const filteredFiles = filterKeywords(files.slice(skip), include, exclude);
+  console.table([
+    {
+      found: files.length,
+      skip,
+      filtered: files.length - filteredFiles.length - skip,
+      folder: downloadDir
+    }
+  ]);
+  setGlobalHeaders({ Referer: url });
+  createMultibar();
+  await downloadFiles(filteredFiles, downloadDir);
+  process2.kill(process2.pid, "SIGINT");
+}
+run();
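The bundle above resumes interrupted downloads by checking how much of a file is already on disk (getFileSize), requesting only the missing bytes with a Range header (fetchByteRange), and appending the response stream to the existing file (fetchStream), with PromiseRetry re-running the fetch when a chunk stalls. A minimal standalone sketch of that resume pattern, using Node 18+'s built-in fetch instead of the package's undici/cookie-jar setup (resumeDownload is an illustrative name, not part of the package's API):

import fs from "node:fs";
import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises";

// Sketch only: append the missing byte range of a URL to a (possibly partial) local file.
async function resumeDownload(url, filepath) {
  const downloaded = fs.existsSync(filepath) ? fs.statSync(filepath).size : 0;
  const response = await fetch(url, { headers: { Range: `bytes=${downloaded}-` } });
  // 416 = requested range not satisfiable, i.e. the file is already complete.
  if (response.status === 416) return;
  if (!response.ok || !response.body) throw new Error(`HTTP error! status: ${response.status}`);
  const fileStream = fs.createWriteStream(filepath, { flags: "a" }); // append, never truncate
  await pipeline(Readable.fromWeb(response.body), fileStream);
}

// Example: await resumeDownload("https://example.com/file.mp4", "./file.mp4");

Unlike this sketch, the real downloadFile also derives the total size from Content-Length plus the bytes already on disk, skips the body when nothing is left to fetch, and reports progress through an rxjs Subject consumed by the cli-progress multibar.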