curlie 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +202 -0
- package/bin/index.js +70 -0
- package/bin/localfile.txt +1 -0
- package/eslint.config.js +7 -0
- package/lib/dnsHandler.js +141 -0
- package/lib/fileHandler.js +77 -0
- package/lib/flags.js +141 -0
- package/lib/ftpHandler.js +123 -0
- package/lib/httpHandler.js +316 -0
- package/lib/monitor.js +61 -0
- package/lib/utils.js +588 -0
- package/package.json +30 -0
package/lib/utils.js
ADDED
|
@@ -0,0 +1,588 @@
|
|
|
1
|
+
import fs, { existsSync } from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import { pipeline } from "stream";
|
|
4
|
+
import { Transform, Readable } from "stream";
|
|
5
|
+
import { createSpeedMonitorStream } from "./monitor.js";
|
|
6
|
+
|
|
7
|
+
/**
 * Expand working-directory placeholders ($(pwd), ${pwd}, $PWD) in a URL.
 *
 * Fix: String.prototype.replace with a string pattern substitutes only the
 * FIRST occurrence; split/join replaces every occurrence of each placeholder.
 *
 * @param {string|null|undefined} url - raw URL possibly containing placeholders
 * @returns {string|null|undefined} the expanded URL, or the input unchanged
 *   when it is falsy
 */
export function expandLocalVars(url) {
  if (!url) return url;
  const cwd = process.cwd();
  return url
    .split("$(pwd)").join(cwd)
    .split("${pwd}").join(cwd)
    .split("$PWD").join(cwd);
}
|
|
14
|
+
|
|
15
|
+
/**
 * True when the target uses the file:// scheme.
 *
 * Robustness fix: guards against non-string input (null/undefined/number)
 * instead of throwing TypeError on `startsWith`.
 *
 * @param {unknown} url - candidate URL
 * @returns {boolean}
 */
export function isFileURL(url) {
  return typeof url === "string" && url.startsWith("file://");
}
|
|
18
|
+
|
|
19
|
+
/**
 * True when the target uses the ftp:// scheme.
 *
 * Robustness fix: guards against non-string input (null/undefined/number)
 * instead of throwing TypeError on `startsWith`; consistent with isFileURL.
 *
 * @param {unknown} url - candidate URL
 * @returns {boolean}
 */
export function isFtp(url) {
  return typeof url === "string" && url.startsWith("ftp://");
}
|
|
22
|
+
|
|
23
|
+
/**
 * Populate the Authorization header from auth-related CLI flags.
 *
 * Precedence follows assignment order: --user is overridden by --bearer,
 * which is overridden by --apikey (the last applicable flag wins).
 *
 * Fix: the user:password pair is split on the FIRST colon only, so passwords
 * containing ":" survive intact, and a missing password becomes "" instead
 * of the literal string "undefined" (the old destructure of split(":")).
 *
 * @param {object} [headers={}] - header map to mutate and return
 * @param {object} flags - parsed CLI flags (user, bearer, apikey)
 * @returns {object} the same headers object, possibly with Authorization set
 */
export function applyAuth(headers = {}, flags) {
  if (flags.user) {
    const sep = flags.user.indexOf(":");
    const user = sep === -1 ? flags.user : flags.user.slice(0, sep);
    const pass = sep === -1 ? "" : flags.user.slice(sep + 1);
    headers["Authorization"] =
      "Basic " + Buffer.from(`${user}:${pass}`).toString("base64");
  }
  if (flags.bearer) headers["Authorization"] = `Bearer ${flags.bearer}`;
  if (flags.apikey) headers["Authorization"] = `Api-Key ${flags.apikey}`;
  return headers;
}
|
|
33
|
+
|
|
34
|
+
/**
 * Write a fully-buffered response body either to stdout or to the file
 * named by -o/--output.
 *
 * Stdout path: raw Buffers are refused (exit 1) to protect the terminal;
 * with --limit-rate the data is piped through the throttling transform.
 * File path: honours --no-clobber, creates parent directories, and applies
 * --create-file-mode (octal) or 0644.
 *
 * @param {string|Buffer} data - the complete response body
 * @param {object} flags - parsed CLI flags
 */
export function writeOutput(data, flags) {
  // No output target: emit to stdout (optionally throttled).
  if (!flags.output) {
    if (Buffer.isBuffer(data)) {
      process.stderr.write("Binary data detected. Use -o or --output-dir.\n");
      process.exit(1);
    } else if (flags.rateLimit) {
      const payload = Buffer.isBuffer(data) ? data : Buffer.from(data, "utf8");
      const source = Readable.from(payload);
      const limiter = createRateLimitStream(Number(flags.rateLimit));

      pipeline(source, limiter, process.stdout, (err) => {
        // EPIPE just means the consumer (e.g. `head`) closed stdout early.
        if (err && err.code !== "EPIPE") {
          console.error("Pipeline failed:", err.message);
        }
      });
      return;
    }

    process.stdout.write(data);
    return;
  }

  // --no-clobber: leave an existing file untouched.
  if (flags.noClobber && fs.existsSync(flags.output)) return;

  fs.mkdirSync(path.dirname(flags.output), { recursive: true });

  const fileMode = flags["create-file-mode"]
    ? parseInt(flags["create-file-mode"], 8)
    : 0o644;

  fs.writeFileSync(flags.output, data, { mode: fileMode });
  // writeFileSync's mode is masked by the umask; chmod enforces it exactly.
  fs.chmodSync(flags.output, fileMode);
}
|
|
73
|
+
|
|
74
|
+
/**
 * Heuristic: does the response carry binary content, judging by its
 * Content-Type header? Matches octet-stream, any image/* type, and PDF.
 *
 * @param {import("http").IncomingMessage} res - response with a headers map
 * @returns {boolean} true when the content type looks binary
 */
export function isBinaryResponse(res) {
  const contentType = res.headers["content-type"] || "";
  const binaryMarkers = ["application/octet-stream", "image/", "application/pdf"];
  return binaryMarkers.some((marker) => contentType.includes(marker));
}
|
|
82
|
+
|
|
83
|
+
/**
 * Decide where a response body should be written and, for the -O paths,
 * start the streaming itself.
 *
 * @param {import("http").IncomingMessage} res - the HTTP response
 * @param {object} flags - parsed CLI flags
 * @param {URL} parsed - parsed request URL (pathname supplies the remote filename)
 * @param {boolean} isBinary - result of isBinaryResponse(res)
 * @returns {import("stream").Writable|null|undefined} destination stream;
 *   null when --no-clobber suppresses the write; undefined when binary
 *   output to the terminal is refused.
 */
export function getStreamDestination(res, flags, parsed, isBinary) {
  // --output-dir with -O: save under the remote filename inside that directory.
  if (flags.outputDir && flags.remoteName) {
    const filename = path.basename(parsed.pathname) || "index.html";
    const fullPath = path.join(flags.outputDir, filename);

    fs.mkdirSync(flags.outputDir, { recursive: true });
    return fs.createWriteStream(fullPath);
  }

  // -O -R set remote time stamp to the file
  var filename;
  if (flags.remoteName && !flags.preserveTimeStamp) {
    // -O alone: stream straight into a file named after the remote path.
    filename = path.basename(parsed.pathname);
    const writeStream = fs.createWriteStream(filename);

    try {
      // NOTE(review): streamResponse wires the pipeline asynchronously, so
      // this try/catch only covers synchronous setup errors.
      streamResponse(res, writeStream);
    } catch (err) {
      console.error("Failed to stream response to file:", err);
      process.exit(1);
    }

    return writeStream;
  } else if (flags.remoteName && flags.preserveTimeStamp) {
    // -O -R: as above, then copy the server's timestamp onto the local file.
    filename = path.basename(parsed.pathname);
    const timeStamp = res.headers["last-modified"] || res.headers["date"];

    const writeStream = fs.createWriteStream(filename);

    try {
      streamResponse(res, writeStream);
    } catch (err) {
      console.error("Failed to stream response to file:", err);
      process.exit(1);
    }

    if (timeStamp) {
      const date = new Date(timeStamp);
      // Async utimes is fire-and-forget; it may run before the download
      // finishes writing — TODO confirm this ordering is acceptable.
      fs.utimes(filename, date, date, (err) => {
        if (err)
          console.error(
            `Failed to update the timestamp for ${timeStamp}:`,
            err,
          );
      });
    }
    return writeStream;
  }

  // -o <file> (not "-"): return a file stream, honouring --no-clobber.
  if (flags.output && flags.output !== "-") {
    if (flags.noClobber && fs.existsSync(flags.output)) {
      // Drain the socket so the connection can complete/be reused.
      res.resume();
      return null;
    }

    const dir = path.dirname(flags.output);
    fs.mkdirSync(dir, { recursive: true });
    return fs.createWriteStream(flags.output);
  }

  // --max-filesize: abort the transfer once the byte cap is exceeded.
  // NOTE(review): this listener is only installed on the stdout fall-through
  // path, so the file-output branches above are not size-capped — confirm
  // that is intended.
  if (flags.maxFilesize) {
    let downloaded = 0;
    res.on("data", (chunk) => {
      downloaded += chunk.length;

      if (flags.maxFilesize && downloaded > flags.maxFilesize) {
        res.removeAllListeners("data");
        res.destroy(new Error("Maximum file size exceeded"));
        return;
      }
    });
  }

  // Refuse to dump binary data to the terminal (caller gets undefined).
  if (isBinary && !flags.output) {
    console.error(
      "binary output can mess up your terminal use -o <FILE> to output to a file",
    );
    return;
  }

  return process.stdout;
}
|
|
165
|
+
|
|
166
|
+
/**
 * Pipe a response into a destination stream, inserting optional throttling
 * (--limit-rate) and slow-transfer monitoring (-Y/-y) stages.
 * A null/undefined destination makes this a no-op.
 *
 * @param {import("stream").Readable} res - source (HTTP response) stream
 * @param {import("stream").Writable|null} destination - target stream
 * @param {object} [flags={}] - parsed CLI flags
 */
export function streamResponse(res, destination, flags = {}) {
  if (!destination) return;

  // Assemble the pipeline: response -> [throttle] -> [speed monitor] -> sink.
  const stages = [res];
  if (flags.rateLimit) stages.push(createRateLimitStream(flags.rateLimit));
  if (flags.speedLimit && flags.speedTime) {
    stages.push(createSpeedMonitorStream(flags.speedLimit, flags.speedTime));
  }
  stages.push(destination);

  pipeline(...stages, (err) => {
    if (err) console.error("Pipeline failed:", err.message);
  });
}
|
|
185
|
+
|
|
186
|
+
/**
 * Buffer an entire response body as UTF-8 text, then hand it to writeOutput.
 * With -Y/-y set, the body first flows through the speed monitor so transfers
 * that stay below the speed floor get aborted (stream error -> exit 28).
 *
 * @param {import("http").IncomingMessage} res - the HTTP response stream
 * @param {object} flags - parsed CLI flags
 * @param {Function} [resolve] - invoked once the body has been written
 */
export function bufferResponse(res, flags, resolve) {
  // Optionally route through the speed monitor so slow transfers can abort.
  let source = res;
  if (flags.speedLimit && flags.speedTime) {
    source = res.pipe(
      createSpeedMonitorStream(flags.speedLimit, flags.speedTime),
    );
  }

  source.setEncoding("utf8");

  let body = "";
  source.on("data", (chunk) => {
    body += chunk;
  });

  source.on("end", () => {
    writeOutput(body, flags);
    if (resolve) resolve();
  });

  source.on("error", (err) => {
    console.error("Transfer aborted:", err.message);
    process.exit(28); // Exit code 28: Operation timeout
  });
}
|
|
211
|
+
|
|
212
|
+
/**
 * Print usage/help text for nodeCurl.
 *
 * @param {string|undefined} module - help topic: "ftp", "http", "file",
 *   "output", "connection", "dns", or falsy for the general overview.
 *   Unknown topics print the synopsis plus a hint.
 *
 * Note: the general overview (no module) RETURNS normally; every topic
 * page falls through to process.exit(0) at the bottom.
 */
export function showHelp(module) {
  // Shared synopsis and examples reused by several help pages.
  const synopsis = `Usage: nodeCurl [options] <url>
       nodeCurl ftp://<host>/<path>
       nodeCurl file://<path>`;

  const examples = `Examples:
  nodeCurl -k https://example.com
  nodeCurl -o file.html https://example.com/file
  nodeCurl -L https://example.com/redirect
  nodeCurl -d "name=value" https://example.com/post
  nodeCurl -X POST -d '{"json":"data"}' https://api.example.com
  nodeCurl -c cookies.txt -b cookies.txt https://example.com
  nodeCurl --resolve example.com:443:127.0.0.1 https://example.com`;

  // General overview.
  if (!module) {
    console.log(synopsis);
    console.log(`
Options:
  -k, --insecure             Allow insecure server connections (SSL)
  -L, --location             Follow HTTP redirects
  -O, --remote-name          Write output using remote filename
  -o, --output <file>        Write to file
  -T, --upload <file>        Upload file
  -d, --data <data>          HTTP POST data
  -G, --get                  Send GET request (default)
  -X, --request <cmd>        HTTP request method
  -H, --header <header>      Add request header
  -A, --user-agent <ua>      Set User-Agent
  -e, --referer <URL>        Set Referer
  -b, --cookie <data>        Send cookies (string or file)
  -c, --cookie-jar <file>    Save cookies to file
  -i, --include              Include response headers
  -I, --head                 Fetch headers only
  -v, --verbose              Verbose mode
  -u, --user <user:pass>     HTTP authentication
  -4, --ipv4                 Resolve to IPv4 only
  -6, --ipv6                 Resolve to IPv6 only
  --limit-rate <speed>       Limit download speed (e.g. 1M, 100K)
  --max-time <sec>           Maximum time for transfer
  --connect-timeout <sec>    Connection timeout
  --retry <num>              Retry on transient errors
  --resolve <host:port:ip>   Custom DNS resolve
  --dns-servers <addrs>      Custom DNS servers
  --doh-url <url>            DNS-over-HTTPS
  --max-redirs <num>         Max redirects to follow
  -Z, --parallel <urls>      Parallel downloads
  --parallel-max <num>       Max parallel connections

FTP Options:
  -l, --list-only            List directory
  -a, --append               Append to file
  -Q, --quote <cmd>          Send FTP command

${examples}
`);
    return;
  }

  // Topic-specific help pages.
  switch (module) {
    case "ftp":
      console.log(`Usage: nodeCurl ftp [options] ftp://<host>/<path>

Options:
  -u, --user <user:password>   Authentication
  -l, --list-only              List directory contents
  -I, --head                   Get file metadata only
  -o, --output <file>          Download to local file
  -T, --upload <file>          Upload local file
  -a, --append                 Append to remote file
  -X, --request <command>      Raw FTP command
  -Q, --quote <command>        Command before transfer
  --ssl                        Use TLS/FTPS
  -v, --verbose                Show protocol details

Examples:
  nodeCurl -l ftp://ftp.example.com/
  nodeCurl -o file.txt ftp://ftp.example.com/file.txt
  nodeCurl -T local.txt ftp://ftp.example.com/upload/`);
      break;

    case "http":
      console.log(`Usage: nodeCurl [options] <url>

Options:
  -X, --request <method>   HTTP method (GET, POST, PUT, DELETE, etc)
  -d, --data <data>        POST data
  --json <json>            JSON data (sets Content-Type: application/json)
  -H, --header <header>    Add custom header (Header: Value)
  -A, --user-agent <name>  User-Agent string
  -e, --referer <URL>      Referer URL
  -b, --cookie <data>      Cookie string or @filename
  -c, --cookie-jar <file>  Save cookies to file
  -i, --include            Include response headers
  -v, --verbose            Show request/response details
  -k, --insecure           Allow insecure SSL
  -L, --location           Follow redirects
  --max-redirs <num>       Max redirects
  --retry <num>            Retry on error
  --limit-rate <speed>     Limit speed

Examples:
  nodeCurl https://example.com
  nodeCurl -X POST -d "name=test" https://api.example.com
  nodeCurl -H "Authorization: Bearer token" https://api.example.com
  nodeCurl -L https://example.com/redirect
  nodeCurl -c cookies.txt -b cookies.txt https://example.com`);
      break;

    case "file":
      console.log(`Usage: nodeCurl file://<path>

Options:
  -H, --head    Show file info (size, modified date)
  -T, --upload  Write to local file

Examples:
  nodeCurl file:///home/user/test.txt
  nodeCurl -H file:///home/user/test.txt
  nodeCurl -T source.txt file:///dest.txt`);
      break;

    case "output":
      console.log(`Output Options:
  -o, --output <file>        Write to specific file
  -O, --remote-name          Write using remote filename
  --output-dir <dir>         Output directory
  -i, --include              Include response headers
  -I, --head                 Headers only
  -R, --remote-time          Preserve remote timestamp
  --no-clobber               Don't overwrite files
  -N, --no-buffer            No buffering
  --create-dirs              Create directories
  --create-file-mode <mode>  File permissions

Examples:
  nodeCurl -o page.html https://example.com
  nodeCurl -O https://example.com/file.txt
  nodeCurl --output-dir ./downloads https://example.com/`);
      break;

    case "connection":
      console.log(`Connection Options:
  --limit-rate <speed>         Speed limit (e.g. 1M, 100K, 1024)
  --max-time <seconds>         Total timeout
  --connect-timeout <seconds>  Connection timeout
  -Y, --speed-limit <speed>    Abort if too slow
  -y, --speed-time <seconds>   Slow period before abort
  --max-filesize <bytes>       Max response size
  -Z, --parallel <urls>        Parallel downloads
  --parallel-max <num>         Max parallel
  --retry <num>                Retry count
  --retry-delay <seconds>      Delay between retries

Examples:
  nodeCurl --limit-rate 1M https://example.com/file
  nodeCurl --max-time 60 https://example.com
  nodeCurl -Z url1 url2 url3
  nodeCurl --retry 3 https://unreliable.example.com`);
      break;

    case "dns":
      console.log(`DNS Options:
  --resolve <host:port:addr>  Custom address for host:port
  --dns-servers <addrs>       DNS servers (comma-separated)
  --doh-url <url>             DNS-over-HTTPS URL
  -4, --ipv4                  IPv4 only
  -6, --ipv6                  IPv6 only

Examples:
  nodeCurl --resolve example.com:443:127.0.0.1 https://example.com
  nodeCurl --dns-servers 1.1.1.1,8.8.8.8 https://example.com
  nodeCurl --doh-url https://dns.google/resolve https://example.com
  nodeCurl --ipv4 https://example.com`);
      break;

    default:
      console.log(synopsis);
      console.log(`\nTry 'nodeCurl -h' for more options`);
  }

  process.exit(0);
}
|
|
394
|
+
|
|
395
|
+
/**
 * Probe whether the remote FTP location is writable by uploading and then
 * deleting a tiny temp file next to `remotePath`.
 *
 * Fix: basic-ftp's `Client.uploadFrom` accepts a Readable stream or a local
 * file path — not a raw Buffer — so the probe payload is wrapped in a
 * stream; passing the Buffer directly made the probe always fail.
 *
 * @param {import("basic-ftp").Client} client - connected FTP client
 * @param {string} remotePath - remote target the caller wants to write
 * @returns {Promise<boolean>} true when both upload and delete succeed
 */
export const testWritable = async (client, remotePath) => {
  const tmpFile = remotePath + ".tmp_check";
  try {
    await client.uploadFrom(Readable.from(Buffer.from("test")), tmpFile);
    await client.remove(tmpFile);
    return true;
  } catch {
    return false;
  }
};
|
|
405
|
+
|
|
406
|
+
/**
 * Create a Transform stream that throttles throughput to roughly
 * `bytesPerSecond` using a one-second budget window.
 *
 * Fix: the old `typeof bytesPerSecond === "number"` guard let NaN and
 * Infinity through, corrupting the budget arithmetic below. The rate is now
 * coerced with Number() (so numeric strings also work) and validated with
 * Number.isFinite; any invalid or non-positive rate yields a transparent
 * pass-through stream so callers never have to special-case bad input.
 * Dead commented-out debug logging was removed.
 *
 * @param {number|string} bytesPerSecond - target throughput in bytes/second
 * @returns {import("stream").Transform} throttling (or pass-through) stream
 */
export function createRateLimitStream(bytesPerSecond) {
  const rate = Number(bytesPerSecond);
  if (!Number.isFinite(rate) || rate <= 0) {
    // Invalid rate: forward chunks unchanged.
    return new Transform({
      transform(chunk, encoding, callback) {
        callback(null, chunk);
      },
    });
  }

  const interval = 1000; // budget window length in ms
  let lastTime = Date.now();
  let bytesSent = 0;

  return new Transform({
    transform(chunk, encoding, callback) {
      const now = Date.now();
      const elapsed = now - lastTime;

      // Window expired: reset the budget and forward immediately.
      if (elapsed >= interval) {
        bytesSent = 0;
        lastTime = now;
        this.push(chunk);
        callback();
        return;
      }

      // Bytes still allowed in this window, scaled by the time remaining.
      const remainingTime = interval - elapsed;
      const allowedBytes = Math.floor((rate * remainingTime) / interval);

      if (bytesSent + chunk.length <= allowedBytes) {
        bytesSent += chunk.length;
        this.push(chunk);
        callback();
      } else {
        // Over budget: delay long enough to pay the excess back at `rate`.
        const bytesToWait = bytesSent + chunk.length - allowedBytes;
        const waitTime = (bytesToWait / rate) * 1000;

        setTimeout(() => {
          bytesSent = chunk.length;
          lastTime = Date.now();
          this.push(chunk);
          callback();
        }, waitTime);
      }
    },
  });
}
|
|
458
|
+
|
|
459
|
+
/** Promise-based sleep helper. */
const wait = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Stagger parallel transfers by sleeping 100 ms between launches, unless
 * --parallel-immediate asked for them to start at once.
 *
 * @param {object} flags - parsed CLI flags (reads parallelImmediate)
 * @returns {Promise<void>}
 */
export async function mayWait(flags) {
  if (flags.parallelImmediate) return;
  await wait(100);
}
|
|
469
|
+
|
|
470
|
+
/**
 * Build the outbound cookie list from -b/--cookie.
 *
 * The flag is either an inline cookie string — curl syntax, one or more
 * "name=value" pairs separated by "; " — or, when it contains no "=", the
 * path of a Netscape-format cookie-jar file (7 tab-separated columns).
 *
 * Fixes: supports multiple inline pairs ("a=1; b=2"), which the old code
 * collapsed into a single malformed cookie, and passes an explicit radix
 * to parseInt.
 *
 * @param {object} flags - parsed CLI flags (reads flags.cookie)
 * @returns {Array<object>} cookie records ({name, value, domain, path, ...})
 */
export function loadCookieJar(flags) {
  const cookies = [];
  if (!flags.cookie) return cookies;

  if (flags.cookie.includes("=")) {
    // Inline cookie string; curl allows several pairs separated by ";".
    for (const pair of flags.cookie.split(";")) {
      const trimmedPair = pair.trim();
      if (!trimmedPair) continue;
      const [name, ...valueParts] = trimmedPair.split("=");
      cookies.push({
        name: name.trim(),
        value: valueParts.join("=").trim(), // value may itself contain "="
        domain: "",
        path: "/",
        expireTime: null,
      });
    }
  } else if (fs.existsSync(flags.cookie)) {
    // Netscape cookie-jar file: skip blanks/comments, 7 fields per line.
    const content = fs.readFileSync(flags.cookie, "utf8");
    for (const line of content.split("\n")) {
      const trimmed = line.trim();
      if (!trimmed || trimmed.startsWith("#")) continue;
      const parts = trimmed.split("\t");
      if (parts.length < 7) continue;
      cookies.push({
        domain: parts[0],
        flag: parts[1] === "TRUE",
        path: parts[2],
        secure: parts[3] === "TRUE",
        expireTime: parseInt(parts[4], 10) * 1000, // jar stores seconds
        name: parts[5],
        value: parts[6],
      });
    }
  }

  return cookies;
}
|
|
507
|
+
|
|
508
|
+
/**
 * Merge Set-Cookie headers into a Netscape-format cookie-jar file (-c).
 *
 * Existing jar entries are kept and keyed by "domain|name", so a new cookie
 * with the same key replaces the old one. Cookies without an explicit
 * Expires attribute are persisted with a one-year lifetime.
 *
 * @param {string} jarPath - path of the cookie-jar file to read/rewrite
 * @param {string[]} setCookies - raw Set-Cookie header values
 * @param {string} hostname - request host, used when no Domain attribute given
 */
export function saveCookieJar(jarPath, setCookies, hostname) {
  const existing = new Map();

  // Load the current jar first so we merge rather than overwrite.
  if (fs.existsSync(jarPath)) {
    const content = fs.readFileSync(jarPath, "utf8");
    const lines = content.split("\n");
    for (const line of lines) {
      const trimmed = line.trim();
      if (!trimmed || trimmed.startsWith("#")) continue;
      const parts = trimmed.split("\t");
      if (parts.length >= 7) {
        // Key matches the merge key used for new cookies below.
        const key = `${parts[0]}|${parts[5]}`;
        existing.set(key, {
          domain: parts[0],
          flag: parts[1] === "TRUE",
          path: parts[2],
          secure: parts[3] === "TRUE",
          expireTime: parseInt(parts[4]) * 1000, // jar stores seconds; keep ms in memory
          name: parts[5],
          value: parts[6],
        });
      }
    }
  }

  const now = Date.now();
  const oneYear = now + 365 * 24 * 60 * 60 * 1000; // default expiry, in ms

  // Parse each Set-Cookie header: "name=value; Attr=Val; ...".
  for (const cookieStr of setCookies) {
    const parts = cookieStr.split(";");
    const [nameValue, ...attrs] = parts;
    const [name, ...valueParts] = nameValue.split("=");
    const value = valueParts.join("="); // value may itself contain "="

    let domain = hostname;
    let path = "/";
    let secure = false;
    let expireTime = null;

    // NOTE(review): Max-Age, HttpOnly and SameSite attributes are ignored;
    // only Domain, Path, Secure and Expires are honoured.
    for (const attr of attrs) {
      const [key, val] = attr.trim().split("=");
      const lowerKey = key.toLowerCase();
      if (lowerKey === "domain") {
        domain = val || hostname;
      } else if (lowerKey === "path") {
        path = val || "/";
      } else if (lowerKey === "secure") {
        secure = true;
      } else if (lowerKey === "expires") {
        // Stored in seconds here, converted back to ms when merged below.
        expireTime = new Date(val).getTime() / 1000;
      }
    }

    if (!expireTime) {
      expireTime = Math.floor(oneYear / 1000);
    }

    const key = `${domain}|${name}`;
    existing.set(key, {
      domain,
      flag: domain.startsWith("."),
      path,
      secure,
      expireTime: expireTime * 1000, // back to ms for the in-memory map
      name,
      value,
    });
  }

  // Serialize in Netscape format: 7 tab-separated columns per cookie.
  let output = "# Netscape HTTP Cookie File\n";
  output += "# This file was generated by nodeCurl\n\n";

  for (const cookie of existing.values()) {
    const flag = cookie.flag ? "TRUE" : "FALSE";
    const secure = cookie.secure ? "TRUE" : "FALSE";
    const expire = Math.floor((cookie.expireTime || oneYear) / 1000);
    output += `${cookie.domain}\t${flag}\t${cookie.path}\t${secure}\t${expire}\t${cookie.name}\t${cookie.value}\n`;
  }

  fs.writeFileSync(jarPath, output);
}
|
package/package.json
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "curlie",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "A curl-like CLI tool for HTTP, HTTPS, FTP, and file operations",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "bin/index.js",
|
|
7
|
+
"bin": {
|
|
8
|
+
"curlie": "./bin/index.js"
|
|
9
|
+
},
|
|
10
|
+
"scripts": {
|
|
11
|
+
"test": "echo \"No tests\" && exit 0"
|
|
12
|
+
},
|
|
13
|
+
"keywords": ["curl", "http", "https", "ftp", "cli", "network", "download", "request"],
|
|
14
|
+
"author": "Muhammad Ahmad",
|
|
15
|
+
"license": "MIT",
|
|
16
|
+
"dependencies": {
|
|
17
|
+
"basic-ftp": "^5.0.5"
|
|
18
|
+
},
|
|
19
|
+
"engines": {
|
|
20
|
+
"node": ">=14.0.0"
|
|
21
|
+
},
|
|
22
|
+
"repository": {
|
|
23
|
+
"type": "git",
|
|
24
|
+
"url": "git+https://github.com/muhammad-ahmad-ma/nodeCurl.git"
|
|
25
|
+
},
|
|
26
|
+
"bugs": {
|
|
27
|
+
"url": "https://github.com/muhammad-ahmad-ma/nodeCurl/issues"
|
|
28
|
+
},
|
|
29
|
+
"homepage": "https://github.com/muhammad-ahmad-ma/nodeCurl#readme"
|
|
30
|
+
}
|