@chahakshah/terabox-api 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +9 -0
- package/api.js +2465 -0
- package/helper.js +363 -0
- package/index.d.ts +434 -0
- package/package.json +38 -0
package/helper.js
ADDED
|
@@ -0,0 +1,363 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import crypto from 'node:crypto';
|
|
4
|
+
import readline from 'node:readline';
|
|
5
|
+
import { Readable } from 'node:stream';
|
|
6
|
+
|
|
7
|
+
import crc32 from 'crc-32';
|
|
8
|
+
import { filesize } from 'filesize';
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Utility helper functions for TeraBox API requests
|
|
12
|
+
* @module helper
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
/**
 * Determine the upload chunk size for a file.
 *
 * Non-VIP accounts always get the smallest (4 MiB) chunk size. For VIP
 * accounts, the smallest tier whose limit (in GiB) covers the file size is
 * selected, and that tier value in MiB becomes the chunk size; files larger
 * than the biggest tier fall back to 128 MiB chunks.
 *
 * @param {integer} fileSize - File size in bytes
 * @param {Boolean} is_vip - VIP user flag
 * @returns {integer} Calculated chunk size in bytes
 */
function getChunkSize(fileSize, is_vip = true) {
    const MiB = 1024 * 1024;
    const GiB = 1024 * MiB;

    // Tier table: a file up to N GiB is split into N MiB chunks.
    const tiers = [4, 8, 16, 32, 64, 128];

    if (!is_vip) {
        return tiers[0] * MiB;
    }

    const tier = tiers.find((limit) => fileSize <= limit * GiB);
    return (tier ?? tiers[tiers.length - 1]) * MiB;
}
|
|
39
|
+
|
|
40
|
+
/**
 * Calculate hashes for specific local file.
 *
 * Streams the file once and computes, in a single pass:
 *  - an MD5 of the entire file,
 *  - an MD5 of the first 256 KiB "slice",
 *  - a CRC32 of the entire file,
 *  - one MD5 per upload chunk (chunk size from getChunkSize), and
 *  - an etag: equal to the file MD5 for single-chunk files, otherwise
 *    md5(JSON array of chunk hashes) + '-' + chunk count.
 *
 * Hashing progress is printed to stdout while the stream is consumed.
 * @param {string} filePath - Path to local file
 * @returns {Object} Calculated hashes: {crc32, slice, file, etag, chunks}
 */
async function hashFile(filePath) {
    const stat = fs.statSync(filePath);
    // First 256 KiB of the file forms the "slice" hash.
    const sliceSize = 256 * 1024;
    const splitSize = getChunkSize(stat.size);
    // Progress accumulator for the console status line.
    const hashedData = newProgressData();

    let crcHash = 0;
    const fileHash = crypto.createHash('md5');
    const sliceHash = crypto.createHash('md5');
    // Re-created for every chunk boundary (let, not const).
    let chunkHash = crypto.createHash('md5');

    const hashData = {
        crc32: 0,
        slice: '',
        file: '',
        etag: '',
        chunks: []
    };

    // bytesRead: bytes hashed into the CURRENT chunk; reset at each boundary.
    let bytesRead = 0;
    // allBytesRead: bytes hashed in total; also gates the slice hash.
    let allBytesRead = 0;

    const stream = fs.createReadStream(filePath);

    try {
        for await (const data of stream) {
            fileHash.update(data);

            // crc-32 chains the previous value as the seed for the next buffer.
            crcHash = crc32.buf(data, crcHash);

            // Stream buffers do not align with chunk/slice boundaries, so walk
            // this buffer in sub-ranges that never cross a boundary.
            let offset = 0;
            while (offset < data.length) {
                const remaining = data.length - offset;

                const sliceRemaining = sliceSize - allBytesRead;
                const chunkRemaining = splitSize - bytesRead;

                // While still inside the first 256 KiB, also stop at the
                // slice boundary so sliceHash never sees extra bytes.
                const sliceAllowed = allBytesRead < sliceSize;
                const readLimit = sliceAllowed
                    ? Math.min(remaining, chunkRemaining, sliceRemaining)
                    : Math.min(remaining, chunkRemaining);

                const chunk = data.subarray(offset, offset + readLimit);
                chunkHash.update(chunk);

                if (sliceAllowed) {
                    sliceHash.update(chunk);
                }

                offset += readLimit;
                allBytesRead += readLimit;
                bytesRead += readLimit;

                // Chunk boundary reached: finalize this chunk's MD5 and start
                // a fresh hasher for the next chunk.
                if (bytesRead >= splitSize) {
                    hashData.chunks.push(chunkHash.digest('hex'));
                    chunkHash = crypto.createHash('md5');
                    bytesRead = 0;
                }
            }

            // Single-part progress: everything is attributed to part 0.
            hashedData.all = hashedData.parts[0] = allBytesRead;
            printProgressLog('Hashing', hashedData, stat.size);
        }

        // Flush the trailing partial chunk (files rarely align to splitSize).
        if (bytesRead > 0) {
            hashData.chunks.push(chunkHash.digest('hex'));
        }

        // crc-32 returns a signed 32-bit int; >>> 0 converts to unsigned.
        hashData.crc32 = crcHash >>> 0;
        hashData.slice = sliceHash.digest('hex');
        hashData.file = fileHash.digest('hex');
        hashData.etag = hashData.file;

        // Multi-chunk etag format: md5(JSON of chunk MD5s) + '-' + count.
        if(hashData.chunks.length > 1){
            const chunksJSON = JSON.stringify(hashData.chunks);
            const chunksEtag = crypto.createHash('md5').update(chunksJSON).digest('hex');
            hashData.etag = `${chunksEtag}-${hashData.chunks.length}`;
        }

        // Terminate the in-place progress line before returning.
        console.log();
        return hashData;
    }
    catch (error) {
        // Keep the console tidy (progress line has no trailing newline).
        console.log();
        throw error;
    }
}
|
|
132
|
+
|
|
133
|
+
/**
 * Run an array of task factories with a concurrency limit.
 *
 * `limit` workers pull tasks from the shared queue. On the first failure the
 * error is logged, no new tasks are started, and the result is marked not ok.
 * Errors are caught inside each worker — previously a rejection made the
 * outer Promise.all reject immediately while `failed` was still false, so
 * the remaining workers kept starting new tasks and any further rejections
 * surfaced as unhandled promise rejections (fatal on modern Node).
 *
 * @param {Object} data - Caller payload passed through to the result
 * @param {Array<Function>} tasks - Task factories returning promises
 * @param {integer} limit - Maximum number of tasks running at once
 * @returns {Object} `{ok: boolean, data}` — ok is false if any task threw
 */
async function runWithConcurrencyLimit(data, tasks, limit) {
    let index = 0;
    let failed = false;
    let firstError = null;

    const runTask = async () => {
        while (index < tasks.length && !failed) {
            const currentIndex = index++;
            try {
                await tasks[currentIndex]();
            }
            catch (error) {
                // Record only the first failure and flip `failed` right away
                // so all workers stop pulling new tasks; catching here (not
                // around Promise.all) prevents unhandled rejections from
                // tasks that fail while the first error is being handled.
                if (!failed) {
                    failed = true;
                    firstError = error;
                }
            }
        }
    };

    const workers = Array.from({ length: limit }, () => runTask());
    await Promise.all(workers);

    if (failed) {
        console.error('\n[ERROR]', unwrapErrorMessage(firstError));
    }

    return {ok: !failed, data: data};
}
|
|
156
|
+
|
|
157
|
+
/**
 * Render a one-line progress status to stdout, overwriting the current line:
 * percentage, transferred/total sizes, average speed and estimated time left.
 *
 * @param {string} prepText - Label prefix (e.g. 'Uploading', 'Hashing')
 * @param {Object} sentData - Progress accumulator from newProgressData()
 * @param {integer} fsize - Total file size in bytes
 */
function printProgressLog(prepText, sentData, fsize){
    readline.cursorTo(process.stdout, 0, null);

    // Total bytes done so far = sum over per-part counters.
    let doneBytes = 0;
    for (const partBytes of Object.values(sentData.parts)) {
        doneBytes += partBytes;
    }

    const doneStr = filesize(doneBytes, {standard: 'iec', round: 3, pad: true, separator: '.'});
    const totalStr = filesize(fsize, {standard: 'iec', round: 3, pad: true});
    const sizesStr = `(${doneStr}/${totalStr})`;

    // Average speed since start; `|| 0` guards division producing NaN.
    const speed = sentData.all * 1000 / (Date.now() - sentData.start) || 0;
    const speedStr = filesize(speed, {standard: 'si', round: 2, pad: true, separator: '.'}) + '/s';

    // ETA in seconds, clamped to the display maximum (99*3636+35 = 359999s,
    // i.e. 99h59m59s); zero speed yields Infinity which also hits the clamp.
    const etaCap = 99*3636+35;
    const etaRaw = Math.max((fsize - doneBytes) / speed, 0);
    const eta = etaRaw > etaCap ? etaCap : etaRaw;
    const [remH, remM, remS] = [
        Math.floor(eta / 3600),
        Math.floor((eta % 3600) / 60),
        Math.floor(eta % 60),
    ].map((t) => String(t).padStart(2, '0'));
    const etaStr = `${remH}h${remM}m${remS}s left...`;

    const percentStr = `${Math.floor((doneBytes / fsize) * 100)}% ${sizesStr}`;
    process.stdout.write(`${prepText}: ${[percentStr, speedStr, etaStr].join(', ')}`);
    readline.clearLine(process.stdout, 1);
}
|
|
182
|
+
|
|
183
|
+
/**
 * Build the two-line message describing an MD5 verification failure
 * for a single uploaded part.
 *
 * @param {string} hash1 - Expected (locally computed) MD5
 * @param {string} hash2 - MD5 reported by the server
 * @param {integer} partnum - 1-based part number
 * @param {integer} total - Total number of parts
 * @returns {Array<string>} Message lines
 */
function md5MismatchText(hash1, hash2, partnum, total){
    const summary = `MD5 hash mismatch for file (part: ${partnum} of ${total})`;
    const details = `[Actual MD5:${hash1} / Got MD5:${hash2}]`;
    return [summary, details];
}
|
|
189
|
+
|
|
190
|
+
/**
 * Upload a single chunk (part) of a file, with retries and MD5 verification.
 *
 * Reads the part's byte range into memory, then tries up to
 * `uploadData.maxTries` times to upload it via `app.uploadChunk`. After each
 * upload the server-reported MD5 is verified against the locally known chunk
 * hash (or a freshly computed one when no precomputed hash exists and
 * `data.hash_check` is not explicitly false). On success, marks the part as
 * uploaded and advances the shared progress display.
 *
 * @param {Object} app - API client; uses app.uploadChunk and app.CheckMd5Val
 * @param {Object} data - Upload state (size, hash.chunks, uploaded, hash_check)
 * @param {Object} file - Open file handle supporting read(buffer, offset, length, position)
 * @param {integer} partSeq - 0-based chunk index
 * @param {Object} uploadData - Shared progress accumulator (all, parts, maxTries)
 * @param {AbortSignal} externalAbort - Checked before each try and after each failure
 * @throws {Error} If all tries fail (or the abort signal fires before success)
 */
async function uploadChunkTask(app, data, file, partSeq, uploadData, externalAbort) {
    // Byte range [start, end] of this part within the file.
    const splitSize = getChunkSize(data.size);
    const start = partSeq * splitSize;
    const end = Math.min(start + splitSize, data.size) - 1;
    const maxTries = uploadData.maxTries;

    // Record progress for this part and redraw the status line.
    const uploadLog = (chunkSize) => {
        uploadData.all += chunkSize;
        uploadData.parts[partSeq] += chunkSize;
        printProgressLog('Uploading', uploadData, data.size);
    }

    // Read the whole part into memory once; retries reuse the same blob.
    const blob_size = end + 1 - start;
    const buffer = Buffer.alloc(blob_size);
    await file.read(buffer, 0, blob_size, start);
    const blob = new Blob([buffer], { type: 'application/octet-stream' });
    let is_ok = false;

    for (let i = 0; i < maxTries; i++) {
        // Another task already failed the whole upload — stop retrying.
        if (externalAbort.aborted) {
            break;
        }

        try{
            const res = await app.uploadChunk(data, partSeq, blob, null, externalAbort);
            const chunkMd5 = data.hash.chunks[partSeq];

            // Precomputed chunk hash available: server MD5 must match it.
            if (app.CheckMd5Val(chunkMd5) && res.md5 !== chunkMd5){
                const md5Err = md5MismatchText(chunkMd5, res.md5, partSeq+1, data.hash.chunks.length)
                throw new Error(md5Err.join('\n\t'));
            }

            // No precomputed hash: unless data.hash_check is explicitly set
            // to false, compute this part's MD5 now and verify the server's.
            const skipChunkHashCheck = typeof data.hash_check === 'boolean' && data.hash_check === false;
            if(!app.CheckMd5Val(chunkMd5) && !skipChunkHashCheck){
                const calcChunkMd5 = crypto.createHash('md5').update(buffer).digest('hex');
                if(calcChunkMd5 !== res.md5){
                    const md5Err = md5MismatchText(calcChunkMd5, res.md5, partSeq+1, data.hash.chunks.length)
                    throw new Error(md5Err.join('\n\t'));
                }
            }

            // Adopt the server's (valid) MD5 as the canonical chunk hash.
            if(app.CheckMd5Val(res.md5) && chunkMd5 !== res.md5){
                data.hash.chunks[partSeq] = res.md5;
            }

            // Success: mark part done, credit its bytes, stop retrying.
            data.uploaded[partSeq] = true;
            uploadLog(blob_size);
            is_ok = true;

            break;
        }
        catch(error){
            // Aborted mid-flight: leave quietly without logging a retry.
            if (externalAbort.aborted) {
                break;
            }

            // Clear the in-place progress line before printing the error.
            readline.clearLine(process.stdout, 0);
            readline.cursorTo(process.stdout, 0, null);

            // Enrich the message with errno/code from error.cause if present.
            let message = error.message;
            if(error.cause){
                message += ' Cause';
                if(error.cause.errno){
                    message += ' #' + error.cause.errno;
                }
                if(error.cause.code){
                    message += ' ' + error.cause.code;
                }
            }

            const uplFailedMsg1 = ' -> Upload failed for part #' + (partSeq+1);
            const uplFailedMsg2 = `: ${message}`;
            const doRetry = i+1 != maxTries ? `, retry #${i+1}` : '';

            process.stdout.write(uplFailedMsg1 + uplFailedMsg2 + doRetry + '...\n');
            // Redraw the progress line (0 bytes added) after the error text.
            uploadLog(0);
        }
    }

    if(!is_ok){
        throw new Error(`Upload failed! [PART #${partSeq+1}]`);
    }
}
|
|
277
|
+
|
|
278
|
+
/**
 * Create a fresh progress accumulator for hashing/upload status reporting.
 *
 * @returns {Object} `{all, start, parts}` — total bytes processed so far,
 *          creation timestamp in ms, and a per-part byte-counter map
 */
function newProgressData() {
    const progress = {
        all: 0,
        start: Date.now(),
        parts: {},
    };
    return progress;
}
|
|
285
|
+
|
|
286
|
+
/**
 * Helper function for uploading chunks to TeraBox
 * @param {Object} app - TeraBox API client instance
 * @param {Object} data - Upload data parameters (size, hash.chunks, uploaded, ...)
 * @param {string} filePath - Path to the local file being uploaded
 * @param {integer} maxTasks - Maximum concurrent chunk uploads
 * @param {integer} maxTries - Maximum tries per chunk upload
 * @returns {Object} Upload data parameters and status `{ok, data}`
 */
async function uploadChunks(app, data, filePath, maxTasks = 10, maxTries = 5) {
    const splitSize = getChunkSize(data.size);
    const totalChunks = data.hash.chunks.length;
    // The final chunk is usually shorter than splitSize.
    const lastChunkSize = data.size - splitSize * (totalChunks - 1);

    const tasks = [];
    const uploadData = newProgressData();
    const externalAbortController = new AbortController();
    uploadData.maxTries = maxTries;

    // Only do any work when at least one part still needs uploading.
    if(data.uploaded.filter(pStatus => pStatus == false).length > 0){
        for (let partSeq = 0; partSeq < totalChunks; partSeq++) {
            uploadData.parts[partSeq] = 0;
            if(data.uploaded[partSeq]){
                // BUGFIX: credit already-uploaded parts with their REAL size
                // (the last chunk is shorter than splitSize); previously
                // `splitSize` was assigned and `chunkSize` was unused, which
                // inflated the resume progress/percentage.
                const chunkSize = partSeq < totalChunks - 1 ? splitSize : lastChunkSize;
                uploadData.parts[partSeq] = chunkSize;
            }
        }

        const file = await fs.promises.open(filePath, 'r');
        try {
            for (let partSeq = 0; partSeq < totalChunks; partSeq++) {
                if(!data.uploaded[partSeq]){
                    tasks.push(() => {
                        return uploadChunkTask(app, data, file, partSeq, uploadData, externalAbortController.signal);
                    });
                }
            }

            printProgressLog('Uploading', uploadData, data.size);
            const cMaxTasks = totalChunks > maxTasks ? maxTasks : totalChunks;
            const upload_status = await runWithConcurrencyLimit(data, tasks, cMaxTasks);

            console.log();
            // Signal any lingering chunk tasks that the run is over.
            externalAbortController.abort();
            return upload_status;
        }
        finally {
            // Always release the file handle, even if something above throws.
            await file.close();
        }
    }

    return {ok: true, data};
}
|
|
335
|
+
|
|
336
|
+
/**
 * Flatten an Error and its `cause` chain into a single message string.
 *
 * @param {Object} err - Error object (may be null/undefined)
 * @returns {string|undefined} Colon-joined messages of the error and every
 *          cause that has one, or undefined for a falsy input
 */
function unwrapErrorMessage(err) {
    if (!err) {
        return;
    }

    let res = err.message;

    // Walk the cause chain, appending each nested message.
    for (let link = err.cause; link; link = link.cause) {
        if (link.message) {
            res += ': ' + link.message;
        }
    }

    return res;
}
|
|
357
|
+
|
|
358
|
+
// Public helper API surface consumed by api.js and package users.
export {
    getChunkSize,
    hashFile,
    uploadChunks,
    unwrapErrorMessage,
};
|