recker 1.0.2-0
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +109 -0
- package/dist/cache/file-storage.d.ts +13 -0
- package/dist/cache/file-storage.d.ts.map +1 -0
- package/dist/cache/file-storage.js +50 -0
- package/dist/cache/memory-storage.d.ts +10 -0
- package/dist/cache/memory-storage.d.ts.map +1 -0
- package/dist/cache/memory-storage.js +29 -0
- package/dist/cache/redis-storage.d.ts +16 -0
- package/dist/cache/redis-storage.d.ts.map +1 -0
- package/dist/cache/redis-storage.js +25 -0
- package/dist/constants.d.ts +19 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/constants.js +18 -0
- package/dist/contract/index.d.ts +32 -0
- package/dist/contract/index.d.ts.map +1 -0
- package/dist/contract/index.js +67 -0
- package/dist/core/client.d.ts +107 -0
- package/dist/core/client.d.ts.map +1 -0
- package/dist/core/client.js +475 -0
- package/dist/core/errors.d.ts +19 -0
- package/dist/core/errors.d.ts.map +1 -0
- package/dist/core/errors.js +34 -0
- package/dist/core/request-promise.d.ts +24 -0
- package/dist/core/request-promise.d.ts.map +1 -0
- package/dist/core/request-promise.js +77 -0
- package/dist/core/request.d.ts +15 -0
- package/dist/core/request.d.ts.map +1 -0
- package/dist/core/request.js +44 -0
- package/dist/core/response.d.ts +33 -0
- package/dist/core/response.d.ts.map +1 -0
- package/dist/core/response.js +154 -0
- package/dist/index.d.ts +40 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +39 -0
- package/dist/mcp/client.d.ts +59 -0
- package/dist/mcp/client.d.ts.map +1 -0
- package/dist/mcp/client.js +195 -0
- package/dist/mcp/index.d.ts +3 -0
- package/dist/mcp/index.d.ts.map +1 -0
- package/dist/mcp/index.js +2 -0
- package/dist/mcp/types.d.ts +151 -0
- package/dist/mcp/types.d.ts.map +1 -0
- package/dist/mcp/types.js +1 -0
- package/dist/plugins/cache.d.ts +10 -0
- package/dist/plugins/cache.d.ts.map +1 -0
- package/dist/plugins/cache.js +72 -0
- package/dist/plugins/circuit-breaker.d.ts +14 -0
- package/dist/plugins/circuit-breaker.d.ts.map +1 -0
- package/dist/plugins/circuit-breaker.js +100 -0
- package/dist/plugins/compression.d.ts +5 -0
- package/dist/plugins/compression.d.ts.map +1 -0
- package/dist/plugins/compression.js +128 -0
- package/dist/plugins/cookie-jar.d.ts +6 -0
- package/dist/plugins/cookie-jar.d.ts.map +1 -0
- package/dist/plugins/cookie-jar.js +72 -0
- package/dist/plugins/dedup.d.ts +6 -0
- package/dist/plugins/dedup.d.ts.map +1 -0
- package/dist/plugins/dedup.js +34 -0
- package/dist/plugins/graphql.d.ts +13 -0
- package/dist/plugins/graphql.d.ts.map +1 -0
- package/dist/plugins/graphql.js +39 -0
- package/dist/plugins/har-player.d.ts +7 -0
- package/dist/plugins/har-player.d.ts.map +1 -0
- package/dist/plugins/har-player.js +53 -0
- package/dist/plugins/har-recorder.d.ts +7 -0
- package/dist/plugins/har-recorder.d.ts.map +1 -0
- package/dist/plugins/har-recorder.js +67 -0
- package/dist/plugins/logger.d.ts +11 -0
- package/dist/plugins/logger.d.ts.map +1 -0
- package/dist/plugins/logger.js +72 -0
- package/dist/plugins/pagination.d.ts +17 -0
- package/dist/plugins/pagination.d.ts.map +1 -0
- package/dist/plugins/pagination.js +105 -0
- package/dist/plugins/proxy-rotator.d.ts +8 -0
- package/dist/plugins/proxy-rotator.d.ts.map +1 -0
- package/dist/plugins/proxy-rotator.js +35 -0
- package/dist/plugins/rate-limit.d.ts +8 -0
- package/dist/plugins/rate-limit.d.ts.map +1 -0
- package/dist/plugins/rate-limit.js +57 -0
- package/dist/plugins/retry.d.ts +14 -0
- package/dist/plugins/retry.d.ts.map +1 -0
- package/dist/plugins/retry.js +92 -0
- package/dist/plugins/server-timing.d.ts +8 -0
- package/dist/plugins/server-timing.d.ts.map +1 -0
- package/dist/plugins/server-timing.js +24 -0
- package/dist/plugins/xsrf.d.ts +10 -0
- package/dist/plugins/xsrf.d.ts.map +1 -0
- package/dist/plugins/xsrf.js +48 -0
- package/dist/runner/request-runner.d.ts +47 -0
- package/dist/runner/request-runner.d.ts.map +1 -0
- package/dist/runner/request-runner.js +89 -0
- package/dist/transport/fetch.d.ts +6 -0
- package/dist/transport/fetch.d.ts.map +1 -0
- package/dist/transport/fetch.js +153 -0
- package/dist/transport/undici.d.ts +23 -0
- package/dist/transport/undici.d.ts.map +1 -0
- package/dist/transport/undici.js +218 -0
- package/dist/types/index.d.ts +251 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +1 -0
- package/dist/utils/agent-manager.d.ts +29 -0
- package/dist/utils/agent-manager.d.ts.map +1 -0
- package/dist/utils/agent-manager.js +133 -0
- package/dist/utils/body.d.ts +11 -0
- package/dist/utils/body.d.ts.map +1 -0
- package/dist/utils/body.js +136 -0
- package/dist/utils/cert.d.ts +12 -0
- package/dist/utils/cert.d.ts.map +1 -0
- package/dist/utils/cert.js +32 -0
- package/dist/utils/concurrency.d.ts +21 -0
- package/dist/utils/concurrency.d.ts.map +1 -0
- package/dist/utils/concurrency.js +116 -0
- package/dist/utils/dns.d.ts +7 -0
- package/dist/utils/dns.d.ts.map +1 -0
- package/dist/utils/dns.js +26 -0
- package/dist/utils/doh.d.ts +3 -0
- package/dist/utils/doh.d.ts.map +1 -0
- package/dist/utils/doh.js +35 -0
- package/dist/utils/header-parser.d.ts +81 -0
- package/dist/utils/header-parser.d.ts.map +1 -0
- package/dist/utils/header-parser.js +457 -0
- package/dist/utils/html-cleaner.d.ts +2 -0
- package/dist/utils/html-cleaner.d.ts.map +1 -0
- package/dist/utils/html-cleaner.js +21 -0
- package/dist/utils/logger.d.ts +33 -0
- package/dist/utils/logger.d.ts.map +1 -0
- package/dist/utils/logger.js +160 -0
- package/dist/utils/progress.d.ts +4 -0
- package/dist/utils/progress.d.ts.map +1 -0
- package/dist/utils/progress.js +49 -0
- package/dist/utils/request-pool.d.ts +23 -0
- package/dist/utils/request-pool.d.ts.map +1 -0
- package/dist/utils/request-pool.js +100 -0
- package/dist/utils/sse.d.ts +8 -0
- package/dist/utils/sse.d.ts.map +1 -0
- package/dist/utils/sse.js +62 -0
- package/dist/utils/streaming.d.ts +18 -0
- package/dist/utils/streaming.d.ts.map +1 -0
- package/dist/utils/streaming.js +83 -0
- package/dist/utils/task-pool.d.ts +38 -0
- package/dist/utils/task-pool.js +104 -0
- package/dist/utils/try-fn.d.ts +4 -0
- package/dist/utils/try-fn.d.ts.map +1 -0
- package/dist/utils/try-fn.js +53 -0
- package/dist/utils/upload.d.ts +10 -0
- package/dist/utils/upload.d.ts.map +1 -0
- package/dist/utils/upload.js +45 -0
- package/dist/utils/user-agent.d.ts +45 -0
- package/dist/utils/user-agent.d.ts.map +1 -0
- package/dist/utils/user-agent.js +100 -0
- package/dist/utils/whois.d.ts +15 -0
- package/dist/utils/whois.d.ts.map +1 -0
- package/dist/utils/whois.js +159 -0
- package/dist/websocket/client.d.ts +38 -0
- package/dist/websocket/client.d.ts.map +1 -0
- package/dist/websocket/client.js +184 -0
- package/package.json +100 -0

package/dist/utils/progress.js
@@ -0,0 +1,49 @@
+export function createProgressStream(stream, onProgress, total) {
+    let loaded = 0;
+    let startTime = Date.now();
+    let lastUpdate = startTime;
+    return new ReadableStream({
+        async start(controller) {
+            const reader = stream.getReader();
+            try {
+                while (true) {
+                    const { done, value } = await reader.read();
+                    if (done) {
+                        controller.close();
+                        break;
+                    }
+                    loaded += value.byteLength;
+                    const now = Date.now();
+                    const elapsed = (now - startTime) / 1000;
+                    const rate = elapsed > 0 ? loaded / elapsed : 0;
+                    if (now - lastUpdate > 100 || loaded === total) {
+                        const progress = {
+                            loaded,
+                            total,
+                            percent: total ? (loaded / total) * 100 : undefined,
+                            rate,
+                            estimated: total && rate > 0 ? ((total - loaded) / rate) * 1000 : undefined,
+                        };
+                        onProgress(progress);
+                        lastUpdate = now;
+                    }
+                    controller.enqueue(value);
+                }
+            }
+            catch (error) {
+                controller.error(error);
+                throw error;
+            }
+        },
+    });
+}
+export function calculateProgress(loaded, total) {
+    const progress = {
+        loaded,
+        total,
+    };
+    if (total && total > 0) {
+        progress.percent = (loaded / total) * 100;
+    }
+    return progress;
+}
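
A minimal usage sketch for the `createProgressStream` helper above (not part of the package diff). The deep `recker/dist/utils/progress.js` import specifier and the URL are assumptions; the package may re-export this helper from its main entry point.

```js
import { createProgressStream } from 'recker/dist/utils/progress.js';

const res = await fetch('https://example.com/large-file');
const total = Number(res.headers.get('content-length')) || undefined;

// Wrap the response body; progress callbacks fire at most every ~100 ms.
const tracked = createProgressStream(res.body, (p) => {
  // p carries { loaded, total, percent?, rate, estimated? } as built in the file above.
  console.log(`downloaded ${p.loaded} bytes`, p.percent?.toFixed(1));
}, total);

// Drain the wrapped stream to completion.
await new Response(tracked).arrayBuffer();
```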

package/dist/utils/request-pool.d.ts
@@ -0,0 +1,23 @@
+import type { Middleware } from '../types/index.js';
+export interface RequestPoolOptions {
+    concurrency?: number;
+    requestsPerInterval?: number;
+    interval?: number;
+}
+export declare class RequestPool {
+    private readonly concurrency;
+    private readonly requestsPerInterval?;
+    private readonly interval?;
+    private queue;
+    private active;
+    private windowStart;
+    private startedInWindow;
+    private waitingTimer?;
+    constructor(options?: RequestPoolOptions);
+    run<T>(fn: () => Promise<T>, signal?: AbortSignal): Promise<T>;
+    private _removeFromQueue;
+    private _canStart;
+    private _schedule;
+    asMiddleware(): Middleware;
+}
+//# sourceMappingURL=request-pool.d.ts.map

package/dist/utils/request-pool.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"request-pool.d.ts","sourceRoot":"","sources":["../../src/utils/request-pool.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAA+C,MAAM,mBAAmB,CAAC;AAEjG,MAAM,WAAW,kBAAkB;IAKjC,WAAW,CAAC,EAAE,MAAM,CAAC;IAKrB,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAI7B,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAiBD,qBAAa,WAAW;IACtB,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAS;IACrC,OAAO,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAS;IAC9C,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAS;IACnC,OAAO,CAAC,KAAK,CAAiC;IAC9C,OAAO,CAAC,MAAM,CAAK;IACnB,OAAO,CAAC,WAAW,CAAK;IACxB,OAAO,CAAC,eAAe,CAAK;IAC5B,OAAO,CAAC,YAAY,CAAC,CAAiB;gBAE1B,OAAO,GAAE,kBAAuB;IAM5C,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,CAAC,CAAC;IAuB9D,OAAO,CAAC,gBAAgB;IAOxB,OAAO,CAAC,SAAS;IAqBjB,OAAO,CAAC,SAAS;IA4DjB,YAAY,IAAI,UAAU;CAM3B"}

package/dist/utils/request-pool.js
@@ -0,0 +1,100 @@
+export class RequestPool {
+    concurrency;
+    requestsPerInterval;
+    interval;
+    queue = [];
+    active = 0;
+    windowStart = 0;
+    startedInWindow = 0;
+    waitingTimer;
+    constructor(options = {}) {
+        this.concurrency = options.concurrency ?? Number.POSITIVE_INFINITY;
+        this.requestsPerInterval = options.requestsPerInterval;
+        this.interval = options.interval;
+    }
+    run(fn, signal) {
+        if (signal?.aborted) {
+            return Promise.reject(signal.reason ?? new Error('Request aborted before enqueue'));
+        }
+        return new Promise((resolve, reject) => {
+            const request = { fn, resolve, reject, signal };
+            if (signal) {
+                const onAbort = () => {
+                    this._removeFromQueue(request);
+                    reject(signal.reason ?? new Error('Request aborted while queued'));
+                    this._schedule();
+                };
+                signal.addEventListener('abort', onAbort, { once: true });
+                request.abortCleanup = () => signal.removeEventListener('abort', onAbort);
+            }
+            this.queue.push(request);
+            this._schedule();
+        });
+    }
+    _removeFromQueue(request) {
+        const index = this.queue.indexOf(request);
+        if (index >= 0) {
+            this.queue.splice(index, 1);
+        }
+    }
+    _canStart(now) {
+        if (this.active >= this.concurrency) {
+            return false;
+        }
+        if (this.requestsPerInterval == null || this.interval == null) {
+            return true;
+        }
+        if (now - this.windowStart >= this.interval) {
+            this.windowStart = now;
+            this.startedInWindow = 0;
+        }
+        if (this.startedInWindow < this.requestsPerInterval) {
+            return true;
+        }
+        return false;
+    }
+    _schedule() {
+        if (this.waitingTimer) {
+            return;
+        }
+        const now = Date.now();
+        while (this.queue.length > 0 && this._canStart(Date.now())) {
+            const request = this.queue.shift();
+            if (request.signal?.aborted) {
+                request.abortCleanup?.();
+                request.reject(request.signal.reason ?? new Error('Request aborted while queued'));
+                continue;
+            }
+            this.active++;
+            this.startedInWindow++;
+            const clearAbort = request.abortCleanup;
+            if (clearAbort) {
+                clearAbort();
+                request.abortCleanup = undefined;
+            }
+            Promise.resolve()
+                .then(() => request.fn())
+                .then((result) => request.resolve(result))
+                .catch((error) => request.reject(error))
+                .finally(() => {
+                this.active--;
+                this._schedule();
+            });
+        }
+        if (this.queue.length > 0 &&
+            this.requestsPerInterval != null &&
+            this.interval != null &&
+            !this._canStart(Date.now())) {
+            const wait = Math.max(0, this.windowStart + this.interval - Date.now());
+            this.waitingTimer = setTimeout(() => {
+                this.waitingTimer = undefined;
+                this._schedule();
+            }, wait);
+        }
+    }
+    asMiddleware() {
+        return async (req, next) => {
+            return this.run(() => next(req), req.signal);
+        };
+    }
+}
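
A hedged usage sketch for the `RequestPool` above (not part of the package diff); the deep import path and the URLs are placeholders. `asMiddleware()` adapts the same pool as a client middleware that routes `next(req)` through `run()`.

```js
import { RequestPool } from 'recker/dist/utils/request-pool.js';

// At most 2 requests in flight and at most 5 starts per 1000 ms window.
const pool = new RequestPool({ concurrency: 2, requestsPerInterval: 5, interval: 1000 });

const urls = ['https://example.com/a', 'https://example.com/b', 'https://example.com/c'];
const controller = new AbortController();

// Aborting the controller rejects any calls still waiting in the queue.
const statuses = await Promise.all(
  urls.map((url) => pool.run(() => fetch(url).then((r) => r.status), controller.signal))
);
console.log(statuses);
```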

package/dist/utils/sse.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"sse.d.ts","sourceRoot":"","sources":["../../src/utils/sse.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,QAAQ;IACvB,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,wBAAuB,QAAQ,CAAC,QAAQ,EAAE,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAC,CA4E5E"}

package/dist/utils/sse.js
@@ -0,0 +1,62 @@
+export async function* parseSSE(response) {
+    if (!response.body)
+        throw new Error('Response body is null');
+    const reader = response.body.getReader();
+    const decoder = new TextDecoder();
+    let buffer = '';
+    let currentEvent = { data: '' };
+    try {
+        while (true) {
+            const { done, value } = await reader.read();
+            if (done)
+                break;
+            buffer += decoder.decode(value, { stream: true });
+            const lines = buffer.split(/\r\n|\r|\n/);
+            buffer = lines.pop() || '';
+            for (const line of lines) {
+                if (line.trim() === '') {
+                    if (currentEvent.data || currentEvent.event || currentEvent.id) {
+                        if (currentEvent.data.endsWith('\n')) {
+                            currentEvent.data = currentEvent.data.slice(0, -1);
+                        }
+                        yield currentEvent;
+                        currentEvent = { data: '' };
+                    }
+                    continue;
+                }
+                const colonIndex = line.indexOf(':');
+                let field = line;
+                let value = '';
+                if (colonIndex !== -1) {
+                    field = line.slice(0, colonIndex);
+                    value = line.slice(colonIndex + 1);
+                    if (value.startsWith(' ')) {
+                        value = value.slice(1);
+                    }
+                }
+                switch (field) {
+                    case 'data':
+                        currentEvent.data += value + '\n';
+                        break;
+                    case 'event':
+                        currentEvent.event = value;
+                        break;
+                    case 'id':
+                        currentEvent.id = value;
+                        break;
+                    case 'retry':
+                        const retry = parseInt(value, 10);
+                        if (!isNaN(retry)) {
+                            currentEvent.retry = retry;
+                        }
+                        break;
+                }
+            }
+        }
+        if (buffer.trim() !== '') {
+        }
+    }
+    finally {
+        reader.releaseLock();
+    }
+}
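
A sketch of consuming `parseSSE` (not part of the package diff); the import path and endpoint are assumptions. The generator yields one event per blank-line-terminated block, with `data` plus optional `event`, `id`, and `retry` fields.

```js
import { parseSSE } from 'recker/dist/utils/sse.js';

const res = await fetch('https://example.com/events', {
  headers: { accept: 'text/event-stream' },
});

// Iterate events as they arrive; the reader lock is released when the loop ends.
for await (const event of parseSSE(res)) {
  console.log(event.event ?? 'message', event.data, event.id);
}
```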

package/dist/utils/streaming.d.ts
@@ -0,0 +1,18 @@
+import { Readable } from 'node:stream';
+export declare function webToNodeStream(webStream: ReadableStream): Readable;
+export declare function nodeToWebStream(nodeStream: Readable): ReadableStream<Uint8Array>;
+export interface StreamProgressOptions {
+    onProgress?: (progress: {
+        loaded: number;
+        total?: number;
+        percent?: number;
+    }) => void;
+    total?: number;
+}
+export declare function trackStreamProgress(stream: Readable, options?: StreamProgressOptions): Readable;
+export declare function pipeStream(source: Readable, destination: NodeJS.WritableStream, options?: StreamProgressOptions): Promise<void>;
+export declare function createUploadStream(source: Readable): {
+    stream: ReadableStream<Uint8Array<ArrayBufferLike>>;
+    promise: Promise<void>;
+};
+//# sourceMappingURL=streaming.d.ts.map

package/dist/utils/streaming.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"streaming.d.ts","sourceRoot":"","sources":["../../src/utils/streaming.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,QAAQ,EAAe,MAAM,aAAa,CAAC;AAOpD,wBAAgB,eAAe,CAAC,SAAS,EAAE,cAAc,GAAG,QAAQ,CAsBnE;AAMD,wBAAgB,eAAe,CAAC,UAAU,EAAE,QAAQ,GAAG,cAAc,CAAC,UAAU,CAAC,CAoBhF;AAMD,MAAM,WAAW,qBAAqB;IACpC,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,KAAK,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,KAAK,IAAI,CAAC;IACtF,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,wBAAgB,mBAAmB,CACjC,MAAM,EAAE,QAAQ,EAChB,OAAO,GAAE,qBAA0B,GAClC,QAAQ,CA6BV;AAMD,wBAAsB,UAAU,CAC9B,MAAM,EAAE,QAAQ,EAChB,WAAW,EAAE,MAAM,CAAC,cAAc,EAClC,OAAO,GAAE,qBAA0B,GAClC,OAAO,CAAC,IAAI,CAAC,CAMf;AAMD,wBAAgB,kBAAkB,CAAC,MAAM,EAAE,QAAQ;;;EAYlD"}

package/dist/utils/streaming.js
@@ -0,0 +1,83 @@
+import { Readable, PassThrough } from 'node:stream';
+import { pipeline } from 'node:stream/promises';
+export function webToNodeStream(webStream) {
+    const reader = webStream.getReader();
+    return new Readable({
+        async read() {
+            try {
+                const { done, value } = await reader.read();
+                if (done) {
+                    this.push(null);
+                }
+                else {
+                    this.push(value);
+                }
+            }
+            catch (error) {
+                this.destroy(error);
+            }
+        },
+        destroy(error, callback) {
+            reader.cancel().finally(() => callback(error));
+        }
+    });
+}
+export function nodeToWebStream(nodeStream) {
+    return new ReadableStream({
+        start(controller) {
+            nodeStream.on('data', (chunk) => {
+                controller.enqueue(new Uint8Array(chunk));
+            });
+            nodeStream.on('end', () => {
+                controller.close();
+            });
+            nodeStream.on('error', (err) => {
+                controller.error(err);
+            });
+        },
+        cancel() {
+            nodeStream.destroy();
+        }
+    });
+}
+export function trackStreamProgress(stream, options = {}) {
+    const { onProgress, total } = options;
+    let loaded = 0;
+    const startTime = Date.now();
+    let lastUpdate = startTime;
+    const passThrough = new PassThrough();
+    stream.on('data', (chunk) => {
+        loaded += chunk.length;
+        const now = Date.now();
+        if (onProgress && now - lastUpdate > 100) {
+            const percent = total ? (loaded / total) * 100 : undefined;
+            onProgress({ loaded, total, percent });
+            lastUpdate = now;
+        }
+    });
+    stream.on('end', () => {
+        if (onProgress) {
+            const percent = total ? 100 : undefined;
+            onProgress({ loaded, total, percent });
+        }
+    });
+    stream.pipe(passThrough);
+    return passThrough;
+}
+export async function pipeStream(source, destination, options = {}) {
+    const tracked = options.onProgress
+        ? trackStreamProgress(source, options)
+        : source;
+    await pipeline(tracked, destination);
+}
+export function createUploadStream(source) {
+    const passThrough = new PassThrough();
+    source.pipe(passThrough);
+    return {
+        stream: nodeToWebStream(passThrough),
+        promise: new Promise((resolve, reject) => {
+            passThrough.on('finish', resolve);
+            passThrough.on('error', reject);
+        })
+    };
+}
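
A sketch combining `webToNodeStream` and `pipeStream` from the file above to save a URL to disk with progress reporting (not part of the package diff); the import path, URL, and file name are placeholders.

```js
import { createWriteStream } from 'node:fs';
import { webToNodeStream, pipeStream } from 'recker/dist/utils/streaming.js';

const res = await fetch('https://example.com/archive.zip');
// Bridge the web ReadableStream into a Node Readable.
const source = webToNodeStream(res.body);

await pipeStream(source, createWriteStream('archive.zip'), {
  total: Number(res.headers.get('content-length')) || undefined,
  // Called at most every ~100 ms while data flows, plus once at the end.
  onProgress: ({ loaded, total, percent }) => console.log(loaded, total, percent),
});
```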

package/dist/utils/task-pool.d.ts
@@ -0,0 +1,38 @@
+export interface TaskPoolOptions {
+    /**
+     * Max concurrent tasks allowed to execute at once.
+     * @default Infinity (no concurrency cap)
+     */
+    concurrency?: number;
+    /**
+     * Requests allowed per interval window.
+     * When provided with `interval`, starts will be spaced to respect the cap.
+     */
+    requestsPerInterval?: number;
+    /**
+     * Interval window length in milliseconds.
+     */
+    interval?: number;
+}
+/**
+ * Lightweight TaskPool for rate limiting and concurrency control.
+ *
+ * - Limits concurrent executions.
+ * - Enforces a max start rate (`requestsPerInterval` / `interval`) via a sliding window.
+ * - Respects AbortSignal both while queued and while running.
+ */
+export declare class TaskPool {
+    private readonly concurrency;
+    private readonly requestsPerInterval?;
+    private readonly interval?;
+    private queue;
+    private active;
+    private windowStart;
+    private startedInWindow;
+    private waitingTimer?;
+    constructor(options?: TaskPoolOptions);
+    run<T>(fn: () => Promise<T>, signal?: AbortSignal): Promise<T>;
+    private _removeFromQueue;
+    private _canStart;
+    private _schedule;
+}

package/dist/utils/task-pool.js
@@ -0,0 +1,104 @@
+/**
+ * Lightweight TaskPool for rate limiting and concurrency control.
+ *
+ * - Limits concurrent executions.
+ * - Enforces a max start rate (`requestsPerInterval` / `interval`) via a sliding window.
+ * - Respects AbortSignal both while queued and while running.
+ */
+export class TaskPool {
+    concurrency;
+    requestsPerInterval;
+    interval;
+    queue = [];
+    active = 0;
+    windowStart = 0;
+    startedInWindow = 0;
+    waitingTimer;
+    constructor(options = {}) {
+        this.concurrency = options.concurrency ?? Number.POSITIVE_INFINITY;
+        this.requestsPerInterval = options.requestsPerInterval;
+        this.interval = options.interval;
+    }
+    run(fn, signal) {
+        if (signal?.aborted) {
+            return Promise.reject(signal.reason ?? new Error('Task aborted before enqueue'));
+        }
+        return new Promise((resolve, reject) => {
+            const task = { fn, resolve, reject, signal };
+            if (signal) {
+                const onAbort = () => {
+                    this._removeFromQueue(task);
+                    reject(signal.reason ?? new Error('Task aborted while queued'));
+                    this._schedule();
+                };
+                signal.addEventListener('abort', onAbort, { once: true });
+                task.abortCleanup = () => signal.removeEventListener('abort', onAbort);
+            }
+            this.queue.push(task);
+            this._schedule();
+        });
+    }
+    _removeFromQueue(task) {
+        const index = this.queue.indexOf(task);
+        if (index >= 0) {
+            this.queue.splice(index, 1);
+        }
+    }
+    _canStart(now) {
+        if (this.active >= this.concurrency) {
+            return false;
+        }
+        if (this.requestsPerInterval == null || this.interval == null) {
+            return true;
+        }
+        if (now - this.windowStart >= this.interval) {
+            this.windowStart = now;
+            this.startedInWindow = 0;
+        }
+        if (this.startedInWindow < this.requestsPerInterval) {
+            return true;
+        }
+        return false;
+    }
+    _schedule() {
+        if (this.waitingTimer) {
+            // There's already a timer waiting for the next window; let it fire.
+            return;
+        }
+        const now = Date.now();
+        while (this.queue.length > 0 && this._canStart(Date.now())) {
+            const task = this.queue.shift();
+            if (task.signal?.aborted) {
+                task.abortCleanup?.();
+                task.reject(task.signal.reason ?? new Error('Task aborted while queued'));
+                continue;
+            }
+            this.active++;
+            this.startedInWindow++;
+            const clearAbort = task.abortCleanup;
+            if (clearAbort) {
+                clearAbort();
+                task.abortCleanup = undefined;
+            }
+            Promise.resolve()
+                .then(() => task.fn())
+                .then((result) => task.resolve(result))
+                .catch((error) => task.reject(error))
+                .finally(() => {
+                this.active--;
+                this._schedule();
+            });
+        }
+        // If rate limit prevents starting now, schedule when the window resets
+        if (this.queue.length > 0 &&
+            this.requestsPerInterval != null &&
+            this.interval != null &&
+            !this._canStart(Date.now())) {
+            const wait = Math.max(0, this.windowStart + this.interval - Date.now());
+            this.waitingTimer = setTimeout(() => {
+                this.waitingTimer = undefined;
+                this._schedule();
+            }, wait);
+        }
+    }
+}
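
A sketch of the `TaskPool` above used purely as a rate limiter (not part of the package diff); the import path is an assumption. As implemented, aborting rejects tasks that are still queued, while tasks that have already started run to completion.

```js
import { TaskPool } from 'recker/dist/utils/task-pool.js';

// No concurrency cap, but at most 10 task starts per 1000 ms window.
const pool = new TaskPool({ requestsPerInterval: 10, interval: 1000 });

const ac = new AbortController();
const jobs = Array.from({ length: 25 }, (_, i) =>
  pool.run(async () => i * 2, ac.signal)
);

// Cancel whatever is still waiting in the queue after 1.5 s.
setTimeout(() => ac.abort(), 1500);

const settled = await Promise.allSettled(jobs);
console.log(settled.filter((s) => s.status === 'fulfilled').length, 'completed');
```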

package/dist/utils/try-fn.d.ts
@@ -0,0 +1,4 @@
+export declare function tryFn<T>(fnOrPromise: (() => Promise<T>) | Promise<T> | (() => T) | T): Promise<[boolean, Error | null, T | undefined]> | [boolean, Error | null, T | undefined];
+export declare function tryFnSync<T>(fn: () => T): [boolean, Error | null, T | undefined];
+export default tryFn;
+//# sourceMappingURL=try-fn.d.ts.map

package/dist/utils/try-fn.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"try-fn.d.ts","sourceRoot":"","sources":["../../src/utils/try-fn.ts"],"names":[],"mappings":"AAqBA,wBAAgB,KAAK,CAAC,CAAC,EACrB,WAAW,EAAE,CAAC,MAAM,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,GAC3D,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,GAAG,IAAI,EAAE,CAAC,GAAG,SAAS,CAAC,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,GAAG,IAAI,EAAE,CAAC,GAAG,SAAS,CAAC,CAwD1F;AAED,wBAAgB,SAAS,CAAC,CAAC,EAAE,EAAE,EAAE,MAAM,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,GAAG,IAAI,EAAE,CAAC,GAAG,SAAS,CAAC,CAOhF;AAED,eAAe,KAAK,CAAC"}

package/dist/utils/try-fn.js
@@ -0,0 +1,53 @@
+export function tryFn(fnOrPromise) {
+    if (fnOrPromise == null) {
+        const err = new Error('fnOrPromise cannot be null or undefined');
+        err.stack = new Error().stack;
+        return [false, err, undefined];
+    }
+    if (typeof fnOrPromise === 'function') {
+        try {
+            const result = fnOrPromise();
+            if (result == null) {
+                return [true, null, result];
+            }
+            if (typeof result.then === 'function') {
+                return result
+                    .then((data) => [true, null, data])
+                    .catch((error) => {
+                    if (error instanceof Error &&
+                        Object.isExtensible(error)) {
+                        const desc = Object.getOwnPropertyDescriptor(error, 'stack');
+                        if (!desc || (desc.writable && desc.configurable)) {
+                            try {
+                            }
+                            catch (_) { }
+                        }
+                    }
+                    return [false, error instanceof Error ? error : new Error(String(error)), undefined];
+                });
+            }
+            return [true, null, result];
+        }
+        catch (error) {
+            return [false, error instanceof Error ? error : new Error(String(error)), undefined];
+        }
+    }
+    if (typeof fnOrPromise.then === 'function') {
+        return Promise.resolve(fnOrPromise)
+            .then((data) => [true, null, data])
+            .catch((error) => {
+            return [false, error instanceof Error ? error : new Error(String(error)), undefined];
+        });
+    }
+    return [true, null, fnOrPromise];
+}
+export function tryFnSync(fn) {
+    try {
+        const result = fn();
+        return [true, null, result];
+    }
+    catch (err) {
+        return [false, err instanceof Error ? err : new Error(String(err)), undefined];
+    }
+}
+export default tryFn;
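
A sketch of the `[ok, error, data]` triple returned by `tryFn`/`tryFnSync` above (not part of the package diff); the import path and URL are assumptions.

```js
import tryFn, { tryFnSync } from 'recker/dist/utils/try-fn.js';

// Async: resolves to a triple instead of throwing.
const [ok, err, res] = await tryFn(() => fetch('https://example.com'));
if (!ok) console.error('request failed:', err);

// Sync variant returns the same triple immediately.
const [parsedOk, parseErr, value] = tryFnSync(() => JSON.parse('{"a":1}'));
console.log(parsedOk, value);
```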

package/dist/utils/upload.d.ts
@@ -0,0 +1,10 @@
+import { Readable } from 'node:stream';
+export interface UploadOptions {
+    file: Readable | Buffer;
+    chunkSize?: number;
+    concurrency?: number;
+    uploadChunk: (chunk: Buffer, index: number, total: number) => Promise<void>;
+    onProgress?: (loaded: number, total: number) => void;
+}
+export declare function uploadParallel(options: UploadOptions): Promise<void>;
+//# sourceMappingURL=upload.d.ts.map

package/dist/utils/upload.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"upload.d.ts","sourceRoot":"","sources":["../../src/utils/upload.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,QAAQ,GAAG,MAAM,CAAC;IACxB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,WAAW,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IAC5E,UAAU,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;CACtD;AAED,wBAAsB,cAAc,CAAC,OAAO,EAAE,aAAa,iBAiE1D"}

package/dist/utils/upload.js
@@ -0,0 +1,45 @@
+export async function uploadParallel(options) {
+    const chunkSize = options.chunkSize || 5 * 1024 * 1024;
+    const concurrency = options.concurrency || 3;
+    let buffer = Buffer.alloc(0);
+    let chunkIndex = 0;
+    let loaded = 0;
+    const queue = [];
+    const processChunk = async (chunk, idx) => {
+        await options.uploadChunk(chunk, idx, 0);
+        loaded += chunk.length;
+        if (options.onProgress)
+            options.onProgress(loaded, 0);
+    };
+    if (Buffer.isBuffer(options.file)) {
+        const total = options.file.length;
+        for (let i = 0; i < total; i += chunkSize) {
+            const chunk = options.file.slice(i, i + chunkSize);
+            const p = processChunk(chunk, chunkIndex++);
+            queue.push(p);
+            if (queue.length >= concurrency) {
+                await Promise.race(queue);
+            }
+        }
+        await Promise.all(queue);
+        return;
+    }
+    for await (const chunk of options.file) {
+        buffer = Buffer.concat([buffer, chunk]);
+        while (buffer.length >= chunkSize) {
+            const chunkToUpload = buffer.slice(0, chunkSize);
+            buffer = buffer.slice(chunkSize);
+            const p = processChunk(chunkToUpload, chunkIndex++);
+            queue.push(p);
+            if (queue.length >= concurrency) {
+                const finished = await Promise.race(queue);
+                await Promise.all(queue);
+                queue.length = 0;
+            }
+        }
+    }
+    if (buffer.length > 0) {
+        queue.push(processChunk(buffer, chunkIndex++));
+    }
+    await Promise.all(queue);
+}
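
A sketch of `uploadParallel` with a file stream (not part of the package diff); the import path, endpoint, and file name are placeholders. As written above, the helper passes `0` for the `total` argument of both `uploadChunk` and `onProgress`.

```js
import { createReadStream } from 'node:fs';
import { uploadParallel } from 'recker/dist/utils/upload.js';

await uploadParallel({
  file: createReadStream('video.mp4'),   // Readable or Buffer
  chunkSize: 8 * 1024 * 1024,            // default is 5 MiB
  concurrency: 4,                        // default is 3
  uploadChunk: async (chunk, index) => {
    // Push each chunk to a (placeholder) storage endpoint.
    await fetch(`https://upload.example.com/part/${index}`, { method: 'PUT', body: chunk });
  },
  onProgress: (loaded) => console.log(`${loaded} bytes sent`),
});
```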