@azure/storage-common 12.0.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +27 -0
- package/dist/browser/BufferScheduler-browser.d.mts.map +1 -0
- package/dist/browser/BufferScheduler-browser.mjs.map +1 -0
- package/dist/browser/BufferScheduler.d.ts +3 -0
- package/dist/browser/BufferScheduler.js +8 -0
- package/dist/browser/BuffersStream.d.ts +41 -0
- package/dist/browser/BuffersStream.d.ts.map +1 -0
- package/dist/browser/BuffersStream.js +83 -0
- package/dist/browser/BuffersStream.js.map +1 -0
- package/dist/browser/PooledBuffer.d.ts +63 -0
- package/dist/browser/PooledBuffer.d.ts.map +1 -0
- package/dist/browser/PooledBuffer.js +87 -0
- package/dist/browser/PooledBuffer.js.map +1 -0
- package/dist/browser/cache.d.ts +3 -0
- package/dist/browser/cache.d.ts.map +1 -0
- package/dist/browser/cache.js +11 -0
- package/dist/browser/cache.js.map +1 -0
- package/dist/browser/index.d.ts +3 -0
- package/dist/browser/index.d.ts.map +1 -0
- package/dist/browser/index.js +5 -0
- package/dist/browser/index.js.map +1 -0
- package/dist/browser/package.json +3 -0
- package/dist/commonjs/BufferScheduler.d.ts +157 -0
- package/dist/commonjs/BufferScheduler.d.ts.map +1 -0
- package/dist/commonjs/BufferScheduler.js +256 -0
- package/dist/commonjs/BufferScheduler.js.map +1 -0
- package/dist/commonjs/BuffersStream.d.ts +41 -0
- package/dist/commonjs/BuffersStream.d.ts.map +1 -0
- package/dist/commonjs/BuffersStream.js +87 -0
- package/dist/commonjs/BuffersStream.js.map +1 -0
- package/dist/commonjs/PooledBuffer.d.ts +63 -0
- package/dist/commonjs/PooledBuffer.d.ts.map +1 -0
- package/dist/commonjs/PooledBuffer.js +92 -0
- package/dist/commonjs/PooledBuffer.js.map +1 -0
- package/dist/commonjs/cache.d.ts +3 -0
- package/dist/commonjs/cache.d.ts.map +1 -0
- package/dist/commonjs/cache.js +14 -0
- package/dist/commonjs/cache.js.map +1 -0
- package/dist/commonjs/index.d.ts +3 -0
- package/dist/commonjs/index.d.ts.map +1 -0
- package/dist/commonjs/index.js +10 -0
- package/dist/commonjs/index.js.map +1 -0
- package/dist/commonjs/package.json +3 -0
- package/dist/commonjs/tsdoc-metadata.json +11 -0
- package/dist/esm/BufferScheduler.d.ts +157 -0
- package/dist/esm/BufferScheduler.d.ts.map +1 -0
- package/dist/esm/BufferScheduler.js +252 -0
- package/dist/esm/BufferScheduler.js.map +1 -0
- package/dist/esm/BuffersStream.d.ts +41 -0
- package/dist/esm/BuffersStream.d.ts.map +1 -0
- package/dist/esm/BuffersStream.js +83 -0
- package/dist/esm/BuffersStream.js.map +1 -0
- package/dist/esm/PooledBuffer.d.ts +63 -0
- package/dist/esm/PooledBuffer.d.ts.map +1 -0
- package/dist/esm/PooledBuffer.js +87 -0
- package/dist/esm/PooledBuffer.js.map +1 -0
- package/dist/esm/cache.d.ts +3 -0
- package/dist/esm/cache.d.ts.map +1 -0
- package/dist/esm/cache.js +11 -0
- package/dist/esm/cache.js.map +1 -0
- package/dist/esm/index.d.ts +3 -0
- package/dist/esm/index.d.ts.map +1 -0
- package/dist/esm/index.js +5 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/package.json +3 -0
- package/dist/react-native/BufferScheduler.d.ts +157 -0
- package/dist/react-native/BufferScheduler.d.ts.map +1 -0
- package/dist/react-native/BufferScheduler.js +252 -0
- package/dist/react-native/BufferScheduler.js.map +1 -0
- package/dist/react-native/BuffersStream.d.ts +41 -0
- package/dist/react-native/BuffersStream.d.ts.map +1 -0
- package/dist/react-native/BuffersStream.js +83 -0
- package/dist/react-native/BuffersStream.js.map +1 -0
- package/dist/react-native/PooledBuffer.d.ts +63 -0
- package/dist/react-native/PooledBuffer.d.ts.map +1 -0
- package/dist/react-native/PooledBuffer.js +87 -0
- package/dist/react-native/PooledBuffer.js.map +1 -0
- package/dist/react-native/cache.d.ts +3 -0
- package/dist/react-native/cache.d.ts.map +1 -0
- package/dist/react-native/cache.js +11 -0
- package/dist/react-native/cache.js.map +1 -0
- package/dist/react-native/index.d.ts +3 -0
- package/dist/react-native/index.d.ts.map +1 -0
- package/dist/react-native/index.js +5 -0
- package/dist/react-native/index.js.map +1 -0
- package/dist/react-native/package.json +3 -0
- package/package.json +104 -0
@@ -0,0 +1,63 @@
import type { Readable } from "node:stream";
/**
 * This class provides a buffer container which conceptually has no hard size limit.
 * It accepts a capacity, an array of input buffers and the total length of input data.
 * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers
 * into the internal "buffer" serially with respect to the total length.
 * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream
 * assembled from all the data in the internal "buffer".
 */
export declare class PooledBuffer {
    /**
     * Internal buffers used to keep the data.
     * Each buffer has a length of maxBufferLength except the last one.
     */
    private buffers;
    /**
     * The total size of internal buffers.
     */
    private readonly capacity;
    /**
     * The total size of data contained in internal buffers.
     */
    private _size;
    /**
     * The size of the data contained in the pooled buffers.
     */
    get size(): number;
    /**
     * Creates an instance of PooledBuffer with the given capacity.
     * Internal buffers are allocated but contain no data.
     * Users may call the {@link PooledBuffer.fill} method to fill this
     * pooled buffer with data.
     *
     * @param capacity - Total capacity of the internal buffers
     */
    constructor(capacity: number);
    /**
     * Creates an instance of PooledBuffer with the given capacity.
     * Internal buffers are allocated and filled with data in the input buffers serially
     * with respect to the total length.
     *
     * @param capacity - Total capacity of the internal buffers
     * @param buffers - Input buffers containing the data to be filled in the pooled buffer
     * @param totalLength - Total length of the data to be filled in.
     */
    constructor(capacity: number, buffers: Buffer[], totalLength: number);
    /**
     * Fill the internal buffers with data in the input buffers serially
     * with respect to the total length and the total capacity of the internal buffers.
     * Data copied will be shifted out of the input buffers.
     *
     * @param buffers - Input buffers containing the data to be filled in the pooled buffer
     * @param totalLength - Total length of the data to be filled in.
     *
     */
    fill(buffers: Buffer[], totalLength: number): void;
    /**
     * Get the readable stream assembled from all the data in the internal buffers.
     *
     */
    getReadableStream(): Readable;
}
//# sourceMappingURL=PooledBuffer.d.ts.map
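
The declaration above is easier to follow with a small usage sketch. The snippet below is hypothetical and not part of the package; since the index module shown later in this diff only re-exports BufferScheduler and getCachedDefaultHttpClient, it assumes a relative import from inside the package source tree. It fills a 10-byte pool from three chunks and drains the assembled stream.

```ts
// Hypothetical usage sketch; PooledBuffer is an internal helper, so the
// relative import assumes this code lives inside the package source tree.
import { PooledBuffer } from "./PooledBuffer.js";

async function demo(): Promise<void> {
  const chunks = [Buffer.from("hello "), Buffer.from("world"), Buffer.from("!!!")];
  const totalLength = chunks.reduce((sum, c) => sum + c.length, 0);

  // Allocate a 10-byte pool and fill it; only the first 10 bytes fit,
  // and the consumed bytes are shifted out of `chunks`.
  const pooled = new PooledBuffer(10, chunks, totalLength);
  console.log(pooled.size); // 10

  // Drain the readable stream assembled over the internal buffers.
  const parts: Buffer[] = [];
  for await (const part of pooled.getReadableStream()) {
    parts.push(part as Buffer);
  }
  console.log(Buffer.concat(parts).toString()); // "hello worl"
}

demo().catch(console.error);
```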
@@ -0,0 +1 @@
{"version":3,"file":"PooledBuffer.d.ts","sourceRoot":"","sources":["../../src/PooledBuffer.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAS5C;;;;;;;GAOG;AACH,qBAAa,YAAY;IACvB;;;OAGG;IACH,OAAO,CAAC,OAAO,CAAgB;IAE/B;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAS;IAElC;;OAEG;IACH,OAAO,CAAC,KAAK,CAAS;IAEtB;;OAEG;IACH,IAAW,IAAI,IAAI,MAAM,CAExB;IAED;;;;;;;OAOG;gBACS,QAAQ,EAAE,MAAM;IAE5B;;;;;;;;OAQG;gBACS,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,EAAE,WAAW,EAAE,MAAM;IAoBpE;;;;;;;;OAQG;IACI,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,WAAW,EAAE,MAAM,GAAG,IAAI;IAiCzD;;;OAGG;IACI,iBAAiB,IAAI,QAAQ;CAGrC"}
@@ -0,0 +1,87 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { BuffersStream } from "./BuffersStream.js";
import buffer from "node:buffer";
/**
 * maxBufferLength is the max size of each buffer in the pooled buffers.
 */
const maxBufferLength = buffer.constants.MAX_LENGTH;
/**
 * This class provides a buffer container which conceptually has no hard size limit.
 * It accepts a capacity, an array of input buffers and the total length of input data.
 * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers
 * into the internal "buffer" serially with respect to the total length.
 * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream
 * assembled from all the data in the internal "buffer".
 */
export class PooledBuffer {
    /**
     * The size of the data contained in the pooled buffers.
     */
    get size() {
        return this._size;
    }
    constructor(capacity, buffers, totalLength) {
        /**
         * Internal buffers used to keep the data.
         * Each buffer has a length of maxBufferLength except the last one.
         */
        this.buffers = [];
        this.capacity = capacity;
        this._size = 0;
        // allocate
        const bufferNum = Math.ceil(capacity / maxBufferLength);
        for (let i = 0; i < bufferNum; i++) {
            let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;
            if (len === 0) {
                len = maxBufferLength;
            }
            this.buffers.push(Buffer.allocUnsafe(len));
        }
        if (buffers) {
            this.fill(buffers, totalLength);
        }
    }
    /**
     * Fill the internal buffers with data in the input buffers serially
     * with respect to the total length and the total capacity of the internal buffers.
     * Data copied will be shifted out of the input buffers.
     *
     * @param buffers - Input buffers containing the data to be filled in the pooled buffer
     * @param totalLength - Total length of the data to be filled in.
     *
     */
    fill(buffers, totalLength) {
        this._size = Math.min(this.capacity, totalLength);
        let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0;
        while (totalCopiedNum < this._size) {
            const source = buffers[i];
            const target = this.buffers[j];
            const copiedNum = source.copy(target, targetOffset, sourceOffset);
            totalCopiedNum += copiedNum;
            sourceOffset += copiedNum;
            targetOffset += copiedNum;
            if (sourceOffset === source.length) {
                i++;
                sourceOffset = 0;
            }
            if (targetOffset === target.length) {
                j++;
                targetOffset = 0;
            }
        }
        // clear copied data from the source buffers
        buffers.splice(0, i);
        if (buffers.length > 0) {
            buffers[0] = buffers[0].slice(sourceOffset);
        }
    }
    /**
     * Get the readable stream assembled from all the data in the internal buffers.
     *
     */
    getReadableStream() {
        return new BuffersStream(this.buffers, this.size);
    }
}
//# sourceMappingURL=PooledBuffer.js.map
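
One detail worth noting in fill() above: it caps the copied data at the pool's capacity and mutates the input array, splicing out fully consumed source buffers and slicing the partially consumed one. A minimal, hypothetical sketch of that behavior (relative import assumed, as before):

```ts
// Hypothetical sketch of fill() semantics (relative import assumed).
import { PooledBuffer } from "./PooledBuffer.js";

const incoming = [Buffer.from("aaaa"), Buffer.from("bbbb"), Buffer.from("cc")];

// An empty 6-byte pool, filled afterwards from `incoming`.
const pooled = new PooledBuffer(6);
pooled.fill(incoming, 10);

console.log(pooled.size); // 6, capped at the pool capacity
// fill() consumed "aaaa" and the first two bytes of "bbbb",
// so the leftover input is ["bb", "cc"].
console.log(incoming.map((b) => b.toString())); // [ 'bb', 'cc' ]
```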
@@ -0,0 +1 @@
{"version":3,"file":"PooledBuffer.js","sourceRoot":"","sources":["../../src/PooledBuffer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AAEnD,OAAO,MAAM,MAAM,aAAa,CAAC;AAEjC;;GAEG;AAEH,MAAM,eAAe,GAAG,MAAM,CAAC,SAAS,CAAC,UAAU,CAAC;AAEpD;;;;;;;GAOG;AACH,MAAM,OAAO,YAAY;IAiBvB;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAsBD,YAAY,QAAgB,EAAE,OAAkB,EAAE,WAAoB;QA3CtE;;;WAGG;QACK,YAAO,GAAa,EAAE,CAAC;QAwC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;QAEf,WAAW;QACX,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC;QACxD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC;YACnC,IAAI,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC;YAC7E,IAAI,GAAG,KAAK,CAAC,EAAE,CAAC;gBACd,GAAG,GAAG,eAAe,CAAC;YACxB,CAAC;YACD,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;QAC7C,CAAC;QAED,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,WAAY,CAAC,CAAC;QACnC,CAAC;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,IAAI,CAAC,OAAiB,EAAE,WAAmB;QAChD,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAElD,IAAI,CAAC,GAAG,CAAC,EACP,CAAC,GAAG,CAAC,EACL,YAAY,GAAG,CAAC,EAChB,YAAY,GAAG,CAAC,EAChB,cAAc,GAAG,CAAC,CAAC;QACrB,OAAO,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC;YACnC,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YAC/B,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,YAAY,EAAE,YAAY,CAAC,CAAC;YAElE,cAAc,IAAI,SAAS,CAAC;YAC5B,YAAY,IAAI,SAAS,CAAC;YAC1B,YAAY,IAAI,SAAS,CAAC;YAC1B,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE,CAAC;gBACnC,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;YACnB,CAAC;YACD,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE,CAAC;gBACnC,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;YACnB,CAAC;QACH,CAAC;QAED,mCAAmC;QACnC,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACrB,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACvB,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;QAC9C,CAAC;IACH,CAAC;IAED;;;OAGG;IACI,iBAAiB;QACtB,OAAO,IAAI,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;IACpD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport { BuffersStream } from \"./BuffersStream.js\";\nimport type { Readable } from \"node:stream\";\nimport buffer from \"node:buffer\";\n\n/**\n * maxBufferLength is max size of each buffer in the pooled buffers.\n */\n\nconst maxBufferLength = buffer.constants.MAX_LENGTH;\n\n/**\n * This class provides a buffer container which conceptually has no hard size limit.\n * It accepts a capacity, an array of input buffers and the total length of input data.\n * It will allocate an internal \"buffer\" of the capacity and fill the data in the input buffers\n * into the internal \"buffer\" serially with respect to the total length.\n * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream\n * assembled from all the data in the internal \"buffer\".\n */\nexport class PooledBuffer {\n /**\n * Internal buffers used to keep the data.\n * Each buffer has a length of the maxBufferLength except last one.\n */\n private buffers: Buffer[] = [];\n\n /**\n * The total size of internal buffers.\n */\n private readonly capacity: number;\n\n /**\n * The total size of data contained in internal buffers.\n */\n private _size: number;\n\n /**\n * The size of the data contained in the pooled buffers.\n */\n public get size(): number {\n return this._size;\n }\n\n /**\n * Creates an instance of 
PooledBuffer with given capacity.\n * Internal buffers are allocated but contains no data.\n * Users may call the {@link PooledBuffer.fill} method to fill this\n * pooled buffer with data.\n *\n * @param capacity - Total capacity of the internal buffers\n */\n constructor(capacity: number);\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated and filled with data in the input buffers serially\n * with respect to the total length.\n *\n * @param capacity - Total capacity of the internal buffers\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n */\n constructor(capacity: number, buffers: Buffer[], totalLength: number);\n constructor(capacity: number, buffers?: Buffer[], totalLength?: number) {\n this.capacity = capacity;\n this._size = 0;\n\n // allocate\n const bufferNum = Math.ceil(capacity / maxBufferLength);\n for (let i = 0; i < bufferNum; i++) {\n let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;\n if (len === 0) {\n len = maxBufferLength;\n }\n this.buffers.push(Buffer.allocUnsafe(len));\n }\n\n if (buffers) {\n this.fill(buffers, totalLength!);\n }\n }\n\n /**\n * Fill the internal buffers with data in the input buffers serially\n * with respect to the total length and the total capacity of the internal buffers.\n * Data copied will be shift out of the input buffers.\n *\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n *\n */\n public fill(buffers: Buffer[], totalLength: number): void {\n this._size = Math.min(this.capacity, totalLength);\n\n let i = 0,\n j = 0,\n targetOffset = 0,\n sourceOffset = 0,\n totalCopiedNum = 0;\n while (totalCopiedNum < this._size) {\n const source = buffers[i];\n const target = this.buffers[j];\n const copiedNum = source.copy(target, targetOffset, sourceOffset);\n\n totalCopiedNum += copiedNum;\n sourceOffset += copiedNum;\n targetOffset += copiedNum;\n if (sourceOffset === source.length) {\n i++;\n sourceOffset = 0;\n }\n if (targetOffset === target.length) {\n j++;\n targetOffset = 0;\n }\n }\n\n // clear copied from source buffers\n buffers.splice(0, i);\n if (buffers.length > 0) {\n buffers[0] = buffers[0].slice(sourceOffset);\n }\n }\n\n /**\n * Get the readable stream assembled from all the data in the internal buffers.\n *\n */\n public getReadableStream(): Readable {\n return new BuffersStream(this.buffers, this.size);\n }\n}\n"]}
@@ -0,0 +1 @@
{"version":3,"file":"cache.d.ts","sourceRoot":"","sources":["../../src/cache.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,2BAA2B,CAAC;AAK5D,wBAAgB,0BAA0B,IAAI,UAAU,CAKvD"}
@@ -0,0 +1,11 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { createDefaultHttpClient } from "@azure/core-rest-pipeline";
let _defaultHttpClient;
export function getCachedDefaultHttpClient() {
    if (!_defaultHttpClient) {
        _defaultHttpClient = createDefaultHttpClient();
    }
    return _defaultHttpClient;
}
//# sourceMappingURL=cache.js.map
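
cache.js is a small module-level memoization: the first call creates a default HttpClient from @azure/core-rest-pipeline and every later call returns the same instance, so callers within a process share one client. A hedged sketch of using it follows; the request URL is a placeholder, and getCachedDefaultHttpClient is re-exported from the package entry point, per the index source maps below.

```ts
// Minimal sketch; the URL here is just a placeholder.
import { getCachedDefaultHttpClient } from "@azure/storage-common";
import { createPipelineRequest } from "@azure/core-rest-pipeline";

async function demo(): Promise<void> {
  const clientA = getCachedDefaultHttpClient();
  const clientB = getCachedDefaultHttpClient();
  console.log(clientA === clientB); // true: one shared client per process

  // The cached client implements the HttpClient interface from @azure/core-rest-pipeline.
  const response = await clientA.sendRequest(createPipelineRequest({ url: "https://example.com" }));
  console.log(response.status);
}

demo().catch(console.error);
```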
@@ -0,0 +1 @@
{"version":3,"file":"cache.js","sourceRoot":"","sources":["../../src/cache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,uBAAuB,EAAE,MAAM,2BAA2B,CAAC;AAEpE,IAAI,kBAA8B,CAAC;AAEnC,MAAM,UAAU,0BAA0B;IACxC,IAAI,CAAC,kBAAkB,EAAE,CAAC;QACxB,kBAAkB,GAAG,uBAAuB,EAAE,CAAC;IACjD,CAAC;IACD,OAAO,kBAAkB,CAAC;AAC5B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nimport type { HttpClient } from \"@azure/core-rest-pipeline\";\nimport { createDefaultHttpClient } from \"@azure/core-rest-pipeline\";\n\nlet _defaultHttpClient: HttpClient;\n\nexport function getCachedDefaultHttpClient(): HttpClient {\n if (!_defaultHttpClient) {\n _defaultHttpClient = createDefaultHttpClient();\n }\n return _defaultHttpClient;\n}\n"]}
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAGA,cAAc,sBAAsB,CAAC;AACrC,OAAO,EAAE,0BAA0B,EAAE,MAAM,YAAY,CAAC"}
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,sBAAsB,CAAC;AACrC,OAAO,EAAE,0BAA0B,EAAE,MAAM,YAAY,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT License.\n\nexport * from \"./BufferScheduler.js\";\nexport { getCachedDefaultHttpClient } from \"./cache.js\";\n"]}
@@ -0,0 +1,157 @@
/**
 * OutgoingHandler is an async function triggered by BufferScheduler.
 */
export declare type OutgoingHandler = (body: () => NodeJS.ReadableStream, length: number, offset?: number) => Promise<any>;
/**
 * This class accepts a Node.js Readable stream as input, and keeps reading data
 * from the stream into the internal buffer structure, until it reaches maxBuffers.
 * Every available buffer will try to trigger outgoingHandler.
 *
 * The internal buffer structure includes an incoming buffer array and an outgoing
 * buffer array. The incoming buffer array includes the "empty" buffers that can be filled
 * with new incoming data. The outgoing array includes the filled buffers to be
 * handled by outgoingHandler. The size of every buffer above is defined by the parameter bufferSize.
 *
 * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING
 *
 * NUM_OF_ALL_BUFFERS <= maxBuffers
 *
 * PERFORMANCE IMPROVEMENT TIPS:
 * 1. The input stream's highWaterMark is better set to the same value as the bufferSize
 *    parameter, which avoids Buffer.concat() operations.
 * 2. concurrency should be set to a smaller value than maxBuffers, which helps to
 *    reduce the chance that an outgoing handler has to wait for stream data;
 *    in that situation, outgoing handlers are blocked.
 *    The outgoing queue shouldn't be empty.
 */
export declare class BufferScheduler {
    /**
     * Size of buffers in incoming and outgoing queues. This class will try to align
     * data read from the Readable stream into buffer chunks of the defined bufferSize.
     */
    private readonly bufferSize;
    /**
     * How many buffers can be created or maintained.
     */
    private readonly maxBuffers;
    /**
     * A Node.js Readable stream.
     */
    private readonly readable;
    /**
     * OutgoingHandler is an async function triggered by BufferScheduler when there
     * are available buffers in the outgoing array.
     */
    private readonly outgoingHandler;
    /**
     * An internal event emitter.
     */
    private readonly emitter;
    /**
     * Concurrency of executing outgoingHandlers. (0 < concurrency <= maxBuffers)
     */
    private readonly concurrency;
    /**
     * An internal offset marker to track the data offset in bytes of the next outgoingHandler.
     */
    private offset;
    /**
     * An internal marker to track whether the stream has ended.
     */
    private isStreamEnd;
    /**
     * An internal marker to track whether the stream or an outgoingHandler has returned an error.
     */
    private isError;
    /**
     * How many handlers are executing.
     */
    private executingOutgoingHandlers;
    /**
     * Encoding of the input Readable stream when its data type is string instead of Buffer.
     */
    private encoding?;
    /**
     * How many buffers have been allocated.
     */
    private numBuffers;
    /**
     * This class doesn't know how much data the stream emits at a time; that is
     * defined by the stream's highWaterMark. So BufferScheduler will cache the
     * data received from the stream, and when the data in unresolvedDataArray exceeds
     * the defined bufferSize, it will try to concat a buffer of bufferSize, fill an available
     * buffer from incoming and push it to the outgoing array.
     */
    private unresolvedDataArray;
    /**
     * How much data is contained in unresolvedDataArray.
     */
    private unresolvedLength;
    /**
     * The array includes all the available buffers that can be used to fill data from the stream.
     */
    private incoming;
    /**
     * The array (queue) includes all the buffers filled from stream data.
     */
    private outgoing;
    /**
     * Creates an instance of BufferScheduler.
     *
     * @param readable - A Node.js Readable stream
     * @param bufferSize - Buffer size of every maintained buffer
     * @param maxBuffers - How many buffers can be allocated
     * @param outgoingHandler - An async function scheduled to be
     *                          triggered when a buffer is fully filled
     *                          with stream data
     * @param concurrency - Concurrency of executing outgoingHandlers (>0)
     * @param encoding - [Optional] Encoding of Readable stream when it's a string stream
     */
    constructor(readable: NodeJS.ReadableStream, bufferSize: number, maxBuffers: number, outgoingHandler: OutgoingHandler, concurrency: number, encoding?: BufferEncoding);
    /**
     * Start the scheduler; it will return an error when the stream or any of the outgoingHandlers
     * returns an error.
     *
     */
    do(): Promise<void>;
    /**
     * Insert new data into the unresolved array.
     *
     * @param data -
     */
    private appendUnresolvedData;
    /**
     * Try to shift a buffer with a size of bufferSize. The buffer returned may be smaller
     * than bufferSize when data in unresolvedDataArray is less than bufferSize.
     *
     */
    private shiftBufferFromUnresolvedDataArray;
    /**
     * Resolve data in unresolvedDataArray. For every buffer of bufferSize
     * shifted, it will try to get (or allocate) a buffer from incoming, fill it,
     * then push it into outgoing to be handled by an outgoing handler.
     *
     * Returns false when available buffers in incoming are not enough, else true.
     *
     * @returns false when buffers in incoming are not enough, else true.
     */
    private resolveData;
    /**
     * Try to trigger an outgoing handler for every buffer in outgoing. Stop when
     * the concurrency limit is reached.
     */
    private triggerOutgoingHandlers;
    /**
     * Trigger an outgoing handler for a buffer shifted from outgoing.
     *
     * @param buffer -
     */
    private triggerOutgoingHandler;
    /**
     * Return a buffer used by an outgoing handler to incoming.
     *
     * @param buffer -
     */
    private reuseBuffer;
}
//# sourceMappingURL=BufferScheduler.d.ts.map
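
To make the declaration above concrete, here is a hypothetical sketch of wiring a BufferScheduler over a file stream. uploadChunk and the file path are stand-ins for whatever per-block operation a storage client would actually perform; only the constructor signature and the OutgoingHandler shape come from the declaration itself.

```ts
// Hypothetical sketch of driving BufferScheduler; `uploadChunk` and the file
// path are placeholders, not part of the package.
import { BufferScheduler } from "@azure/storage-common";
import { createReadStream } from "node:fs";

async function uploadChunk(
  body: () => NodeJS.ReadableStream,
  length: number,
  offset?: number,
): Promise<void> {
  // A real handler would send `body()` somewhere; here we only log the block layout.
  console.log(`block at offset ${offset}, ${length} bytes`);
  for await (const _chunk of body()) {
    // drain the block's readable stream
  }
}

async function main(): Promise<void> {
  const scheduler = new BufferScheduler(
    createReadStream("./big-file.bin"), // readable
    4 * 1024 * 1024,                    // bufferSize: 4 MiB blocks
    20,                                 // maxBuffers
    uploadChunk,                        // outgoingHandler
    5,                                  // concurrency (< maxBuffers, per the tips above)
  );
  await scheduler.do(); // resolves when the stream ends and all handlers finish
}

main().catch(console.error);
```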
@@ -0,0 +1 @@
{"version":3,"file":"BufferScheduler.d.ts","sourceRoot":"","sources":["../../src/BufferScheduler.ts"],"names":[],"mappings":"AAMA;;GAEG;AACH,MAAM,CAAC,OAAO,MAAM,eAAe,GAAG,CACpC,IAAI,EAAE,MAAM,MAAM,CAAC,cAAc,EACjC,MAAM,EAAE,MAAM,EACd,MAAM,CAAC,EAAE,MAAM,KACZ,OAAO,CAAC,GAAG,CAAC,CAAC;AAElB;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,qBAAa,eAAe;IAC1B;;;OAGG;IACH,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAS;IAEpC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAS;IAEpC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAwB;IAEjD;;;OAGG;IACH,OAAO,CAAC,QAAQ,CAAC,eAAe,CAAkB;IAElD;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAoC;IAE5D;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAS;IAErC;;OAEG;IACH,OAAO,CAAC,MAAM,CAAa;IAE3B;;OAEG;IACH,OAAO,CAAC,WAAW,CAAkB;IAErC;;OAEG;IACH,OAAO,CAAC,OAAO,CAAkB;IAEjC;;OAEG;IACH,OAAO,CAAC,yBAAyB,CAAa;IAE9C;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,CAAiB;IAElC;;OAEG;IACH,OAAO,CAAC,UAAU,CAAa;IAE/B;;;;;;OAMG;IACH,OAAO,CAAC,mBAAmB,CAAgB;IAE3C;;OAEG;IACH,OAAO,CAAC,gBAAgB,CAAa;IAErC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAsB;IAEtC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAsB;IAEtC;;;;;;;;;;;OAWG;gBAED,QAAQ,EAAE,MAAM,CAAC,cAAc,EAC/B,UAAU,EAAE,MAAM,EAClB,UAAU,EAAE,MAAM,EAClB,eAAe,EAAE,eAAe,EAChC,WAAW,EAAE,MAAM,EACnB,QAAQ,CAAC,EAAE,cAAc;IAsB3B;;;;OAIG;IACU,EAAE,IAAI,OAAO,CAAC,IAAI,CAAC;IAgDhC;;;;OAIG;IACH,OAAO,CAAC,oBAAoB;IAK5B;;;;OAIG;IACH,OAAO,CAAC,kCAAkC;IAW1C;;;;;;;;OAQG;IACH,OAAO,CAAC,WAAW;IAuBnB;;;OAGG;YACW,uBAAuB;IAcrC;;;;OAIG;YACW,sBAAsB;IAsBpC;;;;OAIG;IACH,OAAO,CAAC,WAAW;CAMpB"}
@@ -0,0 +1,252 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
import { EventEmitter } from "events";
import { PooledBuffer } from "./PooledBuffer.js";
/**
 * This class accepts a Node.js Readable stream as input, and keeps reading data
 * from the stream into the internal buffer structure, until it reaches maxBuffers.
 * Every available buffer will try to trigger outgoingHandler.
 *
 * The internal buffer structure includes an incoming buffer array and an outgoing
 * buffer array. The incoming buffer array includes the "empty" buffers that can be filled
 * with new incoming data. The outgoing array includes the filled buffers to be
 * handled by outgoingHandler. The size of every buffer above is defined by the parameter bufferSize.
 *
 * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING
 *
 * NUM_OF_ALL_BUFFERS <= maxBuffers
 *
 * PERFORMANCE IMPROVEMENT TIPS:
 * 1. The input stream's highWaterMark is better set to the same value as the bufferSize
 *    parameter, which avoids Buffer.concat() operations.
 * 2. concurrency should be set to a smaller value than maxBuffers, which helps to
 *    reduce the chance that an outgoing handler has to wait for stream data;
 *    in that situation, outgoing handlers are blocked.
 *    The outgoing queue shouldn't be empty.
 */
export class BufferScheduler {
    /**
     * Creates an instance of BufferScheduler.
     *
     * @param readable - A Node.js Readable stream
     * @param bufferSize - Buffer size of every maintained buffer
     * @param maxBuffers - How many buffers can be allocated
     * @param outgoingHandler - An async function scheduled to be
     *                          triggered when a buffer is fully filled
     *                          with stream data
     * @param concurrency - Concurrency of executing outgoingHandlers (>0)
     * @param encoding - [Optional] Encoding of Readable stream when it's a string stream
     */
    constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) {
        /**
         * An internal event emitter.
         */
        this.emitter = new EventEmitter();
        /**
         * An internal offset marker to track the data offset in bytes of the next outgoingHandler.
         */
        this.offset = 0;
        /**
         * An internal marker to track whether the stream has ended.
         */
        this.isStreamEnd = false;
        /**
         * An internal marker to track whether the stream or an outgoingHandler has returned an error.
         */
        this.isError = false;
        /**
         * How many handlers are executing.
         */
        this.executingOutgoingHandlers = 0;
        /**
         * How many buffers have been allocated.
         */
        this.numBuffers = 0;
        /**
         * This class doesn't know how much data the stream emits at a time; that is
         * defined by the stream's highWaterMark. So BufferScheduler will cache the
         * data received from the stream, and when the data in unresolvedDataArray exceeds
         * the defined bufferSize, it will try to concat a buffer of bufferSize, fill an available
         * buffer from incoming and push it to the outgoing array.
         */
        this.unresolvedDataArray = [];
        /**
         * How much data is contained in unresolvedDataArray.
         */
        this.unresolvedLength = 0;
        /**
         * The array includes all the available buffers that can be used to fill data from the stream.
         */
        this.incoming = [];
        /**
         * The array (queue) includes all the buffers filled from stream data.
         */
        this.outgoing = [];
        if (bufferSize <= 0) {
            throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`);
        }
        if (maxBuffers <= 0) {
            throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`);
        }
        if (concurrency <= 0) {
            throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`);
        }
        this.bufferSize = bufferSize;
        this.maxBuffers = maxBuffers;
        this.readable = readable;
        this.outgoingHandler = outgoingHandler;
        this.concurrency = concurrency;
        this.encoding = encoding;
    }
    /**
     * Start the scheduler; it will return an error when the stream or any of the outgoingHandlers
     * returns an error.
     *
     */
    async do() {
        return new Promise((resolve, reject) => {
            this.readable.on("data", (data) => {
                data = typeof data === "string" ? Buffer.from(data, this.encoding) : data;
                this.appendUnresolvedData(data);
                if (!this.resolveData()) {
                    this.readable.pause();
                }
            });
            this.readable.on("error", (err) => {
                this.emitter.emit("error", err);
            });
            this.readable.on("end", () => {
                this.isStreamEnd = true;
                this.emitter.emit("checkEnd");
            });
            this.emitter.on("error", (err) => {
                this.isError = true;
                this.readable.pause();
                reject(err);
            });
            this.emitter.on("checkEnd", () => {
                if (this.outgoing.length > 0) {
                    this.triggerOutgoingHandlers();
                    return;
                }
                if (this.isStreamEnd && this.executingOutgoingHandlers === 0) {
                    if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) {
                        const buffer = this.shiftBufferFromUnresolvedDataArray();
                        this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset)
                            .then(resolve)
                            .catch(reject);
                    }
                    else if (this.unresolvedLength >= this.bufferSize) {
                        return;
                    }
                    else {
                        resolve();
                    }
                }
            });
        });
    }
    /**
     * Insert new data into the unresolved array.
     *
     * @param data -
     */
    appendUnresolvedData(data) {
        this.unresolvedDataArray.push(data);
        this.unresolvedLength += data.length;
    }
    /**
     * Try to shift a buffer with a size of bufferSize. The buffer returned may be smaller
     * than bufferSize when data in unresolvedDataArray is less than bufferSize.
     *
     */
    shiftBufferFromUnresolvedDataArray(buffer) {
        if (!buffer) {
            buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);
        }
        else {
            buffer.fill(this.unresolvedDataArray, this.unresolvedLength);
        }
        this.unresolvedLength -= buffer.size;
        return buffer;
    }
    /**
     * Resolve data in unresolvedDataArray. For every buffer of bufferSize
     * shifted, it will try to get (or allocate) a buffer from incoming, fill it,
     * then push it into outgoing to be handled by an outgoing handler.
     *
     * Returns false when available buffers in incoming are not enough, else true.
     *
     * @returns false when buffers in incoming are not enough, else true.
     */
    resolveData() {
        while (this.unresolvedLength >= this.bufferSize) {
            let buffer;
            if (this.incoming.length > 0) {
                buffer = this.incoming.shift();
                this.shiftBufferFromUnresolvedDataArray(buffer);
            }
            else {
                if (this.numBuffers < this.maxBuffers) {
                    buffer = this.shiftBufferFromUnresolvedDataArray();
                    this.numBuffers++;
                }
                else {
                    // No available buffer, wait for a buffer to be returned
                    return false;
                }
            }
            this.outgoing.push(buffer);
            this.triggerOutgoingHandlers();
        }
        return true;
    }
    /**
     * Try to trigger an outgoing handler for every buffer in outgoing. Stop when
     * the concurrency limit is reached.
     */
    async triggerOutgoingHandlers() {
        let buffer;
        do {
            if (this.executingOutgoingHandlers >= this.concurrency) {
                return;
            }
            buffer = this.outgoing.shift();
            if (buffer) {
                this.triggerOutgoingHandler(buffer);
            }
        } while (buffer);
    }
    /**
     * Trigger an outgoing handler for a buffer shifted from outgoing.
     *
     * @param buffer -
     */
    async triggerOutgoingHandler(buffer) {
        const bufferLength = buffer.size;
        this.executingOutgoingHandlers++;
        this.offset += bufferLength;
        try {
            await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength);
        }
        catch (err) {
            this.emitter.emit("error", err);
            return;
        }
        this.executingOutgoingHandlers--;
        this.reuseBuffer(buffer);
        this.emitter.emit("checkEnd");
    }
    /**
     * Return a buffer used by an outgoing handler to incoming.
     *
     * @param buffer -
     */
    reuseBuffer(buffer) {
        this.incoming.push(buffer);
        if (!this.isError && this.resolveData() && !this.isStreamEnd) {
            this.readable.resume();
        }
    }
}
//# sourceMappingURL=BufferScheduler.js.map
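
As a closing note on the implementation above: the scheduler coordinates everything through the readable's "data"/"end" events plus its internal "checkEnd" emitter, pauses the stream when no buffer is free, and resolves do() only after the trailing partial block is handled. The harness below is a hypothetical, in-memory exercise of that flow; the expected block layout follows from bufferSize = 4 and a 10-byte input.

```ts
// Hypothetical harness exercising BufferScheduler end to end with an in-memory stream.
import { BufferScheduler } from "@azure/storage-common";
import { Readable } from "node:stream";

async function main(): Promise<void> {
  const blocks: Array<{ offset: number | undefined; length: number }> = [];

  const scheduler = new BufferScheduler(
    Readable.from([Buffer.from("abcdef"), Buffer.from("ghij")]), // 10 bytes total
    4, // bufferSize
    4, // maxBuffers
    async (body, length, offset) => {
      blocks.push({ offset, length });
      for await (const _ of body()) {
        // drain the block's readable stream
      }
    },
    2, // concurrency
  );

  await scheduler.do();
  // Two full 4-byte blocks plus a trailing 2-byte block:
  // [ { offset: 0, length: 4 }, { offset: 4, length: 4 }, { offset: 8, length: 2 } ]
  console.log(blocks);
}

main().catch(console.error);
```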