@aws-sdk/lib-storage 3.529.0 → 3.532.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist-cjs/index.js +56 -43
- package/dist-es/bytelength.js +4 -2
- package/dist-es/chunker.js +8 -10
- package/dist-es/chunks/getChunkStream.js +5 -5
- package/dist-es/chunks/{getChunkBuffer.js → getChunkUint8Array.js} +3 -3
- package/dist-es/chunks/getDataReadable.js +6 -1
- package/dist-es/chunks/getDataReadableStream.js +8 -2
- package/dist-types/chunker.d.ts +2 -1
- package/dist-types/chunks/getChunkStream.d.ts +1 -2
- package/dist-types/chunks/getChunkUint8Array.d.ts +2 -0
- package/dist-types/chunks/getDataReadable.d.ts +1 -2
- package/dist-types/chunks/getDataReadableStream.d.ts +1 -2
- package/dist-types/ts3.4/chunker.d.ts +2 -1
- package/dist-types/ts3.4/chunks/getChunkStream.d.ts +1 -1
- package/dist-types/ts3.4/chunks/{getChunkBuffer.d.ts → getChunkUint8Array.d.ts} +2 -2
- package/dist-types/ts3.4/chunks/getDataReadable.d.ts +3 -1
- package/dist-types/ts3.4/chunks/getDataReadableStream.d.ts +1 -1
- package/package.json +4 -3
- package/dist-types/chunks/getChunkBuffer.d.ts +0 -3
- /package/dist-cjs/chunks/{getChunkBuffer.js → getChunkUint8Array.js} +0 -0
package/dist-cjs/index.js
CHANGED
@@ -32,12 +32,14 @@ var import_smithy_client = require("@smithy/smithy-client");
 var import_events = require("events");

 // src/bytelength.ts
+var import_buffer = require("buffer");
 var import_runtimeConfig = require("././runtimeConfig");
 var byteLength = /* @__PURE__ */ __name((input) => {
   if (input === null || input === void 0)
     return 0;
-  if (typeof input === "string")
-
+  if (typeof input === "string") {
+    return import_buffer.Buffer.byteLength(input);
+  }
   if (typeof input.byteLength === "number") {
     return input.byteLength;
   } else if (typeof input.length === "number") {
@@ -58,60 +60,64 @@ var byteLength = /* @__PURE__ */ __name((input) => {

 var import_stream = require("stream");

-// src/chunks/getChunkBuffer.ts
-async function* getChunkBuffer(data, partSize) {
-  let partNumber = 1;
-  let startByte = 0;
-  let endByte = partSize;
-  while (endByte < data.byteLength) {
-    yield {
-      partNumber,
-      data: data.slice(startByte, endByte)
-    };
-    partNumber += 1;
-    startByte = endByte;
-    endByte = startByte + partSize;
-  }
-  yield {
-    partNumber,
-    data: data.slice(startByte),
-    lastPart: true
-  };
-}
-__name(getChunkBuffer, "getChunkBuffer");
-
 // src/chunks/getChunkStream.ts
-
+
 async function* getChunkStream(data, partSize, getNextData) {
   let partNumber = 1;
   const currentBuffer = { chunks: [], length: 0 };
   for await (const datum of getNextData(data)) {
     currentBuffer.chunks.push(datum);
-    currentBuffer.length += datum.
+    currentBuffer.length += datum.byteLength;
     while (currentBuffer.length >= partSize) {
       const dataChunk = currentBuffer.chunks.length > 1 ? import_buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0];
       yield {
         partNumber,
-        data: dataChunk.
+        data: dataChunk.subarray(0, partSize)
       };
-      currentBuffer.chunks = [dataChunk.
-      currentBuffer.length = currentBuffer.chunks[0].
+      currentBuffer.chunks = [dataChunk.subarray(partSize)];
+      currentBuffer.length = currentBuffer.chunks[0].byteLength;
       partNumber += 1;
     }
   }
   yield {
     partNumber,
-    data: import_buffer.Buffer.concat(currentBuffer.chunks),
+    data: currentBuffer.chunks.length !== 1 ? import_buffer.Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0],
     lastPart: true
   };
 }
 __name(getChunkStream, "getChunkStream");

+// src/chunks/getChunkUint8Array.ts
+async function* getChunkUint8Array(data, partSize) {
+  let partNumber = 1;
+  let startByte = 0;
+  let endByte = partSize;
+  while (endByte < data.byteLength) {
+    yield {
+      partNumber,
+      data: data.subarray(startByte, endByte)
+    };
+    partNumber += 1;
+    startByte = endByte;
+    endByte = startByte + partSize;
+  }
+  yield {
+    partNumber,
+    data: data.subarray(startByte),
+    lastPart: true
+  };
+}
+__name(getChunkUint8Array, "getChunkUint8Array");
+
 // src/chunks/getDataReadable.ts

 async function* getDataReadable(data) {
   for await (const chunk of data) {
-
+    if (import_buffer.Buffer.isBuffer(chunk) || chunk instanceof Uint8Array) {
+      yield chunk;
+    } else {
+      yield import_buffer.Buffer.from(chunk);
+    }
   }
 }
 __name(getDataReadable, "getDataReadable");
@@ -123,9 +129,14 @@ async function* getDataReadableStream(data) {
   try {
     while (true) {
       const { done, value } = await reader.read();
-      if (done)
+      if (done) {
         return;
-
+      }
+      if (import_buffer.Buffer.isBuffer(value) || value instanceof Uint8Array) {
+        yield value;
+      } else {
+        yield import_buffer.Buffer.from(value);
+      }
     }
   } catch (e) {
     throw e;
@@ -137,22 +148,24 @@ __name(getDataReadableStream, "getDataReadableStream");

 // src/chunker.ts
 var getChunk = /* @__PURE__ */ __name((data, partSize) => {
-  if (data instanceof
-    return
-  }
+  if (data instanceof Uint8Array) {
+    return getChunkUint8Array(data, partSize);
+  }
+  if (data instanceof import_stream.Readable) {
     return getChunkStream(data, partSize, getDataReadable);
-  }
-
+  }
+  if (data instanceof String || typeof data === "string") {
+    return getChunkUint8Array(import_buffer.Buffer.from(data), partSize);
   }
   if (typeof data.stream === "function") {
     return getChunkStream(data.stream(), partSize, getDataReadableStream);
-  }
+  }
+  if (data instanceof ReadableStream) {
     return getChunkStream(data, partSize, getDataReadableStream);
-  } else {
-    throw new Error(
-      "Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;."
-    );
   }
+  throw new Error(
+    "Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;."
+  );
 }, "getChunk");

 // src/Upload.ts
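The bundled entry point above also contains the Upload class (src/Upload.ts), which is what feeds bodies through getChunk. A minimal usage sketch, assuming the usual Upload options (client, params, partSize) and placeholder bucket/key names; any Body of type string | Uint8Array | Buffer | Readable | ReadableStream | Blob is routed through the chunkers shown above:

import { S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import { createReadStream } from "fs";

async function main() {
  const upload = new Upload({
    client: new S3Client({}),
    // Placeholder bucket/key; the Body here is a Node Readable, so getChunk
    // dispatches to getChunkStream + getDataReadable.
    params: { Bucket: "example-bucket", Key: "big-file.bin", Body: createReadStream("./big-file.bin") },
    partSize: 5 * 1024 * 1024, // S3's minimum multipart part size
  });
  upload.on("httpUploadProgress", (progress) => console.log(progress));
  await upload.done();
}

main().catch(console.error);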
package/dist-es/bytelength.js
CHANGED
@@ -1,9 +1,11 @@
+import { Buffer } from "buffer";
 import { ClientDefaultValues } from "./runtimeConfig";
 export const byteLength = (input) => {
     if (input === null || input === undefined)
         return 0;
-    if (typeof input === "string")
-
+    if (typeof input === "string") {
+        return Buffer.byteLength(input);
+    }
     if (typeof input.byteLength === "number") {
         return input.byteLength;
     }
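byteLength now measures strings by their encoded byte count (Buffer.byteLength) rather than falling through to .length, which counts UTF-16 code units. The difference matters for part sizing once multi-byte characters are involved; a quick illustration in plain Node (not part of the package):

import { Buffer } from "buffer";

const s = "héllo wörld";
console.log(s.length);             // 11 UTF-16 code units
console.log(Buffer.byteLength(s)); // 13 bytes after UTF-8 encoding (é and ö take 2 bytes each)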
package/dist-es/chunker.js
CHANGED
@@ -1,26 +1,24 @@
 import { Buffer } from "buffer";
 import { Readable } from "stream";
-import { getChunkBuffer } from "./chunks/getChunkBuffer";
 import { getChunkStream } from "./chunks/getChunkStream";
+import { getChunkUint8Array } from "./chunks/getChunkUint8Array";
 import { getDataReadable } from "./chunks/getDataReadable";
 import { getDataReadableStream } from "./chunks/getDataReadableStream";
 export const getChunk = (data, partSize) => {
-    if (data instanceof
-        return
+    if (data instanceof Uint8Array) {
+        return getChunkUint8Array(data, partSize);
     }
-
+    if (data instanceof Readable) {
         return getChunkStream(data, partSize, getDataReadable);
     }
-
-        return
+    if (data instanceof String || typeof data === "string") {
+        return getChunkUint8Array(Buffer.from(data), partSize);
     }
     if (typeof data.stream === "function") {
         return getChunkStream(data.stream(), partSize, getDataReadableStream);
     }
-
+    if (data instanceof ReadableStream) {
         return getChunkStream(data, partSize, getDataReadableStream);
     }
-
-        throw new Error("Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;.");
-    }
+    throw new Error("Body Data is unsupported format, expected data to be one of: string | Uint8Array | Buffer | Readable | ReadableStream | Blob;.");
 };
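Restated as a short TypeScript sketch (types loosened, labels instead of real generator calls), the reworked dispatch checks the body in this order: Uint8Array (which covers Buffer), Node Readable, string, Blob-like values exposing .stream(), then web ReadableStream, and finally throws for anything else:

import { Readable } from "stream";

// Simplified restatement of the routing in chunker.js above; assumes Node 18+
// for the global ReadableStream.
function describeRoute(data: unknown): string {
  if (data instanceof Uint8Array) return "getChunkUint8Array(data)";
  if (data instanceof Readable) return "getChunkStream(data, getDataReadable)";
  if (typeof data === "string" || data instanceof String) return "getChunkUint8Array(Buffer.from(data))";
  if (data != null && typeof (data as { stream?: unknown }).stream === "function") {
    return "getChunkStream(data.stream(), getDataReadableStream)"; // Blob-like
  }
  if (data instanceof ReadableStream) return "getChunkStream(data, getDataReadableStream)";
  throw new Error("unsupported Body type");
}

console.log(describeRoute(new Uint8Array(10))); // getChunkUint8Array(data)
console.log(describeRoute("hello"));            // getChunkUint8Array(Buffer.from(data))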
package/dist-es/chunks/getChunkStream.js
CHANGED
@@ -4,21 +4,21 @@ export async function* getChunkStream(data, partSize, getNextData) {
     const currentBuffer = { chunks: [], length: 0 };
     for await (const datum of getNextData(data)) {
         currentBuffer.chunks.push(datum);
-        currentBuffer.length += datum.
+        currentBuffer.length += datum.byteLength;
         while (currentBuffer.length >= partSize) {
             const dataChunk = currentBuffer.chunks.length > 1 ? Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0];
             yield {
                 partNumber,
-                data: dataChunk.
+                data: dataChunk.subarray(0, partSize),
             };
-            currentBuffer.chunks = [dataChunk.
-            currentBuffer.length = currentBuffer.chunks[0].
+            currentBuffer.chunks = [dataChunk.subarray(partSize)];
+            currentBuffer.length = currentBuffer.chunks[0].byteLength;
             partNumber += 1;
         }
     }
     yield {
         partNumber,
-        data: Buffer.concat(currentBuffer.chunks),
+        data: currentBuffer.chunks.length !== 1 ? Buffer.concat(currentBuffer.chunks) : currentBuffer.chunks[0],
         lastPart: true,
     };
 }
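The loop above accumulates incoming byte chunks until at least partSize bytes are buffered, emits a part, carries the remainder forward, and marks the final part with lastPart: true. A self-contained driver against a copy of that logic (a sketch, not an import of the package internals):

import { Buffer } from "buffer";

interface Part { partNumber: number; data: Uint8Array; lastPart?: boolean }

// Minimal copy of the buffering logic shown in the diff above.
async function* chunkStream(source: AsyncIterable<Uint8Array>, partSize: number): AsyncGenerator<Part> {
  let partNumber = 1;
  const buf = { chunks: [] as Uint8Array[], length: 0 };
  for await (const datum of source) {
    buf.chunks.push(datum);
    buf.length += datum.byteLength;
    while (buf.length >= partSize) {
      const dataChunk = buf.chunks.length > 1 ? Buffer.concat(buf.chunks) : buf.chunks[0];
      yield { partNumber, data: dataChunk.subarray(0, partSize) };
      buf.chunks = [dataChunk.subarray(partSize)];
      buf.length = buf.chunks[0].byteLength;
      partNumber += 1;
    }
  }
  yield {
    partNumber,
    data: buf.chunks.length !== 1 ? Buffer.concat(buf.chunks) : buf.chunks[0],
    lastPart: true,
  };
}

// Three 4-byte chunks with partSize 5 => parts of 5, 5 and a final 2 bytes.
async function demo() {
  async function* source() {
    yield new Uint8Array(4).fill(1);
    yield new Uint8Array(4).fill(2);
    yield new Uint8Array(4).fill(3);
  }
  for await (const part of chunkStream(source(), 5)) {
    console.log(part.partNumber, part.data.byteLength, part.lastPart ?? false);
  }
}

demo().catch(console.error);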
package/dist-es/chunks/{getChunkBuffer.js → getChunkUint8Array.js}
CHANGED
@@ -1,11 +1,11 @@
-export async function*
+export async function* getChunkUint8Array(data, partSize) {
     let partNumber = 1;
     let startByte = 0;
     let endByte = partSize;
     while (endByte < data.byteLength) {
         yield {
             partNumber,
-            data: data.
+            data: data.subarray(startByte, endByte),
         };
         partNumber += 1;
         startByte = endByte;
@@ -13,7 +13,7 @@ export async function* getChunkBuffer(data, partSize) {
     }
     yield {
         partNumber,
-        data: data.
+        data: data.subarray(startByte),
         lastPart: true,
     };
 }
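The rename from getChunkBuffer to getChunkUint8Array comes with a switch from slice() to subarray(). For a plain Uint8Array, slice() copies the bytes while subarray() returns a view over the same memory (Buffer#slice was already a view), so parts no longer force a copy of the body. A quick check in plain Node (not part of the package):

const data = new Uint8Array([1, 2, 3, 4]);

const copy = data.slice(0, 2);    // Uint8Array.prototype.slice copies
const view = data.subarray(0, 2); // subarray shares the underlying ArrayBuffer

data[0] = 9;
console.log(copy[0]);                     // 1 — unaffected by the mutation
console.log(view[0]);                     // 9 — sees the mutation
console.log(view.buffer === data.buffer); // true
console.log(copy.buffer === data.buffer); // false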
package/dist-es/chunks/getDataReadable.js
CHANGED
@@ -1,6 +1,11 @@
 import { Buffer } from "buffer";
 export async function* getDataReadable(data) {
     for await (const chunk of data) {
-
+        if (Buffer.isBuffer(chunk) || chunk instanceof Uint8Array) {
+            yield chunk;
+        }
+        else {
+            yield Buffer.from(chunk);
+        }
     }
 }
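getDataReadable now passes Buffer and Uint8Array chunks through unchanged and only wraps other chunk types (for example, strings from a Readable that is not in binary mode) with Buffer.from. A small sketch mirroring that normalization:

import { Buffer } from "buffer";
import { Readable } from "stream";

// Mirror of the normalization added in getDataReadable.js.
async function* toBytes(source: AsyncIterable<unknown>): AsyncGenerator<Uint8Array> {
  for await (const chunk of source) {
    if (Buffer.isBuffer(chunk) || chunk instanceof Uint8Array) {
      yield chunk;
    } else {
      yield Buffer.from(chunk as string);
    }
  }
}

async function demo() {
  // Readable.from can emit strings, Buffers, and Uint8Arrays.
  const mixed = Readable.from(["abc", Buffer.from("def"), new Uint8Array([1, 2, 3])]);
  for await (const chunk of toBytes(mixed)) {
    console.log(chunk.byteLength, chunk instanceof Uint8Array); // 3 true, three times
  }
}

demo().catch(console.error);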
package/dist-es/chunks/getDataReadableStream.js
CHANGED
@@ -4,9 +4,15 @@ export async function* getDataReadableStream(data) {
     try {
         while (true) {
             const { done, value } = await reader.read();
-            if (done)
+            if (done) {
                 return;
-
+            }
+            if (Buffer.isBuffer(value) || value instanceof Uint8Array) {
+                yield value;
+            }
+            else {
+                yield Buffer.from(value);
+            }
         }
     }
     catch (e) {
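getDataReadableStream applies the same normalization to values pulled from a web ReadableStream reader: Buffer/Uint8Array values are yielded as-is, everything else goes through Buffer.from. A sketch of the same idea (assumes Node 18+ for the global ReadableStream; the lock handling here is simplified relative to the package code):

import { Buffer } from "buffer";

async function* readAll(stream: ReadableStream): AsyncGenerator<Uint8Array> {
  const reader = stream.getReader();
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        return;
      }
      if (Buffer.isBuffer(value) || value instanceof Uint8Array) {
        yield value;
      } else {
        yield Buffer.from(value as string);
      }
    }
  } finally {
    reader.releaseLock();
  }
}

async function demo() {
  const stream = new ReadableStream({
    start(controller) {
      controller.enqueue(new Uint8Array([1, 2, 3]));
      controller.enqueue("plain string"); // not bytes yet
      controller.close();
    },
  });
  for await (const chunk of readAll(stream)) {
    console.log(chunk.byteLength); // 3, then 12
  }
}

demo().catch(console.error);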
package/dist-types/chunker.d.ts
CHANGED
@@ -1,2 +1,3 @@
 import { BodyDataTypes } from "./types";
-
+import type { RawDataPart } from "./Upload";
+export declare const getChunk: (data: BodyDataTypes, partSize: number) => AsyncGenerator<RawDataPart, void, undefined>;
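getChunk is now declared to yield RawDataPart values instead of an untyped generator. A hedged consumer sketch; the RawDataPart shape below is inferred from the yields in the chunkers above, not copied from Upload.ts:

// Assumed shape, inferred from the generators above ({ partNumber, data, lastPart? }).
interface RawDataPart {
  partNumber: number;
  data: Uint8Array;
  lastPart?: boolean;
}

async function consume(parts: AsyncGenerator<RawDataPart, void, undefined>): Promise<number> {
  let total = 0;
  for await (const part of parts) {
    total += part.data.byteLength;
    if (part.lastPart) {
      console.log(`final part is #${part.partNumber}`);
    }
  }
  return total;
}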
package/dist-types/chunks/getChunkStream.d.ts
CHANGED
@@ -1,3 +1,2 @@
-/// <reference types="node" />
 import { RawDataPart } from "../Upload";
-export declare function getChunkStream<T>(data: T, partSize: number, getNextData: (data: T) => AsyncGenerator<
+export declare function getChunkStream<T>(data: T, partSize: number, getNextData: (data: T) => AsyncGenerator<Uint8Array>): AsyncGenerator<RawDataPart, void, undefined>;
package/dist-types/chunks/getDataReadableStream.d.ts
CHANGED
@@ -1,2 +1 @@
-
-export declare function getDataReadableStream(data: ReadableStream): AsyncGenerator<Buffer>;
+export declare function getDataReadableStream(data: ReadableStream): AsyncGenerator<Uint8Array>;
package/dist-types/ts3.4/chunker.d.ts
CHANGED
@@ -1,5 +1,6 @@
 import { BodyDataTypes } from "./types";
+import { RawDataPart } from "./Upload";
 export declare const getChunk: (
   data: BodyDataTypes,
   partSize: number
-) => AsyncGenerator<
+) => AsyncGenerator<RawDataPart, void, undefined>;
package/dist-types/ts3.4/chunks/getChunkStream.d.ts
CHANGED
@@ -2,5 +2,5 @@ import { RawDataPart } from "../Upload";
 export declare function getChunkStream<T>(
   data: T,
   partSize: number,
-  getNextData: (data: T) => AsyncGenerator<
+  getNextData: (data: T) => AsyncGenerator<Uint8Array>
 ): AsyncGenerator<RawDataPart, void, undefined>;
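Both declaration variants now type getNextData as producing AsyncGenerator<Uint8Array> rather than a Buffer-based generator, so any byte source is assignable. A sketch against a copy of the declared signature (the return type is inlined here instead of importing RawDataPart):

// Signature copied from the declaration above, with RawDataPart inlined structurally.
declare function getChunkStream<T>(
  data: T,
  partSize: number,
  getNextData: (data: T) => AsyncGenerator<Uint8Array>
): AsyncGenerator<{ partNumber: number; data: Uint8Array; lastPart?: boolean }, void, undefined>;

// A custom byte source type-checks because it yields Uint8Array, not Buffer.
async function* fromBlob(blob: Blob): AsyncGenerator<Uint8Array> {
  yield new Uint8Array(await blob.arrayBuffer());
}

declare const body: Blob;
const parts = getChunkStream(body, 5 * 1024 * 1024, fromBlob);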
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aws-sdk/lib-storage",
-  "version": "3.
+  "version": "3.532.0",
   "description": "Storage higher order operation",
   "main": "./dist-cjs/index.js",
   "module": "./dist-es/index.js",
@@ -14,7 +14,8 @@
     "build:types:downlevel": "downlevel-dts dist-types dist-types/ts3.4",
     "clean": "rimraf ./dist-* && rimraf *.tsbuildinfo",
     "extract:docs": "api-extractor run --local",
-    "test": "jest"
+    "test": "jest",
+    "test:e2e": "jest -c jest.config.e2e.js"
   },
   "engines": {
     "node": ">=14.0.0"
@@ -37,7 +38,7 @@
     "@aws-sdk/client-s3": "^3.0.0"
   },
   "devDependencies": {
-    "@aws-sdk/client-s3": "3.529.
+    "@aws-sdk/client-s3": "3.529.1",
     "@smithy/types": "^2.10.1",
     "@tsconfig/recommended": "1.0.1",
     "@types/node": "^14.14.31",
/package/dist-cjs/chunks/{getChunkBuffer.js → getChunkUint8Array.js}
File without changes