@hanzo/s3 0.6.4 → 8.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +202 -0
- package/MAINTAINERS.md +62 -0
- package/README.md +262 -0
- package/README_zh_CN.md +192 -0
- package/dist/esm/AssumeRoleProvider.d.mts +86 -0
- package/dist/esm/AssumeRoleProvider.mjs +183 -0
- package/dist/esm/CredentialProvider.d.mts +22 -0
- package/dist/esm/CredentialProvider.mjs +48 -0
- package/dist/esm/Credentials.d.mts +22 -0
- package/dist/esm/Credentials.mjs +38 -0
- package/dist/esm/IamAwsProvider.d.mts +27 -0
- package/dist/esm/IamAwsProvider.mjs +189 -0
- package/dist/esm/errors.d.mts +82 -0
- package/dist/esm/errors.mjs +117 -0
- package/dist/esm/helpers.d.mts +156 -0
- package/dist/esm/helpers.mjs +218 -0
- package/dist/esm/internal/async.d.mts +9 -0
- package/dist/esm/internal/async.mjs +14 -0
- package/dist/esm/internal/callbackify.d.mts +1 -0
- package/dist/esm/internal/callbackify.mjs +15 -0
- package/dist/esm/internal/client.d.mts +394 -0
- package/dist/esm/internal/client.mjs +3007 -0
- package/dist/esm/internal/copy-conditions.d.mts +10 -0
- package/dist/esm/internal/copy-conditions.mjs +25 -0
- package/dist/esm/internal/extensions.d.mts +18 -0
- package/dist/esm/internal/extensions.mjs +114 -0
- package/dist/esm/internal/helper.d.mts +177 -0
- package/dist/esm/internal/helper.mjs +552 -0
- package/dist/esm/internal/join-host-port.d.mts +11 -0
- package/dist/esm/internal/join-host-port.mjs +23 -0
- package/dist/esm/internal/post-policy.d.mts +17 -0
- package/dist/esm/internal/post-policy.mjs +98 -0
- package/dist/esm/internal/request.d.mts +11 -0
- package/dist/esm/internal/request.mjs +75 -0
- package/dist/esm/internal/response.d.mts +8 -0
- package/dist/esm/internal/response.mjs +16 -0
- package/dist/esm/internal/s3-endpoints.d.mts +38 -0
- package/dist/esm/internal/s3-endpoints.mjs +68 -0
- package/dist/esm/internal/type.d.mts +482 -0
- package/dist/esm/internal/type.mjs +30 -0
- package/dist/esm/internal/xml-parser.d.mts +93 -0
- package/dist/esm/internal/xml-parser.mjs +819 -0
- package/dist/esm/notification.d.mts +58 -0
- package/dist/esm/notification.mjs +209 -0
- package/dist/esm/s3.d.mts +40 -0
- package/dist/esm/s3.mjs +86 -0
- package/dist/esm/signing.d.mts +5 -0
- package/dist/esm/signing.mjs +258 -0
- package/dist/main/AssumeRoleProvider.d.ts +86 -0
- package/dist/main/AssumeRoleProvider.js +191 -0
- package/dist/main/CredentialProvider.d.ts +22 -0
- package/dist/main/CredentialProvider.js +55 -0
- package/dist/main/Credentials.d.ts +22 -0
- package/dist/main/Credentials.js +45 -0
- package/dist/main/IamAwsProvider.d.ts +27 -0
- package/dist/main/IamAwsProvider.js +198 -0
- package/dist/main/errors.d.ts +82 -0
- package/dist/main/errors.js +138 -0
- package/dist/main/helpers.d.ts +156 -0
- package/dist/main/helpers.js +233 -0
- package/dist/main/internal/async.d.ts +9 -0
- package/dist/main/internal/async.js +24 -0
- package/dist/main/internal/callbackify.d.ts +1 -0
- package/dist/main/internal/callbackify.js +21 -0
- package/dist/main/internal/client.d.ts +394 -0
- package/dist/main/internal/client.js +3014 -0
- package/dist/main/internal/copy-conditions.d.ts +10 -0
- package/dist/main/internal/copy-conditions.js +31 -0
- package/dist/main/internal/extensions.d.ts +18 -0
- package/dist/main/internal/extensions.js +122 -0
- package/dist/main/internal/helper.d.ts +177 -0
- package/dist/main/internal/helper.js +608 -0
- package/dist/main/internal/join-host-port.d.ts +11 -0
- package/dist/main/internal/join-host-port.js +29 -0
- package/dist/main/internal/post-policy.d.ts +17 -0
- package/dist/main/internal/post-policy.js +107 -0
- package/dist/main/internal/request.d.ts +11 -0
- package/dist/main/internal/request.js +83 -0
- package/dist/main/internal/response.d.ts +8 -0
- package/dist/main/internal/response.js +24 -0
- package/dist/main/internal/s3-endpoints.d.ts +38 -0
- package/dist/main/internal/s3-endpoints.js +73 -0
- package/dist/main/internal/type.d.ts +482 -0
- package/dist/main/internal/type.js +42 -0
- package/dist/main/internal/xml-parser.d.ts +93 -0
- package/dist/main/internal/xml-parser.js +849 -0
- package/dist/main/notification.d.ts +58 -0
- package/dist/main/notification.js +230 -0
- package/dist/main/s3.d.ts +40 -0
- package/dist/main/s3.js +117 -0
- package/dist/main/signing.d.ts +5 -0
- package/dist/main/signing.js +269 -0
- package/package.json +146 -39
- package/src/AssumeRoleProvider.ts +262 -0
- package/src/CredentialProvider.ts +54 -0
- package/src/Credentials.ts +44 -0
- package/src/IamAwsProvider.ts +234 -0
- package/src/errors.ts +120 -0
- package/src/helpers.ts +354 -0
- package/src/internal/async.ts +14 -0
- package/src/internal/callbackify.ts +19 -0
- package/src/internal/client.ts +3412 -0
- package/src/internal/copy-conditions.ts +30 -0
- package/src/internal/extensions.ts +140 -0
- package/src/internal/helper.ts +606 -0
- package/src/internal/join-host-port.ts +23 -0
- package/src/internal/post-policy.ts +99 -0
- package/src/internal/request.ts +102 -0
- package/src/internal/response.ts +26 -0
- package/src/internal/s3-endpoints.ts +70 -0
- package/src/internal/type.ts +577 -0
- package/src/internal/xml-parser.ts +871 -0
- package/src/notification.ts +254 -0
- package/src/s3.ts +155 -0
- package/src/signing.ts +325 -0
- package/lib/index.js +0 -450
- package/lib/index.js.map +0 -7
- package/lib/perfTest.js +0 -91
- package/lib/perfTest.js.map +0 -7
|
@@ -0,0 +1,3412 @@
|
|
|
1
|
+
import * as crypto from 'node:crypto'
|
|
2
|
+
import * as fs from 'node:fs'
|
|
3
|
+
import type { IncomingHttpHeaders } from 'node:http'
|
|
4
|
+
import * as http from 'node:http'
|
|
5
|
+
import * as https from 'node:https'
|
|
6
|
+
import * as path from 'node:path'
|
|
7
|
+
import * as stream from 'node:stream'
|
|
8
|
+
|
|
9
|
+
import * as async from 'async'
|
|
10
|
+
import BlockStream2 from 'block-stream2'
|
|
11
|
+
import { isBrowser } from 'browser-or-node'
|
|
12
|
+
import _ from 'lodash'
|
|
13
|
+
import * as qs from 'query-string'
|
|
14
|
+
import xml2js from 'xml2js'
|
|
15
|
+
|
|
16
|
+
import { CredentialProvider } from '../CredentialProvider.ts'
|
|
17
|
+
import * as errors from '../errors.ts'
|
|
18
|
+
import type { SelectResults } from '../helpers.ts'
|
|
19
|
+
import {
|
|
20
|
+
CopyDestinationOptions,
|
|
21
|
+
CopySourceOptions,
|
|
22
|
+
DEFAULT_REGION,
|
|
23
|
+
LEGAL_HOLD_STATUS,
|
|
24
|
+
PRESIGN_EXPIRY_DAYS_MAX,
|
|
25
|
+
RETENTION_MODES,
|
|
26
|
+
RETENTION_VALIDITY_UNITS,
|
|
27
|
+
} from '../helpers.ts'
|
|
28
|
+
import type { NotificationEvent } from '../notification.ts'
|
|
29
|
+
import { NotificationConfig, NotificationPoller } from '../notification.ts'
|
|
30
|
+
import { postPresignSignatureV4, presignSignatureV4, signV4 } from '../signing.ts'
|
|
31
|
+
import { fsp, streamPromise } from './async.ts'
|
|
32
|
+
import { CopyConditions } from './copy-conditions.ts'
|
|
33
|
+
import { Extensions } from './extensions.ts'
|
|
34
|
+
import {
|
|
35
|
+
calculateEvenSplits,
|
|
36
|
+
extractMetadata,
|
|
37
|
+
getContentLength,
|
|
38
|
+
getScope,
|
|
39
|
+
getSourceVersionId,
|
|
40
|
+
getVersionId,
|
|
41
|
+
hashBinary,
|
|
42
|
+
insertContentType,
|
|
43
|
+
isAmazonEndpoint,
|
|
44
|
+
isBoolean,
|
|
45
|
+
isDefined,
|
|
46
|
+
isEmpty,
|
|
47
|
+
isNumber,
|
|
48
|
+
isObject,
|
|
49
|
+
isPlainObject,
|
|
50
|
+
isReadableStream,
|
|
51
|
+
isString,
|
|
52
|
+
isValidBucketName,
|
|
53
|
+
isValidEndpoint,
|
|
54
|
+
isValidObjectName,
|
|
55
|
+
isValidPort,
|
|
56
|
+
isValidPrefix,
|
|
57
|
+
isVirtualHostStyle,
|
|
58
|
+
makeDateLong,
|
|
59
|
+
PART_CONSTRAINTS,
|
|
60
|
+
partsRequired,
|
|
61
|
+
prependXAMZMeta,
|
|
62
|
+
readableStream,
|
|
63
|
+
sanitizeETag,
|
|
64
|
+
toMd5,
|
|
65
|
+
toSha256,
|
|
66
|
+
uriEscape,
|
|
67
|
+
uriResourceEscape,
|
|
68
|
+
} from './helper.ts'
|
|
69
|
+
import { joinHostPort } from './join-host-port.ts'
|
|
70
|
+
import { PostPolicy } from './post-policy.ts'
|
|
71
|
+
import { requestWithRetry } from './request.ts'
|
|
72
|
+
import { drainResponse, readAsBuffer, readAsString } from './response.ts'
|
|
73
|
+
import type { Region } from './s3-endpoints.ts'
|
|
74
|
+
import { getS3Endpoint } from './s3-endpoints.ts'
|
|
75
|
+
import type {
|
|
76
|
+
Binary,
|
|
77
|
+
BucketItem,
|
|
78
|
+
BucketItemFromList,
|
|
79
|
+
BucketItemStat,
|
|
80
|
+
BucketStream,
|
|
81
|
+
BucketVersioningConfiguration,
|
|
82
|
+
CopyObjectParams,
|
|
83
|
+
CopyObjectResult,
|
|
84
|
+
CopyObjectResultV2,
|
|
85
|
+
EncryptionConfig,
|
|
86
|
+
GetObjectLegalHoldOptions,
|
|
87
|
+
GetObjectOpts,
|
|
88
|
+
GetObjectRetentionOpts,
|
|
89
|
+
IncompleteUploadedBucketItem,
|
|
90
|
+
IRequest,
|
|
91
|
+
ItemBucketMetadata,
|
|
92
|
+
LifecycleConfig,
|
|
93
|
+
LifeCycleConfigParam,
|
|
94
|
+
ListObjectQueryOpts,
|
|
95
|
+
ListObjectQueryRes,
|
|
96
|
+
ListObjectV2Res,
|
|
97
|
+
NotificationConfigResult,
|
|
98
|
+
ObjectInfo,
|
|
99
|
+
ObjectLockConfigParam,
|
|
100
|
+
ObjectLockInfo,
|
|
101
|
+
ObjectMetaData,
|
|
102
|
+
ObjectRetentionInfo,
|
|
103
|
+
PostPolicyResult,
|
|
104
|
+
PreSignRequestParams,
|
|
105
|
+
PutObjectLegalHoldOptions,
|
|
106
|
+
PutTaggingParams,
|
|
107
|
+
RemoveObjectsParam,
|
|
108
|
+
RemoveObjectsRequestEntry,
|
|
109
|
+
RemoveObjectsResponse,
|
|
110
|
+
RemoveTaggingParams,
|
|
111
|
+
ReplicationConfig,
|
|
112
|
+
ReplicationConfigOpts,
|
|
113
|
+
RequestHeaders,
|
|
114
|
+
ResponseHeader,
|
|
115
|
+
ResultCallback,
|
|
116
|
+
Retention,
|
|
117
|
+
SelectOptions,
|
|
118
|
+
StatObjectOpts,
|
|
119
|
+
Tag,
|
|
120
|
+
TaggingOpts,
|
|
121
|
+
Tags,
|
|
122
|
+
Transport,
|
|
123
|
+
UploadedObjectInfo,
|
|
124
|
+
UploadPartConfig,
|
|
125
|
+
} from './type.ts'
|
|
126
|
+
import type { ListMultipartResult, UploadedPart } from './xml-parser.ts'
|
|
127
|
+
import {
|
|
128
|
+
parseBucketNotification,
|
|
129
|
+
parseCompleteMultipart,
|
|
130
|
+
parseInitiateMultipart,
|
|
131
|
+
parseListObjects,
|
|
132
|
+
parseListObjectsV2,
|
|
133
|
+
parseObjectLegalHoldConfig,
|
|
134
|
+
parseSelectObjectContentResponse,
|
|
135
|
+
uploadPartParser,
|
|
136
|
+
} from './xml-parser.ts'
|
|
137
|
+
import * as xmlParsers from './xml-parser.ts'
|
|
138
|
+
|
|
139
|
+
// Shared XML builder for request bodies: compact output, no XML declaration header.
const xml = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true })

// will be replaced by bundler.
const Package = { version: process.env.HANZOS3_JS_PACKAGE_VERSION || 'development' }

// The only https.RequestOptions keys honoured by setRequestOptions();
// all are TLS/socket tuning options — anything else supplied is discarded.
const requestOptionProperties = [
  'agent',
  'ca',
  'cert',
  'ciphers',
  'clientCertEngine',
  'crl',
  'dhparam',
  'ecdhCurve',
  'family',
  'honorCipherOrder',
  'key',
  'passphrase',
  'pfx',
  'rejectUnauthorized',
  'secureOptions',
  'secureProtocol',
  'servername',
  'sessionIdContext',
] as const
|
|
164
|
+
|
|
165
|
+
/**
 * Controls the retry/backoff behaviour applied to HTTP requests made by the client.
 */
export interface RetryOptions {
  /**
   * If this is set to true, it will take precedence over all other retry options.
   * @default false
   */
  disableRetry?: boolean
  /**
   * The maximum amount of retries for a request.
   * @default 1
   */
  maximumRetryCount?: number
  /**
   * The minimum duration (in milliseconds) for the exponential backoff algorithm.
   * @default 100
   */
  baseDelayMs?: number
  /**
   * The maximum duration (in milliseconds) for the exponential backoff algorithm.
   * @default 60000
   */
  maximumDelayMs?: number
}
|
|
187
|
+
|
|
188
|
+
/**
 * Construction options for {@link TypedClient}.
 */
export interface ClientOptions {
  /** Hostname of the S3-compatible endpoint (no protocol prefix or port). */
  endPoint: string
  /** Access key; may be omitted for anonymous access or when credentialsProvider is used. */
  accessKey?: string
  /** Secret key; may be omitted for anonymous access or when credentialsProvider is used. */
  secretKey?: string
  /** Use HTTPS when true. Defaults to true when unspecified. */
  useSSL?: boolean
  /** Port number; 0 or unset selects the protocol default (443 for https, 80 for http). */
  port?: number
  /** Region used for requests; when unset it is resolved per bucket elsewhere. */
  region?: Region
  /** Custom transport module; must be http/https compatible. */
  transport?: Transport
  /** STS session token sent along with the credentials. */
  sessionToken?: string
  /** Multipart part size in bytes; must be between 5 MiB and 5 GiB. */
  partSize?: number
  /** Force path-style addressing. Defaults to true when unspecified. */
  pathStyle?: boolean
  /** Dynamic credential source; takes precedence over static accessKey/secretKey. */
  credentialsProvider?: CredentialProvider
  /** S3 transfer-acceleration endpoint (only honoured for Amazon endpoints). */
  s3AccelerateEndpoint?: string
  /** Custom HTTP(S) agent; defaults to the protocol's global agent. */
  transportAgent?: http.Agent
  /** Retry/backoff configuration; retries are enabled by default. */
  retryOptions?: RetryOptions
}
|
|
204
|
+
|
|
205
|
+
/**
 * Per-request options accepted by makeRequestAsync and friends.
 * Extends the base request shape with S3 addressing information.
 */
export type RequestOption = Partial<IRequest> & {
  /** HTTP method, e.g. 'GET', 'PUT', 'DELETE'. */
  method: string
  bucketName?: string
  objectName?: string
  /** Pre-encoded query string (without the leading '?'). */
  query?: string
  /** Force path-style addressing for this single request. */
  pathStyle?: boolean
}

/** Callback for operations that produce no result, only a possible error. */
export type NoResultCallback = (error: unknown) => void

export interface MakeBucketOpt {
  /** Enable object locking on the new bucket. */
  ObjectLocking?: boolean
}

export interface RemoveOptions {
  /** Remove a specific object version instead of the latest. */
  versionId?: string
  /** Bypass governance-mode retention (sent as a bypass header). */
  governanceBypass?: boolean
  forceDelete?: boolean
}

// Internal pair of part number and ETag used when completing multipart uploads.
type Part = {
  part: number
  etag: string
}
|
|
229
|
+
|
|
230
|
+
export class TypedClient {
|
|
231
|
+
// http/https module (or user-supplied transport) used to issue requests.
protected transport: Transport
// Lower-cased endpoint host name.
protected host: string
// Effective port (protocol default applied when the caller passed 0).
protected port: number
// 'http:' or 'https:'.
protected protocol: string
protected accessKey: string
protected secretKey: string
protected sessionToken?: string
// Full User-Agent header value; extended by setAppInfo().
protected userAgent: string
// True when no credentials (and no provider) were supplied.
protected anonymous: boolean
// When true, bucket name goes in the URL path rather than the hostname.
protected pathStyle: boolean
// Presumably caches resolved regions per bucket; populated outside this chunk — confirm.
protected regionMap: Record<string, string>
public region?: string
protected credentialsProvider?: CredentialProvider
// Multipart part size in bytes; default 64 MiB.
partSize: number = 64 * 1024 * 1024
// Set when the caller explicitly chose a part size.
protected overRidePartSize?: boolean
protected retryOptions: RetryOptions

// 5 GiB — S3's maximum size of a single part.
protected maximumPartSize = 5 * 1024 * 1024 * 1024
// 5 TiB — S3's maximum object size.
protected maxObjectSize = 5 * 1024 * 1024 * 1024 * 1024
// True when payload SHA-256 is computed (authenticated plain-http requests only).
public enableSHA256: boolean
protected s3AccelerateEndpoint?: string
// Extra request options captured from setRequestOptions().
protected reqOptions: Record<string, unknown>

protected transportAgent: http.Agent
private readonly clientExtensions: Extensions
|
|
256
|
+
|
|
257
|
+
/**
 * Creates a client for the given endpoint.
 *
 * Validates all options, applies protocol defaults (transport module, port,
 * agent), builds the User-Agent string and initialises authentication state.
 *
 * @throws errors.InvalidEndpointError when endPoint is malformed
 * @throws errors.InvalidArgumentError when any other option is invalid
 */
constructor(params: ClientOptions) {
  // @ts-expect-error deprecated property
  if (params.secure !== undefined) {
    throw new Error('"secure" option deprecated, "useSSL" should be used instead')
  }
  // Default values if not specified.
  if (params.useSSL === undefined) {
    params.useSSL = true
  }
  // Port 0 means "use the protocol default", resolved below.
  if (!params.port) {
    params.port = 0
  }
  // Validate input params.
  if (!isValidEndpoint(params.endPoint)) {
    throw new errors.InvalidEndpointError(`Invalid endPoint : ${params.endPoint}`)
  }
  if (!isValidPort(params.port)) {
    throw new errors.InvalidArgumentError(`Invalid port : ${params.port}`)
  }
  if (!isBoolean(params.useSSL)) {
    throw new errors.InvalidArgumentError(
      `Invalid useSSL flag type : ${params.useSSL}, expected to be of type "boolean"`,
    )
  }

  // Validate region only if its set.
  if (params.region) {
    if (!isString(params.region)) {
      throw new errors.InvalidArgumentError(`Invalid region : ${params.region}`)
    }
  }

  const host = params.endPoint.toLowerCase()
  let port = params.port
  let protocol: string
  let transport
  let transportAgent: http.Agent
  // Validate if configuration is not using SSL
  // for constructing relevant endpoints.
  if (params.useSSL) {
    // Defaults to secure.
    transport = https
    protocol = 'https:'
    port = port || 443
    transportAgent = https.globalAgent
  } else {
    transport = http
    protocol = 'http:'
    port = port || 80
    transportAgent = http.globalAgent
  }

  // if custom transport is set, use it.
  if (params.transport) {
    if (!isObject(params.transport)) {
      throw new errors.InvalidArgumentError(
        `Invalid transport type : ${params.transport}, expected to be type "object"`,
      )
    }
    transport = params.transport
  }

  // if custom transport agent is set, use it.
  if (params.transportAgent) {
    if (!isObject(params.transportAgent)) {
      throw new errors.InvalidArgumentError(
        `Invalid transportAgent type: ${params.transportAgent}, expected to be type "object"`,
      )
    }

    transportAgent = params.transportAgent
  }

  // User Agent should always following the below style.
  // Please open an issue to discuss any new changes here.
  //
  //       HanzoS3 (OS; ARCH) LIB/VER APP/VER
  //
  const libraryComments = `(${process.platform}; ${process.arch})`
  const libraryAgent = `HanzoS3 ${libraryComments} hanzo-s3/${Package.version}`
  // User agent block ends.

  this.transport = transport
  this.transportAgent = transportAgent
  this.host = host
  this.port = port
  this.protocol = protocol
  this.userAgent = `${libraryAgent}`

  // Default path style is true
  if (params.pathStyle === undefined) {
    this.pathStyle = true
  } else {
    this.pathStyle = params.pathStyle
  }

  this.accessKey = params.accessKey ?? ''
  this.secretKey = params.secretKey ?? ''
  this.sessionToken = params.sessionToken
  // Anonymous unless both static keys are present...
  this.anonymous = !this.accessKey || !this.secretKey

  // ...or a credentials provider will supply them dynamically.
  if (params.credentialsProvider) {
    this.anonymous = false
    this.credentialsProvider = params.credentialsProvider
  }

  this.regionMap = {}
  if (params.region) {
    this.region = params.region
  }

  if (params.partSize) {
    this.partSize = params.partSize
    this.overRidePartSize = true
  }
  // Enforce S3 multipart part-size limits (5 MiB .. 5 GiB).
  if (this.partSize < 5 * 1024 * 1024) {
    throw new errors.InvalidArgumentError(`Part size should be greater than 5MB`)
  }
  if (this.partSize > 5 * 1024 * 1024 * 1024) {
    throw new errors.InvalidArgumentError(`Part size should be less than 5GB`)
  }

  // SHA256 is enabled only for authenticated http requests. If the request is authenticated
  // and the connection is https we use x-amz-content-sha256=UNSIGNED-PAYLOAD
  // header for signature calculation.
  this.enableSHA256 = !this.anonymous && !params.useSSL

  this.s3AccelerateEndpoint = params.s3AccelerateEndpoint || undefined
  this.reqOptions = {}
  this.clientExtensions = new Extensions(this)

  if (params.retryOptions) {
    if (!isObject(params.retryOptions)) {
      throw new errors.InvalidArgumentError(
        `Invalid retryOptions type: ${params.retryOptions}, expected to be type "object"`,
      )
    }

    this.retryOptions = params.retryOptions
  } else {
    // Retries enabled by default; remaining knobs fall back to their documented defaults.
    this.retryOptions = {
      disableRetry: false,
    }
  }
}
|
|
402
|
+
/**
 * S3 extensions that aren't necessarily present for Amazon S3 compatible storage servers
 *
 * @returns the Extensions instance created for this client at construction time
 */
get extensions() {
  return this.clientExtensions
}
|
|
408
|
+
|
|
409
|
+
/**
 * Enables S3 transfer acceleration by recording the accelerate endpoint;
 * it is consulted later when building request options for Amazon endpoints.
 *
 * @param endPoint - valid S3 acceleration end point
 */
setS3TransferAccelerate(endPoint: string) {
  this.s3AccelerateEndpoint = endPoint
}
|
|
415
|
+
|
|
416
|
+
/**
 * Sets the supported request options.
 *
 * Only the keys listed in `requestOptionProperties` (TLS/socket options)
 * are retained; any other key on `options` is silently dropped.
 *
 * @throws TypeError when options is not an object
 */
public setRequestOptions(options: Pick<https.RequestOptions, (typeof requestOptionProperties)[number]>) {
  if (!isObject(options)) {
    throw new TypeError('request options should be of type "object"')
  }
  this.reqOptions = _.pick(options, requestOptionProperties)
}
|
|
425
|
+
|
|
426
|
+
/**
|
|
427
|
+
* This is s3 Specific and does not hold validity in any other Object storage.
|
|
428
|
+
*/
|
|
429
|
+
private getAccelerateEndPointIfSet(bucketName?: string, objectName?: string) {
|
|
430
|
+
if (!isEmpty(this.s3AccelerateEndpoint) && !isEmpty(bucketName) && !isEmpty(objectName)) {
|
|
431
|
+
// http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html
|
|
432
|
+
// Disable transfer acceleration for non-compliant bucket names.
|
|
433
|
+
if (bucketName.includes('.')) {
|
|
434
|
+
throw new Error(`Transfer Acceleration is not supported for non compliant bucket:${bucketName}`)
|
|
435
|
+
}
|
|
436
|
+
// If transfer acceleration is requested set new host.
|
|
437
|
+
// For more details about enabling transfer acceleration read here.
|
|
438
|
+
// http://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html
|
|
439
|
+
return this.s3AccelerateEndpoint
|
|
440
|
+
}
|
|
441
|
+
return false
|
|
442
|
+
}
|
|
443
|
+
|
|
444
|
+
/**
 * Set application specific information.
 * Generates User-Agent in the following style.
 * HanzoS3 (OS; ARCH) LIB/VER APP/VER
 *
 * @throws TypeError when either argument is not a string
 * @throws errors.InvalidArgumentError when either argument is blank
 */
setAppInfo(appName: string, appVersion: string) {
  // Both fields share identical validation, so factor it into one local check.
  const ensureNonBlankString = (value: string, label: string) => {
    if (!isString(value)) {
      throw new TypeError(`Invalid ${label}: ${value}`)
    }
    if (value.trim() === '') {
      throw new errors.InvalidArgumentError(`Input ${label} cannot be empty.`)
    }
  }
  ensureNonBlankString(appName, 'appName')
  ensureNonBlankString(appVersion, 'appVersion')
  this.userAgent = `${this.userAgent} ${appName}/${appVersion}`
}
|
|
464
|
+
|
|
465
|
+
/**
 * returns options object that can be used with http.request()
 * Takes care of constructing virtual-host-style or path-style hostname
 *
 * Resolution order: Amazon endpoints are rewritten to the accelerate or
 * regional endpoint first, then the bucket is placed either in the hostname
 * (virtual-host style) or in the URI path (path style).
 */
protected getRequestOptions(
  opts: RequestOption & {
    region: string
  },
): IRequest & {
  host: string
  headers: Record<string, string>
} {
  const method = opts.method
  const region = opts.region
  const bucketName = opts.bucketName
  let objectName = opts.objectName
  const headers = opts.headers
  const query = opts.query

  let reqOptions = {
    method,
    headers: {} as RequestHeaders,
    protocol: this.protocol,
    // If custom transportAgent was supplied earlier, we'll inject it here
    agent: this.transportAgent,
  }

  // Verify if virtual host supported.
  let virtualHostStyle
  if (bucketName) {
    virtualHostStyle = isVirtualHostStyle(this.host, this.protocol, bucketName, this.pathStyle)
  }

  let path = '/'
  let host = this.host

  let port: undefined | number
  if (this.port) {
    port = this.port
  }

  // Escape the object key so it is safe inside a URI path.
  if (objectName) {
    objectName = uriResourceEscape(objectName)
  }

  // For Amazon S3 endpoint, get endpoint based on region.
  if (isAmazonEndpoint(host)) {
    const accelerateEndPoint = this.getAccelerateEndPointIfSet(bucketName, objectName)
    if (accelerateEndPoint) {
      host = `${accelerateEndPoint}`
    } else {
      host = getS3Endpoint(region)
    }
  }

  if (virtualHostStyle && !opts.pathStyle) {
    // For all hosts which support virtual host style, `bucketName`
    // is part of the hostname in the following format:
    //
    //  var host = 'bucketName.example.com'
    //
    if (bucketName) {
      host = `${bucketName}.${host}`
    }
    if (objectName) {
      path = `/${objectName}`
    }
  } else {
    // For all S3 compatible storage services we will fallback to
    // path style requests, where `bucketName` is part of the URI
    // path.
    if (bucketName) {
      path = `/${bucketName}`
    }
    if (objectName) {
      path = `/${bucketName}/${objectName}`
    }
  }

  if (query) {
    path += `?${query}`
  }
  reqOptions.headers.host = host
  // Only include the port in the Host header when it is not the protocol default.
  if ((reqOptions.protocol === 'http:' && port !== 80) || (reqOptions.protocol === 'https:' && port !== 443)) {
    reqOptions.headers.host = joinHostPort(host, port)
  }

  reqOptions.headers['user-agent'] = this.userAgent
  if (headers) {
    // have all header keys in lower case - to make signing easy
    for (const [k, v] of Object.entries(headers)) {
      reqOptions.headers[k.toLowerCase()] = v
    }
  }

  // Use any request option specified in client.setRequestOptions()
  reqOptions = Object.assign({}, this.reqOptions, reqOptions)

  return {
    ...reqOptions,
    // Drop undefined header values and stringify the rest for http.request().
    headers: _.mapValues(_.pickBy(reqOptions.headers, isDefined), (v) => v.toString()),
    host,
    port,
    path,
  } satisfies https.RequestOptions
}
|
|
571
|
+
|
|
572
|
+
/**
 * Installs a dynamic credentials provider and immediately refreshes
 * credentials from it, so configuration errors surface right away.
 *
 * @throws Error when the argument is not a CredentialProvider instance,
 *   or when fetching credentials from it fails
 */
public async setCredentialsProvider(credentialsProvider: CredentialProvider) {
  const isProvider = credentialsProvider instanceof CredentialProvider
  if (!isProvider) {
    throw new Error('Unable to get credentials. Expected instance of CredentialProvider')
  }
  this.credentialsProvider = credentialsProvider
  await this.checkAndRefreshCreds()
}
|
|
579
|
+
|
|
580
|
+
/**
 * Refreshes accessKey/secretKey/sessionToken from the configured
 * credentials provider. A no-op when no provider is installed.
 *
 * @throws Error (with the original failure as `cause`) when the provider fails
 */
private async checkAndRefreshCreds() {
  // Nothing to refresh without a provider.
  if (!this.credentialsProvider) {
    return
  }
  try {
    const creds = await this.credentialsProvider.getCredentials()
    this.accessKey = creds.getAccessKey()
    this.secretKey = creds.getSecretKey()
    this.sessionToken = creds.getSessionToken()
  } catch (e) {
    throw new Error(`Unable to get credentials: ${e}`, { cause: e })
  }
}
|
|
592
|
+
|
|
593
|
+
// Destination for request/response tracing; undefined disables logging.
private logStream?: stream.Writable

/**
 * log the request, response, error
 *
 * Writes the request line and headers, the response status and headers,
 * and a JSON dump of any error to the configured trace stream. The
 * `authorization` header's signature is redacted before being written.
 *
 * @throws TypeError when any argument has an unexpected type
 */
private logHTTP(reqOptions: IRequest, response: http.IncomingMessage | null, err?: unknown) {
  // if no logStream available return.
  if (!this.logStream) {
    return
  }
  if (!isObject(reqOptions)) {
    throw new TypeError('reqOptions should be of type "object"')
  }
  if (response && !isReadableStream(response)) {
    throw new TypeError('response should be of type "Stream"')
  }
  if (err && !(err instanceof Error)) {
    throw new TypeError('err should be of type "Error"')
  }
  const logStream = this.logStream
  // Built once here instead of once per header (original rebuilt it in the loop).
  const redactor = /Signature=([0-9a-f]+)/
  const logHeaders = (headers: RequestHeaders) => {
    Object.entries(headers).forEach(([k, v]) => {
      // Never log a usable signature (strict equality instead of the original loose ==).
      if (k === 'authorization' && isString(v)) {
        v = v.replace(redactor, 'Signature=**REDACTED**')
      }
      logStream.write(`${k}: ${v}\n`)
    })
    logStream.write('\n')
  }
  logStream.write(`REQUEST: ${reqOptions.method} ${reqOptions.path}\n`)
  logHeaders(reqOptions.headers)
  if (response) {
    // Use the captured logStream consistently (original mixed this.logStream here).
    logStream.write(`RESPONSE: ${response.statusCode}\n`)
    logHeaders(response.headers as RequestHeaders)
  }
  if (err) {
    logStream.write('ERROR BODY:\n')
    const errJSON = JSON.stringify(err, null, '\t')
    logStream.write(`${errJSON}\n`)
  }
}
|
|
637
|
+
|
|
638
|
+
/**
|
|
639
|
+
* Enable tracing
|
|
640
|
+
*/
|
|
641
|
+
public traceOn(stream?: stream.Writable) {
|
|
642
|
+
if (!stream) {
|
|
643
|
+
stream = process.stdout
|
|
644
|
+
}
|
|
645
|
+
this.logStream = stream
|
|
646
|
+
}
|
|
647
|
+
|
|
648
|
+
/**
 * Disable tracing
 *
 * Clears the trace stream installed by traceOn(); logHTTP becomes a no-op.
 */
public traceOff() {
  this.logStream = undefined
}
|
|
654
|
+
|
|
655
|
+
/**
|
|
656
|
+
* makeRequest is the primitive used by the apis for making S3 requests.
|
|
657
|
+
* payload can be empty string in case of no payload.
|
|
658
|
+
* statusCode is the expected statusCode. If response.statusCode does not match
|
|
659
|
+
* we parse the XML error and call the callback with the error message.
|
|
660
|
+
*
|
|
661
|
+
* A valid region is passed by the calls - listBuckets, makeBucket and getBucketRegion.
|
|
662
|
+
*
|
|
663
|
+
* @internal
|
|
664
|
+
*/
|
|
665
|
+
async makeRequestAsync(
|
|
666
|
+
options: RequestOption,
|
|
667
|
+
payload: Binary = '',
|
|
668
|
+
expectedCodes: number[] = [200],
|
|
669
|
+
region = '',
|
|
670
|
+
): Promise<http.IncomingMessage> {
|
|
671
|
+
if (!isObject(options)) {
|
|
672
|
+
throw new TypeError('options should be of type "object"')
|
|
673
|
+
}
|
|
674
|
+
if (!isString(payload) && !isObject(payload)) {
|
|
675
|
+
// Buffer is of type 'object'
|
|
676
|
+
throw new TypeError('payload should be of type "string" or "Buffer"')
|
|
677
|
+
}
|
|
678
|
+
expectedCodes.forEach((statusCode) => {
|
|
679
|
+
if (!isNumber(statusCode)) {
|
|
680
|
+
throw new TypeError('statusCode should be of type "number"')
|
|
681
|
+
}
|
|
682
|
+
})
|
|
683
|
+
if (!isString(region)) {
|
|
684
|
+
throw new TypeError('region should be of type "string"')
|
|
685
|
+
}
|
|
686
|
+
if (!options.headers) {
|
|
687
|
+
options.headers = {}
|
|
688
|
+
}
|
|
689
|
+
if (options.method === 'POST' || options.method === 'PUT' || options.method === 'DELETE') {
|
|
690
|
+
options.headers['content-length'] = payload.length.toString()
|
|
691
|
+
}
|
|
692
|
+
const sha256sum = this.enableSHA256 ? toSha256(payload) : ''
|
|
693
|
+
return this.makeRequestStreamAsync(options, payload, sha256sum, expectedCodes, region)
|
|
694
|
+
}
|
|
695
|
+
|
|
696
|
+
/**
|
|
697
|
+
* new request with promise
|
|
698
|
+
*
|
|
699
|
+
* No need to drain response, response body is not valid
|
|
700
|
+
*/
|
|
701
|
+
async makeRequestAsyncOmit(
|
|
702
|
+
options: RequestOption,
|
|
703
|
+
payload: Binary = '',
|
|
704
|
+
statusCodes: number[] = [200],
|
|
705
|
+
region = '',
|
|
706
|
+
): Promise<Omit<http.IncomingMessage, 'on'>> {
|
|
707
|
+
const res = await this.makeRequestAsync(options, payload, statusCodes, region)
|
|
708
|
+
await drainResponse(res)
|
|
709
|
+
return res
|
|
710
|
+
}
|
|
711
|
+
|
|
712
|
+
/**
 * makeRequestStream will be used directly instead of makeRequest in case the payload
 * is available as a stream. for ex. putObject
 *
 * Validates arguments, refreshes credentials, resolves the target region,
 * signs the request (SigV4) unless anonymous, sends it with retry, and
 * converts a non-expected status code into a parsed S3 error.
 *
 * @internal
 */
async makeRequestStreamAsync(
  options: RequestOption,
  body: stream.Readable | Binary,
  sha256sum: string,
  statusCodes: number[],
  region: string,
): Promise<http.IncomingMessage> {
  if (!isObject(options)) {
    throw new TypeError('options should be of type "object"')
  }
  if (!(Buffer.isBuffer(body) || typeof body === 'string' || isReadableStream(body))) {
    throw new errors.InvalidArgumentError(
      `stream should be a Buffer, string or readable Stream, got ${typeof body} instead`,
    )
  }
  if (!isString(sha256sum)) {
    throw new TypeError('sha256sum should be of type "string"')
  }
  statusCodes.forEach((statusCode) => {
    if (!isNumber(statusCode)) {
      throw new TypeError('statusCode should be of type "number"')
    }
  })
  if (!isString(region)) {
    throw new TypeError('region should be of type "string"')
  }
  // sha256sum will be empty for anonymous or https requests
  if (!this.enableSHA256 && sha256sum.length !== 0) {
    throw new errors.InvalidArgumentError(`sha256sum expected to be empty for anonymous or https requests`)
  }
  // sha256sum should be valid for non-anonymous http requests.
  // (64 hex chars == a full SHA-256 digest)
  if (this.enableSHA256 && sha256sum.length !== 64) {
    throw new errors.InvalidArgumentError(`Invalid sha256sum : ${sha256sum}`)
  }

  // Make sure credentials are current before signing (e.g. STS providers).
  await this.checkAndRefreshCreds()

  // Resolve the region from the bucket when the caller did not supply one.
  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
  region = region || (await this.getBucketRegionAsync(options.bucketName!))

  const reqOptions = this.getRequestOptions({ ...options, region })
  if (!this.anonymous) {
    // For non-anonymous https requests sha256sum is 'UNSIGNED-PAYLOAD' for signature calculation.
    if (!this.enableSHA256) {
      sha256sum = 'UNSIGNED-PAYLOAD'
    }
    const date = new Date()
    reqOptions.headers['x-amz-date'] = makeDateLong(date)
    reqOptions.headers['x-amz-content-sha256'] = sha256sum
    if (this.sessionToken) {
      reqOptions.headers['x-amz-security-token'] = this.sessionToken
    }
    // SigV4 signature must be computed after all signed headers are in place.
    reqOptions.headers.authorization = signV4(reqOptions, this.accessKey, this.secretKey, region, date, sha256sum)
  }

  // Retry count of 0 disables retry entirely.
  const response = await requestWithRetry(
    this.transport,
    reqOptions,
    body,
    this.retryOptions.disableRetry === true ? 0 : this.retryOptions.maximumRetryCount,
    this.retryOptions.baseDelayMs,
    this.retryOptions.maximumDelayMs,
  )
  if (!response.statusCode) {
    throw new Error("BUG: response doesn't have a statusCode")
  }

  if (!statusCodes.includes(response.statusCode)) {
    // For an incorrect region, S3 server always sends back 400.
    // But we will do cache invalidation for all errors so that,
    // in future, if AWS S3 decides to send a different status code or
    // XML error code we will still work fine.
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    delete this.regionMap[options.bucketName!]

    const err = await xmlParsers.parseResponseError(response)
    this.logHTTP(reqOptions, response, err)
    throw err
  }

  this.logHTTP(reqOptions, response)

  return response
}
|
|
802
|
+
|
|
803
|
+
/**
 * gets the region of the bucket
 *
 * Resolution order: constructor-configured region, then the per-bucket
 * cache, then a GET ?location request. In browser environments a failing
 * virtual-host-style probe may still reveal the region via the error XML.
 *
 * @param bucketName - bucket whose region is being resolved
 * @returns the resolved region (cached for subsequent calls)
 */
async getBucketRegionAsync(bucketName: string): Promise<string> {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError(`Invalid bucket name : ${bucketName}`)
  }

  // Region is set with constructor, return the region right here.
  if (this.region) {
    return this.region
  }

  // Serve from the per-bucket cache when a previous lookup succeeded.
  const cached = this.regionMap[bucketName]
  if (cached) {
    return cached
  }

  // Parse the ?location response body and cache the result.
  const extractRegionAsync = async (response: http.IncomingMessage) => {
    const body = await readAsString(response)
    const region = xmlParsers.parseBucketRegion(body) || DEFAULT_REGION
    this.regionMap[bucketName] = region
    return region
  }

  const method = 'GET'
  const query = 'location'
  // `getBucketLocation` behaves differently in following ways for
  // different environments.
  //
  // - For nodejs env we default to path style requests.
  // - For browser env path style requests on buckets yields CORS
  //   error. To circumvent this problem we make a virtual host
  //   style request signed with 'us-east-1'. This request fails
  //   with an error 'AuthorizationHeaderMalformed', additionally
  //   the error XML also provides Region of the bucket. To validate
  //   this region is proper we retry the same request with the newly
  //   obtained region.
  const pathStyle = this.pathStyle && !isBrowser
  let region: string
  try {
    const res = await this.makeRequestAsync({ method, bucketName, query, pathStyle }, '', [200], DEFAULT_REGION)
    return extractRegionAsync(res)
  } catch (e) {
    // make alignment with mc cli
    // AccessDenied without a region hint: fall back to the default region.
    if (e instanceof errors.S3Error) {
      const errCode = e.code
      const errRegion = e.region
      if (errCode === 'AccessDenied' && !errRegion) {
        return DEFAULT_REGION
      }
    }
    // Any error other than the expected malformed-header probe is fatal.
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore
    if (!(e.name === 'AuthorizationHeaderMalformed')) {
      throw e
    }
    // The error XML carries the bucket's real region; retry with it below.
    // @ts-expect-error we set extra properties on error object
    region = e.Region as string
    if (!region) {
      throw e
    }
  }

  // Second attempt, signed with the region learned from the error response.
  const res = await this.makeRequestAsync({ method, bucketName, query, pathStyle }, '', [200], region)
  return await extractRegionAsync(res)
}
|
|
873
|
+
|
|
874
|
+
/**
 * makeRequest is the primitive used by the apis for making S3 requests.
 * payload can be empty string in case of no payload.
 * statusCode is the expected statusCode. If response.statusCode does not match
 * we parse the XML error and call the callback with the error message.
 * A valid region is passed by the calls - listBuckets, makeBucket and
 * getBucketRegion.
 *
 * @deprecated use `makeRequestAsync` instead
 */
makeRequest(
  options: RequestOption,
  payload: Binary = '',
  expectedCodes: number[] = [200],
  region = '',
  returnResponse: boolean,
  cb: (cb: unknown, result: http.IncomingMessage) => void,
) {
  // Pick the promise variant: callers that want to read the body get the
  // undrained response, others get the drained (omit) variant.
  const pending: Promise<http.IncomingMessage> = returnResponse
    ? this.makeRequestAsync(options, payload, expectedCodes, region)
    : // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-expect-error compatible for old behaviour
      this.makeRequestAsyncOmit(options, payload, expectedCodes, region)

  // Bridge the promise back into the legacy callback convention.
  pending.then(
    (result) => cb(null, result),
    (err) => {
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore
      cb(err)
    },
  )
}
|
|
910
|
+
|
|
911
|
+
/**
 * makeRequestStream will be used directly instead of makeRequest in case the payload
 * is available as a stream. for ex. putObject
 *
 * @deprecated use `makeRequestStreamAsync` instead
 */
makeRequestStream(
  options: RequestOption,
  stream: stream.Readable | Buffer,
  sha256sum: string,
  statusCodes: number[],
  region: string,
  returnResponse: boolean,
  cb: (cb: unknown, result: http.IncomingMessage) => void,
) {
  // Run the async variant, draining the body when the caller does not
  // want to read the response itself.
  const run = async () => {
    const response = await this.makeRequestStreamAsync(options, stream, sha256sum, statusCodes, region)
    if (!returnResponse) {
      await drainResponse(response)
    }
    return response
  }

  // Bridge back into the legacy callback convention.
  run().then(
    (result) => cb(null, result),
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore
    (err) => cb(err),
  )
}
|
|
942
|
+
|
|
943
|
+
/**
 * Callback-style wrapper around `getBucketRegionAsync`.
 *
 * @deprecated use `getBucketRegionAsync` instead
 */
getBucketRegion(bucketName: string, cb: (err: unknown, region: string) => void) {
  const onResolved = (region: string) => cb(null, region)
  const onRejected = (err: unknown) => {
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore
    cb(err)
  }
  return this.getBucketRegionAsync(bucketName).then(onResolved, onRejected)
}
|
|
954
|
+
|
|
955
|
+
// Bucket operations
|
|
956
|
+
|
|
957
|
+
/**
|
|
958
|
+
* Creates the bucket `bucketName`.
|
|
959
|
+
*
|
|
960
|
+
*/
|
|
961
|
+
async makeBucket(bucketName: string, region: Region = '', makeOpts?: MakeBucketOpt): Promise<void> {
|
|
962
|
+
if (!isValidBucketName(bucketName)) {
|
|
963
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
964
|
+
}
|
|
965
|
+
// Backward Compatibility
|
|
966
|
+
if (isObject(region)) {
|
|
967
|
+
makeOpts = region
|
|
968
|
+
region = ''
|
|
969
|
+
}
|
|
970
|
+
|
|
971
|
+
if (!isString(region)) {
|
|
972
|
+
throw new TypeError('region should be of type "string"')
|
|
973
|
+
}
|
|
974
|
+
if (makeOpts && !isObject(makeOpts)) {
|
|
975
|
+
throw new TypeError('makeOpts should be of type "object"')
|
|
976
|
+
}
|
|
977
|
+
|
|
978
|
+
let payload = ''
|
|
979
|
+
|
|
980
|
+
// Region already set in constructor, validate if
|
|
981
|
+
// caller requested bucket location is same.
|
|
982
|
+
if (region && this.region) {
|
|
983
|
+
if (region !== this.region) {
|
|
984
|
+
throw new errors.InvalidArgumentError(`Configured region ${this.region}, requested ${region}`)
|
|
985
|
+
}
|
|
986
|
+
}
|
|
987
|
+
// sending makeBucket request with XML containing 'us-east-1' fails. For
|
|
988
|
+
// default region server expects the request without body
|
|
989
|
+
if (region && region !== DEFAULT_REGION) {
|
|
990
|
+
payload = xml.buildObject({
|
|
991
|
+
CreateBucketConfiguration: {
|
|
992
|
+
$: { xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/' },
|
|
993
|
+
LocationConstraint: region,
|
|
994
|
+
},
|
|
995
|
+
})
|
|
996
|
+
}
|
|
997
|
+
const method = 'PUT'
|
|
998
|
+
const headers: RequestHeaders = {}
|
|
999
|
+
|
|
1000
|
+
if (makeOpts && makeOpts.ObjectLocking) {
|
|
1001
|
+
headers['x-amz-bucket-object-lock-enabled'] = true
|
|
1002
|
+
}
|
|
1003
|
+
|
|
1004
|
+
// For custom region clients default to custom region specified in client constructor
|
|
1005
|
+
const finalRegion = this.region || region || DEFAULT_REGION
|
|
1006
|
+
|
|
1007
|
+
const requestOpt: RequestOption = { method, bucketName, headers }
|
|
1008
|
+
|
|
1009
|
+
try {
|
|
1010
|
+
await this.makeRequestAsyncOmit(requestOpt, payload, [200], finalRegion)
|
|
1011
|
+
} catch (err: unknown) {
|
|
1012
|
+
if (region === '' || region === DEFAULT_REGION) {
|
|
1013
|
+
if (err instanceof errors.S3Error) {
|
|
1014
|
+
const errCode = err.code
|
|
1015
|
+
const errRegion = err.region
|
|
1016
|
+
if (errCode === 'AuthorizationHeaderMalformed' && errRegion !== '') {
|
|
1017
|
+
// Retry with region returned as part of error
|
|
1018
|
+
await this.makeRequestAsyncOmit(requestOpt, payload, [200], errCode)
|
|
1019
|
+
}
|
|
1020
|
+
}
|
|
1021
|
+
}
|
|
1022
|
+
throw err
|
|
1023
|
+
}
|
|
1024
|
+
}
|
|
1025
|
+
|
|
1026
|
+
/**
|
|
1027
|
+
* To check if a bucket already exists.
|
|
1028
|
+
*/
|
|
1029
|
+
async bucketExists(bucketName: string): Promise<boolean> {
|
|
1030
|
+
if (!isValidBucketName(bucketName)) {
|
|
1031
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1032
|
+
}
|
|
1033
|
+
const method = 'HEAD'
|
|
1034
|
+
try {
|
|
1035
|
+
await this.makeRequestAsyncOmit({ method, bucketName })
|
|
1036
|
+
} catch (err) {
|
|
1037
|
+
// @ts-ignore
|
|
1038
|
+
if (err.code === 'NoSuchBucket' || err.code === 'NotFound') {
|
|
1039
|
+
return false
|
|
1040
|
+
}
|
|
1041
|
+
throw err
|
|
1042
|
+
}
|
|
1043
|
+
|
|
1044
|
+
return true
|
|
1045
|
+
}
|
|
1046
|
+
|
|
1047
|
+
async removeBucket(bucketName: string): Promise<void>
|
|
1048
|
+
|
|
1049
|
+
/**
|
|
1050
|
+
* @deprecated use promise style API
|
|
1051
|
+
*/
|
|
1052
|
+
removeBucket(bucketName: string, callback: NoResultCallback): void
|
|
1053
|
+
|
|
1054
|
+
async removeBucket(bucketName: string): Promise<void> {
|
|
1055
|
+
if (!isValidBucketName(bucketName)) {
|
|
1056
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1057
|
+
}
|
|
1058
|
+
const method = 'DELETE'
|
|
1059
|
+
await this.makeRequestAsyncOmit({ method, bucketName }, '', [204])
|
|
1060
|
+
delete this.regionMap[bucketName]
|
|
1061
|
+
}
|
|
1062
|
+
|
|
1063
|
+
/**
|
|
1064
|
+
* Callback is called with readable stream of the object content.
|
|
1065
|
+
*/
|
|
1066
|
+
async getObject(bucketName: string, objectName: string, getOpts?: GetObjectOpts): Promise<stream.Readable> {
|
|
1067
|
+
if (!isValidBucketName(bucketName)) {
|
|
1068
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1069
|
+
}
|
|
1070
|
+
if (!isValidObjectName(objectName)) {
|
|
1071
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1072
|
+
}
|
|
1073
|
+
return this.getPartialObject(bucketName, objectName, 0, 0, getOpts)
|
|
1074
|
+
}
|
|
1075
|
+
|
|
1076
|
+
/**
 * Callback is called with readable stream of the partial object content.
 * @param bucketName
 * @param objectName
 * @param offset - byte offset to start reading from
 * @param length - length of the object that will be read in the stream (optional, if not specified we read the rest of the file from the offset)
 * @param getOpts - optional SSE-C headers and other get options (forwarded
 *                  into the query string as-is)
 */
async getPartialObject(
  bucketName: string,
  objectName: string,
  offset: number,
  length = 0,
  getOpts?: GetObjectOpts,
): Promise<stream.Readable> {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }
  if (!isValidObjectName(objectName)) {
    throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
  }
  if (!isNumber(offset)) {
    throw new TypeError('offset should be of type "number"')
  }
  if (!isNumber(length)) {
    throw new TypeError('length should be of type "number"')
  }

  // Build an HTTP Range header: `bytes=<start>-` or `bytes=<start>-<end>`
  // where <end> = offset + length - 1 (Range is end-inclusive).
  let range = ''
  if (offset || length) {
    if (offset) {
      range = `bytes=${+offset}-`
    } else {
      range = 'bytes=0-'
      offset = 0
    }
    if (length) {
      range += `${+length + offset - 1}`
    }
  }

  let query = ''
  let headers: RequestHeaders = {
    ...(range !== '' && { range }),
  }

  if (getOpts) {
    // SSE-C options travel as headers; remaining options go into the query.
    const sseHeaders: Record<string, string> = {
      ...(getOpts.SSECustomerAlgorithm && {
        'X-Amz-Server-Side-Encryption-Customer-Algorithm': getOpts.SSECustomerAlgorithm,
      }),
      ...(getOpts.SSECustomerKey && { 'X-Amz-Server-Side-Encryption-Customer-Key': getOpts.SSECustomerKey }),
      ...(getOpts.SSECustomerKeyMD5 && {
        'X-Amz-Server-Side-Encryption-Customer-Key-MD5': getOpts.SSECustomerKeyMD5,
      }),
    }
    // NOTE(review): the full getOpts object (including the SSE fields) is
    // stringified into the query here — confirm the server ignores unknown
    // query params before changing this.
    query = qs.stringify(getOpts)
    headers = {
      ...prependXAMZMeta(sseHeaders),
      ...headers,
    }
  }

  // Ranged reads answer with 206 Partial Content.
  const expectedStatusCodes = [200]
  if (range) {
    expectedStatusCodes.push(206)
  }
  const method = 'GET'

  return await this.makeRequestAsync({ method, bucketName, objectName, headers, query }, '', expectedStatusCodes)
}
|
|
1147
|
+
|
|
1148
|
+
/**
 * download object content to a file.
 * This method will create a temp file named `${filePath}.${base64(etag)}.part.s3` when downloading.
 * If a matching part file already exists, the download resumes from its size.
 *
 * @param bucketName - name of the bucket
 * @param objectName - name of the object
 * @param filePath - path to which the object data will be written to
 * @param getOpts - Optional object get option
 */
async fGetObject(bucketName: string, objectName: string, filePath: string, getOpts?: GetObjectOpts): Promise<void> {
  // Input validation.
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }
  if (!isValidObjectName(objectName)) {
    throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
  }
  if (!isString(filePath)) {
    throw new TypeError('filePath should be of type "string"')
  }

  // Download into an etag-tagged temp file; resumes a partial download when
  // a temp file for the same etag already exists.
  const downloadToTmpFile = async (): Promise<string> => {
    let partFileStream: stream.Writable
    const objStat = await this.statObject(bucketName, objectName, getOpts)
    // The etag in the temp-file name ties the partial data to this exact
    // object version; a changed object gets a fresh temp file.
    const encodedEtag = Buffer.from(objStat.etag).toString('base64')
    const partFile = `${filePath}.${encodedEtag}.part.s3`

    await fsp.mkdir(path.dirname(filePath), { recursive: true })

    let offset = 0
    try {
      const stats = await fsp.stat(partFile)
      if (objStat.size === stats.size) {
        // Temp file is already complete; skip the download.
        return partFile
      }
      // Resume: append to the existing partial file from its current size.
      offset = stats.size
      partFileStream = fs.createWriteStream(partFile, { flags: 'a' })
    } catch (e) {
      if (e instanceof Error && (e as unknown as { code: string }).code === 'ENOENT') {
        // file not exist
        partFileStream = fs.createWriteStream(partFile, { flags: 'w' })
      } else {
        // other error, maybe access deny
        throw e
      }
    }

    // length 0 = read from `offset` to the end of the object.
    const downloadStream = await this.getPartialObject(bucketName, objectName, offset, 0, getOpts)

    await streamPromise.pipeline(downloadStream, partFileStream)
    const stats = await fsp.stat(partFile)
    if (stats.size === objStat.size) {
      return partFile
    }

    throw new Error('Size mismatch between downloaded file and the object')
  }

  // Atomically move the completed temp file into place.
  const partFile = await downloadToTmpFile()
  await fsp.rename(partFile, filePath)
}
|
|
1209
|
+
|
|
1210
|
+
/**
|
|
1211
|
+
* Stat information of the object.
|
|
1212
|
+
*/
|
|
1213
|
+
async statObject(bucketName: string, objectName: string, statOpts?: StatObjectOpts): Promise<BucketItemStat> {
|
|
1214
|
+
const statOptDef = statOpts || {}
|
|
1215
|
+
if (!isValidBucketName(bucketName)) {
|
|
1216
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1217
|
+
}
|
|
1218
|
+
if (!isValidObjectName(objectName)) {
|
|
1219
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1220
|
+
}
|
|
1221
|
+
|
|
1222
|
+
if (!isObject(statOptDef)) {
|
|
1223
|
+
throw new errors.InvalidArgumentError('statOpts should be of type "object"')
|
|
1224
|
+
}
|
|
1225
|
+
|
|
1226
|
+
const query = qs.stringify(statOptDef)
|
|
1227
|
+
const method = 'HEAD'
|
|
1228
|
+
const res = await this.makeRequestAsyncOmit({ method, bucketName, objectName, query })
|
|
1229
|
+
|
|
1230
|
+
return {
|
|
1231
|
+
size: parseInt(res.headers['content-length'] as string),
|
|
1232
|
+
metaData: extractMetadata(res.headers as ResponseHeader),
|
|
1233
|
+
lastModified: new Date(res.headers['last-modified'] as string),
|
|
1234
|
+
versionId: getVersionId(res.headers as ResponseHeader),
|
|
1235
|
+
etag: sanitizeETag(res.headers.etag),
|
|
1236
|
+
}
|
|
1237
|
+
}
|
|
1238
|
+
|
|
1239
|
+
async removeObject(bucketName: string, objectName: string, removeOpts?: RemoveOptions): Promise<void> {
|
|
1240
|
+
if (!isValidBucketName(bucketName)) {
|
|
1241
|
+
throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
|
|
1242
|
+
}
|
|
1243
|
+
if (!isValidObjectName(objectName)) {
|
|
1244
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1245
|
+
}
|
|
1246
|
+
|
|
1247
|
+
if (removeOpts && !isObject(removeOpts)) {
|
|
1248
|
+
throw new errors.InvalidArgumentError('removeOpts should be of type "object"')
|
|
1249
|
+
}
|
|
1250
|
+
|
|
1251
|
+
const method = 'DELETE'
|
|
1252
|
+
|
|
1253
|
+
const headers: RequestHeaders = {}
|
|
1254
|
+
if (removeOpts?.governanceBypass) {
|
|
1255
|
+
headers['X-Amz-Bypass-Governance-Retention'] = true
|
|
1256
|
+
}
|
|
1257
|
+
if (removeOpts?.forceDelete) {
|
|
1258
|
+
headers['x-force-delete'] = true
|
|
1259
|
+
}
|
|
1260
|
+
|
|
1261
|
+
const queryParams: Record<string, string> = {}
|
|
1262
|
+
if (removeOpts?.versionId) {
|
|
1263
|
+
queryParams.versionId = `${removeOpts.versionId}`
|
|
1264
|
+
}
|
|
1265
|
+
const query = qs.stringify(queryParams)
|
|
1266
|
+
|
|
1267
|
+
await this.makeRequestAsyncOmit({ method, bucketName, objectName, headers, query }, '', [200, 204])
|
|
1268
|
+
}
|
|
1269
|
+
|
|
1270
|
+
// Calls implemented below are related to multipart.
|
|
1271
|
+
|
|
1272
|
+
/**
 * Stream the incomplete multipart uploads of a bucket as an object-mode
 * Readable. Each pushed item is either a common prefix or an upload record
 * whose `size` is the sum of its already-uploaded parts.
 *
 * @param bucket - bucket to list
 * @param prefix - only uploads whose key starts with this prefix
 * @param recursive - when false, group keys by '/' delimiter
 */
listIncompleteUploads(
  bucket: string,
  prefix: string,
  recursive: boolean,
): BucketStream<IncompleteUploadedBucketItem> {
  if (prefix === undefined) {
    prefix = ''
  }
  if (recursive === undefined) {
    recursive = false
  }
  if (!isValidBucketName(bucket)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucket)
  }
  if (!isValidPrefix(prefix)) {
    throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
  }
  if (!isBoolean(recursive)) {
    throw new TypeError('recursive should be of type "boolean"')
  }
  // Empty delimiter = flat (recursive) listing; '/' groups by "directory".
  const delimiter = recursive ? '' : '/'
  // Pagination cursors and buffered results shared by successive _read calls.
  let keyMarker = ''
  let uploadIdMarker = ''
  const uploads: unknown[] = []
  let ended = false

  // TODO: refactor this with async/await and `stream.Readable.from`
  const readStream = new stream.Readable({ objectMode: true })
  readStream._read = () => {
    // push one upload info per _read()
    if (uploads.length) {
      return readStream.push(uploads.shift())
    }
    if (ended) {
      return readStream.push(null)
    }
    // Buffer is empty and more data may exist: fetch the next page.
    this.listIncompleteUploadsQuery(bucket, prefix, keyMarker, uploadIdMarker, delimiter).then(
      (result) => {
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        result.prefixes.forEach((prefix) => uploads.push(prefix))
        async.eachSeries(
          result.uploads,
          (upload, cb) => {
            // for each incomplete upload add the sizes of its uploaded parts
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            this.listParts(bucket, upload.key, upload.uploadId).then(
              (parts: Part[]) => {
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore
                upload.size = parts.reduce((acc, item) => acc + item.size, 0)
                uploads.push(upload)
                cb()
              },
              (err: Error) => cb(err),
            )
          },
          (err) => {
            if (err) {
              readStream.emit('error', err)
              return
            }
            // Advance the pagination cursors, or mark the listing finished.
            if (result.isTruncated) {
              keyMarker = result.nextKeyMarker
              uploadIdMarker = result.nextUploadIdMarker
            } else {
              ended = true
            }

            // Re-enter _read to push the freshly buffered items.
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            readStream._read()
          },
        )
      },
      (e) => {
        readStream.emit('error', e)
      },
    )
  }
  return readStream
}
|
|
1355
|
+
|
|
1356
|
+
/**
|
|
1357
|
+
* Called by listIncompleteUploads to fetch a batch of incomplete uploads.
|
|
1358
|
+
*/
|
|
1359
|
+
async listIncompleteUploadsQuery(
|
|
1360
|
+
bucketName: string,
|
|
1361
|
+
prefix: string,
|
|
1362
|
+
keyMarker: string,
|
|
1363
|
+
uploadIdMarker: string,
|
|
1364
|
+
delimiter: string,
|
|
1365
|
+
): Promise<ListMultipartResult> {
|
|
1366
|
+
if (!isValidBucketName(bucketName)) {
|
|
1367
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1368
|
+
}
|
|
1369
|
+
if (!isString(prefix)) {
|
|
1370
|
+
throw new TypeError('prefix should be of type "string"')
|
|
1371
|
+
}
|
|
1372
|
+
if (!isString(keyMarker)) {
|
|
1373
|
+
throw new TypeError('keyMarker should be of type "string"')
|
|
1374
|
+
}
|
|
1375
|
+
if (!isString(uploadIdMarker)) {
|
|
1376
|
+
throw new TypeError('uploadIdMarker should be of type "string"')
|
|
1377
|
+
}
|
|
1378
|
+
if (!isString(delimiter)) {
|
|
1379
|
+
throw new TypeError('delimiter should be of type "string"')
|
|
1380
|
+
}
|
|
1381
|
+
const queries = []
|
|
1382
|
+
queries.push(`prefix=${uriEscape(prefix)}`)
|
|
1383
|
+
queries.push(`delimiter=${uriEscape(delimiter)}`)
|
|
1384
|
+
|
|
1385
|
+
if (keyMarker) {
|
|
1386
|
+
queries.push(`key-marker=${uriEscape(keyMarker)}`)
|
|
1387
|
+
}
|
|
1388
|
+
if (uploadIdMarker) {
|
|
1389
|
+
queries.push(`upload-id-marker=${uploadIdMarker}`)
|
|
1390
|
+
}
|
|
1391
|
+
|
|
1392
|
+
const maxUploads = 1000
|
|
1393
|
+
queries.push(`max-uploads=${maxUploads}`)
|
|
1394
|
+
queries.sort()
|
|
1395
|
+
queries.unshift('uploads')
|
|
1396
|
+
let query = ''
|
|
1397
|
+
if (queries.length > 0) {
|
|
1398
|
+
query = `${queries.join('&')}`
|
|
1399
|
+
}
|
|
1400
|
+
const method = 'GET'
|
|
1401
|
+
const res = await this.makeRequestAsync({ method, bucketName, query })
|
|
1402
|
+
const body = await readAsString(res)
|
|
1403
|
+
return xmlParsers.parseListMultipart(body)
|
|
1404
|
+
}
|
|
1405
|
+
|
|
1406
|
+
/**
|
|
1407
|
+
* Initiate a new multipart upload.
|
|
1408
|
+
* @internal
|
|
1409
|
+
*/
|
|
1410
|
+
async initiateNewMultipartUpload(bucketName: string, objectName: string, headers: RequestHeaders): Promise<string> {
|
|
1411
|
+
if (!isValidBucketName(bucketName)) {
|
|
1412
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1413
|
+
}
|
|
1414
|
+
if (!isValidObjectName(objectName)) {
|
|
1415
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1416
|
+
}
|
|
1417
|
+
if (!isObject(headers)) {
|
|
1418
|
+
throw new errors.InvalidObjectNameError('contentType should be of type "object"')
|
|
1419
|
+
}
|
|
1420
|
+
const method = 'POST'
|
|
1421
|
+
const query = 'uploads'
|
|
1422
|
+
const res = await this.makeRequestAsync({ method, bucketName, objectName, query, headers })
|
|
1423
|
+
const body = await readAsBuffer(res)
|
|
1424
|
+
return parseInitiateMultipart(body.toString())
|
|
1425
|
+
}
|
|
1426
|
+
|
|
1427
|
+
/**
|
|
1428
|
+
* Internal Method to abort a multipart upload request in case of any errors.
|
|
1429
|
+
*
|
|
1430
|
+
* @param bucketName - Bucket Name
|
|
1431
|
+
* @param objectName - Object Name
|
|
1432
|
+
* @param uploadId - id of a multipart upload to cancel during compose object sequence.
|
|
1433
|
+
*/
|
|
1434
|
+
async abortMultipartUpload(bucketName: string, objectName: string, uploadId: string): Promise<void> {
|
|
1435
|
+
const method = 'DELETE'
|
|
1436
|
+
const query = `uploadId=${uploadId}`
|
|
1437
|
+
|
|
1438
|
+
const requestOptions = { method, bucketName, objectName: objectName, query }
|
|
1439
|
+
await this.makeRequestAsyncOmit(requestOptions, '', [204])
|
|
1440
|
+
}
|
|
1441
|
+
|
|
1442
|
+
async findUploadId(bucketName: string, objectName: string): Promise<string | undefined> {
|
|
1443
|
+
if (!isValidBucketName(bucketName)) {
|
|
1444
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1445
|
+
}
|
|
1446
|
+
if (!isValidObjectName(objectName)) {
|
|
1447
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1448
|
+
}
|
|
1449
|
+
|
|
1450
|
+
let latestUpload: ListMultipartResult['uploads'][number] | undefined
|
|
1451
|
+
let keyMarker = ''
|
|
1452
|
+
let uploadIdMarker = ''
|
|
1453
|
+
for (;;) {
|
|
1454
|
+
const result = await this.listIncompleteUploadsQuery(bucketName, objectName, keyMarker, uploadIdMarker, '')
|
|
1455
|
+
for (const upload of result.uploads) {
|
|
1456
|
+
if (upload.key === objectName) {
|
|
1457
|
+
if (!latestUpload || upload.initiated.getTime() > latestUpload.initiated.getTime()) {
|
|
1458
|
+
latestUpload = upload
|
|
1459
|
+
}
|
|
1460
|
+
}
|
|
1461
|
+
}
|
|
1462
|
+
if (result.isTruncated) {
|
|
1463
|
+
keyMarker = result.nextKeyMarker
|
|
1464
|
+
uploadIdMarker = result.nextUploadIdMarker
|
|
1465
|
+
continue
|
|
1466
|
+
}
|
|
1467
|
+
|
|
1468
|
+
break
|
|
1469
|
+
}
|
|
1470
|
+
return latestUpload?.uploadId
|
|
1471
|
+
}
|
|
1472
|
+
|
|
1473
|
+
  /**
   * this call will aggregate the parts on the server into a single object.
   *
   * @param bucketName - bucket holding the multipart upload
   * @param objectName - key of the object being assembled
   * @param uploadId - id of the multipart upload to complete
   * @param etags - part-number / etag pairs for every uploaded part, in order
   * @returns etag of the assembled object plus its versionId (null when versioning is off)
   */
  async completeMultipartUpload(
    bucketName: string,
    objectName: string,
    uploadId: string,
    etags: {
      part: number
      etag?: string
    }[],
  ): Promise<{ etag: string; versionId: string | null }> {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidObjectName(objectName)) {
      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
    }
    if (!isString(uploadId)) {
      throw new TypeError('uploadId should be of type "string"')
    }
    // NOTE: arrays pass isObject, so this accepts the documented Array input.
    if (!isObject(etags)) {
      throw new TypeError('etags should be of type "Array"')
    }

    if (!uploadId) {
      throw new errors.InvalidArgumentError('uploadId cannot be empty')
    }

    const method = 'POST'
    const query = `uploadId=${uriEscape(uploadId)}`

    // Build the CompleteMultipartUpload XML body listing every part.
    const builder = new xml2js.Builder()
    const payload = builder.buildObject({
      CompleteMultipartUpload: {
        $: {
          xmlns: 'http://s3.amazonaws.com/doc/2006-03-01/',
        },
        Part: etags.map((etag) => {
          return {
            PartNumber: etag.part,
            ETag: etag.etag,
          }
        }),
      },
    })

    const res = await this.makeRequestAsync({ method, bucketName, objectName, query }, payload)
    const body = await readAsBuffer(res)
    const result = parseCompleteMultipart(body.toString())
    if (!result) {
      throw new Error('BUG: failed to parse server response')
    }

    if (result.errCode) {
      // Multipart Complete API returns an error XML after a 200 http status
      throw new errors.S3Error(result.errMessage)
    }

    return {
      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      // @ts-ignore
      etag: result.etag as string,
      versionId: getVersionId(res.headers as ResponseHeader),
    }
  }
|
|
1539
|
+
|
|
1540
|
+
/**
|
|
1541
|
+
* Get part-info of all parts of an incomplete upload specified by uploadId.
|
|
1542
|
+
*/
|
|
1543
|
+
protected async listParts(bucketName: string, objectName: string, uploadId: string): Promise<UploadedPart[]> {
|
|
1544
|
+
if (!isValidBucketName(bucketName)) {
|
|
1545
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1546
|
+
}
|
|
1547
|
+
if (!isValidObjectName(objectName)) {
|
|
1548
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1549
|
+
}
|
|
1550
|
+
if (!isString(uploadId)) {
|
|
1551
|
+
throw new TypeError('uploadId should be of type "string"')
|
|
1552
|
+
}
|
|
1553
|
+
if (!uploadId) {
|
|
1554
|
+
throw new errors.InvalidArgumentError('uploadId cannot be empty')
|
|
1555
|
+
}
|
|
1556
|
+
|
|
1557
|
+
const parts: UploadedPart[] = []
|
|
1558
|
+
let marker = 0
|
|
1559
|
+
let result
|
|
1560
|
+
do {
|
|
1561
|
+
result = await this.listPartsQuery(bucketName, objectName, uploadId, marker)
|
|
1562
|
+
marker = result.marker
|
|
1563
|
+
parts.push(...result.parts)
|
|
1564
|
+
} while (result.isTruncated)
|
|
1565
|
+
|
|
1566
|
+
return parts
|
|
1567
|
+
}
|
|
1568
|
+
|
|
1569
|
+
/**
|
|
1570
|
+
* Called by listParts to fetch a batch of part-info
|
|
1571
|
+
*/
|
|
1572
|
+
private async listPartsQuery(bucketName: string, objectName: string, uploadId: string, marker: number) {
|
|
1573
|
+
if (!isValidBucketName(bucketName)) {
|
|
1574
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1575
|
+
}
|
|
1576
|
+
if (!isValidObjectName(objectName)) {
|
|
1577
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1578
|
+
}
|
|
1579
|
+
if (!isString(uploadId)) {
|
|
1580
|
+
throw new TypeError('uploadId should be of type "string"')
|
|
1581
|
+
}
|
|
1582
|
+
if (!isNumber(marker)) {
|
|
1583
|
+
throw new TypeError('marker should be of type "number"')
|
|
1584
|
+
}
|
|
1585
|
+
if (!uploadId) {
|
|
1586
|
+
throw new errors.InvalidArgumentError('uploadId cannot be empty')
|
|
1587
|
+
}
|
|
1588
|
+
|
|
1589
|
+
let query = `uploadId=${uriEscape(uploadId)}`
|
|
1590
|
+
if (marker) {
|
|
1591
|
+
query += `&part-number-marker=${marker}`
|
|
1592
|
+
}
|
|
1593
|
+
|
|
1594
|
+
const method = 'GET'
|
|
1595
|
+
const res = await this.makeRequestAsync({ method, bucketName, objectName, query })
|
|
1596
|
+
return xmlParsers.parseListParts(await readAsString(res))
|
|
1597
|
+
}
|
|
1598
|
+
|
|
1599
|
+
async listBuckets(): Promise<BucketItemFromList[]> {
|
|
1600
|
+
const method = 'GET'
|
|
1601
|
+
const regionConf = this.region || DEFAULT_REGION
|
|
1602
|
+
const httpRes = await this.makeRequestAsync({ method }, '', [200], regionConf)
|
|
1603
|
+
const xmlResult = await readAsString(httpRes)
|
|
1604
|
+
return xmlParsers.parseListBucket(xmlResult)
|
|
1605
|
+
}
|
|
1606
|
+
|
|
1607
|
+
/**
|
|
1608
|
+
* Calculate part size given the object size. Part size will be atleast this.partSize
|
|
1609
|
+
*/
|
|
1610
|
+
calculatePartSize(size: number) {
|
|
1611
|
+
if (!isNumber(size)) {
|
|
1612
|
+
throw new TypeError('size should be of type "number"')
|
|
1613
|
+
}
|
|
1614
|
+
if (size > this.maxObjectSize) {
|
|
1615
|
+
throw new TypeError(`size should not be more than ${this.maxObjectSize}`)
|
|
1616
|
+
}
|
|
1617
|
+
if (this.overRidePartSize) {
|
|
1618
|
+
return this.partSize
|
|
1619
|
+
}
|
|
1620
|
+
let partSize = this.partSize
|
|
1621
|
+
for (;;) {
|
|
1622
|
+
// while(true) {...} throws linting error.
|
|
1623
|
+
// If partSize is big enough to accomodate the object size, then use it.
|
|
1624
|
+
if (partSize * 10000 > size) {
|
|
1625
|
+
return partSize
|
|
1626
|
+
}
|
|
1627
|
+
// Try part sizes as 64MB, 80MB, 96MB etc.
|
|
1628
|
+
partSize += 16 * 1024 * 1024
|
|
1629
|
+
}
|
|
1630
|
+
}
|
|
1631
|
+
|
|
1632
|
+
/**
|
|
1633
|
+
* Uploads the object using contents from a file
|
|
1634
|
+
*/
|
|
1635
|
+
async fPutObject(bucketName: string, objectName: string, filePath: string, metaData?: ObjectMetaData) {
|
|
1636
|
+
if (!isValidBucketName(bucketName)) {
|
|
1637
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1638
|
+
}
|
|
1639
|
+
if (!isValidObjectName(objectName)) {
|
|
1640
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1641
|
+
}
|
|
1642
|
+
|
|
1643
|
+
if (!isString(filePath)) {
|
|
1644
|
+
throw new TypeError('filePath should be of type "string"')
|
|
1645
|
+
}
|
|
1646
|
+
if (metaData && !isObject(metaData)) {
|
|
1647
|
+
throw new TypeError('metaData should be of type "object"')
|
|
1648
|
+
}
|
|
1649
|
+
|
|
1650
|
+
// Inserts correct `content-type` attribute based on metaData and filePath
|
|
1651
|
+
metaData = insertContentType(metaData || {}, filePath)
|
|
1652
|
+
const stat = await fsp.stat(filePath)
|
|
1653
|
+
return await this.putObject(bucketName, objectName, fs.createReadStream(filePath), stat.size, metaData)
|
|
1654
|
+
}
|
|
1655
|
+
|
|
1656
|
+
/**
|
|
1657
|
+
* Uploading a stream, "Buffer" or "string".
|
|
1658
|
+
* It's recommended to pass `size` argument with stream.
|
|
1659
|
+
*/
|
|
1660
|
+
async putObject(
|
|
1661
|
+
bucketName: string,
|
|
1662
|
+
objectName: string,
|
|
1663
|
+
stream: stream.Readable | Buffer | string,
|
|
1664
|
+
size?: number,
|
|
1665
|
+
metaData?: ItemBucketMetadata,
|
|
1666
|
+
): Promise<UploadedObjectInfo> {
|
|
1667
|
+
if (!isValidBucketName(bucketName)) {
|
|
1668
|
+
throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
|
|
1669
|
+
}
|
|
1670
|
+
if (!isValidObjectName(objectName)) {
|
|
1671
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1672
|
+
}
|
|
1673
|
+
|
|
1674
|
+
// We'll need to shift arguments to the left because of metaData
|
|
1675
|
+
// and size being optional.
|
|
1676
|
+
if (isObject(size)) {
|
|
1677
|
+
metaData = size
|
|
1678
|
+
}
|
|
1679
|
+
// Ensures Metadata has appropriate prefix for A3 API
|
|
1680
|
+
const headers = prependXAMZMeta(metaData)
|
|
1681
|
+
if (typeof stream === 'string' || stream instanceof Buffer) {
|
|
1682
|
+
// Adapts the non-stream interface into a stream.
|
|
1683
|
+
size = stream.length
|
|
1684
|
+
stream = readableStream(stream)
|
|
1685
|
+
} else if (!isReadableStream(stream)) {
|
|
1686
|
+
throw new TypeError('third argument should be of type "stream.Readable" or "Buffer" or "string"')
|
|
1687
|
+
}
|
|
1688
|
+
|
|
1689
|
+
if (isNumber(size) && size < 0) {
|
|
1690
|
+
throw new errors.InvalidArgumentError(`size cannot be negative, given size: ${size}`)
|
|
1691
|
+
}
|
|
1692
|
+
|
|
1693
|
+
// Get the part size and forward that to the BlockStream. Default to the
|
|
1694
|
+
// largest block size possible if necessary.
|
|
1695
|
+
if (!isNumber(size)) {
|
|
1696
|
+
size = this.maxObjectSize
|
|
1697
|
+
}
|
|
1698
|
+
|
|
1699
|
+
// Get the part size and forward that to the BlockStream. Default to the
|
|
1700
|
+
// largest block size possible if necessary.
|
|
1701
|
+
if (size === undefined) {
|
|
1702
|
+
const statSize = await getContentLength(stream)
|
|
1703
|
+
if (statSize !== null) {
|
|
1704
|
+
size = statSize
|
|
1705
|
+
}
|
|
1706
|
+
}
|
|
1707
|
+
|
|
1708
|
+
if (!isNumber(size)) {
|
|
1709
|
+
// Backward compatibility
|
|
1710
|
+
size = this.maxObjectSize
|
|
1711
|
+
}
|
|
1712
|
+
if (size === 0) {
|
|
1713
|
+
return this.uploadBuffer(bucketName, objectName, headers, Buffer.from(''))
|
|
1714
|
+
}
|
|
1715
|
+
|
|
1716
|
+
const partSize = this.calculatePartSize(size)
|
|
1717
|
+
if (typeof stream === 'string' || Buffer.isBuffer(stream) || size <= partSize) {
|
|
1718
|
+
const buf = isReadableStream(stream) ? await readAsBuffer(stream) : Buffer.from(stream)
|
|
1719
|
+
return this.uploadBuffer(bucketName, objectName, headers, buf)
|
|
1720
|
+
}
|
|
1721
|
+
|
|
1722
|
+
return this.uploadStream(bucketName, objectName, headers, stream, partSize)
|
|
1723
|
+
}
|
|
1724
|
+
|
|
1725
|
+
  /**
   * method to upload buffer in one call
   *
   * @param bucketName - destination bucket
   * @param objectName - destination object key
   * @param headers - request headers; Content-Length/Content-MD5 are set here
   * @param buf - full object payload
   * @returns etag and versionId (null when versioning is off) of the stored object
   * @private
   */
  private async uploadBuffer(
    bucketName: string,
    objectName: string,
    headers: RequestHeaders,
    buf: Buffer,
  ): Promise<UploadedObjectInfo> {
    // sha256 is used when SHA-256 signing is enabled; otherwise Content-MD5
    // protects the payload.
    const { md5sum, sha256sum } = hashBinary(buf, this.enableSHA256)
    headers['Content-Length'] = buf.length
    if (!this.enableSHA256) {
      headers['Content-MD5'] = md5sum
    }
    const res = await this.makeRequestStreamAsync(
      {
        method: 'PUT',
        bucketName,
        objectName,
        headers,
      },
      buf,
      sha256sum,
      [200],
      '',
    )
    // Consume the response body so the connection can be reused.
    await drainResponse(res)
    return {
      etag: sanitizeETag(res.headers.etag),
      versionId: getVersionId(res.headers as ResponseHeader),
    }
  }
|
|
1758
|
+
|
|
1759
|
+
/**
|
|
1760
|
+
* upload stream with MultipartUpload
|
|
1761
|
+
* @private
|
|
1762
|
+
*/
|
|
1763
|
+
private async uploadStream(
|
|
1764
|
+
bucketName: string,
|
|
1765
|
+
objectName: string,
|
|
1766
|
+
headers: RequestHeaders,
|
|
1767
|
+
body: stream.Readable,
|
|
1768
|
+
partSize: number,
|
|
1769
|
+
): Promise<UploadedObjectInfo> {
|
|
1770
|
+
// A map of the previously uploaded chunks, for resuming a file upload. This
|
|
1771
|
+
// will be null if we aren't resuming an upload.
|
|
1772
|
+
const oldParts: Record<number, Part> = {}
|
|
1773
|
+
|
|
1774
|
+
// Keep track of the etags for aggregating the chunks together later. Each
|
|
1775
|
+
// etag represents a single chunk of the file.
|
|
1776
|
+
const eTags: Part[] = []
|
|
1777
|
+
|
|
1778
|
+
const previousUploadId = await this.findUploadId(bucketName, objectName)
|
|
1779
|
+
let uploadId: string
|
|
1780
|
+
if (!previousUploadId) {
|
|
1781
|
+
uploadId = await this.initiateNewMultipartUpload(bucketName, objectName, headers)
|
|
1782
|
+
} else {
|
|
1783
|
+
uploadId = previousUploadId
|
|
1784
|
+
const oldTags = await this.listParts(bucketName, objectName, previousUploadId)
|
|
1785
|
+
oldTags.forEach((e) => {
|
|
1786
|
+
oldParts[e.part] = e
|
|
1787
|
+
})
|
|
1788
|
+
}
|
|
1789
|
+
|
|
1790
|
+
const chunkier = new BlockStream2({ size: partSize, zeroPadding: false })
|
|
1791
|
+
|
|
1792
|
+
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
1793
|
+
const [_, o] = await Promise.all([
|
|
1794
|
+
new Promise((resolve, reject) => {
|
|
1795
|
+
body.pipe(chunkier).on('error', reject)
|
|
1796
|
+
chunkier.on('end', resolve).on('error', reject)
|
|
1797
|
+
}),
|
|
1798
|
+
(async () => {
|
|
1799
|
+
let partNumber = 1
|
|
1800
|
+
|
|
1801
|
+
for await (const chunk of chunkier) {
|
|
1802
|
+
const md5 = crypto.createHash('md5').update(chunk).digest()
|
|
1803
|
+
|
|
1804
|
+
const oldPart = oldParts[partNumber]
|
|
1805
|
+
if (oldPart) {
|
|
1806
|
+
if (oldPart.etag === md5.toString('hex')) {
|
|
1807
|
+
eTags.push({ part: partNumber, etag: oldPart.etag })
|
|
1808
|
+
partNumber++
|
|
1809
|
+
continue
|
|
1810
|
+
}
|
|
1811
|
+
}
|
|
1812
|
+
|
|
1813
|
+
partNumber++
|
|
1814
|
+
|
|
1815
|
+
// now start to upload missing part
|
|
1816
|
+
const options: RequestOption = {
|
|
1817
|
+
method: 'PUT',
|
|
1818
|
+
query: qs.stringify({ partNumber, uploadId }),
|
|
1819
|
+
headers: {
|
|
1820
|
+
'Content-Length': chunk.length,
|
|
1821
|
+
'Content-MD5': md5.toString('base64'),
|
|
1822
|
+
},
|
|
1823
|
+
bucketName,
|
|
1824
|
+
objectName,
|
|
1825
|
+
}
|
|
1826
|
+
|
|
1827
|
+
const response = await this.makeRequestAsyncOmit(options, chunk)
|
|
1828
|
+
|
|
1829
|
+
let etag = response.headers.etag
|
|
1830
|
+
if (etag) {
|
|
1831
|
+
etag = etag.replace(/^"/, '').replace(/"$/, '')
|
|
1832
|
+
} else {
|
|
1833
|
+
etag = ''
|
|
1834
|
+
}
|
|
1835
|
+
|
|
1836
|
+
eTags.push({ part: partNumber, etag })
|
|
1837
|
+
}
|
|
1838
|
+
|
|
1839
|
+
return await this.completeMultipartUpload(bucketName, objectName, uploadId, eTags)
|
|
1840
|
+
})(),
|
|
1841
|
+
])
|
|
1842
|
+
|
|
1843
|
+
return o
|
|
1844
|
+
}
|
|
1845
|
+
|
|
1846
|
+
async removeBucketReplication(bucketName: string): Promise<void>
|
|
1847
|
+
removeBucketReplication(bucketName: string, callback: NoResultCallback): void
|
|
1848
|
+
async removeBucketReplication(bucketName: string): Promise<void> {
|
|
1849
|
+
if (!isValidBucketName(bucketName)) {
|
|
1850
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1851
|
+
}
|
|
1852
|
+
const method = 'DELETE'
|
|
1853
|
+
const query = 'replication'
|
|
1854
|
+
await this.makeRequestAsyncOmit({ method, bucketName, query }, '', [200, 204], '')
|
|
1855
|
+
}
|
|
1856
|
+
|
|
1857
|
+
setBucketReplication(bucketName: string, replicationConfig: ReplicationConfigOpts): void
|
|
1858
|
+
async setBucketReplication(bucketName: string, replicationConfig: ReplicationConfigOpts): Promise<void>
|
|
1859
|
+
async setBucketReplication(bucketName: string, replicationConfig: ReplicationConfigOpts) {
|
|
1860
|
+
if (!isValidBucketName(bucketName)) {
|
|
1861
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1862
|
+
}
|
|
1863
|
+
if (!isObject(replicationConfig)) {
|
|
1864
|
+
throw new errors.InvalidArgumentError('replicationConfig should be of type "object"')
|
|
1865
|
+
} else {
|
|
1866
|
+
if (_.isEmpty(replicationConfig.role)) {
|
|
1867
|
+
throw new errors.InvalidArgumentError('Role cannot be empty')
|
|
1868
|
+
} else if (replicationConfig.role && !isString(replicationConfig.role)) {
|
|
1869
|
+
throw new errors.InvalidArgumentError('Invalid value for role', replicationConfig.role)
|
|
1870
|
+
}
|
|
1871
|
+
if (_.isEmpty(replicationConfig.rules)) {
|
|
1872
|
+
throw new errors.InvalidArgumentError('Minimum one replication rule must be specified')
|
|
1873
|
+
}
|
|
1874
|
+
}
|
|
1875
|
+
const method = 'PUT'
|
|
1876
|
+
const query = 'replication'
|
|
1877
|
+
const headers: Record<string, string> = {}
|
|
1878
|
+
|
|
1879
|
+
const replicationParamsConfig = {
|
|
1880
|
+
ReplicationConfiguration: {
|
|
1881
|
+
Role: replicationConfig.role,
|
|
1882
|
+
Rule: replicationConfig.rules,
|
|
1883
|
+
},
|
|
1884
|
+
}
|
|
1885
|
+
|
|
1886
|
+
const builder = new xml2js.Builder({ renderOpts: { pretty: false }, headless: true })
|
|
1887
|
+
const payload = builder.buildObject(replicationParamsConfig)
|
|
1888
|
+
headers['Content-MD5'] = toMd5(payload)
|
|
1889
|
+
await this.makeRequestAsyncOmit({ method, bucketName, query, headers }, payload)
|
|
1890
|
+
}
|
|
1891
|
+
|
|
1892
|
+
getBucketReplication(bucketName: string): void
|
|
1893
|
+
async getBucketReplication(bucketName: string): Promise<ReplicationConfig>
|
|
1894
|
+
async getBucketReplication(bucketName: string) {
|
|
1895
|
+
if (!isValidBucketName(bucketName)) {
|
|
1896
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1897
|
+
}
|
|
1898
|
+
const method = 'GET'
|
|
1899
|
+
const query = 'replication'
|
|
1900
|
+
|
|
1901
|
+
const httpRes = await this.makeRequestAsync({ method, bucketName, query }, '', [200, 204])
|
|
1902
|
+
const xmlResult = await readAsString(httpRes)
|
|
1903
|
+
return xmlParsers.parseReplicationConfig(xmlResult)
|
|
1904
|
+
}
|
|
1905
|
+
|
|
1906
|
+
getObjectLegalHold(
|
|
1907
|
+
bucketName: string,
|
|
1908
|
+
objectName: string,
|
|
1909
|
+
getOpts?: GetObjectLegalHoldOptions,
|
|
1910
|
+
callback?: ResultCallback<LEGAL_HOLD_STATUS>,
|
|
1911
|
+
): Promise<LEGAL_HOLD_STATUS>
|
|
1912
|
+
async getObjectLegalHold(
|
|
1913
|
+
bucketName: string,
|
|
1914
|
+
objectName: string,
|
|
1915
|
+
getOpts?: GetObjectLegalHoldOptions,
|
|
1916
|
+
): Promise<LEGAL_HOLD_STATUS> {
|
|
1917
|
+
if (!isValidBucketName(bucketName)) {
|
|
1918
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1919
|
+
}
|
|
1920
|
+
if (!isValidObjectName(objectName)) {
|
|
1921
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1922
|
+
}
|
|
1923
|
+
|
|
1924
|
+
if (getOpts) {
|
|
1925
|
+
if (!isObject(getOpts)) {
|
|
1926
|
+
throw new TypeError('getOpts should be of type "Object"')
|
|
1927
|
+
} else if (Object.keys(getOpts).length > 0 && getOpts.versionId && !isString(getOpts.versionId)) {
|
|
1928
|
+
throw new TypeError('versionId should be of type string.:', getOpts.versionId)
|
|
1929
|
+
}
|
|
1930
|
+
}
|
|
1931
|
+
|
|
1932
|
+
const method = 'GET'
|
|
1933
|
+
let query = 'legal-hold'
|
|
1934
|
+
|
|
1935
|
+
if (getOpts?.versionId) {
|
|
1936
|
+
query += `&versionId=${getOpts.versionId}`
|
|
1937
|
+
}
|
|
1938
|
+
|
|
1939
|
+
const httpRes = await this.makeRequestAsync({ method, bucketName, objectName, query }, '', [200])
|
|
1940
|
+
const strRes = await readAsString(httpRes)
|
|
1941
|
+
return parseObjectLegalHoldConfig(strRes)
|
|
1942
|
+
}
|
|
1943
|
+
|
|
1944
|
+
setObjectLegalHold(bucketName: string, objectName: string, setOpts?: PutObjectLegalHoldOptions): void
|
|
1945
|
+
async setObjectLegalHold(
|
|
1946
|
+
bucketName: string,
|
|
1947
|
+
objectName: string,
|
|
1948
|
+
setOpts = {
|
|
1949
|
+
status: LEGAL_HOLD_STATUS.ENABLED,
|
|
1950
|
+
} as PutObjectLegalHoldOptions,
|
|
1951
|
+
): Promise<void> {
|
|
1952
|
+
if (!isValidBucketName(bucketName)) {
|
|
1953
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
1954
|
+
}
|
|
1955
|
+
if (!isValidObjectName(objectName)) {
|
|
1956
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
1957
|
+
}
|
|
1958
|
+
|
|
1959
|
+
if (!isObject(setOpts)) {
|
|
1960
|
+
throw new TypeError('setOpts should be of type "Object"')
|
|
1961
|
+
} else {
|
|
1962
|
+
if (![LEGAL_HOLD_STATUS.ENABLED, LEGAL_HOLD_STATUS.DISABLED].includes(setOpts?.status)) {
|
|
1963
|
+
throw new TypeError('Invalid status: ' + setOpts.status)
|
|
1964
|
+
}
|
|
1965
|
+
if (setOpts.versionId && !setOpts.versionId.length) {
|
|
1966
|
+
throw new TypeError('versionId should be of type string.:' + setOpts.versionId)
|
|
1967
|
+
}
|
|
1968
|
+
}
|
|
1969
|
+
|
|
1970
|
+
const method = 'PUT'
|
|
1971
|
+
let query = 'legal-hold'
|
|
1972
|
+
|
|
1973
|
+
if (setOpts.versionId) {
|
|
1974
|
+
query += `&versionId=${setOpts.versionId}`
|
|
1975
|
+
}
|
|
1976
|
+
|
|
1977
|
+
const config = {
|
|
1978
|
+
Status: setOpts.status,
|
|
1979
|
+
}
|
|
1980
|
+
|
|
1981
|
+
const builder = new xml2js.Builder({ rootName: 'LegalHold', renderOpts: { pretty: false }, headless: true })
|
|
1982
|
+
const payload = builder.buildObject(config)
|
|
1983
|
+
const headers: Record<string, string> = {}
|
|
1984
|
+
headers['Content-MD5'] = toMd5(payload)
|
|
1985
|
+
|
|
1986
|
+
await this.makeRequestAsyncOmit({ method, bucketName, objectName, query, headers }, payload)
|
|
1987
|
+
}
|
|
1988
|
+
|
|
1989
|
+
/**
|
|
1990
|
+
* Get Tags associated with a Bucket
|
|
1991
|
+
*/
|
|
1992
|
+
async getBucketTagging(bucketName: string): Promise<Tag[]> {
|
|
1993
|
+
if (!isValidBucketName(bucketName)) {
|
|
1994
|
+
throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
|
|
1995
|
+
}
|
|
1996
|
+
|
|
1997
|
+
const method = 'GET'
|
|
1998
|
+
const query = 'tagging'
|
|
1999
|
+
const requestOptions = { method, bucketName, query }
|
|
2000
|
+
|
|
2001
|
+
const response = await this.makeRequestAsync(requestOptions)
|
|
2002
|
+
const body = await readAsString(response)
|
|
2003
|
+
return xmlParsers.parseTagging(body)
|
|
2004
|
+
}
|
|
2005
|
+
|
|
2006
|
+
/**
|
|
2007
|
+
* Get the tags associated with a bucket OR an object
|
|
2008
|
+
*/
|
|
2009
|
+
async getObjectTagging(bucketName: string, objectName: string, getOpts?: GetObjectOpts): Promise<Tag[]> {
|
|
2010
|
+
const method = 'GET'
|
|
2011
|
+
let query = 'tagging'
|
|
2012
|
+
|
|
2013
|
+
if (!isValidBucketName(bucketName)) {
|
|
2014
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2015
|
+
}
|
|
2016
|
+
if (!isValidObjectName(objectName)) {
|
|
2017
|
+
throw new errors.InvalidBucketNameError('Invalid object name: ' + objectName)
|
|
2018
|
+
}
|
|
2019
|
+
if (getOpts && !isObject(getOpts)) {
|
|
2020
|
+
throw new errors.InvalidArgumentError('getOpts should be of type "object"')
|
|
2021
|
+
}
|
|
2022
|
+
|
|
2023
|
+
if (getOpts && getOpts.versionId) {
|
|
2024
|
+
query = `${query}&versionId=${getOpts.versionId}`
|
|
2025
|
+
}
|
|
2026
|
+
const requestOptions: RequestOption = { method, bucketName, query }
|
|
2027
|
+
if (objectName) {
|
|
2028
|
+
requestOptions['objectName'] = objectName
|
|
2029
|
+
}
|
|
2030
|
+
|
|
2031
|
+
const response = await this.makeRequestAsync(requestOptions)
|
|
2032
|
+
const body = await readAsString(response)
|
|
2033
|
+
return xmlParsers.parseTagging(body)
|
|
2034
|
+
}
|
|
2035
|
+
|
|
2036
|
+
/**
|
|
2037
|
+
* Set the policy on a bucket or an object prefix.
|
|
2038
|
+
*/
|
|
2039
|
+
async setBucketPolicy(bucketName: string, policy: string): Promise<void> {
|
|
2040
|
+
// Validate arguments.
|
|
2041
|
+
if (!isValidBucketName(bucketName)) {
|
|
2042
|
+
throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
|
|
2043
|
+
}
|
|
2044
|
+
if (!isString(policy)) {
|
|
2045
|
+
throw new errors.InvalidBucketPolicyError(`Invalid bucket policy: ${policy} - must be "string"`)
|
|
2046
|
+
}
|
|
2047
|
+
|
|
2048
|
+
const query = 'policy'
|
|
2049
|
+
|
|
2050
|
+
let method = 'DELETE'
|
|
2051
|
+
if (policy) {
|
|
2052
|
+
method = 'PUT'
|
|
2053
|
+
}
|
|
2054
|
+
|
|
2055
|
+
await this.makeRequestAsyncOmit({ method, bucketName, query }, policy, [204], '')
|
|
2056
|
+
}
|
|
2057
|
+
|
|
2058
|
+
/**
|
|
2059
|
+
* Get the policy on a bucket or an object prefix.
|
|
2060
|
+
*/
|
|
2061
|
+
async getBucketPolicy(bucketName: string): Promise<string> {
|
|
2062
|
+
// Validate arguments.
|
|
2063
|
+
if (!isValidBucketName(bucketName)) {
|
|
2064
|
+
throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
|
|
2065
|
+
}
|
|
2066
|
+
|
|
2067
|
+
const method = 'GET'
|
|
2068
|
+
const query = 'policy'
|
|
2069
|
+
const res = await this.makeRequestAsync({ method, bucketName, query })
|
|
2070
|
+
return await readAsString(res)
|
|
2071
|
+
}
|
|
2072
|
+
|
|
2073
|
+
  /**
   * Apply (or clear) an object-retention configuration on an object.
   *
   * @param bucketName - bucket holding the object
   * @param objectName - object to set retention on
   * @param retentionOpts - mode / retainUntilDate / versionId / governanceBypass
   */
  async putObjectRetention(bucketName: string, objectName: string, retentionOpts: Retention = {}): Promise<void> {
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
    }
    if (!isValidObjectName(objectName)) {
      throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
    }
    if (!isObject(retentionOpts)) {
      throw new errors.InvalidArgumentError('retentionOpts should be of type "object"')
    } else {
      if (retentionOpts.governanceBypass && !isBoolean(retentionOpts.governanceBypass)) {
        throw new errors.InvalidArgumentError(`Invalid value for governanceBypass: ${retentionOpts.governanceBypass}`)
      }
      if (
        retentionOpts.mode &&
        ![RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE].includes(retentionOpts.mode)
      ) {
        throw new errors.InvalidArgumentError(`Invalid object retention mode: ${retentionOpts.mode}`)
      }
      // retainUntilDate is carried through as a string; the caller is
      // responsible for formatting it as the server expects.
      if (retentionOpts.retainUntilDate && !isString(retentionOpts.retainUntilDate)) {
        throw new errors.InvalidArgumentError(`Invalid value for retainUntilDate: ${retentionOpts.retainUntilDate}`)
      }
      if (retentionOpts.versionId && !isString(retentionOpts.versionId)) {
        throw new errors.InvalidArgumentError(`Invalid value for versionId: ${retentionOpts.versionId}`)
      }
    }

    const method = 'PUT'
    let query = 'retention'

    const headers: RequestHeaders = {}
    // Bypassing governance-mode retention requires an explicit header.
    if (retentionOpts.governanceBypass) {
      headers['X-Amz-Bypass-Governance-Retention'] = true
    }

    const builder = new xml2js.Builder({ rootName: 'Retention', renderOpts: { pretty: false }, headless: true })
    const params: Record<string, string> = {}

    if (retentionOpts.mode) {
      params.Mode = retentionOpts.mode
    }
    if (retentionOpts.retainUntilDate) {
      params.RetainUntilDate = retentionOpts.retainUntilDate
    }
    if (retentionOpts.versionId) {
      query += `&versionId=${retentionOpts.versionId}`
    }

    const payload = builder.buildObject(params)

    headers['Content-MD5'] = toMd5(payload)
    await this.makeRequestAsyncOmit({ method, bucketName, objectName, query, headers }, payload, [200, 204])
  }
|
|
2126
|
+
|
|
2127
|
+
getObjectLockConfig(bucketName: string, callback: ResultCallback<ObjectLockInfo>): void
|
|
2128
|
+
getObjectLockConfig(bucketName: string): void
|
|
2129
|
+
async getObjectLockConfig(bucketName: string): Promise<ObjectLockInfo>
|
|
2130
|
+
async getObjectLockConfig(bucketName: string) {
|
|
2131
|
+
if (!isValidBucketName(bucketName)) {
|
|
2132
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2133
|
+
}
|
|
2134
|
+
const method = 'GET'
|
|
2135
|
+
const query = 'object-lock'
|
|
2136
|
+
|
|
2137
|
+
const httpRes = await this.makeRequestAsync({ method, bucketName, query })
|
|
2138
|
+
const xmlResult = await readAsString(httpRes)
|
|
2139
|
+
return xmlParsers.parseObjectLockConfig(xmlResult)
|
|
2140
|
+
}
|
|
2141
|
+
|
|
2142
|
+
  setObjectLockConfig(bucketName: string, lockConfigOpts: Omit<ObjectLockInfo, 'objectLockEnabled'>): void
  async setObjectLockConfig(
    bucketName: string,
    lockConfigOpts: Omit<ObjectLockInfo, 'objectLockEnabled'>,
  ): Promise<void>
  /**
   * Set the default object-lock configuration on a bucket. Passing an empty
   * lockConfigOpts enables object lock without a default retention rule;
   * otherwise mode, unit and validity must all be supplied together.
   */
  async setObjectLockConfig(bucketName: string, lockConfigOpts: Omit<ObjectLockInfo, 'objectLockEnabled'>) {
    const retentionModes = [RETENTION_MODES.COMPLIANCE, RETENTION_MODES.GOVERNANCE]
    const validUnits = [RETENTION_VALIDITY_UNITS.DAYS, RETENTION_VALIDITY_UNITS.YEARS]

    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }

    if (lockConfigOpts.mode && !retentionModes.includes(lockConfigOpts.mode)) {
      throw new TypeError(`lockConfigOpts.mode should be one of ${retentionModes}`)
    }
    if (lockConfigOpts.unit && !validUnits.includes(lockConfigOpts.unit)) {
      throw new TypeError(`lockConfigOpts.unit should be one of ${validUnits}`)
    }
    if (lockConfigOpts.validity && !isNumber(lockConfigOpts.validity)) {
      throw new TypeError(`lockConfigOpts.validity should be a number`)
    }

    const method = 'PUT'
    const query = 'object-lock'

    const config: ObjectLockConfigParam = {
      ObjectLockEnabled: 'Enabled',
    }
    const configKeys = Object.keys(lockConfigOpts)

    // A default-retention rule is all-or-nothing: either none of
    // unit/mode/validity are provided, or all three must be.
    const isAllKeysSet = ['unit', 'mode', 'validity'].every((lck) => configKeys.includes(lck))
    // Check if keys are present and all keys are present.
    if (configKeys.length > 0) {
      if (!isAllKeysSet) {
        throw new TypeError(
          `lockConfigOpts.mode,lockConfigOpts.unit,lockConfigOpts.validity all the properties should be specified.`,
        )
      } else {
        config.Rule = {
          DefaultRetention: {},
        }
        if (lockConfigOpts.mode) {
          config.Rule.DefaultRetention.Mode = lockConfigOpts.mode
        }
        // Validity is expressed as Days or Years depending on the unit.
        if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.DAYS) {
          config.Rule.DefaultRetention.Days = lockConfigOpts.validity
        } else if (lockConfigOpts.unit === RETENTION_VALIDITY_UNITS.YEARS) {
          config.Rule.DefaultRetention.Years = lockConfigOpts.validity
        }
      }
    }

    const builder = new xml2js.Builder({
      rootName: 'ObjectLockConfiguration',
      renderOpts: { pretty: false },
      headless: true,
    })
    const payload = builder.buildObject(config)

    const headers: RequestHeaders = {}
    headers['Content-MD5'] = toMd5(payload)

    await this.makeRequestAsyncOmit({ method, bucketName, query, headers }, payload)
  }
|
|
2207
|
+
|
|
2208
|
+
async getBucketVersioning(bucketName: string): Promise<BucketVersioningConfiguration> {
|
|
2209
|
+
if (!isValidBucketName(bucketName)) {
|
|
2210
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2211
|
+
}
|
|
2212
|
+
const method = 'GET'
|
|
2213
|
+
const query = 'versioning'
|
|
2214
|
+
|
|
2215
|
+
const httpRes = await this.makeRequestAsync({ method, bucketName, query })
|
|
2216
|
+
const xmlResult = await readAsString(httpRes)
|
|
2217
|
+
return await xmlParsers.parseBucketVersioningConfig(xmlResult)
|
|
2218
|
+
}
|
|
2219
|
+
|
|
2220
|
+
async setBucketVersioning(bucketName: string, versionConfig: BucketVersioningConfiguration): Promise<void> {
|
|
2221
|
+
if (!isValidBucketName(bucketName)) {
|
|
2222
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2223
|
+
}
|
|
2224
|
+
if (!Object.keys(versionConfig).length) {
|
|
2225
|
+
throw new errors.InvalidArgumentError('versionConfig should be of type "object"')
|
|
2226
|
+
}
|
|
2227
|
+
|
|
2228
|
+
const method = 'PUT'
|
|
2229
|
+
const query = 'versioning'
|
|
2230
|
+
const builder = new xml2js.Builder({
|
|
2231
|
+
rootName: 'VersioningConfiguration',
|
|
2232
|
+
renderOpts: { pretty: false },
|
|
2233
|
+
headless: true,
|
|
2234
|
+
})
|
|
2235
|
+
const payload = builder.buildObject(versionConfig)
|
|
2236
|
+
|
|
2237
|
+
await this.makeRequestAsyncOmit({ method, bucketName, query }, payload)
|
|
2238
|
+
}
|
|
2239
|
+
|
|
2240
|
+
private async setTagging(taggingParams: PutTaggingParams): Promise<void> {
|
|
2241
|
+
const { bucketName, objectName, tags, putOpts } = taggingParams
|
|
2242
|
+
const method = 'PUT'
|
|
2243
|
+
let query = 'tagging'
|
|
2244
|
+
|
|
2245
|
+
if (putOpts && putOpts?.versionId) {
|
|
2246
|
+
query = `${query}&versionId=${putOpts.versionId}`
|
|
2247
|
+
}
|
|
2248
|
+
const tagsList = []
|
|
2249
|
+
for (const [key, value] of Object.entries(tags)) {
|
|
2250
|
+
tagsList.push({ Key: key, Value: value })
|
|
2251
|
+
}
|
|
2252
|
+
const taggingConfig = {
|
|
2253
|
+
Tagging: {
|
|
2254
|
+
TagSet: {
|
|
2255
|
+
Tag: tagsList,
|
|
2256
|
+
},
|
|
2257
|
+
},
|
|
2258
|
+
}
|
|
2259
|
+
const headers = {} as RequestHeaders
|
|
2260
|
+
const builder = new xml2js.Builder({ headless: true, renderOpts: { pretty: false } })
|
|
2261
|
+
const payloadBuf = Buffer.from(builder.buildObject(taggingConfig))
|
|
2262
|
+
const requestOptions = {
|
|
2263
|
+
method,
|
|
2264
|
+
bucketName,
|
|
2265
|
+
query,
|
|
2266
|
+
headers,
|
|
2267
|
+
|
|
2268
|
+
...(objectName && { objectName: objectName }),
|
|
2269
|
+
}
|
|
2270
|
+
|
|
2271
|
+
headers['Content-MD5'] = toMd5(payloadBuf)
|
|
2272
|
+
|
|
2273
|
+
await this.makeRequestAsyncOmit(requestOptions, payloadBuf)
|
|
2274
|
+
}
|
|
2275
|
+
|
|
2276
|
+
/**
 * Shared implementation for bucket- and object-level tag deletion.
 * A versionId in `removeOpts` targets a specific object version.
 */
private async removeTagging({ bucketName, objectName, removeOpts }: RemoveTaggingParams): Promise<void> {
  let query = 'tagging'
  if (removeOpts && Object.keys(removeOpts).length && removeOpts.versionId) {
    query += `&versionId=${removeOpts.versionId}`
  }

  // Both 200 and 204 are acceptable success codes for DELETE tagging.
  await this.makeRequestAsync({ method: 'DELETE', bucketName, objectName, query }, '', [200, 204])
}
|
|
2290
|
+
|
|
2291
|
+
async setBucketTagging(bucketName: string, tags: Tags): Promise<void> {
|
|
2292
|
+
if (!isValidBucketName(bucketName)) {
|
|
2293
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2294
|
+
}
|
|
2295
|
+
if (!isPlainObject(tags)) {
|
|
2296
|
+
throw new errors.InvalidArgumentError('tags should be of type "object"')
|
|
2297
|
+
}
|
|
2298
|
+
if (Object.keys(tags).length > 10) {
|
|
2299
|
+
throw new errors.InvalidArgumentError('maximum tags allowed is 10"')
|
|
2300
|
+
}
|
|
2301
|
+
|
|
2302
|
+
await this.setTagging({ bucketName, tags })
|
|
2303
|
+
}
|
|
2304
|
+
|
|
2305
|
+
/**
 * Remove all tags from a bucket.
 *
 * @param bucketName - bucket whose tags are removed
 * @throws InvalidBucketNameError when the bucket name is not valid
 */
async removeBucketTagging(bucketName: string) {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }

  await this.removeTagging({ bucketName })
}
|
|
2311
|
+
|
|
2312
|
+
/**
 * Set tags on an object (at most 10 tags), optionally on a specific version.
 *
 * @param bucketName - bucket containing the object
 * @param objectName - object to tag
 * @param tags - plain object of key/value tag pairs
 * @param putOpts - optional; may carry `versionId`
 * @throws InvalidBucketNameError when the bucket name is not valid
 * @throws InvalidObjectNameError when the object name is not valid
 * @throws InvalidArgumentError when tags is not a plain object or has more than 10 entries
 */
async setObjectTagging(bucketName: string, objectName: string, tags: Tags, putOpts?: TaggingOpts) {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }
  if (!isValidObjectName(objectName)) {
    // Fixed: previously threw InvalidBucketNameError for an invalid *object* name,
    // inconsistent with every sibling method (e.g. removeIncompleteUpload, getObjectRetention).
    throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
  }

  if (!isPlainObject(tags)) {
    throw new errors.InvalidArgumentError('tags should be of type "object"')
  }
  if (Object.keys(tags).length > 10) {
    // Fixed: message previously ended with a stray double-quote ('…is 10"').
    throw new errors.InvalidArgumentError('Maximum tags allowed is 10')
  }

  await this.setTagging({ bucketName, objectName, tags, putOpts })
}
|
|
2329
|
+
|
|
2330
|
+
/**
 * Remove tags from an object, optionally from a specific version.
 *
 * @param bucketName - bucket containing the object
 * @param objectName - object whose tags are removed
 * @param removeOpts - may carry `versionId`
 * @throws InvalidBucketNameError when the bucket name is not valid
 * @throws InvalidObjectNameError when the object name is not valid
 * @throws InvalidArgumentError when removeOpts is not an object
 */
async removeObjectTagging(bucketName: string, objectName: string, removeOpts: TaggingOpts) {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }
  if (!isValidObjectName(objectName)) {
    // Fixed: previously threw InvalidBucketNameError for an invalid *object* name,
    // inconsistent with every sibling method.
    throw new errors.InvalidObjectNameError('Invalid object name: ' + objectName)
  }
  // NOTE(review): if Object.keys(removeOpts).length is truthy, removeOpts is already
  // object-like, so this guard is effectively unreachable — kept for compatibility.
  if (removeOpts && Object.keys(removeOpts).length && !isObject(removeOpts)) {
    throw new errors.InvalidArgumentError('removeOpts should be of type "object"')
  }

  await this.removeTagging({ bucketName, objectName, removeOpts })
}
|
|
2343
|
+
|
|
2344
|
+
/**
 * Run an S3 Select (SelectObjectContent) query against a single object.
 *
 * @param bucketName - bucket containing the object
 * @param objectName - object to query
 * @param selectOpts - must provide `expression` (string), `inputSerialization`
 *   and `outputSerialization` (objects); `expressionType`, `requestProgress`
 *   and `scanRange` are optional.
 * @returns parsed select results, or undefined when the response has none
 * @throws InvalidBucketNameError / InvalidObjectNameError on bad names
 * @throws TypeError when the select configuration is missing or malformed
 */
async selectObjectContent(
  bucketName: string,
  objectName: string,
  selectOpts: SelectOptions,
): Promise<SelectResults | undefined> {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
  }
  if (!isValidObjectName(objectName)) {
    throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
  }
  // Validate the three required pieces of the select configuration up front.
  if (!_.isEmpty(selectOpts)) {
    if (!isString(selectOpts.expression)) {
      throw new TypeError('sqlExpression should be of type "string"')
    }
    if (!_.isEmpty(selectOpts.inputSerialization)) {
      if (!isObject(selectOpts.inputSerialization)) {
        throw new TypeError('inputSerialization should be of type "object"')
      }
    } else {
      throw new TypeError('inputSerialization is required')
    }
    if (!_.isEmpty(selectOpts.outputSerialization)) {
      if (!isObject(selectOpts.outputSerialization)) {
        throw new TypeError('outputSerialization should be of type "object"')
      }
    } else {
      throw new TypeError('outputSerialization is required')
    }
  } else {
    throw new TypeError('valid select configuration is required')
  }

  const method = 'POST'
  // select-type=2 selects the current (v2) SelectObjectContent protocol.
  const query = `select&select-type=2`

  // Built as an array of single-key objects so xml2js emits the elements
  // in this exact order inside <SelectObjectContentRequest>.
  const config: Record<string, unknown>[] = [
    {
      Expression: selectOpts.expression,
    },
    {
      ExpressionType: selectOpts.expressionType || 'SQL',
    },
    {
      InputSerialization: [selectOpts.inputSerialization],
    },
    {
      OutputSerialization: [selectOpts.outputSerialization],
    },
  ]

  // Optional
  if (selectOpts.requestProgress) {
    config.push({ RequestProgress: selectOpts?.requestProgress })
  }
  // Optional
  if (selectOpts.scanRange) {
    config.push({ ScanRange: selectOpts.scanRange })
  }

  const builder = new xml2js.Builder({
    rootName: 'SelectObjectContentRequest',
    renderOpts: { pretty: false },
    headless: true,
  })
  const payload = builder.buildObject(config)

  const res = await this.makeRequestAsync({ method, bucketName, objectName, query }, payload)
  // The response is a binary event stream; parse it as a buffer, not a string.
  const body = await readAsBuffer(res)
  return parseSelectObjectContentResponse(body)
}
|
|
2415
|
+
|
|
2416
|
+
private async applyBucketLifecycle(bucketName: string, policyConfig: LifeCycleConfigParam): Promise<void> {
|
|
2417
|
+
const method = 'PUT'
|
|
2418
|
+
const query = 'lifecycle'
|
|
2419
|
+
|
|
2420
|
+
const headers: RequestHeaders = {}
|
|
2421
|
+
const builder = new xml2js.Builder({
|
|
2422
|
+
rootName: 'LifecycleConfiguration',
|
|
2423
|
+
headless: true,
|
|
2424
|
+
renderOpts: { pretty: false },
|
|
2425
|
+
})
|
|
2426
|
+
const payload = builder.buildObject(policyConfig)
|
|
2427
|
+
headers['Content-MD5'] = toMd5(payload)
|
|
2428
|
+
|
|
2429
|
+
await this.makeRequestAsyncOmit({ method, bucketName, query, headers }, payload)
|
|
2430
|
+
}
|
|
2431
|
+
|
|
2432
|
+
async removeBucketLifecycle(bucketName: string): Promise<void> {
|
|
2433
|
+
if (!isValidBucketName(bucketName)) {
|
|
2434
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2435
|
+
}
|
|
2436
|
+
const method = 'DELETE'
|
|
2437
|
+
const query = 'lifecycle'
|
|
2438
|
+
await this.makeRequestAsyncOmit({ method, bucketName, query }, '', [204])
|
|
2439
|
+
}
|
|
2440
|
+
|
|
2441
|
+
async setBucketLifecycle(bucketName: string, lifeCycleConfig: LifeCycleConfigParam): Promise<void> {
|
|
2442
|
+
if (!isValidBucketName(bucketName)) {
|
|
2443
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2444
|
+
}
|
|
2445
|
+
if (_.isEmpty(lifeCycleConfig)) {
|
|
2446
|
+
await this.removeBucketLifecycle(bucketName)
|
|
2447
|
+
} else {
|
|
2448
|
+
await this.applyBucketLifecycle(bucketName, lifeCycleConfig)
|
|
2449
|
+
}
|
|
2450
|
+
}
|
|
2451
|
+
|
|
2452
|
+
async getBucketLifecycle(bucketName: string): Promise<LifecycleConfig | null> {
|
|
2453
|
+
if (!isValidBucketName(bucketName)) {
|
|
2454
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2455
|
+
}
|
|
2456
|
+
const method = 'GET'
|
|
2457
|
+
const query = 'lifecycle'
|
|
2458
|
+
|
|
2459
|
+
const res = await this.makeRequestAsync({ method, bucketName, query })
|
|
2460
|
+
const body = await readAsString(res)
|
|
2461
|
+
return xmlParsers.parseLifecycleConfig(body)
|
|
2462
|
+
}
|
|
2463
|
+
|
|
2464
|
+
async setBucketEncryption(bucketName: string, encryptionConfig?: EncryptionConfig): Promise<void> {
|
|
2465
|
+
if (!isValidBucketName(bucketName)) {
|
|
2466
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2467
|
+
}
|
|
2468
|
+
if (!_.isEmpty(encryptionConfig) && encryptionConfig.Rule.length > 1) {
|
|
2469
|
+
throw new errors.InvalidArgumentError('Invalid Rule length. Only one rule is allowed.: ' + encryptionConfig.Rule)
|
|
2470
|
+
}
|
|
2471
|
+
|
|
2472
|
+
let encryptionObj = encryptionConfig
|
|
2473
|
+
if (_.isEmpty(encryptionConfig)) {
|
|
2474
|
+
encryptionObj = {
|
|
2475
|
+
// Default Hanzo S3 Server Supported Rule
|
|
2476
|
+
Rule: [
|
|
2477
|
+
{
|
|
2478
|
+
ApplyServerSideEncryptionByDefault: {
|
|
2479
|
+
SSEAlgorithm: 'AES256',
|
|
2480
|
+
},
|
|
2481
|
+
},
|
|
2482
|
+
],
|
|
2483
|
+
}
|
|
2484
|
+
}
|
|
2485
|
+
|
|
2486
|
+
const method = 'PUT'
|
|
2487
|
+
const query = 'encryption'
|
|
2488
|
+
const builder = new xml2js.Builder({
|
|
2489
|
+
rootName: 'ServerSideEncryptionConfiguration',
|
|
2490
|
+
renderOpts: { pretty: false },
|
|
2491
|
+
headless: true,
|
|
2492
|
+
})
|
|
2493
|
+
const payload = builder.buildObject(encryptionObj)
|
|
2494
|
+
|
|
2495
|
+
const headers: RequestHeaders = {}
|
|
2496
|
+
headers['Content-MD5'] = toMd5(payload)
|
|
2497
|
+
|
|
2498
|
+
await this.makeRequestAsyncOmit({ method, bucketName, query, headers }, payload)
|
|
2499
|
+
}
|
|
2500
|
+
|
|
2501
|
+
/**
 * Fetch the server-side-encryption configuration of a bucket.
 *
 * @param bucketName - bucket to query
 * @throws InvalidBucketNameError when the bucket name is not valid
 */
async getBucketEncryption(bucketName: string) {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }

  const res = await this.makeRequestAsync({ method: 'GET', bucketName, query: 'encryption' })
  const body = await readAsString(res)
  return xmlParsers.parseBucketEncryptionConfig(body)
}
|
|
2512
|
+
|
|
2513
|
+
/**
 * Delete the server-side-encryption configuration of a bucket.
 *
 * @param bucketName - bucket whose encryption config is removed
 * @throws InvalidBucketNameError when the bucket name is not valid
 */
async removeBucketEncryption(bucketName: string) {
  if (!isValidBucketName(bucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
  }

  // 204 No Content is the expected success status for this DELETE.
  await this.makeRequestAsyncOmit({ method: 'DELETE', bucketName, query: 'encryption' }, '', [204])
}
|
|
2522
|
+
|
|
2523
|
+
async getObjectRetention(
|
|
2524
|
+
bucketName: string,
|
|
2525
|
+
objectName: string,
|
|
2526
|
+
getOpts?: GetObjectRetentionOpts,
|
|
2527
|
+
): Promise<ObjectRetentionInfo | null | undefined> {
|
|
2528
|
+
if (!isValidBucketName(bucketName)) {
|
|
2529
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2530
|
+
}
|
|
2531
|
+
if (!isValidObjectName(objectName)) {
|
|
2532
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
2533
|
+
}
|
|
2534
|
+
if (getOpts && !isObject(getOpts)) {
|
|
2535
|
+
throw new errors.InvalidArgumentError('getOpts should be of type "object"')
|
|
2536
|
+
} else if (getOpts?.versionId && !isString(getOpts.versionId)) {
|
|
2537
|
+
throw new errors.InvalidArgumentError('versionId should be of type "string"')
|
|
2538
|
+
}
|
|
2539
|
+
|
|
2540
|
+
const method = 'GET'
|
|
2541
|
+
let query = 'retention'
|
|
2542
|
+
if (getOpts?.versionId) {
|
|
2543
|
+
query += `&versionId=${getOpts.versionId}`
|
|
2544
|
+
}
|
|
2545
|
+
const res = await this.makeRequestAsync({ method, bucketName, objectName, query })
|
|
2546
|
+
const body = await readAsString(res)
|
|
2547
|
+
return xmlParsers.parseObjectRetentionConfig(body)
|
|
2548
|
+
}
|
|
2549
|
+
|
|
2550
|
+
async removeObjects(bucketName: string, objectsList: RemoveObjectsParam): Promise<RemoveObjectsResponse[]> {
|
|
2551
|
+
if (!isValidBucketName(bucketName)) {
|
|
2552
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2553
|
+
}
|
|
2554
|
+
if (!Array.isArray(objectsList)) {
|
|
2555
|
+
throw new errors.InvalidArgumentError('objectsList should be a list')
|
|
2556
|
+
}
|
|
2557
|
+
|
|
2558
|
+
const runDeleteObjects = async (batch: RemoveObjectsParam): Promise<RemoveObjectsResponse[]> => {
|
|
2559
|
+
const delObjects: RemoveObjectsRequestEntry[] = batch.map((value) => {
|
|
2560
|
+
return isObject(value) ? { Key: value.name, VersionId: value.versionId } : { Key: value }
|
|
2561
|
+
})
|
|
2562
|
+
|
|
2563
|
+
const remObjects = { Delete: { Quiet: true, Object: delObjects } }
|
|
2564
|
+
const payload = Buffer.from(new xml2js.Builder({ headless: true }).buildObject(remObjects))
|
|
2565
|
+
const headers: RequestHeaders = { 'Content-MD5': toMd5(payload) }
|
|
2566
|
+
|
|
2567
|
+
const res = await this.makeRequestAsync({ method: 'POST', bucketName, query: 'delete', headers }, payload)
|
|
2568
|
+
const body = await readAsString(res)
|
|
2569
|
+
return xmlParsers.removeObjectsParser(body)
|
|
2570
|
+
}
|
|
2571
|
+
|
|
2572
|
+
const maxEntries = 1000 // max entries accepted in server for DeleteMultipleObjects API.
|
|
2573
|
+
// Client side batching
|
|
2574
|
+
const batches = []
|
|
2575
|
+
for (let i = 0; i < objectsList.length; i += maxEntries) {
|
|
2576
|
+
batches.push(objectsList.slice(i, i + maxEntries))
|
|
2577
|
+
}
|
|
2578
|
+
|
|
2579
|
+
const batchResults = await Promise.all(batches.map(runDeleteObjects))
|
|
2580
|
+
return batchResults.flat()
|
|
2581
|
+
}
|
|
2582
|
+
|
|
2583
|
+
/**
 * Abort the pending incomplete multipart upload for an object.
 * Looks up the upload id via findUploadId, then issues the abort DELETE.
 *
 * @param bucketName - bucket containing the incomplete upload
 * @param objectName - object whose upload is aborted
 * @throws errors.IsValidBucketNameError when the bucket name is not valid
 * @throws InvalidObjectNameError when the object name is not valid
 */
async removeIncompleteUpload(bucketName: string, objectName: string): Promise<void> {
  if (!isValidBucketName(bucketName)) {
    // NOTE(review): sibling methods throw InvalidBucketNameError here; the
    // IsValidBucketNameError class name looks like a historical misnaming.
    // Changing it would alter the thrown type callers may catch — confirm before renaming.
    throw new errors.IsValidBucketNameError('Invalid bucket name: ' + bucketName)
  }
  if (!isValidObjectName(objectName)) {
    throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
  }
  // Resolve the current pending uploadId for this object, then abort it.
  const removeUploadId = await this.findUploadId(bucketName, objectName)
  const method = 'DELETE'
  const query = `uploadId=${removeUploadId}`
  // 204 No Content is the expected success status for abort.
  await this.makeRequestAsyncOmit({ method, bucketName, objectName, query }, '', [204])
}
|
|
2595
|
+
|
|
2596
|
+
/**
 * Legacy (v1) server-side copy using positional arguments.
 * Copies `sourceBucketNameAndObjectName` ("bucket/object") into the target,
 * optionally guarded by CopyConditions headers.
 *
 * @param targetBucketName - destination bucket
 * @param targetObjectName - destination object key
 * @param sourceBucketNameAndObjectName - "bucket/object" source path
 * @param conditions - optional copy preconditions
 * @returns parsed CopyObjectResult from the server response
 */
private async copyObjectV1(
  targetBucketName: string,
  targetObjectName: string,
  sourceBucketNameAndObjectName: string,
  conditions?: null | CopyConditions,
) {
  // Legacy callback-style callers may pass a function here; treat it as "no conditions".
  if (typeof conditions == 'function') {
    conditions = null
  }

  if (!isValidBucketName(targetBucketName)) {
    throw new errors.InvalidBucketNameError('Invalid bucket name: ' + targetBucketName)
  }
  if (!isValidObjectName(targetObjectName)) {
    throw new errors.InvalidObjectNameError(`Invalid object name: ${targetObjectName}`)
  }
  if (!isString(sourceBucketNameAndObjectName)) {
    throw new TypeError('sourceBucketNameAndObjectName should be of type "string"')
  }
  if (sourceBucketNameAndObjectName === '') {
    throw new errors.InvalidPrefixError(`Empty source prefix`)
  }

  if (conditions != null && !(conditions instanceof CopyConditions)) {
    throw new TypeError('conditions should be of type "CopyConditions"')
  }

  const headers: RequestHeaders = {}
  // Escape the source path for use in the x-amz-copy-source header.
  headers['x-amz-copy-source'] = uriResourceEscape(sourceBucketNameAndObjectName)

  // Map each non-empty condition onto its conditional-copy header.
  if (conditions) {
    if (conditions.modified !== '') {
      headers['x-amz-copy-source-if-modified-since'] = conditions.modified
    }
    if (conditions.unmodified !== '') {
      headers['x-amz-copy-source-if-unmodified-since'] = conditions.unmodified
    }
    if (conditions.matchETag !== '') {
      headers['x-amz-copy-source-if-match'] = conditions.matchETag
    }
    if (conditions.matchETagExcept !== '') {
      headers['x-amz-copy-source-if-none-match'] = conditions.matchETagExcept
    }
  }

  const method = 'PUT'

  const res = await this.makeRequestAsync({
    method,
    bucketName: targetBucketName,
    objectName: targetObjectName,
    headers,
  })
  const body = await readAsString(res)
  return xmlParsers.parseCopyObject(body)
}
|
|
2652
|
+
|
|
2653
|
+
private async copyObjectV2(
|
|
2654
|
+
sourceConfig: CopySourceOptions,
|
|
2655
|
+
destConfig: CopyDestinationOptions,
|
|
2656
|
+
): Promise<CopyObjectResultV2> {
|
|
2657
|
+
if (!(sourceConfig instanceof CopySourceOptions)) {
|
|
2658
|
+
throw new errors.InvalidArgumentError('sourceConfig should of type CopySourceOptions ')
|
|
2659
|
+
}
|
|
2660
|
+
if (!(destConfig instanceof CopyDestinationOptions)) {
|
|
2661
|
+
throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ')
|
|
2662
|
+
}
|
|
2663
|
+
if (!destConfig.validate()) {
|
|
2664
|
+
return Promise.reject()
|
|
2665
|
+
}
|
|
2666
|
+
if (!destConfig.validate()) {
|
|
2667
|
+
return Promise.reject()
|
|
2668
|
+
}
|
|
2669
|
+
|
|
2670
|
+
const headers = Object.assign({}, sourceConfig.getHeaders(), destConfig.getHeaders())
|
|
2671
|
+
|
|
2672
|
+
const bucketName = destConfig.Bucket
|
|
2673
|
+
const objectName = destConfig.Object
|
|
2674
|
+
|
|
2675
|
+
const method = 'PUT'
|
|
2676
|
+
|
|
2677
|
+
const res = await this.makeRequestAsync({ method, bucketName, objectName, headers })
|
|
2678
|
+
const body = await readAsString(res)
|
|
2679
|
+
const copyRes = xmlParsers.parseCopyObject(body)
|
|
2680
|
+
const resHeaders: IncomingHttpHeaders = res.headers
|
|
2681
|
+
|
|
2682
|
+
const sizeHeaderValue = resHeaders && resHeaders['content-length']
|
|
2683
|
+
const size = typeof sizeHeaderValue === 'number' ? sizeHeaderValue : undefined
|
|
2684
|
+
|
|
2685
|
+
return {
|
|
2686
|
+
Bucket: destConfig.Bucket,
|
|
2687
|
+
Key: destConfig.Object,
|
|
2688
|
+
LastModified: copyRes.lastModified,
|
|
2689
|
+
MetaData: extractMetadata(resHeaders as ResponseHeader),
|
|
2690
|
+
VersionId: getVersionId(resHeaders as ResponseHeader),
|
|
2691
|
+
SourceVersionId: getSourceVersionId(resHeaders as ResponseHeader),
|
|
2692
|
+
Etag: sanitizeETag(resHeaders.etag),
|
|
2693
|
+
Size: size,
|
|
2694
|
+
}
|
|
2695
|
+
}
|
|
2696
|
+
|
|
2697
|
+
/**
 * Copy an object. Two call shapes are supported:
 *  - (source: CopySourceOptions, dest: CopyDestinationOptions) — modern v2 form
 *  - (targetBucket, targetObject, "srcBucket/srcObject", conditions?) — legacy v1 form
 * Dispatch is by the runtime type of the first argument.
 */
async copyObject(source: CopySourceOptions, dest: CopyDestinationOptions): Promise<CopyObjectResult>
async copyObject(
  targetBucketName: string,
  targetObjectName: string,
  sourceBucketNameAndObjectName: string,
  conditions?: CopyConditions,
): Promise<CopyObjectResult>
async copyObject(...allArgs: CopyObjectParams): Promise<CopyObjectResult> {
  // A string first argument selects the legacy positional (v1) form.
  if (typeof allArgs[0] === 'string') {
    const [targetBucketName, targetObjectName, sourceBucketNameAndObjectName, conditions] = allArgs as [
      string,
      string,
      string,
      CopyConditions?,
    ]
    return await this.copyObjectV1(targetBucketName, targetObjectName, sourceBucketNameAndObjectName, conditions)
  }
  // Otherwise the options-object (v2) form.
  const [source, dest] = allArgs as [CopySourceOptions, CopyDestinationOptions]
  return await this.copyObjectV2(source, dest)
}
|
|
2717
|
+
|
|
2718
|
+
/**
 * Upload a single part of a multipart upload.
 *
 * @param partConfig - destination, uploadID, partNumber and request headers
 * @param payload - optional part body (omitted for upload-part-copy requests)
 * @returns the part's etag, object key and part number
 */
async uploadPart(
  partConfig: {
    bucketName: string
    objectName: string
    uploadID: string
    partNumber: number
    headers: RequestHeaders
  },
  payload?: Binary,
) {
  const { bucketName, objectName, uploadID, partNumber, headers } = partConfig

  const query = `uploadId=${uploadID}&partNumber=${partNumber}`
  const res = await this.makeRequestAsync(
    { method: 'PUT', bucketName, objectName: objectName, query, headers },
    payload,
  )
  const body = await readAsString(res)
  const partRes = uploadPartParser(body)

  // Prefer the response-header etag; fall back to the ETag parsed from the XML body.
  return {
    etag: sanitizeETag(res.headers.etag) || sanitizeETag(partRes.ETag),
    key: objectName,
    part: partNumber,
  }
}
|
|
2743
|
+
|
|
2744
|
+
/**
 * Compose a destination object from multiple source objects (server-side),
 * the S3 equivalent of concatenation. Small inputs (one part within the max
 * part size, or total size 0) degrade to a plain copyObject; otherwise each
 * source is split and copied via upload-part-copy into a multipart upload.
 *
 * @param destObjConfig - destination options
 * @param sourceObjList - ordered source objects (1..MAX_PARTS_COUNT)
 * @param maxConcurrency - max parallel part uploads per batch (default 10)
 * @returns false when validation fails; otherwise the copy/complete result
 */
async composeObject(
  destObjConfig: CopyDestinationOptions,
  sourceObjList: CopySourceOptions[],
  { maxConcurrency = 10 } = {},
): Promise<boolean | { etag: string; versionId: string | null } | Promise<void> | CopyObjectResult> {
  const sourceFilesLength = sourceObjList.length

  if (!Array.isArray(sourceObjList)) {
    throw new errors.InvalidArgumentError('sourceConfig should an array of CopySourceOptions ')
  }
  if (!(destObjConfig instanceof CopyDestinationOptions)) {
    throw new errors.InvalidArgumentError('destConfig should of type CopyDestinationOptions ')
  }

  if (sourceFilesLength < 1 || sourceFilesLength > PART_CONSTRAINTS.MAX_PARTS_COUNT) {
    throw new errors.InvalidArgumentError(
      `"There must be as least one and up to ${PART_CONSTRAINTS.MAX_PARTS_COUNT} source objects.`,
    )
  }

  // Validation failures are reported as `false`, not exceptions.
  for (let i = 0; i < sourceFilesLength; i++) {
    const sObj = sourceObjList[i] as CopySourceOptions
    if (!sObj.validate()) {
      return false
    }
  }

  if (!(destObjConfig as CopyDestinationOptions).validate()) {
    return false
  }

  // Build statObject options, carrying the source VersionID when present.
  const getStatOptions = (srcConfig: CopySourceOptions) => {
    let statOpts = {}
    if (!_.isEmpty(srcConfig.VersionID)) {
      statOpts = {
        versionId: srcConfig.VersionID,
      }
    }
    return statOpts
  }
  const srcObjectSizes: number[] = []
  let totalSize = 0
  let totalParts = 0

  // Stat every source in parallel to learn sizes and etags.
  const sourceObjStats = sourceObjList.map((srcItem) =>
    this.statObject(srcItem.Bucket, srcItem.Object, getStatOptions(srcItem)),
  )

  const srcObjectInfos = await Promise.all(sourceObjStats)

  // Validate each source's (optional) byte range, minimum part size,
  // cumulative size and cumulative part count.
  const validatedStats = srcObjectInfos.map((resItemStat, index) => {
    const srcConfig: CopySourceOptions | undefined = sourceObjList[index]

    let srcCopySize = resItemStat.size
    // Check if a segment is specified, and if so, is the
    // segment within object bounds?
    if (srcConfig && srcConfig.MatchRange) {
      // Since range is specified,
      // 0 <= src.srcStart <= src.srcEnd
      // so only invalid case to check is:
      const srcStart = srcConfig.Start
      const srcEnd = srcConfig.End
      if (srcEnd >= srcCopySize || srcStart < 0) {
        throw new errors.InvalidArgumentError(
          `CopySrcOptions ${index} has invalid segment-to-copy [${srcStart}, ${srcEnd}] (size is ${srcCopySize})`,
        )
      }
      srcCopySize = srcEnd - srcStart + 1
    }

    // Only the last source may be less than `absMinPartSize`
    if (srcCopySize < PART_CONSTRAINTS.ABS_MIN_PART_SIZE && index < sourceFilesLength - 1) {
      throw new errors.InvalidArgumentError(
        `CopySrcOptions ${index} is too small (${srcCopySize}) and it is not the last part.`,
      )
    }

    // Is data to copy too large?
    totalSize += srcCopySize
    if (totalSize > PART_CONSTRAINTS.MAX_MULTIPART_PUT_OBJECT_SIZE) {
      throw new errors.InvalidArgumentError(`Cannot compose an object of size ${totalSize} (> 5TiB)`)
    }

    // record source size
    srcObjectSizes[index] = srcCopySize

    // calculate parts needed for current source
    totalParts += partsRequired(srcCopySize)
    // Do we need more parts than we are allowed?
    if (totalParts > PART_CONSTRAINTS.MAX_PARTS_COUNT) {
      throw new errors.InvalidArgumentError(
        `Your proposed compose object requires more than ${PART_CONSTRAINTS.MAX_PARTS_COUNT} parts`,
      )
    }

    return resItemStat
  })

  // Fast path: a single small part (or empty total) is a plain server-side copy.
  if ((totalParts === 1 && totalSize <= PART_CONSTRAINTS.MAX_PART_SIZE) || totalSize === 0) {
    return await this.copyObject(sourceObjList[0] as CopySourceOptions, destObjConfig) // use copyObjectV2
  }

  // preserve etag to avoid modification of object while copying.
  for (let i = 0; i < sourceFilesLength; i++) {
    ;(sourceObjList[i] as CopySourceOptions).MatchETag = (validatedStats[i] as BucketItemStat).etag
  }

  // Compute per-source even byte-range splits for upload-part-copy.
  const splitPartSizeList = validatedStats.map((resItemStat, idx) => {
    return calculateEvenSplits(srcObjectSizes[idx] as number, sourceObjList[idx] as CopySourceOptions)
  })

  // Expand the split ranges into one uploadPart config per byte range.
  const getUploadPartConfigList = (uploadId: string) => {
    const uploadPartConfigList: UploadPartConfig[] = []

    splitPartSizeList.forEach((splitSize, splitIndex: number) => {
      if (splitSize) {
        const { startIndex: startIdx, endIndex: endIdx, objInfo: objConfig } = splitSize

        const partIndex = splitIndex + 1 // part index starts from 1.
        const totalUploads = Array.from(startIdx)

        const headers = (sourceObjList[splitIndex] as CopySourceOptions).getHeaders()

        totalUploads.forEach((splitStart, upldCtrIdx) => {
          const splitEnd = endIdx[upldCtrIdx]

          const sourceObj = `${objConfig.Bucket}/${objConfig.Object}`
          // NOTE(review): `headers` is mutated across iterations; each pushed
          // config shares the same object, relying on uploads reading it before
          // the next mutation — confirm uploadPart copies/serializes promptly.
          headers['x-amz-copy-source'] = `${sourceObj}`
          headers['x-amz-copy-source-range'] = `bytes=${splitStart}-${splitEnd}`

          const uploadPartConfig = {
            bucketName: destObjConfig.Bucket,
            objectName: destObjConfig.Object,
            uploadID: uploadId,
            partNumber: partIndex,
            headers: headers,
            sourceObj: sourceObj,
          }

          uploadPartConfigList.push(uploadPartConfig)
        })
      }
    })

    return uploadPartConfigList
  }

  const uploadAllParts = async (uploadList: UploadPartConfig[]) => {
    const partUploads: Awaited<ReturnType<typeof this.uploadPart>>[] = []

    // Process upload parts in batches to avoid too many concurrent requests
    for (const batch of _.chunk(uploadList, maxConcurrency)) {
      const batchResults = await Promise.all(batch.map((item) => this.uploadPart(item)))

      partUploads.push(...batchResults)
    }

    // Process results here if needed
    return partUploads
  }

  const performUploadParts = async (uploadId: string) => {
    const uploadList = getUploadPartConfigList(uploadId)
    const partsRes = await uploadAllParts(uploadList)
    return partsRes.map((partCopy) => ({ etag: partCopy.etag, part: partCopy.part }))
  }

  const newUploadHeaders = destObjConfig.getHeaders()

  const uploadId = await this.initiateNewMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, newUploadHeaders)
  try {
    const partsDone = await performUploadParts(uploadId)
    return await this.completeMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId, partsDone)
  } catch (err) {
    // NOTE(review): the original error is swallowed here — the caller receives
    // the abort result instead of the failure cause. Consider rethrowing after abort.
    return await this.abortMultipartUpload(destObjConfig.Bucket, destObjConfig.Object, uploadId)
  }
}
|
|
2921
|
+
|
|
2922
|
+
/**
 * Generate a presigned (SigV4) URL for an arbitrary HTTP method.
 *
 * @param method - HTTP method the URL will authorize
 * @param bucketName - target bucket
 * @param objectName - target object
 * @param expires - expiry in seconds; defaults to PRESIGN_EXPIRY_DAYS_MAX.
 *   Typed loosely for legacy shifted-argument calls, but must be a number at runtime.
 * @param reqParams - extra query parameters to sign into the URL
 * @param requestDate - signing date; defaults to now
 * @throws AnonymousRequestError when the client has no credentials
 * @throws TypeError when expires/reqParams/requestDate have the wrong runtime type
 * @throws InvalidArgumentError when the bucket region cannot be resolved
 */
async presignedUrl(
  method: string,
  bucketName: string,
  objectName: string,
  expires?: number | PreSignRequestParams | undefined,
  reqParams?: PreSignRequestParams | Date,
  requestDate?: Date,
): Promise<string> {
  if (this.anonymous) {
    throw new errors.AnonymousRequestError(`Presigned ${method} url cannot be generated for anonymous requests`)
  }

  // Apply defaults before the runtime type checks below.
  if (!expires) {
    expires = PRESIGN_EXPIRY_DAYS_MAX
  }
  if (!reqParams) {
    reqParams = {}
  }
  if (!requestDate) {
    requestDate = new Date()
  }

  // Type assertions
  if (expires && typeof expires !== 'number') {
    throw new TypeError('expires should be of type "number"')
  }
  if (reqParams && typeof reqParams !== 'object') {
    throw new TypeError('reqParams should be of type "object"')
  }
  if ((requestDate && !(requestDate instanceof Date)) || (requestDate && isNaN(requestDate?.getTime()))) {
    throw new TypeError('requestDate should be of type "Date" and valid')
  }

  // Serialize extra parameters into the query string to be signed.
  const query = reqParams ? qs.stringify(reqParams) : undefined

  try {
    const region = await this.getBucketRegionAsync(bucketName)
    // Refresh credentials (e.g. STS) before signing so the URL stays valid.
    await this.checkAndRefreshCreds()
    const reqOptions = this.getRequestOptions({ method, region, bucketName, objectName, query })

    return presignSignatureV4(
      reqOptions,
      this.accessKey,
      this.secretKey,
      this.sessionToken,
      region,
      requestDate,
      expires,
    )
  } catch (err) {
    // Region lookup failure is surfaced as an argument error; anything else propagates.
    if (err instanceof errors.InvalidBucketNameError) {
      throw new errors.InvalidArgumentError(`Unable to get bucket region for ${bucketName}.`)
    }

    throw err
  }
}
|
|
2979
|
+
|
|
2980
|
+
async presignedGetObject(
|
|
2981
|
+
bucketName: string,
|
|
2982
|
+
objectName: string,
|
|
2983
|
+
expires?: number,
|
|
2984
|
+
respHeaders?: PreSignRequestParams | Date,
|
|
2985
|
+
requestDate?: Date,
|
|
2986
|
+
): Promise<string> {
|
|
2987
|
+
if (!isValidBucketName(bucketName)) {
|
|
2988
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
2989
|
+
}
|
|
2990
|
+
if (!isValidObjectName(objectName)) {
|
|
2991
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
2992
|
+
}
|
|
2993
|
+
|
|
2994
|
+
const validRespHeaders = [
|
|
2995
|
+
'response-content-type',
|
|
2996
|
+
'response-content-language',
|
|
2997
|
+
'response-expires',
|
|
2998
|
+
'response-cache-control',
|
|
2999
|
+
'response-content-disposition',
|
|
3000
|
+
'response-content-encoding',
|
|
3001
|
+
]
|
|
3002
|
+
validRespHeaders.forEach((header) => {
|
|
3003
|
+
// @ts-ignore
|
|
3004
|
+
if (respHeaders !== undefined && respHeaders[header] !== undefined && !isString(respHeaders[header])) {
|
|
3005
|
+
throw new TypeError(`response header ${header} should be of type "string"`)
|
|
3006
|
+
}
|
|
3007
|
+
})
|
|
3008
|
+
return this.presignedUrl('GET', bucketName, objectName, expires, respHeaders, requestDate)
|
|
3009
|
+
}
|
|
3010
|
+
|
|
3011
|
+
async presignedPutObject(bucketName: string, objectName: string, expires?: number): Promise<string> {
|
|
3012
|
+
if (!isValidBucketName(bucketName)) {
|
|
3013
|
+
throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
|
|
3014
|
+
}
|
|
3015
|
+
if (!isValidObjectName(objectName)) {
|
|
3016
|
+
throw new errors.InvalidObjectNameError(`Invalid object name: ${objectName}`)
|
|
3017
|
+
}
|
|
3018
|
+
|
|
3019
|
+
return this.presignedUrl('PUT', bucketName, objectName, expires)
|
|
3020
|
+
}
|
|
3021
|
+
|
|
3022
|
+
  /**
   * Create an empty PostPolicy for building a browser POST upload form.
   * Callers populate it and pass it to presignedPostPolicy() for signing.
   */
  newPostPolicy(): PostPolicy {
    return new PostPolicy()
  }
|
|
3025
|
+
|
|
3026
|
+
async presignedPostPolicy(postPolicy: PostPolicy): Promise<PostPolicyResult> {
|
|
3027
|
+
if (this.anonymous) {
|
|
3028
|
+
throw new errors.AnonymousRequestError('Presigned POST policy cannot be generated for anonymous requests')
|
|
3029
|
+
}
|
|
3030
|
+
if (!isObject(postPolicy)) {
|
|
3031
|
+
throw new TypeError('postPolicy should be of type "object"')
|
|
3032
|
+
}
|
|
3033
|
+
const bucketName = postPolicy.formData.bucket as string
|
|
3034
|
+
try {
|
|
3035
|
+
const region = await this.getBucketRegionAsync(bucketName)
|
|
3036
|
+
|
|
3037
|
+
const date = new Date()
|
|
3038
|
+
const dateStr = makeDateLong(date)
|
|
3039
|
+
await this.checkAndRefreshCreds()
|
|
3040
|
+
|
|
3041
|
+
if (!postPolicy.policy.expiration) {
|
|
3042
|
+
// 'expiration' is mandatory field for S3.
|
|
3043
|
+
// Set default expiration date of 7 days.
|
|
3044
|
+
const expires = new Date()
|
|
3045
|
+
expires.setSeconds(PRESIGN_EXPIRY_DAYS_MAX)
|
|
3046
|
+
postPolicy.setExpires(expires)
|
|
3047
|
+
}
|
|
3048
|
+
|
|
3049
|
+
postPolicy.policy.conditions.push(['eq', '$x-amz-date', dateStr])
|
|
3050
|
+
postPolicy.formData['x-amz-date'] = dateStr
|
|
3051
|
+
|
|
3052
|
+
postPolicy.policy.conditions.push(['eq', '$x-amz-algorithm', 'AWS4-HMAC-SHA256'])
|
|
3053
|
+
postPolicy.formData['x-amz-algorithm'] = 'AWS4-HMAC-SHA256'
|
|
3054
|
+
|
|
3055
|
+
postPolicy.policy.conditions.push(['eq', '$x-amz-credential', this.accessKey + '/' + getScope(region, date)])
|
|
3056
|
+
postPolicy.formData['x-amz-credential'] = this.accessKey + '/' + getScope(region, date)
|
|
3057
|
+
|
|
3058
|
+
if (this.sessionToken) {
|
|
3059
|
+
postPolicy.policy.conditions.push(['eq', '$x-amz-security-token', this.sessionToken])
|
|
3060
|
+
postPolicy.formData['x-amz-security-token'] = this.sessionToken
|
|
3061
|
+
}
|
|
3062
|
+
|
|
3063
|
+
const policyBase64 = Buffer.from(JSON.stringify(postPolicy.policy)).toString('base64')
|
|
3064
|
+
|
|
3065
|
+
postPolicy.formData.policy = policyBase64
|
|
3066
|
+
|
|
3067
|
+
postPolicy.formData['x-amz-signature'] = postPresignSignatureV4(region, date, this.secretKey, policyBase64)
|
|
3068
|
+
const opts = {
|
|
3069
|
+
region: region,
|
|
3070
|
+
bucketName: bucketName,
|
|
3071
|
+
method: 'POST',
|
|
3072
|
+
}
|
|
3073
|
+
const reqOptions = this.getRequestOptions(opts)
|
|
3074
|
+
const portStr = this.port == 80 || this.port === 443 ? '' : `:${this.port.toString()}`
|
|
3075
|
+
const urlStr = `${reqOptions.protocol}//${reqOptions.host}${portStr}${reqOptions.path}`
|
|
3076
|
+
return { postURL: urlStr, formData: postPolicy.formData }
|
|
3077
|
+
} catch (err) {
|
|
3078
|
+
if (err instanceof errors.InvalidBucketNameError) {
|
|
3079
|
+
throw new errors.InvalidArgumentError(`Unable to get bucket region for ${bucketName}.`)
|
|
3080
|
+
}
|
|
3081
|
+
|
|
3082
|
+
throw err
|
|
3083
|
+
}
|
|
3084
|
+
}
|
|
3085
|
+
// list a batch of objects
|
|
3086
|
+
async listObjectsQuery(bucketName: string, prefix?: string, marker?: string, listQueryOpts?: ListObjectQueryOpts) {
|
|
3087
|
+
if (!isValidBucketName(bucketName)) {
|
|
3088
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
3089
|
+
}
|
|
3090
|
+
if (!isString(prefix)) {
|
|
3091
|
+
throw new TypeError('prefix should be of type "string"')
|
|
3092
|
+
}
|
|
3093
|
+
if (marker && !isString(marker)) {
|
|
3094
|
+
throw new TypeError('marker should be of type "string"')
|
|
3095
|
+
}
|
|
3096
|
+
|
|
3097
|
+
if (listQueryOpts && !isObject(listQueryOpts)) {
|
|
3098
|
+
throw new TypeError('listQueryOpts should be of type "object"')
|
|
3099
|
+
}
|
|
3100
|
+
let { Delimiter, MaxKeys, IncludeVersion, versionIdMarker, keyMarker } = listQueryOpts as ListObjectQueryOpts
|
|
3101
|
+
|
|
3102
|
+
if (!isString(Delimiter)) {
|
|
3103
|
+
throw new TypeError('Delimiter should be of type "string"')
|
|
3104
|
+
}
|
|
3105
|
+
if (!isNumber(MaxKeys)) {
|
|
3106
|
+
throw new TypeError('MaxKeys should be of type "number"')
|
|
3107
|
+
}
|
|
3108
|
+
|
|
3109
|
+
const queries = []
|
|
3110
|
+
// escape every value in query string, except maxKeys
|
|
3111
|
+
queries.push(`prefix=${uriEscape(prefix)}`)
|
|
3112
|
+
queries.push(`delimiter=${uriEscape(Delimiter)}`)
|
|
3113
|
+
queries.push(`encoding-type=url`)
|
|
3114
|
+
|
|
3115
|
+
if (IncludeVersion) {
|
|
3116
|
+
queries.push(`versions`)
|
|
3117
|
+
}
|
|
3118
|
+
|
|
3119
|
+
if (IncludeVersion) {
|
|
3120
|
+
// v1 version listing..
|
|
3121
|
+
if (keyMarker) {
|
|
3122
|
+
queries.push(`key-marker=${keyMarker}`)
|
|
3123
|
+
}
|
|
3124
|
+
if (versionIdMarker) {
|
|
3125
|
+
queries.push(`version-id-marker=${versionIdMarker}`)
|
|
3126
|
+
}
|
|
3127
|
+
} else if (marker) {
|
|
3128
|
+
marker = uriEscape(marker)
|
|
3129
|
+
queries.push(`marker=${marker}`)
|
|
3130
|
+
}
|
|
3131
|
+
|
|
3132
|
+
// no need to escape maxKeys
|
|
3133
|
+
if (MaxKeys) {
|
|
3134
|
+
if (MaxKeys >= 1000) {
|
|
3135
|
+
MaxKeys = 1000
|
|
3136
|
+
}
|
|
3137
|
+
queries.push(`max-keys=${MaxKeys}`)
|
|
3138
|
+
}
|
|
3139
|
+
queries.sort()
|
|
3140
|
+
let query = ''
|
|
3141
|
+
if (queries.length > 0) {
|
|
3142
|
+
query = `${queries.join('&')}`
|
|
3143
|
+
}
|
|
3144
|
+
|
|
3145
|
+
const method = 'GET'
|
|
3146
|
+
const res = await this.makeRequestAsync({ method, bucketName, query })
|
|
3147
|
+
const body = await readAsString(res)
|
|
3148
|
+
const listQryList = parseListObjects(body)
|
|
3149
|
+
return listQryList
|
|
3150
|
+
}
|
|
3151
|
+
|
|
3152
|
+
  /**
   * List objects in a bucket as an object-mode Readable stream.
   *
   * Each _read() emits one object; when the local buffer drains, the next
   * listing page is fetched via listObjectsQuery. With listOpts.IncludeVersion
   * set, pagination uses keyMarker/versionIdMarker instead of marker.
   *
   * @param bucketName - bucket to list.
   * @param prefix - only keys beginning with this prefix (default '').
   * @param recursive - when false (default), '/' is used as delimiter so the
   *   listing behaves like a directory level.
   * @param listOpts - extra options (currently IncludeVersion).
   * @returns an object-mode stream of ObjectInfo; errors are emitted as
   *   'error' events on the stream.
   */
  listObjects(
    bucketName: string,
    prefix?: string,
    recursive?: boolean,
    listOpts?: ListObjectQueryOpts | undefined,
  ): BucketStream<ObjectInfo> {
    if (prefix === undefined) {
      prefix = ''
    }
    if (recursive === undefined) {
      recursive = false
    }
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidPrefix(prefix)) {
      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
    }
    if (!isString(prefix)) {
      throw new TypeError('prefix should be of type "string"')
    }
    if (!isBoolean(recursive)) {
      throw new TypeError('recursive should be of type "boolean"')
    }
    if (listOpts && !isObject(listOpts)) {
      throw new TypeError('listOpts should be of type "object"')
    }
    // Pagination state captured by the _read closure below.
    let marker: string | undefined = ''
    let keyMarker: string | undefined = ''
    let versionIdMarker: string | undefined = ''
    let objects: ObjectInfo[] = []
    let ended = false
    const readStream: stream.Readable = new stream.Readable({ objectMode: true })
    readStream._read = async () => {
      // push one object per _read()
      if (objects.length) {
        readStream.push(objects.shift())
        return
      }
      // Buffer drained and the server reported no further pages: end the stream.
      if (ended) {
        return readStream.push(null)
      }

      try {
        const listQueryOpts = {
          Delimiter: recursive ? '' : '/', // if recursive is false set delimiter to '/'
          MaxKeys: 1000,
          IncludeVersion: listOpts?.IncludeVersion,
          // version listing specific options
          keyMarker: keyMarker,
          versionIdMarker: versionIdMarker,
        }

        const result: ListObjectQueryRes = await this.listObjectsQuery(bucketName, prefix, marker, listQueryOpts)
        if (result.isTruncated) {
          // Carry whichever markers the server returned into the next page.
          marker = result.nextMarker || undefined
          if (result.keyMarker) {
            keyMarker = result.keyMarker
          }
          if (result.versionIdMarker) {
            versionIdMarker = result.versionIdMarker
          }
        } else {
          ended = true
        }
        if (result.objects) {
          objects = result.objects
        }
        // Re-enter _read() to push the first object of the freshly fetched page.
        // @ts-ignore
        readStream._read()
      } catch (err) {
        // Listing failures surface as stream errors rather than thrown exceptions.
        readStream.emit('error', err)
      }
    }
    return readStream
  }
|
|
3228
|
+
|
|
3229
|
+
async listObjectsV2Query(
|
|
3230
|
+
bucketName: string,
|
|
3231
|
+
prefix: string,
|
|
3232
|
+
continuationToken: string,
|
|
3233
|
+
delimiter: string,
|
|
3234
|
+
maxKeys: number,
|
|
3235
|
+
startAfter: string,
|
|
3236
|
+
): Promise<ListObjectV2Res> {
|
|
3237
|
+
if (!isValidBucketName(bucketName)) {
|
|
3238
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
3239
|
+
}
|
|
3240
|
+
if (!isString(prefix)) {
|
|
3241
|
+
throw new TypeError('prefix should be of type "string"')
|
|
3242
|
+
}
|
|
3243
|
+
if (!isString(continuationToken)) {
|
|
3244
|
+
throw new TypeError('continuationToken should be of type "string"')
|
|
3245
|
+
}
|
|
3246
|
+
if (!isString(delimiter)) {
|
|
3247
|
+
throw new TypeError('delimiter should be of type "string"')
|
|
3248
|
+
}
|
|
3249
|
+
if (!isNumber(maxKeys)) {
|
|
3250
|
+
throw new TypeError('maxKeys should be of type "number"')
|
|
3251
|
+
}
|
|
3252
|
+
if (!isString(startAfter)) {
|
|
3253
|
+
throw new TypeError('startAfter should be of type "string"')
|
|
3254
|
+
}
|
|
3255
|
+
|
|
3256
|
+
const queries = []
|
|
3257
|
+
queries.push(`list-type=2`)
|
|
3258
|
+
queries.push(`encoding-type=url`)
|
|
3259
|
+
queries.push(`prefix=${uriEscape(prefix)}`)
|
|
3260
|
+
queries.push(`delimiter=${uriEscape(delimiter)}`)
|
|
3261
|
+
|
|
3262
|
+
if (continuationToken) {
|
|
3263
|
+
queries.push(`continuation-token=${uriEscape(continuationToken)}`)
|
|
3264
|
+
}
|
|
3265
|
+
if (startAfter) {
|
|
3266
|
+
queries.push(`start-after=${uriEscape(startAfter)}`)
|
|
3267
|
+
}
|
|
3268
|
+
if (maxKeys) {
|
|
3269
|
+
if (maxKeys >= 1000) {
|
|
3270
|
+
maxKeys = 1000
|
|
3271
|
+
}
|
|
3272
|
+
queries.push(`max-keys=${maxKeys}`)
|
|
3273
|
+
}
|
|
3274
|
+
queries.sort()
|
|
3275
|
+
let query = ''
|
|
3276
|
+
if (queries.length > 0) {
|
|
3277
|
+
query = `${queries.join('&')}`
|
|
3278
|
+
}
|
|
3279
|
+
|
|
3280
|
+
const method = 'GET'
|
|
3281
|
+
const res = await this.makeRequestAsync({ method, bucketName, query })
|
|
3282
|
+
const body = await readAsString(res)
|
|
3283
|
+
return parseListObjectsV2(body)
|
|
3284
|
+
}
|
|
3285
|
+
|
|
3286
|
+
  /**
   * List objects using the ListObjectsV2 API, as an object-mode Readable stream.
   *
   * Each _read() emits one item; when the local buffer drains, the next page
   * is fetched with the continuation token returned by the previous request.
   *
   * @param bucketName - bucket to list.
   * @param prefix - only keys beginning with this prefix (default '').
   * @param recursive - when false (default), '/' is used as delimiter.
   * @param startAfter - start listing after this key (default '').
   * @returns an object-mode stream of BucketItem; errors are emitted as
   *   'error' events on the stream.
   */
  listObjectsV2(
    bucketName: string,
    prefix?: string,
    recursive?: boolean,
    startAfter?: string,
  ): BucketStream<BucketItem> {
    if (prefix === undefined) {
      prefix = ''
    }
    if (recursive === undefined) {
      recursive = false
    }
    if (startAfter === undefined) {
      startAfter = ''
    }
    if (!isValidBucketName(bucketName)) {
      throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
    }
    if (!isValidPrefix(prefix)) {
      throw new errors.InvalidPrefixError(`Invalid prefix : ${prefix}`)
    }
    if (!isString(prefix)) {
      throw new TypeError('prefix should be of type "string"')
    }
    if (!isBoolean(recursive)) {
      throw new TypeError('recursive should be of type "boolean"')
    }
    if (!isString(startAfter)) {
      throw new TypeError('startAfter should be of type "string"')
    }

    const delimiter = recursive ? '' : '/'
    const prefixStr = prefix
    const startAfterStr = startAfter
    // Pagination state captured by the _read closure below.
    let continuationToken = ''
    let objects: BucketItem[] = []
    let ended = false
    const readStream: stream.Readable = new stream.Readable({ objectMode: true })
    readStream._read = async () => {
      // Push one buffered item per _read() call.
      if (objects.length) {
        readStream.push(objects.shift())
        return
      }
      // Buffer drained and no further pages: end the stream.
      if (ended) {
        return readStream.push(null)
      }

      try {
        const result = await this.listObjectsV2Query(
          bucketName,
          prefixStr,
          continuationToken,
          delimiter,
          1000,
          startAfterStr,
        )
        if (result.isTruncated) {
          continuationToken = result.nextContinuationToken
        } else {
          ended = true
        }
        objects = result.objects
        // Re-enter _read() to push the first item of the freshly fetched page.
        // @ts-ignore
        readStream._read()
      } catch (err) {
        // Listing failures surface as stream errors rather than thrown exceptions.
        readStream.emit('error', err)
      }
    }
    return readStream
  }
|
|
3356
|
+
|
|
3357
|
+
async setBucketNotification(bucketName: string, config: NotificationConfig): Promise<void> {
|
|
3358
|
+
if (!isValidBucketName(bucketName)) {
|
|
3359
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
3360
|
+
}
|
|
3361
|
+
if (!isObject(config)) {
|
|
3362
|
+
throw new TypeError('notification config should be of type "Object"')
|
|
3363
|
+
}
|
|
3364
|
+
const method = 'PUT'
|
|
3365
|
+
const query = 'notification'
|
|
3366
|
+
const builder = new xml2js.Builder({
|
|
3367
|
+
rootName: 'NotificationConfiguration',
|
|
3368
|
+
renderOpts: { pretty: false },
|
|
3369
|
+
headless: true,
|
|
3370
|
+
})
|
|
3371
|
+
const payload = builder.buildObject(config)
|
|
3372
|
+
await this.makeRequestAsyncOmit({ method, bucketName, query }, payload)
|
|
3373
|
+
}
|
|
3374
|
+
|
|
3375
|
+
  /**
   * Clear the bucket's notification configuration by uploading an empty
   * NotificationConfig via setBucketNotification.
   */
  async removeAllBucketNotification(bucketName: string): Promise<void> {
    await this.setBucketNotification(bucketName, new NotificationConfig())
  }
|
|
3378
|
+
|
|
3379
|
+
async getBucketNotification(bucketName: string): Promise<NotificationConfigResult> {
|
|
3380
|
+
if (!isValidBucketName(bucketName)) {
|
|
3381
|
+
throw new errors.InvalidBucketNameError('Invalid bucket name: ' + bucketName)
|
|
3382
|
+
}
|
|
3383
|
+
const method = 'GET'
|
|
3384
|
+
const query = 'notification'
|
|
3385
|
+
const res = await this.makeRequestAsync({ method, bucketName, query })
|
|
3386
|
+
const body = await readAsString(res)
|
|
3387
|
+
return parseBucketNotification(body)
|
|
3388
|
+
}
|
|
3389
|
+
|
|
3390
|
+
listenBucketNotification(
|
|
3391
|
+
bucketName: string,
|
|
3392
|
+
prefix: string,
|
|
3393
|
+
suffix: string,
|
|
3394
|
+
events: NotificationEvent[],
|
|
3395
|
+
): NotificationPoller {
|
|
3396
|
+
if (!isValidBucketName(bucketName)) {
|
|
3397
|
+
throw new errors.InvalidBucketNameError(`Invalid bucket name: ${bucketName}`)
|
|
3398
|
+
}
|
|
3399
|
+
if (!isString(prefix)) {
|
|
3400
|
+
throw new TypeError('prefix must be of type string')
|
|
3401
|
+
}
|
|
3402
|
+
if (!isString(suffix)) {
|
|
3403
|
+
throw new TypeError('suffix must be of type string')
|
|
3404
|
+
}
|
|
3405
|
+
if (!Array.isArray(events)) {
|
|
3406
|
+
throw new TypeError('events must be of type Array')
|
|
3407
|
+
}
|
|
3408
|
+
const listener = new NotificationPoller(this, bucketName, prefix, suffix, events)
|
|
3409
|
+
listener.start()
|
|
3410
|
+
return listener
|
|
3411
|
+
}
|
|
3412
|
+
}
|