@pi-r/aws 0.11.2 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/client/index.js CHANGED
@@ -1,27 +1,10 @@
1
1
  "use strict";
2
- exports.CLOUD_UPLOAD_CHUNK = exports.CLOUD_UPLOAD_STREAM = void 0;
3
- exports.validateStorage = validateStorage;
4
- exports.validateDatabase = validateDatabase;
5
- exports.createStorageClient = createStorageClient;
6
- exports.createDatabaseClient = createDatabaseClient;
7
- exports.createBucket = createBucket;
8
- exports.createBucketV2 = createBucketV2;
9
- exports.setBucketPolicy = setBucketPolicy;
10
- exports.setBucketTagging = setBucketTagging;
11
- exports.setBucketWebsite = setBucketWebsite;
12
- exports.deleteObjects = deleteObjects;
13
- exports.deleteObjectsV2 = deleteObjectsV2;
14
- exports.deleteObjectsV3 = deleteObjectsV3;
15
- exports.copyObject = copyObject;
16
- exports.executeQuery = executeQuery;
17
- exports.executeBatchQuery = executeBatchQuery;
18
- exports.parseAttributeValue = parseAttributeValue;
19
- exports.isNoSuchBucket = isNoSuchBucket;
20
- const aws = require("aws-sdk");
21
- const Cloud = require("@e-mc/cloud");
22
- const types_1 = require("@e-mc/types");
23
- const util_1 = require("@e-mc/cloud/util");
24
- const aws_lib_1 = require("@pi-r/aws-lib");
2
+
3
+ const aws = require('aws-sdk');
4
+ const Cloud = require('@e-mc/cloud');
5
+ const { isArray, isErrorCode, isPlainObject, isString } = require('@e-mc/types');
6
+ const { formatError } = require('@e-mc/cloud/util');
7
+ const { checkBucketCannedACL, getBucketPublicReadPolicy, getPrivatePolicy, getPublicReadPolicy, isAccessDefined, isDatabaseDefined, isEnvDefined, isProviderChainDefined, isSharedCredentialsDefined, setDatabaseEndpoint } = require('@pi-r/aws-lib');
25
8
  const ACP_AUTHENTICATEDREAD = {
26
9
  Grants: [
27
10
  {
@@ -39,7 +22,7 @@ async function setCannedAcl(S3, Bucket, ACL, service = "aws", recursive) {
39
22
  this.formatMessage(64, service, ['Grant ' + ACL, Bucket], '', Cloud.optionsLogMessage(recursive ? 'DELAYED' : 'COMMAND'));
40
23
  }
41
24
  else if (!recursive) {
42
- if ((0, types_1.isErrorCode)(err, 'OperationAborted')) {
25
+ if (isErrorCode(err, 'OperationAborted')) {
43
26
  setTimeout(() => {
44
27
  void setCannedAcl.call(this, S3, Bucket, ACL, service, true);
45
28
  }, 60000);
@@ -60,19 +43,19 @@ async function setCannedAcl(S3, Bucket, ACL, service = "aws", recursive) {
60
43
  else {
61
44
  switch (ACL) {
62
45
  case 1:
63
- promise = S3.putBucketPolicy({ Bucket, Policy: (0, aws_lib_1.getBucketPublicReadPolicy)(Bucket) }).promise();
46
+ promise = S3.putBucketPolicy({ Bucket, Policy: getBucketPublicReadPolicy(Bucket) }).promise();
64
47
  break;
65
48
  case "public-read":
66
- promise = S3.putBucketPolicy({ Bucket, Policy: (0, aws_lib_1.getPublicReadPolicy)(Bucket) }).promise();
49
+ promise = S3.putBucketPolicy({ Bucket, Policy: getPublicReadPolicy(Bucket) }).promise();
67
50
  break;
68
51
  case "public-read-write":
69
- promise = S3.putBucketPolicy({ Bucket, Policy: (0, aws_lib_1.getPublicReadPolicy)(Bucket, false, true) }).promise();
52
+ promise = S3.putBucketPolicy({ Bucket, Policy: getPublicReadPolicy(Bucket, false, true) }).promise();
70
53
  break;
71
54
  case "authenticated-read":
72
- promise = S3.putBucketPolicy({ Bucket, Policy: (0, aws_lib_1.getPublicReadPolicy)(Bucket, true) }).promise();
55
+ promise = S3.putBucketPolicy({ Bucket, Policy: getPublicReadPolicy(Bucket, true) }).promise();
73
56
  break;
74
57
  default:
75
- promise = S3.putBucketPolicy({ Bucket, Policy: (0, aws_lib_1.getPrivatePolicy)(Bucket) }).promise();
58
+ promise = S3.putBucketPolicy({ Bucket, Policy: getPrivatePolicy(Bucket) }).promise();
76
59
  break;
77
60
  }
78
61
  }
@@ -82,10 +65,10 @@ async function setCannedAcl(S3, Bucket, ACL, service = "aws", recursive) {
82
65
  .catch(callback);
83
66
  }
84
67
  function validateStorage(credential) {
85
- return !!((0, aws_lib_1.isAccessDefined)(credential) || (0, aws_lib_1.isEnvDefined)() || credential.fromPath || credential.profile || (0, aws_lib_1.isProviderChainDefined)());
68
+ return !!(isAccessDefined(credential) || isEnvDefined() || credential.fromPath || credential.profile || isProviderChainDefined());
86
69
  }
87
70
  function validateDatabase(credential, data) {
88
- return (0, aws_lib_1.isDatabaseDefined)(credential, data) && validateStorage(credential);
71
+ return isDatabaseDefined(credential, data) && validateStorage(credential);
89
72
  }
90
73
  function createStorageClient(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
91
74
  try {
@@ -96,7 +79,7 @@ function createStorageClient(credential, service = "aws", sdk = "aws-sdk/clients
96
79
  client.config.loadFromPath(fromPath);
97
80
  return client;
98
81
  }
99
- if (profile || (0, aws_lib_1.isSharedCredentialsDefined)() && !(0, aws_lib_1.isAccessDefined)(credential) && !(0, aws_lib_1.isEnvDefined)()) {
82
+ if (profile || isSharedCredentialsDefined() && !isAccessDefined(credential) && !isEnvDefined()) {
100
83
  credential = new aws.SharedIniFileCredentials({ profile });
101
84
  }
102
85
  return new aws.S3(credential);
@@ -118,14 +101,14 @@ function createDatabaseClient(credential) {
118
101
  catch {
119
102
  }
120
103
  }
121
- else if (profile || (0, aws_lib_1.isSharedCredentialsDefined)() && !(0, aws_lib_1.isAccessDefined)(credential) && !(0, aws_lib_1.isEnvDefined)()) {
104
+ else if (profile || isSharedCredentialsDefined() && !isAccessDefined(credential) && !isEnvDefined()) {
122
105
  options = new aws.SharedIniFileCredentials({ profile });
123
106
  }
124
107
  else {
125
108
  options = credential;
126
109
  }
127
110
  if (options?.endpoint && !options.region) {
128
- (0, aws_lib_1.setDatabaseEndpoint)(options);
111
+ setDatabaseEndpoint(options);
129
112
  }
130
113
  return new aws.DynamoDB.DocumentClient(options);
131
114
  }
@@ -133,7 +116,7 @@ async function createBucket(credential, Bucket, publicRead, service, sdk) {
133
116
  return createBucketV2.call(this, credential, Bucket, publicRead ? "public-read" : undefined, undefined, service, sdk);
134
117
  }
135
118
  async function createBucketV2(credential, Bucket, ACL, options, service = "aws", sdk = "aws-sdk/clients/s3") {
136
- ACL = ACL === 1 ? 1 : (0, aws_lib_1.checkBucketCannedACL)(ACL);
119
+ ACL = ACL === 1 ? 1 : checkBucketCannedACL(ACL);
137
120
  const S3 = createStorageClient.call(this, credential, service, sdk);
138
121
  return S3.headBucket({ Bucket }).promise()
139
122
  .then(async () => {
@@ -157,17 +140,14 @@ async function createBucketV2(credential, Bucket, ACL, options, service = "aws",
157
140
  return true;
158
141
  })
159
142
  .catch(async (err) => {
160
- switch (err instanceof Error && err.code) {
161
- case 'BucketAlreadyExists':
162
- case 'BucketAlreadyOwnedByYou':
163
- if (ACL) {
164
- await setCannedAcl.call(this, S3, Bucket, ACL, service);
165
- }
166
- return true;
167
- default:
168
- this.formatFail(64, service, ["Unable to create bucket", Bucket], err, Cloud.optionsLogMessage('FAIL'));
169
- return false;
143
+ if (isErrorCode(err, 'BucketAlreadyExists', 'BucketAlreadyOwnedByYou')) {
144
+ if (ACL) {
145
+ await setCannedAcl.call(this, S3, Bucket, ACL, service);
146
+ }
147
+ return true;
170
148
  }
149
+ this.formatFail(64, service, ["Unable to create bucket", Bucket], err, Cloud.optionsLogMessage('FAIL'));
150
+ return false;
171
151
  });
172
152
  });
173
153
  }
@@ -179,7 +159,7 @@ async function setBucketPolicy(credential, Bucket, options, service = "aws", sdk
179
159
  options.AccessControlPolicy = ACP_AUTHENTICATEDREAD;
180
160
  delete options.ACL;
181
161
  }
182
- return ('PublicAccessBlockConfiguration' in options ? S3.putPublicAccessBlock(options) : 'Policy' in options && (0, types_1.isString)(options.Policy) && !ibm ? S3.putBucketPolicy(options) : S3.putBucketAcl(options)).promise()
162
+ return ('PublicAccessBlockConfiguration' in options ? S3.putPublicAccessBlock(options) : 'Policy' in options && isString(options.Policy) && !ibm ? S3.putBucketPolicy(options) : S3.putBucketAcl(options)).promise()
183
163
  .then(() => {
184
164
  this.formatMessage(64, service, ["Bucket policy configured", Bucket], '', Cloud.optionsLogMessage('COMMAND'));
185
165
  return true;
@@ -192,7 +172,7 @@ async function setBucketPolicy(credential, Bucket, options, service = "aws", sdk
192
172
  });
193
173
  }
194
174
  async function setBucketTagging(credential, Bucket, options, service = "aws", sdk = "aws-sdk/clients/s3") {
195
- if (!(0, types_1.isPlainObject)(options) || !Array.isArray(options.Tagging?.TagSet)) {
175
+ if (!isPlainObject(options) || !Array.isArray(options.Tagging?.TagSet)) {
196
176
  return false;
197
177
  }
198
178
  const S3 = createStorageClient.call(this, credential, service, sdk);
@@ -221,10 +201,10 @@ async function setBucketWebsite(credential, Bucket, options, service = "aws", sd
221
201
  const S3 = createStorageClient.call(this, credential, service, sdk);
222
202
  const WebsiteConfiguration = {};
223
203
  const { indexPage: Suffix, errorPage: Key } = options;
224
- if ((0, types_1.isString)(Suffix)) {
204
+ if (isString(Suffix)) {
225
205
  WebsiteConfiguration.IndexDocument = { Suffix };
226
206
  }
227
- if ((0, types_1.isString)(Key)) {
207
+ if (isString(Key)) {
228
208
  WebsiteConfiguration.ErrorDocument = { Key };
229
209
  }
230
210
  return S3.putBucketWebsite({ Bucket, WebsiteConfiguration }).promise()
@@ -239,8 +219,8 @@ async function setBucketWebsite(credential, Bucket, options, service = "aws", sd
239
219
  return false;
240
220
  });
241
221
  }
242
- async function deleteObjects(credential, Bucket, service, sdk, recursive = true) {
243
- return deleteObjectsV2.call(this, credential, Bucket, recursive, service, sdk);
222
+ async function deleteObjects(credential, Bucket, options, service, sdk) {
223
+ return deleteObjectsV3.call(this, credential, Bucket, options, service, sdk);
244
224
  }
245
225
  async function deleteObjectsV2(credential, Bucket, recursive = true, service, sdk) {
246
226
  return deleteObjectsV3.call(this, credential, Bucket, { recursive, Bucket }, service, sdk);
@@ -264,7 +244,7 @@ async function deleteObjectsV3(credential, Bucket, options = {}, service = "aws"
264
244
  return S3.deleteObjects({ Bucket, Delete: { Objects } }).promise()
265
245
  .then(data => {
266
246
  const Deleted = data.Deleted;
267
- if ((0, types_1.isArray)(Deleted)) {
247
+ if (isArray(Deleted)) {
268
248
  const files = Deleted.length + ' files';
269
249
  this.formatMessage(64, service, ["Bucket emptied" + ` (${recursive ? 'recursive' : files})`, Bucket], recursive ? files : '', Cloud.optionsLogMessage('COMMAND'));
270
250
  }
@@ -284,7 +264,7 @@ async function copyObject(credential, BucketSource, KeySource, Bucket, Key, opti
284
264
  const S3 = createStorageClient.call(this, credential, service, sdk);
285
265
  const index = Key.lastIndexOf('/');
286
266
  if (index !== -1) {
287
- await S3.putObject({ Bucket, Key: Key.substring(0, index + 1), Body: Buffer.from(''), ContentLength: 0 }).promise().catch(() => { });
267
+ await S3.putObject({ Bucket, Key: Key.slice(0, index + 1), Body: Buffer.from(''), ContentLength: 0 }).promise().catch(() => { });
288
268
  }
289
269
  const CopySource = BucketSource + '/' + KeySource;
290
270
  return S3.copyObject({ ...options, Bucket, Key, CopySource }).promise()
@@ -305,16 +285,16 @@ async function executeBatchQuery(credential, batch, sessionKey) {
305
285
  const cacheValue = { value: this.valueOfKey(credential, 'cache'), sessionKey };
306
286
  let client;
307
287
  const createClient = () => client ||= createDatabaseClient.call(this, credential);
308
- (0, aws_lib_1.setDatabaseEndpoint)(credential);
288
+ setDatabaseEndpoint(credential);
309
289
  for (let i = 0; i < length; ++i) {
310
290
  const item = batch[i];
311
291
  let { service, table: TableName, id, query, key, limit = 0, update, ignoreCache } = item;
312
292
  const useCache = caching && ignoreCache !== true;
313
293
  cacheValue.exclusiveOf = Array.isArray(ignoreCache) ? ignoreCache : undefined;
314
294
  let rows, queryString = '';
315
- if (key && (id || (0, types_1.isPlainObject)(key))) {
295
+ if (key && (id || isPlainObject(key))) {
316
296
  if (!TableName) {
317
- throw (0, util_1.formatError)(item, "Missing database table");
297
+ throw formatError(item, "Missing database table");
318
298
  }
319
299
  if (useCache) {
320
300
  queryString = TableName + '_' + Cloud.asString(key, true) + (id !== undefined ? '_' + Cloud.asString(id, true) : '');
@@ -323,7 +303,7 @@ async function executeBatchQuery(credential, batch, sessionKey) {
323
303
  continue;
324
304
  }
325
305
  }
326
- const Key = (0, types_1.isPlainObject)(key) ? key : { [key]: id };
306
+ const Key = isPlainObject(key) ? key : { [key]: id };
327
307
  const command = { TableName, Key };
328
308
  client = createClient();
329
309
  if (update) {
@@ -334,12 +314,12 @@ async function executeBatchQuery(credential, batch, sessionKey) {
334
314
  rows = [Item];
335
315
  }
336
316
  }
337
- else if ((0, types_1.isPlainObject)(query)) {
317
+ else if (isPlainObject(query)) {
338
318
  if (TableName) {
339
319
  query.TableName = TableName;
340
320
  }
341
321
  if (!TableName) {
342
- throw (0, util_1.formatError)(item, "Missing database table");
322
+ throw formatError(item, "Missing database table");
343
323
  }
344
324
  if (limit > 0) {
345
325
  query.Limit = limit;
@@ -353,14 +333,14 @@ async function executeBatchQuery(credential, batch, sessionKey) {
353
333
  rows = Items;
354
334
  }
355
335
  }
356
- else if ((0, types_1.isArray)(query)) {
336
+ else if (isArray(query)) {
357
337
  let params = (item.params || {});
358
- if (!(0, types_1.isPlainObject)(params.RequestItems)) {
338
+ if (!isPlainObject(params.RequestItems)) {
359
339
  params.RequestItems = {};
360
340
  }
361
341
  TableName ||= Object.keys(params.RequestItems)[0];
362
342
  if (!TableName) {
363
- throw (0, util_1.formatError)(item, "Missing database table");
343
+ throw formatError(item, "Missing database table");
364
344
  }
365
345
  const Item = params.RequestItems[TableName] ||= {};
366
346
  Item.Keys = query;
@@ -381,7 +361,7 @@ async function executeBatchQuery(credential, batch, sessionKey) {
381
361
  params.TableName = TableName;
382
362
  }
383
363
  else if (!params.TableName) {
384
- throw (0, util_1.formatError)(item, "Missing database table");
364
+ throw formatError(item, "Missing database table");
385
365
  }
386
366
  }
387
367
  if (useCache && (rows = this.getCacheResult(service, credential, queryString = Cloud.asString(params, true), cacheValue, ignoreCache))) {
@@ -403,7 +383,7 @@ async function executeBatchQuery(credential, batch, sessionKey) {
403
383
  }
404
384
  }
405
385
  else {
406
- throw (0, util_1.formatError)(item, "Missing database query");
386
+ throw formatError(item, "Missing database query");
407
387
  }
408
388
  result[i] = this.setQueryResult(service, credential, queryString, rows, cacheValue);
409
389
  }
@@ -439,7 +419,27 @@ function parseAttributeValue(value) {
439
419
  return { NULL: true };
440
420
  }
441
421
  function isNoSuchBucket(err) {
442
- return (0, types_1.isErrorCode)(err, 'NoSuchBucket');
422
+ return isErrorCode(err, 'NoSuchBucket');
443
423
  }
444
- exports.CLOUD_UPLOAD_STREAM = true;
445
- exports.CLOUD_UPLOAD_CHUNK = true;
424
+ const CLOUD_UPLOAD_STREAM = true;
425
+ const CLOUD_UPLOAD_CHUNK = true;
426
+
427
+ exports.CLOUD_UPLOAD_CHUNK = CLOUD_UPLOAD_CHUNK;
428
+ exports.CLOUD_UPLOAD_STREAM = CLOUD_UPLOAD_STREAM;
429
+ exports.copyObject = copyObject;
430
+ exports.createBucket = createBucket;
431
+ exports.createBucketV2 = createBucketV2;
432
+ exports.createDatabaseClient = createDatabaseClient;
433
+ exports.createStorageClient = createStorageClient;
434
+ exports.deleteObjects = deleteObjects;
435
+ exports.deleteObjectsV2 = deleteObjectsV2;
436
+ exports.deleteObjectsV3 = deleteObjectsV3;
437
+ exports.executeBatchQuery = executeBatchQuery;
438
+ exports.executeQuery = executeQuery;
439
+ exports.isNoSuchBucket = isNoSuchBucket;
440
+ exports.parseAttributeValue = parseAttributeValue;
441
+ exports.setBucketPolicy = setBucketPolicy;
442
+ exports.setBucketTagging = setBucketTagging;
443
+ exports.setBucketWebsite = setBucketWebsite;
444
+ exports.validateDatabase = validateDatabase;
445
+ exports.validateStorage = validateStorage;
package/download/index.d.ts ADDED
@@ -0,0 +1,5 @@
1
+ import type { DownloadHost } from '@e-mc/cloud/types';
2
+
3
+ declare const download: DownloadHost;
4
+
5
+ export = download;
package/download/index.js CHANGED
@@ -1,35 +1,36 @@
1
1
  "use strict";
2
- const Cloud = require("@e-mc/cloud");
3
- const types_1 = require("@e-mc/types");
4
- const util_1 = require("@e-mc/cloud/util");
5
- const client_1 = require("@pi-r/aws");
2
+
3
+ const Cloud = require('@e-mc/cloud');
4
+ const { errorValue, isPlainObject } = require('@e-mc/types');
5
+ const { createErrorHandler, intoArray } = require('@e-mc/cloud/util');
6
+ const { copyObject, createStorageClient } = require('@pi-r/aws');
6
7
  function download(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
7
- const s3 = client_1.createStorageClient.call(this, credential, service, sdk);
8
+ const s3 = createStorageClient.call(this, credential, service, sdk);
8
9
  return (data, callback) => {
9
10
  const { bucket: Bucket, download: target } = data;
10
11
  const Key = target.keyname || target.filename;
11
12
  if (!Bucket || !Key) {
12
- callback((0, types_1.errorValue)('Missing property', !Bucket ? 'Bucket' : 'Key'));
13
+ callback(errorValue('Missing property', !Bucket ? 'Bucket' : 'Key'));
13
14
  return;
14
15
  }
15
16
  s3.getObject({ ...target.options, Bucket, Key, VersionId: target.versionId }, async (err, result) => {
16
17
  if (!err) {
17
18
  callback(null, result.Body);
18
- const copyTo = (0, util_1.intoArray)(target.copyObject);
19
+ const copyTo = intoArray(target.copyObject);
19
20
  if (copyTo) {
20
21
  const tasks = [];
21
22
  for (const { bucket, pathname, filename, options } of copyTo) {
22
23
  const keyObject = filename ? Cloud.joinPath(pathname, filename, true) : Key;
23
- tasks.push(client_1.copyObject
24
+ tasks.push(copyObject
24
25
  .call(this, credential, Bucket, Key, bucket, keyObject, options, service, sdk)
25
- .catch((0, util_1.createErrorHandler)(this, service, Bucket)));
26
+ .catch(createErrorHandler(this, service, Bucket)));
26
27
  }
27
28
  await Promise.all(tasks);
28
29
  }
29
30
  const deleteObject = target.deleteObject;
30
31
  if (deleteObject) {
31
32
  const location = Cloud.joinPath(Bucket, Key);
32
- const deleteOptions = (0, types_1.isPlainObject)(deleteObject) ? deleteObject : undefined;
33
+ const deleteOptions = isPlainObject(deleteObject) ? deleteObject : undefined;
33
34
  s3.deleteObject({ ...deleteOptions, Bucket, Key, VersionId: target.versionId }, error => {
34
35
  if (!error) {
35
36
  this.formatMessage(64, service, "Delete success", location, Cloud.optionsLogMessage('DELETE'));
@@ -46,4 +47,5 @@ function download(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
46
47
  });
47
48
  };
48
49
  }
50
+
49
51
  module.exports = download;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@pi-r/aws",
3
- "version": "0.11.2",
3
+ "version": "0.12.0",
4
4
  "description": "AWS V2 cloud functions for E-mc.",
5
5
  "main": "client/index.js",
6
6
  "types": "client/index.d.ts",
@@ -20,10 +20,10 @@
20
20
  "license": "MIT",
21
21
  "homepage": "https://github.com/anpham6/pi-r#readme",
22
22
  "dependencies": {
23
- "@e-mc/cloud": "^0.13.9",
24
- "@e-mc/module": "^0.13.9",
25
- "@e-mc/types": "^0.13.9",
26
- "@pi-r/aws-lib": "^0.11.2",
23
+ "@e-mc/cloud": "^0.14.0",
24
+ "@e-mc/module": "^0.14.0",
25
+ "@e-mc/types": "^0.14.0",
26
+ "@pi-r/aws-lib": "^0.12.0",
27
27
  "aws-sdk": "^2.1693.0"
28
28
  }
29
29
  }
package/upload/index.d.ts ADDED
@@ -0,0 +1,5 @@
1
+ import type { UploadHost } from '@e-mc/cloud/types';
2
+
3
+ declare const upload: UploadHost;
4
+
5
+ export = upload;
package/upload/index.js CHANGED
@@ -1,21 +1,22 @@
1
1
  "use strict";
2
- const path = require("node:path");
3
- const fs = require("node:fs");
4
- const stream = require("node:stream");
5
- const aws = require("aws-sdk");
6
- const node_crypto_1 = require("node:crypto");
7
- const Cloud = require("@e-mc/cloud");
8
- const types_1 = require("@e-mc/types");
9
- const util_1 = require("@e-mc/cloud/util");
10
- const aws_lib_1 = require("@pi-r/aws-lib");
11
- const client_1 = require("@pi-r/aws");
2
+
3
+ const path = require('node:path');
4
+ const fs = require('node:fs');
5
+ const stream = require('node:stream');
6
+ const aws = require('aws-sdk');
7
+ const { randomUUID } = require('node:crypto');
8
+ const Cloud = require('@e-mc/cloud');
9
+ const { alignSize, createAbortError, isErrorCode, isPlainObject } = require('@e-mc/types');
10
+ const { createErrorHandler, createKeyAndBody, generateFilename, intoArray } = require('@e-mc/cloud/util');
11
+ const { formatDefaultRetention, getBucketKey } = require('@pi-r/aws-lib');
12
+ const { copyObject, createBucketV2, createStorageClient } = require('@pi-r/aws');
12
13
  const BUCKET_SESSION = new Set();
13
14
  const BUCKET_RESPONSE = {};
14
15
  function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
15
- const s3 = client_1.createStorageClient.call(this, credential, service, sdk);
16
+ const s3 = createStorageClient.call(this, credential, service, sdk);
16
17
  return async (data, callback) => {
17
18
  const { bucket: Bucket, localUri } = data;
18
- const { pathname, flags = 0, fileGroup, contentType, metadata, tags: Tags, endpoint, active, publicRead, acl, admin = {}, overwrite, options } = data.upload;
19
+ const { pathname, flags = 0, fileGroup, descendantsGroup, contentType, metadata, tags: Tags, endpoint, active, publicRead, acl, admin = {}, overwrite, options } = data.upload;
19
20
  let filename = data.upload.filename || path.basename(localUri), bucketKey;
20
21
  const complete = (err, url) => {
21
22
  BUCKET_SESSION.delete(service + Bucket);
@@ -24,11 +25,11 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
24
25
  }
25
26
  callback(err, url);
26
27
  };
27
- const addLog = (0, util_1.createErrorHandler)(this, Bucket, service);
28
+ const addLog = createErrorHandler(this, Bucket, service);
28
29
  const configBucket = admin.configBucket;
29
30
  if (!BUCKET_SESSION.has(service + Bucket)) {
30
31
  const bucketAcl = admin.publicRead ? "public-read" : admin.acl;
31
- const response = BUCKET_RESPONSE[bucketKey = (0, aws_lib_1.getBucketKey)(credential, Bucket, bucketAcl, service, sdk)] ||= client_1.createBucketV2.call(this, credential, Bucket, bucketAcl, configBucket?.create, service, sdk);
32
+ const response = BUCKET_RESPONSE[bucketKey = getBucketKey(credential, Bucket, bucketAcl, service, sdk)] ||= createBucketV2.call(this, credential, Bucket, bucketAcl, configBucket?.create, service, sdk);
32
33
  if (!await response) {
33
34
  complete(null);
34
35
  return;
@@ -42,10 +43,10 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
42
43
  this.formatMessage(64, service, ["Bucket configured" + ` (${feature})`, Bucket], message || ExpectedBucketOwner, { ...Cloud[message === 'delete' ? 'LOG_CLOUD_WARN' : 'LOG_CLOUD_COMMAND'] });
43
44
  };
44
45
  if (service !== "ibm") {
45
- if ((0, types_1.isPlainObject)(DefaultRetention)) {
46
+ if (isPlainObject(DefaultRetention)) {
46
47
  s3.putObjectLockConfiguration({ Bucket, ObjectLockConfiguration: { ObjectLockEnabled: 'Enabled', Rule: { DefaultRetention } }, ExpectedBucketOwner, RequestPayer: options?.RequestPayer }, err => {
47
48
  if (!err) {
48
- this.formatMessage(64, service, ["Bucket configured" + ' (Retention Policy)', Bucket], (0, aws_lib_1.formatDefaultRetention)(DefaultRetention), Cloud.optionsLogMessage('COMMAND'));
49
+ this.formatMessage(64, service, ["Bucket configured" + ' (Retention Policy)', Bucket], formatDefaultRetention(DefaultRetention), Cloud.optionsLogMessage('COMMAND'));
49
50
  }
50
51
  else {
51
52
  addLog(err);
@@ -100,7 +101,7 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
100
101
  }
101
102
  if (!overwrite) {
102
103
  const current = filename;
103
- const next = (0, util_1.generateFilename)(filename);
104
+ const next = generateFilename(filename);
104
105
  let i = 0, exists = false;
105
106
  do {
106
107
  if (i > 0) {
@@ -112,8 +113,8 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
112
113
  exists = await s3.headObject({ Bucket, Key: Cloud.joinPath(pathname, filename, true) }).promise()
113
114
  .then(() => true)
114
115
  .catch((err) => {
115
- if (err instanceof Error && err.code !== 'NotFound') {
116
- filename = (0, node_crypto_1.randomUUID)() + path.extname(current);
116
+ if (!isErrorCode(err, 'NotFound')) {
117
+ filename = randomUUID() + path.extname(current);
117
118
  }
118
119
  return false;
119
120
  });
@@ -125,7 +126,7 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
125
126
  if (pathname) {
126
127
  await s3.putObject({ Bucket, Key: pathname.endsWith('/') ? pathname : pathname + '/', Body: Buffer.from(''), ContentLength: 0 }).promise().catch(() => { });
127
128
  }
128
- const partSize = flags & 4 ? (0, types_1.alignSize)(data.upload.chunkSize, 1024) : 0;
129
+ const partSize = flags & 4 ? alignSize(data.upload.chunkSize, 1024) : 0;
129
130
  const Key = [filename];
130
131
  const Body = [data.buffer];
131
132
  const Stream = [];
@@ -149,7 +150,7 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
149
150
  Stream[0] = stream.Readable.from(Body[0], { highWaterMark: getPartSize() });
150
151
  }
151
152
  if (fileGroup) {
152
- const [key, body, type] = (0, util_1.createKeyAndBody)(filename, fileGroup, 0, addLog, flags);
153
+ const [key, body, type] = createKeyAndBody(filename, fileGroup, { errorCallback: addLog, descendantsGroup, flags });
153
154
  Key.push(...key);
154
155
  ContentType.push(...type);
155
156
  if (flags & 2) {
@@ -163,7 +164,7 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
163
164
  const first = i === 0;
164
165
  if (this.aborted) {
165
166
  if (first) {
166
- complete((0, types_1.createAbortError)());
167
+ complete(createAbortError());
167
168
  }
168
169
  return;
169
170
  }
@@ -182,7 +183,7 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
182
183
  params.Metadata = metadata;
183
184
  }
184
185
  if (service !== "oci") {
185
- if ((0, types_1.isPlainObject)(Tags) && (length = Object.keys(Tags).length) > 0) {
186
+ if (isPlainObject(Tags) && (length = Object.keys(Tags).length) > 0) {
186
187
  tags = [];
187
188
  for (const name in Tags) {
188
189
  tags.push({ Key: name, Value: Tags[name] });
@@ -227,12 +228,12 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
227
228
  }
228
229
  });
229
230
  }
230
- const copyTo = (0, util_1.intoArray)(data.upload.copyObject);
231
+ const copyTo = intoArray(data.upload.copyObject);
231
232
  if (copyTo) {
232
233
  const tasks = [];
233
234
  for (const { bucket: bucketName, pathname: pathObject = pathname, filename: fileObject, options: copyOptions } of copyTo) {
234
235
  const keyObject = fileObject ? Cloud.joinPath(pathObject, fileObject, true) : result.Key;
235
- tasks.push(client_1.copyObject
236
+ tasks.push(copyObject
236
237
  .call(this, credential, Bucket, result.Key, bucketName, keyObject, copyOptions, service, sdk)
237
238
  .catch(addLog));
238
239
  }
@@ -244,4 +245,5 @@ function upload(credential, service = "aws", sdk = "aws-sdk/clients/s3") {
244
245
  }
245
246
  };
246
247
  }
248
+
247
249
  module.exports = upload;