@pi-r/aws 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,7 @@
1
+ Copyright 2023 An Pham
2
+
3
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4
+
5
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6
+
7
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,5 @@
1
+ ### @pi-r/aws
2
+
3
+ ### LICENSE
4
+
5
+ MIT
@@ -0,0 +1,38 @@
1
+ import type { ICloud, IModule } from '@e-mc/types/lib';
2
+ import type { BucketWebsiteOptions, CloudDatabase } from '@e-mc/types/lib/cloud';
3
+ import type { BatchQueryResult, QueryResult } from '@e-mc/types/lib/db';
4
+
5
+ import type { AWSDatabaseCredential, AWSDatabaseQuery, AWSStorageCredential, BucketCannedACL, ConfigureBucketOptions } from '../types';
6
+
7
+ import type { ConfigurationOptions } from 'aws-sdk/lib/core';
8
+ import type { ServiceConfigurationOptions } from 'aws-sdk/lib/service';
9
+ import type { DocumentClient } from 'aws-sdk/clients/dynamodb';
10
+ import type { DynamoDBClientConfig } from '@aws-sdk/client-dynamodb';
11
+ import type { CreateBucketRequest } from 'aws-sdk/clients/s3';
12
+
13
+ declare namespace AWS {
14
+ function isAccessDefined(credential: Pick<ConfigurationOptions, "accessKeyId" | "secretAccessKey" | "sessionToken">): boolean;
15
+ function isEnvDefined(): boolean;
16
+ function isDatabaseDefined(credential: AWSDatabaseCredential, data: CloudDatabase): boolean;
17
+ function getPublicReadPolicy(bucket: string, authenticated?: boolean, write?: boolean): string;
18
+ function getBucketPublicReadPolicy(bucket: string): string;
19
+ function getPrivatePolicy(bucket: string): string;
20
+ function validateStorage(credential: AWSStorageCredential): boolean;
21
+ function validateDatabase(credential: AWSDatabaseCredential, data: CloudDatabase): boolean;
22
+ function createStorageClient(this: IModule, credential: AWSStorageCredential, service?: string, sdk?: string): boolean;
23
+ function createDatabaseClient(this: IModule, credential: AWSDatabaseCredential): DocumentClient;
24
+ function createBucket(this: IModule, credential: AWSStorageCredential, Bucket: string, publicRead?: boolean, service?: string, sdk?: string): Promise<boolean>;
25
+ function createBucketV2(this: IModule, credential: AWSStorageCredential, Bucket: string, ACL?: BucketCannedACL | 1, options?: CreateBucketRequest, service?: string, sdk?: string): Promise<boolean>;
26
+ function setBucketPolicy(this: IModule, credential: AWSStorageCredential, Bucket: string, options: ConfigureBucketOptions, service?: string, sdk?: string): Promise<boolean>;
27
+ function setBucketWebsite(this: IModule, credential: AWSStorageCredential, Bucket: string, options: BucketWebsiteOptions, service?: string, sdk?: string): Promise<boolean>;
28
+ function deleteObjects(this: IModule, credential: AWSStorageCredential, Bucket: string, service?: string, sdk?: string): Promise<void>;
29
+ function deleteObjectsV2(this: IModule, credential: AWSStorageCredential, Bucket: string, recursive?: boolean, service?: string, sdk?: string): Promise<void>;
30
+ function executeQuery(this: ICloud, credential: AWSDatabaseCredential, data: AWSDatabaseQuery, sessionKey?: string): Promise<QueryResult>;
31
+ function executeBatchQuery(this: ICloud, credential: AWSDatabaseCredential, batch: AWSDatabaseQuery[], sessionKey?: string): Promise<BatchQueryResult>;
32
+ function setDatabaseEndpoint(config: ServiceConfigurationOptions | DynamoDBClientConfig): void;
33
+ function checkBucketCannedACL(value: unknown): BucketCannedACL | undefined;
34
+ function getBucketKey(credential: unknown, Bucket: string, acl: string | undefined, service: string, sdk: string): string;
35
+ function isNoSuchBucket(err: unknown): boolean;
36
+ }
37
+
38
+ export = AWS;
@@ -0,0 +1,381 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.isNoSuchBucket = exports.getBucketKey = exports.checkBucketCannedACL = exports.setDatabaseEndpoint = exports.executeBatchQuery = exports.executeQuery = exports.deleteObjectsV2 = exports.deleteObjects = exports.setBucketWebsite = exports.setBucketPolicy = exports.createBucketV2 = exports.createBucket = exports.createDatabaseClient = exports.createStorageClient = exports.validateDatabase = exports.validateStorage = exports.getPrivatePolicy = exports.getBucketPublicReadPolicy = exports.getPublicReadPolicy = exports.isDatabaseDefined = exports.isEnvDefined = exports.isAccessDefined = void 0;
4
+ const types_1 = require("@e-mc/types");
5
+ const util_1 = require("@e-mc/cloud/util");
6
+ const Module = require("@e-mc/module");
7
+ const Cloud = require("@e-mc/cloud");
8
// AccessControlPolicy granting READ to the global AuthenticatedUsers group.
// Used as the substitute for the 'authenticated-read' canned ACL where only
// putBucketAcl with an explicit policy is accepted (see setCannedAcl/setBucketPolicy).
// NOTE(review): S3 PutBucketAcl normally expects an Owner alongside Grants —
// confirm the targeted providers accept this partial policy.
const ACP_AUTHENTICATEDREAD = {
    Grants: [{
            Grantee: { Type: 'Group', URI: 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers' },
            Permission: 'READ'
        }]
};
14
/**
 * Applies a canned ACL (or the equivalent bucket policy) to an existing bucket.
 *
 * @param S3 Storage client created by createStorageClient.
 * @param Bucket Bucket name.
 * @param ACL Canned ACL name, or 1 for the bucket-level public-read (list) policy.
 * @param service Provider id ('aws' | 'ibm' | 'oci' | ...); 'oci' is a no-op.
 * @param recursive Internal retry flag — set only by the delayed self-reschedule below.
 */
async function setCannedAcl(S3, Bucket, ACL, service = 'aws', recursive) {
    // OCI object storage does not support these ACL/policy calls.
    if (service === 'oci') {
        return;
    }
    const callback = (err) => {
        if (!err) {
            this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ['Grant ' + ACL, Bucket], '', { ...recursive ? Cloud.LOG_CLOUD_DELAYED : Cloud.LOG_CLOUD_COMMAND });
        }
        else if (!recursive) {
            if (err instanceof Error && err.code === 'OperationAborted') {
                // Bucket operation still settling — retry exactly once after 1 minute
                // (the retry carries recursive=true, so it will not re-queue again).
                setTimeout(() => setCannedAcl.call(this, S3, Bucket, ACL, service, true), 60000 /* TIME.m */);
                this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, [`Grant ${ACL} (delayed)`, Bucket], err, { ...Cloud.LOG_CLOUD_DELAYED });
            }
            else {
                this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ['Unable to grant ' + ACL, Bucket], err, { ...Cloud.LOG_CLOUD_WARN });
            }
        }
    };
    let promise;
    if (service === 'ibm') {
        // IBM COS path uses putBucketAcl only; the bucket-level marker (1)
        // downgrades to 'public-read', and 'authenticated-read' is expressed as
        // an explicit AccessControlPolicy.
        if (ACL === 1) {
            ACL = 'public-read';
        }
        promise = S3.putBucketAcl(ACL === 'authenticated-read' ? { Bucket, AccessControlPolicy: ACP_AUTHENTICATEDREAD } : { Bucket, ACL }).promise();
    }
    else {
        // All other providers receive a bucket policy document instead of an ACL.
        switch (ACL) {
            case 1:
                promise = S3.putBucketPolicy({ Bucket, Policy: getBucketPublicReadPolicy(Bucket) }).promise();
                break;
            case 'public-read':
                promise = S3.putBucketPolicy({ Bucket, Policy: getPublicReadPolicy(Bucket) }).promise();
                break;
            case 'public-read-write':
                promise = S3.putBucketPolicy({ Bucket, Policy: getPublicReadPolicy(Bucket, false, true) }).promise();
                break;
            case 'authenticated-read':
                promise = S3.putBucketPolicy({ Bucket, Policy: getPublicReadPolicy(Bucket, true) }).promise();
                break;
            default:
                promise = S3.putBucketPolicy({ Bucket, Policy: getPrivatePolicy(Bucket) }).promise();
                break;
        }
    }
    // Failures are logged (and possibly retried) via callback; never rethrown.
    return promise.then(() => callback(null)).catch(err => callback(err));
}
60
/**
 * Whether the credential object itself carries usable AWS access material:
 * either a complete access-key pair, or a session token.
 */
function isAccessDefined(credential) {
    const { accessKeyId, secretAccessKey, sessionToken } = credential;
    if (accessKeyId && secretAccessKey) {
        return true;
    }
    return !!sessionToken;
}
exports.isAccessDefined = isAccessDefined;
64
/**
 * Whether AWS access material is available from the process environment:
 * a full key pair (AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY) or AWS_SESSION_TOKEN.
 */
function isEnvDefined() {
    const { AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN } = process.env;
    return Boolean(AWS_ACCESS_KEY_ID && AWS_SECRET_ACCESS_KEY || AWS_SESSION_TOKEN);
}
exports.isEnvDefined = isEnvDefined;
68
/**
 * A database request is addressable when it names a table and a region or
 * endpoint can be resolved from the credential (or AWS_DEFAULT_REGION).
 */
function isDatabaseDefined(credential, data) {
    if (!data.table) {
        return false;
    }
    return !!(credential.region || credential.endpoint || process.env.AWS_DEFAULT_REGION);
}
exports.isDatabaseDefined = isDatabaseDefined;
72
/**
 * Builds an S3 bucket policy (as a JSON string) granting object-level read —
 * and optionally write — access on `arn:aws:s3:::<bucket>/*`.
 *
 * @param bucket Bucket name.
 * @param authenticated Grant to authenticated AWS users instead of everyone.
 * @param write Also grant PutObject/DeleteObjectVersion.
 */
function getPublicReadPolicy(bucket, authenticated, write) {
    const actions = ["s3:GetObject", "s3:GetObjectVersion"];
    if (write) {
        actions.push("s3:PutObject", "s3:DeleteObjectVersion");
    }
    const statement = {
        "Sid": (authenticated ? "AuthenticatedRead" : "PublicRead") + (write ? "Write" : ""),
        "Effect": "Allow",
        "Principal": authenticated ? { "AWS": "*" } : "*",
        "Action": actions,
        "Resource": [`arn:aws:s3:::${bucket}/*`]
    };
    return JSON.stringify({ "Version": "2012-10-17", "Statement": [statement] });
}
exports.getPublicReadPolicy = getPublicReadPolicy;
89
/**
 * Builds a bucket policy (JSON string) allowing everyone to list the bucket's
 * objects, versions, and multipart uploads. Targets the bucket ARN itself
 * (not its objects — pair with getPublicReadPolicy for object access).
 */
function getBucketPublicReadPolicy(bucket) {
    const statement = {
        "Sid": "BucketPublicRead",
        "Effect": "Allow",
        "Principal": "*",
        "Action": ["s3:ListBucket", "s3:ListBucketVersions", "s3:ListBucketMultipartUploads"],
        "Resource": [`arn:aws:s3:::${bucket}`]
    };
    return JSON.stringify({ "Version": "2012-10-17", "Statement": [statement] });
}
exports.getBucketPublicReadPolicy = getBucketPublicReadPolicy;
102
/**
 * Builds a deny-all bucket policy (JSON string) covering both the bucket ARN
 * and every object inside it.
 */
function getPrivatePolicy(bucket) {
    const statement = {
        "Sid": "Private",
        "Effect": "Deny",
        "Principal": "*",
        "Action": "*",
        "Resource": [`arn:aws:s3:::${bucket}`, `arn:aws:s3:::${bucket}/*`]
    };
    return JSON.stringify({ "Version": "2012-10-17", "Statement": [statement] });
}
exports.getPrivatePolicy = getPrivatePolicy;
115
/**
 * A storage credential is usable when any supported source of credentials is
 * available: explicit keys/token, environment variables, a config-file path,
 * a named profile, or the AWS_SDK_LOAD_CONFIG=1 opt-in.
 */
function validateStorage(credential) {
    if (isAccessDefined(credential) || isEnvDefined()) {
        return true;
    }
    return !!(credential.fromPath || credential.profile || process.env.AWS_SDK_LOAD_CONFIG === '1');
}
exports.validateStorage = validateStorage;
119
/** Valid when both the database addressing check and the storage credential check pass. */
function validateDatabase(credential, data) {
    return isDatabaseDefined(credential, data) && validateStorage(credential);
}
exports.validateDatabase = validateDatabase;
123
/**
 * Creates an S3 (or S3-compatible) storage client.
 *
 * For service 'aws', credentials are resolved in priority order:
 * 1. `credential.fromPath` — JSON config file loaded via config.loadFromPath;
 * 2. `credential.profile` (or AWS_SDK_LOAD_CONFIG=1 with no explicit/env keys)
 *    — shared-ini credentials;
 * 3. the credential object passed straight to the S3 constructor.
 * Any other service loads the constructor from the `sdk` module id instead.
 *
 * @throws Rethrows any construction error after reporting a possibly missing
 *         package (first path segment of `sdk`) via this.checkPackage.
 */
function createStorageClient(credential, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    try {
        if (service === 'aws') {
            const S3 = require('aws-sdk/clients/s3');
            if (credential.fromPath) {
                const client = new S3();
                client.config.loadFromPath(credential.fromPath);
                return client;
            }
            if (credential.profile || process.env.AWS_SDK_LOAD_CONFIG === '1' && !isAccessDefined(credential) && !isEnvDefined()) {
                const { SharedIniFileCredentials } = require('aws-sdk');
                // NOTE(review): the credentials instance is passed directly as the S3
                // constructor options — presumably relies on duck typing; confirm.
                credential = new SharedIniFileCredentials({ profile: credential.profile });
            }
            return new S3(credential);
        }
        // Non-AWS provider: the sdk module id supplies an S3-compatible constructor.
        const S3 = require(sdk);
        return new S3(credential);
    }
    catch (err) {
        this.checkPackage(err, sdk.split('/')[0], { passThrough: true });
        throw err;
    }
}
exports.createStorageClient = createStorageClient;
147
/**
 * Creates a DynamoDB DocumentClient.
 *
 * Credential resolution mirrors createStorageClient: `fromPath` loads the
 * global AWS config (client options stay undefined), `profile` (or
 * AWS_SDK_LOAD_CONFIG=1 without explicit/env keys) uses shared-ini
 * credentials, otherwise the credential object is used as the options.
 *
 * @throws Rethrows after reporting a possibly missing 'aws-sdk' package.
 */
function createDatabaseClient(credential) {
    try {
        let options;
        if (credential.fromPath) {
            // Loads into the global AWS config; DocumentClient then picks it up.
            const AWS = require('aws-sdk');
            AWS.config.loadFromPath(credential.fromPath);
        }
        else if (credential.profile || process.env.AWS_SDK_LOAD_CONFIG === '1' && !isAccessDefined(credential) && !isEnvDefined()) {
            const { SharedIniFileCredentials } = require('aws-sdk');
            // NOTE(review): credentials instance used directly as client options —
            // presumably duck-typed; confirm against aws-sdk behavior.
            options = new SharedIniFileCredentials({ profile: credential.profile });
        }
        else {
            options = credential;
        }
        const DynamoDB = require('aws-sdk/clients/dynamodb');
        return new DynamoDB.DocumentClient(options);
    }
    catch (err) {
        this.checkPackage(err, 'aws-sdk', { passThrough: true });
        throw err;
    }
}
exports.createDatabaseClient = createDatabaseClient;
170
/** Legacy wrapper around createBucketV2: `publicRead` maps to the 'public-read' canned ACL. */
function createBucket(credential, Bucket, publicRead, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    return createBucketV2.call(this, credential, Bucket, publicRead ? 'public-read' : undefined, undefined, service, sdk);
}
exports.createBucket = createBucket;
174
/**
 * Ensures `Bucket` exists, creating it if necessary, then applies the
 * requested canned ACL (or, for ACL === 1, the bucket-level public-read
 * policy). Resolves true on success (including "already exists"), false when
 * creation fails for any other reason.
 */
function createBucketV2(credential, Bucket, ACL, options, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    // Preserve the special bucket-level marker (1); anything else must be a
    // recognized canned ACL or it is dropped.
    ACL = ACL === 1 ? 1 : checkBucketCannedACL(ACL);
    const S3 = createStorageClient.call(this, credential, service, sdk);
    return S3.headBucket({ Bucket }).promise()
        .then(async () => {
        // Bucket already exists — only the ACL needs (re)applying.
        if (ACL) {
            await setCannedAcl.call(this, S3, Bucket, ACL, service);
        }
        return true;
    })
        .catch(() => {
        // headBucket failed — attempt to create the bucket.
        const input = { ...options, Bucket };
        const region = credential.region;
        // AWS rejects a LocationConstraint of us-east-1; other providers take it as-is.
        if (!input.CreateBucketConfiguration && typeof region === 'string' && (region !== 'us-east-1' || service !== 'aws')) {
            input.CreateBucketConfiguration = { LocationConstraint: region };
        }
        return S3.createBucket(input).promise()
            .then(async () => {
            this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Bucket created" /* VAL_CLOUD.CREATE_BUCKET */, Bucket], '', { ...Cloud.LOG_CLOUD_COMMAND });
            if (ACL) {
                await setCannedAcl.call(this, S3, Bucket, ACL, service);
            }
            return true;
        })
            .catch(async (err) => {
            switch (err instanceof Error && err.code) {
                // Races with a concurrent creation count as success.
                case 'BucketAlreadyExists':
                case 'BucketAlreadyOwnedByYou':
                    if (ACL) {
                        await setCannedAcl.call(this, S3, Bucket, ACL, service);
                    }
                    return true;
                default:
                    this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to create bucket" /* ERR_CLOUD.CREATE_BUCKET */, Bucket], err, { ...Cloud.LOG_CLOUD_FAIL });
                    return false;
            }
        });
    });
}
exports.createBucketV2 = createBucketV2;
214
/**
 * Applies either a bucket policy (when `options.Policy` is a string and the
 * provider is not IBM) or a bucket ACL to an existing bucket.
 * Mutates `options` in place (sets Bucket; may swap ACL for an explicit
 * AccessControlPolicy on IBM). Resolves true on success, false on failure;
 * a missing bucket fails silently (no log).
 */
function setBucketPolicy(credential, Bucket, options, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    const S3 = createStorageClient.call(this, credential, service, sdk);
    options.Bucket = Bucket;
    // IBM COS has no 'authenticated-read' canned ACL — use the explicit policy.
    if (service === 'ibm' && 'ACL' in options && options.ACL === 'authenticated-read') {
        options.AccessControlPolicy = ACP_AUTHENTICATEDREAD;
        delete options.ACL;
    }
    return ('Policy' in options && (0, types_1.isString)(options.Policy) && service !== 'ibm' ? S3.putBucketPolicy(options) : S3.putBucketAcl(options)).promise()
        .then(() => {
        this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Bucket policy configured" /* VAL_CLOUD.POLICY_BUCKET */, Bucket], '', { ...Cloud.LOG_CLOUD_COMMAND });
        return true;
    })
        .catch(err => {
        if (!(0, exports.isNoSuchBucket)(err)) {
            this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to update bucket policy" /* ERR_CLOUD.POLICY_BUCKET */, Bucket], err, { ...Cloud.LOG_CLOUD_FAIL, fatal: false });
        }
        return false;
    });
}
exports.setBucketPolicy = setBucketPolicy;
234
/**
 * Configures static-website hosting on a bucket from the given index/error
 * page names. Resolves true on success, false on failure; a missing bucket
 * fails silently (no log).
 */
function setBucketWebsite(credential, Bucket, options, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    const S3 = createStorageClient.call(this, credential, service, sdk);
    const WebsiteConfiguration = {};
    if ((0, types_1.isString)(options.indexPage)) {
        WebsiteConfiguration.IndexDocument = { Suffix: options.indexPage };
    }
    if ((0, types_1.isString)(options.errorPage)) {
        WebsiteConfiguration.ErrorDocument = { Key: options.errorPage };
    }
    return S3.putBucketWebsite({ Bucket, WebsiteConfiguration }).promise()
        .then(() => {
        this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Bucket configured" /* VAL_CLOUD.CONFIGURE_BUCKET */, Bucket], WebsiteConfiguration, { ...Cloud.LOG_CLOUD_COMMAND });
        return true;
    })
        .catch(err => {
        if (!(0, exports.isNoSuchBucket)(err)) {
            this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to configure bucket" /* ERR_CLOUD.CONFIGURE_BUCKET */, Bucket], err, { ...Cloud.LOG_CLOUD_FAIL, fatal: false });
        }
        return false;
    });
}
exports.setBucketWebsite = setBucketWebsite;
256
/** Legacy alias for deleteObjectsV2 with recursive deletion forced on. */
function deleteObjects(credential, Bucket, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    return deleteObjectsV2.call(this, credential, Bucket, true, service, sdk);
}
exports.deleteObjects = deleteObjects;
260
/**
 * Empties a bucket: lists its objects and batch-deletes them. With
 * recursive=false, only top-level keys (no '/' in the key) are removed.
 * All failures are logged, never rethrown; a missing bucket is silent.
 *
 * NOTE(review): a single listObjects call is used — S3 list responses are
 * paginated (commonly 1000 keys); buckets larger than one page would only be
 * partially emptied. Confirm whether callers rely on this.
 */
async function deleteObjectsV2(credential, Bucket, recursive = true, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    const S3 = createStorageClient.call(this, credential, service, sdk);
    return S3.listObjects({ Bucket }).promise()
        .then(({ Contents }) => {
        if (Contents?.length) {
            let Objects = Contents.map(data => ({ Key: data.Key }));
            if (!recursive) {
                Objects = Objects.filter(value => value.Key.indexOf('/') === -1);
            }
            return S3.deleteObjects({ Bucket, Delete: { Objects } }).promise()
                .then(data => {
                if ((0, types_1.isArray)(data.Deleted)) {
                    const files = data.Deleted.length + ' files';
                    this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Bucket emptied" /* VAL_CLOUD.EMPTY_BUCKET */ + ` (${recursive ? 'recursive' : files})`, Bucket], recursive ? files : '', { ...Cloud.LOG_CLOUD_COMMAND });
                }
            })
                .catch(err => {
                this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to empty bucket" /* ERR_CLOUD.DELETE_BUCKET */, Bucket], err, { ...Cloud.LOG_CLOUD_FAIL, fatal: false });
            });
        }
    })
        .catch(err => {
        if (!(0, exports.isNoSuchBucket)(err)) {
            this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to list bucket" /* ERR_CLOUD.LIST_BUCKET */, Bucket], err, { ...Cloud.LOG_CLOUD_FAIL, fatal: false });
        }
    });
}
exports.deleteObjectsV2 = deleteObjectsV2;
288
/** Runs a single query via executeBatchQuery; resolves to the first result set (or []). */
async function executeQuery(credential, data, sessionKey) {
    return (await executeBatchQuery.call(this, credential, [data], sessionKey))[0] || [];
}
exports.executeQuery = executeQuery;
292
/**
 * Executes a batch of DynamoDB queries sequentially, consulting and
 * populating the host's query cache.
 *
 * Each item is either a key/get lookup (`key`/`partitionKey` + `id`,
 * optionally applying `update` first) or a raw DynamoDB `query` input.
 * `ignoreCache` semantics (as used below): 1 = never read the cache but still
 * build a cache key; 0 = renew the cache entry; true = skip caching entirely
 * unless another flag forces a key. A lazily created DocumentClient is shared
 * across the batch.
 *
 * @throws formatError when an item lacks a table or has neither key nor query.
 */
async function executeBatchQuery(credential, batch, sessionKey) {
    const length = batch.length;
    const result = new Array(length);
    const caching = length > 0 && this.hasCache(batch[0].service, sessionKey);
    const cacheValue = { value: this.valueOfKey(credential, 'cache'), sessionKey };
    let client;
    const createClient = () => {
        // Endpoint/region are resolved on the credential before first use;
        // a copy is passed when the batch has multiple items so the shared
        // credential object is not mutated further by the client.
        setDatabaseEndpoint(credential);
        return client || (client = createDatabaseClient.call(this, length === 1 ? credential : { ...credential }));
    };
    for (let i = 0; i < length; ++i) {
        const item = batch[i];
        const { service, table, id = '', query, partitionKey, key = partitionKey, limit = 0, update, ignoreCache } = item;
        if (!table) {
            throw (0, util_1.formatError)(item, "Missing database table" /* ERR_DB.TABLE */);
        }
        const renewCache = ignoreCache === 0;
        const getCache = (value) => {
            if (ignoreCache === 1) {
                return;
            }
            cacheValue.renewCache = renewCache;
            return this.getQueryResult(service, credential, value, cacheValue);
        };
        // Non-empty queryString doubles as the "caching enabled" flag for this item.
        let rows, queryString = caching && ignoreCache !== true || ignoreCache === false || ignoreCache === 1 || renewCache ? table + '_' : '';
        if (key && (id || (0, types_1.isPlainObject)(key))) {
            // Keyed get (optionally preceded by an update).
            if (queryString) {
                queryString += Module.asString(key, true) + id;
                // Updates must not be skipped by a cache hit.
                if (!update && (rows = getCache(queryString))) {
                    result[i] = rows;
                    continue;
                }
            }
            const Key = (0, types_1.isPlainObject)(key) ? key : { [key]: id };
            const command = { TableName: table, Key };
            client = createClient();
            if (update) {
                await client.update({ ...command, ...update }).promise();
            }
            const output = await client.get(command).promise();
            if (output.Item) {
                rows = [output.Item];
            }
        }
        else if ((0, types_1.isPlainObject)(query)) {
            // Raw query input; mutated in place with TableName/Limit.
            if (queryString && (rows = getCache(queryString += Module.asString(query, true) + limit))) {
                result[i] = rows;
                continue;
            }
            query.TableName = table;
            if (limit > 0) {
                query.Limit = limit;
            }
            const output = await createClient().query(query).promise();
            if (output.Count && output.Items) {
                rows = output.Items;
            }
        }
        else {
            throw (0, util_1.formatError)(item, "Missing database query" /* ERR_DB.QUERY */);
        }
        // Stores into the cache (when queryString is non-empty) and normalizes rows.
        result[i] = this.setQueryResult(service, credential, queryString, rows, cacheValue);
    }
    return result;
}
exports.executeBatchQuery = executeBatchQuery;
358
/**
 * Mutates `config` in place to guarantee a DynamoDB endpoint/region pair:
 * - defaults the endpoint from config.region, AWS_DEFAULT_REGION, or 'us-east-1';
 * - back-fills a missing region by parsing "dynamodb.<region>." out of the
 *   endpoint (or AWS_DEFAULT_REGION), but only when it differs from the
 *   default us-east-1.
 */
function setDatabaseEndpoint(config) {
    config.endpoint || (config.endpoint = `https://dynamodb.${(0, types_1.isString)(config.region) ? config.region : process.env.AWS_DEFAULT_REGION || 'us-east-1'}.amazonaws.com`);
    if (!config.region && (0, types_1.isString)(config.endpoint)) {
        const region = (/dynamodb\.([^.]+)\./i.exec(config.endpoint)?.[1] || process.env.AWS_DEFAULT_REGION)?.toLowerCase();
        if (region && region !== 'us-east-1') {
            config.region = region;
        }
    }
}
exports.setDatabaseEndpoint = setDatabaseEndpoint;
368
/**
 * Narrows an arbitrary value to one of the supported bucket canned ACL names;
 * returns undefined for anything else.
 */
function checkBucketCannedACL(value) {
    const allowed = ['private', 'public-read', 'public-read-write', 'authenticated-read'];
    return allowed.includes(value) ? value : undefined;
}
exports.checkBucketCannedACL = checkBucketCannedACL;
378
// Cache key for in-flight bucket-creation responses (see the uploader):
// serialized credential + bucket + '_' + ACL + service + sdk module id.
const getBucketKey = (credential, Bucket, acl, service, sdk) => Module.asString(credential, true) + Bucket + '_' + (acl || '') + service + sdk;
exports.getBucketKey = getBucketKey;
380
+ const isNoSuchBucket = (err) => err instanceof Error && err.code === 'NoSuchBucket';
381
+ exports.isNoSuchBucket = isNoSuchBucket;
@@ -0,0 +1,40 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const client_1 = require("../client");
4
+ const types_1 = require("@e-mc/types");
5
+ const Module = require("@e-mc/module");
6
+ const Cloud = require("@e-mc/cloud");
7
/**
 * Returns a download handler bound to a storage client for `credential`.
 *
 * The handler fetches `data.download.filename` from `data.bucket`
 * (optionally a specific versionId) and passes the body to `callback`.
 * When `download.deleteObject` is set, the object is deleted afterwards
 * (a plain-object value is merged into the delete params); delete results
 * are only logged and never affect the callback.
 */
function download(credential, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    const s3 = client_1.createStorageClient.call(this, credential, service, sdk);
    return (data, callback) => {
        const { bucket: Bucket, download: target } = data;
        const Key = target.filename;
        if (!Bucket || !Key) {
            callback((0, types_1.errorValue)('Missing property', !Bucket ? 'Bucket' : 'Key'));
            return;
        }
        const location = Module.joinPath(Bucket, Key);
        const params = { Bucket, Key, VersionId: target.versionId };
        s3.getObject(params, (err, result) => {
            if (!err) {
                // Body is delivered before the optional delete is attempted.
                callback(null, result.Body);
                const deleteObject = target.deleteObject;
                if (deleteObject) {
                    s3.deleteObject((0, types_1.isPlainObject)(deleteObject) ? Object.assign(deleteObject, params) : params, error => {
                        if (!error) {
                            this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, "Delete success" /* VAL_CLOUD.DELETE_FILE */, location, { ...Cloud.LOG_CLOUD_DELETE });
                        }
                        else {
                            this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Delete failed" /* ERR_CLOUD.DELETE_FAIL */, location], error, { ...Cloud.LOG_CLOUD_FAIL, fatal: !!target.active });
                        }
                    });
                }
            }
            else {
                callback(err);
            }
        });
    };
}

module.exports = download;
package/package.json ADDED
@@ -0,0 +1,29 @@
1
+ {
2
+ "name": "@pi-r/aws",
3
+ "version": "0.0.1",
4
+ "description": "AWS V2 cloud functions for E-mc.",
5
+ "main": "client/index.js",
6
+ "types": "client/index.d.ts",
7
+ "publishConfig": {
8
+ "access": "public"
9
+ },
10
+ "repository": {
11
+ "type": "git",
12
+ "url": "https://github.com/anpham6/pi-r.git",
13
+ "directory": "src/cloud/aws"
14
+ },
15
+ "keywords": [
16
+ "squared",
17
+ "e-mc",
18
+ "squared-functions"
19
+ ],
20
+ "author": "An Pham <anpham6@gmail.com>",
21
+ "license": "MIT",
22
+ "homepage": "https://github.com/anpham6/pi-r#readme",
23
+ "dependencies": {
24
+ "@e-mc/cloud": "^0.4.0",
25
+ "@e-mc/module": "^0.4.0",
26
+ "@e-mc/types": "^0.4.0",
27
+ "aws-sdk": "^2.1337.0"
28
+ }
29
+ }
@@ -0,0 +1,23 @@
1
+
2
+ import type { CloudDatabase } from '@e-mc/types/lib/cloud';
3
+
4
+ import type { ConfigurationOptions } from 'aws-sdk/lib/core';
5
+ import type { ServiceConfigurationOptions } from 'aws-sdk/lib/service';
6
+ import type { Key, QueryInput, UpdateItemInput } from 'aws-sdk/clients/dynamodb';
7
+ import type { PutBucketAclRequest, PutBucketPolicyRequest } from 'aws-sdk/clients/s3';
8
+
9
/** Canned ACLs applicable at the bucket level. */
export type BucketCannedACL = "authenticated-read" | "private" | "public-read" | "public-read-write";
/** Canned ACLs valid for individual objects (superset of the bucket set). */
export type ObjectCannedACL = BucketCannedACL | "aws-exec-read" | "bucket-owner-full-control" | "bucket-owner-read";
/** Accepted by setBucketPolicy: either a raw policy request or an ACL request. */
export type ConfigureBucketOptions = PutBucketAclRequest | PutBucketPolicyRequest;

/**
 * aws-sdk configuration extended with shared-ini `profile` selection and
 * `fromPath` (JSON config file loaded via config.loadFromPath).
 */
export interface AWSStorageCredential extends ConfigurationOptions {
    profile?: string;
    fromPath?: string;
}

/**
 * DynamoDB query description: either a key lookup (`key`/`partitionKey`) or
 * a raw QueryInput, with an optional UpdateItemInput applied beforehand.
 * NOTE(review): `PlainObject` is not imported here — assumed to be an ambient
 * type contributed by @e-mc/types; confirm.
 */
export interface AWSDatabaseQuery extends CloudDatabase<QueryInput, PlainObject, UpdateItemInput> {
    key?: string | Key;
    partitionKey?: string | Key;
}

/** Storage credential plus DynamoDB service options (region/endpoint). */
export interface AWSDatabaseCredential extends AWSStorageCredential, ServiceConfigurationOptions {}
@@ -0,0 +1,133 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const path = require("path");
4
+ const fs = require("fs");
5
+ const client_1 = require("../client");
6
+ const util_1 = require("@e-mc/cloud/util");
7
+ const types_1 = require("@e-mc/types");
8
+ const Module = require("@e-mc/module");
9
+ const Cloud = require("@e-mc/cloud");
10
// Buckets (keyed by service + bucket name) already verified during this process.
const BUCKET_SESSION = new Set();
// In-flight/completed createBucketV2 promises keyed by getBucketKey, so
// concurrent uploads share one bucket-creation request.
const BUCKET_RESPONSE = {};
12
/**
 * Returns an async upload handler bound to a storage client for `credential`.
 *
 * Per call the handler:
 * 1. Ensures the bucket exists (de-duplicated across concurrent uploads via
 *    BUCKET_SESSION/BUCKET_RESPONSE).
 * 2. Without `overwrite`, probes headObject for a free filename (numbered
 *    variants from generateFilename; falls back to a UUID name on unexpected
 *    head errors).
 * 3. Optionally creates an empty "folder" object for `pathname`.
 * 4. Uploads the main buffer plus any companion `fileGroup` entries; only the
 *    first (main) upload drives the callback — companion failures are logged.
 */
function upload(credential, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    const s3 = client_1.createStorageClient.call(this, credential, service, sdk);
    return async (data, callback) => {
        var _a;
        const { bucket: Bucket, localUri } = data;
        const { pathname = '', fileGroup, contentType, metadata, endpoint, active, publicRead, acl, admin = {}, overwrite, options } = data.upload;
        let filename = data.upload.filename || path.basename(localUri), bucketKey;
        const cleanup = () => {
            BUCKET_SESSION.delete(service + Bucket);
            if (bucketKey && bucketKey in BUCKET_RESPONSE) {
                delete BUCKET_RESPONSE[bucketKey];
            }
        };
        const errorResponse = (err) => {
            cleanup();
            callback(err);
        };
        if (!BUCKET_SESSION.has(service + Bucket)) {
            const bucketAcl = admin.publicRead ? 'public-read' : admin.acl;
            // Reuse an in-flight creation promise for the same credential/bucket/ACL.
            const response = BUCKET_RESPONSE[_a = bucketKey = (0, client_1.getBucketKey)(credential, Bucket, bucketAcl, service, sdk)] || (BUCKET_RESPONSE[_a] = client_1.createBucketV2.call(this, credential, Bucket, bucketAcl, admin.configBucket?.create, service, sdk));
            if (!await response) {
                // Bucket creation failed (already logged by createBucketV2).
                errorResponse(null);
                return;
            }
            BUCKET_SESSION.add(service + Bucket);
        }
        if (!overwrite) {
            const current = filename;
            const next = (0, util_1.generateFilename)(filename);
            let i = 0, exists;
            do {
                if (i > 0) {
                    [filename, exists] = next(i);
                    if (!exists) {
                        break;
                    }
                }
                exists = await s3.headObject({ Bucket, Key: pathname ? Module.joinPath(pathname, filename) : filename }).promise()
                    .then(() => true)
                    .catch((err) => {
                    // Any error other than NotFound: give up probing and take a UUID name.
                    if (err instanceof Error && err.code !== 'NotFound') {
                        filename = (0, types_1.generateUUID)() + path.extname(current);
                    }
                    return false;
                });
            } while (exists && ++i);
            if (i > 0) {
                this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["File renamed" /* VAL_CLOUD.RENAME_FILE */, current], filename, { ...Cloud.LOG_CLOUD_WARN });
            }
        }
        if (pathname) {
            // Best-effort zero-byte "folder" marker object.
            // NOTE(review): keys below are built as `pathname + Key[i]` — presumably
            // pathname is normalized upstream to end with '/'; confirm.
            await s3.putObject({ Bucket, Key: pathname, Body: Buffer.from(''), ContentLength: 0 }).promise().catch(() => { });
        }
        const Key = [filename];
        const Body = [data.buffer];
        const ContentType = [contentType];
        const addLog = (err) => err instanceof Error && this.addLog(this.statusType.WARN, err.message, service + ': ' + Bucket);
        if (fileGroup) {
            // Companion files: [content | localPath, extension, localFile]; '.map'
            // companions keep their own basename.
            for (const [content, ext, localFile] of fileGroup) {
                try {
                    Body.push(typeof content === 'string' ? fs.readFileSync(content) : content);
                    Key.push(ext === '.map' && localFile ? path.basename(localFile) : filename + ext);
                }
                catch (err) {
                    addLog(err);
                }
            }
        }
        for (let i = 0; i < Key.length; ++i) {
            const first = i === 0;
            if (this.aborted) {
                if (first) {
                    errorResponse((0, types_1.createAbortError)());
                }
                return;
            }
            const params = { ...options, Bucket, Key: pathname + Key[i], Body: Body[i] };
            const readable = publicRead || active && publicRead !== false && !acl;
            if (first) {
                params.ContentType || (params.ContentType = ContentType[i]);
                if (readable) {
                    params.ACL = 'public-read';
                }
                else if (acl) {
                    params.ACL = acl;
                }
                if (metadata) {
                    params.Metadata = metadata;
                }
            }
            else {
                // Companion uploads: ContentType[i] is undefined past index 0, which
                // overwrites any inherited options.ContentType.
                params.ContentType = ContentType[i];
                if (!params.ACL) {
                    if (readable) {
                        params.ACL = 'public-read';
                    }
                    else if (acl) {
                        params.ACL = acl;
                    }
                }
            }
            s3.upload(params, (err, result) => {
                if (!err) {
                    const url = endpoint ? Module.joinPath(endpoint, result.Key) : result.Location;
                    this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, "Upload success" /* VAL_CLOUD.UPLOAD_FILE */, url, { ...Cloud.LOG_CLOUD_UPLOAD });
                    if (first) {
                        cleanup();
                        callback(null, url);
                    }
                }
                else if (first) {
                    errorResponse(err);
                }
                else {
                    addLog(err);
                }
            });
        }
    };
}

module.exports = upload;