@e-mc/cloud 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,11 @@
1
+ Copyright 2023 An Pham
2
+
3
+ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
4
+
5
+ 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
6
+
7
+ 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
8
+
9
+ 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
10
+
11
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package/README.md ADDED
@@ -0,0 +1,5 @@
1
+ ### @e-mc/cloud
2
+
3
+ ### LICENSE
4
+
5
+ BSD 3-Clause
package/atlas/index.js ADDED
@@ -0,0 +1,193 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.getObjectId = exports.executeBatchQuery = exports.executeQuery = exports.createDatabaseClient = exports.validateDatabase = void 0;
4
+ const types_1 = require("../../types");
5
+ const mongodb_1 = require("../../db/mongodb");
6
+ const util_1 = require("../util");
7
+ const module_1 = require("../../module");
8
+ const MONGODB_V3 = module_1.default.checkSemVer('mongodb', 1, 4);
9
/**
 * Normalizes MongoDB connection credentials in place.
 * Moves a flat username/password pair into `credential.auth`, then (on newer
 * driver versions) folds the auth pair into the connection URI as basic auth
 * and strips `credential.auth` entirely.
 * @param item Query descriptor carrying the connection `uri` (may be rewritten).
 * @param credential Driver options object (mutated: username/password/auth removed).
 */
function sanitizeCredentials(item, credential) {
    const { username, password } = credential;
    if (username) {
        const userAuth = password ? { username, password } : { username };
        if (!MONGODB_V3) {
            if (!credential.auth) {
                credential.auth = userAuth;
            }
        }
        else if (password || !credential.auth || !credential.auth.password) {
            // Prefer the explicit pair unless an existing auth entry already has a password.
            credential.auth = userAuth;
        }
        delete credential.username;
    }
    if (password) {
        delete credential.password;
    }
    if (MONGODB_V3 && credential.auth) {
        if (!(0, util_1.hasBasicAuth)(item.uri)) {
            // NOTE(review): getBasicAuth presumably returns a "user:pass@" prefix — confirm against util.
            const basicAuth = (0, util_1.getBasicAuth)(credential.auth);
            if (basicAuth) {
                const parts = item.uri.split('://');
                item.uri = parts[0] + '://' + basicAuth + parts[1];
            }
        }
        delete credential.auth;
    }
}
35
/**
 * Checks whether enough information exists to attempt an Atlas/MongoDB query:
 * a table name plus either explicit credentials or a URI with embedded basic auth.
 * @returns true when the request is minimally usable.
 */
function validateDatabase(credential, data) {
    if (!data.table) {
        return false;
    }
    const { auth, username } = credential;
    return !!(auth?.username || username || (module_1.default.isURL(data.uri) && (0, util_1.hasBasicAuth)(data.uri)));
}
exports.validateDatabase = validateDatabase;
40
/**
 * Creates a MongoClient for the given request after sanitizing credentials
 * (which may fold auth into `data.uri`).
 * @throws Rethrows any failure after reporting a missing "mongodb" package.
 */
function createDatabaseClient(credential, data) {
    try {
        sanitizeCredentials(data, credential);
        const mongodb = require('mongodb');
        return new mongodb.MongoClient(data.uri, credential);
    }
    catch (err) {
        // Emit an install hint when the "mongodb" package is absent, then rethrow.
        this.checkPackage(err, 'mongodb', { passThrough: true });
        throw err;
    }
}
exports.createDatabaseClient = createDatabaseClient;
52
/** Convenience wrapper: runs a single query through executeBatchQuery and unwraps the result. */
async function executeQuery(credential, data, sessionKey) {
    const batchResult = await executeBatchQuery.call(this, credential, [data], sessionKey);
    return batchResult[0] || [];
}
exports.executeQuery = executeQuery;
56
/**
 * Executes a batch of MongoDB queries/updates, sharing one connected client per
 * distinct uri+options pair and consulting the query cache when enabled.
 *
 * Fix: clients are now always closed via try/finally. Previously closeClient
 * only ran on the missing-table path and on full success, so any error thrown
 * while querying (connect failure, bad pipeline, write error) leaked every
 * open MongoClient connection.
 *
 * @param credential Driver options applied to every item (cloned per item for multi-item batches).
 * @param batch Array of query descriptors ({ table, query, update, aggregate, sort, limit, ... }).
 * @param sessionKey Cache session identifier.
 * @returns Array of row arrays, one entry per batch item.
 */
async function executeBatchQuery(credential, batch, sessionKey) {
    const length = batch.length;
    const result = new Array(length);
    const clients = {};
    const caching = length > 0 && this.hasCache(batch[0].service, sessionKey);
    const coercing = this.hasCoerce('atlas', 'options');
    const cacheValue = { value: this.valueOfKey(credential, 'cache'), sessionKey };
    // One connected client per distinct uri+options combination, reused across items.
    const createClient = async (item, options) => {
        const key = item.uri + module_1.default.asString(options, true);
        return clients[key] || (clients[key] = await createDatabaseClient.call(this, options, item).connect());
    };
    try {
        for (let i = 0; i < length; ++i) {
            const item = batch[i];
            const { service, name, table, id, aggregate, sort, client, ignoreCache } = item;
            if (!table) {
                throw (0, util_1.formatError)(item, "Missing database table" /* ERR_DB.TABLE */);
            }
            let { query, update, limit = 0 } = item;
            if (id && (!query || limit === 1)) {
                // An explicit document id becomes a single-document _id lookup.
                query = getObjectId(id);
                limit = 1;
            }
            const renewCache = ignoreCache === 0;
            // Single-item batches on older drivers may reuse the objects directly;
            // otherwise work on copies so per-item mutation does not leak across items.
            const options = item.options
                ? length === 1 && !MONGODB_V3 ? Object.assign(item.options, credential) : { ...item.options, ...credential }
                : length === 1 && !MONGODB_V3 ? credential : { ...credential };
            let rows;
            let auth;
            let pipeline;
            // Non-empty only when this item participates in the query cache.
            let queryString = caching && ignoreCache !== true || ignoreCache === false || ignoreCache === 1 || renewCache
                ? (name || '') + '_' + table + '_' + limit + module_1.default.asString(options, true) + (sort ? module_1.default.asString(sort, true) : '') + (client ? module_1.default.asString(client, true) : '')
                : '';
            if (queryString) {
                sanitizeCredentials(item, options);
                auth = { uri: item.uri, options };
            }
            const getCache = () => {
                if (ignoreCache === 1) {
                    return;
                }
                cacheValue.renewCache = renewCache;
                return this.getQueryResult(service, auth, queryString, cacheValue);
            };
            const getCollection = (mongoClient) => mongoClient.db(name, client?.db).collection(table, client?.collection);
            if (aggregate) {
                pipeline = Array.isArray(aggregate) ? aggregate : aggregate.pipeline;
            }
            if (pipeline || query) {
                if (queryString) {
                    queryString += module_1.default.asString(aggregate || query, true);
                    if ((aggregate || !update) && (rows = getCache())) {
                        result[i] = rows;
                        continue;
                    }
                }
                const collection = getCollection(await createClient(item, options));
                if (pipeline) {
                    const aggregateOptions = aggregate?.options;
                    if (coercing) {
                        pipeline.forEach(doc => (0, types_1.coerceObject)(doc));
                        if (aggregateOptions) {
                            (0, types_1.coerceObject)(aggregateOptions);
                        }
                    }
                    const items = collection.aggregate(pipeline, aggregateOptions);
                    if (sort) {
                        items.sort((0, mongodb_1.getSortValue)(sort, coercing)[0]);
                    }
                    if (limit > 0) {
                        items.limit(limit);
                    }
                    rows = await items.toArray();
                }
                else {
                    const updateType = item.updateType;
                    const [filter, command] = (0, mongodb_1.getFilterValue)(query, coercing);
                    if (update && coercing) {
                        (0, types_1.coerceObject)(update);
                    }
                    if ((0, types_1.isArray)(update)) {
                        // An array "update" is a bulk insert rather than a modification.
                        const insertOptions = item.execute?.insert;
                        if (insertOptions) {
                            await collection.insertMany(update, coercing ? (0, types_1.coerceObject)(insertOptions) : insertOptions);
                        }
                        else {
                            await collection.insertMany(update);
                        }
                        update = undefined;
                    }
                    else if (updateType === 1) {
                        update = undefined;
                    }
                    if (limit === 1) {
                        const document = update
                            ? await collection[updateType === 2 ? 'findOneAndReplace' : updateType === 3 ? 'findOneAndDelete' : 'findOneAndUpdate'](filter, update, command)
                            : await collection.findOne(filter, command);
                        rows = document ? [document] : [];
                    }
                    else {
                        if (update) {
                            await collection.updateMany(filter, update);
                        }
                        const items = collection.find(filter, command);
                        if (sort) {
                            items.sort(...(0, mongodb_1.getSortValue)(sort, coercing));
                        }
                        if (limit > 1) {
                            items.limit(limit);
                        }
                        rows = await items.toArray();
                    }
                }
            }
            else if (!aggregate) {
                // No query and no pipeline: full collection scan.
                if (queryString && (rows = getCache())) {
                    result[i] = rows;
                    continue;
                }
                const items = getCollection(await createClient(item, options)).find();
                if (sort) {
                    items.sort(...(0, mongodb_1.getSortValue)(sort, coercing));
                }
                if (limit > 0) {
                    items.limit(limit);
                }
                rows = await items.toArray();
            }
            else {
                // Aggregate object without a pipeline: nothing ran, so nothing is cacheable.
                queryString = '';
            }
            result[i] = this.setQueryResult(service, auth, queryString, rows, cacheValue);
        }
    }
    finally {
        // Always release connections, including on thrown errors (previously leaked).
        for (const key in clients) {
            clients[key].close();
        }
    }
    return result;
}
exports.executeBatchQuery = executeBatchQuery;
189
/** Builds a MongoDB filter document that matches a single document by its ObjectId. */
function getObjectId(value) {
    const mongodb = require('mongodb');
    return { _id: new mongodb.ObjectId(value) };
}
exports.getObjectId = getObjectId;
@@ -0,0 +1,44 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ const index_1 = require("../index");
4
+ const types_1 = require("../../../types");
5
+ const module_1 = require("../../../module");
6
+ const index_2 = require("../../index");
7
+ function download(credential, service = 'aws', sdk = 'aws-sdk/clients/s3') {
8
+ const s3 = index_1.createStorageClient.call(this, credential, service, sdk);
9
+ return (data, callback) => {
10
+ const { bucket: Bucket, download: target } = data;
11
+ const Key = target.filename;
12
+ if (!Bucket || !Key) {
13
+ callback((0, types_1.errorValue)('Missing property', !Bucket ? 'Bucket' : 'Key'));
14
+ return;
15
+ }
16
+ const location = module_1.default.joinPath(Bucket, Key);
17
+ const params = { Bucket, Key, VersionId: target.versionId };
18
+ s3.getObject(params, (err, result) => {
19
+ if (!err) {
20
+ callback(null, result.Body);
21
+ const deleteObject = target.deleteObject;
22
+ if (deleteObject) {
23
+ s3.deleteObject((0, types_1.isPlainObject)(deleteObject) ? Object.assign(deleteObject, params) : params, error => {
24
+ if (!error) {
25
+ this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, "Delete success" /* CMD_CLOUD.DELETE_FILE */, location, { ...index_2.default.LOG_CLOUD_DELETE });
26
+ }
27
+ else {
28
+ this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Delete failed" /* ERR_CLOUD.DELETE_FAIL */, location], error, { ...index_2.default.LOG_CLOUD_FAIL, fatal: !!target.active });
29
+ }
30
+ });
31
+ }
32
+ }
33
+ else {
34
+ callback(err);
35
+ }
36
+ });
37
+ };
38
+ }
39
+ exports.default = download;
40
+
41
+ if (exports.default) {
42
+ module.exports = exports.default;
43
+ module.exports.default = exports.default;
44
+ }
package/aws/index.js ADDED
@@ -0,0 +1,381 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.isNoSuchBucket = exports.getBucketKey = exports.checkBucketCannedACL = exports.setDatabaseEndpoint = exports.executeBatchQuery = exports.executeQuery = exports.deleteObjectsV2 = exports.deleteObjects = exports.setBucketWebsite = exports.setBucketPolicy = exports.createBucketV2 = exports.createBucket = exports.createDatabaseClient = exports.createStorageClient = exports.validateDatabase = exports.validateStorage = exports.getPrivatePolicy = exports.getBucketPublicReadPolicy = exports.getPublicReadPolicy = exports.isDatabaseDefined = exports.isEnvDefined = exports.isAccessDefined = void 0;
4
+ const util_1 = require("../util");
5
+ const types_1 = require("../../types");
6
+ const module_1 = require("../../module");
7
+ const index_1 = require("../index");
8
// Explicit access-control policy granting READ to the global AuthenticatedUsers
// group; used in place of the 'authenticated-read' canned ACL for IBM COS
// (see setCannedAcl / setBucketPolicy).
const ACP_AUTHENTICATEDREAD = {
    Grants: [{
        Grantee: { Type: 'Group', URI: 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers' },
        Permission: 'READ'
    }]
};
14
/**
 * Applies a canned ACL (or the equivalent bucket policy) to a bucket.
 * No-op for OCI. IBM COS goes through putBucketAcl; every other provider
 * receives a generated bucket policy. An 'OperationAborted' failure is
 * retried once after one minute.
 * @param ACL Canned ACL name, or 1 for the bucket-level public-read policy.
 * @param recursive Internal flag marking the delayed retry attempt.
 */
async function setCannedAcl(S3, Bucket, ACL, service = 'aws', recursive) {
    if (service === 'oci') {
        return;
    }
    const report = (err) => {
        if (!err) {
            this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ['Grant ' + ACL, Bucket], '', { ...recursive ? index_1.default.LOG_CLOUD_DELAYED : index_1.default.LOG_CLOUD_COMMAND });
            return;
        }
        if (recursive) {
            // Retry attempt already failed once; stay silent.
            return;
        }
        if (err instanceof Error && err.code === 'OperationAborted') {
            // Bucket is busy (e.g. just created) -- schedule a single delayed retry.
            setTimeout(() => setCannedAcl.call(this, S3, Bucket, ACL, service, true), 60000 /* TIME.m */);
            this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, [`Grant ${ACL} (delayed)`, Bucket], err, { ...index_1.default.LOG_CLOUD_DELAYED });
        }
        else {
            this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ['Unable to grant ' + ACL, Bucket], err, { ...index_1.default.LOG_CLOUD_WARN });
        }
    };
    let request;
    if (service === 'ibm') {
        if (ACL === 1) {
            ACL = 'public-read';
        }
        request = S3.putBucketAcl(ACL === 'authenticated-read' ? { Bucket, AccessControlPolicy: ACP_AUTHENTICATEDREAD } : { Bucket, ACL }).promise();
    }
    else {
        let Policy;
        switch (ACL) {
            case 1:
                Policy = getBucketPublicReadPolicy(Bucket);
                break;
            case 'public-read':
                Policy = getPublicReadPolicy(Bucket);
                break;
            case 'public-read-write':
                Policy = getPublicReadPolicy(Bucket, false, true);
                break;
            case 'authenticated-read':
                Policy = getPublicReadPolicy(Bucket, true);
                break;
            default:
                Policy = getPrivatePolicy(Bucket);
                break;
        }
        request = S3.putBucketPolicy({ Bucket, Policy }).promise();
    }
    return request.then(() => report(null)).catch(err => report(err));
}
60
/** True when the credential carries explicit AWS access (key pair or session token). */
function isAccessDefined(credential) {
    const { accessKeyId, secretAccessKey, sessionToken } = credential;
    return !!((accessKeyId && secretAccessKey) || sessionToken);
}
63
+ exports.isAccessDefined = isAccessDefined;
64
/** True when AWS credentials are present in the process environment. */
function isEnvDefined() {
    const env = process.env;
    return !!((env.AWS_ACCESS_KEY_ID && env.AWS_SECRET_ACCESS_KEY) || env.AWS_SESSION_TOKEN);
}
67
+ exports.isEnvDefined = isEnvDefined;
68
/** True when a DynamoDB request names a table and a region/endpoint can be resolved. */
function isDatabaseDefined(credential, data) {
    if (!data.table) {
        return false;
    }
    return !!(credential.region || credential.endpoint || process.env.AWS_DEFAULT_REGION);
}
71
+ exports.isDatabaseDefined = isDatabaseDefined;
72
/**
 * Builds an object-level read (optionally read-write) bucket policy as a JSON string.
 * @param bucket Bucket name used in the ARN.
 * @param authenticated Restrict the grant to authenticated AWS principals.
 * @param write Also grant put/delete-version permissions.
 */
function getPublicReadPolicy(bucket, authenticated, write) {
    const actions = ["s3:GetObject", "s3:GetObjectVersion"];
    if (write) {
        actions.push("s3:PutObject", "s3:DeleteObjectVersion");
    }
    const statement = {
        "Sid": (authenticated ? "AuthenticatedRead" : "PublicRead") + (write ? "Write" : ""),
        "Effect": "Allow",
        "Principal": authenticated ? { "AWS": "*" } : "*",
        "Action": actions,
        "Resource": [`arn:aws:s3:::${bucket}/*`]
    };
    return JSON.stringify({ "Version": "2012-10-17", "Statement": [statement] });
}
88
+ exports.getPublicReadPolicy = getPublicReadPolicy;
89
/** Builds a bucket-level public listing policy (list objects/versions/uploads) as a JSON string. */
function getBucketPublicReadPolicy(bucket) {
    const statement = {
        "Sid": "BucketPublicRead",
        "Effect": "Allow",
        "Principal": "*",
        "Action": ["s3:ListBucket", "s3:ListBucketVersions", "s3:ListBucketMultipartUploads"],
        "Resource": [`arn:aws:s3:::${bucket}`]
    };
    return JSON.stringify({ "Version": "2012-10-17", "Statement": [statement] });
}
101
+ exports.getBucketPublicReadPolicy = getBucketPublicReadPolicy;
102
/** Builds a deny-all policy covering both the bucket and its objects, as a JSON string. */
function getPrivatePolicy(bucket) {
    const statement = {
        "Sid": "Private",
        "Effect": "Deny",
        "Principal": "*",
        "Action": "*",
        "Resource": [`arn:aws:s3:::${bucket}`, `arn:aws:s3:::${bucket}/*`]
    };
    return JSON.stringify({ "Version": "2012-10-17", "Statement": [statement] });
}
114
+ exports.getPrivatePolicy = getPrivatePolicy;
115
/**
 * True when any usable credential source exists: explicit keys, environment
 * variables, a config file path, a shared-ini profile, or AWS_SDK_LOAD_CONFIG.
 */
function validateStorage(credential) {
    if (isAccessDefined(credential) || isEnvDefined()) {
        return true;
    }
    return !!(credential.fromPath || credential.profile || process.env.AWS_SDK_LOAD_CONFIG === '1');
}
exports.validateStorage = validateStorage;
119
/** True when both the DynamoDB request shape and the storage credentials are usable. */
function validateDatabase(credential, data) {
    return isDatabaseDefined(credential, data) ? validateStorage(credential) : false;
}
exports.validateDatabase = validateDatabase;
123
/**
 * Instantiates an S3 (or S3-compatible) client.
 * For 'aws': supports config-file loading via `fromPath` and shared-ini
 * profiles; other services load the constructor from the given sdk module path.
 * @throws Rethrows any failure after reporting the missing SDK package.
 */
function createStorageClient(credential, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    try {
        if (service !== 'aws') {
            const S3 = require(sdk);
            return new S3(credential);
        }
        const S3 = require('aws-sdk/clients/s3');
        if (credential.fromPath) {
            const client = new S3();
            client.config.loadFromPath(credential.fromPath);
            return client;
        }
        if (credential.profile || process.env.AWS_SDK_LOAD_CONFIG === '1' && !isAccessDefined(credential) && !isEnvDefined()) {
            // Fall back to the shared credentials/config file (~/.aws) for this profile.
            const { SharedIniFileCredentials } = require('aws-sdk');
            credential = new SharedIniFileCredentials({ profile: credential.profile });
        }
        return new S3(credential);
    }
    catch (err) {
        // Emit an install hint for the missing SDK package, then rethrow.
        this.checkPackage(err, sdk.split('/')[0], { passThrough: true });
        throw err;
    }
}
exports.createStorageClient = createStorageClient;
147
/**
 * Instantiates a DynamoDB DocumentClient. Credentials may come from a JSON
 * config file (`fromPath`), a shared-ini profile, or the credential object itself.
 * @throws Rethrows any failure after reporting a missing "aws-sdk" package.
 */
function createDatabaseClient(credential) {
    try {
        let options;
        if (credential.fromPath) {
            // Global SDK config load; the client is then created with no explicit options.
            const AWS = require('aws-sdk');
            AWS.config.loadFromPath(credential.fromPath);
        }
        else if (credential.profile || process.env.AWS_SDK_LOAD_CONFIG === '1' && !isAccessDefined(credential) && !isEnvDefined()) {
            const { SharedIniFileCredentials } = require('aws-sdk');
            options = new SharedIniFileCredentials({ profile: credential.profile });
        }
        else {
            options = credential;
        }
        const DynamoDB = require('aws-sdk/clients/dynamodb');
        return new DynamoDB.DocumentClient(options);
    }
    catch (err) {
        this.checkPackage(err, 'aws-sdk', { passThrough: true });
        throw err;
    }
}
exports.createDatabaseClient = createDatabaseClient;
170
/** Back-compat wrapper around createBucketV2 taking a simple publicRead flag. */
function createBucket(credential, Bucket, publicRead, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    const ACL = publicRead ? 'public-read' : undefined;
    return createBucketV2.call(this, credential, Bucket, ACL, undefined, service, sdk);
}
exports.createBucket = createBucket;
174
/**
 * Ensures a bucket exists (creating it if necessary) and applies the requested
 * canned ACL / bucket policy. Resolves true on success, false when the bucket
 * could not be created.
 * @param ACL Canned ACL name or 1 for the bucket-level public-read policy;
 *            unrecognized values are ignored.
 */
function createBucketV2(credential, Bucket, ACL, options, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    ACL = ACL === 1 ? 1 : checkBucketCannedACL(ACL);
    const S3 = createStorageClient.call(this, credential, service, sdk);
    // Shared tail: grant the ACL (if any) and report success.
    const applyAcl = async () => {
        if (ACL) {
            await setCannedAcl.call(this, S3, Bucket, ACL, service);
        }
        return true;
    };
    return S3.headBucket({ Bucket }).promise()
        .then(applyAcl)
        .catch(() => {
            const params = { ...options, Bucket };
            const region = credential.region;
            // us-east-1 must not be sent as a LocationConstraint on AWS proper.
            if (!params.CreateBucketConfiguration && typeof region === 'string' && (region !== 'us-east-1' || service !== 'aws')) {
                params.CreateBucketConfiguration = { LocationConstraint: region };
            }
            return S3.createBucket(params).promise()
                .then(() => {
                    this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Bucket created" /* CMD_CLOUD.CREATE_BUCKET */, Bucket], '', { ...index_1.default.LOG_CLOUD_COMMAND });
                    return applyAcl();
                })
                .catch(err => {
                    switch (err instanceof Error && err.code) {
                        case 'BucketAlreadyExists':
                        case 'BucketAlreadyOwnedByYou':
                            // Lost a race to create the bucket -- still apply the ACL.
                            return applyAcl();
                        default:
                            this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to create bucket" /* ERR_CLOUD.CREATE_BUCKET */, Bucket], err, { ...index_1.default.LOG_CLOUD_FAIL });
                            return false;
                    }
                });
        });
}
exports.createBucketV2 = createBucketV2;
214
/**
 * Applies a bucket policy or ACL. A string `Policy` is sent via putBucketPolicy
 * (except on IBM COS); everything else goes through putBucketAcl.
 * Resolves true on success, false on failure (NoSuchBucket fails silently).
 */
function setBucketPolicy(credential, Bucket, options, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    const S3 = createStorageClient.call(this, credential, service, sdk);
    options.Bucket = Bucket;
    if (service === 'ibm' && 'ACL' in options && options.ACL === 'authenticated-read') {
        // IBM COS lacks the 'authenticated-read' canned ACL; substitute the explicit grant.
        options.AccessControlPolicy = ACP_AUTHENTICATEDREAD;
        delete options.ACL;
    }
    const usePolicy = 'Policy' in options && (0, types_1.isString)(options.Policy) && service !== 'ibm';
    const request = usePolicy ? S3.putBucketPolicy(options) : S3.putBucketAcl(options);
    return request.promise()
        .then(() => {
            this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Bucket policy configured" /* CMD_CLOUD.POLICY_BUCKET */, Bucket], '', { ...index_1.default.LOG_CLOUD_COMMAND });
            return true;
        })
        .catch(err => {
            if (!(0, exports.isNoSuchBucket)(err)) {
                this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to update bucket policy" /* ERR_CLOUD.POLICY_BUCKET */, Bucket], err, { ...index_1.default.LOG_CLOUD_FAIL, fatal: false });
            }
            return false;
        });
}
exports.setBucketPolicy = setBucketPolicy;
234
/**
 * Configures static-website hosting for a bucket from `indexPage`/`errorPage`.
 * Resolves true on success, false on failure (NoSuchBucket fails silently).
 */
function setBucketWebsite(credential, Bucket, options, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    const S3 = createStorageClient.call(this, credential, service, sdk);
    const WebsiteConfiguration = {};
    const { indexPage, errorPage } = options;
    if ((0, types_1.isString)(indexPage)) {
        WebsiteConfiguration.IndexDocument = { Suffix: indexPage };
    }
    if ((0, types_1.isString)(errorPage)) {
        WebsiteConfiguration.ErrorDocument = { Key: errorPage };
    }
    return S3.putBucketWebsite({ Bucket, WebsiteConfiguration }).promise()
        .then(() => {
            this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Bucket configured" /* CMD_CLOUD.CONFIGURE_BUCKET */, Bucket], WebsiteConfiguration, { ...index_1.default.LOG_CLOUD_COMMAND });
            return true;
        })
        .catch(err => {
            if (!(0, exports.isNoSuchBucket)(err)) {
                this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to configure bucket" /* ERR_CLOUD.CONFIGURE_BUCKET */, Bucket], err, { ...index_1.default.LOG_CLOUD_FAIL, fatal: false });
            }
            return false;
        });
}
exports.setBucketWebsite = setBucketWebsite;
256
/** Back-compat alias: recursively empties the bucket via deleteObjectsV2. */
function deleteObjects(credential, Bucket, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    return deleteObjectsV2.call(this, credential, Bucket, true, service, sdk);
}
exports.deleteObjects = deleteObjects;
260
/**
 * Empties a bucket. When `recursive` is false only top-level keys (those
 * without a '/') are removed. Failures are logged; the returned promise
 * always resolves.
 */
async function deleteObjectsV2(credential, Bucket, recursive = true, service = 'aws', sdk = 'aws-sdk/clients/s3') {
    const S3 = createStorageClient.call(this, credential, service, sdk);
    return S3.listObjects({ Bucket }).promise()
        .then(({ Contents }) => {
            if (!Contents?.length) {
                return;
            }
            let Objects = Contents.map(data => ({ Key: data.Key }));
            if (!recursive) {
                Objects = Objects.filter(value => !value.Key.includes('/'));
            }
            return S3.deleteObjects({ Bucket, Delete: { Objects } }).promise()
                .then(data => {
                    if ((0, types_1.isArray)(data.Deleted)) {
                        const files = data.Deleted.length + ' files';
                        this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Bucket emptied" /* CMD_CLOUD.EMPTY_BUCKET */ + ` (${recursive ? 'recursive' : files})`, Bucket], recursive ? files : '', { ...index_1.default.LOG_CLOUD_COMMAND });
                    }
                })
                .catch(err => {
                    this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to empty bucket" /* ERR_CLOUD.DELETE_BUCKET */, Bucket], err, { ...index_1.default.LOG_CLOUD_FAIL, fatal: false });
                });
        })
        .catch(err => {
            if (!(0, exports.isNoSuchBucket)(err)) {
                this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to list bucket" /* ERR_CLOUD.LIST_BUCKET */, Bucket], err, { ...index_1.default.LOG_CLOUD_FAIL, fatal: false });
            }
        });
}
exports.deleteObjectsV2 = deleteObjectsV2;
288
/** Convenience wrapper: runs a single DynamoDB query through executeBatchQuery. */
async function executeQuery(credential, data, sessionKey) {
    const batchResult = await executeBatchQuery.call(this, credential, [data], sessionKey);
    return batchResult[0] || [];
}
exports.executeQuery = executeQuery;
292
/**
 * Executes a batch of DynamoDB reads -- get-by-key or query -- with optional
 * update-before-read and query-result caching. One DocumentClient is shared
 * by the entire batch.
 * @returns Array of row arrays, one entry per batch item.
 * @throws When an item has no table, or has neither a usable key nor a query object.
 */
async function executeBatchQuery(credential, batch, sessionKey) {
    const length = batch.length;
    const result = new Array(length);
    const caching = length > 0 && this.hasCache(batch[0].service, sessionKey);
    const cacheValue = { value: this.valueOfKey(credential, 'cache'), sessionKey };
    let client;
    // Lazily construct a single shared DocumentClient (endpoint resolved first).
    const createClient = () => {
        setDatabaseEndpoint(credential);
        if (!client) {
            client = createDatabaseClient.call(this, length === 1 ? credential : { ...credential });
        }
        return client;
    };
    for (let i = 0; i < length; ++i) {
        const item = batch[i];
        const { service, table, id = '', query, partitionKey, key = partitionKey, limit = 0, update, ignoreCache } = item;
        if (!table) {
            throw (0, util_1.formatError)(item, "Missing database table" /* ERR_DB.TABLE */);
        }
        const renewCache = ignoreCache === 0;
        const getCache = (value) => {
            if (ignoreCache === 1) {
                return;
            }
            cacheValue.renewCache = renewCache;
            return this.getQueryResult(service, credential, value, cacheValue);
        };
        let rows;
        // Non-empty only when this item participates in the query cache.
        let queryString = caching && ignoreCache !== true || ignoreCache === false || ignoreCache === 1 || renewCache ? table + '_' : '';
        if (key && (id || (0, types_1.isPlainObject)(key))) {
            // Single-item lookup by primary key (optionally applying an update first).
            if (queryString) {
                queryString += module_1.default.asString(key, true) + id;
                if (!update && (rows = getCache(queryString))) {
                    result[i] = rows;
                    continue;
                }
            }
            const Key = (0, types_1.isPlainObject)(key) ? key : { [key]: id };
            const command = { TableName: table, Key };
            client = createClient();
            if (update) {
                await client.update({ ...command, ...update }).promise();
            }
            const output = await client.get(command).promise();
            if (output.Item) {
                rows = [output.Item];
            }
        }
        else if ((0, types_1.isPlainObject)(query)) {
            if (queryString && (rows = getCache(queryString += module_1.default.asString(query, true) + limit))) {
                result[i] = rows;
                continue;
            }
            query.TableName = table;
            if (limit > 0) {
                query.Limit = limit;
            }
            const output = await createClient().query(query).promise();
            if (output.Count && output.Items) {
                rows = output.Items;
            }
        }
        else {
            throw (0, util_1.formatError)(item, "Missing database query" /* ERR_DB.QUERY */);
        }
        result[i] = this.setQueryResult(service, credential, queryString, rows, cacheValue);
    }
    return result;
}
exports.executeBatchQuery = executeBatchQuery;
358
/**
 * Fills in a default DynamoDB endpoint from the region (or env/us-east-1),
 * and back-fills `config.region` from the endpoint host when missing.
 * Mutates `config` in place.
 */
function setDatabaseEndpoint(config) {
    if (!config.endpoint) {
        const region = (0, types_1.isString)(config.region) ? config.region : process.env.AWS_DEFAULT_REGION || 'us-east-1';
        config.endpoint = `https://dynamodb.${region}.amazonaws.com`;
    }
    if (!config.region && (0, types_1.isString)(config.endpoint)) {
        const match = /dynamodb\.([^.]+)\./i.exec(config.endpoint);
        const region = (match?.[1] || process.env.AWS_DEFAULT_REGION)?.toLowerCase();
        // us-east-1 is the SDK default, so only non-default regions are written back.
        if (region && region !== 'us-east-1') {
            config.region = region;
        }
    }
}
exports.setDatabaseEndpoint = setDatabaseEndpoint;
368
/**
 * Validates a canned bucket ACL name.
 * @returns The value itself when recognized, otherwise undefined.
 */
function checkBucketCannedACL(value) {
    const allowed = ['private', 'public-read', 'public-read-write', 'authenticated-read'];
    if (allowed.includes(value)) {
        return value;
    }
}
377
+ exports.checkBucketCannedACL = checkBucketCannedACL;
378
/** Builds a stable cache key identifying a bucket request (credential + bucket + ACL + service + sdk). */
const getBucketKey = (credential, Bucket, acl, service, sdk) => `${module_1.default.asString(credential, true)}${Bucket}_${acl || ''}${service}${sdk}`;
exports.getBucketKey = getBucketKey;
380
/** True when the error is an AWS SDK "NoSuchBucket" error (the bucket does not exist). */
const isNoSuchBucket = (err) => {
    return err instanceof Error && err.code === 'NoSuchBucket';
};
381
+ exports.isNoSuchBucket = isNoSuchBucket;