@e-mc/cloud 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,234 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const path = require("path");
+ const fs = require("fs");
+ const index_1 = require("../index");
+ const util_1 = require("../../util");
+ const types_1 = require("../../../types");
+ const module_1 = require("../../../module");
+ const index_2 = require("../../index");
+ const BUCKET_SESSION = new Set();
+ const BUCKET_RESPONSE = {};
+ const getBucketKey = (credential, bucket, acl = '') => module_1.default.asString(credential, true) + bucket + '_' + acl;
+ function upload(credential, service = 'gcp') {
+     const storage = index_1.createStorageClient.call(this, credential);
+     return async (data, callback) => {
+         var _a;
+         const firebase = (0, index_1.isFirebaseApp)(credential, true, true);
+         const { bucket, localUri } = data;
+         const { pathname = '', fileGroup, contentType, metadata, endpoint, active, publicRead, acl, admin = {}, overwrite, options } = data.upload;
+         let filename = data.upload.filename || path.basename(localUri), bucketClient, bucketKey, listFiles;
+         const uploadResponse = (err, url) => {
+             if (!firebase) {
+                 BUCKET_SESSION.delete(bucket);
+                 if (bucketKey) {
+                     delete BUCKET_RESPONSE[bucketKey];
+                 }
+             }
+             callback(err, url);
+         };
+         if (firebase) {
+             const { ref, listAll } = require('@firebase/storage');
+             bucketClient = ref(storage, bucket);
+             if (!overwrite) {
+                 try {
+                     listFiles = (await listAll(bucketClient)).items.map(item => item.fullPath);
+                 }
+                 catch (err) {
+                     uploadResponse(err);
+                     return;
+                 }
+             }
+         }
+         else {
+             if (!BUCKET_SESSION.has(bucket)) {
+                 const bucketAcl = admin.publicRead ? 'publicRead' : admin.acl;
+                 const response = BUCKET_RESPONSE[_a = bucketKey = getBucketKey(credential, bucket, bucketAcl)] || (BUCKET_RESPONSE[_a] = index_1.createBucketV2.call(this, credential, bucket, bucketAcl, admin.configBucket?.create || (0, index_1.createBucketRequest)(credential)));
+                 if (!await response) {
+                     uploadResponse(null);
+                     return;
+                 }
+                 BUCKET_SESSION.add(bucket);
+             }
+             bucketClient = storage.bucket(bucket);
+         }
+         if (!overwrite && (!listFiles || listFiles.length)) {
+             const current = filename;
+             const next = (0, util_1.generateFilename)(filename);
+             let i = 0, exists;
+             do {
+                 if (i > 0) {
+                     [filename, exists] = next(i);
+                     if (!exists) {
+                         break;
+                     }
+                 }
+                 const name = pathname ? module_1.default.joinPath(pathname, filename) : filename;
+                 if (firebase) {
+                     exists = listFiles.includes(name);
+                 }
+                 else {
+                     [exists] = await bucketClient.file(name).exists();
+                 }
+             } while (exists && ++i);
+             if (i > 0) {
+                 this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["File renamed" /* CMD_CLOUD.RENAME_FILE */, current], filename, { ...index_2.default.LOG_CLOUD_WARN });
+             }
+         }
+         const Key = [filename];
+         const Body = [data.buffer];
+         const ContentType = [contentType];
+         const getURL = (value) => endpoint ? module_1.default.joinPath(endpoint, value) : module_1.default.joinPath("https://storage.googleapis.com" /* STRINGS.BASE_URL */, bucket, value);
+         const addLog = (err) => err instanceof Error && this.addLog(this.statusType.WARN, err.message, service + ': ' + bucket);
+         if (fileGroup) {
+             for (const [content, ext, localFile] of fileGroup) {
+                 Key.push(ext === '.map' && localFile ? path.basename(localFile) : filename + ext);
+                 Body.push(content);
+             }
+         }
+         for (let i = 0; i < Key.length; ++i) {
+             const first = i === 0;
+             if (this.aborted) {
+                 if (first) {
+                     uploadResponse((0, types_1.createAbortError)());
+                 }
+                 return;
+             }
+             if (firebase) {
+                 const { ref, uploadBytes } = require('@firebase/storage');
+                 if (!Buffer.isBuffer(Body[i])) {
+                     try {
+                         Body[i] = fs.readFileSync(Body[i]);
+                     }
+                     catch (err) {
+                         if (first) {
+                             uploadResponse(err);
+                             return;
+                         }
+                         addLog(err);
+                         continue;
+                     }
+                 }
+                 const params = { ...options };
+                 if (first) {
+                     params.contentType || (params.contentType = ContentType[i]);
+                     if (metadata) {
+                         params.customMetadata = metadata;
+                     }
+                 }
+                 else {
+                     params.contentType = ContentType[i];
+                 }
+                 const destination = pathname ? module_1.default.joinPath(pathname, Key[i]) : Key[i];
+                 uploadBytes(ref(bucketClient, destination), Body[i], params)
+                     .then(() => {
+                         const url = getURL(destination);
+                         this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, "Upload success" /* CMD_CLOUD.UPLOAD_FILE */, url, { ...index_2.default.LOG_CLOUD_UPLOAD });
+                         if (first) {
+                             uploadResponse(null, url);
+                         }
+                     })
+                     .catch(err => {
+                         if (first) {
+                             uploadResponse(err);
+                         }
+                         else {
+                             addLog(err);
+                         }
+                     });
+             }
+             else {
+                 let outputUri, tempDir;
+                 try {
+                     const output = Body[i];
+                     if (Buffer.isBuffer(output) || path.basename(output) !== Key[i]) {
+                         if (!(tempDir = this.getTempDir({ uuidDir: true }))) {
+                             throw new Error("Unable to create temp directory" /* ERR_CLOUD.TEMP_DIR */);
+                         }
+                         outputUri = path.join(tempDir, Key[i]);
+                         if (typeof output === 'string') {
+                             fs.copyFileSync(output, outputUri);
+                         }
+                         else {
+                             fs.writeFileSync(outputUri, output);
+                         }
+                     }
+                     else {
+                         outputUri = output;
+                     }
+                 }
+                 catch (err) {
+                     if (first) {
+                         uploadResponse(err);
+                         return;
+                     }
+                     addLog(err);
+                     continue;
+                 }
+                 const params = { ...options };
+                 const readable = publicRead || active && publicRead !== false && publicRead !== 0 && !acl;
+                 let ACL, revoke, predefined;
+                 if (first) {
+                     params.contentType || (params.contentType = ContentType[i]);
+                     if (readable) {
+                         ACL = 'publicRead';
+                     }
+                     else {
+                         predefined = true;
+                     }
+                     if (metadata) {
+                         params.metadata = metadata;
+                     }
+                 }
+                 else {
+                     params.contentType = ContentType[i];
+                     if (!params.predefinedAcl) {
+                         if (readable) {
+                             ACL = 'publicRead';
+                         }
+                         else {
+                             predefined = true;
+                         }
+                     }
+                 }
+                 if (predefined) {
+                     if (publicRead === 0) {
+                         ACL = 'publicRead';
+                         revoke = true;
+                     }
+                     if (acl && (!revoke || acl !== ACL)) {
+                         params.predefinedAcl = acl;
+                     }
+                 }
+                 params.destination = pathname ? module_1.default.joinPath(pathname, path.basename(outputUri)) : undefined;
+                 bucketClient.upload(outputUri, params, (err, file) => {
+                     if (file) {
+                         const url = getURL(file.name);
+                         this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, "Upload success" /* CMD_CLOUD.UPLOAD_FILE */, url, { ...index_2.default.LOG_CLOUD_UPLOAD });
+                         if (first) {
+                             uploadResponse(null, url);
+                         }
+                         if (ACL) {
+                             index_1.setPredefinedAcl.call(this, file.acl, ACL, file.bucket, file.name, revoke || file.metadata, true);
+                         }
+                     }
+                     else if (first) {
+                         uploadResponse(err);
+                     }
+                     else {
+                         addLog(err);
+                     }
+                     if (tempDir) {
+                         queueMicrotask(() => module_1.default.removeDir(tempDir));
+                     }
+                 });
+             }
+         }
+     };
+ }
+ exports.default = upload;
+
+ if (exports.default) {
+     module.exports = exports.default;
+     module.exports.default = exports.default;
+ }
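The default export above is a factory: it is called once with a service credential (and with `this` bound to the owning file manager, the IFileManager type declared in index.d.ts below) and returns an async handler that receives the upload descriptor plus an (err, url) callback. A minimal sketch of that calling convention only, not a runnable end-to-end example; the module path, the `host` object, and the credential fields are assumptions, since neither this hunk's filename nor the host implementation appears in this diff:

    const createGCPUpload = require('@e-mc/cloud/gcp/upload'); // assumed path; the hunk's filename is not shown
    // `host` stands in for the file manager instance that supplies formatMessage, addLog, getTempDir, aborted, statusType
    const handler = createGCPUpload.call(host, { keyFilename: '/path/to/service-account.json' });
    handler(
        {
            bucket: 'my-bucket',
            localUri: '/tmp/index.html',
            buffer: Buffer.from('<html></html>'),
            upload: { filename: 'index.html', active: true }
        },
        (err, url) => console.log(err ? err.message : url)
    );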
@@ -0,0 +1,13 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const index_1 = require("../index");
+ function download(credential, service = 'ibm') {
+     (0, index_1.setStorageCredential)(credential);
+     return require('../../aws/download').call(this, credential, service, 'ibm-cos-sdk/clients/s3');
+ }
+ exports.default = download;
+
+ if (exports.default) {
+     module.exports = exports.default;
+     module.exports.default = exports.default;
+ }
package/ibm/index.js ADDED
@@ -0,0 +1,229 @@
+ "use strict";
+ /// <reference lib="dom" />
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.executeBatchQuery = exports.executeQuery = exports.deleteObjectsV2 = exports.deleteObjects = exports.setBucketWebsite = exports.setBucketPolicy = exports.createBucketV2 = exports.createBucket = exports.createDatabaseClient = exports.setStorageCredential = exports.validateDatabase = exports.validateStorage = void 0;
+ const types_1 = require("../../types");
+ const aws_1 = require("../aws");
+ const util_1 = require("../util");
+ const module_1 = require("../../module");
+ function validateStorage(credential) {
+     return !!(credential.apiKeyId && credential.serviceInstanceId && (credential.region || credential.endpoint));
+ }
+ exports.validateStorage = validateStorage;
+ function validateDatabase(credential, data) {
+     if (credential.url && (credential.username && credential.password || credential.apikey)) {
+         return (0, types_1.isString)(data.table);
+     }
+     const serviceName = credential.serviceName || 'CLOUDANT';
+     if (process.env[serviceName + '_URL'] && (process.env[serviceName + '_APIKEY'] || process.env[serviceName + '_USERNAME'] && process.env[serviceName + '_PASSWORD'])) {
+         return (0, types_1.isString)(data.table);
+     }
+     return false;
+ }
+ exports.validateDatabase = validateDatabase;
+ function setStorageCredential(credential) {
+     if (credential.endpoint) {
+         credential.region || (credential.region = /([^.]+)\.cloud-object-storage\.appdomain\.cloud\b/.exec(credential.endpoint)?.[1]);
+     }
+     else if (credential.region) {
+         credential.endpoint || (credential.endpoint = `https://s3.${credential.region}.cloud-object-storage.appdomain.cloud`);
+     }
+     credential.ibmAuthEndpoint = 'https://iam.cloud.ibm.com/identity/token';
+     credential.signatureVersion = 'iam';
+ }
+ exports.setStorageCredential = setStorageCredential;
+ function createDatabaseClient(credential) {
+     try {
+         const Cloudant = require('@ibm-cloud/cloudant/cloudant/v1');
+         const { BasicAuthenticator, IamAuthenticator } = require('ibm-cloud-sdk-core');
+         const { url: serviceUrl, apikey, username, password, serviceName = 'CLOUDANT' } = credential;
+         let authenticator;
+         if (apikey) {
+             authenticator = new IamAuthenticator({ apikey });
+         }
+         else if (username && password) {
+             authenticator = new BasicAuthenticator({ username, password });
+         }
+         if (authenticator) {
+             return Cloudant.newInstance({ ...credential, serviceUrl, authenticator });
+         }
+         return Cloudant.newInstance({ ...credential, serviceUrl, serviceName });
+     }
+     catch (err) {
+         this.checkPackage(err, '@ibm-cloud/cloudant', { passThrough: true });
+         throw err;
+     }
+ }
+ exports.createDatabaseClient = createDatabaseClient;
+ function createBucket(credential, bucket, publicRead) {
+     return aws_1.createBucketV2.call(this, credential, bucket, publicRead ? 'public-read' : undefined, undefined, 'ibm', 'ibm-cos-sdk/clients/s3');
+ }
+ exports.createBucket = createBucket;
+ function createBucketV2(credential, bucket, ACL, options) {
+     return aws_1.createBucketV2.call(this, credential, bucket, ACL, options, 'ibm', 'ibm-cos-sdk/clients/s3');
+ }
+ exports.createBucketV2 = createBucketV2;
+ function setBucketPolicy(credential, bucket, options) {
+     return aws_1.setBucketPolicy.call(this, credential, bucket, options, 'ibm', 'ibm-cos-sdk/clients/s3');
+ }
+ exports.setBucketPolicy = setBucketPolicy;
+ function setBucketWebsite(credential, bucket, options) {
+     return aws_1.setBucketWebsite.call(this, credential, bucket, options, 'ibm', 'ibm-cos-sdk/clients/s3');
+ }
+ exports.setBucketWebsite = setBucketWebsite;
+ function deleteObjects(credential, bucket) {
+     return deleteObjectsV2.call(this, credential, bucket, true);
+ }
+ exports.deleteObjects = deleteObjects;
+ function deleteObjectsV2(credential, bucket, recursive = true) {
+     setStorageCredential(credential);
+     return aws_1.deleteObjectsV2.call(this, credential, bucket, recursive, 'ibm', 'ibm-cos-sdk/clients/s3');
+ }
+ exports.deleteObjectsV2 = deleteObjectsV2;
+ async function executeQuery(credential, data, sessionKey) {
+     return (await executeBatchQuery.call(this, credential, [data], sessionKey))[0] || [];
+ }
+ exports.executeQuery = executeQuery;
+ async function executeBatchQuery(credential, batch, sessionKey) {
+     var _a;
+     const length = batch.length;
+     const result = new Array(length);
+     const caching = length > 0 && this.hasCache(batch[0].service, sessionKey);
+     const cacheValue = { value: this.valueOfKey(credential, 'cache'), sessionKey };
+     let client;
+     const createClient = () => client || (client = createDatabaseClient.call(this, length === 1 ? credential : { ...credential }));
+     for (let i = 0; i < length; ++i) {
+         const item = batch[i];
+         const { service, table: db, id: docId, query, limit = 0, update, ignoreCache } = item;
+         if (!db) {
+             throw (0, util_1.formatError)(item, "Missing database name" /* ERR_DB.NAME */);
+         }
+         const renewCache = ignoreCache === 0;
+         const getCache = (value) => {
+             if (ignoreCache === 1) {
+                 return;
+             }
+             cacheValue.renewCache = renewCache;
+             return this.getQueryResult(service, credential, value, cacheValue);
+         };
+         let rows, queryString = caching && ignoreCache !== true || ignoreCache === false || ignoreCache === 1 || renewCache ? db + '_' : '';
+         if (docId) {
+             const doc = update && update.document;
+             if (queryString) {
+                 queryString += docId;
+                 if (!doc && (rows = getCache(queryString))) {
+                     result[i] = rows;
+                     continue;
+                 }
+             }
+             client = createClient();
+             let { status, result: document } = await client.getDocument({ db, docId });
+             if (status === 200 /* HTTP_STATUS.OK */) {
+                 rows = [document];
+             }
+             if (doc) {
+                 let _rev = doc._rev;
+                 const failed = (message, fatal) => {
+                     if (fatal) {
+                         item.transactionFail = true;
+                     }
+                     else {
+                         delete item.update;
+                     }
+                     this.addLog(types_1.STATUS_TYPE.WARN, service + ` -> ${message} (_id=${docId};_rev=${_rev || "Unknown" /* ERR_MESSAGE.UNKNOWN */})`);
+                 };
+                 if (rows) {
+                     try {
+                         _rev = document._rev;
+                         update.db = db;
+                         update.document = { ...document, ...doc, _id: docId, _rev };
+                         ({ status } = await client.postDocument(update));
+                         if (status === 200 /* HTTP_STATUS.OK */ || status === 202 /* HTTP_STATUS.ACCEPTED */) {
+                             rows = undefined;
+                             ({ status, result: document } = await client.getDocument({ db, docId }));
+                             if (status === 200 /* HTTP_STATUS.OK */) {
+                                 rows = [document];
+                             }
+                             else {
+                                 failed('Update success (GET failed)');
+                             }
+                         }
+                         else {
+                             failed('Update failed');
+                         }
+                     }
+                     catch {
+                         failed('Update failed');
+                     }
+                 }
+                 else {
+                     failed('Row does not exist', true);
+                 }
+             }
+         }
+         else {
+             let partitionKey = item.partitionKey;
+             if (query) {
+                 if ((0, types_1.isString)(partitionKey)) {
+                     (_a = query).partitionKey || (_a.partitionKey = partitionKey);
+                 }
+                 else {
+                     partitionKey = query.partitionKey;
+                 }
+                 if (queryString && (rows = getCache(queryString += module_1.default.asString(query, true) + limit))) {
+                     result[i] = rows;
+                     continue;
+                 }
+                 client = createClient();
+                 query.db || (query.db = db);
+                 if (limit > 0) {
+                     query.limit || (query.limit = limit);
+                 }
+                 let status;
+                 if (query.selector) {
+                     let document;
+                     ({ status, result: document } = partitionKey ? await client.postPartitionFind(query) : await client.postFind(query));
+                     if (status === 200 /* HTTP_STATUS.OK */ && document.docs) {
+                         rows = document.docs;
+                     }
+                 }
+                 else if (query.view) {
+                     let document;
+                     ({ status, result: document } = partitionKey ? await client.postPartitionView(query) : await client.postView(query));
+                     if (status === 200 /* HTTP_STATUS.OK */ && document.rows) {
+                         rows = document.rows.map(row => row.value);
+                     }
+                 }
+                 else if (query.index) {
+                     let document;
+                     ({ status, result: document } = partitionKey ? await client.postPartitionSearch(query) : await client.postSearch(query));
+                     if (status === 200 /* HTTP_STATUS.OK */ && document.rows) {
+                         rows = await Promise.all(document.rows.map(row => Object.keys(row.fields).length ? Promise.resolve(row.fields) : client.getDocument({ db, docId: row.id }).then(doc => doc.result)));
+                     }
+                 }
+             }
+             else if (partitionKey) {
+                 if (queryString && (rows = getCache(queryString += module_1.default.asString(partitionKey, true) + limit))) {
+                     result[i] = rows;
+                     continue;
+                 }
+                 if (!(0, types_1.isPlainObject)(partitionKey)) {
+                     partitionKey = { db, partitionKey, includeDocs: true };
+                 }
+                 if (limit > 0) {
+                     partitionKey.limit = limit;
+                 }
+                 const { status, result: document } = await createClient().postPartitionAllDocs(partitionKey);
+                 if (status === 200 /* HTTP_STATUS.OK */ && document.rows) {
+                     rows = document.rows.map(row => row.value);
+                 }
+             }
+             else {
+                 throw (0, util_1.formatError)(item, "Missing database query" /* ERR_DB.QUERY */);
+             }
+         }
+         result[i] = this.setQueryResult(service, credential, queryString, rows, cacheValue);
+     }
+     return result;
+ }
+ exports.executeBatchQuery = executeBatchQuery;
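As a worked example of setStorageCredential and validateStorage above: given only an endpoint, the region is parsed out of the cloud-object-storage hostname, and the IAM auth endpoint and signature version are pinned for ibm-cos-sdk. A short sketch with illustrative credential values (the require path follows the package/ibm/index.js filename shown above):

    const { setStorageCredential, validateStorage } = require('@e-mc/cloud/ibm');
    const credential = {
        apiKeyId: 'xxxxxxxx',
        serviceInstanceId: 'crn:v1:example',
        endpoint: 'https://s3.us-south.cloud-object-storage.appdomain.cloud'
    };
    setStorageCredential(credential);
    console.log(validateStorage(credential)); // true
    console.log(credential.region);           // 'us-south', extracted from the endpoint
    console.log(credential.signatureVersion); // 'iam'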
@@ -0,0 +1,13 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const index_1 = require("../index");
+ function upload(credential, service = 'ibm') {
+     (0, index_1.setStorageCredential)(credential);
+     return require('../../aws/upload').call(this, credential, service, 'ibm-cos-sdk/clients/s3');
+ }
+ exports.default = upload;
+
+ if (exports.default) {
+     module.exports = exports.default;
+     module.exports.default = exports.default;
+ }
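This 13-line module, like the earlier download wrapper, only normalizes the credential with setStorageCredential and then delegates to the matching aws handler, passing 'ibm-cos-sdk/clients/s3' so the S3-compatible client is built from IBM's SDK rather than AWS's. A hedged usage sketch (module path and `host` are assumptions, as in the gcp example):

    const createIBMUpload = require('@e-mc/cloud/ibm/upload'); // assumed path; this hunk's filename is not shown
    const credential = { apiKeyId: 'xxxxxxxx', serviceInstanceId: 'crn:v1:example', region: 'us-south' };
    const handler = createIBMUpload.call(host, credential);
    // `handler` has the same (data, callback) shape as the gcp handler sketched earlier, now backed by IBM COS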
package/index.d.ts ADDED
@@ -0,0 +1,6 @@
+ import type { CloudConstructor, IFileManager } from '../types/lib';
+ import type { CloudAsset } from '../types/lib/cloud';
+
+ declare const Cloud: CloudConstructor<IFileManager<CloudAsset>>;
+
+ export = Cloud;