@e-mc/cloud 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +11 -0
- package/README.md +5 -0
- package/atlas/index.js +193 -0
- package/aws/download/index.js +44 -0
- package/aws/index.js +381 -0
- package/aws/upload/index.js +137 -0
- package/aws-v3/download/index.js +42 -0
- package/aws-v3/index.js +285 -0
- package/aws-v3/upload/index.js +157 -0
- package/azure/download/index.js +40 -0
- package/azure/index.js +236 -0
- package/azure/upload/index.js +124 -0
- package/gcp/download/index.js +86 -0
- package/gcp/index.js +801 -0
- package/gcp/upload/index.js +234 -0
- package/ibm/download/index.js +13 -0
- package/ibm/index.js +229 -0
- package/ibm/upload/index.js +13 -0
- package/index.d.ts +6 -0
- package/index.js +832 -0
- package/minio/download/index.js +44 -0
- package/minio/index.js +182 -0
- package/minio/upload/index.js +135 -0
- package/oci/download/index.js +13 -0
- package/oci/index.js +183 -0
- package/oci/upload/index.js +13 -0
- package/package.json +26 -0
- package/types.d.ts +29 -0
- package/util.d.ts +12 -0
- package/util.js +46 -0
package/azure/index.js
ADDED
|
@@ -0,0 +1,236 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.executeBatchQuery = exports.executeQuery = exports.deleteObjectsV2 = exports.deleteObjects = exports.setBucketWebsite = exports.createBucketV2 = exports.createBucket = exports.createDatabaseClient = exports.createStorageClient = exports.validateDatabase = exports.validateStorage = void 0;
|
|
4
|
+
const path = require("path");
|
|
5
|
+
const types_1 = require("../../types");
|
|
6
|
+
const util_1 = require("../util");
|
|
7
|
+
const module_1 = require("../../module");
|
|
8
|
+
const index_1 = require("../index");
|
|
9
|
+
// Returns true when `credential` carries enough information to reach Azure
// Blob Storage: an account name/key pair, a connection string, a SAS token,
// or Azure AD service-principal env vars plus an account name.
// When none are present and the "process.env.apply" setting is enabled, the
// standard AZURE_STORAGE_* environment variables are copied into `credential`
// (mutating it in place) and the result reflects whether that succeeded.
function validateStorage(credential) {
    const env = process.env;
    if (credential.accountName && credential.accountKey || credential.connectionString || credential.sharedAccessSignature || env.AZURE_TENANT_ID && env.AZURE_CLIENT_ID && env.AZURE_CLIENT_SECRET && (credential.accountName || env.AZURE_STORAGE_ACCOUNT)) {
        return true;
    }
    // The assignments below deliberately mutate `credential`; the || / &&
    // chain stops at the first environment variable that is actually set.
    return module_1.default.enabled("process.env.apply" /* KEY_NAME.PROCESS_ENV_APPLY */) && !!((credential.connectionString = env.AZURE_STORAGE_CONNECTION_STRING) || (credential.sharedAccessSignature = env.AZURE_STORAGE_SAS_TOKEN) || (credential.accountName = env.AZURE_STORAGE_ACCOUNT) && (credential.accountKey = env.AZURE_STORAGE_KEY));
}
exports.validateStorage = validateStorage;
|
|
17
|
+
// Returns true when a Cosmos DB request can be attempted: the query targets
// a named database and table, and the credential has (or can obtain from the
// AZURE_COSMOS_* environment variables, when "process.env.apply" is enabled)
// both an endpoint and a key. May mutate `credential` with the env values.
function validateDatabase(credential, data) {
    if (!data.name || !data.table) {
        return false;
    }
    if (credential.endpoint && credential.key) {
        return true;
    }
    if (module_1.default.enabled("process.env.apply" /* KEY_NAME.PROCESS_ENV_APPLY */)) {
        // Assignments intentionally mutate the credential; the key is only
        // copied when the endpoint env var was present.
        return !!((credential.endpoint = process.env.AZURE_COSMOS_ENDPOINT) && (credential.key = process.env.AZURE_COSMOS_KEY));
    }
    return false;
}
exports.validateDatabase = validateDatabase;
|
|
21
|
+
// Builds a BlobServiceClient from whichever credential form is available,
// preferring connection string, then SAS URL, then shared key, and finally
// DefaultAzureCredential (@azure/identity). As a side effect it back-fills
// `credential.accountName` when it can be parsed out of the connection
// string or SAS URL. Throws (after reporting a missing package via
// this.checkPackage) when a required SDK module cannot be loaded.
function createStorageClient(credential) {
    let moduleName = '';
    try {
        moduleName = '@azure/storage-blob';
        const { BlobServiceClient, StorageSharedKeyCredential } = require(moduleName);
        const { accountName, accountKey, connectionString, sharedAccessSignature } = credential;
        if (connectionString) {
            if (!credential.accountName) {
                credential.accountName = /AccountName=([^;]+);/.exec(connectionString)?.[1];
            }
            return BlobServiceClient.fromConnectionString(connectionString);
        }
        if (sharedAccessSignature) {
            // The SAS value is expected to be a full account URL; extract the
            // account name from its host segment.
            if (!credential.accountName) {
                credential.accountName = /^https:\/\/([a-z\d]+)\./.exec(sharedAccessSignature)?.[1];
            }
            return new BlobServiceClient(sharedAccessSignature);
        }
        let authCredential;
        if (accountName && accountKey) {
            authCredential = new StorageSharedKeyCredential(accountName, accountKey);
        }
        else {
            moduleName = '@azure/identity';
            const { DefaultAzureCredential } = require(moduleName);
            authCredential = new DefaultAzureCredential();
        }
        return new BlobServiceClient(`https://${accountName || process.env.AZURE_STORAGE_ACCOUNT}.blob.core.windows.net`, authCredential);
    }
    catch (err) {
        this.checkPackage(err, moduleName, { passThrough: true });
        throw err;
    }
}
exports.createStorageClient = createStorageClient;
|
|
50
|
+
// Instantiates a Cosmos DB client from the given credential. Throws (after
// reporting a missing package via this.checkPackage) when @azure/cosmos is
// not installed.
function createDatabaseClient(credential) {
    const moduleName = '@azure/cosmos';
    try {
        const cosmos = require(moduleName);
        return new cosmos.CosmosClient(credential);
    }
    catch (err) {
        this.checkPackage(err, moduleName, { passThrough: true });
        throw err;
    }
}
exports.createDatabaseClient = createDatabaseClient;
|
|
61
|
+
// Legacy entry point: creates the container, mapping the boolean
// `publicRead` flag onto the V2 access levels ('blob' = public read,
// 'container' = full public listing).
async function createBucket(credential, bucket, publicRead, service = 'azure') {
    const access = publicRead ? 'blob' : 'container';
    return createBucketV2.call(this, credential, bucket, access, undefined, service);
}
exports.createBucket = createBucket;
|
|
65
|
+
// Ensures the container `bucket` exists, optionally applying a public access
// level ('blob' or 'container'; any other value is ignored). Resolves true
// when the container exists or was created, false on a create failure.
// Applying the access policy is best-effort and only logged as a warning.
async function createBucketV2(credential, bucket, access, options, service = 'azure') {
    switch (access) {
        case 'blob':
        case 'container':
            break;
        default:
            // Unrecognized values mean "leave access unchanged".
            access = undefined;
            break;
    }
    const containerClient = createStorageClient.call(this, credential).getContainerClient(bucket);
    const setAccessPolicy = () => containerClient.setAccessPolicy(access).catch(err => this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Unable to configure container" /* ERR_AZURE.CONFIGURE_CONTAINER */, bucket], err, { ...index_1.default.LOG_CLOUD_WARN }));
    // Existence-check failures are treated as "does not exist" and fall
    // through to the create attempt below.
    if (await containerClient.exists().catch(() => false)) {
        if (access) {
            await setAccessPolicy();
        }
        return true;
    }
    // Shallow-copy before mutating so the caller's options object is untouched.
    options = { ...options };
    if (access) {
        options.access = access;
    }
    options.abortSignal = this.signal;
    return containerClient.create(options)
        .then(response => {
            if (response.errorCode) {
                this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ['Container created with errors', bucket], (0, types_1.errorMessage)("Error code" /* ERR_MESSAGE.ERROR_CODE */, response.errorCode), { ...index_1.default.LOG_CLOUD_WARN });
            }
            else {
                this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ['Container created', bucket], response.requestId, { ...index_1.default.LOG_CLOUD_COMMAND });
            }
            return true;
        })
        .catch(async (err) => {
            // A concurrent creator winning the race is still a success;
            // apply the requested access level and report true.
            if (err instanceof Error && err.code === 'ContainerAlreadyExists') {
                if (access) {
                    await setAccessPolicy();
                }
                return true;
            }
            this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to create container" /* ERR_AZURE.CREATE_CONTAINER */, bucket], err, { ...index_1.default.LOG_CLOUD_FAIL });
            return false;
        });
}
exports.createBucketV2 = createBucketV2;
|
|
109
|
+
// Enables static-website hosting on the storage account, mapping the
// generic cloud options (indexPage / indexPath / errorPath) onto the Azure
// staticWebsite service properties. Resolves true on success, false on
// failure (which is logged, not thrown).
function setBucketWebsite(credential, bucket, options, service = 'azure') {
    const { indexPage, indexPath, errorPath } = options;
    const website = { enabled: true };
    if ((0, types_1.isString)(indexPage)) {
        website.indexDocument = path.basename(indexPage);
    }
    if ((0, types_1.isString)(indexPath)) {
        website.defaultIndexDocumentPath = module_1.default.joinPath(bucket, indexPath);
    }
    if ((0, types_1.isString)(errorPath)) {
        website.errorDocument404Path = module_1.default.joinPath(bucket, errorPath);
    }
    const client = createStorageClient.call(this, credential);
    return client.setProperties({ staticWebsite: website })
        .then(() => {
            this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["Bucket configured" /* CMD_CLOUD.CONFIGURE_BUCKET */, bucket], options, { ...index_1.default.LOG_CLOUD_COMMAND });
            return true;
        })
        .catch(err => {
            this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to configure bucket" /* ERR_CLOUD.CONFIGURE_BUCKET */, bucket], err, { ...index_1.default.LOG_CLOUD_FAIL, fatal: false });
            return false;
        });
}
exports.setBucketWebsite = setBucketWebsite;
|
|
131
|
+
// Legacy entry point: empties the container, always recursively.
function deleteObjects(credential, Bucket, service) {
    const recursive = true;
    return deleteObjectsV2.call(this, credential, Bucket, recursive, service);
}
exports.deleteObjects = deleteObjects;
|
|
135
|
+
// Empties the container `bucket`, deleting every blob — or, when
// `recursive` is false, only blobs without a '/' in their name (top level).
// A missing/inaccessible container is a silent no-op; listing failures are
// logged via formatFail. Individual delete failures are absorbed and only
// reduce the reported file count.
//
// FIX: the original set `fileCount = tasks.length` after the listing loop,
// which overwrote any `--fileCount` decrements from deleteBlob rejections
// that settled while the (async) listing was still in progress, inflating
// the logged count. The count is now taken at push time and never reset.
async function deleteObjectsV2(credential, bucket, recursive = true, service = 'azure') {
    const containerClient = createStorageClient.call(this, credential).getContainerClient(bucket);
    if (await containerClient.exists().catch(() => false)) {
        try {
            const tasks = [];
            let fileCount = 0;
            for await (const blob of containerClient.listBlobsFlat({ includeUncommitedBlobs: true })) {
                if (!recursive && blob.name.indexOf('/') !== -1) {
                    continue;
                }
                // Count synchronously before the promise can reject, so a
                // failure settling mid-listing is correctly subtracted.
                ++fileCount;
                tasks.push(containerClient.deleteBlob(blob.name, { versionId: blob.versionId }).catch(() => { --fileCount; }));
            }
            return this.allSettled(tasks, ["Unable to delete blob" /* ERR_AZURE.DELETE_BLOB */, bucket], 64 /* LOG_TYPE.CLOUD */).then(() => {
                const files = fileCount + ' files';
                this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, [`Container emptied (${recursive ? 'recursive' : files})`, bucket], recursive ? files : '', { ...index_1.default.LOG_CLOUD_COMMAND });
            });
        }
        catch (err) {
            this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Unable to list bucket" /* ERR_CLOUD.LIST_BUCKET */, bucket], err, { ...index_1.default.LOG_CLOUD_FAIL, fatal: false });
        }
    }
}
exports.deleteObjectsV2 = deleteObjectsV2;
|
|
159
|
+
// Convenience wrapper: runs a single query through the batch path and
// unwraps its first (only) result set, defaulting to an empty array.
async function executeQuery(credential, data, sessionKey) {
    const results = await executeBatchQuery.call(this, credential, [data], sessionKey);
    return results[0] || [];
}
exports.executeQuery = executeQuery;
|
|
163
|
+
// Executes a batch of Cosmos DB requests (stored procedure, point read/patch
// by id, or SQL query) sharing one lazily-created client, consulting the
// host's query cache per item. Returns an array of row arrays aligned with
// `batch`. Throws (after disposing the client) when an item is missing its
// database name, table, or query.
async function executeBatchQuery(credential, batch, sessionKey) {
    const length = batch.length;
    const result = new Array(length);
    const caching = length > 0 && this.hasCache(batch[0].service, sessionKey);
    const cacheValue = { value: this.valueOfKey(credential, 'cache'), sessionKey };
    let client;
    // Client is created on first use only; for multi-item batches the
    // credential is shallow-copied — presumably to isolate per-batch
    // mutation of the credential object (TODO confirm).
    const createClient = (name, table) => {
        client || (client = createDatabaseClient.call(this, length === 1 ? credential : { ...credential }));
        return client.database(name).container(table);
    };
    const closeClient = () => client?.dispose();
    for (let i = 0; i < length; ++i) {
        const item = batch[i];
        const { service, name, table, id, query, storedProcedureId, partitionKey, params, limit = 0, update, ignoreCache } = item;
        if (!name || !table) {
            closeClient();
            throw (0, util_1.formatError)(item, name ? "Missing database table" /* ERR_DB.TABLE */ : "Missing database name" /* ERR_DB.NAME */);
        }
        // ignoreCache semantics: true = skip cache entirely, 1 = skip reads
        // but still write, 0 = force-renew, false = always use cache.
        const renewCache = ignoreCache === 0;
        const getCache = (value) => {
            if (ignoreCache === 1) {
                return;
            }
            cacheValue.renewCache = renewCache;
            return this.getQueryResult(service, credential, value, cacheValue);
        };
        // queryString doubles as the cache key; it stays '' when caching is
        // disabled for this item, which also disables setQueryResult storage.
        let rows, queryString = caching && ignoreCache !== true || ignoreCache === false || ignoreCache === 1 || renewCache ? name + '_' + table + '_' : '';
        if (storedProcedureId && params) {
            const { options } = item;
            // Note: getCache() mutates queryString via += inside the call.
            if (queryString && (rows = getCache(queryString += (partitionKey || '') + '_' + storedProcedureId + module_1.default.asString(params, true) + module_1.default.asString(options, true)))) {
                result[i] = rows;
                continue;
            }
            const { statusCode, resource } = await createClient(name, table).scripts.storedProcedure(storedProcedureId).execute(partitionKey, params, options);
            if (statusCode === 200 /* HTTP_STATUS.OK */) {
                rows = Array.isArray(resource) ? resource : [resource];
            }
        }
        else if (id) {
            // Point read (or patch when `update` is supplied) of a single item.
            const { options } = item;
            if (queryString) {
                queryString += (partitionKey || '') + '_' + id + module_1.default.asString(options, true);
                // Patches must always hit the server; only reads use the cache.
                if (!update && (rows = getCache(queryString))) {
                    result[i] = rows;
                    continue;
                }
            }
            const db = createClient(name, table).item(id, partitionKey);
            const { statusCode, resource } = update ? await db.patch(update, options) : await db.read(options); // eslint-disable-line import/namespace
            if (statusCode === 200 /* HTTP_STATUS.OK */) {
                rows = [resource];
            }
        }
        else if (query) {
            // SQL query path; `limit` maps onto Cosmos maxItemCount.
            let { options } = item;
            if (limit > 0) {
                (options || (options = {})).maxItemCount = limit;
            }
            if (queryString && (rows = getCache(queryString += module_1.default.asString(query, true) + module_1.default.asString(options, true)))) {
                result[i] = rows;
                continue;
            }
            rows = (await createClient(name, table).items.query(query, options).fetchAll()).resources;
        }
        else {
            closeClient();
            throw (0, util_1.formatError)(item, "Missing database query" /* ERR_DB.QUERY */);
        }
        // Stores into the cache (when queryString is non-empty) and returns
        // the canonical result value for this item.
        result[i] = this.setQueryResult(service, credential, queryString, rows, cacheValue);
    }
    closeClient();
    return result;
}
exports.executeBatchQuery = executeBatchQuery;
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const path = require("path");
|
|
4
|
+
const fs = require("fs");
|
|
5
|
+
const index_1 = require("../index");
|
|
6
|
+
const util_1 = require("../../util");
|
|
7
|
+
const types_1 = require("../../../types");
|
|
8
|
+
const module_1 = require("../../../module");
|
|
9
|
+
const index_2 = require("../../index");
|
|
10
|
+
// Containers already verified or created during this process lifetime.
const BUCKET_SESSION = new Set();
// In-flight (or settled) createBucketV2 promises keyed by credential+bucket+acl,
// so concurrent uploads to the same container share one create attempt.
const BUCKET_RESPONSE = {};
// Cache key: serialized credential + bucket name + access level.
const getBucketKey = (credential, bucket, acl = '') => module_1.default.asString(credential, true) + bucket + '_' + acl;
|
|
13
|
+
// Returns an async uploader bound to `credential`. The uploader ensures the
// container exists (deduplicated across concurrent calls via BUCKET_SESSION /
// BUCKET_RESPONSE), optionally renames the file to avoid overwriting an
// existing blob, then uploads the primary buffer plus any companion files in
// `fileGroup`. The callback fires once for the primary blob; companion
// upload failures are only logged.
function upload(credential, service = 'azure') {
    const blobServiceClient = index_1.createStorageClient.call(this, credential);
    return async (data, callback) => {
        var _a, _b;
        const { bucket, localUri } = data;
        const { pathname = '', fileGroup, contentType, metadata, endpoint, admin = {}, overwrite, options } = data.upload;
        const containerClient = blobServiceClient.getContainerClient(bucket);
        let filename = data.upload.filename || path.basename(localUri), bucketKey;
        const cleanup = () => {
            BUCKET_SESSION.delete(bucket);
            if (bucketKey) {
                delete BUCKET_RESPONSE[bucketKey];
            }
        };
        // NOTE(review): bucket-create failure reports err === null to the
        // callback — callers apparently treat any invocation without a url
        // as failure; confirm before relying on the error value.
        const errorResponse = (err) => {
            cleanup();
            callback(err);
        };
        if (!BUCKET_SESSION.has(bucket)) {
            const bucketAcl = admin.publicRead ? 'blob' : admin.acl;
            // Share one create promise per credential/bucket/acl so parallel
            // uploads don't race on container creation.
            const response = BUCKET_RESPONSE[_a = bucketKey = getBucketKey(credential, bucket, bucketAcl)] || (BUCKET_RESPONSE[_a] = index_1.createBucketV2.call(this, credential, bucket, bucketAcl, admin.configBucket?.create));
            if (!await response) {
                errorResponse(null);
                return;
            }
            BUCKET_SESSION.add(bucket);
        }
        if (!overwrite) {
            try {
                const current = filename;
                const next = (0, util_1.generateFilename)(filename);
                let i = 0, exists;
                // Probe candidate names until one is not present in the
                // container. `next(i)` presumably yields [candidate, taken];
                // a candidate it reports as free is used without a remote
                // listing — TODO confirm generateFilename's contract.
                do {
                    if (i > 0) {
                        [filename, exists] = next(i);
                        if (!exists) {
                            break;
                        }
                    }
                    const name = pathname ? module_1.default.joinPath(pathname, filename) : filename;
                    for await (const blob of containerClient.listBlobsFlat({ includeUncommitedBlobs: true })) {
                        if (blob.name === name) {
                            exists = true;
                            break;
                        }
                    }
                } while (exists && ++i);
                if (i > 0) {
                    this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, ["File renamed" /* CMD_CLOUD.RENAME_FILE */, current], filename, { ...index_2.default.LOG_CLOUD_WARN });
                }
            }
            catch (err) {
                errorResponse(err);
                return;
            }
        }
        // Parallel arrays: index 0 is the primary upload, the rest come from
        // fileGroup (ContentType has only index 0; others fall back below).
        const Key = [filename];
        const Body = [data.buffer];
        const ContentType = [contentType];
        const addLog = (err) => err instanceof Error && this.addLog(this.statusType.WARN, err.message, service + ': ' + bucket);
        if (fileGroup) {
            for (const [content, ext, localFile] of fileGroup) {
                try {
                    Body.push(typeof content === 'string' ? fs.readFileSync(content) : content);
                    Key.push(ext === '.map' && localFile ? path.basename(localFile) : filename + ext);
                }
                catch (err) {
                    // Unreadable companion files are skipped with a warning.
                    addLog(err);
                }
            }
        }
        for (let i = 0; i < Key.length; ++i) {
            const first = i === 0;
            if (this.aborted) {
                if (first) {
                    errorResponse((0, types_1.createAbortError)());
                }
                return;
            }
            const blobName = module_1.default.joinPath(pathname, Key[i]);
            const params = { ...options, abortSignal: this.signal };
            if (first && metadata) {
                params.metadata = metadata;
            }
            // Respect a caller-provided blobContentType; otherwise use ours.
            (_b = (params.blobHTTPHeaders || (params.blobHTTPHeaders = {}))).blobContentType || (_b.blobContentType = ContentType[i]);
            // Deliberately not awaited: uploads run in parallel and resolve
            // through the callback / warning log instead.
            containerClient.getBlockBlobClient(blobName).upload(Body[i], Body[i].byteLength, params)
                .then(result => {
                    let requestUrl = result._response.request.url;
                    const url = endpoint ? module_1.default.joinPath(endpoint, blobName) : module_1.default.isFile(requestUrl = decodeURIComponent(requestUrl), 'http/s') ? requestUrl : module_1.default.joinPath(`https://${credential.accountName}.blob.core.windows.net`, bucket, blobName);
                    this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, "Upload success" /* CMD_CLOUD.UPLOAD_FILE */, url, { ...index_2.default.LOG_CLOUD_UPLOAD });
                    if (first) {
                        cleanup();
                        callback(null, url);
                    }
                })
                .catch(err => {
                    if (first) {
                        errorResponse(err);
                    }
                    else {
                        addLog(err);
                    }
                });
        }
    };
}
exports.default = upload;
|
|
120
|
+
|
|
121
|
+
// CommonJS interop: expose the default export directly on module.exports
// while keeping a `.default` property for ESM-style importers.
if (exports.default) {
    module.exports = exports.default;
    module.exports.default = exports.default;
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
const path = require("path");
|
|
4
|
+
const fs = require("fs");
|
|
5
|
+
const https = require("https");
|
|
6
|
+
const index_1 = require("../index");
|
|
7
|
+
const types_1 = require("../../../types");
|
|
8
|
+
const module_1 = require("../../../module");
|
|
9
|
+
const index_2 = require("../../index");
|
|
10
|
+
// Returns a downloader bound to `credential`. Each invocation fetches the
// blob named by data.download.filename into a fresh temp directory and
// invokes `callback(err, localPath)`. Firebase-backed credentials stream
// over HTTPS via a signed download URL; plain GCS credentials use
// file.download(). When target.deleteObject is set, the remote object is
// removed (best-effort, logged) after a successful download.
//
// FIX 1: the getDownloadURL(...) promise chain had no rejection handler, so
// an auth/not-found failure became an unhandled rejection and the callback
// never fired — a .catch now routes it through downloadResponse.
// FIX 2: https.get's request object had no 'error' listener, so socket-level
// failures crashed the process — an .on('error', ...) handler is added.
function download(credential, service = 'gcp') {
    const storage = index_1.createStorageClient.call(this, credential);
    return (data, callback) => {
        const { bucket: Bucket, download: target } = data;
        const Key = target.filename;
        const firebase = (0, index_1.isFirebaseApp)(credential, true, true);
        let tempDir;
        // NOTE(review): the temp dir is scheduled for removal in a microtask
        // right after the callback — callers presumably consume/copy the file
        // synchronously, or removeDir defers; confirm before reordering.
        const downloadResponse = (err, url) => {
            callback(err, url);
            if (tempDir) {
                queueMicrotask(() => module_1.default.removeDir(tempDir));
            }
        };
        if (!Bucket || !Key) {
            downloadResponse((0, types_1.errorValue)('Missing property', !Bucket ? 'Bucket' : 'Key'));
            return;
        }
        if (!(tempDir = this.getTempDir({ uuidDir: true }))) {
            downloadResponse(new Error("Unable to create temp directory" /* ERR_CLOUD.TEMP_DIR */));
            return;
        }
        const location = module_1.default.joinPath(Bucket, Key);
        const destination = path.join(tempDir, path.basename(Key));
        const deleteMessage = () => this.formatMessage(64 /* LOG_TYPE.CLOUD */, service, "Delete success" /* CMD_CLOUD.DELETE_FILE */, location, { ...index_2.default.LOG_CLOUD_DELETE });
        if (firebase) {
            const { ref, getDownloadURL, deleteObject } = require('@firebase/storage');
            const objectRef = ref(storage, Key);
            getDownloadURL(objectRef).then(url => {
                https.get(url, res => {
                    const statusCode = res.statusCode;
                    if (statusCode >= 200 && statusCode < 300) {
                        res.on('error', err => downloadResponse(err));
                        res.pipe(fs.createWriteStream(destination)
                            .on('finish', () => {
                                downloadResponse(null, destination);
                                if (target.deleteObject) {
                                    deleteObject(objectRef)
                                        .then(() => deleteMessage())
                                        .catch(err => this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Delete failed" /* ERR_CLOUD.DELETE_FAIL */, location], err, { ...index_2.default.LOG_CLOUD_FAIL, fatal: !!target.active }));
                                }
                            })
                            .on('error', err => downloadResponse(err)));
                    }
                    else {
                        downloadResponse((0, types_1.errorMessage)(statusCode, 'Invalid HTTP request', location));
                    }
                })
                    .on('error', err => downloadResponse(err));
            })
                .catch(err => downloadResponse(err));
        }
        else {
            const bucket = storage.bucket(Bucket);
            const file = bucket.file(Key, { generation: target.versionId });
            file.download({ destination })
                .then(() => {
                    downloadResponse(null, destination);
                    const deleteObject = target.deleteObject;
                    if (deleteObject) {
                        // deleteObject may itself be a GCS delete-options object;
                        // otherwise missing objects are ignored.
                        file.delete((0, types_1.isPlainObject)(deleteObject) ? deleteObject : { ignoreNotFound: true }, err => {
                            if (!err) {
                                deleteMessage();
                            }
                            else {
                                this.formatFail(64 /* LOG_TYPE.CLOUD */, service, ["Delete failed" /* ERR_CLOUD.DELETE_FAIL */, location], err, { ...index_2.default.LOG_CLOUD_FAIL, fatal: !!target.active });
                            }
                        });
                    }
                })
                .catch(err => downloadResponse(err));
        }
    };
}
exports.default = download;
|
|
82
|
+
|
|
83
|
+
// CommonJS interop: expose the default export directly on module.exports
// while keeping a `.default` property for ESM-style importers.
if (exports.default) {
    module.exports = exports.default;
    module.exports.default = exports.default;
}
|