cloud-bucket 0.3.14 → 0.4.0

This diff shows the changes between publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
package/dist/bucket.d.ts CHANGED
@@ -2,8 +2,8 @@
2
2
  import { Bucket as GoogleBucket } from '@google-cloud/storage';
3
3
  import { S3 } from 'aws-sdk';
4
4
  import { Readable, Writable } from 'stream';
5
- import { Driver, ListCloudFilesOptions } from './driver';
6
- import { BucketFile, BucketFileDeleted, BucketType, ListArg, ListOptions, ListResult } from './types';
5
+ import { Driver, ListCloudFilesOptions } from './driver.js';
6
+ import { BucketFile, BucketFileDeleted, BucketType, ListArg, ListOptions, ListResult } from './types.js';
7
7
  export interface BucketOptions {
8
8
  driver: Driver;
9
9
  log: boolean;
package/dist/bucket.js CHANGED
@@ -1,34 +1,23 @@
1
- "use strict";
2
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
- if (k2 === undefined) k2 = k;
4
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
5
- }) : (function(o, m, k, k2) {
6
- if (k2 === undefined) k2 = k;
7
- o[k2] = m[k];
8
- }));
9
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
10
- Object.defineProperty(o, "default", { enumerable: true, value: v });
11
- }) : function(o, v) {
12
- o["default"] = v;
13
- });
14
- var __importStar = (this && this.__importStar) || function (mod) {
15
- if (mod && mod.__esModule) return mod;
16
- var result = {};
17
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
18
- __setModuleDefault(result, mod);
19
- return result;
20
- };
21
- Object.defineProperty(exports, "__esModule", { value: true });
22
- exports.parsePrefixOrGlob = exports.parseListOptions = exports.getContentType = exports.buildFullDestPath = exports.newBucket = void 0;
23
- const fs_extra_plus_1 = require("fs-extra-plus");
24
- const mime_types_1 = require("mime-types");
25
- const Path = __importStar(require("path"));
26
- function newBucket(opts) {
1
+ import { glob, mkdirp } from 'fs-extra-plus';
2
+ import { lookup } from 'mime-types';
3
+ import * as Path from 'path';
4
+ export function newBucket(opts) {
27
5
  return new BucketImpl(opts);
28
6
  }
29
- exports.newBucket = newBucket;
30
7
  class BucketImpl {
31
8
  constructor(opts) {
9
+ Object.defineProperty(this, "driver", {
10
+ enumerable: true,
11
+ configurable: true,
12
+ writable: true,
13
+ value: void 0
14
+ });
15
+ Object.defineProperty(this, "log", {
16
+ enumerable: true,
17
+ configurable: true,
18
+ writable: true,
19
+ value: void 0
20
+ });
32
21
  this.driver = opts.driver;
33
22
  this.log = opts.log;
34
23
  }
@@ -135,7 +124,7 @@ class BucketImpl {
135
124
  const remotePath = this.driver.getPath(cf);
136
125
  const localFilePath = (isLocalPathDir) ? getDestPath(baseDir, remotePath, localPath) : localPath;
137
126
  const localPathDir = Path.dirname(localFilePath);
138
- await fs_extra_plus_1.mkdirp(localPathDir);
127
+ await mkdirp(localPathDir);
139
128
  if (this.log) {
140
129
  process.stdout.write(`Downloading ${this.type}://${this.name}/${remotePath} to ${localFilePath}`);
141
130
  }
@@ -167,7 +156,7 @@ class BucketImpl {
167
156
  }
168
157
  const isLocalGlob = localPath.includes('*');
169
158
  const { baseDir } = parsePrefixOrGlob(localPath);
170
- const localFiles = await fs_extra_plus_1.glob(localPath);
159
+ const localFiles = await glob(localPath);
171
160
  for (const localPath of localFiles) {
172
161
  // if we have an localFileExpression (globs), then, we build the fullDestPath relative to the baseDir of the glob (last / before the first *)
173
162
  const fullDestPath = (isLocalGlob) ? getDestPath(baseDir, localPath, remotePath) : buildFullDestPath(localPath, remotePath);
@@ -256,7 +245,7 @@ function getDestPath(baseDir, remotePath, destPathDir) {
256
245
  *
257
246
  * @throws exception if destPath is not present.
258
247
  */
259
- function buildFullDestPath(localPath, destPath) {
248
+ export function buildFullDestPath(localPath, destPath) {
260
249
  // we do not have a dest path, throw error
261
250
  if (!destPath) {
262
251
  throw new Error('No depthPath');
@@ -273,14 +262,12 @@ function buildFullDestPath(localPath, destPath) {
273
262
  }
274
263
  return fullDestPath;
275
264
  }
276
- exports.buildFullDestPath = buildFullDestPath;
277
- function getContentType(path) {
278
- let ct = mime_types_1.lookup(path);
265
+ export function getContentType(path) {
266
+ let ct = lookup(path);
279
267
  let contentType = (ct) ? ct : undefined;
280
268
  return contentType;
281
269
  }
282
- exports.getContentType = getContentType;
283
- function parseListOptions(optsOrPrefix) {
270
+ export function parseListOptions(optsOrPrefix) {
284
271
  if (optsOrPrefix == null) {
285
272
  optsOrPrefix = ''; // for now, default
286
273
  }
@@ -289,7 +276,6 @@ function parseListOptions(optsOrPrefix) {
289
276
  const { directory, limit, marker } = opts;
290
277
  return { prefix, glob, directory, limit, marker };
291
278
  }
292
- exports.parseListOptions = parseListOptions;
293
279
  /**
294
280
  * Return a clean prefix and glob when defined in the string. Clean prefix, meaning, glob less one,
295
281
  * that can be passed to most cloud storage api.
@@ -300,7 +286,7 @@ exports.parseListOptions = parseListOptions;
300
286
  * - glob is prefixOrGlob value if it is a glob, otherwise undefined.
301
287
  * - baseDir is the eventual longest directory path without any glob char (ending with '/')
302
288
  */
303
- function parsePrefixOrGlob(prefixOrGlob) {
289
+ export function parsePrefixOrGlob(prefixOrGlob) {
304
290
  let glob;
305
291
  let prefix;
306
292
  let baseDir;
@@ -324,5 +310,4 @@ function parsePrefixOrGlob(prefixOrGlob) {
324
310
  }
325
311
  return { prefix, glob, baseDir };
326
312
  }
327
- exports.parsePrefixOrGlob = parsePrefixOrGlob;
328
313
  //#endregion ---------- /Utils ----------
@@ -1,9 +1,9 @@
1
1
  /// <reference types="node" />
2
- import { S3 } from 'aws-sdk';
2
+ import type { S3 as S3_TYPE } from 'aws-sdk';
3
3
  import { Readable, Writable } from "stream";
4
4
  import { Driver, ListCloudFilesOptions, ListCloudFilesResult } from "./driver";
5
5
  import { BucketFile, BucketType } from './types';
6
- declare type AwsFile = S3.Object & {
6
+ declare type AwsFile = S3_TYPE.Object & {
7
7
  ContentType?: string;
8
8
  };
9
9
  export interface S3DriverCfg {
@@ -17,7 +17,7 @@ export declare class S3Driver implements Driver<AwsFile> {
17
17
  private baseParams;
18
18
  get type(): BucketType;
19
19
  get name(): string;
20
- constructor(s3: S3, bucketName: string);
20
+ constructor(s3: S3_TYPE, bucketName: string);
21
21
  toFile(awsFile: AwsFile): Omit<BucketFile, 'bucket'>;
22
22
  getPath(obj: AwsFile): string;
23
23
  exists(path: string): Promise<boolean>;
@@ -1,22 +1,18 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.S3Driver = exports.getS3Driver = void 0;
4
- const aws_sdk_1 = require("aws-sdk");
5
- const fs_extra_plus_1 = require("fs-extra-plus");
6
- const stream_1 = require("stream");
7
- const micromatch = require("micromatch");
8
- async function getS3Driver(cfg) {
9
- const credentials = new aws_sdk_1.Credentials(cfg.access_key_id, cfg.access_key_secret);
1
+ import { PassThrough } from "stream";
2
+ const micromatch = (await import('micromatch')).default;
3
+ const { createReadStream, createWriteStream } = (await import('fs-extra')).default;
4
+ const { Credentials, S3 } = (await import('aws-sdk')).default;
5
+ export async function getS3Driver(cfg) {
6
+ const credentials = new Credentials(cfg.access_key_id, cfg.access_key_secret);
10
7
  // Create S3 service object
11
- const s3 = new aws_sdk_1.S3({ apiVersion: '2006-03-01', credentials });
8
+ const s3 = new S3({ apiVersion: '2006-03-01', credentials });
12
9
  return new S3Driver(s3, cfg.bucketName);
13
10
  }
14
- exports.getS3Driver = getS3Driver;
15
11
  /**
16
12
  * Custom Writable to trigger finish/close event manually on upload
17
13
  * TODO: Needs to check if this create some side effect.
18
14
  */
19
- class S3UploadWriteStream extends stream_1.PassThrough {
15
+ class S3UploadWriteStream extends PassThrough {
20
16
  emit(event) {
21
17
  if (event !== 'finish' && event !== 'close') {
22
18
  super.emit(event);
@@ -31,8 +27,20 @@ class S3UploadWriteStream extends stream_1.PassThrough {
31
27
  super.emit('close');
32
28
  }
33
29
  }
34
- class S3Driver {
30
+ export class S3Driver {
35
31
  constructor(s3, bucketName) {
32
+ Object.defineProperty(this, "s3", {
33
+ enumerable: true,
34
+ configurable: true,
35
+ writable: true,
36
+ value: void 0
37
+ });
38
+ Object.defineProperty(this, "baseParams", {
39
+ enumerable: true,
40
+ configurable: true,
41
+ writable: true,
42
+ value: void 0
43
+ });
36
44
  this.s3 = s3;
37
45
  this.baseParams = { Bucket: bucketName };
38
46
  }
@@ -87,7 +95,6 @@ class S3Driver {
87
95
  }
88
96
  }
89
97
  async listCloudFiles(opts) {
90
- var _a;
91
98
  const { prefix, glob, directory, limit, marker } = opts;
92
99
  // build the list params
93
100
  let listParams = {};
@@ -101,24 +108,24 @@ class S3Driver {
101
108
  listParams.MaxKeys = limit;
102
109
  }
103
110
  if (marker != null) {
104
- listParams.Marker = marker;
111
+ listParams.ContinuationToken = marker;
105
112
  }
106
113
  const params = { ...this.baseParams, ...listParams };
107
114
  // perform the s3 list request
108
115
  try {
109
- const awsResult = await this.s3.listObjects(params).promise();
116
+ const awsResult = await this.s3.listObjectsV2(params).promise();
110
117
  const awsFiles = awsResult.Contents;
111
118
  // if glob, filter again the result
112
119
  let files = (!glob) ? awsFiles : awsFiles.filter(af => micromatch.isMatch(af.Key, glob));
113
120
  let dirs = undefined;
114
121
  if (directory && awsResult.CommonPrefixes) {
115
122
  // Note: for now, match the gcp driver, undefined if empty
116
- const prefixes = (_a = awsResult.CommonPrefixes) === null || _a === void 0 ? void 0 : _a.map(cp => cp.Prefix);
123
+ const prefixes = awsResult.CommonPrefixes?.map(cp => cp.Prefix);
117
124
  if (prefixes != null && prefixes.length > 0) {
118
125
  dirs = prefixes;
119
126
  }
120
127
  }
121
- const nextMarker = awsResult.NextMarker;
128
+ const nextMarker = awsResult.NextContinuationToken;
122
129
  return { files, dirs, nextMarker };
123
130
  }
124
131
  catch (ex) {
@@ -141,7 +148,7 @@ class S3Driver {
141
148
  const remotePath = rawFile.Key;
142
149
  const params = { ...this.baseParams, ...{ Key: remotePath } };
143
150
  const remoteReadStream = this.s3.getObject(params).createReadStream();
144
- const localWriteStream = fs_extra_plus_1.createWriteStream(localPath);
151
+ const localWriteStream = createWriteStream(localPath);
145
152
  const writePromise = new Promise((resolve, reject) => {
146
153
  localWriteStream.once('close', () => {
147
154
  resolve();
@@ -154,7 +161,7 @@ class S3Driver {
154
161
  await writePromise;
155
162
  }
156
163
  async uploadCloudFile(localPath, remoteFilePath, contentType) {
157
- const readable = fs_extra_plus_1.createReadStream(localPath);
164
+ const readable = createReadStream(localPath);
158
165
  const awsResult = await this.s3.putObject({ ...this.baseParams, ...{ Key: remoteFilePath, Body: readable, ContentType: contentType } }).promise();
159
166
  // TODO: probably check the awsResult that match remoteFilePath
160
167
  return { Key: remoteFilePath };
@@ -202,4 +209,3 @@ class S3Driver {
202
209
  }
203
210
  }
204
211
  }
205
- exports.S3Driver = S3Driver;
@@ -1,10 +1,7 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.getGsDriver = void 0;
4
- const storage_1 = require("@google-cloud/storage");
5
- const stream_1 = require("stream");
6
- const micromatch = require("micromatch");
7
- async function getGsDriver(cfg) {
1
+ import { Storage as GoogleStorage } from '@google-cloud/storage';
2
+ import { Readable } from "stream";
3
+ const micromatch = (await import('micromatch')).default;
4
+ export async function getGsDriver(cfg) {
8
5
  // TODO: valid cfg
9
6
  const googleStorageConf = {
10
7
  projectId: cfg.project_id,
@@ -13,13 +10,18 @@ async function getGsDriver(cfg) {
13
10
  private_key: cfg.private_key
14
11
  }
15
12
  };
16
- const storage = new storage_1.Storage(googleStorageConf);
13
+ const storage = new GoogleStorage(googleStorageConf);
17
14
  const googleBucket = storage.bucket(cfg.bucketName);
18
15
  return new GcpDriver(googleBucket);
19
16
  }
20
- exports.getGsDriver = getGsDriver;
21
17
  class GcpDriver {
22
18
  constructor(googleBucket) {
19
+ Object.defineProperty(this, "googleBucket", {
20
+ enumerable: true,
21
+ configurable: true,
22
+ writable: true,
23
+ value: void 0
24
+ });
23
25
  this.googleBucket = googleBucket;
24
26
  }
25
27
  get type() {
@@ -87,7 +89,7 @@ class GcpDriver {
87
89
  // get the dirs
88
90
  const dirs = (directory && meta.prefixes) ? meta.prefixes : undefined;
89
91
  // get pageToken (nextMarker)
90
- const nextMarker = info === null || info === void 0 ? void 0 : info.pageToken;
92
+ const nextMarker = info?.pageToken;
91
93
  // if glob, filter the data further
92
94
  let files = (!glob) ? gfList : gfList.filter(gf => micromatch.isMatch(gf.name, glob));
93
95
  return { files, dirs, nextMarker };
@@ -139,7 +141,7 @@ class GcpDriver {
139
141
  }
140
142
  async uploadCloudContent(path, content, contentType) {
141
143
  const googleFile = this.googleBucket.file(path);
142
- const uploadReadable = new stream_1.Readable();
144
+ const uploadReadable = new Readable();
143
145
  return new Promise(function (resolve, reject) {
144
146
  uploadReadable
145
147
  .pipe(googleFile.createWriteStream({ contentType }))
@@ -1,5 +1,5 @@
1
- import { S3Driver, S3DriverCfg } from './driver-aws';
2
- import { BucketType } from './types';
1
+ import { S3Driver, S3DriverCfg } from './driver-aws.js';
2
+ import { BucketType } from './types.js';
3
3
  export interface MinioDriverCfg extends S3DriverCfg {
4
4
  minio_endpoint: string;
5
5
  }
@@ -1,11 +1,8 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.getMinioDriver = void 0;
4
- const aws_sdk_1 = require("aws-sdk");
5
- const driver_aws_1 = require("./driver-aws");
6
- async function getMinioDriver(cfg) {
1
+ import { S3Driver } from './driver-aws.js';
2
+ const { S3 } = (await import('aws-sdk')).default;
3
+ export async function getMinioDriver(cfg) {
7
4
  // const credentials = new Credentials(cfg.access_key_id, cfg.access_key_secret);
8
- const s3 = new aws_sdk_1.S3({
5
+ const s3 = new S3({
9
6
  accessKeyId: cfg.access_key_id,
10
7
  secretAccessKey: cfg.access_key_secret,
11
8
  endpoint: cfg.minio_endpoint,
@@ -19,8 +16,7 @@ async function getMinioDriver(cfg) {
19
16
  // Create S3 service object
20
17
  return new MinioDriver(s3, cfg.bucketName);
21
18
  }
22
- exports.getMinioDriver = getMinioDriver;
23
- class MinioDriver extends driver_aws_1.S3Driver {
19
+ class MinioDriver extends S3Driver {
24
20
  get type() {
25
21
  return 'minio';
26
22
  }
package/dist/driver.d.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  /// <reference types="node" />
2
2
  import { Readable, Writable } from 'stream';
3
- import { BucketFile, BucketType } from './types';
3
+ import { BucketFile, BucketType } from './types.js';
4
4
  export interface Driver<F = any> {
5
5
  type: BucketType;
6
6
  name: string;
package/dist/driver.js CHANGED
@@ -1,2 +1 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
1
+ export {};
package/dist/index.d.ts CHANGED
@@ -1,9 +1,9 @@
1
- import { Bucket } from './bucket';
2
- import { S3DriverCfg } from './driver-aws';
3
- import { GsDriverCfg } from './driver-gcp';
4
- import { MinioDriverCfg } from './driver-minio';
5
- import { BucketFile, ListOptions, ListResult } from './types';
6
- export { signUrl, SignUrlOptions, urlSigner } from './url-signer';
1
+ import { Bucket } from './bucket.js';
2
+ import { S3DriverCfg } from './driver-aws.js';
3
+ import { GsDriverCfg } from './driver-gcp.js';
4
+ import { MinioDriverCfg } from './driver-minio.js';
5
+ import { BucketFile, ListOptions, ListResult } from './types.js';
6
+ export { signUrl, SignUrlOptions, urlSigner } from './url-signer.js';
7
7
  export { Bucket, BucketFile, ListOptions, ListResult };
8
8
  declare type GetBucketOptions = {
9
9
  log?: boolean;
package/dist/index.js CHANGED
@@ -1,34 +1,27 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.getBucket = exports.urlSigner = exports.signUrl = void 0;
4
- const bucket_1 = require("./bucket");
5
- const driver_aws_1 = require("./driver-aws");
6
- const driver_gcp_1 = require("./driver-gcp");
7
- const driver_minio_1 = require("./driver-minio");
8
- var url_signer_1 = require("./url-signer");
9
- Object.defineProperty(exports, "signUrl", { enumerable: true, get: function () { return url_signer_1.signUrl; } });
10
- Object.defineProperty(exports, "urlSigner", { enumerable: true, get: function () { return url_signer_1.urlSigner; } });
11
- async function getBucket(options) {
12
- var _a;
1
+ import { newBucket } from './bucket.js';
2
+ import { getS3Driver } from './driver-aws.js';
3
+ import { getGsDriver } from './driver-gcp.js';
4
+ import { getMinioDriver } from './driver-minio.js';
5
+ export { signUrl, urlSigner } from './url-signer.js';
6
+ export async function getBucket(options) {
13
7
  if (options == null) {
14
8
  throw new Error(`ERROR - cloud-bucket - Cannot getBucket with options ${options}`);
15
9
  }
16
- const log = (_a = options.log) !== null && _a !== void 0 ? _a : false; // by default, false.
10
+ const log = options.log ?? false; // by default, false.
17
11
  // if has .project_id, assume GcpBucket
18
12
  const driver = await getDriver(options);
19
- const bucket = bucket_1.newBucket({ driver, log });
13
+ const bucket = newBucket({ driver, log });
20
14
  return bucket;
21
15
  }
22
- exports.getBucket = getBucket;
23
16
  async function getDriver(driverCfg) {
24
17
  if (isGsDriverCfg(driverCfg)) {
25
- return driver_gcp_1.getGsDriver(driverCfg);
18
+ return getGsDriver(driverCfg);
26
19
  }
27
20
  else if (isMinioDriverCfg(driverCfg)) { // IMPORTANT MUST be before S3Driver, because same access_key...
28
- return driver_minio_1.getMinioDriver(driverCfg);
21
+ return getMinioDriver(driverCfg);
29
22
  }
30
23
  else if (isS3DriverCfg(driverCfg)) {
31
- return driver_aws_1.getS3Driver(driverCfg);
24
+ return getS3Driver(driverCfg);
32
25
  }
33
26
  else {
34
27
  throw new Error(`bucket config does not seem to be valid (only support Gcp and Aws for now)`);
package/dist/types.d.ts CHANGED
@@ -1,4 +1,4 @@
1
- import type { Bucket } from './bucket';
1
+ import type { Bucket } from './bucket.js';
2
2
  export declare type BucketType = 's3' | 'gs' | 'minio';
3
3
  export interface BucketFile {
4
4
  bucket: Bucket;
package/dist/types.js CHANGED
@@ -1,2 +1 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
1
+ export {};
@@ -1,27 +1,5 @@
1
- "use strict";
2
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
- if (k2 === undefined) k2 = k;
4
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
5
- }) : (function(o, m, k, k2) {
6
- if (k2 === undefined) k2 = k;
7
- o[k2] = m[k];
8
- }));
9
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
10
- Object.defineProperty(o, "default", { enumerable: true, value: v });
11
- }) : function(o, v) {
12
- o["default"] = v;
13
- });
14
- var __importStar = (this && this.__importStar) || function (mod) {
15
- if (mod && mod.__esModule) return mod;
16
- var result = {};
17
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
18
- __setModuleDefault(result, mod);
19
- return result;
20
- };
21
- Object.defineProperty(exports, "__esModule", { value: true });
22
- exports.urlSigner = exports.signUrl = void 0;
23
- const crypto = __importStar(require("crypto"));
24
- function signUrl(url, opts) {
1
+ import * as crypto from 'crypto';
2
+ export function signUrl(url, opts) {
25
3
  if (opts.type === 's3') {
26
4
  return s3_sign_url(url, opts);
27
5
  }
@@ -35,7 +13,6 @@ function signUrl(url, opts) {
35
13
  throw new Error(`cdnSign does not support type ${opts.type} for now`);
36
14
  }
37
15
  }
38
- exports.signUrl = signUrl;
39
16
  /**
40
17
  * Return a path signer based on a baseUrl and the sign option. This is the optimal way to sign many urls and should be typically used over the signUrl method.
41
18
  *
@@ -49,7 +26,7 @@ exports.signUrl = signUrl;
49
26
  * - s3 - This takes full advantage of the aws 'directory signing' like urlSigner('https://.../some/dir/*', opts) will create one signature for the folder and apply it to each sub path.
50
27
  * - gs - While google storage does not have the same capability, there are small benefits as well on some base64 object creation (not much though). However, because of GCP small key, the signature is much faster than s3 (about 10x)
51
28
  */
52
- function urlSigner(baseUrl, opts) {
29
+ export function urlSigner(baseUrl, opts) {
53
30
  if (opts.type === 's3') {
54
31
  return s3_urlSigner(baseUrl, opts);
55
32
  }
@@ -66,7 +43,6 @@ function urlSigner(baseUrl, opts) {
66
43
  throw new Error('urlSigner only supported for s3');
67
44
  }
68
45
  }
69
- exports.urlSigner = urlSigner;
70
46
  //#region ---------- S3 Signer ----------
71
47
  function s3_urlSigner(baseUrl, opts) {
72
48
  const isWildPolicy = baseUrl.endsWith('*');
package/package.json CHANGED
@@ -1,6 +1,7 @@
1
1
  {
2
2
  "name": "cloud-bucket",
3
- "version": "0.3.14",
3
+ "type": "module",
4
+ "version": "0.4.0",
4
5
  "description": "Simple multi cloud (Google Storage and AWS S3) bucket API",
5
6
  "author": "Jeremy Chone <jeremy.chone@gmail.com>",
6
7
  "main": "dist/index.js",
@@ -12,24 +13,24 @@
12
13
  "node": ">=14"
13
14
  },
14
15
  "dependencies": {
15
- "@google-cloud/storage": "^5.8.5",
16
- "@types/micromatch": "^4.0.1",
17
- "@types/mime-types": "^2.1.0",
18
- "aws-sdk": "^2.934.0",
19
- "fs-extra-plus": "^0.5.22",
16
+ "@google-cloud/storage": "^5.16.0",
17
+ "@types/micromatch": "^4.0.2",
18
+ "@types/mime-types": "^2.1.1",
19
+ "aws-sdk": "^2.1037.0",
20
+ "fs-extra-plus": "^0.5.23",
20
21
  "micromatch": "^4.0.4",
21
- "mime-types": "^2.1.31"
22
+ "mime-types": "^2.1.34"
22
23
  },
23
24
  "devDependencies": {
24
- "@types/js-yaml": "^4.0.1",
25
- "@types/mocha": "^8.2.2",
25
+ "@types/js-yaml": "^4.0.5",
26
+ "@types/mocha": "^9.0.0",
26
27
  "js-yaml": "^4.1.0",
27
- "mocha": "^9.0.1",
28
- "ts-node": "^10.0.0",
29
- "typescript": "^4.3.4"
28
+ "mocha": "^9.1.3",
29
+ "ts-node": "^10.4.0",
30
+ "typescript": "^4.5.2"
30
31
  },
31
32
  "scripts": {
32
- "test": "TS_NODE_FILES=true ./node_modules/.bin/mocha",
33
+ "test": "TS_NODE_PROJECT='test/tsconfig.json' ./node_modules/.bin/mocha",
33
34
  "testw": "TS_NODE_FILES=true ./node_modules/.bin/mocha --watch",
34
35
  "testd": "TS_NODE_FILES=true ./node_modules/.bin/mocha --inspect-brk",
35
36
  "prepare": "rm -Rf dist/ && ./node_modules/.bin/tsc"
package/src/bucket.ts CHANGED
@@ -4,8 +4,8 @@ import { glob, mkdirp } from 'fs-extra-plus';
4
4
  import { lookup } from 'mime-types';
5
5
  import * as Path from 'path';
6
6
  import { Readable, Writable } from 'stream';
7
- import { Driver, ListCloudFilesOptions } from './driver';
8
- import { BucketFile, BucketFileDeleted, BucketType, ListArg, ListOptions, ListResult } from './types';
7
+ import { Driver, ListCloudFilesOptions } from './driver.js';
8
+ import { BucketFile, BucketFileDeleted, BucketType, ListArg, ListOptions, ListResult } from './types.js';
9
9
 
10
10
  export interface BucketOptions {
11
11
  driver: Driver;
package/src/driver-aws.ts CHANGED
@@ -1,16 +1,16 @@
1
- import { Credentials, S3 } from 'aws-sdk';
2
- import { ListObjectsRequest } from 'aws-sdk/clients/s3';
3
- import { createReadStream, createWriteStream } from 'fs-extra-plus';
1
+ import type { S3 as S3_TYPE } from 'aws-sdk';
2
+ import { ListObjectsV2Request } from 'aws-sdk/clients/s3';
4
3
  import { PassThrough, Readable, Writable } from "stream";
5
4
  import { Driver, ListCloudFilesOptions, ListCloudFilesResult } from "./driver";
6
5
  import { BucketFile, BucketType } from './types';
7
-
8
- import micromatch = require('micromatch');
6
+ const micromatch = (await import('micromatch')).default;
7
+ const { createReadStream, createWriteStream } = (await import('fs-extra')).default;
8
+ const { Credentials, S3 } = (await import('aws-sdk')).default;
9
9
 
10
10
  // import {Object as AwsFile} from 'aws-sdk';
11
11
 
12
12
  // type S3 = AWS.S3;
13
- type AwsFile = S3.Object & { ContentType?: string };
13
+ type AwsFile = S3_TYPE.Object & { ContentType?: string };
14
14
 
15
15
  export interface S3DriverCfg {
16
16
  bucketName: string;
@@ -46,7 +46,7 @@ class S3UploadWriteStream extends PassThrough {
46
46
  }
47
47
 
48
48
  export class S3Driver implements Driver<AwsFile> {
49
- private s3: S3;
49
+ private s3: S3_TYPE;
50
50
  private baseParams: { Bucket: string };
51
51
 
52
52
  get type(): BucketType {
@@ -57,7 +57,7 @@ export class S3Driver implements Driver<AwsFile> {
57
57
  return this.baseParams.Bucket;
58
58
  }
59
59
 
60
- constructor(s3: S3, bucketName: string) {
60
+ constructor(s3: S3_TYPE, bucketName: string) {
61
61
  this.s3 = s3;
62
62
  this.baseParams = { Bucket: bucketName };
63
63
  }
@@ -99,7 +99,7 @@ export class S3Driver implements Driver<AwsFile> {
99
99
 
100
100
  const awsFile: AwsFile = { Key, Size, LastModified, ETag, ContentType };
101
101
  return awsFile;
102
- } catch (ex) {
102
+ } catch (ex: any) {
103
103
  // if NotFound, return false
104
104
  if (ex.code === 'NotFound') {
105
105
  return null;
@@ -117,7 +117,7 @@ export class S3Driver implements Driver<AwsFile> {
117
117
  const { prefix, glob, directory, limit, marker } = opts;
118
118
 
119
119
  // build the list params
120
- let listParams: Partial<ListObjectsRequest> = {};
120
+ let listParams: Partial<ListObjectsV2Request> = {};
121
121
  if (prefix) {
122
122
  listParams.Prefix = prefix;
123
123
  }
@@ -128,13 +128,13 @@ export class S3Driver implements Driver<AwsFile> {
128
128
  listParams.MaxKeys = limit;
129
129
  }
130
130
  if (marker != null) {
131
- listParams.Marker = marker;
131
+ listParams.ContinuationToken = marker;
132
132
  }
133
133
  const params = { ...this.baseParams, ...listParams };
134
134
 
135
135
  // perform the s3 list request
136
136
  try {
137
- const awsResult = await this.s3.listObjects(params).promise();
137
+ const awsResult = await this.s3.listObjectsV2(params).promise();
138
138
  const awsFiles = awsResult.Contents as AwsFile[];
139
139
  // if glob, filter again the result
140
140
  let files: AwsFile[] = (!glob) ? awsFiles : awsFiles.filter(af => micromatch.isMatch(af.Key!, glob));
@@ -147,7 +147,7 @@ export class S3Driver implements Driver<AwsFile> {
147
147
  dirs = prefixes;
148
148
  }
149
149
  }
150
- const nextMarker = awsResult.NextMarker;
150
+ const nextMarker = awsResult.NextContinuationToken;
151
151
 
152
152
  return { files, dirs, nextMarker };
153
153
  } catch (ex) {
package/src/driver-gcp.ts CHANGED
@@ -2,7 +2,7 @@ import { Bucket as GoogleBucket, File as GoogleFile, GetFilesOptions, Storage as
2
2
  import { Readable, Writable } from "stream";
3
3
  import { Driver, ListCloudFilesOptions, ListCloudFilesResult } from "./driver";
4
4
  import { BucketFile, BucketType } from './types';
5
- import micromatch = require('micromatch');
5
+ const micromatch = (await import('micromatch')).default;
6
6
 
7
7
  export async function getGsDriver(cfg: GsDriverCfg) {
8
8
  // TODO: valid cfg
@@ -68,7 +68,7 @@ class GcpDriver implements Driver<GoogleFile> {
68
68
  const googleFile = this.googleBucket.file(path);
69
69
  try {
70
70
  return (await googleFile.get())[0];
71
- } catch (ex) {
71
+ } catch (ex: any) {
72
72
  // not found return null, as per getFile design.
73
73
  if (ex.code === 404) {
74
74
  return null;
@@ -143,7 +143,7 @@ class GcpDriver implements Driver<GoogleFile> {
143
143
  if (googleFile) {
144
144
  try {
145
145
  await googleFile.delete();
146
- } catch (ex) {
146
+ } catch (ex: any) {
147
147
  // if not found, just return false.
148
148
  if (ex.code === 404) {
149
149
  process.stdout.write(` - Skipped (object not found)\n`);
@@ -1,6 +1,7 @@
1
- import { S3 } from 'aws-sdk';
2
- import { S3Driver, S3DriverCfg } from './driver-aws';
3
- import { BucketType } from './types';
1
+ import type { S3 as S3_TYPE } from 'aws-sdk';
2
+ import { S3Driver, S3DriverCfg } from './driver-aws.js';
3
+ import { BucketType } from './types.js';
4
+ const { S3 } = (await import('aws-sdk')).default;
4
5
 
5
6
  export interface MinioDriverCfg extends S3DriverCfg {
6
7
  minio_endpoint: string;
@@ -33,7 +34,7 @@ class MinioDriver extends S3Driver {
33
34
 
34
35
 
35
36
 
36
- async function bucketExists(s3: S3, bucketName: string) {
37
+ async function bucketExists(s3: S3_TYPE, bucketName: string) {
37
38
 
38
39
  return new Promise((res, rej) => {
39
40
  s3.headBucket({
@@ -45,7 +46,7 @@ async function bucketExists(s3: S3, bucketName: string) {
45
46
 
46
47
  }
47
48
 
48
- async function createBucket(s3: S3, bucketName: string) {
49
+ async function createBucket(s3: S3_TYPE, bucketName: string) {
49
50
 
50
51
  // create the bucket
51
52
  await new Promise((res, rej) => {
package/src/driver.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import { Readable, Writable } from 'stream';
2
- import { BucketFile, BucketType } from './types';
2
+ import { BucketFile, BucketType } from './types.js';
3
3
 
4
4
 
5
5
 
package/src/index.ts CHANGED
@@ -1,11 +1,11 @@
1
- import { Bucket, newBucket } from './bucket';
2
- import { Driver } from './driver';
3
- import { getS3Driver, S3DriverCfg } from './driver-aws';
4
- import { getGsDriver, GsDriverCfg } from './driver-gcp';
5
- import { getMinioDriver, MinioDriverCfg } from './driver-minio';
6
- import { BucketFile, ListOptions, ListResult } from './types';
7
-
8
- export { signUrl, SignUrlOptions, urlSigner } from './url-signer';
1
+ import { Bucket, newBucket } from './bucket.js';
2
+ import { getS3Driver, S3DriverCfg } from './driver-aws.js';
3
+ import { getGsDriver, GsDriverCfg } from './driver-gcp.js';
4
+ import { getMinioDriver, MinioDriverCfg } from './driver-minio.js';
5
+ import { Driver } from './driver.js';
6
+ import { BucketFile, ListOptions, ListResult } from './types.js';
7
+
8
+ export { signUrl, SignUrlOptions, urlSigner } from './url-signer.js';
9
9
  export { Bucket, BucketFile, ListOptions, ListResult };
10
10
 
11
11
  type GetBucketOptions = { log?: boolean } & (GsDriverCfg | S3DriverCfg | MinioDriverCfg);
package/src/types.ts CHANGED
@@ -1,5 +1,5 @@
1
1
 
2
- import type { Bucket } from './bucket';
2
+ import type { Bucket } from './bucket.js';
3
3
 
4
4
  /////////////////////
5
5
  // Those are the common types to avoid uncessary cyclic module reference. (best practice)