cloud-bucket 0.4.1 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bucket.d.ts CHANGED
@@ -1,6 +1,5 @@
- /// <reference types="node" />
+ import { S3Client } from '@aws-sdk/client-s3';
  import { Bucket as GoogleBucket } from '@google-cloud/storage';
- import { S3 } from 'aws-sdk';
  import { Readable, Writable } from 'stream';
  import { Driver, ListCloudFilesOptions } from './driver.js';
  import { BucketFile, BucketFileDeleted, BucketType, ListArg, ListOptions, ListResult } from './types.js';
@@ -12,7 +11,7 @@ export declare function newBucket(opts: BucketOptions): BucketImpl<unknown>;
  export interface Bucket {
      type: BucketType;
      name: string;
-     readonly s3?: S3;
+     readonly s3?: S3Client;
      readonly googleBucket?: GoogleBucket;
      exists(path: string): Promise<boolean>;
      /**
@@ -55,7 +54,7 @@ declare class BucketImpl<F> implements Bucket {
      constructor(opts: BucketOptions);
      get type(): BucketType;
      get name(): string;
-     get s3(): S3 | undefined;
+     get s3(): S3Client | undefined;
      get googleBucket(): GoogleBucket | undefined;
      toFile(cf: F): BucketFile;
      exists(path: string): Promise<boolean>;
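Since `Bucket.s3` is now typed as the v3 `S3Client`, consumers that reach for the raw client must move from the v2 `.promise()` style to the command pattern. A minimal sketch of the caller-side change, assuming an S3-backed bucket and hypothetical config values:

```ts
import { GetObjectCommand } from '@aws-sdk/client-s3';
import { getBucket } from 'cloud-bucket';

const bucket = await getBucket({
  bucketName: 'my-bucket',              // hypothetical values
  access_key_id: process.env.KEY_ID!,
  access_key_secret: process.env.KEY_SECRET!,
});

// 0.4.1: bucket.s3.getObject({ Bucket: bucket.name, Key: 'a.txt' }).promise()
// 0.5.0: every operation is a command sent through the shared client
const res = await bucket.s3!.send(new GetObjectCommand({ Bucket: bucket.name, Key: 'a.txt' }));
```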
package/dist/driver-aws.d.ts CHANGED
@@ -1,9 +1,8 @@
- /// <reference types="node" />
- import type { S3 as S3_TYPE } from 'aws-sdk';
+ import { _Object as S3_TYPE, S3Client } from '@aws-sdk/client-s3';
  import { Readable, Writable } from "stream";
  import { Driver, ListCloudFilesOptions, ListCloudFilesResult } from "./driver";
  import { BucketFile, BucketType } from './types';
- declare type AwsFile = S3_TYPE.Object & {
+ type AwsFile = S3_TYPE & {
      ContentType?: string;
  };
  export interface S3DriverCfg {
@@ -17,7 +16,7 @@ export declare class S3Driver implements Driver<AwsFile> {
      private baseParams;
      get type(): BucketType;
      get name(): string;
-     constructor(s3: S3_TYPE, bucketName: string);
+     constructor(s3: S3Client, bucketName: string);
      toFile(awsFile: AwsFile): Omit<BucketFile, 'bucket'>;
      getPath(obj: AwsFile): string;
      exists(path: string): Promise<boolean>;
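The v2 `S3.Object` element type from list results is exported by the v3 client as `_Object`, so aliasing it to `S3_TYPE` keeps the rest of the declarations untouched. Note that every field on `_Object` (`Key`, `Size`, `ETag`, `LastModified`, ...) is optional in v3, which is why the implementation below needs `af.Key!` assertions:

```ts
import type { _Object } from '@aws-sdk/client-s3';

// Same shape the driver uses: ListObjectsV2 does not return ContentType,
// so the driver layers it on top of the listed-object type.
type AwsFile = _Object & { ContentType?: string };
```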
package/dist/driver-aws.js CHANGED
@@ -1,11 +1,19 @@
+ import { CopyObjectCommand, DeleteObjectCommand, GetObjectCommand, HeadObjectCommand, ListObjectsV2Command, PutObjectCommand, S3Client } from '@aws-sdk/client-s3';
+ import { Upload } from "@aws-sdk/lib-storage";
  import { PassThrough } from "stream";
  const micromatch = (await import('micromatch')).default;
  const { createReadStream, createWriteStream } = (await import('fs-extra')).default;
- const { Credentials, S3 } = (await import('aws-sdk')).default;
  export async function getS3Driver(cfg) {
-     const credentials = new Credentials(cfg.access_key_id, cfg.access_key_secret);
      // Create S3 service object
-     const s3 = new S3({ apiVersion: '2006-03-01', credentials });
+     let region = process.env.AWS_REGION || 'us-east-1';
+     const s3 = new S3Client({
+         region,
+         apiVersion: '2006-03-01',
+         credentials: {
+             accessKeyId: cfg.access_key_id,
+             secretAccessKey: cfg.access_key_secret
+         }
+     });
      return new S3Driver(s3, cfg.bucketName);
  }
  /**
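Two notes on the new client construction: v3 has no global configuration and needs a resolvable region at construction time (hence the `AWS_REGION` fallback), and the `apiVersion` option carried over from v2 has no effect, since each v3 client is pinned to a single API version. When no static keys are supplied, v3 can also resolve credentials itself; a hedged sketch of that variant (not what this package does — it always passes explicit keys):

```ts
import { S3Client } from '@aws-sdk/client-s3';

// With no credentials in the config, the v3 client resolves them lazily from
// the default provider chain: env vars, shared ~/.aws files, SSO, IMDS, etc.
const s3 = new S3Client({
  region: process.env.AWS_REGION ?? 'us-east-1',
});
```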
@@ -28,6 +36,12 @@ class S3UploadWriteStream extends PassThrough {
      }
  }
  export class S3Driver {
+     get type() {
+         return 's3';
+     }
+     get name() {
+         return this.baseParams.Bucket;
+     }
      constructor(s3, bucketName) {
          Object.defineProperty(this, "s3", {
              enumerable: true,
@@ -44,12 +58,6 @@ export class S3Driver {
          this.s3 = s3;
          this.baseParams = { Bucket: bucketName };
      }
-     get type() {
-         return 's3';
-     }
-     get name() {
-         return this.baseParams.Bucket;
-     }
      toFile(awsFile) {
          if (!awsFile) {
              throw new Error(`No awsFile`);
@@ -71,7 +79,11 @@ export class S3Driver {
      }
      async getCloudFile(path) {
          try {
-             const object = await this.s3.headObject({ ...this.baseParams, ...{ Key: path } }).promise();
+             const command = new HeadObjectCommand({
+                 ...this.baseParams,
+                 ...{ Key: path }
+             });
+             const object = await this.s3.send(command);
              // bucket: this,
              // path,
              // updated,
@@ -113,7 +125,8 @@ export class S3Driver {
          const params = { ...this.baseParams, ...listParams };
          // perform the s3 list request
          try {
-             const awsResult = await this.s3.listObjectsV2(params).promise();
+             const command = new ListObjectsV2Command(params);
+             const awsResult = await this.s3.send(command);
              const awsFiles = awsResult.Contents;
              // if glob, filter again the result
              let files = (!glob) ? awsFiles : awsFiles.filter(af => micromatch.isMatch(af.Key, glob));
@@ -142,12 +155,14 @@ export class S3Driver {
              Bucket: dest.bucket.name,
              Key: dest.path
          };
-         await this.s3.copyObject(params).promise();
+         const command = new CopyObjectCommand(params);
+         await this.s3.send(command);
      }
      async downloadCloudFile(rawFile, localPath) {
          const remotePath = rawFile.Key;
          const params = { ...this.baseParams, ...{ Key: remotePath } };
-         const remoteReadStream = this.s3.getObject(params).createReadStream();
+         const command = new GetObjectCommand(params);
+         const remoteReadStream = (await this.s3.send(command)).Body;
          const localWriteStream = createWriteStream(localPath);
          const writePromise = new Promise((resolve, reject) => {
              localWriteStream.once('close', () => {
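The shape of downloads changes here: v2's `getObject(params).createReadStream()` handed back a stream synchronously, while in v3 the `Body` of a `GetObjectCommand` response is itself a Node `Readable`, available only after `send()` resolves. A sketch of the equivalent piping logic using `stream/promises`, which propagates errors from either side without hand-rolled promise wiring (names are illustrative):

```ts
import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { createWriteStream } from 'node:fs';
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

async function downloadToFile(s3: S3Client, bucket: string, key: string, localPath: string) {
  const res = await s3.send(new GetObjectCommand({ Bucket: bucket, Key: key }));
  // In Node, Body is a Readable; pipeline rejects if either stream errors
  await pipeline(res.Body as Readable, createWriteStream(localPath));
}
```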
@@ -162,33 +177,40 @@ export class S3Driver {
      }
      async uploadCloudFile(localPath, remoteFilePath, contentType) {
          const readable = createReadStream(localPath);
-         const awsResult = await this.s3.putObject({ ...this.baseParams, ...{ Key: remoteFilePath, Body: readable, ContentType: contentType } }).promise();
+         const command = new PutObjectCommand({ ...this.baseParams, ...{ Key: remoteFilePath, Body: readable, ContentType: contentType } });
+         const awsResult = await this.s3.send(command);
          // TODO: probably check the awsResult that match remoteFilePath
          return { Key: remoteFilePath };
      }
      async downloadAsText(path) {
          const params = { ...this.baseParams, ...{ Key: path } };
-         const obj = await this.s3.getObject(params).promise();
+         const command = new GetObjectCommand(params);
+         const obj = await this.s3.send(command);
          const content = obj.Body.toString();
          return content;
      }
      async uploadCloudContent(path, content, contentType) {
-         await this.s3.putObject({ ...this.baseParams, ...{ Key: path, Body: content, ContentType: contentType } }).promise();
+         const command = new PutObjectCommand({ ...this.baseParams, ...{ Key: path, Body: content, ContentType: contentType } });
+         await this.s3.send(command);
      }
      async createReadStream(path) {
          const params = { ...this.baseParams, ...{ Key: path } };
-         const obj = this.s3.getObject(params);
+         const command = new GetObjectCommand(params);
+         const obj = await this.s3.send(command);
          if (!obj) {
              throw new Error(`Object not found for ${path}`);
          }
-         return obj.createReadStream();
+         return obj.Body;
      }
      async createWriteStream(path, contentType) {
          const writable = new S3UploadWriteStream();
          const params = { ...this.baseParams, ...{ Key: path, ContentType: contentType }, Body: writable };
-         const uploadCtrl = this.s3.upload(params);
+         const uploadCtrl = new Upload({
+             client: this.s3,
+             params: params
+         });
          // NOTE: We use the S3UploadWriteStream trigger finish and close stream even when the upload is done
-         uploadCtrl.promise().then(() => {
+         uploadCtrl.done().then(() => {
              writable.triggerFinishAndClose();
          });
          return writable;
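v2's `s3.upload()` was the one call that accepted a `Body` stream of unknown length; its v3 counterpart is the `Upload` helper from `@aws-sdk/lib-storage`, which splits the stream into multipart parts behind the scenes, with `done()` resolving once the object is complete. A self-contained sketch of the same pattern the driver uses (names are illustrative):

```ts
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';
import { PassThrough } from 'node:stream';

// Returns a writable the caller can pipe into; assumes a configured client.
function openUploadStream(s3: S3Client, bucket: string, key: string): PassThrough {
  const body = new PassThrough();
  const upload = new Upload({ client: s3, params: { Bucket: bucket, Key: key, Body: body } });
  upload.done().then(
    () => body.emit('close'),          // mirror the driver's triggerFinishAndClose
    (err) => body.destroy(err as Error),
  );
  return body;
}
```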
@@ -200,7 +222,8 @@ export class S3Driver {
          if (exists) {
              // NOTE: between the first test and this delete, the object might have been deleted, but since s3.deleteObjecct
              // does not seems to tell if the object exits or not, this is the best can do.
-             await this.s3.deleteObject({ ...this.baseParams, ...{ Key: path } }).promise();
+             const command = new DeleteObjectCommand({ ...this.baseParams, ...{ Key: path } });
+             await this.s3.send(command);
              return true;
          }
          else {
package/dist/driver-gcp.d.ts CHANGED
@@ -1,4 +1,3 @@
- /// <reference types="node" />
  import { Bucket as GoogleBucket, File as GoogleFile } from '@google-cloud/storage';
  import { Readable, Writable } from "stream";
  import { Driver, ListCloudFilesOptions, ListCloudFilesResult } from "./driver";
package/dist/driver-gcp.js CHANGED
@@ -15,6 +15,12 @@ export async function getGsDriver(cfg) {
      return new GcpDriver(googleBucket);
  }
  class GcpDriver {
+     get type() {
+         return 'gs';
+     }
+     get name() {
+         return this.googleBucket.name;
+     }
      constructor(googleBucket) {
          Object.defineProperty(this, "googleBucket", {
              enumerable: true,
@@ -24,12 +30,6 @@ class GcpDriver {
          });
          this.googleBucket = googleBucket;
      }
-     get type() {
-         return 'gs';
-     }
-     get name() {
-         return this.googleBucket.name;
-     }
      toFile(googleFile) {
          if (!googleFile) {
              throw new Error(`No googleFile`);
package/dist/driver-minio.js CHANGED
@@ -1,13 +1,16 @@
+ import { HeadBucketCommand, PutBucketPolicyCommand, S3Client } from '@aws-sdk/client-s3';
  import { S3Driver } from './driver-aws.js';
- const { S3 } = (await import('aws-sdk')).default;
  export async function getMinioDriver(cfg) {
      // const credentials = new Credentials(cfg.access_key_id, cfg.access_key_secret);
-     const s3 = new S3({
-         accessKeyId: cfg.access_key_id,
-         secretAccessKey: cfg.access_key_secret,
+     let region = process.env.AWS_REGION || 'us-east-1';
+     const s3 = new S3Client({
+         region,
+         credentials: {
+             accessKeyId: cfg.access_key_id,
+             secretAccessKey: cfg.access_key_secret
+         },
          endpoint: cfg.minio_endpoint,
-         s3ForcePathStyle: true,
-         signatureVersion: 'v4'
+         forcePathStyle: true, // needed with minio (otherwise bucket.locahost and get address not found)
      });
      // For Minio, assume mock mode, so, auto create bucket
      if (!(await bucketExists(s3, cfg.bucketName))) {
@@ -22,31 +25,46 @@ class MinioDriver extends S3Driver {
      }
  }
  async function bucketExists(s3, bucketName) {
-     return new Promise((res, rej) => {
-         s3.headBucket({
-             Bucket: bucketName
-         }, (err, data) => {
-             (err) ? res(false) : res(true);
-         });
+     return new Promise(async (res, rej) => {
+         try {
+             const command = new HeadBucketCommand({
+                 Bucket: bucketName
+             });
+             const data = await s3.send(command);
+             res(data);
+         }
+         catch (error) {
+             rej(error);
+         }
      });
  }
  async function createBucket(s3, bucketName) {
      // create the bucket
-     await new Promise((res, rej) => {
-         s3.createBucket({
-             // ACL: 'public-read-write', // Does not see have effect on minio, see below
-             Bucket: bucketName
-         }, function (err, data) {
-             (err) ? rej(err) : res(data);
-         });
+     await new Promise(async (res, rej) => {
+         try {
+             const command = new HeadBucketCommand({
+                 // ACL: 'public-read-write', // Does not see have effect on minio, see below
+                 Bucket: bucketName
+             });
+             const data = await s3.send(command);
+             res(data);
+         }
+         catch (error) {
+             rej(error);
+         }
      });
      // set it public
-     await new Promise((res, rej) => {
-         s3.putBucketPolicy({
-             Bucket: bucketName,
-             Policy: `{ "Version": "2012-10-17", "Statement": [{ "Sid": "MakeItPublic", "Effect": "Allow", "Principal": "*", "Action": "s3:GetObject", "Resource": "arn:aws:s3:::${bucketName}/*" }] }'`
-         }, function (err, data) {
-             (err) ? rej(err) : res(data);
-         });
+     await new Promise(async (res, rej) => {
+         try {
+             const command = new PutBucketPolicyCommand({
+                 Bucket: bucketName,
+                 Policy: `{ "Version": "2012-10-17", "Statement": [{ "Sid": "MakeItPublic", "Effect": "Allow", "Principal": "*", "Action": "s3:GetObject", "Resource": "arn:aws:s3:::${bucketName}/*" }] }'`
+             });
+             const data = await s3.send(command);
+             res(data);
+         }
+         catch (error) {
+             rej(error);
+         }
      });
  }
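One behavioral shift worth flagging: the v2 `bucketExists` resolved `false` on any `headBucket` error, but the ported version rejects instead, so the `if (!(await bucketExists(...)))` auto-create path above can now throw rather than create. The `new Promise(async ...)` wrapper is also redundant once `send()` returns a promise. A sketch that keeps the old semantics, assuming MinIO reports a missing bucket with the standard `NotFound` error name:

```ts
import { HeadBucketCommand, S3Client } from '@aws-sdk/client-s3';

async function bucketExists(s3: S3Client, bucketName: string): Promise<boolean> {
  try {
    await s3.send(new HeadBucketCommand({ Bucket: bucketName }));
    return true;
  } catch (error) {
    // v3 service errors carry a name; a missing bucket surfaces as NotFound
    if ((error as Error).name === 'NotFound') return false;
    throw error;
  }
}
```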
package/dist/driver.d.ts CHANGED
@@ -1,4 +1,3 @@
- /// <reference types="node" />
  import { Readable, Writable } from 'stream';
  import { BucketFile, BucketType } from './types.js';
  export interface Driver<F = any> {
package/dist/index.d.ts CHANGED
@@ -5,7 +5,7 @@ import { MinioDriverCfg } from './driver-minio.js';
  import { BucketFile, ListOptions, ListResult } from './types.js';
  export { signUrl, SignUrlOptions, urlSigner } from './url-signer.js';
  export { Bucket, BucketFile, ListOptions, ListResult };
- declare type GetBucketOptions = {
+ type GetBucketOptions = {
      log?: boolean;
  } & (GsDriverCfg | S3DriverCfg | MinioDriverCfg);
  export declare function getBucket(options: GetBucketOptions): Promise<Bucket>;
package/dist/types.d.ts CHANGED
@@ -1,5 +1,5 @@
  import type { Bucket } from './bucket.js';
- export declare type BucketType = 's3' | 'gs' | 'minio';
+ export type BucketType = 's3' | 'gs' | 'minio';
  export interface BucketFile {
      bucket: Bucket;
      path: string;
@@ -8,7 +8,7 @@ export interface BucketFile {
      contentType?: string;
      local?: string;
  }
- export declare type BucketFileDeleted = BucketFile & {
+ export type BucketFileDeleted = BucketFile & {
      deleted: boolean;
  };
  export interface ListResult {
@@ -27,4 +27,4 @@ export interface ListOptions {
   * - when string means it is the prefix (which can be of glob format)
   * - when ListOptions prefix can be specified as property.
   */
- export declare type ListArg = ListOptions | string;
+ export type ListArg = ListOptions | string;
package/dist/url-signer.d.ts CHANGED
@@ -1,10 +1,10 @@
- declare type CloudSignUrlOptions = {
+ type CloudSignUrlOptions = {
      type: 's3' | 'gs';
      expires: number;
      keyName: string;
      key: string;
  };
- export declare type SignUrlOptions = CloudSignUrlOptions | {
+ export type SignUrlOptions = CloudSignUrlOptions | {
      type: 'minio';
  };
  export declare function signUrl(url: string, opts: SignUrlOptions): string;
package/dist/url-signer.js CHANGED
@@ -110,7 +110,7 @@ function gs_urlSigner(baseUrl, opts) {
      const url = base_url + pathFromBaseUrl;
      // URL to sign
      const urlToSign = `${url}?Expires=${opts.expires}&KeyName=${opts.keyName}`;
-     let signature = crypto.createHmac('sha1', su_key_buff).update(urlToSign).digest('base64');
+     let signature = crypto.createHmac('sha1', new Uint8Array(su_key_buff)).update(urlToSign).digest('base64');
      signature = signature.replace(/[+/=]/g, c => GCP_BASE64_REPLACE[c]);
      // Add signature to urlToSign
      return `${urlToSign}&Signature=${signature}`;
@@ -121,7 +121,7 @@ function gs_sign_url(url, opts) {
      const urlToSign = `${url}?Expires=${opts.expires}&KeyName=${opts.keyName}`;
      // Compute signature
      let su_key_buff = Buffer.from(opts.key, 'base64');
-     let signature = crypto.createHmac('sha1', su_key_buff).update(urlToSign).digest('base64');
+     let signature = crypto.createHmac('sha1', new Uint8Array(su_key_buff)).update(urlToSign).digest('base64');
      signature = signature.replace(/[+/=]/g, c => GCP_BASE64_REPLACE[c]);
      // Add signature to urlToSign
      return urlToSign + `&Signature=${signature}`;
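For reference, the Google Cloud CDN-style signing shown above is plain HMAC-SHA1 over the URL plus its `Expires`/`KeyName` query, re-encoded to Google's web-safe base64. A self-contained sketch of the same math; the substitution table is an assumption here, since the package's `GCP_BASE64_REPLACE` map is not part of this diff:

```ts
import * as crypto from 'node:crypto';

// Assumed web-safe base64 mapping; the actual table is not shown in this diff.
const GCP_BASE64_REPLACE: Record<string, string> = { '+': '-', '/': '_', '=': '' };

function signGsUrl(url: string, expires: number, keyName: string, base64Key: string): string {
  const urlToSign = `${url}?Expires=${expires}&KeyName=${keyName}`;
  const key = Buffer.from(base64Key, 'base64');
  let signature = crypto.createHmac('sha1', new Uint8Array(key)).update(urlToSign).digest('base64');
  signature = signature.replace(/[+/=]/g, (c) => GCP_BASE64_REPLACE[c]);
  return `${urlToSign}&Signature=${signature}`;
}
```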
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
      "name": "cloud-bucket",
      "type": "module",
-     "version": "0.4.1",
+     "version": "0.5.0",
      "description": "Simple multi cloud (Google Storage and AWS S3) bucket API",
      "author": "Jeremy Chone <jeremy.chone@gmail.com>",
      "main": "dist/index.js",
@@ -10,24 +10,25 @@
          "url": "git://github.com/BriteSnow/node-cloud-bucket.git"
      },
      "engines": {
-         "node": ">=14"
+         "node": ">=20"
      },
      "dependencies": {
-         "@google-cloud/storage": "^5.18.0",
-         "@types/micromatch": "^4.0.2",
-         "@types/mime-types": "^2.1.1",
-         "aws-sdk": "^2.1061.0",
+         "@google-cloud/storage": "^7.18.0",
+         "@types/micromatch": "^4.0.10",
+         "@types/mime-types": "^3.0.1",
+         "@aws-sdk/client-s3": "^3.964.0",
+         "@aws-sdk/lib-storage": "^3.964.0",
          "fs-extra-plus": "^0.6.0",
-         "micromatch": "^4.0.4",
-         "mime-types": "^2.1.34"
+         "micromatch": "^4.0.8",
+         "mime-types": "^3.0.2"
      },
      "devDependencies": {
-         "@types/js-yaml": "^4.0.5",
-         "@types/mocha": "^9.1.0",
-         "js-yaml": "^4.1.0",
-         "mocha": "^9.1.4",
-         "ts-node": "^10.4.0",
-         "typescript": "^4.5.5"
+         "@types/js-yaml": "^4.0.9",
+         "@types/mocha": "^10.0.10",
+         "js-yaml": "^4.1.1",
+         "mocha": "^11.7.5",
+         "ts-node": "^10.9.2",
+         "typescript": "^5.9.3"
      },
      "scripts": {
          "test": "TS_NODE_PROJECT='test/tsconfig.json' ./node_modules/.bin/mocha",
package/src/bucket.ts CHANGED
@@ -1,5 +1,5 @@
+ import { S3Client } from '@aws-sdk/client-s3';
  import { Bucket as GoogleBucket } from '@google-cloud/storage';
- import { S3 } from 'aws-sdk';
  import { glob } from 'fs-extra-plus';
  import { lookup } from 'mime-types';
  import * as Path from 'path';
@@ -21,7 +21,7 @@ export interface Bucket {
      type: BucketType;
      name: string;

-     readonly s3?: S3;
+     readonly s3?: S3Client;
      readonly googleBucket?: GoogleBucket;


@@ -91,9 +91,9 @@ class BucketImpl<F> implements Bucket {
          return this.driver.name;
      }

-     get s3(): S3 | undefined {
+     get s3(): S3Client | undefined {
          if (this.driver.type === 's3') {
-             return (<any>this.driver).s3 as S3;
+             return (<any>this.driver).s3 as S3Client;
          }
      }

package/src/driver-aws.ts CHANGED
@@ -1,16 +1,15 @@
- import type { S3 as S3_TYPE } from 'aws-sdk';
- import { ListObjectsV2Request } from 'aws-sdk/clients/s3';
+ import { CopyObjectCommand, DeleteObjectCommand, GetObjectCommand, HeadObjectCommand, ListObjectsV2Command, ListObjectsV2Request, PutObjectCommand, _Object as S3_TYPE, S3Client } from '@aws-sdk/client-s3';
+ import { Upload } from "@aws-sdk/lib-storage";
  import { PassThrough, Readable, Writable } from "stream";
  import { Driver, ListCloudFilesOptions, ListCloudFilesResult } from "./driver";
  import { BucketFile, BucketType } from './types';
  const micromatch = (await import('micromatch')).default;
  const { createReadStream, createWriteStream } = (await import('fs-extra')).default;
- const { Credentials, S3 } = (await import('aws-sdk')).default;

  // import {Object as AwsFile} from 'aws-sdk';

  // type S3 = AWS.S3;
- type AwsFile = S3_TYPE.Object & { ContentType?: string };
+ type AwsFile = S3_TYPE & { ContentType?: string };

  export interface S3DriverCfg {
      bucketName: string;
@@ -19,9 +18,16 @@ export interface S3DriverCfg {
  }

  export async function getS3Driver(cfg: S3DriverCfg) {
-     const credentials = new Credentials(cfg.access_key_id, cfg.access_key_secret);
      // Create S3 service object
-     const s3 = new S3({ apiVersion: '2006-03-01', credentials });
+     let region = process.env.AWS_REGION || 'us-east-1';
+     const s3 = new S3Client({
+         region,
+         apiVersion: '2006-03-01',
+         credentials: {
+             accessKeyId: cfg.access_key_id,
+             secretAccessKey: cfg.access_key_secret
+         }
+     });
      return new S3Driver(s3, cfg.bucketName);
  }

@@ -46,7 +52,7 @@ class S3UploadWriteStream extends PassThrough {
  }

  export class S3Driver implements Driver<AwsFile> {
-     private s3: S3_TYPE;
+     private s3: S3Client;
      private baseParams: { Bucket: string };

      get type(): BucketType {
@@ -57,7 +63,7 @@ export class S3Driver implements Driver<AwsFile> {
          return this.baseParams.Bucket;
      }

-     constructor(s3: S3_TYPE, bucketName: string) {
+     constructor(s3: S3Client, bucketName: string) {
          this.s3 = s3;
          this.baseParams = { Bucket: bucketName };
      }
@@ -87,7 +93,12 @@ export class S3Driver implements Driver<AwsFile> {

      async getCloudFile(path: string): Promise<AwsFile | null> {
          try {
-             const object = await this.s3.headObject({ ...this.baseParams, ...{ Key: path } }).promise();
+             const command = new HeadObjectCommand({
+                 ...this.baseParams,
+                 ...{ Key: path }
+             });
+
+             const object = await this.s3.send(command);
              // bucket: this,
              // path,
              // updated,
@@ -134,7 +145,8 @@ export class S3Driver implements Driver<AwsFile> {

      // perform the s3 list request
      try {
-         const awsResult = await this.s3.listObjectsV2(params).promise();
+         const command = new ListObjectsV2Command(params);
+         const awsResult = await this.s3.send(command);
          const awsFiles = awsResult.Contents as AwsFile[];
          // if glob, filter again the result
          let files: AwsFile[] = (!glob) ? awsFiles : awsFiles.filter(af => micromatch.isMatch(af.Key!, glob));
@@ -166,14 +178,16 @@ export class S3Driver implements Driver<AwsFile> {
          Bucket: dest.bucket.name,
          Key: dest.path
      }
-     await this.s3.copyObject(params).promise();
+     const command = new CopyObjectCommand(params);
+     await this.s3.send(command);
  }


  async downloadCloudFile(rawFile: AwsFile, localPath: string): Promise<void> {
      const remotePath = rawFile.Key!;
      const params = { ...this.baseParams, ...{ Key: remotePath } };
-     const remoteReadStream = this.s3.getObject(params).createReadStream();
+     const command = new GetObjectCommand(params);
+     const remoteReadStream = (await this.s3.send(command)).Body as Readable;
      const localWriteStream = createWriteStream(localPath);
      const writePromise = new Promise<void>((resolve, reject) => {
          localWriteStream.once('close', () => {
@@ -190,7 +204,8 @@ export class S3Driver implements Driver<AwsFile> {

      async uploadCloudFile(localPath: string, remoteFilePath: string, contentType?: string): Promise<AwsFile> {
          const readable = createReadStream(localPath);
-         const awsResult = await this.s3.putObject({ ...this.baseParams, ...{ Key: remoteFilePath, Body: readable, ContentType: contentType } }).promise();
+         const command = new PutObjectCommand({ ...this.baseParams, ...{ Key: remoteFilePath, Body: readable, ContentType: contentType } });
+         const awsResult = await this.s3.send(command);
          // TODO: probably check the awsResult that match remoteFilePath
          return { Key: remoteFilePath };

@@ -198,34 +213,39 @@
      }

      async downloadAsText(path: string): Promise<string> {
          const params = { ...this.baseParams, ...{ Key: path } };
-         const obj = await this.s3.getObject(params).promise();
+         const command = new GetObjectCommand(params);
+         const obj = await this.s3.send(command);
          const content = obj.Body!.toString();
          return content;
      }

      async uploadCloudContent(path: string, content: string, contentType?: string): Promise<void> {
-
-         await this.s3.putObject({ ...this.baseParams, ...{ Key: path, Body: content, ContentType: contentType } }).promise();
+         const command = new PutObjectCommand({ ...this.baseParams, ...{ Key: path, Body: content, ContentType: contentType } });
+         await this.s3.send(command);
      }

      async createReadStream(path: string): Promise<Readable> {
          const params = { ...this.baseParams, ...{ Key: path } };
-         const obj = this.s3.getObject(params);
+         const command = new GetObjectCommand(params);
+         const obj = await this.s3.send(command);

          if (!obj) {
              throw new Error(`Object not found for ${path}`);
          }
-         return obj.createReadStream();
+         return obj.Body as Readable;
      }

      async createWriteStream(path: string, contentType?: string): Promise<Writable> {
          const writable = new S3UploadWriteStream();

          const params = { ...this.baseParams, ...{ Key: path, ContentType: contentType }, Body: writable };
-         const uploadCtrl = this.s3.upload(params);
+         const uploadCtrl = new Upload({
+             client: this.s3,
+             params: params
+         });

          // NOTE: We use the S3UploadWriteStream trigger finish and close stream even when the upload is done
-         uploadCtrl.promise().then(() => {
+         uploadCtrl.done().then(() => {
              writable.triggerFinishAndClose();
          });
@@ -239,7 +259,8 @@ export class S3Driver implements Driver<AwsFile> {
          if (exists) {
              // NOTE: between the first test and this delete, the object might have been deleted, but since s3.deleteObjecct
              // does not seems to tell if the object exits or not, this is the best can do.
-             await this.s3.deleteObject({ ...this.baseParams, ...{ Key: path } }).promise();
+             const command = new DeleteObjectCommand({ ...this.baseParams, ...{ Key: path } });
+             await this.s3.send(command);
              return true;
          } else {
              process.stdout.write(` - Skipped (object not found)\n`);
package/src/driver-minio.ts CHANGED
@@ -1,7 +1,6 @@
- import type { S3 as S3_TYPE } from 'aws-sdk';
+ import { HeadBucketCommand, PutBucketPolicyCommand, S3Client } from '@aws-sdk/client-s3';
  import { S3Driver, S3DriverCfg } from './driver-aws.js';
  import { BucketType } from './types.js';
- const { S3 } = (await import('aws-sdk')).default;

  export interface MinioDriverCfg extends S3DriverCfg {
      minio_endpoint: string;
@@ -9,12 +8,15 @@ export interface MinioDriverCfg extends S3DriverCfg {

  export async function getMinioDriver(cfg: MinioDriverCfg) {
      // const credentials = new Credentials(cfg.access_key_id, cfg.access_key_secret);
-     const s3 = new S3({
-         accessKeyId: cfg.access_key_id,
-         secretAccessKey: cfg.access_key_secret,
-         endpoint: cfg.minio_endpoint,
-         s3ForcePathStyle: true, // needed with minio (otherwise bucket.locahost and get address not found)
-         signatureVersion: 'v4'
+     let region = process.env.AWS_REGION || 'us-east-1';
+     const s3 = new S3Client({
+         region,
+         credentials: {
+             accessKeyId: cfg.access_key_id,
+             secretAccessKey: cfg.access_key_secret
+         },
+         endpoint: cfg.minio_endpoint,
+         forcePathStyle: true, // needed with minio (otherwise bucket.locahost and get address not found)
      });

      // For Minio, assume mock mode, so, auto create bucket
@@ -34,38 +36,48 @@ class MinioDriver extends S3Driver {



- async function bucketExists(s3: S3_TYPE, bucketName: string) {
-
-     return new Promise((res, rej) => {
-         s3.headBucket({
-             Bucket: bucketName
-         }, (err, data) => {
-             (err) ? res(false) : res(true);
-         })
+ async function bucketExists(s3: S3Client, bucketName: string) {
+     return new Promise(async (res, rej) => {
+         try {
+             const command = new HeadBucketCommand({
+                 Bucket: bucketName
+             });
+             const data = await s3.send(command);
+             res(data);
+         } catch (error) {
+             rej(error);
+         }
      });
-
  }

- async function createBucket(s3: S3_TYPE, bucketName: string) {
+ async function createBucket(s3: S3Client, bucketName: string) {

      // create the bucket
-     await new Promise((res, rej) => {
-         s3.createBucket({
-             // ACL: 'public-read-write', // Does not see have effect on minio, see below
-             Bucket: bucketName
-         }, function (err, data) {
-             (err) ? rej(err) : res(data);
-         });
+     await new Promise(async (res, rej) => {
+         try {
+             const command = new HeadBucketCommand({
+                 // ACL: 'public-read-write', // Does not see have effect on minio, see below
+                 Bucket: bucketName
+             });
+             const data = await s3.send(command);
+             res(data);
+         } catch (error) {
+             rej(error);
+         }
      });

      // set it public
-     await new Promise((res, rej) => {
-         s3.putBucketPolicy({
-             Bucket: bucketName,
-             Policy: `{ "Version": "2012-10-17", "Statement": [{ "Sid": "MakeItPublic", "Effect": "Allow", "Principal": "*", "Action": "s3:GetObject", "Resource": "arn:aws:s3:::${bucketName}/*" }] }'`
-         }, function (err, data) {
-             (err) ? rej(err) : res(data);
-         });
+     await new Promise(async (res, rej) => {
+         try {
+             const command = new PutBucketPolicyCommand({
+                 Bucket: bucketName,
+                 Policy: `{ "Version": "2012-10-17", "Statement": [{ "Sid": "MakeItPublic", "Effect": "Allow", "Principal": "*", "Action": "s3:GetObject", "Resource": "arn:aws:s3:::${bucketName}/*" }] }'`
+             });
+             const data = await s3.send(command);
+             res(data);
+         } catch (error) {
+             rej(error);
+         }
      })

  }
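A porting note on `createBucket` above: the first promise, under the `// create the bucket` comment, now sends `HeadBucketCommand`, which only checks existence, so nothing actually creates the bucket before the policy is applied. The v3 counterpart of v2's `s3.createBucket()` is `CreateBucketCommand`; a minimal sketch of what was presumably intended:

```ts
import { CreateBucketCommand, S3Client } from '@aws-sdk/client-s3';

async function createBucket(s3: S3Client, bucketName: string) {
  // v3 equivalent of the v2 s3.createBucket(...) call this code replaced
  await s3.send(new CreateBucketCommand({ Bucket: bucketName }));
}
```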
package/src/url-signer.ts CHANGED
@@ -133,7 +133,7 @@ function gs_urlSigner(baseUrl: string, opts: CloudSignUrlOptions): (pathFromBase
      const url = base_url + pathFromBaseUrl;
      // URL to sign
      const urlToSign = `${url}?Expires=${opts.expires}&KeyName=${opts.keyName}`;
-     let signature = crypto.createHmac('sha1', su_key_buff).update(urlToSign).digest('base64');
+     let signature = crypto.createHmac('sha1', new Uint8Array(su_key_buff)).update(urlToSign).digest('base64');
      signature = signature.replace(/[+/=]/g, c => (<any>GCP_BASE64_REPLACE)[c]);
      // Add signature to urlToSign
      return `${urlToSign}&Signature=${signature}`;
@@ -147,7 +147,7 @@ function gs_sign_url(url: string, opts: CloudSignUrlOptions) {

      // Compute signature
      let su_key_buff = Buffer.from(opts.key, 'base64');
-     let signature = crypto.createHmac('sha1', su_key_buff).update(urlToSign).digest('base64');
+     let signature = crypto.createHmac('sha1', new Uint8Array(su_key_buff)).update(urlToSign).digest('base64');
      signature = signature.replace(/[+/=]/g, c => (<any>GCP_BASE64_REPLACE)[c]);

      // Add signature to urlToSign
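The `new Uint8Array(su_key_buff)` wrappers in both signing functions do not change the output: `createHmac` accepted the `Buffer` before and still does at runtime, and constructing a `Uint8Array` from a `Buffer` copies the bytes verbatim. The wrapper mainly sidesteps stricter `Buffer`/`Uint8Array` generics in newer `@types/node`. A quick equivalence check with an illustrative key and message:

```ts
import * as crypto from 'node:crypto';

const key = Buffer.from('c2VjcmV0LWtleQ==', 'base64');
const msg = 'https://example.com/a?Expires=123&KeyName=k';

const viaBuffer = crypto.createHmac('sha1', key).update(msg).digest('base64');
const viaUint8 = crypto.createHmac('sha1', new Uint8Array(key)).update(msg).digest('base64');

// Identical digests; the wrapper exists only to satisfy the type checker
console.assert(viaBuffer === viaUint8);
```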