eip-cloud-services 1.1.5 → 1.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/package.json +1 -1
  2. package/src/s3.js +15 -7
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "eip-cloud-services",
- "version": "1.1.5",
+ "version": "1.1.7",
  "description": "Houses a collection of helpers for connecting with Cloud services.",
  "main": "index.js",
  "scripts": {
package/src/s3.js CHANGED
@@ -51,6 +51,9 @@ const crypto = require ( 'crypto' );
  const { cwd } = require ( 'process' );
  const { log } = config?.s3?.logsFunction ? require ( `${ cwd ()}/${config?.s3?.logsFunction}` ) : console;
  const S3 = new S3Client ( { region: 'eu-west-1' } );
+ const { pipeline, Writable } = require ( 'stream' );
+ const util = require ( 'util' );
+ const pipelineAsync = util.promisify ( pipeline );

  /**
  * Check if an object exists in S3.
@@ -121,7 +124,6 @@ exports.get = async ( key, bucket = config?.s3?.Bucket, options = {} ) => {
  }
  else {

- const response = await S3.send ( command );
  let data = await streamToBuffer ( response.Body );

  if ( response.ContentEncoding && response.ContentEncoding === 'gzip' ) {
@@ -390,10 +392,16 @@ exports.listObjects = async ( prefix, bucket = config?.s3?.Bucket, continuationT

  exports.getClient = S3;

- const streamToBuffer = ( stream ) =>
- new Promise ( ( resolve, reject ) => {
- const chunks = [];
- stream.on ( 'data', ( chunk ) => chunks.push ( chunk ) );
- stream.on ( 'error', reject );
- stream.on ( 'end', () => resolve ( Buffer.concat ( chunks ) ) );
+ const streamToBuffer = async ( stream ) => {
+ const chunks = [];
+ const collectorStream = new Writable ( {
+ write ( chunk, encoding, callback ) {
+ chunks.push ( chunk );
+ callback ();
+ }
  } );
+
+ await pipelineAsync ( stream, collectorStream );
+
+ return Buffer.concat ( chunks );
+ };
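
The core change in src/s3.js rewrites the streamToBuffer helper so it drains a readable stream through stream.pipeline into a Writable collector instead of attaching 'data', 'error' and 'end' handlers by hand. Below is a minimal standalone sketch of that pattern; the Readable.from sample input and the console.log call are illustrative only and are not part of the package:

const { pipeline, Writable, Readable } = require ( 'stream' );
const util = require ( 'util' );
const pipelineAsync = util.promisify ( pipeline );

// Collect every chunk emitted by the source stream into a single Buffer.
const streamToBuffer = async ( stream ) => {
    const chunks = [];
    const collectorStream = new Writable ( {
        write ( chunk, encoding, callback ) {
            chunks.push ( chunk );
            callback ();
        }
    } );

    // pipeline forwards errors from either stream and cleans both up on failure.
    await pipelineAsync ( stream, collectorStream );

    return Buffer.concat ( chunks );
};

// Illustrative usage with an in-memory stream standing in for an S3 response body.
streamToBuffer ( Readable.from ( [ Buffer.from ( 'hello ' ), Buffer.from ( 'world' ) ] ) )
    .then ( ( buffer ) => console.log ( buffer.toString () ) ); // hello world

Compared with the removed hand-rolled Promise, the pipeline-based version also destroys the source stream if the collector fails, rather than leaving it dangling.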