@beauraines/node-helpers 4.0.38 → 4.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,15 @@
2
2
 
3
3
  All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
4
4
 
5
+ ## [4.1.0](https://github.com/beauraines/node-helpers/compare/v4.0.39...v4.1.0) (2024-10-05)
6
+
7
+
8
+ ### Features
9
+
10
+ * **Azure Storage and helpers:** getBinaryBlob and decompress gzip files functions ([#147](https://github.com/beauraines/node-helpers/issues/147)) ([ec156af](https://github.com/beauraines/node-helpers/commit/ec156afb043a70e8f78beb7d276e35b643ab6668)), closes [#146](https://github.com/beauraines/node-helpers/issues/146)
11
+
12
+ ### [4.0.39](https://github.com/beauraines/node-helpers/compare/v4.0.38...v4.0.39) (2024-10-05)
13
+
5
14
  ### [4.0.38](https://github.com/beauraines/node-helpers/compare/v4.0.37...v4.0.38) (2024-09-29)
6
15
 
7
16
  ### [4.0.37](https://github.com/beauraines/node-helpers/compare/v4.0.36...v4.0.37) (2024-09-28)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@beauraines/node-helpers",
3
- "version": "4.0.38",
3
+ "version": "4.1.0",
4
4
  "description": "Collection of node helpers",
5
5
  "main": "index.js",
6
6
  "repository": {
@@ -21,6 +21,7 @@
21
21
  "@azure/storage-queue": "^12.11.0",
22
22
  "azure-devops-node-api": "^14.0.1",
23
23
  "dayjs": "^1.11.7",
24
+ "gunzip-maybe": "^1.4.2",
24
25
  "node-fetch": "^2.6.7",
25
26
  "sparkly": "^5.0.0",
26
27
  "sqlite": "^5.0.1",
package/src/azure.js CHANGED
@@ -210,8 +210,7 @@ getStorageQueueSignedURL(queueUrl,options) {
210
210
 
211
211
  /**
212
212
  * Gets a blob and returns the content. The idea is that you can get a blob without
213
- * having to save it to a file and then re-read it. This may be limited in that it
214
- * can only deal with non-binary content.
213
+ * having to save it to a file and then re-read it. This cannot handle binary data.
215
214
  *
216
215
  * @param {string} containerName the container to get the blob from
217
216
  * @param {string} blobName the name of the blob to get
@@ -236,6 +235,30 @@ getStorageQueueSignedURL(queueUrl,options) {
236
235
 
237
236
  }
238
237
 
238
+
239
+ /**
240
+ * Gets a blob and returns the content as a Buffer.
241
+ *
242
+ * @param {string} containerName the container to get the blob from
243
+ * @param {string} blobName the name of the blob to get
244
+ * @returns {Buffer} the downloaded blob as a Buffer
245
+ */
246
+ async getBinaryBlob(containerName,blobName) {
247
+ const blobServiceClient = new BlobServiceClient(
248
+ this.host('blob',this.cloudName),
249
+ new StorageSharedKeyCredential(this.storageAccountName, this.storageAccountKey)
250
+ );
251
+ const containerClient = blobServiceClient.getContainerClient(containerName);
252
+ const blobClient = containerClient.getBlobClient(blobName);
253
+
254
+ // Get blob content from position 0 to the end
255
+ // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody
256
+ const downloadBlockBlobResponse = await blobClient.download();
257
+ const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);
258
+ return downloaded
259
+
260
+ }
261
+
239
262
  /**
240
263
  * Lists the blobs in a specified container, returning an array of the BlobItem object
241
264
  *
package/src/helpers.js CHANGED
@@ -1,5 +1,6 @@
1
1
  const fs = require('fs');
2
2
  const sparkly = require('sparkly')
3
+ const gunzip = require('gunzip-maybe');
3
4
 
4
5
  /**
5
6
  * Converts a string to Title Case, using whitespace as the delimiter
@@ -165,17 +166,24 @@ async function streamToBuffer(readableStream) {
165
166
  });
166
167
  }
167
168
 
169
+ // Function to decompress a gzipped file and return a stream
170
+ const decompressFile = (filePath) => {
171
+ return fs.createReadStream(filePath).pipe(gunzip());
172
+ };
173
+
174
+
168
175
  module.exports = {
169
- getEpochMillis,
170
- getResourceId,
171
- fileExists,
172
- groupAndSum,
173
- readFile,
174
- listFiles,
175
- sparkline,
176
- streamToBuffer,
177
- stripNewLines,
178
- toTitleCase,
179
- unixTimestamp,
180
- writeFile
176
+ decompressFile,
177
+ getEpochMillis,
178
+ getResourceId,
179
+ fileExists,
180
+ groupAndSum,
181
+ readFile,
182
+ listFiles,
183
+ sparkline,
184
+ streamToBuffer,
185
+ stripNewLines,
186
+ toTitleCase,
187
+ unixTimestamp,
188
+ writeFile
181
189
  }