@beauraines/node-helpers 4.0.39 → 4.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -2,6 +2,15 @@
 
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
 
+### [4.1.1](https://github.com/beauraines/node-helpers/compare/v4.1.0...v4.1.1) (2024-10-05)
+
+## [4.1.0](https://github.com/beauraines/node-helpers/compare/v4.0.39...v4.1.0) (2024-10-05)
+
+
+### Features
+
+* **Azure Storage and helpers:** getBinaryBlob and decompress gzip files functions ([#147](https://github.com/beauraines/node-helpers/issues/147)) ([ec156af](https://github.com/beauraines/node-helpers/commit/ec156afb043a70e8f78beb7d276e35b643ab6668)), closes [#146](https://github.com/beauraines/node-helpers/issues/146)
+
 ### [4.0.39](https://github.com/beauraines/node-helpers/compare/v4.0.38...v4.0.39) (2024-10-05)
 
 ### [4.0.38](https://github.com/beauraines/node-helpers/compare/v4.0.37...v4.0.38) (2024-09-29)
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@beauraines/node-helpers",
-  "version": "4.0.39",
+  "version": "4.1.1",
   "description": "Collection of node helpers",
   "main": "index.js",
   "repository": {
@@ -21,6 +21,7 @@
     "@azure/storage-queue": "^12.11.0",
     "azure-devops-node-api": "^14.0.1",
     "dayjs": "^1.11.7",
+    "gunzip-maybe": "^1.4.2",
     "node-fetch": "^2.6.7",
     "sparkly": "^5.0.0",
     "sqlite": "^5.0.1",
package/src/azure.js CHANGED
@@ -210,8 +210,7 @@ getStorageQueueSignedURL(queueUrl,options) {
 
   /**
    * Gets a blob and returns the content. The idea is that you can get a blob without
-   * having to save it to a file and then re-read it. This may be limited in that it
-   * can only deal with non-binary content.
+   * having to save it to a file and then re-read it. This cannot handle binary data
    *
    * @param {string} containerName the container to get the blob from
    * @param {string} blobName the name of the blob to get
@@ -236,6 +235,30 @@ getStorageQueueSignedURL(queueUrl,options) {
 
   }
 
+
+  /**
+   * Gets a blob and returns the content as a Buffer.
+   *
+   * @param {string} containerName the container to get the blob from
+   * @param {string} blobName the name of the blob to get
+   * @returns {Buffer} the downloaded blob as
+   */
+  async getBinaryBlob(containerName,blobName) {
+    const blobServiceClient = new BlobServiceClient(
+      this.host('blob',this.cloudName),
+      new StorageSharedKeyCredential(this.storageAccountName, this.storageAccountKey)
+    );
+    const containerClient = blobServiceClient.getContainerClient(containerName);
+    const blobClient = containerClient.getBlobClient(blobName);
+
+    // Get blob content from position 0 to the end
+    // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody
+    const downloadBlockBlobResponse = await blobClient.download();
+    const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);
+    return downloaded
+
+  }
+
  /**
   * Lists the blobs in a specified container, returning an array of the BlobItem object
   *
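For context, the new `getBinaryBlob` method is a thin wrapper around the Azure SDK's blob download, collecting the response stream into a Buffer so binary content survives intact. A minimal standalone sketch of the same pattern follows; the environment variables, endpoint URL, and example names are assumptions for illustration, not taken from the diff.

```js
// Sketch: download a blob into a Buffer, mirroring the new getBinaryBlob method.
// AZURE_STORAGE_ACCOUNT / AZURE_STORAGE_KEY and the endpoint URL are assumed here.
const { BlobServiceClient, StorageSharedKeyCredential } = require('@azure/storage-blob');

// Collect a readable stream into a single Buffer (the same job the package's
// streamToBuffer helper performs for getBinaryBlob).
function streamToBuffer(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on('data', (chunk) => chunks.push(chunk));
    readableStream.on('end', () => resolve(Buffer.concat(chunks)));
    readableStream.on('error', reject);
  });
}

async function downloadBlobToBuffer(containerName, blobName) {
  const account = process.env.AZURE_STORAGE_ACCOUNT; // assumed env var
  const key = process.env.AZURE_STORAGE_KEY;         // assumed env var
  const blobServiceClient = new BlobServiceClient(
    `https://${account}.blob.core.windows.net`,
    new StorageSharedKeyCredential(account, key)
  );
  const blobClient = blobServiceClient
    .getContainerClient(containerName)
    .getBlobClient(blobName);

  // download() returns a response whose readableStreamBody is a Node.js stream;
  // collecting it yields the blob's raw bytes, binary-safe.
  const response = await blobClient.download();
  return streamToBuffer(response.readableStreamBody);
}
```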
package/src/helpers.js CHANGED
@@ -1,5 +1,6 @@
 const fs = require('fs');
 const sparkly = require('sparkly')
+const gunzip = require('gunzip-maybe');
 
 /**
  * Converts a string to Title Case, using whitespace as the delimiter
@@ -105,6 +106,7 @@ function getEpochMillis() {
   return Date.now()
 }
 
+
 // TODO Add unit test
 // Expected output last 30 days [1,5] ▁▂▄▆█ 5 from [1,2,3,4,5]
 /**
@@ -128,6 +130,8 @@ function sparkline(data,label,options) {
   // coerces the minimum value to zero because the mimimum option is only used for range validation,
   // not display https://github.com/sindresorhus/sparkly/blob/9e33eaff891c41e8fb8c8883f62e9821729a9882/index.js#L15
   // sparkly(open,{minimum:27,maximum:50})
+
+  // TODO add option to not display labels issue #148
   return `${label} [${minValue},${maxValue}] ${sparkly(data.map( x=> x- minValue))} ${lastValue}`
 }
 
@@ -165,17 +169,24 @@ async function streamToBuffer(readableStream) {
   });
 }
 
+// Function to decompress a gzipped file and return a stream
+const decompressFile = (filePath) => {
+  return fs.createReadStream(filePath).pipe(gunzip());
+};
+
+
 module.exports = {
-    getEpochMillis,
-    getResourceId,
-    fileExists,
-    groupAndSum,
-    readFile,
-    listFiles,
-    sparkline,
-    streamToBuffer,
-    stripNewLines,
-    toTitleCase,
-    unixTimestamp,
-    writeFile
+    decompressFile,
+    getEpochMillis,
+    getResourceId,
+    fileExists,
+    groupAndSum,
+    readFile,
+    listFiles,
+    sparkline,
+    streamToBuffer,
+    stripNewLines,
+    toTitleCase,
+    unixTimestamp,
+    writeFile
 }
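The new `decompressFile` helper returns a stream rather than a file, so callers pipe or collect it themselves; `gunzip-maybe` decompresses gzip input and passes non-gzip input through unchanged. A short usage sketch follows; the require path and file names are assumptions (this diff does not show how or whether `index.js` re-exports the helper).

```js
// Sketch: decompress a (possibly) gzipped file to disk using the new helper.
const fs = require('fs');
const { pipeline } = require('stream/promises');
// Assumed require path; adjust to however the package actually exposes its helpers.
const { decompressFile } = require('@beauraines/node-helpers/src/helpers');

async function extract(gzPath, outPath) {
  // decompressFile returns the file stream piped through gunzip-maybe:
  // gzip input is decompressed, anything else passes through untouched.
  await pipeline(decompressFile(gzPath), fs.createWriteStream(outPath));
}

extract('./data/export.json.gz', './data/export.json').catch(console.error);
```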