@beauraines/node-helpers 2.9.0 → 2.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/test.yaml +1 -0
- package/CHANGELOG.md +2 -1
- package/package.json +4 -2
- package/src/azure.js +7 -1
- package/src/azure.test.js +75 -3
- package/src/config.js +2 -2
- package/src/credentials.js +1 -0
- package/src/database.js +1 -0
- package/src/database.test.js +3 -0
- package/src/helpers.js +3 -0
- package/src/jira.js +1 -0
package/CHANGELOG.md
CHANGED
@@ -2,7 +2,7 @@
 
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
 
-## [2.
+## [2.11.0](https://github.com/beauraines/node-helpers/compare/v2.7.1...v2.11.0) (2023-07-01)
 
 
 ### Features
@@ -15,6 +15,7 @@ All notable changes to this project will be documented in this file. See [standa
 
 ### Bug Fixes
 
+* **deps:** bump azure-devops-node-api from 12.0.0 to 12.1.0 ([#37](https://github.com/beauraines/node-helpers/issues/37)) ([b71396d](https://github.com/beauraines/node-helpers/commit/b71396d28d089a1f8919ad53363a4759f61b1e52))
 * **deps:** bump node-fetch from 2.6.11 to 2.6.12 ([786f458](https://github.com/beauraines/node-helpers/commit/786f458fbf0c03fd9462ab67db558c5646afb62e))
 
 ### [2.7.1](https://github.com/beauraines/node-helpers/compare/v2.7.0...v2.7.1) (2023-06-14)
package/package.json
CHANGED
@@ -1,12 +1,14 @@
 {
   "name": "@beauraines/node-helpers",
-  "version": "2.9.0",
+  "version": "2.11.0",
   "description": "Collection of node helpers",
   "main": "index.js",
   "scripts": {
     "test": "jest",
     "release": "standard-version",
-    "should-release": "should-release"
+    "should-release": "should-release",
+    "lint": "eslint '*/**.js'",
+    "lint:fix": "eslint '*/**.js' --fix"
   },
   "author": "beau.raines@gmail.com",
   "license": "ISC",
package/src/azure.js
CHANGED
@@ -1,5 +1,6 @@
 const azure = require('azure-storage');
 const dayjs = require('dayjs')
+const fs = require('fs');
 const { streamToBuffer } = require('./helpers.js')
 const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");
 const { QueueClient } = require("@azure/storage-queue");
@@ -149,6 +150,7 @@ getStorageQueueSignedURL(queueUrl,options) {
  *
  * @param {string} containerName the container to which the file will be uploaded
  * @param {string} file The path the the local file to upload to the container
+ * @returns {boolean} Success or failure to upload
  */
 //TODO migrate to @azure/storage-blob
 uploadBlobFromFile(containerName,file) {
@@ -161,8 +163,11 @@ getStorageQueueSignedURL(queueUrl,options) {
   blobService.createBlockBlobFromLocalFile(containerName,blobName,file,options,function(error,response) {
     if( error) {
       console.error(error.message)
+      return false
     } else {
+      // TODO remove this from this function - separation of concerns, let the caller do the logging
       console.log(`${response.name} uploaded to ${response.container} container`)
+      return true
     }
   });
 }
@@ -183,8 +188,10 @@ getStorageQueueSignedURL(queueUrl,options) {
   const blobClient = containerClient.getBlobClient(blobName);
 
   const downloadBlockBlobResponse = await blobClient.download();
+  // TODO add a success and failure test
   let writer = fs.createWriteStream(file)
   downloadBlockBlobResponse.readableStreamBody.pipe(writer)
+  // TODO remove this from this function - separation of concerns, let the caller do the logging
   console.log(`${blobName} downloaded to ${file}`)
 
 }
@@ -229,7 +236,6 @@ getStorageQueueSignedURL(queueUrl,options) {
     new StorageSharedKeyCredential(this.storageAccountName, this.storageAccountKey)
   );
   const containerClient = blobServiceClient.getContainerClient(containerName);
-  let i = 1;
   let blobs = []
   for await (const blob of containerClient.listBlobsFlat()) {
     blobs.push(blob)
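Note on the upload change: the new `return true` / `return false` statements sit inside the azure-storage callback, so a caller cannot read the documented boolean from `uploadBlobFromFile` directly. A minimal sketch of how a caller could get an awaitable result by promisifying the same callback API (the `uploadBlob` wrapper name is illustrative, not part of the package):

// Sketch only: wraps the legacy azure-storage callback in a Promise so the caller
// can await success or failure instead of relying on console logging.
const azure = require('azure-storage');

function uploadBlob(blobService, containerName, blobName, file, options = {}) {
  return new Promise((resolve, reject) => {
    blobService.createBlockBlobFromLocalFile(containerName, blobName, file, options,
      (error, response) => {
        if (error) {
          reject(error);      // caller decides how to report the failure
        } else {
          resolve(response);  // caller decides whether to log the upload
        }
      });
  });
}

// Usage sketch, assuming a connection string is configured elsewhere:
// const blobService = azure.createBlobService(process.env.AZURE_STORAGE_CONNECTION_STRING);
// await uploadBlob(blobService, 'node-helpers-testing', 'package.json', './package.json');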
package/src/azure.test.js
CHANGED
@@ -1,5 +1,9 @@
+const dayjs = require('dayjs')
+const { fileExists } = require('./helpers.js')
 const AzureStorage = require('./azure.js')
-
+const os = require('os');
+const path = require('path');
+
 describe('Azure Storage module', () => {
 
   it('should be created with default values', () => {
@@ -42,8 +46,76 @@
 
   })
 
-  it.
-
+  it.skip('should generate a signed URL for a blob', () => {
+    const account = "devstoreaccount1";
+    const accountKey = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+    let containerName = 'node-helpers-testing'
+    let blobName = 'package.json'
+
+    let azure = new AzureStorage(account,accountKey,{cloudName:'Azurite'})
+    const options = {
+      tokenExpiry: 42
+    }
+    const signedUrl = azure.generateBlobSignedUrl(containerName,blobName,options)
+    let url = new URL(signedUrl)
+    const sasTokenParams = url.searchParams;
+
+    expect(signedUrl).toContain(azure.host('blob','Azurite'))
+    expect(dayjs(sasTokenParams.get('st')).isBefore(dayjs()))
+    expect(dayjs(sasTokenParams.get('se')).isAfter(dayjs()))
+    expect(dayjs(sasTokenParams.get('st')).add(azure.tokenExpiry).isSame(dayjs(sasTokenParams.get('se'))))
+    expect(sasTokenParams.get('sp')).toBe('r') // Read only by default
+  })
+
+  it.skip('should upload a blob from a file',async () => {
+    const account = "devstoreaccount1";
+    const accountKey = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+    let containerName = 'node-helpers-testing'
+    let file = 'package.json'
+    let azure = new AzureStorage(account,accountKey,{cloudName:'Azurite'})
+    let success = await azure.uploadBlobFromFile(containerName,file)
+    expect(success)
+  })
+
   it.todo('should send a message to the storage queue')
 
+  it.skip('should get a blob from azure storage', async () =>{
+    const account = "devstoreaccount1";
+    const accountKey = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+    let containerName = 'node-helpers-testing'
+    let blobName = 'package.json'
+    let azure = new AzureStorage(account,accountKey,{cloudName:'Azurite'})
+    let file = await azure.getBlob(containerName,blobName)
+    file = JSON.parse(file)
+    expect(file.name).toBe("@beauraines/node-helpers")
+
+  })
+
+  it.skip('should download a blob to a file', async () => {
+    const account = "devstoreaccount1";
+    const accountKey = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+    let containerName = 'node-helpers-testing'
+    let blobName = 'package.json'
+    let file = path.join(os.tmpdir(),`package.${dayjs().unix()}.json`)
+    let azure = new AzureStorage(account,accountKey,{cloudName:'Azurite'})
+    await azure.downloadBlobToFile(containerName,blobName,file)
+    expect(fileExists(file))
+  })
+
+
+
+
+  it.skip('should list blobs from azure storage', async () => {
+    const account = "devstoreaccount1";
+    const accountKey = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
+    let containerName = 'node-helpers-testing'
+    let blobName = 'package.json'
+    let azure = new AzureStorage(account,accountKey,{cloudName:'Azurite'})
+    let blobs = await azure.listBlobs(containerName)
+    console.log(blobs)
+    expect(Array.isArray(blobs));
+    expect(blobs.length).toBeGreaterThan(0)
+    expect(blobs.filter(b => b.name == blobName).length).toBe(1)
+  })
+
 })
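Note on the new tests: they are committed as `it.skip` because they are integration tests that expect Azurite, the local Azure Storage emulator, to be running; `devstoreaccount1` and the long key are Azurite's published well-known development credentials, not leaked secrets. A minimal sketch of a pre-flight check a developer might run before un-skipping them, assuming Azurite's default blob endpoint (the `azuriteIsRunning` helper name is illustrative):

// Sketch only: pings the Azurite emulator on its documented default blob endpoint.
const { BlobServiceClient, StorageSharedKeyCredential } = require('@azure/storage-blob');

const account = 'devstoreaccount1';
const accountKey = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==';

async function azuriteIsRunning() {
  const client = new BlobServiceClient(
    `http://127.0.0.1:10000/${account}`, // Azurite default blob endpoint
    new StorageSharedKeyCredential(account, accountKey)
  );
  try {
    await client.getProperties(); // cheap round trip; throws if the emulator is down
    return true;
  } catch {
    return false;
  }
}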
package/src/config.js
CHANGED
@@ -47,7 +47,7 @@ const validateConfig = async (configFile, configProps) => {
   config = JSON.parse(config);
   // Check for properties
   let validConfig = true
-  for (key of configProps) {
+  for (const key of configProps) {
     validConfig = config[key] ? true : false
   }
 
@@ -67,7 +67,7 @@ const validateConfig = async (configFile, configProps) => {
 const createConfig = async (configFile,configProps) => {
   configFile = path.join(homedir(),configFile)
   let config = {}
-  for (key of configProps) {
+  for (const key of configProps) {
     config[key] = ''
   }
   fs.writeFileSync(configFile,JSON.stringify(config))
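Both config.js changes are the same fix: without a declaration, `for (key of configProps)` either leaks `key` as an implicit global or, in strict mode, throws at runtime. A small standalone illustration of the difference (not taken from the package):

'use strict';

const configProps = ['host', 'token'];

// Before: `key` is never declared, so strict mode throws
// ReferenceError: key is not defined (sloppy mode would create a global).
// for (key of configProps) { console.log(key); }

// After: `key` is block-scoped to each iteration and lint-clean.
for (const key of configProps) {
  console.log(key);
}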
package/src/credentials.js
CHANGED
package/src/database.js
CHANGED
package/src/database.test.js
CHANGED
@@ -4,6 +4,7 @@ const sqlite3 = require('sqlite3');
 const helpers = require('./helpers');
 const { getDBConnection } = require('./database');
 const path = require('path');
+const process = require('node:process')
 
 jest.mock('sqlite');
 jest.mock('os');
@@ -37,6 +38,7 @@ describe('database module', () => {
 
   const file = undefined;
   // call function with null file
+  // eslint-disable-next-line no-unused-vars
   const db = await getDBConnection(file)
 
   // In theory you should mock the open function and what it returns.
@@ -59,6 +61,7 @@ describe('database module', () => {
 
   const expectedDefaultFile = path.join(os.homedir(),'BurnDownStatus.db')
 
+  // eslint-disable-next-line no-unused-vars
   const db = await getDBConnection(expectedDefaultFile)
 
   expect(sqlite.open).toHaveBeenCalledWith({
package/src/helpers.js
CHANGED
@@ -115,6 +115,7 @@ function getEpochMillis() {
  * @param {object} options Optional options for display, e.g display min,max,last, range coercion
  * @returns
  */
+// eslint-disable-next-line no-unused-vars
 function sparkline(data,label,options) {
   // TODO add handling if data is object
   // let open = last30days.map( x=> x.open_count)
@@ -153,9 +154,11 @@ async function streamToBuffer(readableStream) {
   return new Promise((resolve, reject) => {
     const chunks = [];
     readableStream.on("data", (data) => {
+      // eslint-disable-next-line no-undef
      chunks.push(data instanceof Buffer ? data : Buffer.from(data));
     });
     readableStream.on("end", () => {
+      // eslint-disable-next-line no-undef
      resolve(Buffer.concat(chunks));
     });
     readableStream.on("error", reject);
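The helpers.js additions silence `no-undef` for `Buffer` line by line. An alternative, shown here only as a sketch since the project's actual ESLint configuration is not part of this diff, would be to declare the Node and Jest environments once in a classic `.eslintrc.js` so globals like `Buffer` and `describe` are known to the linter:

// Hypothetical .eslintrc.js for illustration; the package's real config is not shown in this diff.
module.exports = {
  env: {
    node: true,     // Buffer, process, __dirname, ...
    commonjs: true, // require / module.exports
    es2021: true,   // modern syntax globals
    jest: true      // describe / it / expect in *.test.js files
  },
  parserOptions: {
    ecmaVersion: 2021
  }
};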
package/src/jira.js
CHANGED