@beauraines/node-helpers 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.eslintrc.js ADDED
@@ -0,0 +1,17 @@
1
+ module.exports = {
2
+ "env": {
3
+ "browser": true,
4
+ "commonjs": true,
5
+ "es2021": true,
6
+ "jest/globals": true
7
+ },
8
+ "extends": "eslint:recommended",
9
+ "overrides": [
10
+ ],
11
+ "parserOptions": {
12
+ "ecmaVersion": "latest"
13
+ },
14
+ "plugins": ["jest"],
15
+ "rules": {
16
+ }
17
+ }
@@ -0,0 +1,26 @@
1
+ name: Node.js CI
2
+
3
+ on:
4
+ push:
5
+ branches: [ main ]
6
+ pull_request:
7
+ branches: [ main ]
8
+
9
+ jobs:
10
+ build:
11
+
12
+ runs-on: ubuntu-latest
13
+
14
+ strategy:
15
+ matrix:
16
+ node-version: [16.x]
17
+
18
+ steps:
19
+ - uses: actions/checkout@v3
20
+ - name: Use Node.js ${{ matrix.node-version }}
21
+ uses: actions/setup-node@v3
22
+ with:
23
+ node-version: ${{ matrix.node-version }}
24
+ - run: npm ci
25
+ - run: npm run build --if-present
26
+ - run: npm test
package/README.md ADDED
@@ -0,0 +1,5 @@
1
+ # Node Helpers
2
+
3
+ This is a set of helpers for node. They're written for quick reuse rather than robust functions or efficiency. For instance, the database functions will create a new database connection every time. This is not efficient, but it makes making the function call simple.
4
+
5
+ My use is primarily in quicker one-off scripts that sometimes morph into something long-lasting...
package/index.js ADDED
@@ -0,0 +1,14 @@
1
+
2
+ const azureStorage = require("./src/azure")
3
+ const credentials = require("./src/credentials.js");
4
+ const database = require("./src/database");
5
+ const helpers = require("./src/helpers");
6
+ const jira = require("./src/jira");
7
+
8
+ module.exports = {
9
+ azureStorage,
10
+ credentials,
11
+ database,
12
+ helpers,
13
+ jira
14
+ }
package/jest.config.js ADDED
@@ -0,0 +1,195 @@
1
+ /*
2
+ * For a detailed explanation regarding each configuration property, visit:
3
+ * https://jestjs.io/docs/configuration
4
+ */
5
+
6
+ module.exports = {
7
+ // All imported modules in your tests should be mocked automatically
8
+ // automock: false,
9
+
10
+ // Stop running tests after `n` failures
11
+ // bail: 0,
12
+
13
+ // The directory where Jest should store its cached dependency information
14
+ // cacheDirectory: "/private/var/folders/8s/3xlngh6x3734dvdvgk4235_r0000gp/T/jest_dy",
15
+
16
+ // Automatically clear mock calls, instances, contexts and results before every test
17
+ clearMocks: true,
18
+
19
+ // Indicates whether the coverage information should be collected while executing the test
20
+ collectCoverage: true,
21
+
22
+ // An array of glob patterns indicating a set of files for which coverage information should be collected
23
+ // collectCoverageFrom: undefined,
24
+
25
+ // The directory where Jest should output its coverage files
26
+ coverageDirectory: "coverage",
27
+
28
+ // An array of regexp pattern strings used to skip coverage collection
29
+ // coveragePathIgnorePatterns: [
30
+ // "/node_modules/"
31
+ // ],
32
+
33
+ // Indicates which provider should be used to instrument code for coverage
34
+ coverageProvider: "v8",
35
+
36
+ // A list of reporter names that Jest uses when writing coverage reports
37
+ // coverageReporters: [
38
+ // "json",
39
+ // "text",
40
+ // "lcov",
41
+ // "clover"
42
+ // ],
43
+
44
+ // An object that configures minimum threshold enforcement for coverage results
45
+ // coverageThreshold: undefined,
46
+
47
+ // A path to a custom dependency extractor
48
+ // dependencyExtractor: undefined,
49
+
50
+ // Make calling deprecated APIs throw helpful error messages
51
+ // errorOnDeprecated: false,
52
+
53
+ // The default configuration for fake timers
54
+ // fakeTimers: {
55
+ // "enableGlobally": false
56
+ // },
57
+
58
+ // Force coverage collection from ignored files using an array of glob patterns
59
+ // forceCoverageMatch: [],
60
+
61
+ // A path to a module which exports an async function that is triggered once before all test suites
62
+ // globalSetup: undefined,
63
+
64
+ // A path to a module which exports an async function that is triggered once after all test suites
65
+ // globalTeardown: undefined,
66
+
67
+ // A set of global variables that need to be available in all test environments
68
+ // globals: {},
69
+
70
+ // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
71
+ // maxWorkers: "50%",
72
+
73
+ // An array of directory names to be searched recursively up from the requiring module's location
74
+ // moduleDirectories: [
75
+ // "node_modules"
76
+ // ],
77
+
78
+ // An array of file extensions your modules use
79
+ // moduleFileExtensions: [
80
+ // "js",
81
+ // "mjs",
82
+ // "cjs",
83
+ // "jsx",
84
+ // "ts",
85
+ // "tsx",
86
+ // "json",
87
+ // "node"
88
+ // ],
89
+
90
+ // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
91
+ // moduleNameMapper: {},
92
+
93
+ // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
94
+ // modulePathIgnorePatterns: [],
95
+
96
+ // Activates notifications for test results
97
+ // notify: false,
98
+
99
+ // An enum that specifies notification mode. Requires { notify: true }
100
+ // notifyMode: "failure-change",
101
+
102
+ // A preset that is used as a base for Jest's configuration
103
+ // preset: undefined,
104
+
105
+ // Run tests from one or more projects
106
+ // projects: undefined,
107
+
108
+ // Use this configuration option to add custom reporters to Jest
109
+ // reporters: undefined,
110
+
111
+ // Automatically reset mock state before every test
112
+ // resetMocks: false,
113
+
114
+ // Reset the module registry before running each individual test
115
+ // resetModules: false,
116
+
117
+ // A path to a custom resolver
118
+ // resolver: undefined,
119
+
120
+ // Automatically restore mock state and implementation before every test
121
+ // restoreMocks: false,
122
+
123
+ // The root directory that Jest should scan for tests and modules within
124
+ // rootDir: undefined,
125
+
126
+ // A list of paths to directories that Jest should use to search for files in
127
+ // roots: [
128
+ // "<rootDir>"
129
+ // ],
130
+
131
+ // Allows you to use a custom runner instead of Jest's default test runner
132
+ // runner: "jest-runner",
133
+
134
+ // The paths to modules that run some code to configure or set up the testing environment before each test
135
+ // setupFiles: [],
136
+
137
+ // A list of paths to modules that run some code to configure or set up the testing framework before each test
138
+ // setupFilesAfterEnv: [],
139
+
140
+ // The number of seconds after which a test is considered as slow and reported as such in the results.
141
+ // slowTestThreshold: 5,
142
+
143
+ // A list of paths to snapshot serializer modules Jest should use for snapshot testing
144
+ // snapshotSerializers: [],
145
+
146
+ // The test environment that will be used for testing
147
+ // testEnvironment: "jest-environment-node",
148
+
149
+ // Options that will be passed to the testEnvironment
150
+ // testEnvironmentOptions: {},
151
+
152
+ // Adds a location field to test results
153
+ // testLocationInResults: false,
154
+
155
+ // The glob patterns Jest uses to detect test files
156
+ // testMatch: [
157
+ // "**/__tests__/**/*.[jt]s?(x)",
158
+ // "**/?(*.)+(spec|test).[tj]s?(x)"
159
+ // ],
160
+
161
+ // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
162
+ // testPathIgnorePatterns: [
163
+ // "/node_modules/"
164
+ // ],
165
+
166
+ // The regexp pattern or array of patterns that Jest uses to detect test files
167
+ // testRegex: [],
168
+
169
+ // This option allows the use of a custom results processor
170
+ // testResultsProcessor: undefined,
171
+
172
+ // This option allows use of a custom test runner
173
+ // testRunner: "jest-circus/runner",
174
+
175
+ // A map from regular expressions to paths to transformers
176
+ // transform: undefined,
177
+
178
+ // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
179
+ // transformIgnorePatterns: [
180
+ // "/node_modules/",
181
+ // "\\.pnp\\.[^\\/]+$"
182
+ // ],
183
+
184
+ // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
185
+ // unmockedModulePathPatterns: undefined,
186
+
187
+ // Indicates whether each individual test should be reported during the run
188
+ // verbose: undefined,
189
+
190
+ // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
191
+ // watchPathIgnorePatterns: [],
192
+
193
+ // Whether to use watchman for file crawling
194
+ // watchman: true,
195
+ };
package/package.json ADDED
@@ -0,0 +1,23 @@
1
+ {
2
+ "name": "@beauraines/node-helpers",
3
+ "version": "2.0.0",
4
+ "description": "Collection of node helpers",
5
+ "main": "index.js",
6
+ "scripts": {
7
+ "test": "jest"
8
+ },
9
+ "author": "beau.raines@gmail.com",
10
+ "license": "ISC",
11
+ "dependencies": {
12
+ "@azure/storage-queue": "^12.11.0",
13
+ "azure-storage": "^2.10.7",
14
+ "node-fetch": "^2.6.7",
15
+ "sqlite": "^4.1.2",
16
+ "sqlite3": "^5.1.2"
17
+ },
18
+ "devDependencies": {
19
+ "eslint": "^8.27.0",
20
+ "eslint-plugin-jest": "^27.2.1",
21
+ "jest": "^29.3.1"
22
+ }
23
+ }
package/src/azure.js ADDED
@@ -0,0 +1,145 @@
1
+ const azure = require('azure-storage');
2
+ const { QueueClient, StorageSharedKeyCredential } = require("@azure/storage-queue");
3
+ var path = require('path');
4
+
5
+ /**
6
+ * Creates an AzureStorage class to interact with Azure Storage Blob Containers and Storage Queues.
7
+ *
8
+ * @param {string} storageAccountName the name of the storage account
9
+ * @param {string} storageAccountKey the key for the storage account
10
+ * @param {Object} [options]
11
+ * @param {int} [options.tokenExpiry] expiration minutes for SAS tokens, defaults to 60 minutes
12
+ * @param {int} [options.queueMessageTTLSeconds] expiration in seconds for messages written to storage queues, defaults to 3600 (1 hour)
13
+ * @param {string} [options.cloudName] AzureCloud, AzureChinaCloud, AzureUSGovernment, AzureGermanCloud or Azurite for the emulator. Defaults to AzureCloud
14
+ */
15
+ class AzureStorage {
16
+
17
+ constructor(storageAccountName,storageAccountKey,options) {
18
+ this.storageAccountName = storageAccountName
19
+ this.storageAccountKey = storageAccountKey
20
+ this.tokenExpiry = options?.tokenExpiry || 60 // defaults to 60 minute expiry
21
+ this.queueMessageTTLSeconds = options?.queueMessageTTLSeconds || 60 *60 // defaults to 60 minutes
22
+ this.cloudName = options?.cloudName || 'AzureCloud'
23
+
24
+ }
25
+
26
+
27
+ /**
28
+ * Determines the Azure Storage service URL, accounting for the service type, government cloud or if
29
+ * the Azurite storage emulator is used.
30
+ * @param {string} service The Azure Storage service, one of `blob`,`queue` or `table`
31
+ * @param {string} cloudName One of AzureCloud, AzureChinaCloud, AzureUSGovernment, AzureGermanCloud or Azurite for the emulator
32
+ * @returns
33
+ */
34
+ host(service,cloudName) {
35
+ let port,url;
36
+ switch (service) {
37
+ case 'blob':
38
+ port = 10000
39
+ break;
40
+ case 'queue':
41
+ port = 10001
42
+ break;
43
+ case 'table':
44
+ port = 10002
45
+ break;
46
+ default:
47
+ break;
48
+ }
49
+ switch (cloudName) {
50
+ case 'Azurite':
51
+ url = `http://127.0.0.1:${port}/${this.storageAccountName}`
52
+ break;
53
+ case 'AzureUSGovernment':
54
+ url = `https://${this.storageAccountName}.${service}.core.usgovcloudapi.net`
55
+ break;
56
+ case 'AzureChinaCloud':
57
+ url = `https://${this.storageAccountName}.${service}.core.chinacloudapi.cn`
58
+ break;
59
+ case 'AzureGermanCloud':
60
+ url = `https://${this.storageAccountName}.${service}.core.cloudapi.de`
61
+ break;
62
+ case 'AzureCloud':
63
+ default:
64
+ url = `https://${this.storageAccountName}.${service}.core.windows.net`
65
+ break;
66
+ }
67
+ return url
68
+ }
69
+
70
+
71
+ /**
72
+ * Sends a message to the specified storage queue. The messages are given a TTL based upon the
73
+ * class's `queueMessageTTLSeconds` value.
74
+ * @param {string} queueUrl The URL to the storage queue
75
+ * @param {string} messageContent The message to send to the queue
76
+ */
77
+ async sendMessageToQueue(queueUrl, messageContent,) {
78
+ try {
79
+ const queueClient = new QueueClient(
80
+ queueUrl,
81
+ new StorageSharedKeyCredential(this.storageAccountName, this.storageAccountKey)
82
+ );
83
+ let queueOptions = {
84
+ messageTimeToLive: this.queueMessageTTLSeconds
85
+ };
86
+ let sendMessageResponse = await queueClient.sendMessage(messageContent, queueOptions);
87
+ console.log(
88
+ "Sent message successfully, service assigned message Id:", sendMessageResponse.messageId,
89
+ "service assigned request Id:", sendMessageResponse.requestId
90
+ );
91
+ } catch (error) {
92
+ console.error(error.message)
93
+ }
94
+ }
95
+
96
+ /**
97
+ * Generates a signed URL for the specified blob in the specified container. The signed URL will expire
98
+ * after the class's `tokenExpiry` minutes, which defaults to 60 if not specified.
99
+ *
100
+ * @param {string} containerName The name of the parent container for the blob
101
+ * @param {string} blobName The name of the blob to generate the token
102
+ * @returns {string} the signed URL for the blob
103
+ */
104
+ generateBlobSignedUrl(containerName, blobName) {
105
+
106
+ const sharedAccessPolicy = {
107
+ AccessPolicy: {
108
+ Permissions: azure.BlobUtilities.SharedAccessPermissions.READ,
109
+ Start: new Date(),
110
+ Expiry: azure.date.minutesFromNow(this.tokenExpiry),
111
+ },
112
+ };
113
+
114
+ const blobService = azure.createBlobService(this.storageAccountName, this.storageAccountKey, this.host('blob',this.cloudName));
115
+ const sasToken = blobService.generateSharedAccessSignature(containerName, blobName, sharedAccessPolicy);
116
+ const blobUrl = blobService.getUrl(containerName, blobName, sasToken);
117
+
118
+ return blobUrl;
119
+ }
120
+
121
+ /**
122
+ * Uploads a local file to an Azure container as a blob.
123
+ *
124
+ * @param {string} containerName the container to which the file will be uploaded
125
+ * @param {string} file The path to the local file to upload to the container
126
+ */
127
+ uploadBlobFromFile(containerName,file) {
128
+ const blobService = azure.createBlobService(this.storageAccountName, this.storageAccountKey, this.host('blob',this.cloudName));
129
+ const options = {
130
+ access: 'container'
131
+ };
132
+
133
+ let blobName = path.basename(file)
134
+ blobService.createBlockBlobFromLocalFile(containerName,blobName,file,options,function(error,response) {
135
+ if( error) {
136
+ console.error(error.message)
137
+ } else {
138
+ console.log(`${response.name} uploaded to ${response.container} container`)
139
+ }
140
+ });
141
+ }
142
+
143
+ }
144
+
145
+ module.exports = AzureStorage
@@ -0,0 +1,49 @@
1
+ const AzureStorage = require('./azure.js')
2
+
3
+ describe('Azure Storage module', () => {
4
+
5
+ it('should be created with default values', () => {
6
+ let azure = new AzureStorage('account','key')
7
+ expect(azure.cloudName).toBe('AzureCloud')
8
+ expect(azure.tokenExpiry).toBe(60)
9
+ expect(azure.queueMessageTTLSeconds).toBe(3600)
10
+ })
11
+
12
+ it('defaults can be overidden', () => {
13
+ let options = {
14
+ tokenExpiry: 30,
15
+ queueMessageTTLSeconds: 500,
16
+ cloudName: 'AzureUSGovernment'
17
+ }
18
+
19
+ let azure = new AzureStorage('account','key', options)
20
+ expect(azure.cloudName).toBe('AzureUSGovernment')
21
+ expect(azure.tokenExpiry).toBe(30)
22
+ expect(azure.queueMessageTTLSeconds).toBe(500)
23
+ })
24
+
25
+ it('should generate host URLs', () => {
26
+
27
+ let azure = new AzureStorage('account','key')
28
+
29
+ const azuriteBlobUrl = azure.host('blob','Azurite')
30
+ const governmentQueueUrl = azure.host('queue','AzureUSGovernment')
31
+ const chinaTableUrl = azure.host('table','AzureChinaCloud')
32
+ const germanCloudBlobUrl = azure.host('blob','AzureGermanCloud')
33
+ const azureCloudBlobUrl = azure.host('blob','AzureCloud')
34
+ const defaultQueueUrl = azure.host('queue')
35
+
36
+ expect(azuriteBlobUrl).toBe('http://127.0.0.1:10000/account')
37
+ expect(governmentQueueUrl).toBe('https://account.queue.core.usgovcloudapi.net')
38
+ expect(chinaTableUrl).toBe('https://account.table.core.chinacloudapi.cn')
39
+ expect(germanCloudBlobUrl).toBe('https://account.blob.core.cloudapi.de')
40
+ expect(azureCloudBlobUrl).toBe('https://account.blob.core.windows.net')
41
+ expect(defaultQueueUrl).toBe('https://account.queue.core.windows.net')
42
+
43
+ })
44
+
45
+ it.todo('should generate a signed URL for a blob')
46
+ it.todo('should upload a blob from a file')
47
+ it.todo('should send a message to the storage queue')
48
+
49
+ })
@@ -0,0 +1,48 @@
1
+ const fs = require('fs');
2
+ const {fileExists} = require('./helpers');
3
+
4
+ /**
5
+ * Reads a file for credentials and validates that the file has the required attributes.
6
+ * It will throw an error and EXIT if the file is not found or the file does not pass validation.
7
+ * @param {string} file the fully qualified path and file to the credentials file
8
+ * @returns Object containing the validated credentials
9
+ */
10
/**
 * Reads a file for credentials and validates that the file has the required attributes.
 * It will throw an error and EXIT if the file is not found or the file does not pass validation.
 * @param {string} file the fully qualified path and file to the credentials file
 * @returns Object containing the validated credentials
 */
async function getCredentials(file) {
  const filePath = String(file);

  // Missing file is fatal: log and exit, matching the library's best-effort style.
  if (!(await fileExists(filePath))) {
    console.error(`Credentials file not found: ${filePath}`);
    process.exit(1);
  }

  const contents = fs.readFileSync(filePath, { encoding: 'utf8', flag: 'r' });
  const credentials = JSON.parse(contents);

  // A file that parses but lacks the required properties is also fatal.
  if (!(await validateCredentials(credentials))) {
    console.error(`Invalid credentials file ${filePath}`);
    process.exit(1);
  }

  return credentials;
}
26
+
27
+ /**
28
+ * Validates that the credentials include the token property
29
+ * @param {Object} credentials The credentials object
30
+ * @returns Boolean
31
+ */
32
/**
 * Validates that the credentials include the token property
 * @param {Object} credentials The credentials object
 * @returns Boolean
 */
async function validateCredentials(credentials) {
  // TODO include the properties array as a parameter
  const required = ['token'];
  const presentKeys = Object.keys(credentials);
  // Valid only when every required property name appears among the object's keys.
  return required.every((property) => presentKeys.includes(property));
}
45
+
46
+ module.exports = {
47
+ getCredentials
48
+ }
@@ -0,0 +1,4 @@
1
+
2
+ describe('credentials module', () => {
3
+ it.todo('placeholder')
4
+ })
@@ -0,0 +1,112 @@
1
+ const {homedir} = require('os');
2
+ const sqlite = require('sqlite');
3
+ const sqlite3 = require('sqlite3');
4
+ const {fileExists} = require('./helpers');
5
+
6
+ /**
7
+ * Opens the BurnDownStatus SQLite3 Database
8
+ * @param file file name of the SQLite3 DB. If not provided, defaults to ${homedir}/BurnDownStatus.db
9
+ * @returns SQLite database connection
10
+ */
11
+ async function getDBConnection(file) {
12
+ const homeDir = homedir();
13
+ file = file ? file : `${homeDir}/BurnDownStatus.db`;
14
+ if (! await fileExists(file)){
15
+ console.error(`${file} not found`);
16
+ // ! Separation of concerns - this should probably not be doing the exiting, but it is.
17
+ // This should return something and the calling script will exit
18
+ process.exit(1)
19
+ }
20
+ const db = await sqlite.open({
21
+ filename: file,
22
+ driver: sqlite3.Database
23
+ });
24
+ return db;
25
+ }
26
+
27
+ /**
28
+ *
29
+ * @param {Object} data The burndown data to be inserted into the Burndown database
30
+ *
31
+ * @returns {Object} The result of the database running the provided query
32
+ * @deprecated Should be replaced with the insert function
33
+ */
34
/**
 * Inserts/upserts one burndown record into the qa_burndown table.
 *
 * @param {Object} data The burndown data to be inserted into the Burndown database
 * @returns {Object} The result of the database running the provided query
 * @deprecated Should be replaced with the insert function
 */
async function writeToDb(data) {
  // TODO should take db, query and values as parameters
  const db = await getDBConnection();
  const query =
    'insert into qa_burndown (date, qa_review_count, qa_validated_count) values (?,?,?)' +
    ' on conflict do update set qa_review_count = excluded.qa_review_count, qa_validated_count = excluded.qa_validated_count';
  const result = await db.run(query, [data.date, data.qa_review_count, data.qa_validated_count]);
  await db.close();
  return result;
}
45
+
46
+ /**
47
+ *
48
+ * Inserts or upserts data into the specified table in the specified database
49
+ *
50
+ * @param {string} database Path and filename to the SQLite3 DB
51
+ * @param {string} table
52
+ * @param {array} data array of fields and values to be inserted
53
+ * @param {array} keys Optional array of record keys, if provided will perform an UPSERT
54
+ * @returns {array} A not-very-useful array of responses from the individual insert statements
55
+ */
56
/**
 * Inserts or upserts data into the specified table in the specified database.
 *
 * Fix: the original iterated rows with `data.forEach(async ...)`. The awaits
 * inside a forEach callback are never awaited by the caller, so `db.close()`
 * ran before the inserts executed and the returned result array was always
 * empty. Rows are now awaited sequentially and the connection is closed only
 * after all statements complete (or fail).
 *
 * @param {string} database Path and filename to the SQLite3 DB
 * @param {string} table Name of the table to insert into
 * @param {array} data array of row objects to be inserted; every row must share the shape of the first row
 * @param {array} [keys] Optional array of record keys, if provided will perform an UPSERT
 * @returns {array} Responses from the individual insert statements
 */
async function insert(database, table, data, keys) {
  const db = await getDBConnection(database);

  const fields = Object.keys(data[0]).toString();
  const fieldsCount = Object.keys(data[0]).length;
  const valueSubstitution = Array(fieldsCount).fill('?').toString();

  let query = `insert into ${table} (${fields}) values (${valueSubstitution})`;
  if (keys) {
    query = query + buildOnConflictStatement(fields, keys);
  }
  query = query.concat(';');

  const result = [];
  try {
    // Sequential on purpose: a single sqlite connection serializes statements anyway.
    for (const row of data) {
      // TODO add try/catch for SQL error errno and code
      // TODO switch to db.exec() for more meaningful response
      result.push(await db.run(query, Object.values(row)));
    }
  } finally {
    // Close even when a statement throws, so the handle is not leaked.
    await db.close();
  }
  // TODO clean up result
  return result;
}
79
+
80
+ /**
81
+ *
82
+ * @param {string} fields Comma separated list of fields from the data
83
+ * @param {array} keys Array of key values for the record, these will be excluded from the UPSERT statement
84
+ * @returns {string} the on conflict statement for an UPSERT based on the fields and keys provided
85
+ */
86
/**
 * Builds the `on conflict` clause for an UPSERT from the insert fields,
 * excluding the record's key fields from the update list.
 *
 * @param {string} fields Comma separated list of fields from the data
 * @param {array} keys Array of key values for the record; these are excluded from the update set
 * @returns {string} the on conflict statement for an UPSERT
 */
function buildOnConflictStatement(fields, keys) {
  // Non-key fields are the ones updated when the insert conflicts.
  const updatable = fields.split(',').filter((field) => !keys.includes(field));
  const assignments = updatable
    .map((field) => ` ${field} = excluded.${field}`)
    .join(',');
  return ` on conflict do update set` + assignments;
}
98
+
99
/**
 * Runs an arbitrary SQL query against the specified database and returns all rows.
 *
 * Fix: the original opened a connection and never closed it, leaking a file
 * handle on every call (the module's convention elsewhere is to close after
 * each operation). The connection is now closed in a finally block.
 *
 * @param {string} database Path and filename to the SQLite3 DB
 * @param {string} query SQL statement to execute
 * @returns {array} All rows produced by the query
 */
async function query(database, query) {
  const db = await getDBConnection(database);
  try {
    // TODO add try/catch for SQL error errno and code
    return await db.all(query);
  } finally {
    await db.close();
  }
}
105
+
106
+
107
+ module.exports = {
108
+ getDBConnection,
109
+ writeToDb,
110
+ insert,
111
+ query
112
+ }
@@ -0,0 +1,74 @@
1
+ const os = require('os');
2
+ const sqlite = require('sqlite');
3
+ const sqlite3 = require('sqlite3');
4
+ const helpers = require('./helpers');
5
+ const { getDBConnection } = require('./database');
6
+
7
+ jest.mock('sqlite');
8
+ jest.mock('os');
9
+ jest.mock('./helpers');
10
+
11
+
12
+ describe('database module', () => {
13
+
14
+ it('should exit if the database does not exist', async () => {
15
+ helpers.fileExists.mockReturnValue(false);
16
+ console.error = jest.fn()
17
+ const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => {});
18
+
19
+
20
+ await getDBConnection('/home/database-does-not-exist.db')
21
+ expect(mockExit).toHaveBeenCalledWith(1)
22
+
23
+ })
24
+
25
+ it('should open the default database if no file specified', async () => {
26
+ // mock sqlite.open()
27
+ // Need `${homedir}/BurnDownStatus.db` or do we mock it?
28
+ const expectedHomeDir = '/home';
29
+
30
+ os.homedir.mockReturnValue(expectedHomeDir);
31
+ // helpers.fileExists.mockResolvedValue(true);
32
+ helpers.fileExists.mockReturnValue(true);
33
+
34
+
35
+ const expectedDefaultFile = `${os.homedir()}/BurnDownStatus.db`
36
+
37
+ const file = undefined;
38
+ // call function with null file
39
+ const db = await getDBConnection(file)
40
+
41
+ // In theory you should mock the open function and what it returns.
42
+
43
+ // expect that sqlite.open() is called with `${homedir}/BurnDownStatus.db`
44
+ expect(sqlite.open).toHaveBeenCalledWith({
45
+ filename: expectedDefaultFile,
46
+ driver: sqlite3.Database
47
+ })
48
+
49
+ })
50
+
51
+ it('should open the specified database if a file is specified', async () => {
52
+
53
+
54
+ const expectedHomeDir = '/home';
55
+
56
+ os.homedir.mockReturnValue(expectedHomeDir);
57
+ helpers.fileExists.mockReturnValue(true);
58
+
59
+ const expectedDefaultFile = `${os.homedir()}/my_database.db`
60
+
61
+ const db = await getDBConnection(expectedDefaultFile)
62
+
63
+ expect(sqlite.open).toHaveBeenCalledWith({
64
+ filename: expectedDefaultFile,
65
+ driver: sqlite3.Database
66
+ })
67
+
68
+
69
+ })
70
+
71
+ })
72
+
73
+
74
+
package/src/helpers.js ADDED
@@ -0,0 +1,107 @@
1
+ const fs = require('fs');
2
+
3
+ /**
4
+ * Converts a string to Title Case, using whitespace as the delimiter
5
+ * @param {String} str String to convert
6
+ * @returns The string converted to title case
7
+ */
8
/**
 * Converts a string to Title Case, using whitespace as the delimiter
 * @param {String} str String to convert
 * @returns The string converted to title case
 */
function toTitleCase(str) {
  // Each \w\S* match is a word: uppercase its first character, lowercase the rest.
  const capitalize = (word) => word[0].toUpperCase() + word.slice(1).toLowerCase();
  return str.replace(/\w\S*/g, capitalize);
}
16
+
17
+ /**
18
+ * Removes newline characters \r and/or \n from a string
19
+ * @param {string} string to remove newlines from
20
+ * @returns string
21
+ */
22
/**
 * Removes newline characters \r and/or \n from a string
 * @param {string} string to remove newlines from
 * @returns string
 */
function stripNewLines(string) {
  return string.split(/[\r\n]/).join('');
}
25
+
26
+ /**
27
+ * Checks to see if the specified file exists
28
+ * @param {string} filePath fully qualified path and filename
29
+ * @returns Boolean
30
+ */
31
/**
 * Checks to see if the specified file exists
 *
 * Fix: the original called the synchronous fs.existsSync from an async
 * function, blocking the event loop; this uses the non-blocking
 * fs.promises API instead.
 *
 * @param {string} filePath fully qualified path and filename
 * @returns {Promise<Boolean>} true when the path exists
 */
async function fileExists(filePath) {
  try {
    await fs.promises.access(filePath);
    return true;
  } catch {
    // Any access error (ENOENT etc.) is treated as "does not exist",
    // matching existsSync's boolean contract.
    return false;
  }
}
34
+
35
+ /**
36
+ * Asynchronously reads the entire file contents and returns it.
37
+ * @param {string} filePath fully qualified path and filename
38
+ * @returns any
39
+ */
40
/**
 * Asynchronously reads the entire file contents and returns it as UTF-8 text.
 *
 * Fix: the original called readFileSync inside an async function, blocking
 * the event loop; this uses the non-blocking fs.promises API instead.
 *
 * @param {string} filePath fully qualified path and filename
 * @returns {Promise<string>} the file contents
 */
async function readFile(filePath) {
  return fs.promises.readFile(filePath, { encoding: 'utf8' });
}
43
+
44
+ /**
45
+ * Asynchronously reads the contents of a directory and returns the filenames as an array. Optionally,
46
+ * filters by the extension
47
+ * @param {String} directory Path to the directory
48
+ * @param {String | null} extension optional file extension to filter for
49
+ * @returns { String[] } of file names
50
+ */
51
/**
 * Asynchronously reads the contents of a directory and returns the filenames
 * as an array. Optionally, filters by the extension (substring match).
 *
 * Fix: when no extension was supplied, `name.includes(undefined)` coerced the
 * argument to the string "undefined" and filtered out every file; the
 * extension filter is now applied only when one is provided.
 *
 * @param {String} directory Path to the directory
 * @param {String | null} [extension] optional file extension to filter for
 * @returns { String[] } of file names
 */
async function listFiles(directory, extension) {
  // TODO check for directory existence
  return fs.readdirSync(directory, { withFileTypes: true })
    .filter((item) => !item.isDirectory())
    .filter((item) => extension == null || item.name.includes(extension))
    .map((item) => item.name);
}
57
+
58
+ /**
59
+ * Groups an array by specified properties and sums other specified properties
60
+ *
61
+ * https://stackoverflow.com/questions/46794232/group-objects-by-multiple-properties-in-array-then-sum-up-their-values
62
+ *
63
+ * @param {Array} arr Array of Objects to aggregate
64
+ * @param {Array} groupKeys keys to group by
65
+ * @param {Array} sumKeys keys of properties to sum by
66
+ * @returns Array of Objects
67
+ */
68
/**
 * Groups an array by specified properties and sums other specified properties
 *
 * https://stackoverflow.com/questions/46794232/group-objects-by-multiple-properties-in-array-then-sum-up-their-values
 *
 * @param {Array} arr Array of Objects to aggregate
 * @param {Array} groupKeys keys to group by
 * @param {Array} sumKeys keys of properties to sum by
 * @returns Array of Objects
 */
function groupAndSum(arr, groupKeys, sumKeys) {
  const buckets = {};
  for (const row of arr) {
    // Composite group id, e.g. groupKeys ['a','b'] over {a:1,b:2} -> "1-2"
    const id = groupKeys.map((k) => row[k]).join('-');
    if (!buckets[id]) {
      buckets[id] = {};
      for (const k of groupKeys) buckets[id][k] = row[k];
      for (const k of sumKeys) buckets[id][k] = 0;
    }
    for (const k of sumKeys) buckets[id][k] += row[k];
  }
  return Object.values(buckets);
}
79
+
80
+ /**
81
+ * Returns the current unix timestamp in seconds
82
+ *
83
+ * @returns Number
84
+ */
85
/**
 * Returns the current unix timestamp in seconds
 *
 * @returns Number
 */
function unixTimestamp() {
  const millis = Date.now();
  return Math.floor(millis / 1000);
}
88
+
89
+ /**
90
+ * Returns unix timestamp in milliseconds
91
+ *
92
+ * @returns Number
93
+ */
94
/**
 * Returns unix timestamp in milliseconds
 *
 * @returns Number
 */
function getEpochMillis() {
  return new Date().getTime();
}
97
+
98
+ module.exports = {
99
+ getEpochMillis,
100
+ fileExists,
101
+ groupAndSum,
102
+ readFile,
103
+ listFiles,
104
+ stripNewLines,
105
+ toTitleCase,
106
+ unixTimestamp
107
+ }
@@ -0,0 +1,9 @@
1
+ const helper = require('./helpers');
2
+
3
+ it('should return a title case string',() => {
4
+ const lowerString = 'a quick brown fox';
5
+ const titleString = 'A Quick Brown Fox';
6
+
7
+ expect(helper.toTitleCase(lowerString)).toBe(titleString);
8
+ })
9
+
package/src/jira.js ADDED
@@ -0,0 +1,163 @@
1
+ const fetch = require('node-fetch');
2
+
3
+ /**
4
+ * Converts an email and password to a base64 encoded string to be used as a Bearer token.
5
+ * NB: This only converts the string and does not pre-pend Bearer
6
+ * @param {string} email email address
7
+ * @param {string} token access token or password
8
+ * @returns String
9
+ */
10
/**
 * Converts an email and password to a base64 encoded string to be used as a Bearer token.
 * NB: This only converts the string and does not pre-pend Bearer
 * @param {string} email email address
 * @param {string} token access token or password
 * @returns String
 */
function credentialsToToken(email, token) {
  const raw = [email, token].join(':');
  return Buffer.from(raw).toString('base64');
}
14
+
15
+ /**
16
+ * Gets up to 100 issues by JQL query, returning the "key","summary","status","statusCategory",
17
+ * "labels","parent","project","customfield_10013"
18
+ * @param {string} jql A valid JQL statement, NB: any quotation marks should be single quotes
19
+ * @param {object} credentials A object at least with the property `bearerToken`
20
+ * @returns Object
21
+ */
22
+ async function getIssuesByJql(jql,credentials) {
23
+ let {domain } = credentials;
24
+ let headersList = {
25
+ "Accept": "*/*",
26
+ "User-Agent": "Thunder Client (https://www.thunderclient.com)",
27
+ "Authorization": `Basic ${credentials.bearerToken}`,
28
+ "Content-Type": "application/json"
29
+ }
30
+
31
+ // TODO make this an option parameter
32
+ let fields = [
33
+ "key","summary","status","statusCategory","labels","parent","project","customfield_10013"
34
+ ];
35
+
36
+ // TODO make the max results an option parameter and variable to be reused in the paginated data increments
37
+ let bodyContent = {
38
+ "startAt": 0,
39
+ "maxResults": 100,
40
+ "jql": jql,
41
+ "fields": fields
42
+ };
43
+
44
+ let response = await fetch(`https://${domain}.atlassian.net/rest/api/3/search`, {
45
+ method: "POST",
46
+ body: JSON.stringify(bodyContent),
47
+ headers: headersList
48
+ })
49
+
50
+ let body
51
+
52
+ if (response.ok) {
53
+ body = await response.json();
54
+ // console.log('total',body.total);
55
+ // console.log('maxResults',body.maxResults);
56
+ if (body.total < body.maxResults) {
57
+ return body;
58
+ }
59
+ } else {
60
+ throw new Error(response.statusText);
61
+ }
62
+
63
+ console.log('There is more data to be pulled...');
64
+ let retrieveCount = 100;
65
+ let totalResults = body.total;
66
+
67
+ let requestBodies = []
68
+ // console.log(bodyContent);
69
+ while (retrieveCount < totalResults) {
70
+ bodyContent.startAt = retrieveCount;
71
+ requestBodies.push({...bodyContent});
72
+ retrieveCount = retrieveCount + 100;
73
+ }
74
+
75
+ // for of is a better practice than forEach
76
+
77
+ // TODO move this to function like
78
+ // const callApi = async (): Promise<ResponseModel> => { /* function code here */ } (edited)
79
+ // Return an array of Promises and wait for all of them to resolve with Promise.all()
80
+
81
+ for (const requestBody of requestBodies) {
82
+ console.log(`Making API call startAt ${requestBody.startAt}`);
83
+ let response = await fetch(`https://${domain}.atlassian.net/rest/api/3/search`, {
84
+ method: "POST",
85
+ body: JSON.stringify(requestBody),
86
+ headers: headersList
87
+ });
88
+
89
+ if (response.ok) {
90
+ let responseBody = await response.json();
91
+ body.issues = body.issues.concat(responseBody.issues);
92
+ console.log(`API call startAt ${requestBody.startAt} complete, found ${responseBody.issues.length}, body length is now ${body.issues.length}`);
93
+ } else {
94
+ throw new Error(response.statusText);
95
+ }
96
+ }
97
+
98
+ console.log("issues", body.issues.length)
99
+ return body;
100
+
101
+ }
102
+
103
+ /**
104
+ * Gets up to 100 Jira issues that belong to the specified Epic
105
+ * @param {string} epic Epic Key, e.g. CEC-3360
106
+ * @param {Object} credentials An object with at least the `bearerToken` property
107
+ * @returns Object
108
+ */
109
/**
 * Gets up to 100 Jira issues that belong to the specified Epic
 * @param {string} epic Epic Key, e.g. CEC-3360
 * @param {Object} credentials An object with at least the `bearerToken` property
 * @returns Object
 */
async function getIssuesForEpic(epic, credentials) {
  const { domain } = credentials;
  const headers = {
    "Accept": "*/*",
    "User-Agent": "Thunder Client (https://www.thunderclient.com)",
    "Authorization": `Basic ${credentials.bearerToken}`
  };

  // TODO make the max results an option parameter
  const url = `https://${domain}.atlassian.net/rest/agile/1.0/epic/${epic}/issue?maxResults=100`;
  const response = await fetch(url, { method: "GET", headers });

  // Guard clause: surface HTTP failures as errors instead of returning a bad body.
  if (!response.ok) {
    throw new Error(response.statusText);
  }
  return response.json();
}
129
+
130
+ /**
131
+ * gets data from any Jira API end point
132
+ * @param {string} url Jira API Endpoint
133
+ * @param {object} credentials the credentials object, with email and token
134
+ * @returns Object
135
+ */
136
/**
 * gets data from any Jira API end point
 * @param {string} url Jira API Endpoint
 * @param {object} credentials the credentials object, with email and token
 * @returns Object
 */
async function getJiraData(url, credentials) {
  const { domain, email, token } = credentials;
  try {
    const response = await fetch(`https://${domain}.atlassian.net/${url}`, {
      method: 'GET',
      headers: {
        'Authorization': `Basic ${credentialsToToken(email, token)}`,
        'Accept': 'application/json',
        'Content-Type': 'application/json'
      }
    });
    if (!response.ok) {
      throw new Error(response.statusText);
    }
    return response.json();
  } catch (error) {
    // NOTE(review): errors are logged and swallowed; callers receive undefined.
    // Consider rethrowing with { cause: error } in a future major version.
    console.error(error.message);
  }
}
157
+
158
+ module.exports = {
159
+ getIssuesByJql,
160
+ getIssuesForEpic,
161
+ credentialsToToken,
162
+ getJiraData
163
+ }