@itentialopensource/adapter-db_sybase 0.2.1 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,5 @@
+ ---------------------------------------------------------------------------------------------
+ **** Project Compliance Summary ****
+ 0 project(s) are not valid
+ 0 project(s) are valid
+ ---------------------------------------------------------------------------------------------
package/error.json CHANGED
@@ -96,6 +96,12 @@
      "displayString": "Invalid properties: $VARIABLE$",
      "recommendation": "Verify the properties for the adapter"
    },
+   {
+     "key": "Missing Properties",
+     "icode": "AD.306",
+     "displayString": "Property $VARIABLE$ is required",
+     "recommendation": "Please provide the required property"
+   },
    {
      "key": "Query Not Translated",
      "icode": "AD.310",
@@ -168,6 +174,12 @@
      "displayString": "Failure response received for $VARIABLE$",
      "recommendation": "Check the reason for failure in the stack trace"
    },
+   {
+     "key": "Suspended Adapter",
+     "icode": "AD.600",
+     "displayString": "Adapter is suspended",
+     "recommendation": "Check if external system is functional and unsuspend if appropriate"
+   },
    {
      "key": "Caught Exception",
      "icode": "AD.900",
package/metadata.json ADDED
@@ -0,0 +1,77 @@
+ {
+   "name": "adapter-db_sybase",
+   "webName": "Adapter for Integration to Sybase Database",
+   "vendor": "SAP",
+   "product": "Sybase",
+   "osVersion": [],
+   "apiVersions": [],
+   "iapVersions": [
+     "2021.1.x",
+     "2021.2.x",
+     "2022.1.x",
+     "2023.1.x"
+   ],
+   "method": "Library",
+   "type": "Adapter",
+   "domains": [
+     "Data Storage"
+   ],
+   "tags": [
+     "Database & Repository Operations"
+   ],
+   "useCases": [
+     "CRUD Operations",
+     "Persist Data"
+   ],
+   "deprecated": {
+     "isDeprecated": false
+   },
+   "brokerSince": "",
+   "authMethods": [
+     {
+       "type": "Database Auth",
+       "subtypes": [],
+       "primary": true
+     }
+   ],
+   "documentation": {
+     "storeLink": "",
+     "npmLink": "https://www.npmjs.com/package/@itentialopensource/adapter-db_sybase",
+     "repoLink": "https://gitlab.com/itentialopensource/adapters/adapter-db_sybase",
+     "docLink": "",
+     "demoLinks": [],
+     "trainingLinks": [
+       {
+         "title": "Itential Academy",
+         "link": "https://www.itential.com/itential-academy/"
+       }
+     ],
+     "faqLink": "https://docs.itential.com/opensource/docs/troubleshooting-an-adapter",
+     "contributeLink": "https://gitlab.com/itentialopensource/adapters/contributing-guide",
+     "issueLink": "https://itential.atlassian.net/servicedesk/customer/portals",
+     "webLink": "https://www.itential.com/adapters/sybase/",
+     "vendorLink": "https://www.sap.com/index.html",
+     "productLink": "https://www.sap.com/products/technology-platform/sybase-ase.html",
+     "apiLinks": [
+       {
+         "title": "Sybase SQL Reference",
+         "link": "https://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.help.sqlanywhere.12.0.1/dbusage/sqlqueries-sqlug.html",
+         "public": true
+       },
+       {
+         "title": "Sybase Node Library Documentation",
+         "link": "https://www.npmjs.com/package/sybase",
+         "public": true
+       }
+     ]
+   },
+   "assets": [],
+   "relatedItems": {
+     "adapters": [],
+     "integrations": [],
+     "ecosystemApplications": [],
+     "workflowProjects": [],
+     "transformationProjects": [],
+     "exampleProjects": []
+   }
+ }
package/package.json CHANGED
@@ -1,19 +1,21 @@
  {
    "name": "@itentialopensource/adapter-db_sybase",
-   "version": "0.2.1",
+   "version": "0.3.1",
    "description": "Itential adapter to connect to Sybase",
    "main": "adapter.js",
-   "wizardVersion": "2.41.0",
-   "engineVersion": "1.44.2",
+   "wizardVersion": "2.44.7",
+   "engineVersion": "1.67.14",
+   "adapterType": "Library",
    "scripts": {
      "artifactize": "npm i && node utils/packModificationScript.js",
      "preinstall": "node utils/setup.js",
-     "lint": "eslint . --ext .json --ext .js",
+     "deinstall": "node utils/removeHooks.js",
+     "lint": "node --max_old_space_size=4096 ./node_modules/eslint/bin/eslint.js . --ext .json --ext .js",
+     "lint:errors": "node --max_old_space_size=4096 ./node_modules/eslint/bin/eslint.js . --ext .json --ext .js --quiet",
      "test:unit": "mocha test/unit/adapterTestUnit.js --LOG=error",
      "test:integration": "mocha test/integration/adapterTestIntegration.js --LOG=error",
-     "test:cover": "nyc --reporter html --reporter text mocha --reporter dot test/*",
      "test": "npm run test:unit && npm run test:integration",
-     "deploy": "npm publish --registry=http://registry.npmjs.org --access=public",
+     "deploy": "npm publish --registry=https://registry.npmjs.org --access=public",
      "build": "npm run deploy"
    },
    "keywords": [
@@ -29,32 +31,31 @@
    ],
    "license": "Apache-2.0",
    "engines": {
-     "node": ">= 0.12.7"
+     "node": ">=14.0.0",
+     "npm": ">=6.0.0"
    },
    "repository": {
      "type": "git",
-     "url": "git@gitlab.com:itentialopensource/adapters/persistence/adapter-db_sybase.git"
+     "url": "git@gitlab.com:itentialopensource/adapters/adapter-db_sybase.git"
    },
    "author": "Itential",
-   "homepage": "https://gitlab.com/itentialopensource/adapters/persistence/adapter-db_sybase#readme",
+   "homepage": "https://gitlab.com/itentialopensource/adapters/adapter-db_sybase#readme",
    "dependencies": {
-     "ajv": "^6.12.0",
-     "fs-extra": "^8.1.0",
-     "sybase": "^1.2.3",
-     "package-json-validator": "^0.6.3",
+     "ajv": "^8.12.0",
+     "fs-extra": "^11.1.1",
      "readline-sync": "^1.4.10",
+     "sybase": "^1.2.3",
      "uuid": "^3.0.1"
    },
    "devDependencies": {
-     "chai": "^4.2.0",
-     "eslint": "^6.8.0",
-     "eslint-config-airbnb-base": "^14.1.0",
-     "eslint-plugin-import": "^2.20.1",
-     "eslint-plugin-json": "^2.1.1",
-     "mocha": "^7.1.1",
-     "nyc": "^15.0.0",
-     "testdouble": "^3.13.1",
-     "winston": "^3.2.1"
+     "chai": "^4.3.7",
+     "eslint": "^8.44.0",
+     "eslint-config-airbnb-base": "^15.0.0",
+     "eslint-plugin-import": "^2.27.5",
+     "eslint-plugin-json": "^3.1.0",
+     "mocha": "^10.7.0",
+     "testdouble": "^3.18.0",
+     "winston": "^3.13.1"
    },
    "private": false
  }
@@ -0,0 +1,14 @@
+ [
+   {
+     "type": "encryption",
+     "pointer": "/authentication/password"
+   },
+   {
+     "type": "encryption",
+     "pointer": "/authentication/token"
+   },
+   {
+     "type": "encryption",
+     "pointer": "/mongo/password"
+   }
+ ]
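The added file above lists encryption decorators as JSON Pointers into the adapter's properties. As a minimal sketch, assuming standard RFC 6901 pointer semantics (and skipping the ~0/~1 escape rules, which these paths do not need), a pointer such as /authentication/password selects the value to protect like this:

// Illustrative resolver; the sample `properties` object is hypothetical.
function resolvePointer(obj, pointer) {
  return pointer
    .split('/')
    .slice(1) // drop the leading empty segment
    .reduce((acc, key) => (acc == null ? undefined : acc[key]), obj);
}

const properties = { authentication: { password: 's3cret', token: '' }, mongo: { password: '' } };
console.log(resolvePointer(properties, '/authentication/password')); // 's3cret'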
Binary file
@@ -0,0 +1,10 @@
+ {
+   "version": "0.2.1",
+   "configLines": 473,
+   "scriptLines": 516,
+   "codeLines": 572,
+   "testLines": 963,
+   "testCases": 45,
+   "totalCodeLines": 2051,
+   "wfTasks": 7
+ }
@@ -12,11 +12,13 @@
    },
    "ssl": {
      "enabled": false,
-     "accept_invalid_cert": true,
+     "accept_invalid_cert": false,
      "ca_file": ""
    }
  },
  "brokers": [],
  "groups": [],
- "requestTimeout": 1000000
- }
+ "requestTimeout": 1000000,
+ "logLevel": "none",
+ "timeout": 60000
+ }
package/test/.eslintrc.js CHANGED
@@ -1,16 +1,16 @@
  module.exports = {
    env: {
-     mocha: true,
+     mocha: true
    },
    globals: {
      // Pronghorn globals.
-     log: true,
+     log: true
    },
    rules: {
      // Mocha preference for describe context.
      'prefer-arrow-callback': 'off',
      'func-names': 'off',
      // We want to isolate dependencies.
-     'global-require': 'off',
-   },
+     'global-require': 'off'
+   }
  };
@@ -129,7 +129,7 @@ function runErrorAsserts(data, error, code, origin, displayStr) {
  }

  // require the adapter that we are going to be using
- const MicrosoftSQL = require('../../adapter.js');
+ const MicrosoftSQL = require('../../adapter');

  // begin the testing - these should be pretty well defined between the describe and the it!
  describe('[integration] MicrosoftSQL Adapter Test', () => {
@@ -16,7 +16,9 @@ const execute = require('child_process').execSync;
  const { expect } = require('chai');
  const { use } = require('chai');
  const td = require('testdouble');
+ const Ajv = require('ajv');

+ const ajv = new Ajv({ strictSchema: false, allErrors: true, allowUnionTypes: true });
  const anything = td.matchers.anything();

  // stub and attemptTimeout are used throughout the code so set them here
@@ -122,7 +124,7 @@ function runErrorAsserts(data, error, code, origin, displayStr) {
  }

  // require the adapter that we are going to be using
- const Sybase = require('../../adapter.js');
+ const Sybase = require('../../adapter');

  // delete the .DS_Store directory in entities -- otherwise this will cause errors
  const dirPath = path.join(__dirname, '../../entities/.DS_Store');
@@ -186,23 +188,33 @@ describe('[unit] Sybase Adapter Test', () => {
      });
    });
    it('package.json should be validated', (done) => {
-     const packageDotJson = require('../../package.json');
-     const { PJV } = require('package-json-validator');
-     const options = {
-       warnings: true, // show warnings
-       recommendations: true // show recommendations
-     };
-     const results = PJV.validate(JSON.stringify(packageDotJson), 'npm', options);
-
-     if (results.valid === false) {
-       log.error('The package.json contains the following errors: ');
-       log.error(util.inspect(results));
-       assert.equal(true, results.valid);
-     } else {
-       assert.equal(true, results.valid);
-     }
+     try {
+       const packageDotJson = require('../../package.json');
+       // Define the JSON schema for package.json
+       const packageJsonSchema = {
+         type: 'object',
+         properties: {
+           name: { type: 'string' },
+           version: { type: 'string' }
+           // May need to add more properties as needed
+         },
+         required: ['name', 'version']
+       };
+       const validate = ajv.compile(packageJsonSchema);
+       const isValid = validate(packageDotJson);
+
+       if (isValid === false) {
+         log.error('The package.json contains errors');
+         assert.equal(true, isValid);
+       } else {
+         assert.equal(true, isValid);
+       }

-     done();
+       done();
+     } catch (error) {
+       log.error(`Test Failure: ${error}`);
+       done(error);
+     }
    });
    it('package.json should be customized', (done) => {
      const packageDotJson = require('../../package.json');
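The rewritten test replaces package-json-validator with an Ajv v8 schema check but only logs that package.json "contains errors". As a sketch (not part of the test above), the allErrors option already collects every violation in validate.errors, which could be printed to make a failure actionable:

const Ajv = require('ajv');

const ajv = new Ajv({ strictSchema: false, allErrors: true, allowUnionTypes: true });
const schema = {
  type: 'object',
  properties: { name: { type: 'string' }, version: { type: 'string' } },
  required: ['name', 'version']
};
const validate = ajv.compile(schema);

if (!validate({ name: '@itentialopensource/adapter-db_sybase' })) {
  // Each entry includes instancePath, keyword, and params, e.g. params.missingProperty === 'version'.
  console.error(JSON.stringify(validate.errors, null, 2));
}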
@@ -0,0 +1,206 @@
+ #!/usr/bin/env node
+ /* @copyright Itential, LLC 2019 */
+ /* eslint global-require:warn */
+ /* eslint import/no-dynamic-require:warn */
+ /* eslint prefer-destructuring:warn */
+
+ const path = require('path');
+ const fs = require('fs-extra');
+
+ /**
+  * This script will determine the information about the adapter and store
+  * it into a file in the adapter.
+  */
+
+ /**
+  * get adapter information
+  */
+ function adapterInfo() {
+   // set the base path of the adapter - tool should be one level up in utils
+   let adaptdir = __dirname;
+   const infoRes = {};
+
+   if (adaptdir.endsWith('/utils')) {
+     adaptdir = adaptdir.substring(0, adaptdir.length - 6);
+   }
+   const pack = require(`${adaptdir}/package.json`);
+   infoRes.version = pack.version;
+
+   let configCount = 0;
+   if (fs.existsSync(`${adaptdir}/pronghorn.json`)) {
+     const cFile = fs.readFileSync(`${adaptdir}/pronghorn.json`, 'utf8');
+     configCount += cFile.split('\n').length;
+   } else {
+     console.log('Missing - pronghorn.json');
+   }
+   if (fs.existsSync(`${adaptdir}/propertiesSchema.json`)) {
+     const cFile = fs.readFileSync(`${adaptdir}/propertiesSchema.json`, 'utf8');
+     configCount += cFile.split('\n').length;
+   } else {
+     console.log('Missing - propertiesSchema.json');
+   }
+   if (fs.existsSync(`${adaptdir}/error.json`)) {
+     const cFile = fs.readFileSync(`${adaptdir}/error.json`, 'utf8');
+     configCount += cFile.split('\n').length;
+   } else {
+     console.log('Missing - error.json');
+   }
+   const entitydir = path.join(adaptdir, '/entities');
+   if (fs.existsSync(entitydir) && fs.statSync(entitydir).isDirectory()) {
+     const entities = fs.readdirSync(entitydir);
+     // need to go through each entity in the entities directory
+     for (let e = 0; e < entities.length; e += 1) {
+       if (fs.statSync(`${entitydir}/${entities[e]}`).isDirectory()) {
+         const cfiles = fs.readdirSync(entitydir);
+         for (let c = 0; c < cfiles.length; c += 1) {
+           if (cfiles[c].endsWith('.json')) {
+             const ccFile = fs.readFileSync(`${entitydir}/${entities[e]}/${cfiles[c]}`, 'utf8');
+             configCount += ccFile.split('\n').length;
+           }
+         }
+       }
+     }
+   } else {
+     console.log('Could not find the entities directory');
+   }
+   infoRes.configLines = configCount;
+
+   let scodeCount = 0;
+   if (fs.existsSync(`${adaptdir}/utils/artifactize.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/artifactize.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/artifactize.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/basicGet.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/basicGet.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/basicGet.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/checkMigrate.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/checkMigrate.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/checkMigrate.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/findPath.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/findPath.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/findPath.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/modify.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/modify.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/modify.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/packModificationScript.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/packModificationScript.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/packModificationScript.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/setup.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/setup.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/setup.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/tbScript.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/tbScript.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/tbScript.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/tbUtils.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/tbUtils.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/tbUtils.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/testRunner.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/testRunner.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/testRunner.js');
+   }
+   if (fs.existsSync(`${adaptdir}/utils/troubleshootingAdapter.js`)) {
+     const sFile = fs.readFileSync(`${adaptdir}/utils/troubleshootingAdapter.js`, 'utf8');
+     scodeCount += sFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/troubleshootingAdapter.js');
+   }
+   infoRes.scriptLines = scodeCount;
+
+   let codeCount = 0;
+   if (fs.existsSync(`${adaptdir}/adapter.js`)) {
+     const aFile = fs.readFileSync(`${adaptdir}/adapter.js`, 'utf8');
+     codeCount += aFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/adapter.js');
+   }
+   if (fs.existsSync(`${adaptdir}/adapterBase.js`)) {
+     const aFile = fs.readFileSync(`${adaptdir}/adapterBase.js`, 'utf8');
+     codeCount += aFile.split('\n').length;
+   } else {
+     console.log('Missing - utils/adapterBase.js');
+   }
+   infoRes.codeLines = codeCount;
+
+   let tcodeCount = 0;
+   let ttestCount = 0;
+   if (fs.existsSync(`${adaptdir}/test/integration/adapterTestBasicGet.js`)) {
+     const tFile = fs.readFileSync(`${adaptdir}/test/integration/adapterTestBasicGet.js`, 'utf8');
+     tcodeCount += tFile.split('\n').length;
+     ttestCount += tFile.split('it(\'').length;
+   } else {
+     console.log('Missing - test/integration/adapterTestBasicGet.js');
+   }
+   if (fs.existsSync(`${adaptdir}/test/integration/adapterTestConnectivity.js`)) {
+     const tFile = fs.readFileSync(`${adaptdir}/test/integration/adapterTestConnectivity.js`, 'utf8');
+     tcodeCount += tFile.split('\n').length;
+     ttestCount += tFile.split('it(\'').length;
+   } else {
+     console.log('Missing - test/integration/adapterTestConnectivity.js');
+   }
+   if (fs.existsSync(`${adaptdir}/test/integration/adapterTestIntegration.js`)) {
+     const tFile = fs.readFileSync(`${adaptdir}/test/integration/adapterTestIntegration.js`, 'utf8');
+     tcodeCount += tFile.split('\n').length;
+     ttestCount += tFile.split('it(\'').length;
+   } else {
+     console.log('Missing - test/integration/adapterTestIntegration.js');
+   }
+   if (fs.existsSync(`${adaptdir}/test/unit/adapterBaseTestUnit.js`)) {
+     const tFile = fs.readFileSync(`${adaptdir}/test/unit/adapterBaseTestUnit.js`, 'utf8');
+     tcodeCount += tFile.split('\n').length;
+     ttestCount += tFile.split('it(\'').length;
+   } else {
+     console.log('Missing - test/unit/adapterBaseTestUnit.js');
+   }
+   if (fs.existsSync(`${adaptdir}/test/unit/adapterTestUnit.js`)) {
+     const tFile = fs.readFileSync(`${adaptdir}/test/unit/adapterTestUnit.js`, 'utf8');
+     tcodeCount += tFile.split('\n').length;
+     ttestCount += tFile.split('it(\'').length;
+   } else {
+     console.log('Missing - test/unit/adapterTestUnit.js');
+   }
+   infoRes.testLines = tcodeCount;
+   infoRes.testCases = ttestCount;
+   infoRes.totalCodeLines = scodeCount + codeCount + tcodeCount;
+
+   if (fs.existsSync(`${adaptdir}/pronghorn.json`)) {
+     // Read the entity schema from the file system
+     const phFile = path.join(adaptdir, '/pronghorn.json');
+     const prong = require(phFile);
+     infoRes.wfTasks = prong.methods.length;
+   } else {
+     console.log('Missing - pronghorn.json');
+   }
+
+   console.log(JSON.stringify(infoRes));
+   fs.writeFileSync(`${adaptdir}/report/adapterInfo.json`, JSON.stringify(infoRes, null, 2));
+ }
+
+ adapterInfo();
@@ -1,8 +1,8 @@
  #!/usr/bin/env node
  /* @copyright Itential, LLC 2019 */

- const fs = require('fs-extra');
  const path = require('path');
+ const fs = require('fs-extra');

  async function createBundle(adapterOldDir) {
    // set directories
@@ -1,10 +1,10 @@
  #!/usr/bin/env node
  /* @copyright Itential, LLC 2019 */

- const fs = require('fs-extra');
  const path = require('path');
  const { spawnSync } = require('child_process');
- const { createBundle } = require('./artifactize.js');
+ const fs = require('fs-extra');
+ const { createBundle } = require('./artifactize');

  const nodeEntryPath = path.resolve('.');
  createBundle(nodeEntryPath).then((pathObj) => {
@@ -0,0 +1,20 @@
+ const fs = require('fs');
+
+ /**
+  * This script will uninstall pre-commit or pre-push hooks in case there's ever a need to
+  * commit/push something that has issues
+  */
+
+ const precommitPath = '.git/hooks/pre-commit';
+ const prepushPath = '.git/hooks/pre-push';
+ fs.unlink(precommitPath, (err) => {
+   if (err && err.code !== 'ENOENT') {
+     console.log(`${err.message}`);
+   }
+ });
+
+ fs.unlink(prepushPath, (err) => {
+   if (err && err.code !== 'ENOENT') {
+     console.log(`${err.message}`);
+   }
+ });