node-adlt 0.16.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +30 -0
- package/jsconfig.json +8 -0
- package/lib/download.js +328 -0
- package/lib/index.d.ts +1 -0
- package/lib/index.js +8 -0
- package/lib/postinstall.js +91 -0
- package/package.json +36 -0
- package/test/adltPathNotEmpty.js +4 -0
package/README.md
ADDED
@@ -0,0 +1,30 @@
+ # node-adlt
+
+ This is an npm module for using [adlt](https://github.com/mbehr1/adlt) in a Node project. It's used by the VS Code extension [dlt-logs](https://github.com/mbehr1/dlt-logs).
+
+ ## How it works
+
+ - adlt is built in the [adlt](https://github.com/mbehr1/adlt) repo and published as a GitHub release for each tag in that repo.
+ - This module's postinstall task determines which platform it is being installed on and downloads the matching adlt binary for that platform.
+ - The path to the adlt binary is exported as `adltPath`.
+ - This module's version matches the adlt version 1:1.
+
+ ### Usage example
+
+ ```js
+ const { adltPath } = require('node-adlt');
+
+ // child_process.spawn(adltPath, ...)
+ ```
+
+ ### Dev note
+
+ Runtime dependencies are not allowed in this project. This code runs on postinstall, so any dependencies would only be needed for postinstall, but they would still have to be declared as `dependencies`, not `devDependencies`. Unless they were cleaned up manually, they would then end up in every project that uses this module. `https-proxy-agent` is allowed as an exception because it already ships with VS Code, and `proxy-from-env` because it's very small and much easier to use than to reimplement.
+
+ ### GitHub API Limit note
+
+ You can create a GitHub API token, set the `GITHUB_TOKEN` environment variable to it, and node-adlt will use it when downloading from GitHub. This increases your API rate limit.
+
+ ### License
+
+ This code/project is licensed under the MIT license (as it's mainly a copy+paste from the microsoft/vscode-ripgrep repo/examples). The installed adlt binary, however, is licensed under CC-BY-NC-SA-4.0, so that license is declared here to avoid confusion when installing it in a Node project.
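To make the README's usage example concrete, the sketch below (not part of the package) spawns the downloaded binary and forwards its output; the `--version` argument is only an assumed adlt CLI flag used for illustration.

```js
// Illustrative sketch, not shipped with node-adlt. Assumes the package is
// installed and its postinstall step succeeded; '--version' is a hypothetical
// adlt CLI argument used only for demonstration.
const { spawn } = require('child_process');
const { adltPath } = require('node-adlt');

const adlt = spawn(adltPath, ['--version']);
adlt.stdout.on('data', (chunk) => process.stdout.write(chunk));
adlt.stderr.on('data', (chunk) => process.stderr.write(chunk));
adlt.on('close', (code) => console.log(`adlt exited with code ${code}`));
```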
package/jsconfig.json
ADDED
package/lib/download.js
ADDED
@@ -0,0 +1,328 @@
+ // @ts-check
+ 'use strict';
+
+ const path = require('path');
+ const fs = require('fs');
+ const os = require('os');
+ const https = require('https');
+ const util = require('util');
+ const url = require('url');
+ const URL = url.URL;
+ const child_process = require('child_process');
+ const proxy_from_env = require('proxy-from-env');
+
+ const packageVersion = require('../package.json').version;
+ const tmpDir = path.join(os.tmpdir(), `node-adlt-cache-${packageVersion}`);
+
+ const fsUnlink = util.promisify(fs.unlink);
+ const fsExists = util.promisify(fs.exists);
+ const fsMkdir = util.promisify(fs.mkdir);
+
+ const isWindows = os.platform() === 'win32';
+
+ const REPO = 'mbehr1/adlt';
+
+ function isGithubUrl(_url) {
+     return url.parse(_url).hostname === 'api.github.com';
+ }
+
+ function downloadWin(url, dest, opts) {
+     return new Promise((resolve, reject) => {
+         let userAgent;
+         if (opts.headers['user-agent']) {
+             userAgent = opts.headers['user-agent'];
+             delete opts.headers['user-agent'];
+         }
+         const headerValues = Object.keys(opts.headers)
+             .map(key => `\\"${key}\\"=\\"${opts.headers[key]}\\"`)
+             .join('; ');
+         const headers = `@{${headerValues}}`;
+         console.log('Downloading with Invoke-WebRequest');
+         dest = sanitizePathForPowershell(dest);
+         let iwrCmd = `[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; Invoke-WebRequest -URI ${url} -UseBasicParsing -OutFile ${dest} -Headers ${headers}`;
+         if (userAgent) {
+             iwrCmd += ' -UserAgent ' + userAgent;
+         }
+         if (opts.proxy) {
+             iwrCmd += ' -Proxy ' + opts.proxy;
+
+             try {
+                 const { username, password } = new URL(opts.proxy);
+                 if (username && password) {
+                     const decodedPassword = decodeURIComponent(password);
+                     iwrCmd += ` -ProxyCredential (New-Object PSCredential ('${username}', (ConvertTo-SecureString '${decodedPassword}' -AsPlainText -Force)))`;
+                 }
+             } catch (err) {
+                 reject(err);
+             }
+         }
+
+         iwrCmd = `powershell "${iwrCmd}"`;
+
+         child_process.exec(iwrCmd, err => {
+             if (err) {
+                 reject(err);
+                 return;
+             }
+             resolve();
+         });
+     });
+ }
+
+ function download(_url, dest, opts) {
+
+     const proxy = proxy_from_env.getProxyForUrl(url.parse(_url));
+     if (proxy !== '') {
+         var HttpsProxyAgent = require('https-proxy-agent');
+         opts = {
+             ...opts,
+             "agent": HttpsProxyAgent(proxy),
+             proxy
+         };
+     }
+
+     if (isWindows) {
+         // This alternative strategy shouldn't be necessary but sometimes on Windows the file does not get closed,
+         // so unzipping it fails, and I don't know why.
+         return downloadWin(_url, dest, opts);
+     }
+
+     if (opts.headers && opts.headers.authorization && !isGithubUrl(_url)) {
+         delete opts.headers.authorization;
+     }
+
+     return new Promise((resolve, reject) => {
+         console.log(`Download options: ${JSON.stringify(opts)}`);
+         const outFile = fs.createWriteStream(dest);
+         const mergedOpts = {
+             ...url.parse(_url),
+             ...opts
+         };
+         https.get(mergedOpts, response => {
+             console.log('statusCode: ' + response.statusCode);
+             if (response.statusCode === 302) {
+                 console.log('Following redirect to: ' + response.headers.location);
+                 return download(response.headers.location, dest, opts)
+                     .then(resolve, reject);
+             } else if (response.statusCode !== 200) {
+                 reject(new Error('Download failed with ' + response.statusCode));
+                 return;
+             }
+
+             response.pipe(outFile);
+             outFile.on('finish', () => {
+                 resolve();
+             });
+         }).on('error', async err => {
+             await fsUnlink(dest);
+             reject(err);
+         });
+     });
+ }
+
+ function get(_url, opts) {
+     console.log(`GET ${_url}`);
+
+     const proxy = proxy_from_env.getProxyForUrl(url.parse(_url));
+     if (proxy !== '') {
+         var HttpsProxyAgent = require('https-proxy-agent');
+         opts = {
+             ...opts,
+             "agent": HttpsProxyAgent(proxy)
+         };
+     }
+
+     return new Promise((resolve, reject) => {
+         let result = '';
+         opts = {
+             ...url.parse(_url),
+             ...opts
+         };
+         https.get(opts, response => {
+             if (response.statusCode !== 200) {
+                 reject(new Error('Request failed: ' + response.statusCode));
+             }
+
+             response.on('data', d => {
+                 result += d.toString();
+             });
+
+             response.on('end', () => {
+                 resolve(result);
+             });
+
+             response.on('error', e => {
+                 reject(e);
+             });
+         });
+     });
+ }
+
+ function getApiUrl(repo, tag) {
+     return `https://api.github.com/repos/${repo}/releases/tags/${tag}`;
+ }
+
+ /**
+  * @param {{ force: boolean; token: string; version: string; }} opts
+  * @param {string} assetName
+  * @param {string} downloadFolder
+  */
+ async function getAssetFromGithubApi(opts, assetName, downloadFolder) {
+     const assetDownloadPath = path.join(downloadFolder, assetName);
+
+     // We can just use the cached binary
+     if (!opts.force && await fsExists(assetDownloadPath)) {
+         console.log('Using cached download: ' + assetDownloadPath);
+         return assetDownloadPath;
+     }
+
+     const downloadOpts = {
+         headers: {
+             'user-agent': 'node-adlt'
+         }
+     };
+
+     if (opts.token) {
+         downloadOpts.headers.authorization = `token ${opts.token}`;
+     }
+
+     console.log(`Finding release for ${opts.version}`);
+     const release = await get(getApiUrl(REPO, opts.version), downloadOpts);
+     let jsonRelease;
+     try {
+         jsonRelease = JSON.parse(release);
+     } catch (e) {
+         throw new Error('Malformed API response: ' + e.stack);
+     }
+
+     if (!jsonRelease.assets) {
+         throw new Error('Bad API response: ' + JSON.stringify(release));
+     }
+
+     const asset = jsonRelease.assets.find(a => a.name === assetName);
+     if (!asset) {
+         throw new Error('Asset not found with name: ' + assetName);
+     }
+
+     console.log(`Downloading from ${asset.url}`);
+     console.log(`Downloading to ${assetDownloadPath}`);
+
+     downloadOpts.headers.accept = 'application/octet-stream';
+     await download(asset.url, assetDownloadPath, downloadOpts);
+ }
+
+ function unzipWindows(zipPath, destinationDir) {
+     return new Promise((resolve, reject) => {
+         zipPath = sanitizePathForPowershell(zipPath);
+         destinationDir = sanitizePathForPowershell(destinationDir);
+         const expandCmd = 'powershell -ExecutionPolicy Bypass -Command Expand-Archive ' + ['-Path', zipPath, '-DestinationPath', destinationDir, '-Force'].join(' ');
+         child_process.exec(expandCmd, (err, _stdout, stderr) => {
+             if (err) {
+                 reject(err);
+                 return;
+             }
+
+             if (stderr) {
+                 console.log(stderr);
+                 reject(new Error(stderr));
+                 return;
+             }
+
+             console.log('Expand-Archive completed');
+             resolve();
+         });
+     });
+ }
+
+ // Handle whitespace in filepath as powershell split's path with whitespaces
+ function sanitizePathForPowershell(path) {
+     path = path.replace(/ /g, '` '); // replace whitespace with "` " as solution provided here https://stackoverflow.com/a/18537344/7374562
+     return path;
+ }
+
+ function untar(zipPath, destinationDir) {
+     return new Promise((resolve, reject) => {
+         const unzipProc = child_process.spawn('tar', ['xvf', zipPath, '-C', destinationDir], { stdio: 'inherit' });
+         unzipProc.on('error', err => {
+             reject(err);
+         });
+         unzipProc.on('close', code => {
+             console.log(`tar xvf exited with ${code}`);
+             if (code !== 0) {
+                 reject(new Error(`tar xvf exited with ${code}`));
+                 return;
+             }
+
+             resolve();
+         });
+     });
+ }
+
+ async function unzipAdlt(zipPath, destinationDir) {
+     if (isWindows) {
+         await unzipWindows(zipPath, destinationDir);
+     } else {
+         await untar(zipPath, destinationDir);
+     }
+
+     // the binary is extracted into a folder like adlt-vx.y.z-target/
+     // determine that folder name:
+     const dirEntries = fs.readdirSync(destinationDir);
+     let adltDir = dirEntries.find((dirEntry) => dirEntry.startsWith("adlt-v"));
+
+     const expectedName = path.join(destinationDir, adltDir, 'adlt');
+     if (await fsExists(expectedName)) {
+         return expectedName;
+     }
+
+     if (await fsExists(expectedName + '.exe')) {
+         return expectedName + '.exe';
+     }
+
+     throw new Error(`Expecting adlt or adlt.exe unzipped into ${destinationDir}, didn't find one.`);
+ }
+
+ module.exports = async opts => {
+     if (!opts.version) {
+         return Promise.reject(new Error('Missing version'));
+     }
+
+     if (!opts.target) {
+         return Promise.reject(new Error('Missing target'));
+     }
+
+     const extension = isWindows ? '.zip' : '.tar.gz';
+     const assetName = ['adlt', opts.version, opts.target].join('-') + extension;
+
+     if (!await fsExists(tmpDir)) {
+         await fsMkdir(tmpDir);
+     }
+
+     const assetDownloadPath = path.join(tmpDir, assetName);
+     try {
+         await getAssetFromGithubApi(opts, assetName, tmpDir)
+     } catch (e) {
+         console.log('Deleting invalid download cache');
+         try {
+             await fsUnlink(assetDownloadPath);
+         } catch (e) { }
+
+         throw e;
+     }
+
+     console.log(`Unzipping to ${opts.destDir}`);
+     try {
+         const destinationPath = await unzipAdlt(assetDownloadPath, opts.destDir);
+         if (!isWindows) {
+             await util.promisify(fs.chmod)(destinationPath, '755');
+         }
+     } catch (e) {
+         console.log('Deleting invalid download');
+
+         try {
+             await fsUnlink(assetDownloadPath);
+         } catch (e) { }
+
+         throw e;
+     }
+ };
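For orientation, the function exported by lib/download.js above can also be called directly with the same options object that postinstall.js builds; the following is a minimal sketch, and the concrete version, target, and destination values are illustrative assumptions rather than anything prescribed by the code.

```js
// Minimal sketch of calling lib/download.js directly, mirroring postinstall.js.
// The version/target/destDir values below are illustrative assumptions.
const path = require('path');
const download = require('./lib/download');

download({
    version: 'v0.16.1',                   // GitHub release tag to fetch
    target: 'x86_64-unknown-linux-musl',  // target triple used in the asset name
    destDir: path.join(__dirname, 'bin'), // folder the archive is unpacked into
    token: process.env.GITHUB_TOKEN,      // optional, raises the GitHub API limit
    force: false                          // true bypasses the tmp-dir download cache
}).catch(err => console.error(err));
```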
package/lib/index.d.ts
ADDED
@@ -0,0 +1 @@
+ export declare const adltPath: string;
package/lib/index.js
ADDED
@@ -0,0 +1,8 @@
+ 'use strict';
+
+ const path = require('path');
+ const fs = require('fs');
+ const dirEntries = fs.readdirSync(path.join(__dirname, "../bin"));
+ let adltDir = dirEntries.find((dirEntry) => dirEntry.startsWith("adlt-v"));
+
+ module.exports.adltPath = path.join(__dirname, adltDir, `adlt${process.platform === 'win32' ? '.exe' : ''}`);
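The file list above mentions package/test/adltPathNotEmpty.js, but its content is not included in this diff; a sanity check in that spirit could look like the following purely hypothetical sketch.

```js
// Hypothetical sanity check in the spirit of test/adltPathNotEmpty.js
// (the actual test content is not shown in this diff).
const assert = require('assert');
const { adltPath } = require('../lib/index');

assert.ok(typeof adltPath === 'string' && adltPath.length > 0, 'adltPath must not be empty');
console.log('adltPath =', adltPath);
```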
package/lib/postinstall.js
ADDED
@@ -0,0 +1,91 @@
+ // @ts-check
+ 'use strict';
+
+ const os = require('os');
+ const fs = require('fs');
+ const path = require('path');
+ const util = require('util');
+ const child_process = require('child_process');
+
+ const download = require('./download');
+
+ const fsExists = util.promisify(fs.exists);
+ const mkdir = util.promisify(fs.mkdir);
+ const exec = util.promisify(child_process.exec);
+
+ const forceInstall = process.argv.includes('--force');
+ if (forceInstall) {
+     console.log('--force, ignoring caches');
+ }
+
+ // we do use this package version as well for the binary
+ const { version } = require('./../package.json');
+ const BIN_PATH = path.join(__dirname, '../bin');
+
+ process.on('unhandledRejection', (reason, promise) => {
+     console.log('Unhandled rejection: ', promise, 'reason:', reason);
+ });
+
+ async function isMusl() {
+     let stderr;
+     try {
+         stderr = (await exec('ldd --version')).stderr;
+     } catch (err) {
+         stderr = err.stderr;
+     }
+     if (stderr.indexOf('musl') > -1) {
+         return true;
+     }
+     return false;
+ }
+
+ async function getTarget() {
+     const arch = process.env.npm_config_arch || os.arch();
+
+     switch (os.platform()) {
+         case 'darwin':
+             return arch === 'arm64' ? 'aarch64-apple-darwin' :
+                 'x86_64-apple-darwin';
+         case 'win32':
+             return arch === 'x64' ? 'x86_64-pc-windows-msvc' :
+                 arch === 'arm' ? 'aarch64-pc-windows-msvc' :
+                 'i686-pc-windows-msvc';
+         case 'linux':
+             return arch === 'x64' ? 'x86_64-unknown-linux-musl' :
+                 arch === 'arm' ? 'arm-unknown-linux-gnueabihf' :
+                 arch === 'armv7l' ? 'arm-unknown-linux-gnueabihf' :
+                 arch === 'arm64' ? await isMusl() ? 'aarch64-unknown-linux-musl' : 'aarch64-unknown-linux-gnu' :
+                 arch === 'ppc64' ? 'powerpc64le-unknown-linux-gnu' :
+                 arch === 's390x' ? 's390x-unknown-linux-gnu' :
+                 'i686-unknown-linux-musl'
+         default: throw new Error('Unknown platform: ' + os.platform());
+     }
+ }
+
+ async function main() {
+     const binExists = await fsExists(BIN_PATH);
+     if (!forceInstall && binExists) {
+         console.log('bin/ folder already exists, exiting');
+         process.exit(0);
+     }
+
+     if (!binExists) {
+         await mkdir(BIN_PATH);
+     }
+
+     const opts = {
+         version: `v${version}`,
+         token: process.env['GITHUB_TOKEN'],
+         target: await getTarget(),
+         destDir: BIN_PATH,
+         force: forceInstall
+     };
+     try {
+         await download(opts);
+     } catch (err) {
+         console.error(`Downloading adlt failed: ${err.stack}`);
+         process.exit(1);
+     }
+ }
+
+ main();
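Because postinstall.js reads `--force` from process.argv and the target architecture from the npm_config_arch environment variable (see getTarget above), the download can be repeated manually for another architecture; the sketch below works under those assumptions, with 'arm64' only as an example value.

```js
// Illustrative sketch: re-run the bundled postinstall step for a different
// architecture, ignoring cached results. The 'arm64' value is just an example.
const { execFileSync } = require('child_process');

execFileSync(process.execPath, ['./lib/postinstall.js', '--force'], {
    stdio: 'inherit',
    env: { ...process.env, npm_config_arch: 'arm64' } // consumed by getTarget()
});
```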
package/package.json
ADDED
@@ -0,0 +1,36 @@
+ {
+   "name": "node-adlt",
+   "version": "0.16.1",
+   "description": "A npm module for using adlt binary in a Node project. Used e.g. by VS Code extension dlt-logs.",
+   "main": "lib/index.js",
+   "typings": "lib/index.d.ts",
+   "scripts": {
+     "postinstall": "node ./lib/postinstall.js",
+     "test": "echo \"Error: no test specified\" && exit 1"
+   },
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/mbehr1/node-adlt"
+   },
+   "keywords": [
+     "dlt",
+     "binaries",
+     "log+trace"
+   ],
+   "author": "Matthias Behr",
+   "license": "CC-BY-NC-SA-4.0",
+   "bugs": {
+     "url": "https://github.com/mbehr1/node-adlt/issues"
+   },
+   "homepage": "https://github.com/mbehr1/node-adlt#readme",
+   "dependencies": {
+     "https-proxy-agent": "^5.0.0",
+     "proxy-from-env": "^1.1.0"
+   },
+   "overrides": {
+     "agent-base": "^6.0.2"
+   },
+   "devDependencies": {
+     "@types/node": "^10.12.0"
+   }
+ }