@jbrowse/cli 3.7.0 → 4.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -0
- package/bin/run +0 -0
- package/bundle/index.js +2280 -2722
- package/dist/bin.js +2 -2
- package/dist/commands/{add-assembly.js → add-assembly/index.js} +10 -10
- package/dist/commands/{assembly-utils.js → add-assembly/utils.js} +69 -105
- package/dist/commands/add-connection.js +10 -15
- package/dist/commands/add-track-json.js +26 -31
- package/dist/commands/add-track-utils/track-config.js +3 -15
- package/dist/commands/add-track-utils/validators.js +4 -9
- package/dist/commands/add-track.js +23 -25
- package/dist/commands/{admin-server.js → admin-server/index.js} +9 -9
- package/dist/commands/{admin-server-utils.js → admin-server/utils.js} +7 -16
- package/dist/commands/create.js +8 -8
- package/dist/commands/{make-pif.js → make-pif/index.js} +11 -17
- package/dist/commands/{make-pif-utils → make-pif}/pif-generator.js +6 -6
- package/dist/commands/{sort-bed-utils/process-utils.js → process-utils.js} +0 -10
- package/dist/commands/remove-track.js +6 -11
- package/dist/commands/set-default-session.js +8 -12
- package/dist/commands/shared/config-operations.js +37 -0
- package/dist/commands/shared/sort-utils.js +57 -0
- package/dist/commands/shared/validators.js +18 -0
- package/dist/commands/sort-bed.js +13 -24
- package/dist/commands/sort-gff.js +13 -24
- package/dist/commands/text-index/adapter-utils.js +43 -0
- package/dist/commands/text-index/aggregate.js +52 -0
- package/dist/commands/{text-index.js → text-index/command.js} +13 -6
- package/dist/commands/text-index/config-utils.js +134 -0
- package/dist/commands/{text-index-utils → text-index}/file-list.js +12 -15
- package/dist/commands/text-index/index.js +11 -0
- package/dist/commands/{text-index-utils → text-index}/indexing-utils.js +30 -20
- package/dist/commands/text-index/per-track.js +54 -0
- package/dist/commands/track-utils.js +33 -33
- package/dist/commands/upgrade.js +8 -8
- package/dist/index.js +39 -27
- package/dist/types/common.js +9 -8
- package/dist/types/gff3Adapter.js +17 -48
- package/dist/types/streamUtils.js +66 -0
- package/dist/types/vcfAdapter.js +17 -54
- package/dist/util.js +14 -7
- package/dist/utils.js +10 -5
- package/package.json +14 -17
- package/dist/commands/make-pif-utils/validators.js +0 -22
- package/dist/commands/sort-bed-utils/constants.js +0 -12
- package/dist/commands/sort-bed-utils/sort-utils.js +0 -24
- package/dist/commands/sort-bed-utils/validators.js +0 -22
- package/dist/commands/sort-gff-utils/constants.js +0 -13
- package/dist/commands/sort-gff-utils/process-utils.js +0 -23
- package/dist/commands/sort-gff-utils/sort-utils.js +0 -55
- package/dist/commands/sort-gff-utils/validators.js +0 -21
- package/dist/commands/text-index-utils/adapter-utils.js +0 -63
- package/dist/commands/text-index-utils/aggregate.js +0 -87
- package/dist/commands/text-index-utils/config-utils.js +0 -59
- package/dist/commands/text-index-utils/index.js +0 -9
- package/dist/commands/text-index-utils/per-track.js +0 -65
- /package/dist/commands/{make-pif-utils → make-pif}/cigar-utils.js +0 -0
- /package/dist/commands/{make-pif-utils → make-pif}/file-utils.js +0 -0
- /package/dist/commands/{text-index-utils → text-index}/validators.js +0 -0
package/dist/commands/upgrade.js
CHANGED
@@ -8,8 +8,8 @@ const fs_1 = __importDefault(require("fs"));
 const path_1 = __importDefault(require("path"));
 const util_1 = require("util");
 const decompress_1 = __importDefault(require("decompress"));
-const
-const
+const fetchWithProxy_ts_1 = __importDefault(require("../fetchWithProxy.js"));
+const utils_ts_1 = require("../utils.js");
 const description = 'Upgrades JBrowse 2 to latest version';
 const examples = [
 '# Upgrades current directory to latest jbrowse release',
@@ -74,7 +74,7 @@ async function run(args) {
 const argsPath = positionals[0];
 const { clean, listVersions, tag, url, branch, nightly } = runFlags;
 if (runFlags.help) {
-(0,
+(0, utils_ts_1.printHelp)({
 options,
 examples,
 usage: 'jbrowse upgrade [localPath] [options]',
@@ -83,7 +83,7 @@ async function run(args) {
 return;
 }
 if (listVersions) {
-const versions = (await (0,
+const versions = (await (0, utils_ts_1.fetchGithubVersions)()).map(v => v.tag_name);
 console.log(`All JBrowse versions:\n${versions.join('\n')}`);
 process.exit(0);
 }
@@ -95,11 +95,11 @@ async function run(args) {
 existing jbrowse 2 installation?`);
 }
 const locationUrl = url ||
-(nightly ? await (0,
-(branch ? await (0,
-(tag ? await (0,
+(nightly ? await (0, utils_ts_1.getBranch)('main') : '') ||
+(branch ? await (0, utils_ts_1.getBranch)(branch) : '') ||
+(tag ? await (0, utils_ts_1.getTag)(tag) : await (0, utils_ts_1.getLatest)());
 console.log(`Fetching ${locationUrl}...`);
-const response = await (0,
+const response = await (0, fetchWithProxy_ts_1.default)(locationUrl);
 if (!response.ok) {
 throw new Error(`HTTP ${response.status} fetching ${locationUrl}: ${response.statusText}`);
 }
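The `url`/`nightly`/`branch`/`tag` flags resolve to a download URL through an `||` chain, so an explicit `--url` always wins, `--nightly` beats `--branch`, and the plain default falls through to the latest release. A minimal sketch of that precedence, with placeholder resolvers standing in for the real `getBranch`/`getTag`/`getLatest` in `utils.js`:

```js
// Illustrative stand-ins; the real resolvers hit the GitHub releases API.
const getBranch = async name => `https://example.com/branch/${name}.zip`;
const getTag = async tag => `https://example.com/tag/${tag}.zip`;
const getLatest = async () => 'https://example.com/latest.zip';

async function resolveLocationUrl({ url, nightly, branch, tag }) {
  // Mirrors the || chain in upgrade.js: empty strings fall through to the
  // next alternative, so exactly one resolver supplies the final URL.
  return (
    url ||
    (nightly ? await getBranch('main') : '') ||
    (branch ? await getBranch(branch) : '') ||
    (tag ? await getTag(tag) : await getLatest())
  );
}

resolveLocationUrl({ nightly: true }).then(console.log); // .../branch/main.zip
resolveLocationUrl({}).then(console.log); // .../latest.zip
```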
package/dist/index.js
CHANGED
@@ -9,33 +9,33 @@ const fs_1 = __importDefault(require("fs"));
 const path_1 = __importDefault(require("path"));
 const util_1 = require("util");
 // Command imports
-const
-const
-const
-const
-const
-const
-const
-const
-const
-const
-const
-const
-const
+const index_ts_1 = require("./commands/add-assembly/index.js");
+const add_connection_ts_1 = require("./commands/add-connection.js");
+const add_track_json_ts_1 = require("./commands/add-track-json.js");
+const add_track_ts_1 = require("./commands/add-track.js");
+const index_ts_2 = require("./commands/admin-server/index.js");
+const create_ts_1 = require("./commands/create.js");
+const index_ts_3 = require("./commands/make-pif/index.js");
+const remove_track_ts_1 = require("./commands/remove-track.js");
+const set_default_session_ts_1 = require("./commands/set-default-session.js");
+const sort_bed_ts_1 = require("./commands/sort-bed.js");
+const sort_gff_ts_1 = require("./commands/sort-gff.js");
+const index_ts_4 = require("./commands/text-index/index.js");
+const upgrade_ts_1 = require("./commands/upgrade.js");
 const commands = {
-create:
-'add-assembly':
-'add-track':
-'text-index':
-'admin-server':
-upgrade:
-'make-pif':
-'sort-gff':
-'sort-bed':
-'add-connection':
-'add-track-json':
-'remove-track':
-'set-default-session':
+create: create_ts_1.run,
+'add-assembly': index_ts_1.run,
+'add-track': add_track_ts_1.run,
+'text-index': index_ts_4.run,
+'admin-server': index_ts_2.run,
+upgrade: upgrade_ts_1.run,
+'make-pif': index_ts_3.run,
+'sort-gff': sort_gff_ts_1.run,
+'sort-bed': sort_bed_ts_1.run,
+'add-connection': add_connection_ts_1.run,
+'add-track-json': add_track_json_ts_1.run,
+'remove-track': remove_track_ts_1.run,
+'set-default-session': set_default_session_ts_1.run,
 };
 async function main(args) {
 try {
@@ -84,7 +84,19 @@ async function main(args) {
 await command(commandArgs);
 }
 catch (error) {
-
+const message = error instanceof Error ? error.message : String(error);
+const code = error instanceof Error ? error.code : undefined;
+console.error('Error:', message);
+if (code === 'EPIPE' || code === 'ENOSPC' || message.includes('EPIPE')) {
+console.error(`
+This error may be caused by running out of space in the temporary directory.
+Try setting a custom TMPDIR with more available space:
+
+mkdir mytmpdir
+TMPDIR=mytmpdir jbrowse text-index ...
+
+`);
+}
 process.exit(1);
 }
 }
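The widened catch block now distinguishes `EPIPE`/`ENOSPC` failures (commonly hit when `text-index` exhausts temporary disk space) and prints the `TMPDIR` workaround. A small sketch of how a Node error code would trip that branch; `fakeIndexingStep` is a made-up stand-in, not part of the CLI:

```js
// Hypothetical failing step; real errors would come from the filesystem or a
// downstream pipe during indexing.
async function fakeIndexingStep() {
  const err = new Error('ENOSPC: no space left on device, write');
  err.code = 'ENOSPC';
  throw err;
}

async function main() {
  try {
    await fakeIndexingStep();
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    const code = error instanceof Error ? error.code : undefined;
    console.error('Error:', message);
    if (code === 'EPIPE' || code === 'ENOSPC' || message.includes('EPIPE')) {
      // Same hint the CLI prints, e.g. `TMPDIR=mytmpdir jbrowse text-index ...`
      console.error('Consider setting TMPDIR to a directory with more space');
    }
    process.exitCode = 1;
  }
}

main();
```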
package/dist/types/common.js
CHANGED
@@ -10,9 +10,9 @@ exports.supported = supported;
 exports.generateMeta = generateMeta;
 const fs_1 = __importDefault(require("fs"));
 const path_1 = __importDefault(require("path"));
-const
+const fetchWithProxy_ts_1 = __importDefault(require("../fetchWithProxy.js"));
 async function createRemoteStream(urlIn) {
-const res = await (0,
+const res = await (0, fetchWithProxy_ts_1.default)(urlIn);
 if (!res.ok) {
 throw new Error(`Failed to fetch ${urlIn} status ${res.status} ${await res.text()}`);
 }
@@ -105,13 +105,14 @@ function guessAdapterFromFileName(filePath) {
 throw new Error(`Unsupported file type ${filePath}`);
 }
 }
+const SUPPORTED_ADAPTERS = new Set([
+'Gff3TabixAdapter',
+'VcfTabixAdapter',
+'Gff3Adapter',
+'VcfAdapter',
+]);
 function supported(type = '') {
-return
-'Gff3TabixAdapter',
-'VcfTabixAdapter',
-'Gff3Adapter',
-'VcfAdapter',
-].includes(type);
+return SUPPORTED_ADAPTERS.has(type);
 }
 async function generateMeta({ trackConfigs, attributes, outLocation, name, typesToExclude, assemblyNames, }) {
 const tracks = trackConfigs.map(({ adapter, textSearching, trackId }) => ({
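The adapter whitelist moves from an inline array rebuilt on every call to a module-level `Set`, so `supported()` is a constant-time membership check. A quick illustration of the behaviour (adapter names taken from the diff; the call site is hypothetical):

```js
const SUPPORTED_ADAPTERS = new Set([
  'Gff3TabixAdapter',
  'VcfTabixAdapter',
  'Gff3Adapter',
  'VcfAdapter',
]);

function supported(type = '') {
  return SUPPORTED_ADAPTERS.has(type);
}

console.log(supported('Gff3TabixAdapter')); // true
console.log(supported('BamAdapter'));       // false - not a text-indexable adapter type
```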
package/dist/types/gff3Adapter.js
CHANGED
@@ -1,35 +1,18 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.indexGff3 = indexGff3;
-const
-const
-const cli_progress_1 = require("cli-progress");
-const util_1 = require("../util");
+const util_ts_1 = require("../util.js");
+const streamUtils_ts_1 = require("./streamUtils.js");
 async function* indexGff3({ config, attributesToIndex, inLocation, outLocation, typesToExclude, quiet, }) {
 const { trackId } = config;
-
-
-
-
-
-}, cli_progress_1.Presets.shades_classic);
-let receivedBytes = 0;
-const { totalBytes, stream } = await (0, util_1.getLocalOrRemoteStream)(inLocation, outLocation);
-if (!quiet) {
-progressBar.start(totalBytes, 0);
-}
-// @ts-expect-error
-stream.on('data', chunk => {
-receivedBytes += chunk.length;
-progressBar.update(receivedBytes);
-});
-const rl = readline_1.default.createInterface({
-// @ts-expect-error
-input: /.b?gz$/.exec(inLocation) ? stream.pipe((0, zlib_1.createGunzip)()) : stream,
+const { rl, progressBar } = await (0, streamUtils_ts_1.createIndexingStream)({
+inLocation,
+outLocation,
+trackId,
+quiet,
 });
+const excludeSet = new Set(typesToExclude);
+const encodedTrackId = encodeURIComponent(trackId);
 for await (const line of rl) {
 if (!line.trim()) {
 continue;
@@ -41,31 +24,17 @@ async function* indexGff3({ config, attributesToIndex, inLocation, outLocation,
 break;
 }
 const [seq_id, , type, start, end, , , , col9] = line.split('\t');
-
-
-// turns gff3 attrs into a map, and converts the arrays into space
-// separated strings
-const col9attrs = Object.fromEntries(col9
-.split(';')
-.map(f => f.trim())
-.filter(f => !!f)
-.map(f => f.split('='))
-.map(([key, val]) => [
-key.trim(),
-val
-? (0, util_1.decodeURIComponentNoThrow)(val).trim().split(',').join(' ')
-: undefined,
-]));
+if (!excludeSet.has(type)) {
+const col9attrs = (0, streamUtils_ts_1.parseAttributes)(col9, util_ts_1.decodeURIComponentNoThrow);
 const attrs = attributesToIndex
 .map(attr => col9attrs[attr])
 .filter((f) => !!f);
-if (attrs.length) {
-const
-
-
-
-
-yield `${record} ${[...new Set(attrs)].join(' ')}\n`;
+if (attrs.length > 0) {
+const locStr = `${seq_id}:${start}..${end}`;
+const encodedAttrs = attrs.map(a => `"${encodeURIComponent(a)}"`);
+const record = `["${encodeURIComponent(locStr)}"|"${encodedTrackId}"|${encodedAttrs.join('|')}]`;
+const uniqueAttrs = [...new Set(attrs)];
+yield `${record} ${uniqueAttrs.join(' ')}\n`;
 }
 }
 }
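Each qualifying GFF3 feature now yields one line for the trix indexer: a pipe-delimited, URI-encoded record in brackets followed by the searchable attribute values. A worked example with made-up values (trackId `genes`, `attributesToIndex = ['Name', 'ID']`):

```js
// Input GFF3 line (illustrative):
// chr1  example  gene  100  200  .  +  .  ID=gene001;Name=BRCA1
//
// With trackId 'genes' and attributesToIndex ['Name', 'ID'], the generator yields:
// ["chr1%3A100..200"|"genes"|"BRCA1"|"gene001"] BRCA1 gene001
const locStr = 'chr1:100..200';
const attrs = ['BRCA1', 'gene001'];
const record = `["${encodeURIComponent(locStr)}"|"genes"|${attrs
  .map(a => `"${encodeURIComponent(a)}"`)
  .join('|')}]`;
console.log(`${record} ${[...new Set(attrs)].join(' ')}`);
```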
package/dist/types/streamUtils.js
ADDED
@@ -0,0 +1,66 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createIndexingStream = createIndexingStream;
+exports.parseAttributes = parseAttributes;
+const cli_progress_1 = require("cli-progress");
+const util_ts_1 = require("../util.js");
+async function* readLines(reader, progressBar) {
+const decoder = new TextDecoder();
+let buffer = '';
+let receivedBytes = 0;
+try {
+let result = await reader.read();
+while (!result.done) {
+receivedBytes += result.value.length;
+progressBar.update(receivedBytes);
+buffer += decoder.decode(result.value, { stream: true });
+const lines = buffer.split('\n');
+buffer = lines.pop();
+for (const line of lines) {
+yield line;
+}
+result = await reader.read();
+}
+}
+finally {
+reader.releaseLock();
+}
+buffer += decoder.decode();
+if (buffer) {
+yield buffer;
+}
+}
+async function createIndexingStream({ inLocation, outLocation, trackId, quiet, }) {
+const progressBar = new cli_progress_1.SingleBar({
+format: `{bar} ${trackId} {percentage}% | ETA: {eta}s`,
+etaBuffer: 2000,
+}, cli_progress_1.Presets.shades_classic);
+const { totalBytes, stream } = await (0, util_ts_1.getLocalOrRemoteStream)(inLocation, outLocation);
+if (!quiet) {
+progressBar.start(totalBytes, 0);
+}
+if (!stream) {
+throw new Error(`Failed to fetch ${inLocation}: no response body`);
+}
+const inputStream = /.b?gz$/.exec(inLocation)
+? // @ts-ignore root tsconfig includes DOM lib which has incompatible stream types
+stream.pipeThrough(new DecompressionStream('gzip'))
+: stream;
+const rl = readLines(inputStream.getReader(), progressBar);
+return { rl, progressBar };
+}
+function parseAttributes(infoString, decodeFunc) {
+const result = {};
+for (const field of infoString.split(';')) {
+const trimmed = field.trim();
+if (trimmed) {
+const eqIdx = trimmed.indexOf('=');
+if (eqIdx !== -1) {
+const key = trimmed.slice(0, eqIdx).trim();
+const val = trimmed.slice(eqIdx + 1);
+result[key] = decodeFunc(val).trim().replaceAll(',', ' ');
+}
+}
+}
+return result;
+}
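`parseAttributes` replaces the duplicated `Object.fromEntries` chains in the GFF3 and VCF adapters: it splits on `;`, decodes each value with the supplied decoder, and flattens comma-separated lists into space-separated strings. A quick illustration with invented values (the function body is copied from the new module so the snippet runs standalone):

```js
function parseAttributes(infoString, decodeFunc) {
  const result = {};
  for (const field of infoString.split(';')) {
    const trimmed = field.trim();
    if (trimmed) {
      const eqIdx = trimmed.indexOf('=');
      if (eqIdx !== -1) {
        const key = trimmed.slice(0, eqIdx).trim();
        const val = trimmed.slice(eqIdx + 1);
        result[key] = decodeFunc(val).trim().replaceAll(',', ' ');
      }
    }
  }
  return result;
}

// GFF3 column 9: comma lists become space separated, %2C decodes to a comma first
console.log(parseAttributes('ID=gene001;Alias=abc,def;Note=hello%2Cworld', decodeURIComponent));
// { ID: 'gene001', Alias: 'abc def', Note: 'hello world' }
```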
package/dist/types/vcfAdapter.js
CHANGED
@@ -1,75 +1,38 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.indexVcf = indexVcf;
-const
-const
-const cli_progress_1 = require("cli-progress");
-const util_1 = require("../util");
+const util_ts_1 = require("../util.js");
+const streamUtils_ts_1 = require("./streamUtils.js");
 async function* indexVcf({ config, attributesToIndex, inLocation, outLocation, quiet, }) {
 const { trackId } = config;
-
-
-
-
-
-}, cli_progress_1.Presets.shades_classic);
-let receivedBytes = 0;
-const { totalBytes, stream } = await (0, util_1.getLocalOrRemoteStream)(inLocation, outLocation);
-if (!quiet) {
-progressBar.start(totalBytes, 0);
-}
-// @ts-expect-error
-stream.on('data', chunk => {
-receivedBytes += chunk.length;
-progressBar.update(receivedBytes);
-});
-const gzStream = /.b?gz$/.exec(inLocation)
-? // @ts-expect-error
-stream.pipe((0, zlib_1.createGunzip)())
-: stream;
-const rl = readline_1.default.createInterface({
-input: gzStream,
+const { rl, progressBar } = await (0, streamUtils_ts_1.createIndexingStream)({
+inLocation,
+outLocation,
+trackId,
+quiet,
 });
+const encodedTrackId = encodeURIComponent(trackId);
 for await (const line of rl) {
 if (line.startsWith('#')) {
 continue;
 }
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
 const [ref, pos, id, _ref, _alt, _qual, _filter, info] = line.split('\t');
-// turns gff3 attrs into a map, and converts the arrays into space
-// separated strings
-const fields = Object.fromEntries(info
-.split(';')
-.map(f => f.trim())
-.filter(f => !!f)
-.map(f => f.split('='))
-.map(([key, val]) => [
-key.trim(),
-val
-? (0, util_1.decodeURIComponentNoThrow)(val).trim().split(',').join(' ')
-: undefined,
-]));
-const end = fields.END;
-const locStr = `${ref}:${pos}..${end || +pos + 1}`;
 if (id === '.') {
 continue;
 }
+const fields = (0, streamUtils_ts_1.parseAttributes)(info, util_ts_1.decodeURIComponentNoThrow);
+const end = fields.END;
+const locStr = `${ref}:${pos}..${end || +pos + 1}`;
+const encodedLocStr = encodeURIComponent(locStr);
 const infoAttrs = attributesToIndex
 .map(attr => fields[attr])
 .filter((f) => !!f);
-const
-for (const
-const
-const record =
-
-encodeURIComponent(trackId),
-encodeURIComponent(id || ''),
-...infoAttrs.map(a => encodeURIComponent(a || '')),
-]).replaceAll(',', '|');
-yield `${record} ${[...new Set(attrs)].join(' ')}\n`;
+const encodedInfoAttrs = infoAttrs.map(a => `"${encodeURIComponent(a)}"`);
+for (const variantId of id.split(',')) {
+const encodedId = encodeURIComponent(variantId);
+const record = `["${encodedLocStr}"|"${encodedTrackId}"|"${encodedId}"${encodedInfoAttrs.length > 0 ? `|${encodedInfoAttrs.join('|')}` : ''}]`;
+yield `${record} ${variantId}\n`;
 }
 }
 progressBar.stop();
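The VCF indexer now emits one record per entry when the ID column holds a comma-separated list, quoting each URI-encoded field explicitly instead of joining an array and running `replaceAll(',', '|')`. A worked example with made-up values (trackId `variants`, no INFO attributes indexed):

```js
// Input VCF data line (illustrative):
// chr2  5000  rs111,rs222  A  G  .  PASS  END=5002
//
// Two index lines are produced, one per variant ID:
// ["chr2%3A5000..5002"|"variants"|"rs111"] rs111
// ["chr2%3A5000..5002"|"variants"|"rs222"] rs222
const locStr = 'chr2:5000..5002'; // END=5002 supplies the end coordinate
const encodedLocStr = encodeURIComponent(locStr);
for (const variantId of 'rs111,rs222'.split(',')) {
  console.log(`["${encodedLocStr}"|"variants"|"${encodeURIComponent(variantId)}"] ${variantId}`);
}
```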
package/dist/util.js
CHANGED
@@ -5,12 +5,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getLocalOrRemoteStream = getLocalOrRemoteStream;
 exports.decodeURIComponentNoThrow = decodeURIComponentNoThrow;
-const fs_1 =
+const fs_1 = require("fs");
+const promises_1 = require("fs/promises");
 const path_1 = __importDefault(require("path"));
-const
+const stream_1 = require("stream");
+const common_ts_1 = require("./types/common.js");
 async function getLocalOrRemoteStream(uri, out) {
-if ((0,
-const result = await (0,
+if ((0, common_ts_1.isURL)(uri)) {
+const result = await (0, common_ts_1.createRemoteStream)(uri);
 return {
 totalBytes: +(result.headers.get('Content-Length') || 0),
 stream: result.body,
@@ -18,17 +20,22 @@ async function getLocalOrRemoteStream(uri, out) {
 }
 else {
 const filename = path_1.default.isAbsolute(uri) ? uri : path_1.default.join(out, uri);
+const stats = await (0, promises_1.stat)(filename);
+const nodeStream = (0, fs_1.createReadStream)(filename);
 return {
-totalBytes:
-stream:
+totalBytes: stats.size,
+stream: stream_1.Readable.toWeb(nodeStream),
 };
 }
 }
 function decodeURIComponentNoThrow(uri) {
+if (!uri.includes('%')) {
+return uri;
+}
 try {
 return decodeURIComponent(uri);
 }
-catch
+catch {
 // avoid throwing exception on a failure to decode URI component
 return uri;
 }
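Local files are now exposed the same way as remote ones: `Readable.toWeb` (available since Node 17) wraps the `fs` read stream in a WHATWG `ReadableStream`, so callers such as `createIndexingStream` can use `getReader()` and `pipeThrough()` regardless of source. A minimal sketch of consuming that shape; the file name is illustrative:

```js
const { createReadStream } = require('fs');
const { Readable } = require('stream');

async function countBytes(path) {
  // Same conversion util.js now performs for local files
  const webStream = Readable.toWeb(createReadStream(path));
  const reader = webStream.getReader();
  let total = 0;
  let result = await reader.read();
  while (!result.done) {
    total += result.value.length; // value is a Uint8Array chunk
    result = await reader.read();
  }
  return total;
}

countBytes('annotations.gff3').then(n => console.log(`${n} bytes`));
```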
package/dist/utils.js
CHANGED
@@ -4,6 +4,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.debug = debug;
+exports.resolveConfigPath = resolveConfigPath;
 exports.readFile = readFile;
 exports.readJsonFile = readJsonFile;
 exports.writeJsonFile = writeJsonFile;
@@ -18,12 +19,17 @@ exports.printHelp = printHelp;
 const fs_1 = require("fs");
 const path_1 = __importDefault(require("path"));
 const json_parse_better_errors_1 = __importDefault(require("json-parse-better-errors"));
-const
+const fetchWithProxy_ts_1 = __importDefault(require("./fetchWithProxy.js"));
 function debug(message) {
 if (process.env.DEBUG) {
 console.log(`DEBUG: ${message}`);
 }
 }
+async function resolveConfigPath(target, out) {
+const output = target || out || '.';
+const stat = await fs_1.promises.lstat(output);
+return stat.isDirectory() ? `${output}/config.json` : output;
+}
 async function readFile(location) {
 return fs_1.promises.readFile(location, { encoding: 'utf8' });
 }
@@ -45,8 +51,7 @@ async function resolveFileLocation(location, check = true, inPlace = false) {
 }
 if (locationUrl) {
 if (check) {
-
-const response = await (0, fetchWithProxy_1.default)(locationUrl, { method: 'HEAD' });
+const response = await (0, fetchWithProxy_ts_1.default)(locationUrl, { method: 'HEAD' });
 if (!response.ok) {
 throw new Error(`${locationUrl} result ${response.statusText}`);
 }
@@ -112,7 +117,7 @@ async function* fetchVersions() {
 let result;
 do {
 const url = `https://api.github.com/repos/GMOD/jbrowse-components/releases?page=${page}`;
-const response = await (0,
+const response = await (0, fetchWithProxy_ts_1.default)(url);
 if (response.ok) {
 result = (await response.json());
 yield result.filter(release => release.tag_name.startsWith('v'));
@@ -124,7 +129,7 @@ async function* fetchVersions() {
 } while (result.length);
 }
 async function getTag(tag) {
-const response = await (0,
+const response = await (0, fetchWithProxy_ts_1.default)(`https://api.github.com/repos/GMOD/jbrowse-components/releases/tags/${tag}`);
 if (response.ok) {
 const result = (await response.json());
 const file = result.assets?.find(f => f.name.includes('jbrowse-web'))?.browser_download_url;
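The new shared `resolveConfigPath` helper centralizes the "target, then --out, then current directory" logic: if the resolved path is a directory it appends `config.json`, otherwise it returns the path untouched. A small sketch of the expected behaviour; the paths are illustrative:

```js
const { promises } = require('fs');

async function resolveConfigPath(target, out) {
  // Same logic as utils.js: explicit target wins, then --out, then cwd
  const output = target || out || '.';
  const stat = await promises.lstat(output);
  return stat.isDirectory() ? `${output}/config.json` : output;
}

// Assuming ./jbrowse2 is a directory and ./custom.json is a file:
resolveConfigPath('jbrowse2').then(console.log);            // jbrowse2/config.json
resolveConfigPath('custom.json').then(console.log);         // custom.json
resolveConfigPath(undefined, 'jbrowse2').then(console.log); // jbrowse2/config.json
```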
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@jbrowse/cli",
-"version": "
+"version": "4.0.1",
 "description": "A command line tool for working with JBrowse 2",
 "keywords": [
 "jbrowse",
@@ -27,27 +27,24 @@
 "engines": {
 "node": ">=18.3.0"
 },
-"scripts": {
-"prebuild": "npm run clean",
-"build": "tsc --build && webpack",
-"clean": "rimraf dist bundle",
-"prepack": "npm run clean && npm run docs",
-"predocs": "npm run build",
-"docs": "./generate_readme.sh > README.md"
-},
 "dependencies": {
-"cli-progress": "^3.
+"cli-progress": "^3.12.0",
 "command-exists": "^1.2.9",
 "cors": "^2.8.5",
-"decompress": "^4.
-"express": "^5.1
-"ixixx": "^
+"decompress": "^4.2.1",
+"express": "^5.2.1",
+"ixixx": "^3.0.2",
 "json-parse-better-errors": "^1.0.2",
-"node-fetch-native": "^1.6.
-"tmp": "^0.2.
+"node-fetch-native": "^1.6.7",
+"tmp": "^0.2.5"
 },
 "publishConfig": {
 "access": "public"
 },
-"
-
+"scripts": {
+"prebuild": "pnpm clean",
+"build": "tsc && webpack",
+"clean": "rimraf bundle",
+"generate-readme": "./generate_readme.sh > README.md"
+}
+}
package/dist/commands/make-pif-utils/validators.js
DELETED
@@ -1,22 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.validateFileArgument = validateFileArgument;
-exports.validateRequiredCommands = validateRequiredCommands;
-const command_exists_1 = require("command-exists");
-function validateFileArgument(file) {
-// Allow no file argument for stdin input
-if (!file && process.stdin.isTTY) {
-console.error('Error: Missing required argument: file');
-console.error('Usage: jbrowse make-pif <file> [options]');
-console.error(' OR pipe data via stdin: cat file.paf | jbrowse make-pif');
-process.exit(1);
-}
-}
-function validateRequiredCommands() {
-const requiredCommands = ['sh', 'sort', 'grep', 'tabix', 'bgzip'];
-const missingCommands = requiredCommands.filter(cmd => !(0, command_exists_1.sync)(cmd));
-if (missingCommands.length > 0) {
-console.error('Error: Unable to sort, requires unix type environment with sort, grep, bgzip, tabix');
-process.exit(1);
-}
-}
package/dist/commands/sort-bed-utils/constants.js
DELETED
@@ -1,12 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.REQUIRED_COMMANDS = exports.SORT_BED_EXAMPLES = exports.SORT_BED_DESCRIPTION = void 0;
-exports.SORT_BED_DESCRIPTION = 'Helper utility to sort BED files for tabix. Moves all lines starting with # to the top of the file, and sort by refname and start position using unix utilities sort and grep';
-exports.SORT_BED_EXAMPLES = [
-'# sort bed and pipe to bgzip',
-'$ jbrowse sort-bed input.bed | bgzip > sorted.bed.gz',
-'$ tabix sorted.bed.gz',
-'',
-'# OR pipe data via stdin: cat file.bed | jbrowse sort-bed | bgzip > sorted.bed.gz',
-];
-exports.REQUIRED_COMMANDS = ['sh', 'sort', 'grep'];
package/dist/commands/sort-bed-utils/sort-utils.js
DELETED
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getMinimalEnvironment = getMinimalEnvironment;
-exports.createSortCommandForFile = createSortCommandForFile;
-exports.spawnSortProcess = spawnSortProcess;
-const child_process_1 = require("child_process");
-function getMinimalEnvironment() {
-return {
-...process.env,
-LC_ALL: 'C',
-};
-}
-function createSortCommandForFile(file) {
-// BED files use columns 1,2 (0-based) for chromosome and start position
-return `(grep "^#" "${file}"; grep -v "^#" "${file}" | sort -t"\`printf '\\t'\`" -k1,1 -k2,2n)`;
-}
-function spawnSortProcess(options) {
-const command = createSortCommandForFile(options.file);
-const env = getMinimalEnvironment();
-return (0, child_process_1.spawn)('sh', ['-c', command], {
-env,
-stdio: 'inherit',
-});
-}
package/dist/commands/sort-bed-utils/validators.js
DELETED
@@ -1,22 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.validateFileArgument = validateFileArgument;
-exports.validateRequiredCommands = validateRequiredCommands;
-const command_exists_1 = require("command-exists");
-function validateFileArgument(file) {
-// Allow no file argument for stdin input
-if (!file && process.stdin.isTTY) {
-console.error('Error: Missing required argument: file');
-console.error('Usage: jbrowse sort-bed <file>');
-console.error(' OR pipe data via stdin: cat file.bed | jbrowse sort-bed');
-process.exit(1);
-}
-}
-function validateRequiredCommands() {
-const requiredCommands = ['sh', 'sort', 'grep'];
-const missingCommands = requiredCommands.filter(cmd => !(0, command_exists_1.sync)(cmd));
-if (missingCommands.length > 0) {
-console.error('Error: Unable to sort, requires unix type environment with sort, grep');
-process.exit(1);
-}
-}
package/dist/commands/sort-gff-utils/constants.js
DELETED
@@ -1,13 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.REQUIRED_COMMANDS = exports.SORT_GFF_EXAMPLES = exports.SORT_GFF_DESCRIPTION = void 0;
-exports.SORT_GFF_DESCRIPTION = 'Helper utility to sort GFF files for tabix. Moves all lines starting with # to the top of the file, and sort by refname and start position using unix utilities sort and grep';
-exports.SORT_GFF_EXAMPLES = [
-'# sort gff and pipe to bgzip',
-'$ jbrowse sort-gff input.gff | bgzip > sorted.gff.gz',
-'$ tabix sorted.gff.gz',
-'',
-'# sort gff from stdin',
-'$ cat input.gff | jbrowse sort-gff | bgzip > sorted.gff.gz',
-];
-exports.REQUIRED_COMMANDS = ['sh', 'sort', 'grep'];