@gmod/bbi 1.0.27 → 1.0.31

This diff shows the content of publicly available package versions as released to their public registry, and is provided for informational purposes only.
package/esm/bbi.d.ts ADDED
@@ -0,0 +1,84 @@
+ import { GenericFilehandle } from 'generic-filehandle';
+ import { Observable } from 'rxjs';
+ import { BlockView } from './blockView';
+ export interface Feature {
+ start: number;
+ end: number;
+ score: number;
+ rest?: string;
+ minScore?: number;
+ maxScore?: number;
+ summary?: boolean;
+ uniqueId?: string;
+ field?: number;
+ }
+ interface Statistics {
+ scoreSum: number;
+ basesCovered: number;
+ scoreSumSquares: number;
+ }
+ interface RefInfo {
+ name: string;
+ id: number;
+ length: number;
+ }
+ export interface Header {
+ autoSql: string;
+ totalSummary: Statistics;
+ zoomLevels: any;
+ unzoomedIndexOffset: number;
+ unzoomedDataOffset: number;
+ definedFieldCount: number;
+ uncompressBufSize: number;
+ chromTreeOffset: number;
+ fileSize: number;
+ extHeaderOffset: number;
+ isBigEndian: boolean;
+ fileType: string;
+ refsByName: {
+ [key: string]: number;
+ };
+ refsByNumber: {
+ [key: number]: RefInfo;
+ };
+ }
+ export interface RequestOptions {
+ signal?: AbortSignal;
+ headers?: Record<string, string>;
+ [key: string]: unknown;
+ }
+ export declare abstract class BBI {
+ protected bbi: GenericFilehandle;
+ protected headerCache: any;
+ protected renameRefSeqs: (a: string) => string;
+ getHeader(opts?: RequestOptions | AbortSignal): any;
+ constructor(options?: {
+ filehandle?: GenericFilehandle;
+ path?: string;
+ url?: string;
+ renameRefSeqs?: (a: string) => string;
+ });
+ private _getHeader;
+ private _getMainHeader;
+ private _isBigEndian;
+ private _readChromTree;
+ protected getUnzoomedView(opts: RequestOptions): Promise<BlockView>;
+ protected abstract getView(scale: number, opts: RequestOptions): Promise<BlockView>;
+ /**
+ * Gets features from a BigWig file
+ *
+ * @param refName - The chromosome name
+ * @param start - The start of a region
+ * @param end - The end of a region
+ * @param opts - An object containing basesPerSpan (e.g. pixels per basepair) or scale used to infer the zoomLevel to use
+ */
+ getFeatureStream(refName: string, start: number, end: number, opts?: RequestOptions & {
+ scale?: number;
+ basesPerSpan?: number;
+ }): Promise<Observable<Feature[]>>;
+ getFeatures(refName: string, start: number, end: number, opts?: RequestOptions & {
+ scale?: number;
+ basesPerSpan?: number;
+ }): Promise<Feature[]>;
+ }
+ export {};
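These declarations define the package's public surface: construct a reader over a filehandle, path, or URL, then call getFeatures or getFeatureStream. A minimal consumption sketch (the file path is hypothetical; any BigWig file works):

    import { BigWig } from '@gmod/bbi'

    async function main() {
      // path is hypothetical; { url } or { filehandle } also work per the constructor above
      const bw = new BigWig({ path: 'volvox.bw' })

      // header is parsed once and cached
      const header = await bw.getHeader()
      console.log(header.fileType, Object.keys(header.refsByName))

      // scale: 1 requests full-resolution (unzoomed) data
      const features = await bw.getFeatures('chr1', 0, 1000, { scale: 1 })
      for (const f of features) {
        console.log(f.start, f.end, f.score)
      }
    }

    main().catch(console.error)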
package/esm/bbi.js ADDED
@@ -0,0 +1,259 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.BBI = void 0;
+ const binary_parser_1 = require("@gmod/binary-parser");
+ const generic_filehandle_1 = require("generic-filehandle");
+ const rxjs_1 = require("rxjs");
+ const operators_1 = require("rxjs/operators");
+ const abortable_promise_cache_1 = __importDefault(require("abortable-promise-cache"));
+ const quick_lru_1 = __importDefault(require("quick-lru"));
+ const blockView_1 = require("./blockView");
+ const BIG_WIG_MAGIC = -2003829722;
+ const BIG_BED_MAGIC = -2021002517;
+ /* get the compiled parsers for different sections of the bigwig file
+ *
+ * @param isBE - is big endian, typically false
+ * @return an object with compiled parsers
+ */
+ function getParsers(isBE) {
+ const le = isBE ? 'big' : 'little';
+ const headerParser = new binary_parser_1.Parser()
+ .endianess(le)
+ .int32('magic')
+ .uint16('version')
+ .uint16('numZoomLevels')
+ .uint64('chromTreeOffset')
+ .uint64('unzoomedDataOffset')
+ .uint64('unzoomedIndexOffset')
+ .uint16('fieldCount')
+ .uint16('definedFieldCount')
+ .uint64('asOffset') // autoSql offset, used in bigbed
+ .uint64('totalSummaryOffset')
+ .uint32('uncompressBufSize')
+ .uint64('extHeaderOffset') // name index offset, used in bigbed
+ .array('zoomLevels', {
+ length: 'numZoomLevels',
+ type: new binary_parser_1.Parser()
+ .uint32('reductionLevel')
+ .uint32('reserved')
+ .uint64('dataOffset')
+ .uint64('indexOffset'),
+ });
+ const totalSummaryParser = new binary_parser_1.Parser()
+ .endianess(le)
+ .uint64('basesCovered')
+ .double('scoreMin')
+ .double('scoreMax')
+ .double('scoreSum')
+ .double('scoreSumSquares');
+ const chromTreeParser = new binary_parser_1.Parser()
+ .endianess(le)
+ .uint32('magic')
+ .uint32('blockSize')
+ .uint32('keySize')
+ .uint32('valSize')
+ .uint64('itemCount');
+ const isLeafNode = new binary_parser_1.Parser()
+ .endianess(le)
+ .uint8('isLeafNode')
+ .skip(1)
+ .uint16('cnt');
+ return {
+ chromTreeParser,
+ totalSummaryParser,
+ headerParser,
+ isLeafNode,
+ };
+ }
+ class BBI {
+ /*
+ * @param filehandle - a filehandle from generic-filehandle or implementing something similar to the node10 fs.promises API
+ * @param path - a Local file path as a string
+ * @param url - a URL string
+ * @param renameRefSeqs - an optional method to rename the internal reference sequences using a mapping function
+ */
+ constructor(options = {}) {
+ this.headerCache = new abortable_promise_cache_1.default({
+ cache: new quick_lru_1.default({ maxSize: 1 }),
+ fill: async (params, signal) => {
+ return this._getHeader({ ...params, signal });
+ },
+ });
+ const { filehandle, renameRefSeqs, path, url } = options;
+ this.renameRefSeqs = renameRefSeqs || ((s) => s);
+ if (filehandle) {
+ this.bbi = filehandle;
+ }
+ else if (url) {
+ this.bbi = new generic_filehandle_1.RemoteFile(url);
+ }
+ else if (path) {
+ this.bbi = new generic_filehandle_1.LocalFile(path);
+ }
+ else {
+ throw new Error('no file given');
+ }
+ }
+ /* fetch and parse header information from a bigwig or bigbed file
+ * @param abortSignal - abort the operation, can be null
+ * @return a Header object
+ */
+ getHeader(opts = {}) {
+ const options = 'aborted' in opts ? { signal: opts } : opts;
+ return this.headerCache.get(JSON.stringify(options), options, options.signal);
+ }
+ async _getHeader(opts) {
+ const header = await this._getMainHeader(opts);
+ const chroms = await this._readChromTree(header, opts);
+ return { ...header, ...chroms };
+ }
+ async _getMainHeader(opts, requestSize = 2000) {
+ const { buffer } = await this.bbi.read(Buffer.alloc(requestSize), 0, requestSize, 0, opts);
+ const isBigEndian = this._isBigEndian(buffer);
+ const ret = getParsers(isBigEndian);
+ const header = ret.headerParser.parse(buffer).result;
+ header.fileType = header.magic === BIG_BED_MAGIC ? 'bigbed' : 'bigwig';
+ if (header.asOffset > requestSize ||
+ header.totalSummaryOffset > requestSize) {
+ return this._getMainHeader(opts, requestSize * 2);
+ }
+ if (header.asOffset) {
+ header.autoSql = buffer
+ .slice(header.asOffset, buffer.indexOf(0, header.asOffset))
+ .toString('utf8');
+ }
+ if (header.totalSummaryOffset > requestSize) {
+ return this._getMainHeader(opts, requestSize * 2);
+ }
+ if (header.totalSummaryOffset) {
+ const tail = buffer.slice(header.totalSummaryOffset);
+ header.totalSummary = ret.totalSummaryParser.parse(tail).result;
+ }
+ return { ...header, isBigEndian };
+ }
+ _isBigEndian(buffer) {
+ let ret = buffer.readInt32LE(0);
+ if (ret === BIG_WIG_MAGIC || ret === BIG_BED_MAGIC) {
+ return false;
+ }
+ ret = buffer.readInt32BE(0);
+ if (ret === BIG_WIG_MAGIC || ret === BIG_BED_MAGIC) {
+ return true;
+ }
+ throw new Error('not a BigWig/BigBed file');
+ }
+ // todo: add progress if long running
+ async _readChromTree(header, opts) {
+ const isBE = header.isBigEndian;
+ const le = isBE ? 'big' : 'little';
+ const refsByNumber = [];
+ const refsByName = {};
+ const { chromTreeOffset } = header;
+ let { unzoomedDataOffset } = header;
+ while (unzoomedDataOffset % 4 !== 0) {
+ unzoomedDataOffset += 1;
+ }
+ const { buffer: data } = await this.bbi.read(Buffer.alloc(unzoomedDataOffset - chromTreeOffset), 0, unzoomedDataOffset - chromTreeOffset, chromTreeOffset, opts);
+ const p = getParsers(isBE);
+ const { keySize } = p.chromTreeParser.parse(data).result;
+ const leafNodeParser = new binary_parser_1.Parser()
+ .endianess(le)
+ .string('key', { stripNull: true, length: keySize })
+ .uint32('refId')
+ .uint32('refSize');
+ const nonleafNodeParser = new binary_parser_1.Parser()
+ .endianess(le)
+ .skip(keySize)
+ .uint64('childOffset');
+ const rootNodeOffset = 32;
+ const bptReadNode = async (currentOffset) => {
+ let offset = currentOffset;
+ if (offset >= data.length) {
+ throw new Error('reading beyond end of buffer');
+ }
+ const ret = p.isLeafNode.parse(data.slice(offset));
+ const { isLeafNode, cnt } = ret.result;
+ offset += ret.offset;
+ if (isLeafNode) {
+ for (let n = 0; n < cnt; n += 1) {
+ const leafRet = leafNodeParser.parse(data.slice(offset));
+ offset += leafRet.offset;
+ const { key, refId, refSize } = leafRet.result;
+ const refRec = { name: key, id: refId, length: refSize };
+ refsByName[this.renameRefSeqs(key)] = refId;
+ refsByNumber[refId] = refRec;
+ }
+ }
+ else {
+ // parse index node
+ const nextNodes = [];
+ for (let n = 0; n < cnt; n += 1) {
+ const nonleafRet = nonleafNodeParser.parse(data.slice(offset));
+ let { childOffset } = nonleafRet.result;
+ offset += nonleafRet.offset;
+ childOffset -= chromTreeOffset;
+ nextNodes.push(bptReadNode(childOffset));
+ }
+ await Promise.all(nextNodes);
+ }
+ };
+ await bptReadNode(rootNodeOffset);
+ return {
+ refsByName,
+ refsByNumber,
+ };
+ }
+ /*
+ * fetches the "unzoomed" view of the bigwig data. this is the default for bigbed
+ * @param abortSignal - a signal to optionally abort this operation
+ */
+ async getUnzoomedView(opts) {
+ const { unzoomedIndexOffset, zoomLevels, refsByName, uncompressBufSize, isBigEndian, fileType, } = await this.getHeader(opts);
+ const nzl = zoomLevels[0];
+ const cirLen = nzl ? nzl.dataOffset - unzoomedIndexOffset : 4000;
+ return new blockView_1.BlockView(this.bbi, refsByName, unzoomedIndexOffset, cirLen, isBigEndian, uncompressBufSize > 0, fileType);
+ }
+ /**
+ * Gets features from a BigWig file
+ *
+ * @param refName - The chromosome name
+ * @param start - The start of a region
+ * @param end - The end of a region
+ * @param opts - An object containing basesPerSpan (e.g. pixels per basepair) or scale used to infer the zoomLevel to use
+ */
+ async getFeatureStream(refName, start, end, opts = {
+ scale: 1,
+ }) {
+ await this.getHeader(opts);
+ const chrName = this.renameRefSeqs(refName);
+ let view;
+ if (opts.basesPerSpan) {
+ view = await this.getView(1 / opts.basesPerSpan, opts);
+ }
+ else if (opts.scale) {
+ view = await this.getView(opts.scale, opts);
+ }
+ else {
+ view = await this.getView(1, opts);
+ }
+ if (!view) {
+ throw new Error('unable to get block view for data');
+ }
+ return new rxjs_1.Observable((observer) => {
+ view.readWigData(chrName, start, end, observer, opts);
+ });
+ }
+ async getFeatures(refName, start, end, opts = {
+ scale: 1,
+ }) {
+ const ob = await this.getFeatureStream(refName, start, end, opts);
+ const ret = await ob
+ .pipe((0, operators_1.reduce)((acc, curr) => acc.concat(curr)))
+ .toPromise();
+ return ret || [];
+ }
+ }
+ exports.BBI = BBI;
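Note how getHeader accepts either a RequestOptions object or a bare AbortSignal (the 'aborted' in opts check detects the latter), and how header parsing is memoized through abortable-promise-cache keyed on the serialized options. A cancellation sketch, assuming a hypothetical remote URL:

    import { BigWig } from '@gmod/bbi'

    async function withAbort() {
      // URL is hypothetical; a RemoteFile filehandle is created under the hood
      const bw = new BigWig({ url: 'https://example.com/data.bw' })

      const controller = new AbortController()
      // the signal rides along in RequestOptions; passing the AbortSignal directly also works
      const pending = bw.getHeader({ signal: controller.signal })

      // aborting before the read completes rejects the cached fill
      controller.abort()
      await pending.catch(e => console.error('request aborted:', e))
    }

    withAbort()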
package/esm/bigbed.d.ts ADDED
@@ -0,0 +1,12 @@
+ import { BBI, Feature, RequestOptions } from './bbi';
+ import { BlockView } from './blockView';
+ export declare function filterUndef<T>(ts: (T | undefined)[]): T[];
+ export declare class BigBed extends BBI {
+ readIndicesCache: any;
+ constructor(opts?: any);
+ readIndices(opts?: AbortSignal | RequestOptions): any;
+ protected getView(scale: number, opts: RequestOptions): Promise<BlockView>;
+ private _readIndices;
+ private searchExtraIndexBlocks;
+ searchExtraIndex(name: string, opts?: RequestOptions): Promise<Feature[]>;
+ }
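BigBed adds name-based lookup over the file's extra indexes (created with the -extraIndex option to bedToBigBed). A sketch, assuming a hypothetical BigBed file indexed on its name field:

    import { BigBed } from '@gmod/bbi'

    async function lookupByName() {
      // hypothetical file, built with e.g. bedToBigBed -extraIndex=name
      const bb = new BigBed({ path: 'genes.bb' })

      // readIndices lists the extra indexes defined in the file; empty if none
      const indices = await bb.readIndices()
      if (indices.length === 0) return

      // exact-match search across every extra index; matching features get
      // their `field` set to the indexed column number
      const hits = await bb.searchExtraIndex('BRCA1')
      console.log(hits.map(h => ({ start: h.start, end: h.end, rest: h.rest })))
    }

    lookupByName().catch(console.error)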
package/esm/bigbed.js ADDED
@@ -0,0 +1,182 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.BigBed = exports.filterUndef = void 0;
+ const binary_parser_1 = require("@gmod/binary-parser");
+ const rxjs_1 = require("rxjs");
+ const operators_1 = require("rxjs/operators");
+ const abortable_promise_cache_1 = __importDefault(require("abortable-promise-cache"));
+ const quick_lru_1 = __importDefault(require("quick-lru"));
+ const bbi_1 = require("./bbi");
+ function filterUndef(ts) {
+ return ts.filter((t) => !!t);
+ }
+ exports.filterUndef = filterUndef;
+ class BigBed extends bbi_1.BBI {
+ constructor(opts) {
+ super(opts);
+ this.readIndicesCache = new abortable_promise_cache_1.default({
+ cache: new quick_lru_1.default({ maxSize: 1 }),
+ fill: async (args, signal) => {
+ return this._readIndices({ ...args, signal });
+ },
+ });
+ }
+ readIndices(opts = {}) {
+ const options = 'aborted' in opts ? { signal: opts } : opts;
+ return this.readIndicesCache.get(JSON.stringify(options), options, options.signal);
+ }
+ /*
+ * retrieve unzoomed view for any scale
+ * @param scale - unused
+ * @param abortSignal - an optional AbortSignal to kill operation
+ * @return promise for a BlockView
+ */
+ async getView(scale, opts) {
+ return this.getUnzoomedView(opts);
+ }
+ /*
+ * parse the bigbed extraIndex fields
+ * @param abortSignal to abort operation
+ * @return a Promise for an array of Index data structure since there can be multiple extraIndexes in a bigbed, see bedToBigBed documentation
+ */
+ async _readIndices(opts) {
+ const { extHeaderOffset, isBigEndian } = await this.getHeader(opts);
+ const { buffer: data } = await this.bbi.read(Buffer.alloc(64), 0, 64, extHeaderOffset);
+ const le = isBigEndian ? 'big' : 'little';
+ const ret = new binary_parser_1.Parser()
+ .endianess(le)
+ .uint16('size')
+ .uint16('count')
+ .uint64('offset')
+ .parse(data).result;
+ const { count, offset } = ret;
+ // no extra index is defined if count==0
+ if (count === 0) {
+ return [];
+ }
+ const blocklen = 20;
+ const len = blocklen * count;
+ const { buffer } = await this.bbi.read(Buffer.alloc(len), 0, len, offset);
+ const extParser = new binary_parser_1.Parser()
+ .endianess(le)
+ .int16('type')
+ .int16('fieldcount')
+ .uint64('offset')
+ .skip(4)
+ .int16('field');
+ const indices = [];
+ for (let i = 0; i < count; i += 1) {
+ indices.push(extParser.parse(buffer.slice(i * blocklen)).result);
+ }
+ return indices;
+ }
+ /*
+ * perform a search in the bigbed extraIndex to find which blocks in the bigbed data to look for the
+ * actual feature data
+ *
+ * @param name - the name to search for
+ * @param opts - a SearchOptions argument with optional signal
+ * @return a Promise for an array of bigbed block Loc entries
+ */
+ async searchExtraIndexBlocks(name, opts = {}) {
+ const { isBigEndian } = await this.getHeader(opts);
+ const indices = await this.readIndices(opts);
+ if (!indices.length) {
+ return [];
+ }
+ const locs = indices.map(async (index) => {
+ const { offset, field } = index;
+ const { buffer: data } = await this.bbi.read(Buffer.alloc(32), 0, 32, offset, opts);
+ const p = new binary_parser_1.Parser()
+ .endianess(isBigEndian ? 'big' : 'little')
+ .int32('magic')
+ .int32('blockSize')
+ .int32('keySize')
+ .int32('valSize')
+ .uint64('itemCount');
+ const { blockSize, keySize, valSize } = p.parse(data).result;
+ const bpt = new binary_parser_1.Parser()
+ .endianess(isBigEndian ? 'big' : 'little')
+ .int8('nodeType')
+ .skip(1)
+ .int16('cnt')
+ .choice({
+ tag: 'nodeType',
+ choices: {
+ 0: new binary_parser_1.Parser().array('leafkeys', {
+ length: 'cnt',
+ type: new binary_parser_1.Parser()
+ .string('key', { length: keySize, stripNull: true })
+ .uint64('offset'),
+ }),
+ 1: new binary_parser_1.Parser().array('keys', {
+ length: 'cnt',
+ type: new binary_parser_1.Parser()
+ .string('key', { length: keySize, stripNull: true })
+ .uint64('offset')
+ .uint32('length')
+ .uint32('reserved'),
+ }),
+ },
+ });
+ const bptReadNode = async (nodeOffset) => {
+ const len = 4 + blockSize * (keySize + valSize);
+ const { buffer } = await this.bbi.read(Buffer.alloc(len), 0, len, nodeOffset, opts);
+ const node = bpt.parse(buffer).result;
+ if (node.leafkeys) {
+ let lastOffset;
+ for (let i = 0; i < node.leafkeys.length; i += 1) {
+ const { key } = node.leafkeys[i];
+ if (name.localeCompare(key) < 0 && lastOffset) {
+ return bptReadNode(lastOffset);
+ }
+ lastOffset = node.leafkeys[i].offset;
+ }
+ return bptReadNode(lastOffset);
+ }
+ for (let i = 0; i < node.keys.length; i += 1) {
+ if (node.keys[i].key === name) {
+ return { ...node.keys[i], field };
+ }
+ }
+ return undefined;
+ };
+ const rootNodeOffset = 32;
+ return bptReadNode(offset + rootNodeOffset);
+ });
+ return filterUndef(await Promise.all(locs));
+ }
+ /*
+ * retrieve the features from the bigbed data that were found through the lookup of the extraIndex
+ * note that there can be multiple extraIndex, see the BigBed specification and the -extraIndex argument to bedToBigBed
+ *
+ * @param name - the name to search for
+ * @param opts - a SearchOptions argument with optional signal
+ * @return a Promise for an array of Feature
+ */
+ async searchExtraIndex(name, opts = {}) {
+ const blocks = await this.searchExtraIndexBlocks(name, opts);
+ if (!blocks.length) {
+ return [];
+ }
+ const view = await this.getUnzoomedView(opts);
+ const res = blocks.map(block => {
+ return new rxjs_1.Observable((observer) => {
+ view.readFeatures(observer, [block], opts);
+ }).pipe((0, operators_1.reduce)((acc, curr) => acc.concat(curr)), (0, operators_1.map)(x => {
+ for (let i = 0; i < x.length; i += 1) {
+ x[i].field = block.field;
+ }
+ return x;
+ }));
+ });
+ const ret = await (0, rxjs_1.merge)(...res).toPromise();
+ return ret.filter((f) => {
+ return f.rest.split('\t')[f.field - 3] === name;
+ });
+ }
+ }
+ exports.BigBed = BigBed;
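As the final filter in searchExtraIndex shows, a BigBed feature carries its remaining BED columns as a single tab-separated rest string, with the column layout described by the header's autoSql section. A sketch of pulling named columns back out (the path is hypothetical, and the destructured names assume a standard BED column order):

    import { BigBed } from '@gmod/bbi'

    async function namedColumns() {
      const bb = new BigBed({ path: 'annotations.bb' }) // hypothetical path

      // autoSql (when present) documents the tab-separated columns in `rest`
      const { autoSql } = await bb.getHeader()
      console.log(autoSql)

      const [first] = await bb.getFeatures('chr1', 0, 10000)
      if (first?.rest) {
        // assumes standard BED layout: columns 4 and up are name, score, strand, ...
        const [name, score, strand] = first.rest.split('\t')
        console.log(name, score, strand)
      }
    }

    namedColumns().catch(console.error)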
package/esm/bigwig.d.ts ADDED
@@ -0,0 +1,13 @@
+ import { BlockView } from './blockView';
+ import { BBI, RequestOptions } from './bbi';
+ export declare class BigWig extends BBI {
+ /**
+ * Retrieves a BlockView of a specific zoomLevel
+ *
+ * @param refName - The chromosome name
+ * @param start - The start of a region
+ * @param end - The end of a region
+ * @param opts - An object containing basesPerSpan (e.g. pixels per basepair) or scale used to infer the zoomLevel to use
+ */
+ protected getView(scale: number, opts: RequestOptions): Promise<BlockView>;
+ }
package/esm/bigwig.js ADDED
@@ -0,0 +1,35 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.BigWig = void 0;
+ const blockView_1 = require("./blockView");
+ const bbi_1 = require("./bbi");
+ class BigWig extends bbi_1.BBI {
+ /**
+ * Retrieves a BlockView of a specific zoomLevel
+ *
+ * @param refName - The chromosome name
+ * @param start - The start of a region
+ * @param end - The end of a region
+ * @param opts - An object containing basesPerSpan (e.g. pixels per basepair) or scale used to infer the zoomLevel to use
+ */
+ async getView(scale, opts) {
+ const { zoomLevels, refsByName, fileSize, isBigEndian, uncompressBufSize } = await this.getHeader(opts);
+ const basesPerPx = 1 / scale;
+ let maxLevel = zoomLevels.length;
+ if (!fileSize) {
+ // if we don't know the file size, we can't fetch the highest zoom level :-(
+ maxLevel -= 1;
+ }
+ for (let i = maxLevel; i >= 0; i -= 1) {
+ const zh = zoomLevels[i];
+ if (zh && zh.reductionLevel <= 2 * basesPerPx) {
+ const indexLength = i < zoomLevels.length - 1
+ ? zoomLevels[i + 1].dataOffset - zh.indexOffset
+ : fileSize - 4 - zh.indexOffset;
+ return new blockView_1.BlockView(this.bbi, refsByName, zh.indexOffset, indexLength, isBigEndian, uncompressBufSize > 0, 'summary');
+ }
+ }
+ return this.getUnzoomedView(opts);
+ }
+ }
+ exports.BigWig = BigWig;
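getView is where the scale / basesPerSpan option takes effect: the loop walks the zoom levels from coarsest to finest and returns a view over the first level whose reductionLevel is at most twice the requested bases per pixel, falling back to the unzoomed view. A sketch of requesting reduced-resolution summary bins (path hypothetical):

    import { BigWig } from '@gmod/bbi'

    async function overview() {
      const bw = new BigWig({ path: 'coverage.bw' }) // hypothetical path

      // basesPerSpan: 1000 reaches getView as scale = 1/1000, so the coarsest
      // zoom level with reductionLevel <= 2000 is chosen; results come back as
      // summary bins carrying minScore/maxScore and summary: true
      const bins = await bw.getFeatures('chr1', 0, 1000000, { basesPerSpan: 1000 })
      console.log(bins.length, bins[0])
    }

    overview().catch(console.error)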
package/esm/blockView.d.ts ADDED
@@ -0,0 +1,42 @@
+ import { Observer } from 'rxjs';
+ import { GenericFilehandle } from 'generic-filehandle';
+ import { Feature } from './bbi';
+ interface CoordRequest {
+ chrId: number;
+ start: number;
+ end: number;
+ }
+ interface Options {
+ signal?: AbortSignal;
+ request?: CoordRequest;
+ }
+ /**
+ * View into a subset of the data in a BigWig file.
+ *
+ * Adapted by Robert Buels and Colin Diesh from bigwig.js in the Dalliance Genome
+ * Explorer by Thomas Down.
+ * @constructs
+ */
+ export declare class BlockView {
+ private cirTreeOffset;
+ private cirTreeLength;
+ private bbi;
+ private isCompressed;
+ private isBigEndian;
+ private refsByName;
+ private blockType;
+ private cirTreePromise?;
+ private featureCache;
+ private leafParser;
+ private bigWigParser;
+ private bigBedParser;
+ private summaryParser;
+ constructor(bbi: GenericFilehandle, refsByName: any, cirTreeOffset: number, cirTreeLength: number, isBigEndian: boolean, isCompressed: boolean, blockType: string);
+ readWigData(chrName: string, start: number, end: number, observer: Observer<Feature[]>, opts: Options): Promise<void>;
+ private parseSummaryBlock;
+ private parseBigBedBlock;
+ private parseBigWigBlock;
+ private static coordFilter;
+ readFeatures(observer: Observer<Feature[]>, blocks: any, opts?: Options): Promise<void>;
+ }
+ export {};
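BlockView drives the Observable returned by getFeatureStream: readWigData emits each parsed block's Feature[] batch to the observer, so consumers can process data incrementally instead of waiting for getFeatures to concatenate everything. A subscription sketch (path hypothetical):

    import { BigWig } from '@gmod/bbi'

    async function streamed() {
      const bw = new BigWig({ path: 'signal.bw' }) // hypothetical path

      const observable = await bw.getFeatureStream('chr1', 0, 50000, { scale: 1 })
      observable.subscribe({
        next: batch => console.log('batch of', batch.length, 'features'),
        error: err => console.error(err),
        complete: () => console.log('region fully read'),
      })
    }

    streamed().catch(console.error)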