@gmod/bbi 1.0.33 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -3
- package/dist/bbi.d.ts +2 -2
- package/dist/bbi.js +56 -59
- package/dist/bbi.js.map +1 -0
- package/dist/bigbed.d.ts +1 -2
- package/dist/bigbed.js +23 -20
- package/dist/bigbed.js.map +1 -0
- package/dist/bigwig.d.ts +1 -3
- package/dist/bigwig.js +5 -8
- package/dist/bigwig.js.map +1 -0
- package/dist/blockView.d.ts +8 -9
- package/dist/blockView.js +153 -92
- package/dist/blockView.js.map +1 -0
- package/dist/index.js +1 -0
- package/dist/index.js.map +1 -0
- package/dist/range.js +2 -0
- package/dist/range.js.map +1 -0
- package/dist/unzip-pako.d.ts +1 -1
- package/dist/unzip-pako.js +2 -1
- package/dist/unzip-pako.js.map +1 -0
- package/dist/unzip.js +1 -0
- package/dist/unzip.js.map +1 -0
- package/dist/util.d.ts +11 -1
- package/dist/util.js +10 -4
- package/dist/util.js.map +1 -0
- package/esm/bbi.d.ts +2 -2
- package/esm/bbi.js +62 -67
- package/esm/bbi.js.map +1 -0
- package/esm/bigbed.d.ts +1 -2
- package/esm/bigbed.js +42 -46
- package/esm/bigbed.js.map +1 -0
- package/esm/bigwig.d.ts +1 -3
- package/esm/bigwig.js +7 -14
- package/esm/bigwig.js.map +1 -0
- package/esm/blockView.d.ts +8 -9
- package/esm/blockView.js +166 -116
- package/esm/blockView.js.map +1 -0
- package/esm/index.js +3 -7
- package/esm/index.js.map +1 -0
- package/esm/range.js +3 -4
- package/esm/range.js.map +1 -0
- package/esm/unzip-pako.d.ts +1 -1
- package/esm/unzip-pako.js +4 -7
- package/esm/unzip-pako.js.map +1 -0
- package/esm/unzip.js +3 -5
- package/esm/unzip.js.map +1 -0
- package/esm/util.d.ts +11 -1
- package/esm/util.js +14 -15
- package/esm/util.js.map +1 -0
- package/package.json +13 -13
- package/src/bbi.ts +375 -0
- package/src/bigbed.ts +244 -0
- package/src/bigwig.ts +38 -0
- package/src/blockView.ts +496 -0
- package/src/declare.d.ts +2 -0
- package/src/index.ts +3 -0
- package/src/range.ts +142 -0
- package/src/unzip-pako.ts +5 -0
- package/src/unzip.ts +2 -0
- package/src/util.ts +83 -0
package/src/bigbed.ts
ADDED
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
import { Parser } from 'binary-parser'
|
|
2
|
+
import { Observable, merge } from 'rxjs'
|
|
3
|
+
import { map, reduce } from 'rxjs/operators'
|
|
4
|
+
import AbortablePromiseCache from 'abortable-promise-cache'
|
|
5
|
+
import QuickLRU from 'quick-lru'
|
|
6
|
+
|
|
7
|
+
import { BBI, Feature, RequestOptions } from './bbi'
|
|
8
|
+
import { BlockView } from './blockView'
|
|
9
|
+
|
|
10
|
+
/**
 * Location of a feature's data within the BigBed file, as produced by an
 * extraIndex B+ tree lookup (one leaf record plus the index's field number).
 */
interface Loc {
  // the indexed key (e.g. the feature name) that matched the search
  key: string
  // byte offset of the data block in the file (parsed as uint64)
  offset: bigint
  // byte length of the data block
  length: bigint
  // column number of the extraIndex field that produced the match, if any
  field?: number
}
|
|
16
|
+
|
|
17
|
+
/**
 * One entry from the BigBed extra-index list (see the -extraIndex option of
 * bedToBigBed); each entry points at its own B+ tree in the file.
 */
interface Index {
  // index type code from the file
  type: number
  // number of fields covered by this index
  fieldcount: number
  // byte offset of this index's B+ tree within the file
  offset: number
  // column number of the indexed BED field
  field: number
}
|
|
23
|
+
|
|
24
|
+
export function filterUndef<T>(ts: (T | undefined)[]): T[] {
|
|
25
|
+
return ts.filter((t: T | undefined): t is T => !!t)
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
/**
 * BigBed file reader. Extends the shared BBI base class with support for the
 * BigBed "extra index" feature, which allows features to be looked up by name
 * (or another indexed BED column) through B+ trees embedded in the file.
 */
export class BigBed extends BBI {
  // cache of the parsed extra-index list; maxSize 1 because the index list is
  // a property of the file and identical for every request against it
  public readIndicesCache = new AbortablePromiseCache({
    cache: new QuickLRU({ maxSize: 1 }),
    fill: async (args: any, signal?: AbortSignal) => {
      return this._readIndices({ ...args, signal })
    },
  })

  /*
   * fetch the list of extra indexes, going through the cache
   * @param opts - an AbortSignal (detected via its `aborted` property and
   *   wrapped into { signal }) or a RequestOptions object
   * @return promise for an array of Index structures
   */
  public readIndices(opts: AbortSignal | RequestOptions = {}) {
    const options = 'aborted' in opts ? { signal: opts } : opts
    return this.readIndicesCache.get(
      JSON.stringify(options),
      options,
      options.signal,
    )
  }

  /*
   * retrieve unzoomed view for any scale
   * @param scale - unused
   * @param abortSignal - an optional AbortSignal to kill operation
   * @return promise for a BlockView
   */
  protected async getView(
    _scale: number,
    opts: RequestOptions,
  ): Promise<BlockView> {
    return this.getUnzoomedView(opts)
  }

  /*
   * parse the bigbed extraIndex fields
   * @param abortSignal to abort operation
   * @return a Promise for an array of Index data structure since there can be multiple extraIndexes in a bigbed, see bedToBigBed documentation
   */
  private async _readIndices(opts: RequestOptions) {
    const { extHeaderOffset, isBigEndian } = await this.getHeader(opts)
    // the extension header is a fixed 64-byte region at extHeaderOffset
    const { buffer: data } = await this.bbi.read(
      Buffer.alloc(64),
      0,
      64,
      Number(extHeaderOffset),
    )
    const le = isBigEndian ? 'big' : 'little'
    const ret = new Parser()
      .endianess(le)
      .uint16('size')
      .uint16('count')
      .uint64('offset')
      .parse(data)

    const { count, offset } = ret

    // no extra index is defined if count==0
    if (count === 0) {
      return []
    }

    // each extra-index record occupies a fixed 20 bytes; read them all at once
    const blocklen = 20
    const len = blocklen * count
    const { buffer } = await this.bbi.read(
      Buffer.alloc(len),
      0,
      len,
      Number(offset),
    )
    const extParser = new Parser()
      .endianess(le)
      .int16('type')
      .int16('fieldcount')
      .uint64('offset')
      .skip(4) // reserved bytes
      .int16('field')
    const indices = [] as Index[]

    for (let i = 0; i < count; i += 1) {
      indices.push(extParser.parse(buffer.subarray(i * blocklen)))
    }
    return indices
  }

  /*
   * perform a search in the bigbed extraIndex to find which blocks in the bigbed data to look for the
   * actual feature data
   *
   * @param name - the name to search for
   * @param opts - a SearchOptions argument with optional signal
   * @return a Promise for an array of bigbed block Loc entries
   */
  private async searchExtraIndexBlocks(
    name: string,
    opts: RequestOptions = {},
  ): Promise<Loc[]> {
    const { isBigEndian } = await this.getHeader(opts)
    const indices = await this.readIndices(opts)
    if (!indices.length) {
      return []
    }
    // search every extra index in parallel; each index is an independent
    // B+ tree rooted at its own file offset
    const locs = indices.map(async (index: any): Promise<Loc | undefined> => {
      const { offset, field } = index
      // read the 32-byte B+ tree header
      const { buffer: data } = await this.bbi.read(
        Buffer.alloc(32),
        0,
        32,
        Number(offset),
        opts,
      )
      const le = isBigEndian ? 'big' : 'little'
      const p = new Parser()
        .endianess(le)
        .int32('magic')
        .int32('blockSize')
        .int32('keySize')
        .int32('valSize')
        .uint64('itemCount')

      const { blockSize, keySize, valSize } = p.parse(data)
      // console.log({blockSize,keySize,valSize})
      // a node is either internal (nodeType 0: child keys + child offsets)
      // or a leaf (nodeType 1: key/offset/length records pointing at
      // feature data blocks)
      const bpt = new Parser()
        .endianess(le)
        .int8('nodeType')
        .skip(1) // reserved byte
        .int16('cnt')
        .choice({
          tag: 'nodeType',
          choices: {
            0: new Parser().array('leafkeys', {
              length: 'cnt',
              type: new Parser()
                .endianess(le)
                .string('key', { length: keySize, stripNull: true })
                .uint64('offset'),
            }),
            1: new Parser().array('keys', {
              length: 'cnt',
              type: new Parser()
                .endianess(le)
                .string('key', { length: keySize, stripNull: true })
                .uint64('offset')
                .uint32('length')
                .uint32('reserved'),
            }),
          },
        })

      // recursively descend the tree from a node at the given file offset
      const bptReadNode = async (
        nodeOffset: number,
      ): Promise<Loc | undefined> => {
        const val = Number(nodeOffset)
        // node size: 4-byte node header plus blockSize fixed-width records
        const len = 4 + blockSize * (keySize + valSize)
        const { buffer } = await this.bbi.read(
          Buffer.alloc(len),
          0,
          len,
          val,
          opts,
        )
        const node = bpt.parse(buffer)
        if (node.leafkeys) {
          // internal node: descend into the last child whose key does not
          // sort after `name`
          let lastOffset
          for (let i = 0; i < node.leafkeys.length; i += 1) {
            const { key } = node.leafkeys[i]
            if (name.localeCompare(key) < 0 && lastOffset) {
              return bptReadNode(lastOffset)
            }
            lastOffset = node.leafkeys[i].offset
          }
          return bptReadNode(lastOffset)
        }
        // leaf node: linear scan for an exact key match
        for (let i = 0; i < node.keys.length; i += 1) {
          if (node.keys[i].key === name) {
            // annotate the record with which indexed field matched
            return { ...node.keys[i], field }
          }
        }

        return undefined
      }
      // the root node immediately follows the 32-byte B+ tree header
      const rootNodeOffset = 32
      return bptReadNode(Number(offset) + rootNodeOffset)
    })
    return filterUndef(await Promise.all(locs))
  }

  /*
   * retrieve the features from the bigbed data that were found through the lookup of the extraIndex
   * note that there can be multiple extraIndex, see the BigBed specification and the -extraIndex argument to bedToBigBed
   *
   * @param name - the name to search for
   * @param opts - a SearchOptions argument with optional signal
   * @return a Promise for an array of Feature
   */
  public async searchExtraIndex(
    name: string,
    opts: RequestOptions = {},
  ): Promise<Feature[]> {
    const blocks = await this.searchExtraIndexBlocks(name, opts)
    if (!blocks.length) {
      return []
    }
    const view = await this.getUnzoomedView(opts)
    // read each matched block as a stream of features, concatenate the
    // emissions, and tag every feature with the index field that matched
    const res = blocks.map(block => {
      return new Observable<Feature[]>(observer => {
        view.readFeatures(observer, [block], opts)
      }).pipe(
        reduce((acc, curr) => acc.concat(curr)),
        map(x => {
          for (let i = 0; i < x.length; i += 1) {
            x[i].field = block.field
          }
          return x
        }),
      )
    })
    const ret = await merge(...res).toPromise()
    // a data block can contain features other than the one searched for, so
    // re-check the indexed field against `name`; the -3 apparently accounts
    // for the three fixed BED columns not present in `rest` — NOTE(review):
    // confirm against the BigBed spec
    return ret.filter(f => f.rest?.split('\t')[(f.field || 0) - 3] === name)
  }
}
|
package/src/bigwig.ts
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { BlockView } from './blockView'
|
|
2
|
+
import { BBI, RequestOptions } from './bbi'
|
|
3
|
+
|
|
4
|
+
export class BigWig extends BBI {
|
|
5
|
+
/**
|
|
6
|
+
* Retrieves a BlockView of a specific zoomLevel
|
|
7
|
+
*
|
|
8
|
+
* @param scale - number
|
|
9
|
+
* @param opts - An object containing basesPerSpan (e.g. pixels per basepair) or scale used to infer the zoomLevel to use
|
|
10
|
+
*/
|
|
11
|
+
protected async getView(scale: number, opts: RequestOptions) {
|
|
12
|
+
const { zoomLevels, refsByName, fileSize, isBigEndian, uncompressBufSize } =
|
|
13
|
+
await this.getHeader(opts)
|
|
14
|
+
const basesPerPx = 1 / scale
|
|
15
|
+
let maxLevel = zoomLevels.length
|
|
16
|
+
if (!fileSize) {
|
|
17
|
+
// if we don't know the file size, we can't fetch the highest zoom level :-(
|
|
18
|
+
maxLevel -= 1
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
for (let i = maxLevel; i >= 0; i -= 1) {
|
|
22
|
+
const zh = zoomLevels[i]
|
|
23
|
+
if (zh && zh.reductionLevel <= 2 * basesPerPx) {
|
|
24
|
+
const indexOffset = Number(zh.indexOffset)
|
|
25
|
+
|
|
26
|
+
return new BlockView(
|
|
27
|
+
this.bbi,
|
|
28
|
+
refsByName,
|
|
29
|
+
indexOffset,
|
|
30
|
+
isBigEndian,
|
|
31
|
+
uncompressBufSize > 0,
|
|
32
|
+
'summary',
|
|
33
|
+
)
|
|
34
|
+
}
|
|
35
|
+
}
|
|
36
|
+
return this.getUnzoomedView(opts)
|
|
37
|
+
}
|
|
38
|
+
}
|