@gmod/bbi 1.0.35 → 2.0.0

This diff shows the changes between two publicly released versions of this package as they appear in their public registry, and is provided for informational purposes only.
package/src/bbi.ts CHANGED
@@ -1,9 +1,7 @@
-import { Parser } from '@gmod/binary-parser'
+import { Parser } from 'binary-parser'
 import { LocalFile, RemoteFile, GenericFilehandle } from 'generic-filehandle'
 import { Observable, Observer } from 'rxjs'
 import { reduce } from 'rxjs/operators'
-import AbortablePromiseCache from 'abortable-promise-cache'
-import QuickLRU from 'quick-lru'
 import { BlockView } from './blockView'
 
 const BIG_WIG_MAGIC = -2003829722
@@ -53,7 +51,7 @@ export interface Header {
  * @param isBE - is big endian, typically false
  * @return an object with compiled parsers
  */
-function getParsers(isBE: boolean): any {
+function getParsers(isBE: boolean) {
   const le = isBE ? 'big' : 'little'
   const headerParser = new Parser()
     .endianess(le)
@@ -72,6 +70,7 @@ function getParsers(isBE: boolean): any {
     .array('zoomLevels', {
       length: 'numZoomLevels',
       type: new Parser()
+        .endianess(le)
         .uint32('reductionLevel')
         .uint32('reserved')
         .uint64('dataOffset')
@@ -81,10 +80,10 @@ function getParsers(isBE: boolean): any {
   const totalSummaryParser = new Parser()
     .endianess(le)
     .uint64('basesCovered')
-    .double('scoreMin')
-    .double('scoreMax')
-    .double('scoreSum')
-    .double('scoreSumSquares')
+    .doublele('scoreMin')
+    .doublele('scoreMax')
+    .doublele('scoreSum')
+    .doublele('scoreSumSquares')
 
   const chromTreeParser = new Parser()
     .endianess(le)
@@ -99,6 +98,7 @@ function getParsers(isBE: boolean): any {
     .uint8('isLeafNode')
     .skip(1)
     .uint16('cnt')
+    .saveOffset('offset')
 
   return {
     chromTreeParser,
@@ -117,12 +117,7 @@ export interface RequestOptions {
 export abstract class BBI {
   protected bbi: GenericFilehandle
 
-  protected headerCache = new AbortablePromiseCache({
-    cache: new QuickLRU({ maxSize: 1 }),
-    fill: async (params: any, signal?: AbortSignal) => {
-      return this._getHeader({ ...params, signal })
-    },
-  })
+  private headerP?: Promise<Header>
 
   protected renameRefSeqs: (a: string) => string
 
@@ -131,12 +126,14 @@ export abstract class BBI {
    * @return a Header object
    */
   public getHeader(opts: RequestOptions | AbortSignal = {}) {
-    const options = 'aborted' in opts ? { signal: opts } : opts
-    return this.headerCache.get(
-      JSON.stringify(options),
-      options,
-      options.signal,
-    )
+    const options = 'aborted' in opts ? { signal: opts as AbortSignal } : opts
+    if (!this.headerP) {
+      this.headerP = this._getHeader(options).catch(e => {
+        this.headerP = undefined
+        throw e
+      })
+    }
+    return this.headerP
   }
 
   /*
@@ -153,8 +150,8 @@ export abstract class BBI {
       renameRefSeqs?: (a: string) => string
     } = {},
   ) {
-    const { filehandle, renameRefSeqs, path, url } = options
-    this.renameRefSeqs = renameRefSeqs || ((s: string): string => s)
+    const { filehandle, renameRefSeqs = s => s, path, url } = options
+    this.renameRefSeqs = renameRefSeqs
     if (filehandle) {
       this.bbi = filehandle
     } else if (url) {
@@ -185,25 +182,24 @@ export abstract class BBI {
     )
     const isBigEndian = this._isBigEndian(buffer)
     const ret = getParsers(isBigEndian)
-    const header = ret.headerParser.parse(buffer).result
-    header.fileType = header.magic === BIG_BED_MAGIC ? 'bigbed' : 'bigwig'
-    if (
-      header.asOffset > requestSize ||
-      header.totalSummaryOffset > requestSize
-    ) {
+    const header = ret.headerParser.parse(buffer)
+    const { magic, asOffset, totalSummaryOffset } = header
+    header.fileType = magic === BIG_BED_MAGIC ? 'bigbed' : 'bigwig'
+    if (asOffset > requestSize || totalSummaryOffset > requestSize) {
       return this._getMainHeader(opts, requestSize * 2)
     }
-    if (header.asOffset) {
+    if (asOffset) {
+      const off = Number(header.asOffset)
       header.autoSql = buffer
-        .slice(header.asOffset, buffer.indexOf(0, header.asOffset))
+        .subarray(off, buffer.indexOf(0, off))
         .toString('utf8')
     }
     if (header.totalSummaryOffset > requestSize) {
       return this._getMainHeader(opts, requestSize * 2)
     }
     if (header.totalSummaryOffset) {
-      const tail = buffer.slice(header.totalSummaryOffset)
-      header.totalSummary = ret.totalSummaryParser.parse(tail).result
+      const tail = buffer.subarray(Number(header.totalSummaryOffset))
+      header.totalSummary = ret.totalSummaryParser.parse(tail)
     }
     return { ...header, isBigEndian }
   }
@@ -228,46 +224,48 @@ export abstract class BBI {
       [key: number]: { name: string; id: number; length: number }
     } = []
     const refsByName: { [key: string]: number } = {}
-    const { chromTreeOffset } = header
-    let { unzoomedDataOffset } = header
 
+    let unzoomedDataOffset = Number(header.unzoomedDataOffset)
+    const chromTreeOffset = Number(header.chromTreeOffset)
     while (unzoomedDataOffset % 4 !== 0) {
       unzoomedDataOffset += 1
     }
-
-    const { buffer: data } = await this.bbi.read(
-      Buffer.alloc(unzoomedDataOffset - chromTreeOffset),
+    const off = unzoomedDataOffset - chromTreeOffset
+    const { buffer } = await this.bbi.read(
+      Buffer.alloc(off),
       0,
-      unzoomedDataOffset - chromTreeOffset,
-      chromTreeOffset,
+      off,
+      Number(chromTreeOffset),
       opts,
     )
 
     const p = getParsers(isBE)
-    const { keySize } = p.chromTreeParser.parse(data).result
+    const { keySize } = p.chromTreeParser.parse(buffer)
     const leafNodeParser = new Parser()
       .endianess(le)
       .string('key', { stripNull: true, length: keySize })
       .uint32('refId')
       .uint32('refSize')
+      .saveOffset('offset')
     const nonleafNodeParser = new Parser()
       .endianess(le)
       .skip(keySize)
      .uint64('childOffset')
+      .saveOffset('offset')
     const rootNodeOffset = 32
-    const bptReadNode = async (currentOffset: number): Promise<void> => {
+    const bptReadNode = async (currentOffset: number) => {
       let offset = currentOffset
-      if (offset >= data.length) {
+      if (offset >= buffer.length) {
         throw new Error('reading beyond end of buffer')
       }
-      const ret = p.isLeafNode.parse(data.slice(offset))
-      const { isLeafNode, cnt } = ret.result
+      const ret = p.isLeafNode.parse(buffer.subarray(offset))
+      const { isLeafNode, cnt } = ret
       offset += ret.offset
       if (isLeafNode) {
         for (let n = 0; n < cnt; n += 1) {
-          const leafRet = leafNodeParser.parse(data.slice(offset))
+          const leafRet = leafNodeParser.parse(buffer.subarray(offset))
           offset += leafRet.offset
-          const { key, refId, refSize } = leafRet.result
+          const { key, refId, refSize } = leafRet
           const refRec = { name: key, id: refId, length: refSize }
           refsByName[this.renameRefSeqs(key)] = refId
           refsByNumber[refId] = refRec
@@ -276,11 +274,12 @@ export abstract class BBI {
         // parse index node
         const nextNodes = []
         for (let n = 0; n < cnt; n += 1) {
-          const nonleafRet = nonleafNodeParser.parse(data.slice(offset))
-          let { childOffset } = nonleafRet.result
+          const nonleafRet = nonleafNodeParser.parse(buffer.subarray(offset))
+          const { childOffset } = nonleafRet
           offset += nonleafRet.offset
-          childOffset -= chromTreeOffset
-          nextNodes.push(bptReadNode(childOffset))
+          nextNodes.push(
+            bptReadNode(Number(childOffset) - Number(chromTreeOffset)),
+          )
         }
         await Promise.all(nextNodes)
       }
@@ -299,19 +298,15 @@ export abstract class BBI {
   protected async getUnzoomedView(opts: RequestOptions): Promise<BlockView> {
     const {
       unzoomedIndexOffset,
-      zoomLevels,
       refsByName,
       uncompressBufSize,
       isBigEndian,
       fileType,
     } = await this.getHeader(opts)
-    const nzl = zoomLevels[0]
-    const cirLen = nzl ? nzl.dataOffset - unzoomedIndexOffset : 4000
     return new BlockView(
       this.bbi,
       refsByName,
       unzoomedIndexOffset,
-      cirLen,
       isBigEndian,
       uncompressBufSize > 0,
       fileType,
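
The largest structural change in bbi.ts is the header cache: the AbortablePromiseCache/QuickLRU pair is replaced by a single memoized promise (headerP) that is cleared again if the initial fetch rejects. Below is a minimal standalone sketch of that pattern; the header shape and the _getHeader body are simplified placeholders, not the package's real implementation.

// Sketch of the promise-memoization pattern used by the new getHeader():
// the first caller starts _getHeader() and every later caller shares the
// same pending promise; a rejection clears the slot so the next call retries.
interface HeaderSketch {
  magic: number
  fileType: string
}

class HeaderCacheSketch {
  private headerP?: Promise<HeaderSketch>

  public getHeader(opts: { signal?: AbortSignal } = {}) {
    if (!this.headerP) {
      this.headerP = this._getHeader(opts).catch(e => {
        this.headerP = undefined // allow a retry after failure
        throw e
      })
    }
    return this.headerP
  }

  private async _getHeader(_opts: { signal?: AbortSignal }): Promise<HeaderSketch> {
    return { magic: -2003829722, fileType: 'bigwig' } // placeholder parse
  }
}

One consequence of the change: the old cache was keyed on JSON.stringify(options), while the new code fetches the header once per instance, and every caller, whatever its options or abort signal, shares that single promise.
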
package/src/bigbed.ts CHANGED
@@ -1,5 +1,5 @@
-import { Parser } from '@gmod/binary-parser'
-import { Observable, Observer, merge } from 'rxjs'
+import { Parser } from 'binary-parser'
+import { Observable, merge } from 'rxjs'
 import { map, reduce } from 'rxjs/operators'
 import AbortablePromiseCache from 'abortable-promise-cache'
 import QuickLRU from 'quick-lru'
@@ -9,8 +9,8 @@ import { BlockView } from './blockView'
 
 interface Loc {
   key: string
-  offset: number
-  length: number
+  offset: bigint
+  length: bigint
   field?: number
 }
 
@@ -33,10 +33,6 @@ export class BigBed extends BBI {
     },
   })
 
-  public constructor(opts?: any) {
-    super(opts)
-  }
-
   public readIndices(opts: AbortSignal | RequestOptions = {}) {
     const options = 'aborted' in opts ? { signal: opts } : opts
     return this.readIndicesCache.get(
@@ -53,7 +49,7 @@ export class BigBed extends BBI {
    * @return promise for a BlockView
    */
   protected async getView(
-    scale: number,
+    _scale: number,
     opts: RequestOptions,
   ): Promise<BlockView> {
     return this.getUnzoomedView(opts)
@@ -64,13 +60,13 @@ export class BigBed extends BBI {
    * @param abortSignal to abort operation
    * @return a Promise for an array of Index data structure since there can be multiple extraIndexes in a bigbed, see bedToBigBed documentation
    */
-  private async _readIndices(opts: RequestOptions): Promise<Index[]> {
+  private async _readIndices(opts: RequestOptions) {
     const { extHeaderOffset, isBigEndian } = await this.getHeader(opts)
     const { buffer: data } = await this.bbi.read(
       Buffer.alloc(64),
       0,
       64,
-      extHeaderOffset,
+      Number(extHeaderOffset),
     )
     const le = isBigEndian ? 'big' : 'little'
     const ret = new Parser()
@@ -78,7 +74,8 @@ export class BigBed extends BBI {
       .uint16('size')
       .uint16('count')
       .uint64('offset')
-      .parse(data).result
+      .parse(data)
+
     const { count, offset } = ret
 
     // no extra index is defined if count==0
@@ -88,7 +85,12 @@ export class BigBed extends BBI {
 
     const blocklen = 20
     const len = blocklen * count
-    const { buffer } = await this.bbi.read(Buffer.alloc(len), 0, len, offset)
+    const { buffer } = await this.bbi.read(
+      Buffer.alloc(len),
+      0,
+      len,
+      Number(offset),
+    )
     const extParser = new Parser()
       .endianess(le)
       .int16('type')
@@ -96,10 +98,10 @@ export class BigBed extends BBI {
       .uint64('offset')
       .skip(4)
       .int16('field')
-    const indices = []
+    const indices = [] as Index[]
 
     for (let i = 0; i < count; i += 1) {
-      indices.push(extParser.parse(buffer.slice(i * blocklen)).result)
+      indices.push(extParser.parse(buffer.subarray(i * blocklen)))
     }
     return indices
   }
@@ -127,20 +129,22 @@ export class BigBed extends BBI {
         Buffer.alloc(32),
         0,
         32,
-        offset,
+        Number(offset),
         opts,
       )
+      const le = isBigEndian ? 'big' : 'little'
       const p = new Parser()
-        .endianess(isBigEndian ? 'big' : 'little')
+        .endianess(le)
         .int32('magic')
         .int32('blockSize')
         .int32('keySize')
         .int32('valSize')
         .uint64('itemCount')
 
-      const { blockSize, keySize, valSize } = p.parse(data).result
+      const { blockSize, keySize, valSize } = p.parse(data)
+      // console.log({blockSize,keySize,valSize})
       const bpt = new Parser()
-        .endianess(isBigEndian ? 'big' : 'little')
+        .endianess(le)
         .int8('nodeType')
         .skip(1)
         .int16('cnt')
@@ -150,12 +154,14 @@ export class BigBed extends BBI {
           0: new Parser().array('leafkeys', {
             length: 'cnt',
             type: new Parser()
+              .endianess(le)
              .string('key', { length: keySize, stripNull: true })
              .uint64('offset'),
           }),
           1: new Parser().array('keys', {
             length: 'cnt',
             type: new Parser()
+              .endianess(le)
              .string('key', { length: keySize, stripNull: true })
              .uint64('offset')
              .uint32('length')
@@ -167,15 +173,16 @@ export class BigBed extends BBI {
       const bptReadNode = async (
         nodeOffset: number,
       ): Promise<Loc | undefined> => {
+        const val = Number(nodeOffset)
         const len = 4 + blockSize * (keySize + valSize)
         const { buffer } = await this.bbi.read(
           Buffer.alloc(len),
           0,
           len,
-          nodeOffset,
+          val,
           opts,
         )
-        const node = bpt.parse(buffer).result
+        const node = bpt.parse(buffer)
         if (node.leafkeys) {
           let lastOffset
           for (let i = 0; i < node.leafkeys.length; i += 1) {
@@ -196,7 +203,7 @@ export class BigBed extends BBI {
         return undefined
       }
       const rootNodeOffset = 32
-      return bptReadNode(offset + rootNodeOffset)
+      return bptReadNode(Number(offset) + rootNodeOffset)
     })
     return filterUndef(await Promise.all(locs))
   }
@@ -219,7 +226,7 @@ export class BigBed extends BBI {
     }
     const view = await this.getUnzoomedView(opts)
     const res = blocks.map(block => {
-      return new Observable((observer: Observer<Feature[]>) => {
+      return new Observable<Feature[]>(observer => {
         view.readFeatures(observer, [block], opts)
       }).pipe(
         reduce((acc, curr) => acc.concat(curr)),
@@ -232,8 +239,6 @@ export class BigBed extends BBI {
       )
     })
     const ret = await merge(...res).toPromise()
-    return ret.filter((f: any) => {
-      return f.rest.split('\t')[f.field - 3] === name
-    })
+    return ret.filter(f => f.rest?.split('\t')[(f.field || 0) - 3] === name)
   }
 }
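
The bigbed.ts changes follow the same migration seen in bbi.ts: the parser import moves from @gmod/binary-parser to binary-parser, whose parse() returns the parsed record directly rather than a { result, offset } wrapper, and whose uint64 fields come back as BigInt, which is why Loc.offset/length become bigint and file offsets are wrapped in Number() before being passed to bbi.read(). Buffer.slice() calls are likewise replaced with subarray(), the non-deprecated equivalent that also returns a view without copying. The sketch below illustrates the parse-result and BigInt points with a made-up two-field layout; it is not the real bigbed record format.

import { Parser } from 'binary-parser'

// Illustrative layout only: int16 'type' followed by uint64 'offset'.
const demoParser = new Parser()
  .endianess('little')
  .int16('type')
  .uint64('offset') // parsed as BigInt by binary-parser

const buf = Buffer.alloc(10)
buf.writeInt16LE(3, 0)
buf.writeBigUInt64LE(12345n, 2)

const rec = demoParser.parse(buf) // record returned directly, no `.result`
const byteOffset = Number(rec.offset) // convert BigInt before arithmetic or read() calls
console.log(rec.type, byteOffset)
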
package/src/bigwig.ts CHANGED
@@ -5,15 +5,10 @@ export class BigWig extends BBI {
   /**
    * Retrieves a BlockView of a specific zoomLevel
    *
-   * @param refName - The chromosome name
-   * @param start - The start of a region
-   * @param end - The end of a region
+   * @param scale - number
    * @param opts - An object containing basesPerSpan (e.g. pixels per basepair) or scale used to infer the zoomLevel to use
    */
-  protected async getView(
-    scale: number,
-    opts: RequestOptions,
-  ): Promise<BlockView> {
+  protected async getView(scale: number, opts: RequestOptions) {
     const { zoomLevels, refsByName, fileSize, isBigEndian, uncompressBufSize } =
       await this.getHeader(opts)
     const basesPerPx = 1 / scale
@@ -26,15 +21,12 @@ export class BigWig extends BBI {
     for (let i = maxLevel; i >= 0; i -= 1) {
       const zh = zoomLevels[i]
       if (zh && zh.reductionLevel <= 2 * basesPerPx) {
-        const indexLength =
-          i < zoomLevels.length - 1
-            ? zoomLevels[i + 1].dataOffset - zh.indexOffset
-            : fileSize - 4 - zh.indexOffset
+        const indexOffset = Number(zh.indexOffset)
+
         return new BlockView(
           this.bbi,
           refsByName,
-          zh.indexOffset,
-          indexLength,
+          indexOffset,
           isBigEndian,
           uncompressBufSize > 0,
           'summary',
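
For consumers, the main caller-visible effect of these changes is that 64-bit header fields (for example unzoomedDataOffset or chromTreeOffset) are now BigInt rather than number, so downstream arithmetic needs a Number() conversion just as the library code above does. A small usage sketch follows; it assumes the public BigWig constructor options (path/url/filehandle, as seen in the bbi.ts constructor above) and the getHeader()/getFeatures() methods keep the surface they have in 1.x.

import { BigWig } from '@gmod/bbi'

async function peek(path: string) {
  const file = new BigWig({ path })
  const header = await file.getHeader()
  // 64-bit offsets are BigInt in 2.0; convert before using them as numbers
  console.log(header.fileType, Number(header.unzoomedDataOffset))
  const feats = await file.getFeatures('chr1', 0, 1000, { scale: 1 })
  console.log(feats.length)
}
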