@gmod/bbi 4.0.5 → 5.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/CHANGELOG.md +9 -0
  2. package/dist/bbi.d.ts +20 -4
  3. package/dist/bbi.js +124 -104
  4. package/dist/bbi.js.map +1 -1
  5. package/dist/bigbed.d.ts +1 -1
  6. package/dist/bigbed.js +91 -72
  7. package/dist/bigbed.js.map +1 -1
  8. package/dist/bigint-polyfill/polyfill.js +0 -10
  9. package/dist/bigint-polyfill/polyfill.js.map +1 -1
  10. package/dist/bigint-polyfill/pure.d.ts +0 -2
  11. package/dist/bigint-polyfill/pure.js +2 -29
  12. package/dist/bigint-polyfill/pure.js.map +1 -1
  13. package/dist/bigwig.js +3 -8
  14. package/dist/bigwig.js.map +1 -1
  15. package/dist/block-view.d.ts +4 -6
  16. package/dist/block-view.js +123 -132
  17. package/dist/block-view.js.map +1 -1
  18. package/dist/range.js +1 -1
  19. package/dist/range.js.map +1 -1
  20. package/dist/unzip-pako.d.ts +0 -1
  21. package/dist/unzip-pako.js +1 -2
  22. package/dist/unzip-pako.js.map +1 -1
  23. package/dist/unzip.d.ts +0 -1
  24. package/dist/util.d.ts +12 -14
  25. package/dist/util.js +12 -18
  26. package/dist/util.js.map +1 -1
  27. package/esm/bbi.d.ts +20 -4
  28. package/esm/bbi.js +124 -104
  29. package/esm/bbi.js.map +1 -1
  30. package/esm/bigbed.d.ts +1 -1
  31. package/esm/bigbed.js +89 -70
  32. package/esm/bigbed.js.map +1 -1
  33. package/esm/bigint-polyfill/polyfill.js +1 -11
  34. package/esm/bigint-polyfill/polyfill.js.map +1 -1
  35. package/esm/bigint-polyfill/pure.d.ts +0 -2
  36. package/esm/bigint-polyfill/pure.js +0 -24
  37. package/esm/bigint-polyfill/pure.js.map +1 -1
  38. package/esm/bigwig.js +3 -8
  39. package/esm/bigwig.js.map +1 -1
  40. package/esm/block-view.d.ts +4 -6
  41. package/esm/block-view.js +123 -132
  42. package/esm/block-view.js.map +1 -1
  43. package/esm/range.js +1 -1
  44. package/esm/range.js.map +1 -1
  45. package/esm/unzip-pako.d.ts +0 -1
  46. package/esm/unzip.d.ts +0 -1
  47. package/esm/util.d.ts +12 -14
  48. package/esm/util.js +8 -14
  49. package/esm/util.js.map +1 -1
  50. package/package.json +6 -7
  51. package/src/bbi.ts +151 -115
  52. package/src/bigbed.ts +100 -81
  53. package/src/bigint-polyfill/polyfill.ts +1 -13
  54. package/src/bigint-polyfill/pure.ts +0 -36
  55. package/src/bigwig.ts +3 -9
  56. package/src/block-view.ts +134 -169
  57. package/src/range.ts +1 -1
  58. package/src/util.ts +16 -21
package/src/bigbed.ts CHANGED
@@ -1,16 +1,16 @@
1
1
  import { Buffer } from 'buffer'
2
- import { Parser } from 'binary-parser'
3
2
  import { Observable, merge, firstValueFrom } from 'rxjs'
4
3
  import { map, reduce } from 'rxjs/operators'
5
- import AbortablePromiseCache from 'abortable-promise-cache'
4
+ import AbortablePromiseCache from '@gmod/abortable-promise-cache'
6
5
  import QuickLRU from 'quick-lru'
7
6
 
7
+ // locals
8
8
  import { BBI, Feature, RequestOptions } from './bbi'
9
9
 
10
10
  interface Loc {
11
11
  key: string
12
- offset: bigint
13
- length: bigint
12
+ offset: number
13
+ length: number
14
14
  field?: number
15
15
  }
16
16
 
@@ -46,8 +46,10 @@ export class BigBed extends BBI {
46
46
 
47
47
  /*
48
48
  * parse the bigbed extraIndex fields
49
- * @param abortSignal to abort operation
50
- * @return a Promise for an array of Index data structure since there can be multiple extraIndexes in a bigbed, see bedToBigBed documentation
49
+ *
50
+ *
51
+ * @return a Promise for an array of Index data structure since there can be
52
+ * multiple extraIndexes in a bigbed, see bedToBigBed documentation
51
53
  */
52
54
  private async _readIndices(opts: RequestOptions) {
53
55
  const { extHeaderOffset, isBigEndian } = await this.getHeader(opts)
@@ -57,15 +59,17 @@ export class BigBed extends BBI {
57
59
  64,
58
60
  Number(extHeaderOffset),
59
61
  )
60
- const le = isBigEndian ? 'big' : 'little'
61
- const ret = new Parser()
62
- .endianess(le)
63
- .uint16('size')
64
- .uint16('count')
65
- .uint64('offset')
66
- .parse(data)
67
-
68
- const { count, offset } = ret
62
+ const le = !isBigEndian
63
+
64
+ const b = data
65
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
66
+ let offset = 0
67
+ // const _size = dataView.getUint16(offset, le)
68
+ offset += 2
69
+ const count = dataView.getUint16(offset, le)
70
+ offset += 2
71
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
72
+ offset += 8
69
73
 
70
74
  // no extra index is defined if count==0
71
75
  if (count === 0) {
@@ -78,26 +82,30 @@ export class BigBed extends BBI {
78
82
  Buffer.alloc(len),
79
83
  0,
80
84
  len,
81
- Number(offset),
85
+ Number(dataOffset),
82
86
  )
83
- const extParser = new Parser()
84
- .endianess(le)
85
- .int16('type')
86
- .int16('fieldcount')
87
- .uint64('offset')
88
- .skip(4)
89
- .int16('field')
87
+
90
88
  const indices = [] as Index[]
91
89
 
92
90
  for (let i = 0; i < count; i += 1) {
93
- indices.push(extParser.parse(buffer.subarray(i * blocklen)))
91
+ const b = buffer.subarray(i * blocklen)
92
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
93
+ let offset = 0
94
+ const type = dataView.getInt16(offset, le)
95
+ offset += 2
96
+ const fieldcount = dataView.getInt16(offset, le)
97
+ offset += 2
98
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
99
+ offset += 8 + 4 //4 skip
100
+ const field = dataView.getInt16(offset, le)
101
+ indices.push({ type, fieldcount, offset: Number(dataOffset), field })
94
102
  }
95
103
  return indices
96
104
  }
97
105
 
98
106
  /*
99
- * perform a search in the bigbed extraIndex to find which blocks in the bigbed data to look for the
100
- * actual feature data
107
+ * perform a search in the bigbed extraIndex to find which blocks in the
108
+ * bigbed data to look for the actual feature data
101
109
  *
102
110
  * @param name - the name to search for
103
111
  * @param opts - a SearchOptions argument with optional signal
@@ -112,56 +120,32 @@ export class BigBed extends BBI {
112
120
  if (indices.length === 0) {
113
121
  return []
114
122
  }
115
- const locs = indices.map(async (index: any): Promise<Loc | undefined> => {
116
- const { offset, field } = index
123
+ const locs = indices.map(async (index): Promise<Loc | undefined> => {
124
+ const { offset: offset2, field } = index
117
125
  const { buffer: data } = await this.bbi.read(
118
126
  Buffer.alloc(32),
119
127
  0,
120
128
  32,
121
- Number(offset),
129
+ offset2,
122
130
  opts,
123
131
  )
124
- const le = isBigEndian ? 'big' : 'little'
125
- const p = new Parser()
126
- .endianess(le)
127
- .int32('magic')
128
- .int32('blockSize')
129
- .int32('keySize')
130
- .int32('valSize')
131
- .uint64('itemCount')
132
-
133
- const { blockSize, keySize, valSize } = p.parse(data)
134
- // console.log({blockSize,keySize,valSize})
135
- const bpt = new Parser()
136
- .endianess(le)
137
- .int8('nodeType')
138
- .skip(1)
139
- .int16('cnt')
140
- .choice({
141
- tag: 'nodeType',
142
- choices: {
143
- 0: new Parser().array('leafkeys', {
144
- length: 'cnt',
145
- type: new Parser()
146
- .endianess(le)
147
- .string('key', { length: keySize, stripNull: true })
148
- .uint64('offset'),
149
- }),
150
- 1: new Parser().array('keys', {
151
- length: 'cnt',
152
- type: new Parser()
153
- .endianess(le)
154
- .string('key', { length: keySize, stripNull: true })
155
- .uint64('offset')
156
- .uint32('length')
157
- .uint32('reserved'),
158
- }),
159
- },
160
- })
161
-
162
- const bptReadNode = async (
163
- nodeOffset: number,
164
- ): Promise<Loc | undefined> => {
132
+ const le = !isBigEndian
133
+ const b = data
134
+
135
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
136
+ let offset = 0
137
+ // const _magic = dataView.getInt32(offset, le)
138
+ offset += 4
139
+ const blockSize = dataView.getInt32(offset, le)
140
+ offset += 4
141
+ const keySize = dataView.getInt32(offset, le)
142
+ offset += 4
143
+ const valSize = dataView.getInt32(offset, le)
144
+ offset += 4
145
+ // const _itemCount = Number(dataView.getBigUint64(offset, le))
146
+ offset += 8
147
+
148
+ const bptReadNode = async (nodeOffset: number) => {
165
149
  const val = Number(nodeOffset)
166
150
  const len = 4 + blockSize * (keySize + valSize)
167
151
  const { buffer } = await this.bbi.read(
@@ -171,27 +155,62 @@ export class BigBed extends BBI {
171
155
  val,
172
156
  opts,
173
157
  )
174
- const node = bpt.parse(buffer)
175
- if (node.leafkeys) {
176
- let lastOffset
177
- for (const { key, offset } of node.leafkeys) {
158
+ const b = buffer
159
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
160
+ let offset = 0
161
+ const nodeType = dataView.getInt8(offset)
162
+ offset += 2 //skip 1
163
+ const cnt = dataView.getInt16(offset, le)
164
+ offset += 2
165
+ const keys = []
166
+ if (nodeType === 0) {
167
+ const leafkeys = []
168
+ for (let i = 0; i < cnt; i++) {
169
+ const key = b
170
+ .subarray(offset, offset + keySize)
171
+ .toString()
172
+ .replaceAll('\0', '')
173
+ offset += keySize
174
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
175
+ offset += 8
176
+ leafkeys.push({ key, offset: dataOffset })
177
+ }
178
+
179
+ let lastOffset = 0
180
+ for (const { key, offset } of leafkeys) {
178
181
  if (name.localeCompare(key) < 0 && lastOffset) {
179
182
  return bptReadNode(lastOffset)
180
183
  }
181
184
  lastOffset = offset
182
185
  }
183
186
  return bptReadNode(lastOffset)
184
- }
185
- for (const n of node.keys) {
186
- if (n.key === name) {
187
- return { ...n, field }
187
+ } else if (nodeType === 1) {
188
+ for (let i = 0; i < cnt; i++) {
189
+ const key = b
190
+ .subarray(offset, offset + keySize)
191
+ .toString()
192
+ .replaceAll('\0', '')
193
+ offset += keySize
194
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
195
+ offset += 8
196
+ const length = dataView.getUint32(offset, le)
197
+ offset += 4
198
+ const reserved = dataView.getUint32(offset, le)
199
+ offset += 4
200
+ keys.push({ key, offset: dataOffset, length, reserved })
201
+ }
202
+
203
+ for (const n of keys) {
204
+ if (n.key === name) {
205
+ return { ...n, field }
206
+ }
188
207
  }
189
- }
190
208
 
191
- return undefined
209
+ return undefined
210
+ }
192
211
  }
193
212
  const rootNodeOffset = 32
194
- return bptReadNode(Number(offset) + rootNodeOffset)
213
+ return bptReadNode(offset2 + rootNodeOffset)
195
214
  })
196
215
  return filterUndef(await Promise.all(locs))
197
216
  }
@@ -1,4 +1,4 @@
1
- import { getBigInt64, getBigUint64, setBigInt64, setBigUint64 } from './pure'
1
+ import { getBigInt64, getBigUint64 } from './pure'
2
2
 
3
3
  if (!('getBigInt64' in DataView)) {
4
4
  DataView.prototype.getBigInt64 = function (byteOffset, littleEndian) {
@@ -11,15 +11,3 @@ if (!('getBigUint64' in DataView)) {
11
11
  return getBigUint64(this, byteOffset, littleEndian)
12
12
  }
13
13
  }
14
-
15
- if (!('setBigInt64' in DataView)) {
16
- DataView.prototype.setBigInt64 = function (byteOffset, value, littleEndian) {
17
- setBigInt64(this, byteOffset, value, littleEndian)
18
- }
19
- }
20
-
21
- if (!('setBigUint64' in DataView)) {
22
- DataView.prototype.setBigUint64 = function (byteOffset, value, littleEndian) {
23
- setBigUint64(this, byteOffset, value, littleEndian)
24
- }
25
- }
@@ -40,39 +40,3 @@ export function getBigUint64(
40
40
  BigInt(a * littleEndianMask + b * bigEndianMask)
41
41
  )
42
42
  }
43
-
44
- export function setBigInt64(
45
- dataView: DataView,
46
- byteOffset: number,
47
- value: bigint,
48
- littleEndian: boolean | undefined,
49
- ) {
50
- const hi = Number(value >> BigInt32)
51
- const lo = Number(value & BigInt(0xffffffff))
52
-
53
- if (littleEndian) {
54
- dataView.setInt32(byteOffset + 4, hi, littleEndian)
55
- dataView.setUint32(byteOffset, lo, littleEndian)
56
- } else {
57
- dataView.setInt32(byteOffset, hi, littleEndian)
58
- dataView.setUint32(byteOffset + 4, lo, littleEndian)
59
- }
60
- }
61
-
62
- export function setBigUint64(
63
- dataView: DataView,
64
- byteOffset: number,
65
- value: bigint,
66
- littleEndian: boolean | undefined,
67
- ) {
68
- const hi = Number(value >> BigInt32)
69
- const lo = Number(value & BigInt(0xffffffff))
70
-
71
- if (littleEndian) {
72
- dataView.setUint32(byteOffset + 4, hi, littleEndian)
73
- dataView.setUint32(byteOffset, lo, littleEndian)
74
- } else {
75
- dataView.setUint32(byteOffset, hi, littleEndian)
76
- dataView.setUint32(byteOffset + 4, lo, littleEndian)
77
- }
78
- }
package/src/bigwig.ts CHANGED
@@ -11,24 +11,18 @@ export class BigWig extends BBI {
11
11
  * or scale used to infer the zoomLevel to use
12
12
  */
13
13
  protected async getView(scale: number, opts: RequestOptions) {
14
- const { zoomLevels, refsByName, fileSize, isBigEndian, uncompressBufSize } =
14
+ const { zoomLevels, refsByName, isBigEndian, uncompressBufSize } =
15
15
  await this.getHeader(opts)
16
16
  const basesPerPx = 1 / scale
17
- let maxLevel = zoomLevels.length
18
- if (!fileSize) {
19
- // if we don't know the file size, we can't fetch the highest zoom level :-(
20
- maxLevel -= 1
21
- }
17
+ const maxLevel = zoomLevels.length - 1
22
18
 
23
19
  for (let i = maxLevel; i >= 0; i -= 1) {
24
20
  const zh = zoomLevels[i]
25
21
  if (zh && zh.reductionLevel <= 2 * basesPerPx) {
26
- const indexOffset = Number(zh.indexOffset)
27
-
28
22
  return new BlockView(
29
23
  this.bbi,
30
24
  refsByName,
31
- indexOffset,
25
+ zh.indexOffset,
32
26
  isBigEndian,
33
27
  uncompressBufSize > 0,
34
28
  'summary',