@gmod/bbi 4.0.6 → 5.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/CHANGELOG.md +10 -1
  2. package/dist/bbi.d.ts +20 -4
  3. package/dist/bbi.js +123 -106
  4. package/dist/bbi.js.map +1 -1
  5. package/dist/bigbed.js +88 -69
  6. package/dist/bigbed.js.map +1 -1
  7. package/dist/bigint-polyfill/polyfill.js +0 -10
  8. package/dist/bigint-polyfill/polyfill.js.map +1 -1
  9. package/dist/bigint-polyfill/pure.d.ts +0 -2
  10. package/dist/bigint-polyfill/pure.js +0 -26
  11. package/dist/bigint-polyfill/pure.js.map +1 -1
  12. package/dist/bigwig.js +3 -8
  13. package/dist/bigwig.js.map +1 -1
  14. package/dist/block-view.d.ts +4 -6
  15. package/dist/block-view.js +122 -131
  16. package/dist/block-view.js.map +1 -1
  17. package/dist/range.js +1 -1
  18. package/dist/range.js.map +1 -1
  19. package/dist/util.d.ts +12 -14
  20. package/dist/util.js +8 -14
  21. package/dist/util.js.map +1 -1
  22. package/esm/bbi.d.ts +20 -4
  23. package/esm/bbi.js +123 -106
  24. package/esm/bbi.js.map +1 -1
  25. package/esm/bigbed.js +88 -69
  26. package/esm/bigbed.js.map +1 -1
  27. package/esm/bigint-polyfill/polyfill.js +1 -11
  28. package/esm/bigint-polyfill/polyfill.js.map +1 -1
  29. package/esm/bigint-polyfill/pure.d.ts +0 -2
  30. package/esm/bigint-polyfill/pure.js +0 -24
  31. package/esm/bigint-polyfill/pure.js.map +1 -1
  32. package/esm/bigwig.js +3 -8
  33. package/esm/bigwig.js.map +1 -1
  34. package/esm/block-view.d.ts +4 -6
  35. package/esm/block-view.js +122 -131
  36. package/esm/block-view.js.map +1 -1
  37. package/esm/range.js +1 -1
  38. package/esm/range.js.map +1 -1
  39. package/esm/util.d.ts +12 -14
  40. package/esm/util.js +8 -14
  41. package/esm/util.js.map +1 -1
  42. package/package.json +4 -5
  43. package/src/bbi.ts +149 -116
  44. package/src/bigbed.ts +99 -80
  45. package/src/bigint-polyfill/polyfill.ts +1 -13
  46. package/src/bigint-polyfill/pure.ts +0 -36
  47. package/src/bigwig.ts +3 -9
  48. package/src/block-view.ts +133 -168
  49. package/src/range.ts +1 -1
  50. package/src/util.ts +16 -21
package/src/bbi.ts CHANGED
@@ -1,5 +1,4 @@
1
1
  import { Buffer } from 'buffer'
2
- import { Parser } from 'binary-parser'
3
2
  import { LocalFile, RemoteFile, GenericFilehandle } from 'generic-filehandle'
4
3
  import { firstValueFrom, Observable } from 'rxjs'
5
4
  import { toArray } from 'rxjs/operators'
@@ -8,14 +7,19 @@ import { BlockView } from './block-view'
8
7
  const BIG_WIG_MAGIC = -2003829722
9
8
  const BIG_BED_MAGIC = -2021002517
10
9
 
11
- function toString(arr: Uint8Array) {
12
- return new TextDecoder().decode(arr)
10
+ interface ZoomLevel {
11
+ reductionLevel: number
12
+ reserved: number
13
+ dataOffset: number
14
+ indexOffset: number
13
15
  }
14
16
 
15
17
  export interface Feature {
18
+ offset?: number
19
+ chromId: number
16
20
  start: number
17
21
  end: number
18
- score: number
22
+ score?: number
19
23
  rest?: string // for bigbed line
20
24
  minScore?: number // for summary line
21
25
  maxScore?: number // for summary line
@@ -27,6 +31,8 @@ interface Statistics {
27
31
  scoreSum: number
28
32
  basesCovered: number
29
33
  scoreSumSquares: number
34
+ scoreMin: number
35
+ scoreMax: number
30
36
  }
31
37
 
32
38
  interface RefInfo {
@@ -34,87 +40,31 @@ interface RefInfo {
34
40
  id: number
35
41
  length: number
36
42
  }
37
- export interface Header {
43
+
44
+ export interface MainHeader {
45
+ magic: number
38
46
  version: number
39
47
  autoSql: string
40
48
  totalSummary: Statistics
41
- zoomLevels: any
49
+ asOffset: number
50
+ zoomLevels: ZoomLevel[]
51
+ fieldCount: number
52
+ numZoomLevels: number
42
53
  unzoomedIndexOffset: number
54
+ totalSummaryOffset: number
43
55
  unzoomedDataOffset: number
44
56
  definedFieldCount: number
45
57
  uncompressBufSize: number
46
58
  chromTreeOffset: number
47
- fileSize: number
48
59
  extHeaderOffset: number
49
60
  isBigEndian: boolean
50
61
  fileType: string
62
+ }
63
+ export interface Header extends MainHeader {
51
64
  refsByName: Record<string, number>
52
65
  refsByNumber: Record<number, RefInfo>
53
66
  }
54
67
 
55
- /**
56
- * get the compiled parsers for different sections of the bigwig file
57
- *
58
- * @param isBE - is big endian, typically false
59
- * @return an object with compiled parsers
60
- */
61
- function getParsers(isBE: boolean) {
62
- const le = isBE ? 'big' : 'little'
63
- const headerParser = new Parser()
64
- .endianess(le)
65
- .int32('magic')
66
- .uint16('version')
67
- .uint16('numZoomLevels')
68
- .uint64('chromTreeOffset')
69
- .uint64('unzoomedDataOffset')
70
- .uint64('unzoomedIndexOffset')
71
- .uint16('fieldCount')
72
- .uint16('definedFieldCount')
73
- .uint64('asOffset') // autoSql offset, used in bigbed
74
- .uint64('totalSummaryOffset')
75
- .uint32('uncompressBufSize')
76
- .uint64('extHeaderOffset') // name index offset, used in bigbed
77
- .array('zoomLevels', {
78
- length: 'numZoomLevels',
79
- type: new Parser()
80
- .endianess(le)
81
- .uint32('reductionLevel')
82
- .uint32('reserved')
83
- .uint64('dataOffset')
84
- .uint64('indexOffset'),
85
- })
86
-
87
- const totalSummaryParser = new Parser()
88
- .endianess(le)
89
- .uint64('basesCovered')
90
- .doublele('scoreMin')
91
- .doublele('scoreMax')
92
- .doublele('scoreSum')
93
- .doublele('scoreSumSquares')
94
-
95
- const chromTreeParser = new Parser()
96
- .endianess(le)
97
- .uint32('magic')
98
- .uint32('blockSize')
99
- .uint32('keySize')
100
- .uint32('valSize')
101
- .uint64('itemCount')
102
-
103
- const isLeafNode = new Parser()
104
- .endianess(le)
105
- .uint8('isLeafNode')
106
- .skip(1)
107
- .uint16('cnt')
108
- .saveOffset('offset')
109
-
110
- return {
111
- chromTreeParser,
112
- totalSummaryParser,
113
- headerParser,
114
- isLeafNode,
115
- }
116
- }
117
-
118
68
  export interface RequestOptions {
119
69
  signal?: AbortSignal
120
70
  headers?: Record<string, string>
@@ -139,7 +89,8 @@ export abstract class BBI {
139
89
  }
140
90
 
141
91
  /*
142
- * @param filehandle - a filehandle from generic-filehandle or implementing something similar to the node10 fs.promises API
92
+ * @param filehandle - a filehandle from generic-filehandle or implementing
93
+ * something similar to the node10 fs.promises API
143
94
  *
144
95
  * @param path - a Local file path as a string
145
96
  *
@@ -176,7 +127,8 @@ export abstract class BBI {
176
127
  private async _getMainHeader(
177
128
  opts?: RequestOptions,
178
129
  requestSize = 2000,
179
- ): Promise<Header> {
130
+ ): Promise<MainHeader> {
131
+ const le = true
180
132
  const { buffer } = await this.bbi.read(
181
133
  Buffer.alloc(requestSize),
182
134
  0,
@@ -185,30 +137,102 @@ export abstract class BBI {
185
137
  opts,
186
138
  )
187
139
  const isBigEndian = this._isBigEndian(buffer)
188
- const ret = getParsers(isBigEndian)
189
- const header = ret.headerParser.parse(buffer)
190
- const { magic, asOffset, totalSummaryOffset } = header
191
- header.fileType = magic === BIG_BED_MAGIC ? 'bigbed' : 'bigwig'
192
- if (asOffset > requestSize || totalSummaryOffset > requestSize) {
193
- return this._getMainHeader(opts, requestSize * 2)
194
- }
195
- if (asOffset) {
196
- const off = Number(header.asOffset)
197
- header.autoSql = toString(buffer.subarray(off, buffer.indexOf(0, off)))
140
+ const b = buffer
141
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
142
+ let offset = 0
143
+ const magic = dataView.getInt32(offset, le)
144
+ offset += 4
145
+ const version = dataView.getUint16(offset, le)
146
+ offset += 2
147
+ const numZoomLevels = dataView.getUint16(offset, le)
148
+ offset += 2
149
+ const chromTreeOffset = Number(dataView.getBigUint64(offset, le))
150
+ offset += 8
151
+ const unzoomedDataOffset = Number(dataView.getBigUint64(offset, le))
152
+ offset += 8
153
+ const unzoomedIndexOffset = Number(dataView.getBigUint64(offset, le))
154
+ offset += 8
155
+ const fieldCount = dataView.getUint16(offset, le)
156
+ offset += 2
157
+ const definedFieldCount = dataView.getUint16(offset, le)
158
+ offset += 2
159
+ const asOffset = Number(dataView.getBigUint64(offset, le))
160
+ offset += 8
161
+ const totalSummaryOffset = Number(dataView.getBigUint64(offset, le))
162
+ offset += 8
163
+ const uncompressBufSize = dataView.getUint32(offset, le)
164
+ offset += 4
165
+ const extHeaderOffset = Number(dataView.getBigUint64(offset, le))
166
+ offset += 8
167
+ const zoomLevels = [] as ZoomLevel[]
168
+ for (let i = 0; i < numZoomLevels; i++) {
169
+ const reductionLevel = dataView.getUint32(offset, le)
170
+ offset += 4
171
+ const reserved = dataView.getUint32(offset, le)
172
+ offset += 4
173
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
174
+ offset += 8
175
+ const indexOffset = Number(dataView.getBigUint64(offset, le))
176
+ offset += 8
177
+ zoomLevels.push({ reductionLevel, reserved, dataOffset, indexOffset })
198
178
  }
199
179
 
180
+ const fileType = magic === BIG_BED_MAGIC ? 'bigbed' : 'bigwig'
181
+
200
182
  // refetch header if it is too large on first pass,
201
183
  // 8*5 is the sizeof the totalSummary struct
202
- if (header.totalSummaryOffset > requestSize - 8 * 5) {
184
+ if (asOffset > requestSize || totalSummaryOffset > requestSize - 8 * 5) {
203
185
  return this._getMainHeader(opts, requestSize * 2)
204
186
  }
205
187
 
206
- if (header.totalSummaryOffset) {
207
- const tail = buffer.subarray(Number(header.totalSummaryOffset))
208
- const sum = ret.totalSummaryParser.parse(tail)
209
- header.totalSummary = { ...sum, basesCovered: Number(sum.basesCovered) }
188
+ let totalSummary: Statistics
189
+ if (totalSummaryOffset) {
190
+ const b = buffer.subarray(Number(totalSummaryOffset))
191
+ let offset = 0
192
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
193
+ const basesCovered = Number(dataView.getBigUint64(offset, le))
194
+ offset += 8
195
+ const scoreMin = dataView.getFloat64(offset, le)
196
+ offset += 8
197
+ const scoreMax = dataView.getFloat64(offset, le)
198
+ offset += 8
199
+ const scoreSum = dataView.getFloat64(offset, le)
200
+ offset += 8
201
+ const scoreSumSquares = dataView.getFloat64(offset, le)
202
+ offset += 8
203
+
204
+ totalSummary = {
205
+ scoreMin,
206
+ scoreMax,
207
+ scoreSum,
208
+ scoreSumSquares,
209
+ basesCovered,
210
+ }
211
+ } else {
212
+ throw new Error('no stats')
213
+ }
214
+
215
+ return {
216
+ zoomLevels,
217
+ magic,
218
+ extHeaderOffset,
219
+ numZoomLevels,
220
+ fieldCount,
221
+ totalSummary,
222
+ definedFieldCount,
223
+ uncompressBufSize,
224
+ asOffset,
225
+ chromTreeOffset,
226
+ totalSummaryOffset,
227
+ unzoomedDataOffset,
228
+ unzoomedIndexOffset,
229
+ fileType,
230
+ version,
231
+ isBigEndian,
232
+ autoSql: asOffset
233
+ ? buffer.subarray(asOffset, buffer.indexOf(0, asOffset)).toString()
234
+ : '',
210
235
  }
211
- return { ...header, isBigEndian }
212
236
  }
213
237
 
214
238
  private _isBigEndian(buffer: Buffer) {
@@ -225,19 +249,19 @@ export abstract class BBI {
225
249
 
226
250
  // todo: add progress if long running
227
251
  private async _readChromTree(
228
- header: Header,
252
+ header: MainHeader,
229
253
  opts?: { signal?: AbortSignal },
230
254
  ) {
231
255
  const isBE = header.isBigEndian
232
- const le = isBE ? 'big' : 'little'
256
+ const le = !isBE
233
257
  const refsByNumber: Record<
234
258
  number,
235
259
  { name: string; id: number; length: number }
236
260
  > = []
237
261
  const refsByName: Record<string, number> = {}
238
262
 
239
- let unzoomedDataOffset = Number(header.unzoomedDataOffset)
240
- const chromTreeOffset = Number(header.chromTreeOffset)
263
+ let unzoomedDataOffset = header.unzoomedDataOffset
264
+ const chromTreeOffset = header.chromTreeOffset
241
265
  while (unzoomedDataOffset % 4 !== 0) {
242
266
  unzoomedDataOffset += 1
243
267
  }
@@ -250,33 +274,42 @@ export abstract class BBI {
250
274
  opts,
251
275
  )
252
276
 
253
- const p = getParsers(isBE)
254
- const { keySize } = p.chromTreeParser.parse(buffer)
255
- const leafNodeParser = new Parser()
256
- .endianess(le)
257
- .string('key', { stripNull: true, length: keySize })
258
- .uint32('refId')
259
- .uint32('refSize')
260
- .saveOffset('offset')
261
- const nonleafNodeParser = new Parser()
262
- .endianess(le)
263
- .skip(keySize)
264
- .uint64('childOffset')
265
- .saveOffset('offset')
277
+ const b = buffer
278
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
279
+ let offset = 0
280
+ // const magic = dataView.getUint32(offset, le)
281
+ offset += 4
282
+ // const blockSize = dataView.getUint32(offset, le)
283
+ offset += 4
284
+ const keySize = dataView.getUint32(offset, le)
285
+ offset += 4
286
+ // const valSize = dataView.getUint32(offset, le)
287
+ offset += 4
288
+ // const itemCount = dataView.getBigUint64(offset, le)
289
+ offset += 8
290
+
266
291
  const rootNodeOffset = 32
267
292
  const bptReadNode = async (currentOffset: number) => {
268
293
  let offset = currentOffset
269
294
  if (offset >= buffer.length) {
270
295
  throw new Error('reading beyond end of buffer')
271
296
  }
272
- const ret = p.isLeafNode.parse(buffer.subarray(offset))
273
- const { isLeafNode, cnt } = ret
274
- offset += ret.offset
297
+ const isLeafNode = dataView.getUint8(offset)
298
+ offset += 2 //skip 1
299
+ const cnt = dataView.getUint16(offset, le)
300
+ offset += 2
275
301
  if (isLeafNode) {
276
- for (let n = 0; n < cnt; n += 1) {
277
- const leafRet = leafNodeParser.parse(buffer.subarray(offset))
278
- offset += leafRet.offset
279
- const { key, refId, refSize } = leafRet
302
+ for (let n = 0; n < cnt; n++) {
303
+ const key = buffer
304
+ .subarray(offset, offset + keySize)
305
+ .toString()
306
+ .replaceAll('\0', '')
307
+ offset += keySize
308
+ const refId = dataView.getUint32(offset, le)
309
+ offset += 4
310
+ const refSize = dataView.getUint32(offset, le)
311
+ offset += 4
312
+
280
313
  const refRec = { name: key, id: refId, length: refSize }
281
314
  refsByName[this.renameRefSeqs(key)] = refId
282
315
  refsByNumber[refId] = refRec
@@ -284,10 +317,10 @@ export abstract class BBI {
284
317
  } else {
285
318
  // parse index node
286
319
  const nextNodes = []
287
- for (let n = 0; n < cnt; n += 1) {
288
- const nonleafRet = nonleafNodeParser.parse(buffer.subarray(offset))
289
- const { childOffset } = nonleafRet
290
- offset += nonleafRet.offset
320
+ for (let n = 0; n < cnt; n++) {
321
+ offset += keySize
322
+ const childOffset = Number(dataView.getBigUint64(offset, le))
323
+ offset += 8
291
324
  nextNodes.push(
292
325
  bptReadNode(Number(childOffset) - Number(chromTreeOffset)),
293
326
  )
package/src/bigbed.ts CHANGED
@@ -1,16 +1,16 @@
1
1
  import { Buffer } from 'buffer'
2
- import { Parser } from 'binary-parser'
3
2
  import { Observable, merge, firstValueFrom } from 'rxjs'
4
3
  import { map, reduce } from 'rxjs/operators'
5
4
  import AbortablePromiseCache from '@gmod/abortable-promise-cache'
6
5
  import QuickLRU from 'quick-lru'
7
6
 
7
+ // locals
8
8
  import { BBI, Feature, RequestOptions } from './bbi'
9
9
 
10
10
  interface Loc {
11
11
  key: string
12
- offset: bigint
13
- length: bigint
12
+ offset: number
13
+ length: number
14
14
  field?: number
15
15
  }
16
16
 
@@ -46,8 +46,10 @@ export class BigBed extends BBI {
46
46
 
47
47
  /*
48
48
  * parse the bigbed extraIndex fields
49
- * @param abortSignal to abort operation
50
- * @return a Promise for an array of Index data structure since there can be multiple extraIndexes in a bigbed, see bedToBigBed documentation
49
+ *
50
+ *
51
+ * @return a Promise for an array of Index data structure since there can be
52
+ * multiple extraIndexes in a bigbed, see bedToBigBed documentation
51
53
  */
52
54
  private async _readIndices(opts: RequestOptions) {
53
55
  const { extHeaderOffset, isBigEndian } = await this.getHeader(opts)
@@ -57,15 +59,17 @@ export class BigBed extends BBI {
57
59
  64,
58
60
  Number(extHeaderOffset),
59
61
  )
60
- const le = isBigEndian ? 'big' : 'little'
61
- const ret = new Parser()
62
- .endianess(le)
63
- .uint16('size')
64
- .uint16('count')
65
- .uint64('offset')
66
- .parse(data)
67
-
68
- const { count, offset } = ret
62
+ const le = !isBigEndian
63
+
64
+ const b = data
65
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
66
+ let offset = 0
67
+ // const _size = dataView.getUint16(offset, le)
68
+ offset += 2
69
+ const count = dataView.getUint16(offset, le)
70
+ offset += 2
71
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
72
+ offset += 8
69
73
 
70
74
  // no extra index is defined if count==0
71
75
  if (count === 0) {
@@ -78,26 +82,30 @@ export class BigBed extends BBI {
78
82
  Buffer.alloc(len),
79
83
  0,
80
84
  len,
81
- Number(offset),
85
+ Number(dataOffset),
82
86
  )
83
- const extParser = new Parser()
84
- .endianess(le)
85
- .int16('type')
86
- .int16('fieldcount')
87
- .uint64('offset')
88
- .skip(4)
89
- .int16('field')
87
+
90
88
  const indices = [] as Index[]
91
89
 
92
90
  for (let i = 0; i < count; i += 1) {
93
- indices.push(extParser.parse(buffer.subarray(i * blocklen)))
91
+ const b = buffer.subarray(i * blocklen)
92
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
93
+ let offset = 0
94
+ const type = dataView.getInt16(offset, le)
95
+ offset += 2
96
+ const fieldcount = dataView.getInt16(offset, le)
97
+ offset += 2
98
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
99
+ offset += 8 + 4 //4 skip
100
+ const field = dataView.getInt16(offset, le)
101
+ indices.push({ type, fieldcount, offset: Number(dataOffset), field })
94
102
  }
95
103
  return indices
96
104
  }
97
105
 
98
106
  /*
99
- * perform a search in the bigbed extraIndex to find which blocks in the bigbed data to look for the
100
- * actual feature data
107
+ * perform a search in the bigbed extraIndex to find which blocks in the
108
+ * bigbed data to look for the actual feature data
101
109
  *
102
110
  * @param name - the name to search for
103
111
  * @param opts - a SearchOptions argument with optional signal
@@ -112,56 +120,32 @@ export class BigBed extends BBI {
112
120
  if (indices.length === 0) {
113
121
  return []
114
122
  }
115
- const locs = indices.map(async (index: any): Promise<Loc | undefined> => {
116
- const { offset, field } = index
123
+ const locs = indices.map(async (index): Promise<Loc | undefined> => {
124
+ const { offset: offset2, field } = index
117
125
  const { buffer: data } = await this.bbi.read(
118
126
  Buffer.alloc(32),
119
127
  0,
120
128
  32,
121
- Number(offset),
129
+ offset2,
122
130
  opts,
123
131
  )
124
- const le = isBigEndian ? 'big' : 'little'
125
- const p = new Parser()
126
- .endianess(le)
127
- .int32('magic')
128
- .int32('blockSize')
129
- .int32('keySize')
130
- .int32('valSize')
131
- .uint64('itemCount')
132
-
133
- const { blockSize, keySize, valSize } = p.parse(data)
134
- // console.log({blockSize,keySize,valSize})
135
- const bpt = new Parser()
136
- .endianess(le)
137
- .int8('nodeType')
138
- .skip(1)
139
- .int16('cnt')
140
- .choice({
141
- tag: 'nodeType',
142
- choices: {
143
- 0: new Parser().array('leafkeys', {
144
- length: 'cnt',
145
- type: new Parser()
146
- .endianess(le)
147
- .string('key', { length: keySize, stripNull: true })
148
- .uint64('offset'),
149
- }),
150
- 1: new Parser().array('keys', {
151
- length: 'cnt',
152
- type: new Parser()
153
- .endianess(le)
154
- .string('key', { length: keySize, stripNull: true })
155
- .uint64('offset')
156
- .uint32('length')
157
- .uint32('reserved'),
158
- }),
159
- },
160
- })
161
-
162
- const bptReadNode = async (
163
- nodeOffset: number,
164
- ): Promise<Loc | undefined> => {
132
+ const le = !isBigEndian
133
+ const b = data
134
+
135
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
136
+ let offset = 0
137
+ // const _magic = dataView.getInt32(offset, le)
138
+ offset += 4
139
+ const blockSize = dataView.getInt32(offset, le)
140
+ offset += 4
141
+ const keySize = dataView.getInt32(offset, le)
142
+ offset += 4
143
+ const valSize = dataView.getInt32(offset, le)
144
+ offset += 4
145
+ // const _itemCount = Number(dataView.getBigUint64(offset, le))
146
+ offset += 8
147
+
148
+ const bptReadNode = async (nodeOffset: number) => {
165
149
  const val = Number(nodeOffset)
166
150
  const len = 4 + blockSize * (keySize + valSize)
167
151
  const { buffer } = await this.bbi.read(
@@ -171,27 +155,62 @@ export class BigBed extends BBI {
171
155
  val,
172
156
  opts,
173
157
  )
174
- const node = bpt.parse(buffer)
175
- if (node.leafkeys) {
176
- let lastOffset
177
- for (const { key, offset } of node.leafkeys) {
158
+ const b = buffer
159
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
160
+ let offset = 0
161
+ const nodeType = dataView.getInt8(offset)
162
+ offset += 2 //skip 1
163
+ const cnt = dataView.getInt16(offset, le)
164
+ offset += 2
165
+ const keys = []
166
+ if (nodeType === 0) {
167
+ const leafkeys = []
168
+ for (let i = 0; i < cnt; i++) {
169
+ const key = b
170
+ .subarray(offset, offset + keySize)
171
+ .toString()
172
+ .replaceAll('\0', '')
173
+ offset += keySize
174
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
175
+ offset += 8
176
+ leafkeys.push({ key, offset: dataOffset })
177
+ }
178
+
179
+ let lastOffset = 0
180
+ for (const { key, offset } of leafkeys) {
178
181
  if (name.localeCompare(key) < 0 && lastOffset) {
179
182
  return bptReadNode(lastOffset)
180
183
  }
181
184
  lastOffset = offset
182
185
  }
183
186
  return bptReadNode(lastOffset)
184
- }
185
- for (const n of node.keys) {
186
- if (n.key === name) {
187
- return { ...n, field }
187
+ } else if (nodeType === 1) {
188
+ for (let i = 0; i < cnt; i++) {
189
+ const key = b
190
+ .subarray(offset, offset + keySize)
191
+ .toString()
192
+ .replaceAll('\0', '')
193
+ offset += keySize
194
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
195
+ offset += 8
196
+ const length = dataView.getUint32(offset, le)
197
+ offset += 4
198
+ const reserved = dataView.getUint32(offset, le)
199
+ offset += 4
200
+ keys.push({ key, offset: dataOffset, length, reserved })
201
+ }
202
+
203
+ for (const n of keys) {
204
+ if (n.key === name) {
205
+ return { ...n, field }
206
+ }
188
207
  }
189
- }
190
208
 
191
- return undefined
209
+ return undefined
210
+ }
192
211
  }
193
212
  const rootNodeOffset = 32
194
- return bptReadNode(Number(offset) + rootNodeOffset)
213
+ return bptReadNode(offset2 + rootNodeOffset)
195
214
  })
196
215
  return filterUndef(await Promise.all(locs))
197
216
  }
@@ -1,4 +1,4 @@
1
- import { getBigInt64, getBigUint64, setBigInt64, setBigUint64 } from './pure'
1
+ import { getBigInt64, getBigUint64 } from './pure'
2
2
 
3
3
  if (!('getBigInt64' in DataView)) {
4
4
  DataView.prototype.getBigInt64 = function (byteOffset, littleEndian) {
@@ -11,15 +11,3 @@ if (!('getBigUint64' in DataView)) {
11
11
  return getBigUint64(this, byteOffset, littleEndian)
12
12
  }
13
13
  }
14
-
15
- if (!('setBigInt64' in DataView)) {
16
- DataView.prototype.setBigInt64 = function (byteOffset, value, littleEndian) {
17
- setBigInt64(this, byteOffset, value, littleEndian)
18
- }
19
- }
20
-
21
- if (!('setBigUint64' in DataView)) {
22
- DataView.prototype.setBigUint64 = function (byteOffset, value, littleEndian) {
23
- setBigUint64(this, byteOffset, value, littleEndian)
24
- }
25
- }
@@ -40,39 +40,3 @@ export function getBigUint64(
40
40
  BigInt(a * littleEndianMask + b * bigEndianMask)
41
41
  )
42
42
  }
43
-
44
- export function setBigInt64(
45
- dataView: DataView,
46
- byteOffset: number,
47
- value: bigint,
48
- littleEndian: boolean | undefined,
49
- ) {
50
- const hi = Number(value >> BigInt32)
51
- const lo = Number(value & BigInt(0xffffffff))
52
-
53
- if (littleEndian) {
54
- dataView.setInt32(byteOffset + 4, hi, littleEndian)
55
- dataView.setUint32(byteOffset, lo, littleEndian)
56
- } else {
57
- dataView.setInt32(byteOffset, hi, littleEndian)
58
- dataView.setUint32(byteOffset + 4, lo, littleEndian)
59
- }
60
- }
61
-
62
- export function setBigUint64(
63
- dataView: DataView,
64
- byteOffset: number,
65
- value: bigint,
66
- littleEndian: boolean | undefined,
67
- ) {
68
- const hi = Number(value >> BigInt32)
69
- const lo = Number(value & BigInt(0xffffffff))
70
-
71
- if (littleEndian) {
72
- dataView.setUint32(byteOffset + 4, hi, littleEndian)
73
- dataView.setUint32(byteOffset, lo, littleEndian)
74
- } else {
75
- dataView.setUint32(byteOffset, hi, littleEndian)
76
- dataView.setUint32(byteOffset + 4, lo, littleEndian)
77
- }
78
- }