@gmod/bbi 4.0.5 → 5.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/CHANGELOG.md +9 -0
  2. package/dist/bbi.d.ts +20 -4
  3. package/dist/bbi.js +124 -104
  4. package/dist/bbi.js.map +1 -1
  5. package/dist/bigbed.d.ts +1 -1
  6. package/dist/bigbed.js +91 -72
  7. package/dist/bigbed.js.map +1 -1
  8. package/dist/bigint-polyfill/polyfill.js +0 -10
  9. package/dist/bigint-polyfill/polyfill.js.map +1 -1
  10. package/dist/bigint-polyfill/pure.d.ts +0 -2
  11. package/dist/bigint-polyfill/pure.js +2 -29
  12. package/dist/bigint-polyfill/pure.js.map +1 -1
  13. package/dist/bigwig.js +3 -8
  14. package/dist/bigwig.js.map +1 -1
  15. package/dist/block-view.d.ts +4 -6
  16. package/dist/block-view.js +123 -132
  17. package/dist/block-view.js.map +1 -1
  18. package/dist/range.js +1 -1
  19. package/dist/range.js.map +1 -1
  20. package/dist/unzip-pako.d.ts +0 -1
  21. package/dist/unzip-pako.js +1 -2
  22. package/dist/unzip-pako.js.map +1 -1
  23. package/dist/unzip.d.ts +0 -1
  24. package/dist/util.d.ts +12 -14
  25. package/dist/util.js +12 -18
  26. package/dist/util.js.map +1 -1
  27. package/esm/bbi.d.ts +20 -4
  28. package/esm/bbi.js +124 -104
  29. package/esm/bbi.js.map +1 -1
  30. package/esm/bigbed.d.ts +1 -1
  31. package/esm/bigbed.js +89 -70
  32. package/esm/bigbed.js.map +1 -1
  33. package/esm/bigint-polyfill/polyfill.js +1 -11
  34. package/esm/bigint-polyfill/polyfill.js.map +1 -1
  35. package/esm/bigint-polyfill/pure.d.ts +0 -2
  36. package/esm/bigint-polyfill/pure.js +0 -24
  37. package/esm/bigint-polyfill/pure.js.map +1 -1
  38. package/esm/bigwig.js +3 -8
  39. package/esm/bigwig.js.map +1 -1
  40. package/esm/block-view.d.ts +4 -6
  41. package/esm/block-view.js +123 -132
  42. package/esm/block-view.js.map +1 -1
  43. package/esm/range.js +1 -1
  44. package/esm/range.js.map +1 -1
  45. package/esm/unzip-pako.d.ts +0 -1
  46. package/esm/unzip.d.ts +0 -1
  47. package/esm/util.d.ts +12 -14
  48. package/esm/util.js +8 -14
  49. package/esm/util.js.map +1 -1
  50. package/package.json +6 -7
  51. package/src/bbi.ts +151 -115
  52. package/src/bigbed.ts +100 -81
  53. package/src/bigint-polyfill/polyfill.ts +1 -13
  54. package/src/bigint-polyfill/pure.ts +0 -36
  55. package/src/bigwig.ts +3 -9
  56. package/src/block-view.ts +134 -169
  57. package/src/range.ts +1 -1
  58. package/src/util.ts +16 -21
package/esm/unzip.d.ts CHANGED
@@ -1,2 +1 @@
1
- /// <reference types="node" />
2
1
  export { inflateSync as unzip } from 'zlib';
package/esm/util.d.ts CHANGED
@@ -2,22 +2,19 @@ export declare class AbortError extends Error {
2
2
  code: string;
3
3
  constructor(message: string);
4
4
  }
5
- export declare function groupBlocks(blocks: {
6
- offset: bigint;
7
- length: bigint;
8
- }[]): {
9
- blocks: {
10
- offset: bigint;
11
- length: bigint;
12
- }[];
13
- length: bigint;
14
- offset: bigint;
5
+ interface Block {
6
+ offset: number;
7
+ length: number;
8
+ }
9
+ export declare function groupBlocks(blocks: Block[]): {
10
+ blocks: Block[];
11
+ length: number;
12
+ offset: number;
15
13
  }[];
16
14
  /**
17
- * Properly check if the given AbortSignal is aborted.
18
- * Per the standard, if the signal reads as aborted,
19
- * this function throws either a DOMException AbortError, or a regular error
20
- * with a `code` attribute set to `ERR_ABORTED`.
15
+ * Properly check if the given AbortSignal is aborted. Per the standard, if the
16
+ * signal reads as aborted, this function throws either a DOMException
17
+ * AbortError, or a regular error with a `code` attribute set to `ERR_ABORTED`.
21
18
  *
22
19
  * For convenience, passing `undefined` is a no-op
23
20
  *
@@ -32,3 +29,4 @@ export declare function checkAbortSignal(signal?: AbortSignal): void;
32
29
  * @param {AbortSignal} signal
33
30
  */
34
31
  export declare function abortBreakPoint(signal?: AbortSignal): Promise<void>;
32
+ export {};
package/esm/util.js CHANGED
@@ -7,18 +7,14 @@ export class AbortError extends Error {
7
7
  // sort blocks by file offset and
8
8
  // group blocks that are within 2KB of eachother
9
9
  export function groupBlocks(blocks) {
10
- blocks.sort((b0, b1) => Number(b0.offset) - Number(b1.offset));
10
+ blocks.sort((b0, b1) => b0.offset - b1.offset);
11
11
  const blockGroups = [];
12
12
  let lastBlock;
13
13
  let lastBlockEnd;
14
14
  for (const block of blocks) {
15
- if (lastBlock &&
16
- lastBlockEnd &&
17
- Number(block.offset) - lastBlockEnd <= 2000) {
18
- lastBlock.length = BigInt(Number(lastBlock.length) +
19
- Number(block.length) -
20
- lastBlockEnd +
21
- Number(block.offset));
15
+ if (lastBlock && lastBlockEnd && block.offset - lastBlockEnd <= 2000) {
16
+ lastBlock.length =
17
+ lastBlock.length + block.length - lastBlockEnd + block.offset;
22
18
  lastBlock.blocks.push(block);
23
19
  }
24
20
  else {
@@ -28,15 +24,14 @@ export function groupBlocks(blocks) {
28
24
  offset: block.offset,
29
25
  }));
30
26
  }
31
- lastBlockEnd = Number(lastBlock.offset) + Number(lastBlock.length);
27
+ lastBlockEnd = lastBlock.offset + lastBlock.length;
32
28
  }
33
29
  return blockGroups;
34
30
  }
35
31
  /**
36
- * Properly check if the given AbortSignal is aborted.
37
- * Per the standard, if the signal reads as aborted,
38
- * this function throws either a DOMException AbortError, or a regular error
39
- * with a `code` attribute set to `ERR_ABORTED`.
32
+ * Properly check if the given AbortSignal is aborted. Per the standard, if the
33
+ * signal reads as aborted, this function throws either a DOMException
34
+ * AbortError, or a regular error with a `code` attribute set to `ERR_ABORTED`.
40
35
  *
41
36
  * For convenience, passing `undefined` is a no-op
42
37
  *
@@ -48,7 +43,6 @@ export function checkAbortSignal(signal) {
48
43
  return;
49
44
  }
50
45
  if (signal.aborted) {
51
- // console.log('bam aborted!')
52
46
  if (typeof DOMException === 'undefined') {
53
47
  const e = new AbortError('aborted');
54
48
  e.code = 'ERR_ABORTED';
package/esm/util.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"util.js","sourceRoot":"","sources":["../src/util.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,UAAW,SAAQ,KAAK;IAGnC,YAAmB,OAAe;QAChC,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,aAAa,CAAA;IAC3B,CAAC;CACF;AACD,iCAAiC;AACjC,gDAAgD;AAChD,MAAM,UAAU,WAAW,CAAC,MAA4C;IACtE,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,CAAA;IAE9D,MAAM,WAAW,GAAG,EAAE,CAAA;IACtB,IAAI,SAAS,CAAA;IACb,IAAI,YAAY,CAAA;IAChB,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,IACE,SAAS;YACT,YAAY;YACZ,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,YAAY,IAAI,IAAI,EAC3C,CAAC;YACD,SAAS,CAAC,MAAM,GAAG,MAAM,CACvB,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC;gBACtB,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC;gBACpB,YAAY;gBACZ,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CACvB,CAAA;YACD,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;QAC9B,CAAC;aAAM,CAAC;YACN,WAAW,CAAC,IAAI,CACd,CAAC,SAAS,GAAG;gBACX,MAAM,EAAE,CAAC,KAAK,CAAC;gBACf,MAAM,EAAE,KAAK,CAAC,MAAM;gBACpB,MAAM,EAAE,KAAK,CAAC,MAAM;aACrB,CAAC,CACH,CAAA;QACH,CAAC;QACD,YAAY,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAA;IACpE,CAAC;IAED,OAAO,WAAW,CAAA;AACpB,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,UAAU,gBAAgB,CAAC,MAAoB;IACnD,IAAI,CAAC,MAAM,EAAE,CAAC;QACZ,OAAM;IACR,CAAC;IAED,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;QACnB,8BAA8B;QAC9B,IAAI,OAAO,YAAY,KAAK,WAAW,EAAE,CAAC;YACxC,MAAM,CAAC,GAAG,IAAI,UAAU,CAAC,SAAS,CAAC,CAAA;YACnC,CAAC,CAAC,IAAI,GAAG,aAAa,CAAA;YACtB,MAAM,CAAC,CAAA;QACT,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,YAAY,CAAC,SAAS,EAAE,YAAY,CAAC,CAAA;QACjD,CAAC;IACH,CAAC;AACH,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CAAC,MAAoB;IACxD,MAAM,OAAO,CAAC,OAAO,EAAE,CAAA;IACvB,gBAAgB,CAAC,MAAM,CAAC,CAAA;AAC1B,CAAC"}
1
+ {"version":3,"file":"util.js","sourceRoot":"","sources":["../src/util.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,UAAW,SAAQ,KAAK;IAGnC,YAAmB,OAAe;QAChC,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,aAAa,CAAA;IAC3B,CAAC;CACF;AAMD,iCAAiC;AACjC,gDAAgD;AAChD,MAAM,UAAU,WAAW,CAAC,MAAe;IACzC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,MAAM,GAAG,EAAE,CAAC,MAAM,CAAC,CAAA;IAE9C,MAAM,WAAW,GAAG,EAAE,CAAA;IACtB,IAAI,SAAoD,CAAA;IACxD,IAAI,YAAgC,CAAA;IACpC,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,IAAI,SAAS,IAAI,YAAY,IAAI,KAAK,CAAC,MAAM,GAAG,YAAY,IAAI,IAAI,EAAE,CAAC;YACrE,SAAS,CAAC,MAAM;gBACd,SAAS,CAAC,MAAM,GAAG,KAAK,CAAC,MAAM,GAAG,YAAY,GAAG,KAAK,CAAC,MAAM,CAAA;YAC/D,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;QAC9B,CAAC;aAAM,CAAC;YACN,WAAW,CAAC,IAAI,CACd,CAAC,SAAS,GAAG;gBACX,MAAM,EAAE,CAAC,KAAK,CAAC;gBACf,MAAM,EAAE,KAAK,CAAC,MAAM;gBACpB,MAAM,EAAE,KAAK,CAAC,MAAM;aACrB,CAAC,CACH,CAAA;QACH,CAAC;QACD,YAAY,GAAG,SAAS,CAAC,MAAM,GAAG,SAAS,CAAC,MAAM,CAAA;IACpD,CAAC;IAED,OAAO,WAAW,CAAA;AACpB,CAAC;AAED;;;;;;;;;GASG;AACH,MAAM,UAAU,gBAAgB,CAAC,MAAoB;IACnD,IAAI,CAAC,MAAM,EAAE,CAAC;QACZ,OAAM;IACR,CAAC;IAED,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;QACnB,IAAI,OAAO,YAAY,KAAK,WAAW,EAAE,CAAC;YACxC,MAAM,CAAC,GAAG,IAAI,UAAU,CAAC,SAAS,CAAC,CAAA;YACnC,CAAC,CAAC,IAAI,GAAG,aAAa,CAAA;YACtB,MAAM,CAAC,CAAA;QACT,CAAC;aAAM,CAAC;YACN,MAAM,IAAI,YAAY,CAAC,SAAS,EAAE,YAAY,CAAC,CAAA;QACjD,CAAC;IACH,CAAC;AACH,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CAAC,MAAoB;IACxD,MAAM,OAAO,CAAC,OAAO,EAAE,CAAA;IACvB,gBAAgB,CAAC,MAAM,CAAC,CAAA;AAC1B,CAAC"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@gmod/bbi",
3
- "version": "4.0.5",
3
+ "version": "5.0.0",
4
4
  "description": "Parser for BigWig/BigBed files",
5
5
  "license": "MIT",
6
6
  "repository": "GMOD/bbi-js",
@@ -42,8 +42,7 @@
42
42
  "genomics"
43
43
  ],
44
44
  "dependencies": {
45
- "abortable-promise-cache": "^1.4.1",
46
- "binary-parser": "^2.1.0",
45
+ "@gmod/abortable-promise-cache": "^2.0.0",
47
46
  "generic-filehandle": "^3.0.0",
48
47
  "pako": "^2.0.0",
49
48
  "quick-lru": "^4.0.0",
@@ -54,16 +53,16 @@
54
53
  "@types/jest": "^29.5.12",
55
54
  "@types/node": "^20.11.16",
56
55
  "@types/pako": "^2.0.0",
57
- "@typescript-eslint/eslint-plugin": "^7.3.1",
58
- "@typescript-eslint/parser": "^7.3.1",
56
+ "@typescript-eslint/eslint-plugin": "^8.0.1",
57
+ "@typescript-eslint/parser": "^8.0.1",
59
58
  "eslint": "^9.0.0",
60
59
  "eslint-config-prettier": "^9.1.0",
61
60
  "eslint-plugin-prettier": "^5.1.3",
62
- "eslint-plugin-unicorn": "^54.0.0",
61
+ "eslint-plugin-unicorn": "^55.0.0",
63
62
  "jest": "^29.6.2",
64
63
  "jest-environment-jsdom": "^29.6.2",
65
64
  "prettier": "^3.2.5",
66
- "rimraf": "^5.0.1",
65
+ "rimraf": "^6.0.1",
67
66
  "standard-changelog": "^6.0.0",
68
67
  "ts-jest": "^29.1.2",
69
68
  "typescript": "^5.1.6"
package/src/bbi.ts CHANGED
@@ -1,5 +1,4 @@
1
1
  import { Buffer } from 'buffer'
2
- import { Parser } from 'binary-parser'
3
2
  import { LocalFile, RemoteFile, GenericFilehandle } from 'generic-filehandle'
4
3
  import { firstValueFrom, Observable } from 'rxjs'
5
4
  import { toArray } from 'rxjs/operators'
@@ -8,14 +7,22 @@ import { BlockView } from './block-view'
8
7
  const BIG_WIG_MAGIC = -2003829722
9
8
  const BIG_BED_MAGIC = -2021002517
10
9
 
11
- function toString(arr: Uint8Array) {
10
+ function myToString(arr: Uint8Array) {
12
11
  return new TextDecoder().decode(arr)
13
12
  }
13
+ interface ZoomLevel {
14
+ reductionLevel: number
15
+ reserved: number
16
+ dataOffset: number
17
+ indexOffset: number
18
+ }
14
19
 
15
20
  export interface Feature {
21
+ offset?: number
22
+ chromId: number
16
23
  start: number
17
24
  end: number
18
- score: number
25
+ score?: number
19
26
  rest?: string // for bigbed line
20
27
  minScore?: number // for summary line
21
28
  maxScore?: number // for summary line
@@ -27,6 +34,8 @@ interface Statistics {
27
34
  scoreSum: number
28
35
  basesCovered: number
29
36
  scoreSumSquares: number
37
+ scoreMin: number
38
+ scoreMax: number
30
39
  }
31
40
 
32
41
  interface RefInfo {
@@ -34,87 +43,31 @@ interface RefInfo {
34
43
  id: number
35
44
  length: number
36
45
  }
37
- export interface Header {
46
+
47
+ export interface MainHeader {
48
+ magic: number
38
49
  version: number
39
50
  autoSql: string
40
51
  totalSummary: Statistics
41
- zoomLevels: any
52
+ asOffset: number
53
+ zoomLevels: ZoomLevel[]
54
+ fieldCount: number
55
+ numZoomLevels: number
42
56
  unzoomedIndexOffset: number
57
+ totalSummaryOffset: number
43
58
  unzoomedDataOffset: number
44
59
  definedFieldCount: number
45
60
  uncompressBufSize: number
46
61
  chromTreeOffset: number
47
- fileSize: number
48
62
  extHeaderOffset: number
49
63
  isBigEndian: boolean
50
64
  fileType: string
65
+ }
66
+ export interface Header extends MainHeader {
51
67
  refsByName: Record<string, number>
52
68
  refsByNumber: Record<number, RefInfo>
53
69
  }
54
70
 
55
- /**
56
- * get the compiled parsers for different sections of the bigwig file
57
- *
58
- * @param isBE - is big endian, typically false
59
- * @return an object with compiled parsers
60
- */
61
- function getParsers(isBE: boolean) {
62
- const le = isBE ? 'big' : 'little'
63
- const headerParser = new Parser()
64
- .endianess(le)
65
- .int32('magic')
66
- .uint16('version')
67
- .uint16('numZoomLevels')
68
- .uint64('chromTreeOffset')
69
- .uint64('unzoomedDataOffset')
70
- .uint64('unzoomedIndexOffset')
71
- .uint16('fieldCount')
72
- .uint16('definedFieldCount')
73
- .uint64('asOffset') // autoSql offset, used in bigbed
74
- .uint64('totalSummaryOffset')
75
- .uint32('uncompressBufSize')
76
- .uint64('extHeaderOffset') // name index offset, used in bigbed
77
- .array('zoomLevels', {
78
- length: 'numZoomLevels',
79
- type: new Parser()
80
- .endianess(le)
81
- .uint32('reductionLevel')
82
- .uint32('reserved')
83
- .uint64('dataOffset')
84
- .uint64('indexOffset'),
85
- })
86
-
87
- const totalSummaryParser = new Parser()
88
- .endianess(le)
89
- .uint64('basesCovered')
90
- .doublele('scoreMin')
91
- .doublele('scoreMax')
92
- .doublele('scoreSum')
93
- .doublele('scoreSumSquares')
94
-
95
- const chromTreeParser = new Parser()
96
- .endianess(le)
97
- .uint32('magic')
98
- .uint32('blockSize')
99
- .uint32('keySize')
100
- .uint32('valSize')
101
- .uint64('itemCount')
102
-
103
- const isLeafNode = new Parser()
104
- .endianess(le)
105
- .uint8('isLeafNode')
106
- .skip(1)
107
- .uint16('cnt')
108
- .saveOffset('offset')
109
-
110
- return {
111
- chromTreeParser,
112
- totalSummaryParser,
113
- headerParser,
114
- isLeafNode,
115
- }
116
- }
117
-
118
71
  export interface RequestOptions {
119
72
  signal?: AbortSignal
120
73
  headers?: Record<string, string>
@@ -139,7 +92,8 @@ export abstract class BBI {
139
92
  }
140
93
 
141
94
  /*
142
- * @param filehandle - a filehandle from generic-filehandle or implementing something similar to the node10 fs.promises API
95
+ * @param filehandle - a filehandle from generic-filehandle or implementing
96
+ * something similar to the node10 fs.promises API
143
97
  *
144
98
  * @param path - a Local file path as a string
145
99
  *
@@ -176,7 +130,8 @@ export abstract class BBI {
176
130
  private async _getMainHeader(
177
131
  opts?: RequestOptions,
178
132
  requestSize = 2000,
179
- ): Promise<Header> {
133
+ ): Promise<MainHeader> {
134
+ const le = true
180
135
  const { buffer } = await this.bbi.read(
181
136
  Buffer.alloc(requestSize),
182
137
  0,
@@ -185,30 +140,102 @@ export abstract class BBI {
185
140
  opts,
186
141
  )
187
142
  const isBigEndian = this._isBigEndian(buffer)
188
- const ret = getParsers(isBigEndian)
189
- const header = ret.headerParser.parse(buffer)
190
- const { magic, asOffset, totalSummaryOffset } = header
191
- header.fileType = magic === BIG_BED_MAGIC ? 'bigbed' : 'bigwig'
192
- if (asOffset > requestSize || totalSummaryOffset > requestSize) {
193
- return this._getMainHeader(opts, requestSize * 2)
194
- }
195
- if (asOffset) {
196
- const off = Number(header.asOffset)
197
- header.autoSql = toString(buffer.subarray(off, buffer.indexOf(0, off)))
143
+ const b = buffer
144
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
145
+ let offset = 0
146
+ const magic = dataView.getInt32(offset, le)
147
+ offset += 4
148
+ const version = dataView.getUint16(offset, le)
149
+ offset += 2
150
+ const numZoomLevels = dataView.getUint16(offset, le)
151
+ offset += 2
152
+ const chromTreeOffset = Number(dataView.getBigUint64(offset, le))
153
+ offset += 8
154
+ const unzoomedDataOffset = Number(dataView.getBigUint64(offset, le))
155
+ offset += 8
156
+ const unzoomedIndexOffset = Number(dataView.getBigUint64(offset, le))
157
+ offset += 8
158
+ const fieldCount = dataView.getUint16(offset, le)
159
+ offset += 2
160
+ const definedFieldCount = dataView.getUint16(offset, le)
161
+ offset += 2
162
+ const asOffset = Number(dataView.getBigUint64(offset, le))
163
+ offset += 8
164
+ const totalSummaryOffset = Number(dataView.getBigUint64(offset, le))
165
+ offset += 8
166
+ const uncompressBufSize = dataView.getUint32(offset, le)
167
+ offset += 4
168
+ const extHeaderOffset = Number(dataView.getBigUint64(offset, le))
169
+ offset += 8
170
+ const zoomLevels = [] as ZoomLevel[]
171
+ for (let i = 0; i < numZoomLevels; i++) {
172
+ const reductionLevel = dataView.getUint32(offset, le)
173
+ offset += 4
174
+ const reserved = dataView.getUint32(offset, le)
175
+ offset += 4
176
+ const dataOffset = Number(dataView.getBigUint64(offset, le))
177
+ offset += 8
178
+ const indexOffset = Number(dataView.getBigUint64(offset, le))
179
+ offset += 8
180
+ zoomLevels.push({ reductionLevel, reserved, dataOffset, indexOffset })
198
181
  }
199
182
 
183
+ const fileType = magic === BIG_BED_MAGIC ? 'bigbed' : 'bigwig'
184
+
200
185
  // refetch header if it is too large on first pass,
201
186
  // 8*5 is the sizeof the totalSummary struct
202
- if (header.totalSummaryOffset > requestSize - 8 * 5) {
187
+ if (asOffset > requestSize || totalSummaryOffset > requestSize - 8 * 5) {
203
188
  return this._getMainHeader(opts, requestSize * 2)
204
189
  }
205
190
 
206
- if (header.totalSummaryOffset) {
207
- const tail = buffer.subarray(Number(header.totalSummaryOffset))
208
- const sum = ret.totalSummaryParser.parse(tail)
209
- header.totalSummary = { ...sum, basesCovered: Number(sum.basesCovered) }
191
+ let totalSummary: Statistics
192
+ if (totalSummaryOffset) {
193
+ const b = buffer.subarray(Number(totalSummaryOffset))
194
+ let offset = 0
195
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
196
+ const basesCovered = Number(dataView.getBigUint64(offset, le))
197
+ offset += 8
198
+ const scoreMin = dataView.getFloat64(offset, le)
199
+ offset += 8
200
+ const scoreMax = dataView.getFloat64(offset, le)
201
+ offset += 8
202
+ const scoreSum = dataView.getFloat64(offset, le)
203
+ offset += 8
204
+ const scoreSumSquares = dataView.getFloat64(offset, le)
205
+ offset += 8
206
+
207
+ totalSummary = {
208
+ scoreMin,
209
+ scoreMax,
210
+ scoreSum,
211
+ scoreSumSquares,
212
+ basesCovered,
213
+ }
214
+ } else {
215
+ throw new Error('no stats')
216
+ }
217
+
218
+ return {
219
+ zoomLevels,
220
+ magic,
221
+ extHeaderOffset,
222
+ numZoomLevels,
223
+ fieldCount,
224
+ totalSummary,
225
+ definedFieldCount,
226
+ uncompressBufSize,
227
+ asOffset,
228
+ chromTreeOffset,
229
+ totalSummaryOffset,
230
+ unzoomedDataOffset,
231
+ unzoomedIndexOffset,
232
+ fileType,
233
+ version,
234
+ isBigEndian,
235
+ autoSql: asOffset
236
+ ? myToString(buffer.subarray(asOffset, buffer.indexOf(0, asOffset)))
237
+ : '',
210
238
  }
211
- return { ...header, isBigEndian }
212
239
  }
213
240
 
214
241
  private _isBigEndian(buffer: Buffer) {
@@ -225,19 +252,19 @@ export abstract class BBI {
225
252
 
226
253
  // todo: add progress if long running
227
254
  private async _readChromTree(
228
- header: Header,
255
+ header: MainHeader,
229
256
  opts?: { signal?: AbortSignal },
230
257
  ) {
231
258
  const isBE = header.isBigEndian
232
- const le = isBE ? 'big' : 'little'
259
+ const le = !isBE
233
260
  const refsByNumber: Record<
234
261
  number,
235
262
  { name: string; id: number; length: number }
236
263
  > = []
237
264
  const refsByName: Record<string, number> = {}
238
265
 
239
- let unzoomedDataOffset = Number(header.unzoomedDataOffset)
240
- const chromTreeOffset = Number(header.chromTreeOffset)
266
+ let unzoomedDataOffset = header.unzoomedDataOffset
267
+ const chromTreeOffset = header.chromTreeOffset
241
268
  while (unzoomedDataOffset % 4 !== 0) {
242
269
  unzoomedDataOffset += 1
243
270
  }
@@ -250,33 +277,42 @@ export abstract class BBI {
250
277
  opts,
251
278
  )
252
279
 
253
- const p = getParsers(isBE)
254
- const { keySize } = p.chromTreeParser.parse(buffer)
255
- const leafNodeParser = new Parser()
256
- .endianess(le)
257
- .string('key', { stripNull: true, length: keySize })
258
- .uint32('refId')
259
- .uint32('refSize')
260
- .saveOffset('offset')
261
- const nonleafNodeParser = new Parser()
262
- .endianess(le)
263
- .skip(keySize)
264
- .uint64('childOffset')
265
- .saveOffset('offset')
280
+ const b = buffer
281
+ const dataView = new DataView(b.buffer, b.byteOffset, b.length)
282
+ let offset = 0
283
+ // const magic = dataView.getUint32(offset, le)
284
+ offset += 4
285
+ // const blockSize = dataView.getUint32(offset, le)
286
+ offset += 4
287
+ const keySize = dataView.getUint32(offset, le)
288
+ offset += 4
289
+ // const valSize = dataView.getUint32(offset, le)
290
+ offset += 4
291
+ // const itemCount = dataView.getBigUint64(offset, le)
292
+ offset += 8
293
+
266
294
  const rootNodeOffset = 32
267
295
  const bptReadNode = async (currentOffset: number) => {
268
296
  let offset = currentOffset
269
297
  if (offset >= buffer.length) {
270
298
  throw new Error('reading beyond end of buffer')
271
299
  }
272
- const ret = p.isLeafNode.parse(buffer.subarray(offset))
273
- const { isLeafNode, cnt } = ret
274
- offset += ret.offset
300
+ const isLeafNode = dataView.getUint8(offset)
301
+ offset += 2 //skip 1
302
+ const cnt = dataView.getUint16(offset, le)
303
+ offset += 2
275
304
  if (isLeafNode) {
276
- for (let n = 0; n < cnt; n += 1) {
277
- const leafRet = leafNodeParser.parse(buffer.subarray(offset))
278
- offset += leafRet.offset
279
- const { key, refId, refSize } = leafRet
305
+ for (let n = 0; n < cnt; n++) {
306
+ const key = buffer
307
+ .subarray(offset, offset + keySize)
308
+ .toString()
309
+ .replaceAll('\0', '')
310
+ offset += keySize
311
+ const refId = dataView.getUint32(offset, le)
312
+ offset += 4
313
+ const refSize = dataView.getUint32(offset, le)
314
+ offset += 4
315
+
280
316
  const refRec = { name: key, id: refId, length: refSize }
281
317
  refsByName[this.renameRefSeqs(key)] = refId
282
318
  refsByNumber[refId] = refRec
@@ -284,10 +320,10 @@ export abstract class BBI {
284
320
  } else {
285
321
  // parse index node
286
322
  const nextNodes = []
287
- for (let n = 0; n < cnt; n += 1) {
288
- const nonleafRet = nonleafNodeParser.parse(buffer.subarray(offset))
289
- const { childOffset } = nonleafRet
290
- offset += nonleafRet.offset
323
+ for (let n = 0; n < cnt; n++) {
324
+ offset += keySize
325
+ const childOffset = Number(dataView.getBigUint64(offset, le))
326
+ offset += 8
291
327
  nextNodes.push(
292
328
  bptReadNode(Number(childOffset) - Number(chromTreeOffset)),
293
329
  )