@gmod/bbi 1.0.32 → 1.0.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/bbi.js +5 -5
- package/dist/bigbed.js +1 -1
- package/dist/blockView.js +5 -5
- package/esm/bbi.js +5 -5
- package/esm/bigbed.js +1 -1
- package/esm/blockView.js +5 -5
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -1,3 +1,10 @@
|
|
|
1
|
+
## [1.0.33](https://github.com/GMOD/bbi-js/compare/v1.0.32...v1.0.33) (2022-02-25)
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
- Use subarray instead of slice since slice is deprecated under node xref
|
|
6
|
+
'https://nodejs.org/api/buffer.html#bufslicestart-end'
|
|
7
|
+
|
|
1
8
|
## [1.0.32](https://github.com/GMOD/bbi-js/compare/v1.0.31...v1.0.32) (2022-02-16)
|
|
2
9
|
|
|
3
10
|
|
package/dist/bbi.js
CHANGED
|
@@ -192,14 +192,14 @@ var BBI = /** @class */ (function () {
|
|
|
192
192
|
}
|
|
193
193
|
if (header.asOffset) {
|
|
194
194
|
header.autoSql = buffer
|
|
195
|
-
.
|
|
195
|
+
.subarray(header.asOffset, buffer.indexOf(0, header.asOffset))
|
|
196
196
|
.toString('utf8');
|
|
197
197
|
}
|
|
198
198
|
if (header.totalSummaryOffset > requestSize) {
|
|
199
199
|
return [2 /*return*/, this._getMainHeader(opts, requestSize * 2)];
|
|
200
200
|
}
|
|
201
201
|
if (header.totalSummaryOffset) {
|
|
202
|
-
tail = buffer.
|
|
202
|
+
tail = buffer.subarray(header.totalSummaryOffset);
|
|
203
203
|
header.totalSummary = ret.totalSummaryParser.parse(tail).result;
|
|
204
204
|
}
|
|
205
205
|
return [2 /*return*/, __assign(__assign({}, header), { isBigEndian: isBigEndian })];
|
|
@@ -259,12 +259,12 @@ var BBI = /** @class */ (function () {
|
|
|
259
259
|
if (offset >= data.length) {
|
|
260
260
|
throw new Error('reading beyond end of buffer');
|
|
261
261
|
}
|
|
262
|
-
ret = p.isLeafNode.parse(data.
|
|
262
|
+
ret = p.isLeafNode.parse(data.subarray(offset));
|
|
263
263
|
_a = ret.result, isLeafNode = _a.isLeafNode, cnt = _a.cnt;
|
|
264
264
|
offset += ret.offset;
|
|
265
265
|
if (!isLeafNode) return [3 /*break*/, 1];
|
|
266
266
|
for (n = 0; n < cnt; n += 1) {
|
|
267
|
-
leafRet = leafNodeParser.parse(data.
|
|
267
|
+
leafRet = leafNodeParser.parse(data.subarray(offset));
|
|
268
268
|
offset += leafRet.offset;
|
|
269
269
|
_b = leafRet.result, key = _b.key, refId = _b.refId, refSize = _b.refSize;
|
|
270
270
|
refRec = { name: key, id: refId, length: refSize };
|
|
@@ -275,7 +275,7 @@ var BBI = /** @class */ (function () {
|
|
|
275
275
|
case 1:
|
|
276
276
|
nextNodes = [];
|
|
277
277
|
for (n = 0; n < cnt; n += 1) {
|
|
278
|
-
nonleafRet = nonleafNodeParser.parse(data.
|
|
278
|
+
nonleafRet = nonleafNodeParser.parse(data.subarray(offset));
|
|
279
279
|
childOffset = nonleafRet.result.childOffset;
|
|
280
280
|
offset += nonleafRet.offset;
|
|
281
281
|
childOffset -= chromTreeOffset;
|
package/dist/bigbed.js
CHANGED
|
@@ -150,7 +150,7 @@ var BigBed = /** @class */ (function (_super) {
|
|
|
150
150
|
.int16('field');
|
|
151
151
|
indices = [];
|
|
152
152
|
for (i = 0; i < count; i += 1) {
|
|
153
|
-
indices.push(extParser.parse(buffer.
|
|
153
|
+
indices.push(extParser.parse(buffer.subarray(i * blocklen)).result);
|
|
154
154
|
}
|
|
155
155
|
return [2 /*return*/, indices];
|
|
156
156
|
}
|
package/dist/blockView.js
CHANGED
|
@@ -215,7 +215,7 @@ var BlockView = /** @class */ (function () {
|
|
|
215
215
|
outstanding_1 = 0;
|
|
216
216
|
cirFobRecur2_1 = function (cirBlockData, offset, level) {
|
|
217
217
|
try {
|
|
218
|
-
var data = cirBlockData.
|
|
218
|
+
var data = cirBlockData.subarray(offset);
|
|
219
219
|
var p = _this.leafParser.parse(data).result;
|
|
220
220
|
if (p.blocksToFetch) {
|
|
221
221
|
blocksToFetch_1 = blocksToFetch_1.concat(p.blocksToFetch.filter(filterFeats_1).map(function (l) { return ({
|
|
@@ -299,7 +299,7 @@ var BlockView = /** @class */ (function () {
|
|
|
299
299
|
var features = [];
|
|
300
300
|
var currOffset = startOffset;
|
|
301
301
|
while (currOffset < data.byteLength) {
|
|
302
|
-
var res = this.summaryParser.parse(data.
|
|
302
|
+
var res = this.summaryParser.parse(data.subarray(currOffset));
|
|
303
303
|
features.push(res.result);
|
|
304
304
|
currOffset += res.offset;
|
|
305
305
|
}
|
|
@@ -323,7 +323,7 @@ var BlockView = /** @class */ (function () {
|
|
|
323
323
|
var items = [];
|
|
324
324
|
var currOffset = startOffset;
|
|
325
325
|
while (currOffset < data.byteLength) {
|
|
326
|
-
var res = this.bigBedParser.parse(data.
|
|
326
|
+
var res = this.bigBedParser.parse(data.subarray(currOffset));
|
|
327
327
|
res.result.uniqueId = "bb-".concat(offset + currOffset);
|
|
328
328
|
items.push(res.result);
|
|
329
329
|
currOffset += res.offset;
|
|
@@ -333,7 +333,7 @@ var BlockView = /** @class */ (function () {
|
|
|
333
333
|
: items;
|
|
334
334
|
};
|
|
335
335
|
BlockView.prototype.parseBigWigBlock = function (bytes, startOffset, request) {
|
|
336
|
-
var data = bytes.
|
|
336
|
+
var data = bytes.subarray(startOffset);
|
|
337
337
|
var results = this.bigWigParser.parse(data).result;
|
|
338
338
|
var items = results.items, itemSpan = results.itemSpan, itemStep = results.itemStep, blockStart = results.blockStart, blockType = results.blockType;
|
|
339
339
|
if (blockType === BIG_WIG_TYPE_FSTEP) {
|
|
@@ -383,7 +383,7 @@ var BlockView = /** @class */ (function () {
|
|
|
383
383
|
var blockOffset = block.offset - blockGroup.offset;
|
|
384
384
|
var resultData = data;
|
|
385
385
|
if (isCompressed_1) {
|
|
386
|
-
resultData = (0, unzip_1.unzip)(data.
|
|
386
|
+
resultData = (0, unzip_1.unzip)(data.subarray(blockOffset));
|
|
387
387
|
blockOffset = 0;
|
|
388
388
|
}
|
|
389
389
|
(0, util_1.checkAbortSignal)(signal_2);
|
package/esm/bbi.js
CHANGED
|
@@ -122,14 +122,14 @@ class BBI {
|
|
|
122
122
|
}
|
|
123
123
|
if (header.asOffset) {
|
|
124
124
|
header.autoSql = buffer
|
|
125
|
-
.
|
|
125
|
+
.subarray(header.asOffset, buffer.indexOf(0, header.asOffset))
|
|
126
126
|
.toString('utf8');
|
|
127
127
|
}
|
|
128
128
|
if (header.totalSummaryOffset > requestSize) {
|
|
129
129
|
return this._getMainHeader(opts, requestSize * 2);
|
|
130
130
|
}
|
|
131
131
|
if (header.totalSummaryOffset) {
|
|
132
|
-
const tail = buffer.
|
|
132
|
+
const tail = buffer.subarray(header.totalSummaryOffset);
|
|
133
133
|
header.totalSummary = ret.totalSummaryParser.parse(tail).result;
|
|
134
134
|
}
|
|
135
135
|
return { ...header, isBigEndian };
|
|
@@ -174,12 +174,12 @@ class BBI {
|
|
|
174
174
|
if (offset >= data.length) {
|
|
175
175
|
throw new Error('reading beyond end of buffer');
|
|
176
176
|
}
|
|
177
|
-
const ret = p.isLeafNode.parse(data.
|
|
177
|
+
const ret = p.isLeafNode.parse(data.subarray(offset));
|
|
178
178
|
const { isLeafNode, cnt } = ret.result;
|
|
179
179
|
offset += ret.offset;
|
|
180
180
|
if (isLeafNode) {
|
|
181
181
|
for (let n = 0; n < cnt; n += 1) {
|
|
182
|
-
const leafRet = leafNodeParser.parse(data.
|
|
182
|
+
const leafRet = leafNodeParser.parse(data.subarray(offset));
|
|
183
183
|
offset += leafRet.offset;
|
|
184
184
|
const { key, refId, refSize } = leafRet.result;
|
|
185
185
|
const refRec = { name: key, id: refId, length: refSize };
|
|
@@ -191,7 +191,7 @@ class BBI {
|
|
|
191
191
|
// parse index node
|
|
192
192
|
const nextNodes = [];
|
|
193
193
|
for (let n = 0; n < cnt; n += 1) {
|
|
194
|
-
const nonleafRet = nonleafNodeParser.parse(data.
|
|
194
|
+
const nonleafRet = nonleafNodeParser.parse(data.subarray(offset));
|
|
195
195
|
let { childOffset } = nonleafRet.result;
|
|
196
196
|
offset += nonleafRet.offset;
|
|
197
197
|
childOffset -= chromTreeOffset;
|
package/esm/bigbed.js
CHANGED
|
@@ -69,7 +69,7 @@ class BigBed extends bbi_1.BBI {
|
|
|
69
69
|
.int16('field');
|
|
70
70
|
const indices = [];
|
|
71
71
|
for (let i = 0; i < count; i += 1) {
|
|
72
|
-
indices.push(extParser.parse(buffer.
|
|
72
|
+
indices.push(extParser.parse(buffer.subarray(i * blocklen)).result);
|
|
73
73
|
}
|
|
74
74
|
return indices;
|
|
75
75
|
}
|
package/esm/blockView.js
CHANGED
|
@@ -150,7 +150,7 @@ class BlockView {
|
|
|
150
150
|
let outstanding = 0;
|
|
151
151
|
const cirFobRecur2 = (cirBlockData, offset, level) => {
|
|
152
152
|
try {
|
|
153
|
-
const data = cirBlockData.
|
|
153
|
+
const data = cirBlockData.subarray(offset);
|
|
154
154
|
const p = this.leafParser.parse(data).result;
|
|
155
155
|
if (p.blocksToFetch) {
|
|
156
156
|
blocksToFetch = blocksToFetch.concat(p.blocksToFetch.filter(filterFeats).map((l) => ({
|
|
@@ -220,7 +220,7 @@ class BlockView {
|
|
|
220
220
|
const features = [];
|
|
221
221
|
let currOffset = startOffset;
|
|
222
222
|
while (currOffset < data.byteLength) {
|
|
223
|
-
const res = this.summaryParser.parse(data.
|
|
223
|
+
const res = this.summaryParser.parse(data.subarray(currOffset));
|
|
224
224
|
features.push(res.result);
|
|
225
225
|
currOffset += res.offset;
|
|
226
226
|
}
|
|
@@ -244,7 +244,7 @@ class BlockView {
|
|
|
244
244
|
const items = [];
|
|
245
245
|
let currOffset = startOffset;
|
|
246
246
|
while (currOffset < data.byteLength) {
|
|
247
|
-
const res = this.bigBedParser.parse(data.
|
|
247
|
+
const res = this.bigBedParser.parse(data.subarray(currOffset));
|
|
248
248
|
res.result.uniqueId = `bb-${offset + currOffset}`;
|
|
249
249
|
items.push(res.result);
|
|
250
250
|
currOffset += res.offset;
|
|
@@ -254,7 +254,7 @@ class BlockView {
|
|
|
254
254
|
: items;
|
|
255
255
|
}
|
|
256
256
|
parseBigWigBlock(bytes, startOffset, request) {
|
|
257
|
-
const data = bytes.
|
|
257
|
+
const data = bytes.subarray(startOffset);
|
|
258
258
|
const results = this.bigWigParser.parse(data).result;
|
|
259
259
|
const { items, itemSpan, itemStep, blockStart, blockType } = results;
|
|
260
260
|
if (blockType === BIG_WIG_TYPE_FSTEP) {
|
|
@@ -290,7 +290,7 @@ class BlockView {
|
|
|
290
290
|
let blockOffset = block.offset - blockGroup.offset;
|
|
291
291
|
let resultData = data;
|
|
292
292
|
if (isCompressed) {
|
|
293
|
-
resultData = (0, unzip_1.unzip)(data.
|
|
293
|
+
resultData = (0, unzip_1.unzip)(data.subarray(blockOffset));
|
|
294
294
|
blockOffset = 0;
|
|
295
295
|
}
|
|
296
296
|
(0, util_1.checkAbortSignal)(signal);
|