@gmod/cram 1.5.9 → 1.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +90 -0
- package/README.md +182 -172
- package/dist/craiIndex.d.ts +37 -0
- package/dist/craiIndex.js +196 -301
- package/dist/craiIndex.js.map +1 -0
- package/dist/cram-bundle.js +6 -15
- package/dist/cramFile/codecs/_base.d.ts +6 -0
- package/dist/cramFile/codecs/_base.js +44 -53
- package/dist/cramFile/codecs/_base.js.map +1 -0
- package/dist/cramFile/codecs/beta.d.ts +4 -0
- package/dist/cramFile/codecs/beta.js +38 -48
- package/dist/cramFile/codecs/beta.js.map +1 -0
- package/dist/cramFile/codecs/byteArrayLength.d.ts +8 -0
- package/dist/cramFile/codecs/byteArrayLength.js +58 -78
- package/dist/cramFile/codecs/byteArrayLength.js.map +1 -0
- package/dist/cramFile/codecs/byteArrayStop.d.ts +6 -0
- package/dist/cramFile/codecs/byteArrayStop.js +62 -76
- package/dist/cramFile/codecs/byteArrayStop.js.map +1 -0
- package/dist/cramFile/codecs/external.d.ts +7 -0
- package/dist/cramFile/codecs/external.js +63 -81
- package/dist/cramFile/codecs/external.js.map +1 -0
- package/dist/cramFile/codecs/gamma.d.ts +4 -0
- package/dist/cramFile/codecs/gamma.js +43 -56
- package/dist/cramFile/codecs/gamma.js.map +1 -0
- package/dist/cramFile/codecs/huffman.d.ts +17 -0
- package/dist/cramFile/codecs/huffman.js +126 -199
- package/dist/cramFile/codecs/huffman.js.map +1 -0
- package/dist/cramFile/codecs/index.d.ts +2 -0
- package/dist/cramFile/codecs/index.js +31 -38
- package/dist/cramFile/codecs/index.js.map +1 -0
- package/dist/cramFile/codecs/subexp.d.ts +4 -0
- package/dist/cramFile/codecs/subexp.js +51 -64
- package/dist/cramFile/codecs/subexp.js.map +1 -0
- package/dist/cramFile/constants.d.ts +36 -0
- package/dist/cramFile/constants.js +52 -50
- package/dist/cramFile/constants.js.map +1 -0
- package/dist/cramFile/container/compressionScheme.d.ts +23 -0
- package/dist/cramFile/container/compressionScheme.js +115 -153
- package/dist/cramFile/container/compressionScheme.js.map +1 -0
- package/dist/cramFile/container/index.d.ts +13 -0
- package/dist/cramFile/container/index.js +169 -283
- package/dist/cramFile/container/index.js.map +1 -0
- package/dist/cramFile/file.d.ts +63 -0
- package/dist/cramFile/file.js +440 -766
- package/dist/cramFile/file.js.map +1 -0
- package/dist/cramFile/index.d.ts +2 -0
- package/dist/cramFile/index.js +7 -4
- package/dist/cramFile/index.js.map +1 -0
- package/dist/cramFile/record.d.ts +79 -0
- package/dist/cramFile/record.js +253 -308
- package/dist/cramFile/record.js.map +1 -0
- package/dist/cramFile/sectionParsers.d.ts +18 -0
- package/dist/cramFile/sectionParsers.js +324 -362
- package/dist/cramFile/sectionParsers.js.map +1 -0
- package/dist/cramFile/slice/decodeRecord.d.ts +2 -0
- package/dist/cramFile/slice/decodeRecord.js +278 -298
- package/dist/cramFile/slice/decodeRecord.js.map +1 -0
- package/dist/cramFile/slice/index.d.ts +20 -0
- package/dist/cramFile/slice/index.js +488 -789
- package/dist/cramFile/slice/index.js.map +1 -0
- package/dist/cramFile/util.d.ts +5 -0
- package/dist/cramFile/util.js +158 -144
- package/dist/cramFile/util.js.map +1 -0
- package/dist/errors.d.ts +23 -0
- package/dist/errors.js +66 -103
- package/dist/errors.js.map +1 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +12 -12
- package/dist/index.js.map +1 -0
- package/dist/indexedCramFile.d.ts +39 -0
- package/dist/indexedCramFile.js +213 -315
- package/dist/indexedCramFile.js.map +1 -0
- package/dist/io/bufferCache.d.ts +12 -0
- package/dist/io/bufferCache.js +108 -128
- package/dist/io/bufferCache.js.map +1 -0
- package/dist/io/index.d.ts +5 -0
- package/dist/io/index.js +29 -27
- package/dist/io/index.js.map +1 -0
- package/dist/io/localFile.d.ts +10 -0
- package/dist/io/localFile.js +105 -162
- package/dist/io/localFile.js.map +1 -0
- package/dist/io/remoteFile.d.ts +16 -0
- package/dist/io/remoteFile.js +137 -206
- package/dist/io/remoteFile.js.map +1 -0
- package/dist/rans/constants.d.ts +3 -0
- package/dist/rans/constants.js +6 -6
- package/dist/rans/constants.js.map +1 -0
- package/dist/rans/d04.d.ts +1 -0
- package/dist/rans/d04.js +70 -99
- package/dist/rans/d04.js.map +1 -0
- package/dist/rans/d14.d.ts +1 -0
- package/dist/rans/d14.js +55 -93
- package/dist/rans/d14.js.map +1 -0
- package/dist/rans/decoding.d.ts +30 -0
- package/dist/rans/decoding.js +112 -159
- package/dist/rans/decoding.js.map +1 -0
- package/dist/rans/frequencies.d.ts +2 -0
- package/dist/rans/frequencies.js +110 -119
- package/dist/rans/frequencies.js.map +1 -0
- package/dist/rans/index.d.ts +1 -0
- package/dist/rans/index.js +111 -174
- package/dist/rans/index.js.map +1 -0
- package/dist/sam.d.ts +1 -0
- package/dist/sam.js +16 -41
- package/dist/sam.js.map +1 -0
- package/dist/unzip-pako.d.ts +2 -0
- package/dist/unzip-pako.js +9 -0
- package/dist/unzip-pako.js.map +1 -0
- package/dist/unzip.d.ts +2 -0
- package/dist/unzip.js +6 -0
- package/dist/unzip.js.map +1 -0
- package/errors.js +66 -103
- package/esm/craiIndex.d.ts +37 -0
- package/esm/craiIndex.js +158 -0
- package/esm/craiIndex.js.map +1 -0
- package/esm/cramFile/codecs/_base.d.ts +6 -0
- package/esm/cramFile/codecs/_base.js +42 -0
- package/esm/cramFile/codecs/_base.js.map +1 -0
- package/esm/cramFile/codecs/beta.d.ts +4 -0
- package/esm/cramFile/codecs/beta.js +15 -0
- package/esm/cramFile/codecs/beta.js.map +1 -0
- package/esm/cramFile/codecs/byteArrayLength.d.ts +8 -0
- package/esm/cramFile/codecs/byteArrayLength.js +35 -0
- package/esm/cramFile/codecs/byteArrayLength.js.map +1 -0
- package/esm/cramFile/codecs/byteArrayStop.d.ts +6 -0
- package/esm/cramFile/codecs/byteArrayStop.js +40 -0
- package/esm/cramFile/codecs/byteArrayStop.js.map +1 -0
- package/esm/cramFile/codecs/external.d.ts +7 -0
- package/esm/cramFile/codecs/external.js +40 -0
- package/esm/cramFile/codecs/external.js.map +1 -0
- package/esm/cramFile/codecs/gamma.d.ts +4 -0
- package/esm/cramFile/codecs/gamma.js +20 -0
- package/esm/cramFile/codecs/gamma.js.map +1 -0
- package/esm/cramFile/codecs/huffman.d.ts +17 -0
- package/esm/cramFile/codecs/huffman.js +107 -0
- package/esm/cramFile/codecs/huffman.js.map +1 -0
- package/esm/cramFile/codecs/index.d.ts +2 -0
- package/esm/cramFile/codecs/index.js +30 -0
- package/esm/cramFile/codecs/index.js.map +1 -0
- package/esm/cramFile/codecs/subexp.d.ts +4 -0
- package/esm/cramFile/codecs/subexp.js +28 -0
- package/esm/cramFile/codecs/subexp.js.map +1 -0
- package/esm/cramFile/constants.d.ts +36 -0
- package/esm/cramFile/constants.js +51 -0
- package/esm/cramFile/constants.js.map +1 -0
- package/esm/cramFile/container/compressionScheme.d.ts +23 -0
- package/esm/cramFile/container/compressionScheme.js +123 -0
- package/esm/cramFile/container/compressionScheme.js.map +1 -0
- package/esm/cramFile/container/index.d.ts +13 -0
- package/esm/cramFile/container/index.js +84 -0
- package/esm/cramFile/container/index.js.map +1 -0
- package/esm/cramFile/file.d.ts +63 -0
- package/esm/cramFile/file.js +281 -0
- package/esm/cramFile/file.js.map +1 -0
- package/esm/cramFile/index.d.ts +2 -0
- package/esm/cramFile/index.js +3 -0
- package/esm/cramFile/index.js.map +1 -0
- package/esm/cramFile/record.d.ts +79 -0
- package/esm/cramFile/record.js +297 -0
- package/esm/cramFile/record.js.map +1 -0
- package/esm/cramFile/sectionParsers.d.ts +18 -0
- package/esm/cramFile/sectionParsers.js +347 -0
- package/esm/cramFile/sectionParsers.js.map +1 -0
- package/esm/cramFile/slice/decodeRecord.d.ts +2 -0
- package/esm/cramFile/slice/decodeRecord.js +299 -0
- package/esm/cramFile/slice/decodeRecord.js.map +1 -0
- package/esm/cramFile/slice/index.d.ts +20 -0
- package/esm/cramFile/slice/index.js +364 -0
- package/esm/cramFile/slice/index.js.map +1 -0
- package/esm/cramFile/util.d.ts +5 -0
- package/esm/cramFile/util.js +161 -0
- package/esm/cramFile/util.js.map +1 -0
- package/esm/errors.d.ts +23 -0
- package/esm/errors.js +24 -0
- package/esm/errors.js.map +1 -0
- package/esm/index.d.ts +4 -0
- package/esm/index.js +5 -0
- package/esm/index.js.map +1 -0
- package/esm/indexedCramFile.d.ts +39 -0
- package/esm/indexedCramFile.js +155 -0
- package/esm/indexedCramFile.js.map +1 -0
- package/esm/io/bufferCache.d.ts +12 -0
- package/esm/io/bufferCache.js +54 -0
- package/esm/io/bufferCache.js.map +1 -0
- package/esm/io/index.d.ts +5 -0
- package/esm/io/index.js +24 -0
- package/esm/io/index.js.map +1 -0
- package/esm/io/localFile.d.ts +10 -0
- package/esm/io/localFile.js +31 -0
- package/esm/io/localFile.js.map +1 -0
- package/esm/io/remoteFile.d.ts +16 -0
- package/esm/io/remoteFile.js +64 -0
- package/esm/io/remoteFile.js.map +1 -0
- package/esm/rans/constants.d.ts +3 -0
- package/esm/rans/constants.js +5 -0
- package/esm/rans/constants.js.map +1 -0
- package/esm/rans/d04.d.ts +1 -0
- package/esm/rans/d04.js +67 -0
- package/esm/rans/d04.js.map +1 -0
- package/esm/rans/d14.d.ts +1 -0
- package/esm/rans/d14.js +52 -0
- package/esm/rans/d14.js.map +1 -0
- package/esm/rans/decoding.d.ts +30 -0
- package/esm/rans/decoding.js +118 -0
- package/esm/rans/decoding.js.map +1 -0
- package/esm/rans/frequencies.d.ts +2 -0
- package/esm/rans/frequencies.js +110 -0
- package/esm/rans/frequencies.js.map +1 -0
- package/esm/rans/index.d.ts +1 -0
- package/esm/rans/index.js +195 -0
- package/esm/rans/index.js.map +1 -0
- package/esm/sam.d.ts +1 -0
- package/esm/sam.js +16 -0
- package/esm/sam.js.map +1 -0
- package/esm/unzip-pako.d.ts +2 -0
- package/esm/unzip-pako.js +5 -0
- package/esm/unzip-pako.js.map +1 -0
- package/esm/unzip.d.ts +2 -0
- package/esm/unzip.js +3 -0
- package/esm/unzip.js.map +1 -0
- package/package.json +38 -35
- package/src/craiIndex.js +180 -0
- package/src/cramFile/codecs/_base.js +49 -0
- package/src/cramFile/codecs/beta.js +23 -0
- package/src/cramFile/codecs/byteArrayLength.js +55 -0
- package/src/cramFile/codecs/byteArrayStop.js +50 -0
- package/src/cramFile/codecs/external.js +54 -0
- package/src/cramFile/codecs/gamma.js +30 -0
- package/src/cramFile/codecs/huffman.js +137 -0
- package/src/cramFile/codecs/index.js +38 -0
- package/src/cramFile/codecs/subexp.js +32 -0
- package/src/cramFile/constants.js +55 -0
- package/src/cramFile/container/compressionScheme.js +144 -0
- package/src/cramFile/container/index.js +119 -0
- package/src/cramFile/file.js +347 -0
- package/src/cramFile/index.js +3 -0
- package/src/cramFile/record.js +337 -0
- package/src/cramFile/sectionParsers.js +379 -0
- package/src/cramFile/slice/decodeRecord.js +362 -0
- package/src/cramFile/slice/index.js +497 -0
- package/src/cramFile/util.js +169 -0
- package/src/errors.js +22 -0
- package/src/index.js +5 -0
- package/src/indexedCramFile.js +191 -0
- package/src/io/bufferCache.js +66 -0
- package/src/io/index.js +26 -0
- package/src/io/localFile.js +35 -0
- package/src/io/remoteFile.js +71 -0
- package/src/rans/README.md +1 -0
- package/src/rans/constants.js +5 -0
- package/src/rans/d04.js +83 -0
- package/src/rans/d14.js +59 -0
- package/src/rans/decoding.js +141 -0
- package/src/rans/frequencies.js +121 -0
- package/src/rans/index.js +249 -0
- package/src/sam.js +15 -0
- package/src/unzip-pako.ts +5 -0
- package/src/unzip.ts +2 -0
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
import { CramUnimplementedError, CramSizeLimitError } from './errors'
|
|
2
|
+
|
|
3
|
+
import CramFile from './cramFile'
|
|
4
|
+
|
|
5
|
+
export default class IndexedCramFile {
  /**
   * @param {object} args
   * @param {CramFile} [args.cram] an existing CramFile object. if not given,
   * one is constructed from args.cramUrl / args.cramPath / args.cramFilehandle
   * @param {Index-like} args.index object that supports getEntriesForRange(seqId,start,end) -> Promise[Array[index entries]]
   * @param {number} [args.cacheSize] optional maximum number of CRAM records to cache. default 20,000
   * @param {number} [args.fetchSizeLimit] optional maximum number of bytes to fetch in a single getRecordsForRange call. Default 3 MiB.
   * @param {boolean} [args.checkSequenceMD5] - default true. if false, disables verifying the MD5
   * checksum of the reference sequence underlying a slice. In some applications, this check can cause an inconvenient amount (many megabases) of sequences to be fetched.
   */
  constructor(args) {
    if (args.cram) {
      this.cram = args.cram
    } else {
      this.cram = new CramFile({
        url: args.cramUrl,
        path: args.cramPath,
        filehandle: args.cramFilehandle,
        seqFetch: args.seqFetch,
        checkSequenceMD5: args.checkSequenceMD5,
        cacheSize: args.cacheSize,
      })
    }

    if (!(this.cram instanceof CramFile)) {
      throw new Error('invalid arguments: no cramfile')
    }

    this.index = args.index
    if (!this.index.getEntriesForRange) {
      throw new Error('invalid arguments: not an index')
    }

    this.fetchSizeLimit = args.fetchSizeLimit || 3000000
  }

  /**
   * @param {number} seq numeric ID of the reference sequence
   * @param {number} start start of the range of interest. 1-based closed coordinates.
   * @param {number} end end of the range of interest. 1-based closed coordinates.
   * @param {object} [opts]
   * @param {boolean} [opts.viewAsPairs] also fetch mates of paired reads that lie outside the requested range. default false
   * @param {boolean} [opts.pairAcrossChr] when viewAsPairs, also fetch mates on other reference sequences. default false
   * @param {number} [opts.maxInsertSize] largest insert size for which mates are fetched. default 200000
   * @returns {Promise[Array[CramRecord]]}
   * @throws {CramUnimplementedError} if seq is given as a string name
   * @throws {CramSizeLimitError} if the data (or mate data) to fetch exceeds fetchSizeLimit
   */
  async getRecordsForRange(seq, start, end, opts = {}) {
    // read options into locals instead of mutating the caller's opts object
    const viewAsPairs = opts.viewAsPairs || false
    const pairAcrossChr = opts.pairAcrossChr || false
    const maxInsertSize = opts.maxInsertSize || 200000

    if (typeof seq === 'string') {
      // TODO: support string reference sequence names somehow
      throw new CramUnimplementedError(
        'string sequence names not yet supported',
      )
    }
    const seqId = seq
    const slices = await this.index.getEntriesForRange(seqId, start, end)
    const totalSize = slices.map(s => s.sliceBytes).reduce((a, b) => a + b, 0)
    if (totalSize > this.fetchSizeLimit) {
      throw new CramSizeLimitError(
        `data size of ${totalSize.toLocaleString()} bytes exceeded fetch size limit of ${this.fetchSizeLimit.toLocaleString()} bytes`,
      )
    }

    // TODO: do we need to merge or de-duplicate the blocks?

    // fetch all the slices and parse the feature data, keeping only features
    // that actually overlap the requested range
    const filter = feature =>
      feature.sequenceId === seq &&
      feature.alignmentStart <= end &&
      feature.alignmentStart + feature.lengthOnRef - 1 >= start
    const sliceResults = await Promise.all(
      slices.map(slice => this.getRecordsInSlice(slice, filter)),
    )

    let ret = Array.prototype.concat(...sliceResults)
    if (viewAsPairs) {
      // count occurrences of each read name, and remember the unique IDs of
      // the records we already fetched
      const readNames = {}
      const readIds = {}
      for (let i = 0; i < ret.length; i += 1) {
        const name = ret[i].readName
        const id = ret[i].uniqueId
        if (!readNames[name]) {
          readNames[name] = 0
        }
        readNames[name] += 1
        readIds[id] = 1
      }
      // read names seen exactly once have a mate outside the fetched range
      const unmatedPairs = {}
      Object.entries(readNames).forEach(([k, v]) => {
        if (v === 1) {
          unmatedPairs[k] = true
        }
      })
      // look up the index entries covering each missing mate's position
      const matePromises = []
      for (let i = 0; i < ret.length; i += 1) {
        const name = ret[i].readName
        if (
          unmatedPairs[name] &&
          ret[i].mate &&
          (ret[i].mate.sequenceId === seqId || pairAcrossChr) &&
          Math.abs(ret[i].alignmentStart - ret[i].mate.alignmentStart) <
            maxInsertSize
        ) {
          const mateSlices = this.index.getEntriesForRange(
            ret[i].mate.sequenceId,
            ret[i].mate.alignmentStart,
            ret[i].mate.alignmentStart + 1,
          )
          matePromises.push(mateSlices)
        }
      }
      const mateBlocks = await Promise.all(matePromises)
      let mateChunks = []
      for (let i = 0; i < mateBlocks.length; i += 1) {
        mateChunks.push(...mateBlocks[i])
      }
      // filter out duplicates
      mateChunks = mateChunks
        .sort((a, b) => a.toString().localeCompare(b.toString()))
        .filter(
          (item, pos, ary) =>
            !pos || item.toString() !== ary[pos - 1].toString(),
        )

      const mateTotalSize = mateChunks
        .map(s => s.sliceBytes)
        .reduce((a, b) => a + b, 0)
      if (mateTotalSize > this.fetchSizeLimit) {
        // bug fix: throw the same CramSizeLimitError type as the primary
        // size-limit check above (previously a generic Error, so callers
        // catching CramSizeLimitError missed this case)
        throw new CramSizeLimitError(
          `mate data size of ${mateTotalSize.toLocaleString()} bytes exceeded fetch size limit of ${this.fetchSizeLimit.toLocaleString()} bytes`,
        )
      }

      // fetch each mate slice (through the shared feature cache) and keep
      // only records that are mates we do not already have
      const mateFeatPromises = mateChunks.map(c => {
        let recordPromise = this.cram.featureCache.get(c.toString())
        if (!recordPromise) {
          recordPromise = this.getRecordsInSlice(c, () => true)
          this.cram.featureCache.set(c.toString(), recordPromise)
        }
        return recordPromise.then(feats => {
          const mateRecs = []
          for (let i = 0; i < feats.length; i += 1) {
            const feature = feats[i]
            if (unmatedPairs[feature.readName] && !readIds[feature.uniqueId]) {
              mateRecs.push(feature)
            }
          }
          return mateRecs
        })
      })
      const newMateFeats = await Promise.all(mateFeatPromises)
      if (newMateFeats.length) {
        const newMates = newMateFeats.reduce((result, current) =>
          result.concat(current),
        )
        ret = ret.concat(newMates)
      }
    }
    return ret
  }

  /**
   * Fetch the records of a single slice, applying filterFunction to each.
   * @returns {Promise[Array[CramRecord]]}
   */
  getRecordsInSlice(
    { containerStart, sliceStart, sliceBytes },
    filterFunction,
  ) {
    const container = this.cram.getContainerAtPosition(containerStart)
    const slice = container.getSlice(sliceStart, sliceBytes)
    return slice.getRecords(filterFunction)
  }

  /**
   * @param {number} seqId
   * @returns {Promise} true if the CRAM file contains data for the given
   * reference sequence numerical ID
   */
  hasDataForReferenceSequence(seqId) {
    return this.index.hasDataForReferenceSequence(seqId)
  }
}
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import LRU from 'quick-lru'
|
|
2
|
+
|
|
3
|
+
export default class BufferCache {
  /**
   * LRU cache of fixed-size chunks of a random-access resource, so that
   * overlapping reads are served from memory instead of re-fetching.
   * @param {object} args
   * @param {function} args.fetch (position, length) -> Promise of the chunk's data
   * @param {number} [args.size] total cache budget in bytes (default 10,000,000)
   * @param {number} [args.chunkSize] bytes per cached chunk (default 32768)
   */
  constructor({ fetch, size = 10000000, chunkSize = 32768 }) {
    if (!fetch) {
      throw new Error('fetch function required')
    }
    this.fetch = fetch
    this.chunkSize = chunkSize
    // the LRU capacity is expressed in number of chunks, not bytes
    this.lruCache = new LRU({ maxSize: Math.floor(size / chunkSize) })
  }

  /**
   * Copy `length` bytes of the resource, starting at `position`, into
   * `outputBuffer` at `offset`, fetching (and caching) whole chunks as needed.
   */
  async get(outputBuffer, offset, length, position) {
    if (outputBuffer.length < offset + length) {
      throw new Error('output buffer not big enough for request')
    }

    // calculate the list of chunks involved in this fetch
    const firstChunk = Math.floor(position / this.chunkSize)
    const lastChunk = Math.floor((position + length) / this.chunkSize)
    // NOTE(review): when position+length lands exactly on a chunk boundary,
    // lastChunk names one chunk past the needed range; it is fetched but zero
    // bytes are copied from it (copyEnd works out to 0 below)

    // fetch them all as necessary
    const fetches = new Array(lastChunk - firstChunk + 1)
    for (let chunk = firstChunk; chunk <= lastChunk; chunk += 1) {
      fetches[chunk - firstChunk] = this._getChunk(chunk).then(data => ({
        data,
        chunkNumber: chunk,
      }))
    }

    // stitch together the response buffer using them
    const chunks = await Promise.all(fetches)
    // offset of `position` within the first chunk
    const chunksOffset = position - chunks[0].chunkNumber * this.chunkSize
    chunks.forEach(({ data, chunkNumber }) => {
      const chunkPositionStart = chunkNumber * this.chunkSize
      let copyStart = 0
      let copyEnd = this.chunkSize
      // destination offset in outputBuffer where this chunk's bytes land:
      // output offset, plus whole chunks before this one, minus the bytes of
      // the first chunk that precede `position`
      let copyOffset =
        offset + (chunkNumber - firstChunk) * this.chunkSize - chunksOffset

      if (chunkNumber === firstChunk) {
        // first chunk: skip the bytes before `position`
        copyOffset = offset
        copyStart = chunksOffset
      }
      if (chunkNumber === lastChunk) {
        // last chunk: stop copying at the end of the requested range
        copyEnd = position + length - chunkPositionStart
      }

      data.copy(outputBuffer, copyOffset, copyStart, copyEnd)
    })
  }

  /**
   * Get a promise for the given chunk's data: from the cache when present,
   * otherwise by fetching. The in-flight promise itself is cached, so
   * concurrent requests for the same chunk share one fetch.
   */
  _getChunk(chunkNumber) {
    const cachedPromise = this.lruCache.get(chunkNumber)
    if (cachedPromise) {
      return cachedPromise
    }

    const freshPromise = this.fetch(
      chunkNumber * this.chunkSize,
      this.chunkSize,
    )
    this.lruCache.set(chunkNumber, freshPromise)
    return freshPromise
  }
}
|
package/src/io/index.js
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import url from 'url'
|
|
2
|
+
import RemoteFile from './remoteFile'
|
|
3
|
+
import LocalFile from './localFile'
|
|
4
|
+
|
|
5
|
+
/**
 * Make a filehandle-like object for the given URL string: a LocalFile for
 * file:// URLs, otherwise a RemoteFile.
 */
function fromUrl(source) {
  const parsed = url.parse(source)
  return parsed.protocol === 'file:'
    ? new LocalFile(unescape(parsed.pathname))
    : new RemoteFile(source)
}

/**
 * Resolve exactly one of a filehandle, a URL, or a local path (checked in
 * that order of precedence) into a filehandle-like object.
 * @throws {Error} when none of the three is provided
 */
function open(maybeUrl, maybePath, maybeFilehandle) {
  if (maybeFilehandle) {
    return maybeFilehandle
  }
  if (maybeUrl) {
    return fromUrl(maybeUrl)
  }
  if (maybePath) {
    return new LocalFile(maybePath)
  }
  throw new Error('no url, path, or filehandle provided, cannot open')
}
|
|
25
|
+
|
|
26
|
+
export { LocalFile, RemoteFile, fromUrl, open }
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { promisify } from 'es6-promisify'
|
|
2
|
+
import fs from 'fs'
|
|
3
|
+
|
|
4
|
+
const fsOpen = fs && promisify(fs.open)
|
|
5
|
+
const fsRead = fs && promisify(fs.read)
|
|
6
|
+
const fsFStat = fs && promisify(fs.fstat)
|
|
7
|
+
const fsReadFile = fs && promisify(fs.readFile)
|
|
8
|
+
|
|
9
|
+
export default class LocalFile {
  /**
   * Filehandle-like wrapper over a local filesystem path.
   * @param {string} source path of the file to open (read-only)
   */
  constructor(source) {
    this.position = 0
    this.filename = source
    // fd is a promise for the file descriptor; methods `await` it before use
    this.fd = fsOpen(this.filename, 'r')
  }

  /**
   * Read `length` bytes into `buffer` at `offset`, starting at `position` in
   * the file. When `position` is null, reads from the current seek pointer
   * and advances it by `length`.
   */
  async read(buffer, offset = 0, length, position) {
    let readPosition = position
    if (readPosition === null) {
      readPosition = this.position
      this.position += length
    }
    // bug fix: pass the computed readPosition to fsRead. Previously the raw
    // `position` argument was forwarded, so a null position never actually
    // read from the seek pointer.
    return fsRead(await this.fd, buffer, offset, length, readPosition)
  }

  /** Read and return the entire file's contents. */
  async readFile() {
    return fsReadFile(await this.fd)
  }

  /** Stat the file, caching the result after the first call. */
  async stat() {
    if (!this._stat) {
      this._stat = await fsFStat(await this.fd)
    }
    return this._stat
  }
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import fetch from 'cross-fetch'
|
|
2
|
+
import BufferCache from './bufferCache'
|
|
3
|
+
|
|
4
|
+
export default class RemoteFile {
  /**
   * Filehandle-like wrapper over an HTTP(S) URL, with a chunked read cache.
   * @param {string} source URL of the remote resource
   */
  constructor(source) {
    this.position = 0
    this.url = source
    this.cache = new BufferCache({
      fetch: (start, length) => this._fetch(start, length),
    })
  }

  /**
   * Fetch `length` bytes starting at `position` with an HTTP Range request,
   * recording the total file size from Content-Range when available.
   * @returns {Promise<Buffer>} the fetched bytes
   * @throws {Error} on a non-200/206 response
   */
  async _fetch(position, length) {
    const headers = {}
    if (length < Infinity) {
      // bug fix: HTTP Range is inclusive of both endpoints, so the last byte
      // is position + length - 1 (previously requested one extra byte)
      headers.range = `bytes=${position}-${position + length - 1}`
    } else if (length === Infinity && position !== 0) {
      headers.range = `bytes=${position}-`
    }
    const response = await fetch(this.url, {
      method: 'GET',
      headers,
      redirect: 'follow',
      mode: 'cors',
    })
    if (
      (response.status === 200 && position === 0) ||
      response.status === 206
    ) {
      const nodeBuffer = Buffer.from(await response.arrayBuffer())

      // try to parse out the size of the remote file from Content-Range.
      // bug fix: guard against a missing or unparseable header, which
      // previously caused a TypeError dereferencing a null match result.
      const sizeMatch = /\/(\d+)$/.exec(
        response.headers.get('content-range') || '',
      )
      if (sizeMatch && sizeMatch[1]) {
        this._stat = { size: parseInt(sizeMatch[1], 10) }
      }

      return nodeBuffer
    }
    throw new Error(`HTTP ${response.status} fetching ${this.url}`)
  }

  /**
   * Read `length` bytes into `buffer` at `offset`, starting at `position`.
   * When `position` is null, reads from the current seek pointer and
   * advances it by `length`.
   */
  read(buffer, offset = 0, length = Infinity, position = 0) {
    let readPosition = position
    if (readPosition === null) {
      readPosition = this.position
      this.position += length
    }
    // bug fix: pass the computed readPosition to the cache. Previously the
    // raw `position` argument was forwarded, so a null position never
    // actually read from the seek pointer.
    return this.cache.get(buffer, offset, length, readPosition)
  }

  /** Fetch and return the entire remote file's contents. */
  async readFile() {
    const response = await fetch(this.url, {
      method: 'GET',
      redirect: 'follow',
      mode: 'cors',
    })
    return Buffer.from(await response.arrayBuffer())
  }

  /**
   * Stat the remote file. Issues a small ranged read so the size can be
   * learned from the Content-Range response header.
   * @throws {Error} when the size cannot be determined
   */
  async stat() {
    if (!this._stat) {
      const buf = Buffer.allocUnsafe(10)
      await this.read(buf, 0, 10, 0)
      if (!this._stat) {
        throw new Error(`unable to determine size of file at ${this.url}`)
      }
    }
    return this._stat
  }
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
Ported from htsjdk's rANS implementation
|
package/src/rans/d04.js
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import { CramMalformedError } from '../errors'
|
|
2
|
+
|
|
3
|
+
import { TF_SHIFT } from './constants'
|
|
4
|
+
import Decoding from './decoding'
|
|
5
|
+
|
|
6
|
+
/**
 * rANS order-0 decode: decompress `input` into `out` using the decoding
 * table `D` and per-symbol decoder state `syms`. Four interleaved rANS
 * states (rans0..rans3) each produce every 4th output byte; the 0-3 leftover
 * bytes are decoded one at a time after the main loop.
 */
export default function uncompress(
  /* ByteBuffer */ input,
  /* Decoding.AriDecoder */ D,
  /* Decoding.Symbol[] */ syms,
  /* ByteBuffer */ out,
) {
  // initial states of the four interleaved decoders, read from the stream
  let rans0 = input.getInt()
  let rans1 = input.getInt()
  let rans2 = input.getInt()
  let rans3 = input.getInt()

  const /* int */ outputSize = out.remaining()
  // largest multiple of 4 that is <= outputSize
  const /* int */ outputEnd = outputSize & ~3
  for (let i = 0; i < outputEnd; i += 4) {
    // look up the next decoded byte for each of the four streams
    const /* byte */ c0 = D.R[Decoding.get(rans0, TF_SHIFT)]
    const /* byte */ c1 = D.R[Decoding.get(rans1, TF_SHIFT)]
    const /* byte */ c2 = D.R[Decoding.get(rans2, TF_SHIFT)]
    const /* byte */ c3 = D.R[Decoding.get(rans3, TF_SHIFT)]

    out.putAt(i, c0)
    out.putAt(i + 1, c1)
    out.putAt(i + 2, c2)
    out.putAt(i + 3, c3)

    // pop each decoded symbol from its state...
    rans0 = Decoding.advanceSymbolStep(rans0, syms[0xff & c0], TF_SHIFT)
    rans1 = Decoding.advanceSymbolStep(rans1, syms[0xff & c1], TF_SHIFT)
    rans2 = Decoding.advanceSymbolStep(rans2, syms[0xff & c2], TF_SHIFT)
    rans3 = Decoding.advanceSymbolStep(rans3, syms[0xff & c3], TF_SHIFT)

    // ...then refill each state from the input as needed
    rans0 = Decoding.renormalize(rans0, input)
    rans1 = Decoding.renormalize(rans1, input)
    rans2 = Decoding.renormalize(rans2, input)
    rans3 = Decoding.renormalize(rans3, input)
  }

  // decode the 0-3 remaining bytes, one state per byte
  out.setPosition(outputEnd)
  let /* byte */ c
  switch (outputSize & 3) {
    case 0:
      break
    case 1:
      c = D.R[Decoding.get(rans0, TF_SHIFT)]
      // NOTE(review): the updated state returned by advanceSymbol is
      // discarded in these remainder cases; each state is only used once
      // more, so this does not affect the output
      Decoding.advanceSymbol(rans0, input, syms[0xff & c], TF_SHIFT)
      out.put(c)
      break

    case 2:
      c = D.R[Decoding.get(rans0, TF_SHIFT)]
      Decoding.advanceSymbol(rans0, input, syms[0xff & c], TF_SHIFT)
      out.put(c)

      c = D.R[Decoding.get(rans1, TF_SHIFT)]
      Decoding.advanceSymbol(rans1, input, syms[0xff & c], TF_SHIFT)
      out.put(c)
      break

    case 3:
      c = D.R[Decoding.get(rans0, TF_SHIFT)]
      Decoding.advanceSymbol(rans0, input, syms[0xff & c], TF_SHIFT)
      out.put(c)

      c = D.R[Decoding.get(rans1, TF_SHIFT)]
      Decoding.advanceSymbol(rans1, input, syms[0xff & c], TF_SHIFT)
      out.put(c)

      c = D.R[Decoding.get(rans2, TF_SHIFT)]
      Decoding.advanceSymbol(rans2, input, syms[0xff & c], TF_SHIFT)
      out.put(c)
      break

    default:
      // unreachable: (outputSize & 3) is always 0-3
      throw new CramMalformedError(
        'invalid output size encountered during rANS decoding',
      )
  }

  // rewind so callers can read the decoded bytes from the start
  out.setPosition(0)
}
|
package/src/rans/d14.js
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import { TF_SHIFT } from './constants'
|
|
2
|
+
import Decoding from './decoding'
|
|
3
|
+
|
|
4
|
+
/**
 * rANS order-1 decode: decompress `input` into `output`. `D` and `syms` are
 * indexed by the previously decoded byte (order-1 context). The output is
 * split into four quarters, each decoded by its own interleaved rANS state
 * (rans0, rans1, rans2, rans7); any remainder beyond 4*(outputSize>>2)
 * bytes is finished by the fourth state alone.
 */
export default function uncompress(
  /* ByteBuffer */ input,
  /* ByteBuffer */ output,
  /* Decoding.AriDecoder[] */ D,
  /* Decoding.Symbol[][] */ syms,
) {
  const /* int */ outputSize = output.remaining()
  // initial states of the four interleaved decoders
  let rans0 = input.getInt()
  let rans1 = input.getInt()
  let rans2 = input.getInt()
  let rans7 = input.getInt()

  // each state writes one quarter of the output
  const /* int */ isz4 = outputSize >> 2
  // write cursors for the four quarters
  let /* int */ i0 = 0
  let /* int */ i1 = isz4
  let /* int */ i2 = 2 * isz4
  let /* int */ i7 = 3 * isz4
  // order-1 context: the last byte decoded by each state (initially 0)
  let /* int */ l0 = 0
  let /* int */ l1 = 0
  let /* int */ l2 = 0
  let /* int */ l7 = 0
  for (; i0 < isz4; i0 += 1, i1 += 1, i2 += 1, i7 += 1) {
    // decode one byte per state, using each state's previous byte as context
    const /* int */ c0 = 0xff & D[l0].R[Decoding.get(rans0, TF_SHIFT)]
    const /* int */ c1 = 0xff & D[l1].R[Decoding.get(rans1, TF_SHIFT)]
    const /* int */ c2 = 0xff & D[l2].R[Decoding.get(rans2, TF_SHIFT)]
    const /* int */ c7 = 0xff & D[l7].R[Decoding.get(rans7, TF_SHIFT)]

    output.putAt(i0, c0)
    output.putAt(i1, c1)
    output.putAt(i2, c2)
    output.putAt(i7, c7)

    // pop each decoded symbol from its state...
    rans0 = Decoding.advanceSymbolStep(rans0, syms[l0][c0], TF_SHIFT)
    rans1 = Decoding.advanceSymbolStep(rans1, syms[l1][c1], TF_SHIFT)
    rans2 = Decoding.advanceSymbolStep(rans2, syms[l2][c2], TF_SHIFT)
    rans7 = Decoding.advanceSymbolStep(rans7, syms[l7][c7], TF_SHIFT)

    // ...then refill each state from the input as needed
    rans0 = Decoding.renormalize(rans0, input)
    rans1 = Decoding.renormalize(rans1, input)
    rans2 = Decoding.renormalize(rans2, input)
    rans7 = Decoding.renormalize(rans7, input)

    // the bytes just decoded become the next iteration's contexts
    l0 = c0
    l1 = c1
    l2 = c2
    l7 = c7
  }

  // Remainder
  for (; i7 < outputSize; i7 += 1) {
    const /* int */ c7 = 0xff & D[l7].R[Decoding.get(rans7, TF_SHIFT)]
    output.putAt(i7, c7)
    rans7 = Decoding.advanceSymbol(rans7, input, syms[l7][c7], TF_SHIFT)
    l7 = c7
  }
}
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import { CramMalformedError } from '../errors'
|
|
2
|
+
|
|
3
|
+
import { RANS_BYTE_L } from './constants'
|
|
4
|
+
|
|
5
|
+
class FC {
  // Frequency (F) and cumulative-frequency (C) pair for one symbol; both
  // slots are filled in later, after construction. (Java original: int F, C)
  constructor() {
    this.F = undefined
    this.C = undefined
  }
}
|
|
12
|
+
|
|
13
|
+
class AriDecoder {
  // Per-stream decoder state: 256 FC slots (one per possible byte value)
  // plus a reverse-lookup byte table R, which begins as null and is
  // built later by the frequency-table reader.
  constructor() {
    this.fc = Array.from({ length: 256 }, () => new FC())
    this.R = null
  }
}
|
|
25
|
+
|
|
26
|
+
// NOTE: this class shadows the global `Symbol` within this module; the name
// is kept for parity with the upstream Java RansDecSymbol.
class Symbol {
  // start: start of the symbol's range; freq: symbol frequency.
  // Both begin undefined and are assigned by symbolInit().
  constructor() {
    this.start = undefined
    this.freq = undefined
  }
}
|
|
34
|
+
|
|
35
|
+
// Initialize a decoder symbol to start "start" and frequency "freq".
// All symbol ranges must fit within the 1<<16 total frequency space, so
// start may be at most 1<<16 and freq at most (1<<16) - start.
// Throws CramMalformedError when either bound is violated.
function symbolInit(sym, start, freq) {
  if (!(start <= 1 << 16)) {
    throw new CramMalformedError(`assertion failed: start <= 1<<16`)
  }
  if (!(freq <= (1 << 16) - start)) {
    // Fixed: the message previously claimed `freq <= 1<<16`, which is not
    // the condition actually checked above.
    throw new CramMalformedError(`assertion failed: freq <= (1<<16) - start`)
  }
  sym.start = start
  sym.freq = freq
}
|
|
46
|
+
|
|
47
|
+
// Advances in the bit stream by "popping" a single symbol with range start
// "start" and frequency "freq". All frequencies are assumed to sum to
// "1 << scaleBits". No renormalization or output happens.
function advanceStep(r, start, freq, scaleBits) {
  // s, x = D(x): the low scaleBits of the state hold the cumulative
  // frequency; the rest is the previous state scaled by freq.
  const cumulative = r & ((1 << scaleBits) - 1)
  return freq * (r >> scaleBits) + cumulative - start
}
|
|
62
|
+
|
|
63
|
+
// Equivalent to RansDecAdvanceStep that takes a symbol.
// (The one-line advanceStep helper is inlined here: pop a symbol with the
// given start/freq out of state r, without renormalizing.)
function advanceSymbolStep(r, sym, scaleBits) {
  const mask = (1 << scaleBits) - 1
  return sym.freq * (r >> scaleBits) + (r & mask) - sym.start
}
|
|
71
|
+
|
|
72
|
+
// Returns the current cumulative frequency (map it to a symbol yourself!)
function get(r, scaleBits) {
  const mask = (1 << scaleBits) - 1
  return r & mask
}
|
|
76
|
+
|
|
77
|
+
// Advances in the bit stream by "popping" a single symbol with range start
// "start" and frequency "freq". All frequencies are assumed to sum to
// "1 << scaleBits", and renormalization bytes are consumed from pptr
// (which advances as a side effect of pptr.get()).
function advance(r, pptr, start, freq, scaleBits) {
  const mask = (1 << scaleBits) - 1

  // s, x = D(x)
  let state = freq * (r >> scaleBits) + (r & mask) - start

  // Renormalize: pull bytes from the stream until the state is back at or
  // above RANS_BYTE_L. (The original `if` guarding a `do/while` on the same
  // condition is equivalent to this plain `while`.)
  while (state < RANS_BYTE_L) {
    state = (state << 8) | (0xff & pptr.get())
  }

  return state
}
|
|
103
|
+
|
|
104
|
+
// Equivalent to RansDecAdvance that takes a symbol: pop the symbol out of
// state r and renormalize from pptr.
function advanceSymbol(r, pptr, sym, scaleBits) {
  const { start, freq } = sym
  return advance(r, pptr, start, freq, scaleBits)
}
|
|
113
|
+
|
|
114
|
+
// Re-normalize: pull bytes from pptr into the low end of the rANS state
// until it is back at or above RANS_BYTE_L. (The original `if` guarding a
// `do/while` on the same condition is equivalent to this plain `while`.)
function renormalize(r, pptr) {
  let state = r
  while (state < RANS_BYTE_L) {
    state = (state << 8) | (0xff & pptr.get())
  }
  return state
}
|
|
128
|
+
|
|
129
|
+
// Public surface of the rANS decoder helpers, exported as a single
// namespace object (mirrors the upstream Java static-class layout).
export default {
  FC,
  AriDecoder,
  Symbol,
  symbolInit,
  advanceStep,
  advanceSymbolStep,
  get,
  advanceSymbol,
  renormalize,
}
|