@loaders.gl/shapefile 4.2.0-alpha.4 → 4.2.0-alpha.6
This diff compares the contents of two publicly released versions of this package, as published to their public registry. It is provided for informational purposes only.
- package/dist/dbf-loader.js +25 -20
- package/dist/dbf-worker.js +14 -7
- package/dist/dist.dev.js +219 -232
- package/dist/dist.min.js +12 -0
- package/dist/index.cjs +74 -75
- package/dist/index.cjs.map +7 -0
- package/dist/index.d.ts +6 -6
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/lib/parsers/parse-dbf.d.ts +1 -1
- package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
- package/dist/lib/parsers/parse-dbf.js +300 -258
- package/dist/lib/parsers/parse-shapefile.d.ts +3 -3
- package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shapefile.js +225 -184
- package/dist/lib/parsers/parse-shp-geometry.d.ts +1 -1
- package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp-geometry.js +260 -168
- package/dist/lib/parsers/parse-shp-header.js +33 -23
- package/dist/lib/parsers/parse-shp.d.ts +1 -1
- package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp.js +146 -109
- package/dist/lib/parsers/parse-shx.js +19 -15
- package/dist/lib/parsers/types.js +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js +154 -95
- package/dist/lib/streaming/binary-reader.js +51 -23
- package/dist/lib/streaming/zip-batch-iterators.js +61 -45
- package/dist/shapefile-loader.js +26 -19
- package/dist/shp-loader.js +25 -19
- package/dist/shp-worker.js +22 -16
- package/dist/workers/dbf-worker.js +0 -1
- package/dist/workers/shp-worker.js +0 -1
- package/package.json +11 -7
- package/dist/dbf-loader.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/lib/parsers/parse-shp.js.map +0 -1
- package/dist/lib/parsers/parse-shx.js.map +0 -1
- package/dist/lib/parsers/types.js.map +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/lib/streaming/binary-reader.js.map +0 -1
- package/dist/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/shapefile-loader.js.map +0 -1
- package/dist/shp-loader.js.map +0 -1
- package/dist/workers/dbf-worker.js.map +0 -1
- package/dist/workers/shp-worker.js.map +0 -1
package/dist/lib/parsers/parse-shp.js
@@ -4,130 +4,167 @@ import { parseRecord } from "./parse-shp-geometry.js";
 const LITTLE_ENDIAN = true;
 const BIG_ENDIAN = false;
 const SHP_HEADER_SIZE = 100;
+// According to the spec, the record header is just 8 bytes, but here we set it
+// to 12 so that we can also access the record's type
 const SHP_RECORD_HEADER_SIZE = 12;
 const STATE = {
- … (4 lines removed; content not captured)
+    EXPECTING_HEADER: 0,
+    EXPECTING_RECORD: 1,
+    END: 2,
+    ERROR: 3
 };
 class SHPParser {
- … (13 lines removed; content not captured)
-    currentIndex: NaN
+    options = {};
+    binaryReader = new BinaryChunkReader({ maxRewindBytes: SHP_RECORD_HEADER_SIZE });
+    state = STATE.EXPECTING_HEADER;
+    result = {
+        geometries: [],
+        // Initialize with number values to make TS happy
+        // These are initialized for real in STATE.EXPECTING_HEADER
+        progress: {
+            bytesTotal: NaN,
+            bytesUsed: NaN,
+            rows: NaN
+        },
+        currentIndex: NaN
     };
- … (12 lines removed; content not captured)
+    constructor(options) {
+        this.options = options;
+    }
+    write(arrayBuffer) {
+        this.binaryReader.write(arrayBuffer);
+        this.state = parseState(this.state, this.result, this.binaryReader, this.options);
+    }
+    end() {
+        this.binaryReader.end();
+        this.state = parseState(this.state, this.result, this.binaryReader, this.options);
+        // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
+        if (this.state !== STATE.END) {
+            this.state = STATE.ERROR;
+            this.result.error = 'SHP incomplete file';
+        }
     }
-    }
 }
 export function parseSHP(arrayBuffer, options) {
- … (4 lines removed; content not captured)
+    const shpParser = new SHPParser(options);
+    shpParser.write(arrayBuffer);
+    shpParser.end();
+    // @ts-ignore
+    return shpParser.result;
 }
+/**
+ * @param asyncIterator
+ * @param options
+ * @returns
+ */
 export async function* parseSHPInBatches(asyncIterator, options) {
- … (7 lines removed; content not captured)
+    const parser = new SHPParser(options);
+    let headerReturned = false;
+    for await (const arrayBuffer of asyncIterator) {
+        parser.write(arrayBuffer);
+        if (!headerReturned && parser.result.header) {
+            headerReturned = true;
+            yield parser.result.header;
+        }
+        if (parser.result.geometries.length > 0) {
+            yield parser.result.geometries;
+            parser.result.geometries = [];
+        }
     }
+    parser.end();
     if (parser.result.geometries.length > 0) {
- … (1 line removed; content not captured)
-        parser.result.geometries = [];
+        yield parser.result.geometries;
     }
- … (1 line removed; content not captured)
-    parser.end();
-    if (parser.result.geometries.length > 0) {
-        yield parser.result.geometries;
-    }
-    return;
+    return;
 }
+/**
+ * State-machine parser for SHP data
+ *
+ * Note that whenever more data is needed, a `return`, not a `break`, is
+ * necessary, as the `break` keeps the context within `parseState`, while
+ * `return` releases context so that more data can be written into the
+ * BinaryChunkReader.
+ *
+ * @param state Current state
+ * @param result An object to hold result data
+ * @param binaryReader
+ * @return State at end of current parsing
+ */
+/* eslint-disable complexity, max-depth */
 function parseState(state, result, binaryReader, options) {
- … (32 lines removed; content not captured)
+    // eslint-disable-next-line no-constant-condition
+    while (true) {
+        try {
+            switch (state) {
+                case STATE.ERROR:
+                case STATE.END:
+                    return state;
+                case STATE.EXPECTING_HEADER:
+                    // Parse initial file header
+                    const dataView = binaryReader.getDataView(SHP_HEADER_SIZE);
+                    if (!dataView) {
+                        return state;
+                    }
+                    result.header = parseSHPHeader(dataView);
+                    result.progress = {
+                        bytesUsed: 0,
+                        bytesTotal: result.header.length,
+                        rows: 0
+                    };
+                    // index numbering starts at 1
+                    result.currentIndex = 1;
+                    state = STATE.EXPECTING_RECORD;
+                    break;
+                case STATE.EXPECTING_RECORD:
+                    while (binaryReader.hasAvailableBytes(SHP_RECORD_HEADER_SIZE)) {
+                        const recordHeaderView = binaryReader.getDataView(SHP_RECORD_HEADER_SIZE);
+                        const recordHeader = {
+                            recordNumber: recordHeaderView.getInt32(0, BIG_ENDIAN),
+                            // 2 byte words; includes the four words of record header
+                            byteLength: recordHeaderView.getInt32(4, BIG_ENDIAN) * 2,
+                            // This is actually part of the record, not the header...
+                            type: recordHeaderView.getInt32(8, LITTLE_ENDIAN)
+                        };
+                        if (!binaryReader.hasAvailableBytes(recordHeader.byteLength - 4)) {
+                            binaryReader.rewind(SHP_RECORD_HEADER_SIZE);
+                            return state;
+                        }
+                        const invalidRecord = recordHeader.byteLength < 4 ||
+                            recordHeader.type !== result.header?.type ||
+                            recordHeader.recordNumber !== result.currentIndex;
+                        // All records must have at least four bytes (for the record shape type)
+                        if (invalidRecord) {
+                            // Malformed record, try again, advancing just 4 bytes
+                            // Note: this is a rewind because binaryReader.getDataView above
+                            // moved the pointer forward 12 bytes, so rewinding 8 bytes still
+                            // leaves us 4 bytes ahead
+                            binaryReader.rewind(SHP_RECORD_HEADER_SIZE - 4);
+                        }
+                        else {
+                            // Note: type is actually part of the record, not the header, so
+                            // rewind 4 bytes before reading record
+                            binaryReader.rewind(4);
+                            const recordView = binaryReader.getDataView(recordHeader.byteLength);
+                            const geometry = parseRecord(recordView, options);
+                            result.geometries.push(geometry);
+                            result.currentIndex++;
+                            result.progress.rows = result.currentIndex - 1;
+                        }
+                    }
+                    if (binaryReader.ended) {
+                        state = STATE.END;
+                    }
+                    return state;
+                default:
+                    state = STATE.ERROR;
+                    result.error = `illegal parser state ${state}`;
+                    return state;
             }
- … (6 lines removed; content not captured)
-            const geometry = parseRecord(recordView, options);
-            result.geometries.push(geometry);
-            result.currentIndex++;
-            result.progress.rows = result.currentIndex - 1;
-        }
-    }
-    if (binaryReader.ended) {
-        state = STATE.END;
-    }
-    return state;
-    default:
-        state = STATE.ERROR;
-        result.error = `illegal parser state ${state}`;
-        return state;
-    }
-    } catch (error) {
-        state = STATE.ERROR;
-        result.error = `SHP parsing failed: ${error === null || error === void 0 ? void 0 : error.message}`;
-        return state;
+        }
+        catch (error) {
+            state = STATE.ERROR;
+            result.error = `SHP parsing failed: ${error?.message}`;
+            return state;
+        }
     }
-}
 }
-//# sourceMappingURL=parse-shp.js.map
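For orientation, the new `parseSHPInBatches` yields the parsed SHP header once, then arrays of geometries as chunks arrive. A minimal sketch of driving it by hand, assuming `parseSHPInBatches` is imported from the module above and `chunks` is any async iterable of ArrayBuffers (both names are illustrative, not part of this diff):

    async function logSHPBatches(chunks) {
      let batchCount = 0;
      for await (const batch of parseSHPInBatches(chunks, {})) {
        if (Array.isArray(batch)) {
          // Subsequent yields are arrays of parsed geometries
          console.log(`batch ${batchCount++}: ${batch.length} geometries`);
        } else {
          // The first yield is the parsed SHP header object
          console.log('header', batch);
        }
      }
    }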
package/dist/lib/parsers/parse-shx.js
@@ -1,20 +1,24 @@
 import { parseSHPHeader } from "./parse-shp-header.js";
 const SHX_HEADER_SIZE = 100;
 const BIG_ENDIAN = false;
+/**
+ * @param arrayBuffer
+ * @returns SHXOutput
+ */
 export function parseShx(arrayBuffer) {
- … (14 lines removed; content not captured)
+    // SHX header is identical to SHP Header
+    const headerView = new DataView(arrayBuffer, 0, SHX_HEADER_SIZE);
+    const header = parseSHPHeader(headerView);
+    const contentLength = header.length - SHX_HEADER_SIZE;
+    const contentView = new DataView(arrayBuffer, SHX_HEADER_SIZE, contentLength);
+    const offsets = new Int32Array(contentLength);
+    const lengths = new Int32Array(contentLength);
+    for (let i = 0; i < contentLength / 8; i++) {
+        offsets[i] = contentView.getInt32(i * 8, BIG_ENDIAN);
+        lengths[i] = contentView.getInt32(i * 8 + 4, BIG_ENDIAN);
+    }
+    return {
+        offsets,
+        lengths
+    };
 }
-//# sourceMappingURL=parse-shx.js.map
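As a usage note (a sketch, not part of the diff): `parseShx` takes the raw ArrayBuffer of the .shx side-car file and returns parallel offset/length arrays. As with the "2 byte words" record lengths in parse-shp.js above, the shapefile spec stores these values in 16-bit words, so they are doubled to get byte positions:

    // `shxArrayBuffer` is assumed to hold the bytes of a .shx file
    const {offsets, lengths} = parseShx(shxArrayBuffer);
    // Offsets and lengths are in 16-bit words per the shapefile spec;
    // byte offset and byte length of record 0 in the .shp file:
    const byteOffset = offsets[0] * 2;
    const byteLength = lengths[0] * 2;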
package/dist/lib/streaming/binary-chunk-reader.js
@@ -1,105 +1,164 @@
+// loaders.gl
+// SPDX-License-Identifier: MIT
+// Copyright (c) vis.gl contributors
 export class BinaryChunkReader {
- … (13 lines removed; content not captured)
-    write(arrayBuffer) {
-        this.arrayBuffers.push(arrayBuffer);
-    }
-    end() {
-        this.arrayBuffers = [];
-        this.ended = true;
-    }
-    hasAvailableBytes(bytes) {
-        let bytesAvailable = -this.offset;
-        for (const arrayBuffer of this.arrayBuffers) {
-            bytesAvailable += arrayBuffer.byteLength;
-            if (bytesAvailable >= bytes) {
-                return true;
-            }
+    offset;
+    arrayBuffers;
+    ended;
+    maxRewindBytes;
+    constructor(options) {
+        const { maxRewindBytes = 0 } = options || {};
+        /** current global offset into current array buffer*/
+        this.offset = 0;
+        /** current buffer from iterator */
+        this.arrayBuffers = [];
+        this.ended = false;
+        /** bytes behind offset to hold on to */
+        this.maxRewindBytes = maxRewindBytes;
     }
- … (5 lines removed; content not captured)
-        for (let i = 0; i < this.arrayBuffers.length; i++) {
-            const buf = this.arrayBuffers[i];
-            if (offset + buf.byteLength <= 0) {
-                offset += buf.byteLength;
-                continue;
-            }
-            const start = offset <= 0 ? Math.abs(offset) : 0;
-            let end;
-            if (start + bytes <= buf.byteLength) {
-                end = start + bytes;
-                selectedBuffers.push([i, [start, end]]);
-                return selectedBuffers;
-            }
-            end = buf.byteLength;
-            selectedBuffers.push([i, [start, end]]);
-            bytes -= buf.byteLength - start;
-            offset += buf.byteLength;
+    /**
+     * @param arrayBuffer
+     */
+    write(arrayBuffer) {
+        this.arrayBuffers.push(arrayBuffer);
     }
- … (3 lines removed; content not captured)
-        const bufferOffsets = this.findBufferOffsets(bytes);
-        if (!bufferOffsets && this.ended) {
-            throw new Error('binary data exhausted');
+    end() {
+        this.arrayBuffers = [];
+        this.ended = true;
     }
- … (2 lines removed; content not captured)
+    /**
+     * Has enough bytes available in array buffers
+     *
+     * @param bytes Number of bytes
+     * @return boolean
+     */
+    hasAvailableBytes(bytes) {
+        let bytesAvailable = -this.offset;
+        for (const arrayBuffer of this.arrayBuffers) {
+            bytesAvailable += arrayBuffer.byteLength;
+            if (bytesAvailable >= bytes) {
+                return true;
+            }
+        }
+        return false;
     }
- … (7 lines removed; content not captured)
+    /**
+     * Find offsets of byte ranges within this.arrayBuffers
+     *
+     * @param bytes Byte length to read
+     * @return Arrays with byte ranges pointing to this.arrayBuffers, Output type is nested array, e.g. [ [0, [1, 2]], ...]
+     */
+    findBufferOffsets(bytes) {
+        let offset = -this.offset;
+        const selectedBuffers = [];
+        for (let i = 0; i < this.arrayBuffers.length; i++) {
+            const buf = this.arrayBuffers[i];
+            // Current buffer isn't long enough to reach global offset
+            if (offset + buf.byteLength <= 0) {
+                offset += buf.byteLength;
+                // eslint-disable-next-line no-continue
+                continue;
+            }
+            // Find start/end offsets for this buffer
+            // When offset < 0, need to skip over Math.abs(offset) bytes
+            // When offset > 0, implies bytes in previous buffer, start at 0
+            const start = offset <= 0 ? Math.abs(offset) : 0;
+            let end;
+            // Length of requested bytes is contained in current buffer
+            if (start + bytes <= buf.byteLength) {
+                end = start + bytes;
+                selectedBuffers.push([i, [start, end]]);
+                return selectedBuffers;
+            }
+            // Will need to look into next buffer
+            end = buf.byteLength;
+            selectedBuffers.push([i, [start, end]]);
+            // Need to read fewer bytes in next iter
+            bytes -= buf.byteLength - start;
+            offset += buf.byteLength;
+        }
+        // Should only finish loop if exhausted all arrays
+        return null;
     }
- … (9 lines removed; content not captured)
+    /**
+     * Get the required number of bytes from the iterator
+     *
+     * @param bytes Number of bytes
+     * @return DataView with data
+     */
+    getDataView(bytes) {
+        const bufferOffsets = this.findBufferOffsets(bytes);
+        // return `null` if not enough data, except if end() already called, in
+        // which case throw an error.
+        if (!bufferOffsets && this.ended) {
+            throw new Error('binary data exhausted');
+        }
+        if (!bufferOffsets) {
+            return null;
+        }
+        // If only one arrayBuffer needed, return DataView directly
+        if (bufferOffsets.length === 1) {
+            const [bufferIndex, [start, end]] = bufferOffsets[0];
+            const arrayBuffer = this.arrayBuffers[bufferIndex];
+            const view = new DataView(arrayBuffer, start, end - start);
+            this.offset += bytes;
+            this.disposeBuffers();
+            return view;
+        }
+        // Concatenate portions of multiple ArrayBuffers
+        const view = new DataView(this._combineArrayBuffers(bufferOffsets));
+        this.offset += bytes;
+        this.disposeBuffers();
+        return view;
     }
- … (6 lines removed; content not captured)
+    /**
+     * Dispose of old array buffers
+     */
+    disposeBuffers() {
+        while (this.arrayBuffers.length > 0 &&
+            this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength) {
+            this.offset -= this.arrayBuffers[0].byteLength;
+            this.arrayBuffers.shift();
+        }
     }
- … (7 lines removed; content not captured)
+    /**
+     * Copy multiple ArrayBuffers into one contiguous ArrayBuffer
+     *
+     * In contrast to concatenateArrayBuffers, this only copies the necessary
+     * portions of the source arrays, rather than first copying the entire arrays
+     * then taking a part of them.
+     *
+     * @param bufferOffsets List of internal array offsets
+     * @return New contiguous ArrayBuffer
+     */
+    _combineArrayBuffers(bufferOffsets) {
+        let byteLength = 0;
+        for (const bufferOffset of bufferOffsets) {
+            const [start, end] = bufferOffset[1];
+            byteLength += end - start;
+        }
+        const result = new Uint8Array(byteLength);
+        // Copy the subarrays
+        let resultOffset = 0;
+        for (const bufferOffset of bufferOffsets) {
+            const [bufferIndex, [start, end]] = bufferOffset;
+            const sourceArray = new Uint8Array(this.arrayBuffers[bufferIndex]);
+            result.set(sourceArray.subarray(start, end), resultOffset);
+            resultOffset += end - start;
+        }
+        return result.buffer;
+    }
+    /**
+     * @param bytes
+     */
+    skip(bytes) {
+        this.offset += bytes;
+    }
+    /**
+     * @param bytes
+     */
+    rewind(bytes) {
+        // TODO - only works if offset is already set
+        this.offset -= bytes;
     }
-        return result.buffer;
-    }
-    skip(bytes) {
-        this.offset += bytes;
-    }
-    rewind(bytes) {
-        this.offset -= bytes;
-    }
 }
-//# sourceMappingURL=binary-chunk-reader.js.map
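To illustrate the contract (a minimal sketch, not part of the package): `getDataView` transparently stitches a read across chunk boundaries, returns `null` while data is merely not yet available, and only throws once `end()` has been called; `maxRewindBytes` controls how much already-consumed data is retained for `rewind`:

    const reader = new BinaryChunkReader({maxRewindBytes: 12});
    reader.write(new ArrayBuffer(8));
    reader.write(new ArrayBuffer(8));
    const view = reader.getDataView(12); // spans both chunks; copied into one buffer
    reader.rewind(12);                   // ok: within the 12 retained bytes
    reader.getDataView(20);              // → null: only 16 bytes buffered so far
    reader.end();
    // After end(), the same short read throws 'binary data exhausted'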
package/dist/lib/streaming/binary-reader.js
@@ -1,26 +1,54 @@
+// loaders.gl
+// SPDX-License-Identifier: MIT
+// Copyright (c) vis.gl contributors
 export class BinaryReader {
- … (12 lines removed; content not captured)
+    offset;
+    arrayBuffer;
+    constructor(arrayBuffer) {
+        /** current global (stream) offset */
+        this.offset = 0;
+        /** current buffer from iterator */
+        this.arrayBuffer = arrayBuffer;
+    }
+    /**
+     * Checks if there are available bytes in data
+     *
+     * @param bytes
+     * @returns boolean
+     */
+    hasAvailableBytes(bytes) {
+        return this.arrayBuffer.byteLength - this.offset >= bytes;
+    }
+    /**
+     * Get the required number of bytes from the iterator
+     *
+     * @param bytes
+     * @returns Dataview
+     */
+    getDataView(bytes) {
+        if (bytes && !this.hasAvailableBytes(bytes)) {
+            throw new Error('binary data exhausted');
+        }
+        const dataView = bytes
+            ? new DataView(this.arrayBuffer, this.offset, bytes)
+            : new DataView(this.arrayBuffer, this.offset);
+        this.offset += bytes;
+        return dataView;
+    }
+    /**
+     * Skipping
+     *
+     * @param bytes
+     */
+    skip(bytes) {
+        this.offset += bytes;
+    }
+    /**
+     * Rewinding
+     *
+     * @param bytes
+     */
+    rewind(bytes) {
+        this.offset -= bytes;
     }
-        const dataView = bytes ? new DataView(this.arrayBuffer, this.offset, bytes) : new DataView(this.arrayBuffer, this.offset);
-        this.offset += bytes;
-        return dataView;
-    }
-    skip(bytes) {
-        this.offset += bytes;
-    }
-    rewind(bytes) {
-        this.offset -= bytes;
-    }
 }
-//# sourceMappingURL=binary-reader.js.map
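For comparison (again a sketch derived from the code above, not from the diff's prose): `BinaryReader` exposes the same `hasAvailableBytes`/`getDataView`/`skip`/`rewind` surface over a single, fully loaded ArrayBuffer, and throws rather than returning `null` on a short read:

    const reader = new BinaryReader(new ArrayBuffer(100));
    const headerView = reader.getDataView(24); // view over bytes 0-23
    reader.skip(4);                            // advance past 4 unneeded bytes
    reader.rewind(28);                         // back to offset 0
    reader.getDataView(200);                   // throws: 'binary data exhausted'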