@loaders.gl/shapefile 3.1.0-beta.5 → 3.1.2

This diff compares the contents of two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
Files changed (41)
  1. package/dist/bundle.js +5 -7554
  2. package/dist/dist.min.js +7557 -0
  3. package/dist/es5/bundle.js +1 -1
  4. package/dist/es5/bundle.js.map +1 -1
  5. package/dist/es5/dbf-loader.js +40 -5
  6. package/dist/es5/dbf-loader.js.map +1 -1
  7. package/dist/es5/index.js +5 -5
  8. package/dist/es5/lib/parsers/parse-dbf.js +230 -85
  9. package/dist/es5/lib/parsers/parse-dbf.js.map +1 -1
  10. package/dist/es5/lib/parsers/parse-shapefile.js +408 -151
  11. package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -1
  12. package/dist/es5/lib/parsers/parse-shp-geometry.js +96 -49
  13. package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -1
  14. package/dist/es5/lib/parsers/parse-shp-header.js +4 -4
  15. package/dist/es5/lib/parsers/parse-shp-header.js.map +1 -1
  16. package/dist/es5/lib/parsers/parse-shp.js +165 -47
  17. package/dist/es5/lib/parsers/parse-shp.js.map +1 -1
  18. package/dist/es5/lib/parsers/parse-shx.js +11 -11
  19. package/dist/es5/lib/parsers/parse-shx.js.map +1 -1
  20. package/dist/es5/lib/streaming/binary-chunk-reader.js +172 -99
  21. package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -1
  22. package/dist/es5/lib/streaming/binary-reader.js +35 -24
  23. package/dist/es5/lib/streaming/binary-reader.js.map +1 -1
  24. package/dist/es5/lib/streaming/zip-batch-iterators.js +96 -37
  25. package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -1
  26. package/dist/es5/shapefile-loader.js +3 -3
  27. package/dist/es5/shapefile-loader.js.map +1 -1
  28. package/dist/es5/shp-loader.js +41 -6
  29. package/dist/es5/shp-loader.js.map +1 -1
  30. package/dist/esm/dbf-loader.js +1 -1
  31. package/dist/esm/dbf-loader.js.map +1 -1
  32. package/dist/esm/lib/parsers/parse-shapefile.js +4 -0
  33. package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -1
  34. package/dist/esm/shapefile-loader.js +1 -1
  35. package/dist/esm/shapefile-loader.js.map +1 -1
  36. package/dist/esm/shp-loader.js +1 -1
  37. package/dist/esm/shp-loader.js.map +1 -1
  38. package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
  39. package/dist/lib/parsers/parse-shapefile.js +3 -0
  40. package/package.json +6 -6
  41. package/src/lib/parsers/parse-shapefile.ts +4 -0
package/dist/es5/lib/streaming/binary-chunk-reader.js
@@ -7,139 +7,212 @@ Object.defineProperty(exports, "__esModule", {
  });
  exports.default = void 0;

+ var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
+
+ var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
+
+ var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
+
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));

- class BinaryChunkReader {
- constructor(options) {
+ function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
+
+ function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
+
+ function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
+
+ var BinaryChunkReader = function () {
+ function BinaryChunkReader(options) {
+ (0, _classCallCheck2.default)(this, BinaryChunkReader);
  (0, _defineProperty2.default)(this, "offset", void 0);
  (0, _defineProperty2.default)(this, "arrayBuffers", void 0);
  (0, _defineProperty2.default)(this, "ended", void 0);
  (0, _defineProperty2.default)(this, "maxRewindBytes", void 0);
- const {
- maxRewindBytes = 0
- } = options || {};
+
+ var _ref = options || {},
+ _ref$maxRewindBytes = _ref.maxRewindBytes,
+ maxRewindBytes = _ref$maxRewindBytes === void 0 ? 0 : _ref$maxRewindBytes;
+
  this.offset = 0;
  this.arrayBuffers = [];
  this.ended = false;
  this.maxRewindBytes = maxRewindBytes;
  }

- write(arrayBuffer) {
- this.arrayBuffers.push(arrayBuffer);
- }
-
- end() {
- this.arrayBuffers = [];
- this.ended = true;
- }
-
- hasAvailableBytes(bytes) {
- let bytesAvailable = -this.offset;
-
- for (const arrayBuffer of this.arrayBuffers) {
- bytesAvailable += arrayBuffer.byteLength;
-
- if (bytesAvailable >= bytes) {
- return true;
- }
+ (0, _createClass2.default)(BinaryChunkReader, [{
+ key: "write",
+ value: function write(arrayBuffer) {
+ this.arrayBuffers.push(arrayBuffer);
  }
+ }, {
+ key: "end",
+ value: function end() {
+ this.arrayBuffers = [];
+ this.ended = true;
+ }
+ }, {
+ key: "hasAvailableBytes",
+ value: function hasAvailableBytes(bytes) {
+ var bytesAvailable = -this.offset;
+
+ var _iterator = _createForOfIteratorHelper(this.arrayBuffers),
+ _step;
+
+ try {
+ for (_iterator.s(); !(_step = _iterator.n()).done;) {
+ var arrayBuffer = _step.value;
+ bytesAvailable += arrayBuffer.byteLength;
+
+ if (bytesAvailable >= bytes) {
+ return true;
+ }
+ }
+ } catch (err) {
+ _iterator.e(err);
+ } finally {
+ _iterator.f();
+ }

- return false;
- }
-
- findBufferOffsets(bytes) {
- let offset = -this.offset;
- const selectedBuffers = [];
-
- for (let i = 0; i < this.arrayBuffers.length; i++) {
- const buf = this.arrayBuffers[i];
-
- if (offset + buf.byteLength <= 0) {
+ return false;
+ }
+ }, {
+ key: "findBufferOffsets",
+ value: function findBufferOffsets(bytes) {
+ var offset = -this.offset;
+ var selectedBuffers = [];
+
+ for (var i = 0; i < this.arrayBuffers.length; i++) {
+ var buf = this.arrayBuffers[i];
+
+ if (offset + buf.byteLength <= 0) {
+ offset += buf.byteLength;
+ continue;
+ }
+
+ var start = offset <= 0 ? Math.abs(offset) : 0;
+ var end = void 0;
+
+ if (start + bytes <= buf.byteLength) {
+ end = start + bytes;
+ selectedBuffers.push([i, [start, end]]);
+ return selectedBuffers;
+ }
+
+ end = buf.byteLength;
+ selectedBuffers.push([i, [start, end]]);
+ bytes -= buf.byteLength - start;
  offset += buf.byteLength;
- continue;
  }

- const start = offset <= 0 ? Math.abs(offset) : 0;
- let end;
+ return null;
+ }
+ }, {
+ key: "getDataView",
+ value: function getDataView(bytes) {
+ var bufferOffsets = this.findBufferOffsets(bytes);

- if (start + bytes <= buf.byteLength) {
- end = start + bytes;
- selectedBuffers.push([i, [start, end]]);
- return selectedBuffers;
+ if (!bufferOffsets && this.ended) {
+ throw new Error('binary data exhausted');
  }

- end = buf.byteLength;
- selectedBuffers.push([i, [start, end]]);
- bytes -= buf.byteLength - start;
- offset += buf.byteLength;
- }
+ if (!bufferOffsets) {
+ return null;
+ }

- return null;
- }
+ if (bufferOffsets.length === 1) {
+ var _bufferOffsets$ = (0, _slicedToArray2.default)(bufferOffsets[0], 2),
+ bufferIndex = _bufferOffsets$[0],
+ _bufferOffsets$$ = (0, _slicedToArray2.default)(_bufferOffsets$[1], 2),
+ start = _bufferOffsets$$[0],
+ end = _bufferOffsets$$[1];

- getDataView(bytes) {
- const bufferOffsets = this.findBufferOffsets(bytes);
+ var arrayBuffer = this.arrayBuffers[bufferIndex];

- if (!bufferOffsets && this.ended) {
- throw new Error('binary data exhausted');
- }
+ var _view = new DataView(arrayBuffer, start, end - start);

- if (!bufferOffsets) {
- return null;
- }
+ this.offset += bytes;
+ this.disposeBuffers();
+ return _view;
+ }

- if (bufferOffsets.length === 1) {
- const [bufferIndex, [start, end]] = bufferOffsets[0];
- const arrayBuffer = this.arrayBuffers[bufferIndex];
- const view = new DataView(arrayBuffer, start, end - start);
+ var view = new DataView(this._combineArrayBuffers(bufferOffsets));
  this.offset += bytes;
  this.disposeBuffers();
  return view;
  }
-
- const view = new DataView(this._combineArrayBuffers(bufferOffsets));
- this.offset += bytes;
- this.disposeBuffers();
- return view;
- }
-
- disposeBuffers() {
- while (this.arrayBuffers.length > 0 && this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength) {
- this.offset -= this.arrayBuffers[0].byteLength;
- this.arrayBuffers.shift();
+ }, {
+ key: "disposeBuffers",
+ value: function disposeBuffers() {
+ while (this.arrayBuffers.length > 0 && this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength) {
+ this.offset -= this.arrayBuffers[0].byteLength;
+ this.arrayBuffers.shift();
+ }
  }
- }
+ }, {
+ key: "_combineArrayBuffers",
+ value: function _combineArrayBuffers(bufferOffsets) {
+ var byteLength = 0;
+
+ var _iterator2 = _createForOfIteratorHelper(bufferOffsets),
+ _step2;
+
+ try {
+ for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
+ var bufferOffset = _step2.value;
+
+ var _bufferOffset$ = (0, _slicedToArray2.default)(bufferOffset[1], 2),
+ start = _bufferOffset$[0],
+ end = _bufferOffset$[1];
+
+ byteLength += end - start;
+ }
+ } catch (err) {
+ _iterator2.e(err);
+ } finally {
+ _iterator2.f();
+ }

- _combineArrayBuffers(bufferOffsets) {
- let byteLength = 0;
+ var result = new Uint8Array(byteLength);
+ var resultOffset = 0;
+
+ var _iterator3 = _createForOfIteratorHelper(bufferOffsets),
+ _step3;
+
+ try {
+ for (_iterator3.s(); !(_step3 = _iterator3.n()).done;) {
+ var _bufferOffset = _step3.value;
+
+ var _bufferOffset2 = (0, _slicedToArray2.default)(_bufferOffset, 2),
+ bufferIndex = _bufferOffset2[0],
+ _bufferOffset2$ = (0, _slicedToArray2.default)(_bufferOffset2[1], 2),
+ _start = _bufferOffset2$[0],
+ _end = _bufferOffset2$[1];
+
+ var sourceArray = new Uint8Array(this.arrayBuffers[bufferIndex]);
+ result.set(sourceArray.subarray(_start, _end), resultOffset);
+ resultOffset += _end - _start;
+ }
+ } catch (err) {
+ _iterator3.e(err);
+ } finally {
+ _iterator3.f();
+ }

- for (const bufferOffset of bufferOffsets) {
- const [start, end] = bufferOffset[1];
- byteLength += end - start;
+ return result.buffer;
  }
-
- const result = new Uint8Array(byteLength);
- let resultOffset = 0;
-
- for (const bufferOffset of bufferOffsets) {
- const [bufferIndex, [start, end]] = bufferOffset;
- const sourceArray = new Uint8Array(this.arrayBuffers[bufferIndex]);
- result.set(sourceArray.subarray(start, end), resultOffset);
- resultOffset += end - start;
+ }, {
+ key: "skip",
+ value: function skip(bytes) {
+ this.offset += bytes;
  }
-
- return result.buffer;
- }
-
- skip(bytes) {
- this.offset += bytes;
- }
-
- rewind(bytes) {
- this.offset -= bytes;
- }
-
- }
+ }, {
+ key: "rewind",
+ value: function rewind(bytes) {
+ this.offset -= bytes;
+ }
+ }]);
+ return BinaryChunkReader;
+ }();

  exports.default = BinaryChunkReader;
  //# sourceMappingURL=binary-chunk-reader.js.map
package/dist/es5/lib/streaming/binary-chunk-reader.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/lib/streaming/binary-chunk-reader.ts"],"names":["BinaryChunkReader","constructor","options","maxRewindBytes","offset","arrayBuffers","ended","write","arrayBuffer","push","end","hasAvailableBytes","bytes","bytesAvailable","byteLength","findBufferOffsets","selectedBuffers","i","length","buf","start","Math","abs","getDataView","bufferOffsets","Error","bufferIndex","view","DataView","disposeBuffers","_combineArrayBuffers","shift","bufferOffset","result","Uint8Array","resultOffset","sourceArray","set","subarray","buffer","skip","rewind"],"mappings":";;;;;;;;;;;AAAe,MAAMA,iBAAN,CAAwB;AAMrCC,EAAAA,WAAW,CAACC,OAAD,EAAiC;AAAA;AAAA;AAAA;AAAA;AAC1C,UAAM;AAACC,MAAAA,cAAc,GAAG;AAAlB,QAAuBD,OAAO,IAAI,EAAxC;AAGA,SAAKE,MAAL,GAAc,CAAd;AAEA,SAAKC,YAAL,GAAoB,EAApB;AACA,SAAKC,KAAL,GAAa,KAAb;AAGA,SAAKH,cAAL,GAAsBA,cAAtB;AACD;;AAIDI,EAAAA,KAAK,CAACC,WAAD,EAAiC;AACpC,SAAKH,YAAL,CAAkBI,IAAlB,CAAuBD,WAAvB;AACD;;AAEDE,EAAAA,GAAG,GAAS;AACV,SAAKL,YAAL,GAAoB,EAApB;AACA,SAAKC,KAAL,GAAa,IAAb;AACD;;AAQDK,EAAAA,iBAAiB,CAACC,KAAD,EAAyB;AACxC,QAAIC,cAAc,GAAG,CAAC,KAAKT,MAA3B;;AACA,SAAK,MAAMI,WAAX,IAA0B,KAAKH,YAA/B,EAA6C;AAC3CQ,MAAAA,cAAc,IAAIL,WAAW,CAACM,UAA9B;;AACA,UAAID,cAAc,IAAID,KAAtB,EAA6B;AAC3B,eAAO,IAAP;AACD;AACF;;AACD,WAAO,KAAP;AACD;;AAQDG,EAAAA,iBAAiB,CAACH,KAAD,EAA8B;AAC7C,QAAIR,MAAM,GAAG,CAAC,KAAKA,MAAnB;AACA,UAAMY,eAAoB,GAAG,EAA7B;;AAEA,SAAK,IAAIC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAG,KAAKZ,YAAL,CAAkBa,MAAtC,EAA8CD,CAAC,EAA/C,EAAmD;AACjD,YAAME,GAAG,GAAG,KAAKd,YAAL,CAAkBY,CAAlB,CAAZ;;AAGA,UAAIb,MAAM,GAAGe,GAAG,CAACL,UAAb,IAA2B,CAA/B,EAAkC;AAChCV,QAAAA,MAAM,IAAIe,GAAG,CAACL,UAAd;AAEA;AACD;;AAKD,YAAMM,KAAK,GAAGhB,MAAM,IAAI,CAAV,GAAciB,IAAI,CAACC,GAAL,CAASlB,MAAT,CAAd,GAAiC,CAA/C;AACA,UAAIM,GAAJ;;AAGA,UAAIU,KAAK,GAAGR,KAAR,IAAiBO,GAAG,CAACL,UAAzB,EAAqC;AACnCJ,QAAAA,GAAG,GAAGU,KAAK,GAAGR,KAAd;AACAI,QAAAA,eAAe,CAACP,IAAhB,CAAqB,CAACQ,CAAD,EAAI,CAACG,KAAD,EAAQV,GAAR,CAAJ,CAArB;AACA,eAAOM,eAAP;AACD;;AAGDN,MAAAA,GAAG,GAAGS,GAAG,CAACL,UAAV;AACAE,MAAAA,eAAe,CAACP,IAAhB,CAAqB,CAACQ,CAAD,EAAI,CAACG,KAAD,EAAQV,GAAR,CAAJ,CAArB;AAGAE,MAAAA,KAAK,IAAIO,GAAG,CAACL,UAAJ,GAAiBM,KAA1B;AACAhB,MAAAA,MAAM,IAAIe,GAAG,CAACL,UAAd;AACD;;AAGD,WAAO,IAAP;AACD;;AAQDS,EAAAA,WAAW,CAACX,KAAD,EAAiC;AAC1C,UAAMY,aAAa,GAAG,KAAKT,iBAAL,CAAuBH,KAAvB,CAAtB;;AAGA,QAAI,CAACY,aAAD,IAAkB,KAAKlB,KAA3B,EAAkC;AAChC,YAAM,IAAImB,KAAJ,CAAU,uBAAV,CAAN;AACD;;AAED,QAAI,CAACD,aAAL,EAAoB;AAElB,aAAO,IAAP;AACD;;AAGD,QAAIA,aAAa,CAACN,MAAd,KAAyB,CAA7B,EAAgC;AAC9B,YAAM,CAACQ,WAAD,EAAc,CAACN,KAAD,EAAQV,GAAR,CAAd,IAA8Bc,aAAa,CAAC,CAAD,CAAjD;AACA,YAAMhB,WAAW,GAAG,KAAKH,YAAL,CAAkBqB,WAAlB,CAApB;AACA,YAAMC,IAAI,GAAG,IAAIC,QAAJ,CAAapB,WAAb,EAA0BY,KAA1B,EAAiCV,GAAG,GAAGU,KAAvC,CAAb;AAEA,WAAKhB,MAAL,IAAeQ,KAAf;AACA,WAAKiB,cAAL;AACA,aAAOF,IAAP;AACD;;AAGD,UAAMA,IAAI,GAAG,IAAIC,QAAJ,CAAa,KAAKE,oBAAL,CAA0BN,aAA1B,CAAb,CAAb;AACA,SAAKpB,MAAL,IAAeQ,KAAf;AACA,SAAKiB,cAAL;AACA,WAAOF,IAAP;AACD;;AAKDE,EAAAA,cAAc,GAAS;AACrB,WACE,KAAKxB,YAAL,CAAkBa,MAAlB,GAA2B,CAA3B,IACA,KAAKd,MAAL,GAAc,KAAKD,cAAnB,IAAqC,KAAKE,YAAL,CAAkB,CAAlB,EAAqBS,UAF5D,EAGE;AACA,WAAKV,MAAL,IAAe,KAAKC,YAAL,CAAkB,CAAlB,EAAqBS,UAApC;AACA,WAAKT,YAAL,CAAkB0B,KAAlB;AACD;AACF;;AAYDD,EAAAA,oBAAoB,CAACN,aAAD,EAAwC;AAC1D,QAAIV,UAAkB,GAAG,CAAzB;;AACA,SAAK,MAAMkB,YAAX,IAA2BR,aAA3B,EAA0C;AACxC,YAAM,CAACJ,KAAD,EAAQV,GAAR,IAAesB,YAAY,CAAC,CAAD,CAAjC;AACAlB,MAAAA,UAAU,IAAIJ,GAAG,GAAGU,KAApB;AACD;;AAED,UAAMa,MAAM,GAAG,IAAIC,UAAJ,CAAepB,UAAf,CAAf;AAGA,QAAIqB,YAAoB,GAAG,CAA3B;;AACA,SAAK,MAAMH,YAAX,IAA2BR,aAA3B,EAA0C;AACxC,YAAM,CAACE,WAAD,EAAc,CAACN,KAAD,EAAQV,GAAR,CAAd,IAA8BsB,YAApC;AACA,YAAMI,WAAW,GAAG,IAA
IF,UAAJ,CAAe,KAAK7B,YAAL,CAAkBqB,WAAlB,CAAf,CAApB;AACAO,MAAAA,MAAM,CAACI,GAAP,CAAWD,WAAW,CAACE,QAAZ,CAAqBlB,KAArB,EAA4BV,GAA5B,CAAX,EAA6CyB,YAA7C;AACAA,MAAAA,YAAY,IAAIzB,GAAG,GAAGU,KAAtB;AACD;;AAED,WAAOa,MAAM,CAACM,MAAd;AACD;;AAIDC,EAAAA,IAAI,CAAC5B,KAAD,EAAsB;AACxB,SAAKR,MAAL,IAAeQ,KAAf;AACD;;AAID6B,EAAAA,MAAM,CAAC7B,KAAD,EAAsB;AAE1B,SAAKR,MAAL,IAAeQ,KAAf;AACD;;AAzLoC","sourcesContent":["export default class BinaryChunkReader {\n offset: number;\n arrayBuffers: ArrayBuffer[];\n ended: boolean;\n maxRewindBytes: number;\n\n constructor(options?: {[key: string]: any}) {\n const {maxRewindBytes = 0} = options || {};\n\n /** current global offset into current array buffer*/\n this.offset = 0;\n /** current buffer from iterator */\n this.arrayBuffers = [];\n this.ended = false;\n\n /** bytes behind offset to hold on to */\n this.maxRewindBytes = maxRewindBytes;\n }\n /**\n * @param arrayBuffer\n */\n write(arrayBuffer: ArrayBuffer): void {\n this.arrayBuffers.push(arrayBuffer);\n }\n\n end(): void {\n this.arrayBuffers = [];\n this.ended = true;\n }\n\n /**\n * Has enough bytes available in array buffers\n *\n * @param bytes Number of bytes\n * @return boolean\n */\n hasAvailableBytes(bytes: number): boolean {\n let bytesAvailable = -this.offset;\n for (const arrayBuffer of this.arrayBuffers) {\n bytesAvailable += arrayBuffer.byteLength;\n if (bytesAvailable >= bytes) {\n return true;\n }\n }\n return false;\n }\n\n /**\n * Find offsets of byte ranges within this.arrayBuffers\n *\n * @param bytes Byte length to read\n * @return Arrays with byte ranges pointing to this.arrayBuffers, Output type is nested array, e.g. [ [0, [1, 2]], ...]\n */\n findBufferOffsets(bytes: number): any[] | null {\n let offset = -this.offset;\n const selectedBuffers: any = [];\n\n for (let i = 0; i < this.arrayBuffers.length; i++) {\n const buf = this.arrayBuffers[i];\n\n // Current buffer isn't long enough to reach global offset\n if (offset + buf.byteLength <= 0) {\n offset += buf.byteLength;\n // eslint-disable-next-line no-continue\n continue;\n }\n\n // Find start/end offsets for this buffer\n // When offset < 0, need to skip over Math.abs(offset) bytes\n // When offset > 0, implies bytes in previous buffer, start at 0\n const start = offset <= 0 ? 
Math.abs(offset) : 0;\n let end: number;\n\n // Length of requested bytes is contained in current buffer\n if (start + bytes <= buf.byteLength) {\n end = start + bytes;\n selectedBuffers.push([i, [start, end]]);\n return selectedBuffers;\n }\n\n // Will need to look into next buffer\n end = buf.byteLength;\n selectedBuffers.push([i, [start, end]]);\n\n // Need to read fewer bytes in next iter\n bytes -= buf.byteLength - start;\n offset += buf.byteLength;\n }\n\n // Should only finish loop if exhausted all arrays\n return null;\n }\n\n /**\n * Get the required number of bytes from the iterator\n *\n * @param bytes Number of bytes\n * @return DataView with data\n */\n getDataView(bytes: number): DataView | null {\n const bufferOffsets = this.findBufferOffsets(bytes);\n // return `null` if not enough data, except if end() already called, in\n // which case throw an error.\n if (!bufferOffsets && this.ended) {\n throw new Error('binary data exhausted');\n }\n\n if (!bufferOffsets) {\n // @ts-ignore\n return null;\n }\n\n // If only one arrayBuffer needed, return DataView directly\n if (bufferOffsets.length === 1) {\n const [bufferIndex, [start, end]] = bufferOffsets[0];\n const arrayBuffer = this.arrayBuffers[bufferIndex];\n const view = new DataView(arrayBuffer, start, end - start);\n\n this.offset += bytes;\n this.disposeBuffers();\n return view;\n }\n\n // Concatenate portions of multiple ArrayBuffers\n const view = new DataView(this._combineArrayBuffers(bufferOffsets));\n this.offset += bytes;\n this.disposeBuffers();\n return view;\n }\n\n /**\n * Dispose of old array buffers\n */\n disposeBuffers(): void {\n while (\n this.arrayBuffers.length > 0 &&\n this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength\n ) {\n this.offset -= this.arrayBuffers[0].byteLength;\n this.arrayBuffers.shift();\n }\n }\n\n /**\n * Copy multiple ArrayBuffers into one contiguous ArrayBuffer\n *\n * In contrast to concatenateArrayBuffers, this only copies the necessary\n * portions of the source arrays, rather than first copying the entire arrays\n * then taking a part of them.\n *\n * @param bufferOffsets List of internal array offsets\n * @return New contiguous ArrayBuffer\n */\n _combineArrayBuffers(bufferOffsets: any[]): ArrayBufferLike {\n let byteLength: number = 0;\n for (const bufferOffset of bufferOffsets) {\n const [start, end] = bufferOffset[1];\n byteLength += end - start;\n }\n\n const result = new Uint8Array(byteLength);\n\n // Copy the subarrays\n let resultOffset: number = 0;\n for (const bufferOffset of bufferOffsets) {\n const [bufferIndex, [start, end]] = bufferOffset;\n const sourceArray = new Uint8Array(this.arrayBuffers[bufferIndex]);\n result.set(sourceArray.subarray(start, end), resultOffset);\n resultOffset += end - start;\n }\n\n return result.buffer;\n }\n /**\n * @param bytes\n */\n skip(bytes: number): void {\n this.offset += bytes;\n }\n /**\n * @param bytes\n */\n rewind(bytes: number): void {\n // TODO - only works if offset is already set\n this.offset -= bytes;\n }\n}\n"],"file":"binary-chunk-reader.js"}
+ {"version":3,"sources":["../../../../src/lib/streaming/binary-chunk-reader.ts"],"names":["BinaryChunkReader","options","maxRewindBytes","offset","arrayBuffers","ended","arrayBuffer","push","bytes","bytesAvailable","byteLength","selectedBuffers","i","length","buf","start","Math","abs","end","bufferOffsets","findBufferOffsets","Error","bufferIndex","view","DataView","disposeBuffers","_combineArrayBuffers","shift","bufferOffset","result","Uint8Array","resultOffset","sourceArray","set","subarray","buffer"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;IAAqBA,iB;AAMnB,6BAAYC,OAAZ,EAA4C;AAAA;AAAA;AAAA;AAAA;AAAA;;AAC1C,eAA6BA,OAAO,IAAI,EAAxC;AAAA,mCAAOC,cAAP;AAAA,QAAOA,cAAP,oCAAwB,CAAxB;;AAGA,SAAKC,MAAL,GAAc,CAAd;AAEA,SAAKC,YAAL,GAAoB,EAApB;AACA,SAAKC,KAAL,GAAa,KAAb;AAGA,SAAKH,cAAL,GAAsBA,cAAtB;AACD;;;;WAID,eAAMI,WAAN,EAAsC;AACpC,WAAKF,YAAL,CAAkBG,IAAlB,CAAuBD,WAAvB;AACD;;;WAED,eAAY;AACV,WAAKF,YAAL,GAAoB,EAApB;AACA,WAAKC,KAAL,GAAa,IAAb;AACD;;;WAQD,2BAAkBG,KAAlB,EAA0C;AACxC,UAAIC,cAAc,GAAG,CAAC,KAAKN,MAA3B;;AADwC,iDAEd,KAAKC,YAFS;AAAA;;AAAA;AAExC,4DAA6C;AAAA,cAAlCE,WAAkC;AAC3CG,UAAAA,cAAc,IAAIH,WAAW,CAACI,UAA9B;;AACA,cAAID,cAAc,IAAID,KAAtB,EAA6B;AAC3B,mBAAO,IAAP;AACD;AACF;AAPuC;AAAA;AAAA;AAAA;AAAA;;AAQxC,aAAO,KAAP;AACD;;;WAQD,2BAAkBA,KAAlB,EAA+C;AAC7C,UAAIL,MAAM,GAAG,CAAC,KAAKA,MAAnB;AACA,UAAMQ,eAAoB,GAAG,EAA7B;;AAEA,WAAK,IAAIC,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAG,KAAKR,YAAL,CAAkBS,MAAtC,EAA8CD,CAAC,EAA/C,EAAmD;AACjD,YAAME,GAAG,GAAG,KAAKV,YAAL,CAAkBQ,CAAlB,CAAZ;;AAGA,YAAIT,MAAM,GAAGW,GAAG,CAACJ,UAAb,IAA2B,CAA/B,EAAkC;AAChCP,UAAAA,MAAM,IAAIW,GAAG,CAACJ,UAAd;AAEA;AACD;;AAKD,YAAMK,KAAK,GAAGZ,MAAM,IAAI,CAAV,GAAca,IAAI,CAACC,GAAL,CAASd,MAAT,CAAd,GAAiC,CAA/C;AACA,YAAIe,GAAW,SAAf;;AAGA,YAAIH,KAAK,GAAGP,KAAR,IAAiBM,GAAG,CAACJ,UAAzB,EAAqC;AACnCQ,UAAAA,GAAG,GAAGH,KAAK,GAAGP,KAAd;AACAG,UAAAA,eAAe,CAACJ,IAAhB,CAAqB,CAACK,CAAD,EAAI,CAACG,KAAD,EAAQG,GAAR,CAAJ,CAArB;AACA,iBAAOP,eAAP;AACD;;AAGDO,QAAAA,GAAG,GAAGJ,GAAG,CAACJ,UAAV;AACAC,QAAAA,eAAe,CAACJ,IAAhB,CAAqB,CAACK,CAAD,EAAI,CAACG,KAAD,EAAQG,GAAR,CAAJ,CAArB;AAGAV,QAAAA,KAAK,IAAIM,GAAG,CAACJ,UAAJ,GAAiBK,KAA1B;AACAZ,QAAAA,MAAM,IAAIW,GAAG,CAACJ,UAAd;AACD;;AAGD,aAAO,IAAP;AACD;;;WAQD,qBAAYF,KAAZ,EAA4C;AAC1C,UAAMW,aAAa,GAAG,KAAKC,iBAAL,CAAuBZ,KAAvB,CAAtB;;AAGA,UAAI,CAACW,aAAD,IAAkB,KAAKd,KAA3B,EAAkC;AAChC,cAAM,IAAIgB,KAAJ,CAAU,uBAAV,CAAN;AACD;;AAED,UAAI,CAACF,aAAL,EAAoB;AAElB,eAAO,IAAP;AACD;;AAGD,UAAIA,aAAa,CAACN,MAAd,KAAyB,CAA7B,EAAgC;AAC9B,2DAAoCM,aAAa,CAAC,CAAD,CAAjD;AAAA,YAAOG,WAAP;AAAA;AAAA,YAAqBP,KAArB;AAAA,YAA4BG,GAA5B;;AACA,YAAMZ,WAAW,GAAG,KAAKF,YAAL,CAAkBkB,WAAlB,CAApB;;AACA,YAAMC,KAAI,GAAG,IAAIC,QAAJ,CAAalB,WAAb,EAA0BS,KAA1B,EAAiCG,GAAG,GAAGH,KAAvC,CAAb;;AAEA,aAAKZ,MAAL,IAAeK,KAAf;AACA,aAAKiB,cAAL;AACA,eAAOF,KAAP;AACD;;AAGD,UAAMA,IAAI,GAAG,IAAIC,QAAJ,CAAa,KAAKE,oBAAL,CAA0BP,aAA1B,CAAb,CAAb;AACA,WAAKhB,MAAL,IAAeK,KAAf;AACA,WAAKiB,cAAL;AACA,aAAOF,IAAP;AACD;;;WAKD,0BAAuB;AACrB,aACE,KAAKnB,YAAL,CAAkBS,MAAlB,GAA2B,CAA3B,IACA,KAAKV,MAAL,GAAc,KAAKD,cAAnB,IAAqC,KAAKE,YAAL,CAAkB,CAAlB,EAAqBM,UAF5D,EAGE;AACA,aAAKP,MAAL,IAAe,KAAKC,YAAL,CAAkB,CAAlB,EAAqBM,UAApC;AACA,aAAKN,YAAL,CAAkBuB,KAAlB;AACD;AACF;;;WAYD,8BAAqBR,aAArB,EAA4D;AAC1D,UAAIT,UAAkB,GAAG,CAAzB;;AAD0D,kDAE/BS,aAF+B;AAAA;;AAAA;AAE1D,+DAA0C;AAAA,cAA/BS,YAA+B;;AACxC,4DAAqBA,YAAY,CAAC,CAAD,CAAjC;AAAA,cAAOb,KAAP;AAAA,cAAcG,GAAd;;AACAR,UAAAA,UAAU,IAAIQ,GAAG,GAAGH,KAApB;AACD;AALyD;AAAA;AAAA;AAAA;AAAA;;AAO1D,UAAMc,MAAM,GAAG,IAAIC,UAAJ,CAAepB,UAAf,CAAf;AAGA,UAAIqB,YAAoB,GAAG,CAA3B;;AAV0D,kDAW/BZ,aAX+B;AAAA;;AAAA;AAW1D,+DAA0C;AAAA,cAA/BS,aAA+B;;AACxC,4DAAoCA,aAApC;AAAA,cAAON,WAAP;AAAA;AAAA,cAAqBP,MAArB;AAAA,cAA4BG,IAA5B;;AACA,cAAMc
,WAAW,GAAG,IAAIF,UAAJ,CAAe,KAAK1B,YAAL,CAAkBkB,WAAlB,CAAf,CAApB;AACAO,UAAAA,MAAM,CAACI,GAAP,CAAWD,WAAW,CAACE,QAAZ,CAAqBnB,MAArB,EAA4BG,IAA5B,CAAX,EAA6Ca,YAA7C;AACAA,UAAAA,YAAY,IAAIb,IAAG,GAAGH,MAAtB;AACD;AAhByD;AAAA;AAAA;AAAA;AAAA;;AAkB1D,aAAOc,MAAM,CAACM,MAAd;AACD;;;WAID,cAAK3B,KAAL,EAA0B;AACxB,WAAKL,MAAL,IAAeK,KAAf;AACD;;;WAID,gBAAOA,KAAP,EAA4B;AAE1B,WAAKL,MAAL,IAAeK,KAAf;AACD","sourcesContent":["export default class BinaryChunkReader {\n offset: number;\n arrayBuffers: ArrayBuffer[];\n ended: boolean;\n maxRewindBytes: number;\n\n constructor(options?: {[key: string]: any}) {\n const {maxRewindBytes = 0} = options || {};\n\n /** current global offset into current array buffer*/\n this.offset = 0;\n /** current buffer from iterator */\n this.arrayBuffers = [];\n this.ended = false;\n\n /** bytes behind offset to hold on to */\n this.maxRewindBytes = maxRewindBytes;\n }\n /**\n * @param arrayBuffer\n */\n write(arrayBuffer: ArrayBuffer): void {\n this.arrayBuffers.push(arrayBuffer);\n }\n\n end(): void {\n this.arrayBuffers = [];\n this.ended = true;\n }\n\n /**\n * Has enough bytes available in array buffers\n *\n * @param bytes Number of bytes\n * @return boolean\n */\n hasAvailableBytes(bytes: number): boolean {\n let bytesAvailable = -this.offset;\n for (const arrayBuffer of this.arrayBuffers) {\n bytesAvailable += arrayBuffer.byteLength;\n if (bytesAvailable >= bytes) {\n return true;\n }\n }\n return false;\n }\n\n /**\n * Find offsets of byte ranges within this.arrayBuffers\n *\n * @param bytes Byte length to read\n * @return Arrays with byte ranges pointing to this.arrayBuffers, Output type is nested array, e.g. [ [0, [1, 2]], ...]\n */\n findBufferOffsets(bytes: number): any[] | null {\n let offset = -this.offset;\n const selectedBuffers: any = [];\n\n for (let i = 0; i < this.arrayBuffers.length; i++) {\n const buf = this.arrayBuffers[i];\n\n // Current buffer isn't long enough to reach global offset\n if (offset + buf.byteLength <= 0) {\n offset += buf.byteLength;\n // eslint-disable-next-line no-continue\n continue;\n }\n\n // Find start/end offsets for this buffer\n // When offset < 0, need to skip over Math.abs(offset) bytes\n // When offset > 0, implies bytes in previous buffer, start at 0\n const start = offset <= 0 ? 
Math.abs(offset) : 0;\n let end: number;\n\n // Length of requested bytes is contained in current buffer\n if (start + bytes <= buf.byteLength) {\n end = start + bytes;\n selectedBuffers.push([i, [start, end]]);\n return selectedBuffers;\n }\n\n // Will need to look into next buffer\n end = buf.byteLength;\n selectedBuffers.push([i, [start, end]]);\n\n // Need to read fewer bytes in next iter\n bytes -= buf.byteLength - start;\n offset += buf.byteLength;\n }\n\n // Should only finish loop if exhausted all arrays\n return null;\n }\n\n /**\n * Get the required number of bytes from the iterator\n *\n * @param bytes Number of bytes\n * @return DataView with data\n */\n getDataView(bytes: number): DataView | null {\n const bufferOffsets = this.findBufferOffsets(bytes);\n // return `null` if not enough data, except if end() already called, in\n // which case throw an error.\n if (!bufferOffsets && this.ended) {\n throw new Error('binary data exhausted');\n }\n\n if (!bufferOffsets) {\n // @ts-ignore\n return null;\n }\n\n // If only one arrayBuffer needed, return DataView directly\n if (bufferOffsets.length === 1) {\n const [bufferIndex, [start, end]] = bufferOffsets[0];\n const arrayBuffer = this.arrayBuffers[bufferIndex];\n const view = new DataView(arrayBuffer, start, end - start);\n\n this.offset += bytes;\n this.disposeBuffers();\n return view;\n }\n\n // Concatenate portions of multiple ArrayBuffers\n const view = new DataView(this._combineArrayBuffers(bufferOffsets));\n this.offset += bytes;\n this.disposeBuffers();\n return view;\n }\n\n /**\n * Dispose of old array buffers\n */\n disposeBuffers(): void {\n while (\n this.arrayBuffers.length > 0 &&\n this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength\n ) {\n this.offset -= this.arrayBuffers[0].byteLength;\n this.arrayBuffers.shift();\n }\n }\n\n /**\n * Copy multiple ArrayBuffers into one contiguous ArrayBuffer\n *\n * In contrast to concatenateArrayBuffers, this only copies the necessary\n * portions of the source arrays, rather than first copying the entire arrays\n * then taking a part of them.\n *\n * @param bufferOffsets List of internal array offsets\n * @return New contiguous ArrayBuffer\n */\n _combineArrayBuffers(bufferOffsets: any[]): ArrayBufferLike {\n let byteLength: number = 0;\n for (const bufferOffset of bufferOffsets) {\n const [start, end] = bufferOffset[1];\n byteLength += end - start;\n }\n\n const result = new Uint8Array(byteLength);\n\n // Copy the subarrays\n let resultOffset: number = 0;\n for (const bufferOffset of bufferOffsets) {\n const [bufferIndex, [start, end]] = bufferOffset;\n const sourceArray = new Uint8Array(this.arrayBuffers[bufferIndex]);\n result.set(sourceArray.subarray(start, end), resultOffset);\n resultOffset += end - start;\n }\n\n return result.buffer;\n }\n /**\n * @param bytes\n */\n skip(bytes: number): void {\n this.offset += bytes;\n }\n /**\n * @param bytes\n */\n rewind(bytes: number): void {\n // TODO - only works if offset is already set\n this.offset -= bytes;\n }\n}\n"],"file":"binary-chunk-reader.js"}
package/dist/es5/lib/streaming/binary-reader.js
@@ -7,39 +7,50 @@ Object.defineProperty(exports, "__esModule", {
  });
  exports.default = void 0;

+ var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
+
+ var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
+
  var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));

- class BinaryReader {
- constructor(arrayBuffer) {
+ var BinaryReader = function () {
+ function BinaryReader(arrayBuffer) {
+ (0, _classCallCheck2.default)(this, BinaryReader);
  (0, _defineProperty2.default)(this, "offset", void 0);
  (0, _defineProperty2.default)(this, "arrayBuffer", void 0);
  this.offset = 0;
  this.arrayBuffer = arrayBuffer;
  }

- hasAvailableBytes(bytes) {
- return this.arrayBuffer.byteLength - this.offset >= bytes;
- }
-
- getDataView(bytes) {
- if (bytes && !this.hasAvailableBytes(bytes)) {
- throw new Error('binary data exhausted');
+ (0, _createClass2.default)(BinaryReader, [{
+ key: "hasAvailableBytes",
+ value: function hasAvailableBytes(bytes) {
+ return this.arrayBuffer.byteLength - this.offset >= bytes;
  }
-
- const dataView = bytes ? new DataView(this.arrayBuffer, this.offset, bytes) : new DataView(this.arrayBuffer, this.offset);
- this.offset += bytes;
- return dataView;
- }
-
- skip(bytes) {
- this.offset += bytes;
- }
-
- rewind(bytes) {
- this.offset -= bytes;
- }
-
- }
+ }, {
+ key: "getDataView",
+ value: function getDataView(bytes) {
+ if (bytes && !this.hasAvailableBytes(bytes)) {
+ throw new Error('binary data exhausted');
+ }
+
+ var dataView = bytes ? new DataView(this.arrayBuffer, this.offset, bytes) : new DataView(this.arrayBuffer, this.offset);
+ this.offset += bytes;
+ return dataView;
+ }
+ }, {
+ key: "skip",
+ value: function skip(bytes) {
+ this.offset += bytes;
+ }
+ }, {
+ key: "rewind",
+ value: function rewind(bytes) {
+ this.offset -= bytes;
+ }
+ }]);
+ return BinaryReader;
+ }();

  exports.default = BinaryReader;
  //# sourceMappingURL=binary-reader.js.map
package/dist/es5/lib/streaming/binary-reader.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/lib/streaming/binary-reader.ts"],"names":["BinaryReader","constructor","arrayBuffer","offset","hasAvailableBytes","bytes","byteLength","getDataView","Error","dataView","DataView","skip","rewind"],"mappings":";;;;;;;;;;;AAAe,MAAMA,YAAN,CAAmB;AAIhCC,EAAAA,WAAW,CAACC,WAAD,EAA2B;AAAA;AAAA;AAEpC,SAAKC,MAAL,GAAc,CAAd;AAEA,SAAKD,WAAL,GAAmBA,WAAnB;AACD;;AAODE,EAAAA,iBAAiB,CAACC,KAAD,EAAyB;AACxC,WAAO,KAAKH,WAAL,CAAiBI,UAAjB,GAA8B,KAAKH,MAAnC,IAA6CE,KAApD;AACD;;AAQDE,EAAAA,WAAW,CAACF,KAAD,EAA0B;AACnC,QAAIA,KAAK,IAAI,CAAC,KAAKD,iBAAL,CAAuBC,KAAvB,CAAd,EAA6C;AAC3C,YAAM,IAAIG,KAAJ,CAAU,uBAAV,CAAN;AACD;;AAED,UAAMC,QAAQ,GAAGJ,KAAK,GAClB,IAAIK,QAAJ,CAAa,KAAKR,WAAlB,EAA+B,KAAKC,MAApC,EAA4CE,KAA5C,CADkB,GAElB,IAAIK,QAAJ,CAAa,KAAKR,WAAlB,EAA+B,KAAKC,MAApC,CAFJ;AAGA,SAAKA,MAAL,IAAeE,KAAf;AACA,WAAOI,QAAP;AACD;;AAODE,EAAAA,IAAI,CAACN,KAAD,EAAsB;AACxB,SAAKF,MAAL,IAAeE,KAAf;AACD;;AAODO,EAAAA,MAAM,CAACP,KAAD,EAAsB;AAC1B,SAAKF,MAAL,IAAeE,KAAf;AACD;;AAtD+B","sourcesContent":["export default class BinaryReader {\n offset: number;\n arrayBuffer: ArrayBuffer;\n\n constructor(arrayBuffer: ArrayBuffer) {\n /** current global (stream) offset */\n this.offset = 0;\n /** current buffer from iterator */\n this.arrayBuffer = arrayBuffer;\n }\n /**\n * Checks if there are available bytes in data\n *\n * @param bytes\n * @returns boolean\n */\n hasAvailableBytes(bytes: number): boolean {\n return this.arrayBuffer.byteLength - this.offset >= bytes;\n }\n\n /**\n * Get the required number of bytes from the iterator\n *\n * @param bytes\n * @returns Dataview\n */\n getDataView(bytes: number): DataView {\n if (bytes && !this.hasAvailableBytes(bytes)) {\n throw new Error('binary data exhausted');\n }\n\n const dataView = bytes\n ? new DataView(this.arrayBuffer, this.offset, bytes)\n : new DataView(this.arrayBuffer, this.offset);\n this.offset += bytes;\n return dataView;\n }\n\n /**\n * Skipping\n *\n * @param bytes\n */\n skip(bytes: number): void {\n this.offset += bytes;\n }\n\n /**\n * Rewinding\n *\n * @param bytes\n */\n rewind(bytes: number): void {\n this.offset -= bytes;\n }\n}\n"],"file":"binary-reader.js"}
+ {"version":3,"sources":["../../../../src/lib/streaming/binary-reader.ts"],"names":["BinaryReader","arrayBuffer","offset","bytes","byteLength","hasAvailableBytes","Error","dataView","DataView"],"mappings":";;;;;;;;;;;;;;;IAAqBA,Y;AAInB,wBAAYC,WAAZ,EAAsC;AAAA;AAAA;AAAA;AAEpC,SAAKC,MAAL,GAAc,CAAd;AAEA,SAAKD,WAAL,GAAmBA,WAAnB;AACD;;;;WAOD,2BAAkBE,KAAlB,EAA0C;AACxC,aAAO,KAAKF,WAAL,CAAiBG,UAAjB,GAA8B,KAAKF,MAAnC,IAA6CC,KAApD;AACD;;;WAQD,qBAAYA,KAAZ,EAAqC;AACnC,UAAIA,KAAK,IAAI,CAAC,KAAKE,iBAAL,CAAuBF,KAAvB,CAAd,EAA6C;AAC3C,cAAM,IAAIG,KAAJ,CAAU,uBAAV,CAAN;AACD;;AAED,UAAMC,QAAQ,GAAGJ,KAAK,GAClB,IAAIK,QAAJ,CAAa,KAAKP,WAAlB,EAA+B,KAAKC,MAApC,EAA4CC,KAA5C,CADkB,GAElB,IAAIK,QAAJ,CAAa,KAAKP,WAAlB,EAA+B,KAAKC,MAApC,CAFJ;AAGA,WAAKA,MAAL,IAAeC,KAAf;AACA,aAAOI,QAAP;AACD;;;WAOD,cAAKJ,KAAL,EAA0B;AACxB,WAAKD,MAAL,IAAeC,KAAf;AACD;;;WAOD,gBAAOA,KAAP,EAA4B;AAC1B,WAAKD,MAAL,IAAeC,KAAf;AACD","sourcesContent":["export default class BinaryReader {\n offset: number;\n arrayBuffer: ArrayBuffer;\n\n constructor(arrayBuffer: ArrayBuffer) {\n /** current global (stream) offset */\n this.offset = 0;\n /** current buffer from iterator */\n this.arrayBuffer = arrayBuffer;\n }\n /**\n * Checks if there are available bytes in data\n *\n * @param bytes\n * @returns boolean\n */\n hasAvailableBytes(bytes: number): boolean {\n return this.arrayBuffer.byteLength - this.offset >= bytes;\n }\n\n /**\n * Get the required number of bytes from the iterator\n *\n * @param bytes\n * @returns Dataview\n */\n getDataView(bytes: number): DataView {\n if (bytes && !this.hasAvailableBytes(bytes)) {\n throw new Error('binary data exhausted');\n }\n\n const dataView = bytes\n ? new DataView(this.arrayBuffer, this.offset, bytes)\n : new DataView(this.arrayBuffer, this.offset);\n this.offset += bytes;\n return dataView;\n }\n\n /**\n * Skipping\n *\n * @param bytes\n */\n skip(bytes: number): void {\n this.offset += bytes;\n }\n\n /**\n * Rewinding\n *\n * @param bytes\n */\n rewind(bytes: number): void {\n this.offset -= bytes;\n }\n}\n"],"file":"binary-reader.js"}
package/dist/es5/lib/streaming/zip-batch-iterators.js
@@ -1,57 +1,116 @@
  "use strict";

+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
  Object.defineProperty(exports, "__esModule", {
  value: true
  });
  exports.zipBatchIterators = zipBatchIterators;

- async function* zipBatchIterators(iterator1, iterator2) {
- let batch1 = [];
- let batch2 = [];
- let iterator1Done = false;
- let iterator2Done = false;
-
- while (!iterator1Done && !iterator2Done) {
- if (batch1.length === 0 && !iterator1Done) {
- const {
- value,
- done
- } = await iterator1.next();
-
- if (done) {
- iterator1Done = true;
- } else {
- batch1 = value;
- }
- } else if (batch2.length === 0 && !iterator2Done) {
- const {
- value,
- done
- } = await iterator2.next();
-
- if (done) {
- iterator2Done = true;
- } else {
- batch2 = value;
- }
- }
+ var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));

- const batch = extractBatch(batch1, batch2);
+ var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));

- if (batch) {
- yield batch;
- }
- }
+ var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
+
+ function zipBatchIterators(_x, _x2) {
+ return _zipBatchIterators.apply(this, arguments);
+ }
+
+ function _zipBatchIterators() {
+ _zipBatchIterators = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(iterator1, iterator2) {
+ var batch1, batch2, iterator1Done, iterator2Done, _yield$_awaitAsyncGen, value, done, _yield$_awaitAsyncGen2, _value, _done, batch;
+
+ return _regenerator.default.wrap(function _callee$(_context) {
+ while (1) {
+ switch (_context.prev = _context.next) {
+ case 0:
+ batch1 = [];
+ batch2 = [];
+ iterator1Done = false;
+ iterator2Done = false;
+
+ case 4:
+ if (!(!iterator1Done && !iterator2Done)) {
+ _context.next = 27;
+ break;
+ }
+
+ if (!(batch1.length === 0 && !iterator1Done)) {
+ _context.next = 14;
+ break;
+ }
+
+ _context.next = 8;
+ return (0, _awaitAsyncGenerator2.default)(iterator1.next());
+
+ case 8:
+ _yield$_awaitAsyncGen = _context.sent;
+ value = _yield$_awaitAsyncGen.value;
+ done = _yield$_awaitAsyncGen.done;
+
+ if (done) {
+ iterator1Done = true;
+ } else {
+ batch1 = value;
+ }
+
+ _context.next = 21;
+ break;
+
+ case 14:
+ if (!(batch2.length === 0 && !iterator2Done)) {
+ _context.next = 21;
+ break;
+ }
+
+ _context.next = 17;
+ return (0, _awaitAsyncGenerator2.default)(iterator2.next());
+
+ case 17:
+ _yield$_awaitAsyncGen2 = _context.sent;
+ _value = _yield$_awaitAsyncGen2.value;
+ _done = _yield$_awaitAsyncGen2.done;
+
+ if (_done) {
+ iterator2Done = true;
+ } else {
+ batch2 = _value;
+ }
+
+ case 21:
+ batch = extractBatch(batch1, batch2);
+
+ if (!batch) {
+ _context.next = 25;
+ break;
+ }
+
+ _context.next = 25;
+ return batch;
+
+ case 25:
+ _context.next = 4;
+ break;
+
+ case 27:
+ case "end":
+ return _context.stop();
+ }
+ }
+ }, _callee);
+ }));
+ return _zipBatchIterators.apply(this, arguments);
  }

  function extractBatch(batch1, batch2) {
- const batchLength = Math.min(batch1.length, batch2.length);
+ var batchLength = Math.min(batch1.length, batch2.length);

  if (batchLength === 0) {
  return null;
  }

- const batch = [batch1.slice(0, batchLength), batch2.slice(0, batchLength)];
+ var batch = [batch1.slice(0, batchLength), batch2.slice(0, batchLength)];
  batch1.splice(0, batchLength);
  batch2.splice(0, batchLength);
  return batch;
package/dist/es5/lib/streaming/zip-batch-iterators.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../../../src/lib/streaming/zip-batch-iterators.ts"],"names":["zipBatchIterators","iterator1","iterator2","batch1","batch2","iterator1Done","iterator2Done","length","value","done","next","batch","extractBatch","batchLength","Math","min","slice","splice"],"mappings":";;;;;;;AAMO,gBAAgBA,iBAAhB,CACLC,SADK,EAELC,SAFK,EAGsC;AAC3C,MAAIC,MAAM,GAAG,EAAb;AACA,MAAIC,MAAM,GAAG,EAAb;AACA,MAAIC,aAAsB,GAAG,KAA7B;AACA,MAAIC,aAAsB,GAAG,KAA7B;;AAIA,SAAO,CAACD,aAAD,IAAkB,CAACC,aAA1B,EAAyC;AACvC,QAAIH,MAAM,CAACI,MAAP,KAAkB,CAAlB,IAAuB,CAACF,aAA5B,EAA2C;AACzC,YAAM;AAACG,QAAAA,KAAD;AAAQC,QAAAA;AAAR,UAAgB,MAAMR,SAAS,CAACS,IAAV,EAA5B;;AACA,UAAID,IAAJ,EAAU;AACRJ,QAAAA,aAAa,GAAG,IAAhB;AACD,OAFD,MAEO;AACLF,QAAAA,MAAM,GAAGK,KAAT;AACD;AACF,KAPD,MAOO,IAAIJ,MAAM,CAACG,MAAP,KAAkB,CAAlB,IAAuB,CAACD,aAA5B,EAA2C;AAChD,YAAM;AAACE,QAAAA,KAAD;AAAQC,QAAAA;AAAR,UAAgB,MAAMP,SAAS,CAACQ,IAAV,EAA5B;;AACA,UAAID,IAAJ,EAAU;AACRH,QAAAA,aAAa,GAAG,IAAhB;AACD,OAFD,MAEO;AACLF,QAAAA,MAAM,GAAGI,KAAT;AACD;AACF;;AAED,UAAMG,KAAK,GAAGC,YAAY,CAACT,MAAD,EAASC,MAAT,CAA1B;;AACA,QAAIO,KAAJ,EAAW;AACT,YAAMA,KAAN;AACD;AACF;AACF;;AASD,SAASC,YAAT,CAAsBT,MAAtB,EAAwCC,MAAxC,EAA6E;AAC3E,QAAMS,WAAmB,GAAGC,IAAI,CAACC,GAAL,CAASZ,MAAM,CAACI,MAAhB,EAAwBH,MAAM,CAACG,MAA/B,CAA5B;;AACA,MAAIM,WAAW,KAAK,CAApB,EAAuB;AACrB,WAAO,IAAP;AACD;;AAGD,QAAMF,KAAiB,GAAG,CAACR,MAAM,CAACa,KAAP,CAAa,CAAb,EAAgBH,WAAhB,CAAD,EAA+BT,MAAM,CAACY,KAAP,CAAa,CAAb,EAAgBH,WAAhB,CAA/B,CAA1B;AAGAV,EAAAA,MAAM,CAACc,MAAP,CAAc,CAAd,EAAiBJ,WAAjB;AACAT,EAAAA,MAAM,CAACa,MAAP,CAAc,CAAd,EAAiBJ,WAAjB;AACA,SAAOF,KAAP;AACD","sourcesContent":["/**\n * Zip two iterators together\n *\n * @param iterator1\n * @param iterator2\n */\nexport async function* zipBatchIterators(\n iterator1: AsyncIterator<any[]>,\n iterator2: AsyncIterator<any[]>\n): AsyncGenerator<number[][], void, unknown> {\n let batch1 = [];\n let batch2 = [];\n let iterator1Done: boolean = false;\n let iterator2Done: boolean = false;\n\n // TODO - one could let all iterators flow at full speed using `Promise.race`\n // however we might end up with a big temporary buffer\n while (!iterator1Done && !iterator2Done) {\n if (batch1.length === 0 && !iterator1Done) {\n const {value, done} = await iterator1.next();\n if (done) {\n iterator1Done = true;\n } else {\n batch1 = value;\n }\n } else if (batch2.length === 0 && !iterator2Done) {\n const {value, done} = await iterator2.next();\n if (done) {\n iterator2Done = true;\n } else {\n batch2 = value;\n }\n }\n\n const batch = extractBatch(batch1, batch2);\n if (batch) {\n yield batch;\n }\n }\n}\n\n/**\n * Extract batch of same length from two batches\n *\n * @param batch1\n * @param batch2\n * @return array | null\n */\nfunction extractBatch(batch1: number[], batch2: number[]): number[][] | null {\n const batchLength: number = Math.min(batch1.length, batch2.length);\n if (batchLength === 0) {\n return null;\n }\n\n // Non interleaved arrays\n const batch: number[][] = [batch1.slice(0, batchLength), batch2.slice(0, batchLength)];\n\n // Modify the 2 batches\n batch1.splice(0, batchLength);\n batch2.splice(0, batchLength);\n return batch;\n}\n"],"file":"zip-batch-iterators.js"}
+ {"version":3,"sources":["../../../../src/lib/streaming/zip-batch-iterators.ts"],"names":["zipBatchIterators","iterator1","iterator2","batch1","batch2","iterator1Done","iterator2Done","length","next","value","done","batch","extractBatch","batchLength","Math","min","slice","splice"],"mappings":";;;;;;;;;;;;;;;SAMuBA,iB;;;;;mFAAhB,iBACLC,SADK,EAELC,SAFK;AAAA;;AAAA;AAAA;AAAA;AAAA;AAIDC,YAAAA,MAJC,GAIQ,EAJR;AAKDC,YAAAA,MALC,GAKQ,EALR;AAMDC,YAAAA,aANC,GAMwB,KANxB;AAODC,YAAAA,aAPC,GAOwB,KAPxB;;AAAA;AAAA,kBAWE,CAACD,aAAD,IAAkB,CAACC,aAXrB;AAAA;AAAA;AAAA;;AAAA,kBAYCH,MAAM,CAACI,MAAP,KAAkB,CAAlB,IAAuB,CAACF,aAZzB;AAAA;AAAA;AAAA;;AAAA;AAAA,sDAa2BJ,SAAS,CAACO,IAAV,EAb3B;;AAAA;AAAA;AAaMC,YAAAA,KAbN,yBAaMA,KAbN;AAaaC,YAAAA,IAbb,yBAaaA,IAbb;;AAcD,gBAAIA,IAAJ,EAAU;AACRL,cAAAA,aAAa,GAAG,IAAhB;AACD,aAFD,MAEO;AACLF,cAAAA,MAAM,GAAGM,KAAT;AACD;;AAlBA;AAAA;;AAAA;AAAA,kBAmBQL,MAAM,CAACG,MAAP,KAAkB,CAAlB,IAAuB,CAACD,aAnBhC;AAAA;AAAA;AAAA;;AAAA;AAAA,sDAoB2BJ,SAAS,CAACM,IAAV,EApB3B;;AAAA;AAAA;AAoBMC,YAAAA,MApBN,0BAoBMA,KApBN;AAoBaC,YAAAA,KApBb,0BAoBaA,IApBb;;AAqBD,gBAAIA,KAAJ,EAAU;AACRJ,cAAAA,aAAa,GAAG,IAAhB;AACD,aAFD,MAEO;AACLF,cAAAA,MAAM,GAAGK,MAAT;AACD;;AAzBA;AA4BGE,YAAAA,KA5BH,GA4BWC,YAAY,CAACT,MAAD,EAASC,MAAT,CA5BvB;;AAAA,iBA6BCO,KA7BD;AAAA;AAAA;AAAA;;AAAA;AA8BD,mBAAMA,KAAN;;AA9BC;AAAA;AAAA;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,G;;;;AA0CP,SAASC,YAAT,CAAsBT,MAAtB,EAAwCC,MAAxC,EAA6E;AAC3E,MAAMS,WAAmB,GAAGC,IAAI,CAACC,GAAL,CAASZ,MAAM,CAACI,MAAhB,EAAwBH,MAAM,CAACG,MAA/B,CAA5B;;AACA,MAAIM,WAAW,KAAK,CAApB,EAAuB;AACrB,WAAO,IAAP;AACD;;AAGD,MAAMF,KAAiB,GAAG,CAACR,MAAM,CAACa,KAAP,CAAa,CAAb,EAAgBH,WAAhB,CAAD,EAA+BT,MAAM,CAACY,KAAP,CAAa,CAAb,EAAgBH,WAAhB,CAA/B,CAA1B;AAGAV,EAAAA,MAAM,CAACc,MAAP,CAAc,CAAd,EAAiBJ,WAAjB;AACAT,EAAAA,MAAM,CAACa,MAAP,CAAc,CAAd,EAAiBJ,WAAjB;AACA,SAAOF,KAAP;AACD","sourcesContent":["/**\n * Zip two iterators together\n *\n * @param iterator1\n * @param iterator2\n */\nexport async function* zipBatchIterators(\n iterator1: AsyncIterator<any[]>,\n iterator2: AsyncIterator<any[]>\n): AsyncGenerator<number[][], void, unknown> {\n let batch1 = [];\n let batch2 = [];\n let iterator1Done: boolean = false;\n let iterator2Done: boolean = false;\n\n // TODO - one could let all iterators flow at full speed using `Promise.race`\n // however we might end up with a big temporary buffer\n while (!iterator1Done && !iterator2Done) {\n if (batch1.length === 0 && !iterator1Done) {\n const {value, done} = await iterator1.next();\n if (done) {\n iterator1Done = true;\n } else {\n batch1 = value;\n }\n } else if (batch2.length === 0 && !iterator2Done) {\n const {value, done} = await iterator2.next();\n if (done) {\n iterator2Done = true;\n } else {\n batch2 = value;\n }\n }\n\n const batch = extractBatch(batch1, batch2);\n if (batch) {\n yield batch;\n }\n }\n}\n\n/**\n * Extract batch of same length from two batches\n *\n * @param batch1\n * @param batch2\n * @return array | null\n */\nfunction extractBatch(batch1: number[], batch2: number[]): number[][] | null {\n const batchLength: number = Math.min(batch1.length, batch2.length);\n if (batchLength === 0) {\n return null;\n }\n\n // Non interleaved arrays\n const batch: number[][] = [batch1.slice(0, batchLength), batch2.slice(0, batchLength)];\n\n // Modify the 2 batches\n batch1.splice(0, batchLength);\n batch2.splice(0, batchLength);\n return batch;\n}\n"],"file":"zip-batch-iterators.js"}
package/dist/es5/shapefile-loader.js
@@ -9,8 +9,8 @@ var _shpLoader = require("./shp-loader");

  var _parseShapefile = require("./lib/parsers/parse-shapefile");

- const VERSION = typeof "3.1.0-beta.5" !== 'undefined' ? "3.1.0-beta.5" : 'latest';
- const ShapefileLoader = {
+ var VERSION = typeof "3.1.2" !== 'undefined' ? "3.1.2" : 'latest';
+ var ShapefileLoader = {
  name: 'Shapefile',
  id: 'shapefile',
  module: 'shapefile',
@@ -29,6 +29,6 @@ const ShapefileLoader = {
  parseInBatches: _parseShapefile.parseShapefileInBatches
  };
  exports.ShapefileLoader = ShapefileLoader;
- const _typecheckShapefileLoader = ShapefileLoader;
+ var _typecheckShapefileLoader = ShapefileLoader;
  exports._typecheckShapefileLoader = _typecheckShapefileLoader;
  //# sourceMappingURL=shapefile-loader.js.map