@loaders.gl/shapefile 4.0.0-alpha.5 → 4.0.0-alpha.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bundle.js +2 -2
- package/dist/dbf-loader.js +29 -20
- package/dist/dbf-worker.js +73 -447
- package/dist/dist.min.js +130 -489
- package/dist/es5/bundle.js +6 -0
- package/dist/es5/bundle.js.map +1 -0
- package/dist/es5/dbf-loader.js +53 -0
- package/dist/es5/dbf-loader.js.map +1 -0
- package/dist/es5/index.js +39 -0
- package/dist/es5/index.js.map +1 -0
- package/dist/es5/lib/parsers/parse-dbf.js +394 -0
- package/dist/es5/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shapefile.js +373 -0
- package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shp-geometry.js +220 -0
- package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shp-header.js +35 -0
- package/dist/es5/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shp.js +227 -0
- package/dist/es5/lib/parsers/parse-shp.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shx.js +26 -0
- package/dist/es5/lib/parsers/parse-shx.js.map +1 -0
- package/dist/es5/lib/parsers/types.js +2 -0
- package/dist/es5/lib/parsers/types.js.map +1 -0
- package/dist/es5/lib/streaming/binary-chunk-reader.js +178 -0
- package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/es5/lib/streaming/binary-reader.js +48 -0
- package/dist/es5/lib/streaming/binary-reader.js.map +1 -0
- package/dist/es5/lib/streaming/zip-batch-iterators.js +91 -0
- package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/es5/shapefile-loader.js +31 -0
- package/dist/es5/shapefile-loader.js.map +1 -0
- package/dist/es5/shp-loader.js +56 -0
- package/dist/es5/shp-loader.js.map +1 -0
- package/dist/es5/workers/dbf-worker.js +6 -0
- package/dist/es5/workers/dbf-worker.js.map +1 -0
- package/dist/es5/workers/shp-worker.js +6 -0
- package/dist/es5/workers/shp-worker.js.map +1 -0
- package/dist/esm/bundle.js +4 -0
- package/dist/esm/bundle.js.map +1 -0
- package/dist/esm/dbf-loader.js +24 -0
- package/dist/esm/dbf-loader.js.map +1 -0
- package/dist/esm/index.js +4 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/lib/parsers/parse-dbf.js +296 -0
- package/dist/esm/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shapefile.js +187 -0
- package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shp-geometry.js +191 -0
- package/dist/esm/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shp-header.js +29 -0
- package/dist/esm/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shp.js +134 -0
- package/dist/esm/lib/parsers/parse-shp.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shx.js +20 -0
- package/dist/esm/lib/parsers/parse-shx.js.map +1 -0
- package/dist/esm/lib/parsers/types.js +2 -0
- package/dist/esm/lib/parsers/types.js.map +1 -0
- package/dist/esm/lib/streaming/binary-chunk-reader.js +106 -0
- package/dist/esm/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/esm/lib/streaming/binary-reader.js +27 -0
- package/dist/esm/lib/streaming/binary-reader.js.map +1 -0
- package/dist/esm/lib/streaming/zip-batch-iterators.js +44 -0
- package/dist/esm/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/esm/shapefile-loader.js +23 -0
- package/dist/esm/shapefile-loader.js.map +1 -0
- package/dist/esm/shp-loader.js +26 -0
- package/dist/esm/shp-loader.js.map +1 -0
- package/dist/esm/workers/dbf-worker.js +4 -0
- package/dist/esm/workers/dbf-worker.js.map +1 -0
- package/dist/esm/workers/shp-worker.js +4 -0
- package/dist/esm/workers/shp-worker.js.map +1 -0
- package/dist/index.js +11 -4
- package/dist/lib/parsers/parse-dbf.d.ts +4 -18
- package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
- package/dist/lib/parsers/parse-dbf.js +309 -264
- package/dist/lib/parsers/parse-shapefile.d.ts +3 -8
- package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shapefile.js +227 -209
- package/dist/lib/parsers/parse-shp-geometry.d.ts +2 -3
- package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp-geometry.js +265 -212
- package/dist/lib/parsers/parse-shp-header.js +38 -27
- package/dist/lib/parsers/parse-shp.d.ts +3 -2
- package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp.js +160 -136
- package/dist/lib/parsers/parse-shx.js +25 -19
- package/dist/lib/parsers/types.d.ts +68 -0
- package/dist/lib/parsers/types.d.ts.map +1 -0
- package/dist/lib/parsers/types.js +2 -0
- package/dist/lib/streaming/binary-chunk-reader.d.ts +5 -3
- package/dist/lib/streaming/binary-chunk-reader.d.ts.map +1 -1
- package/dist/lib/streaming/binary-chunk-reader.js +152 -128
- package/dist/lib/streaming/binary-reader.js +50 -33
- package/dist/lib/streaming/zip-batch-iterators.js +57 -48
- package/dist/shapefile-loader.js +30 -22
- package/dist/shp-loader.js +32 -22
- package/dist/shp-worker.js +57 -19
- package/dist/workers/dbf-worker.js +5 -4
- package/dist/workers/shp-worker.js +5 -4
- package/package.json +7 -7
- package/src/lib/parsers/parse-dbf.ts +41 -67
- package/src/lib/parsers/parse-shapefile.ts +3 -6
- package/src/lib/parsers/parse-shp-geometry.ts +3 -2
- package/src/lib/parsers/parse-shp.ts +26 -12
- package/src/lib/parsers/types.ts +79 -0
- package/src/lib/streaming/binary-chunk-reader.ts +5 -1
- package/src/lib/streaming/zip-batch-iterators.ts +2 -2
- package/dist/bundle.js.map +0 -1
- package/dist/dbf-loader.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/lib/parsers/parse-shp.js.map +0 -1
- package/dist/lib/parsers/parse-shx.js.map +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/lib/streaming/binary-reader.js.map +0 -1
- package/dist/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/shapefile-loader.js.map +0 -1
- package/dist/shp-loader.js.map +0 -1
- package/dist/workers/dbf-worker.js.map +0 -1
- package/dist/workers/shp-worker.js.map +0 -1
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.default = void 0;
|
|
8
|
+
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
|
|
9
|
+
var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
|
|
10
|
+
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
|
|
11
|
+
var BinaryReader = function () {
|
|
12
|
+
function BinaryReader(arrayBuffer) {
|
|
13
|
+
(0, _classCallCheck2.default)(this, BinaryReader);
|
|
14
|
+
(0, _defineProperty2.default)(this, "offset", void 0);
|
|
15
|
+
(0, _defineProperty2.default)(this, "arrayBuffer", void 0);
|
|
16
|
+
this.offset = 0;
|
|
17
|
+
this.arrayBuffer = arrayBuffer;
|
|
18
|
+
}
|
|
19
|
+
(0, _createClass2.default)(BinaryReader, [{
|
|
20
|
+
key: "hasAvailableBytes",
|
|
21
|
+
value: function hasAvailableBytes(bytes) {
|
|
22
|
+
return this.arrayBuffer.byteLength - this.offset >= bytes;
|
|
23
|
+
}
|
|
24
|
+
}, {
|
|
25
|
+
key: "getDataView",
|
|
26
|
+
value: function getDataView(bytes) {
|
|
27
|
+
if (bytes && !this.hasAvailableBytes(bytes)) {
|
|
28
|
+
throw new Error('binary data exhausted');
|
|
29
|
+
}
|
|
30
|
+
var dataView = bytes ? new DataView(this.arrayBuffer, this.offset, bytes) : new DataView(this.arrayBuffer, this.offset);
|
|
31
|
+
this.offset += bytes;
|
|
32
|
+
return dataView;
|
|
33
|
+
}
|
|
34
|
+
}, {
|
|
35
|
+
key: "skip",
|
|
36
|
+
value: function skip(bytes) {
|
|
37
|
+
this.offset += bytes;
|
|
38
|
+
}
|
|
39
|
+
}, {
|
|
40
|
+
key: "rewind",
|
|
41
|
+
value: function rewind(bytes) {
|
|
42
|
+
this.offset -= bytes;
|
|
43
|
+
}
|
|
44
|
+
}]);
|
|
45
|
+
return BinaryReader;
|
|
46
|
+
}();
|
|
47
|
+
exports.default = BinaryReader;
|
|
48
|
+
//# sourceMappingURL=binary-reader.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"binary-reader.js","names":["BinaryReader","arrayBuffer","_classCallCheck2","default","_defineProperty2","offset","_createClass2","key","value","hasAvailableBytes","bytes","byteLength","getDataView","Error","dataView","DataView","skip","rewind","exports"],"sources":["../../../../src/lib/streaming/binary-reader.ts"],"sourcesContent":["export default class BinaryReader {\n offset: number;\n arrayBuffer: ArrayBuffer;\n\n constructor(arrayBuffer: ArrayBuffer) {\n /** current global (stream) offset */\n this.offset = 0;\n /** current buffer from iterator */\n this.arrayBuffer = arrayBuffer;\n }\n /**\n * Checks if there are available bytes in data\n *\n * @param bytes\n * @returns boolean\n */\n hasAvailableBytes(bytes: number): boolean {\n return this.arrayBuffer.byteLength - this.offset >= bytes;\n }\n\n /**\n * Get the required number of bytes from the iterator\n *\n * @param bytes\n * @returns Dataview\n */\n getDataView(bytes: number): DataView {\n if (bytes && !this.hasAvailableBytes(bytes)) {\n throw new Error('binary data exhausted');\n }\n\n const dataView = bytes\n ? 
new DataView(this.arrayBuffer, this.offset, bytes)\n : new DataView(this.arrayBuffer, this.offset);\n this.offset += bytes;\n return dataView;\n }\n\n /**\n * Skipping\n *\n * @param bytes\n */\n skip(bytes: number): void {\n this.offset += bytes;\n }\n\n /**\n * Rewinding\n *\n * @param bytes\n */\n rewind(bytes: number): void {\n this.offset -= bytes;\n }\n}\n"],"mappings":";;;;;;;;;;IAAqBA,YAAY;EAI/B,SAAAA,aAAYC,WAAwB,EAAE;IAAA,IAAAC,gBAAA,CAAAC,OAAA,QAAAH,YAAA;IAAA,IAAAI,gBAAA,CAAAD,OAAA;IAAA,IAAAC,gBAAA,CAAAD,OAAA;IAEpC,IAAI,CAACE,MAAM,GAAG,CAAC;IAEf,IAAI,CAACJ,WAAW,GAAGA,WAAW;EAChC;EAAC,IAAAK,aAAA,CAAAH,OAAA,EAAAH,YAAA;IAAAO,GAAA;IAAAC,KAAA,EAOD,SAAAC,kBAAkBC,KAAa,EAAW;MACxC,OAAO,IAAI,CAACT,WAAW,CAACU,UAAU,GAAG,IAAI,CAACN,MAAM,IAAIK,KAAK;IAC3D;EAAC;IAAAH,GAAA;IAAAC,KAAA,EAQD,SAAAI,YAAYF,KAAa,EAAY;MACnC,IAAIA,KAAK,IAAI,CAAC,IAAI,CAACD,iBAAiB,CAACC,KAAK,CAAC,EAAE;QAC3C,MAAM,IAAIG,KAAK,CAAC,uBAAuB,CAAC;MAC1C;MAEA,IAAMC,QAAQ,GAAGJ,KAAK,GAClB,IAAIK,QAAQ,CAAC,IAAI,CAACd,WAAW,EAAE,IAAI,CAACI,MAAM,EAAEK,KAAK,CAAC,GAClD,IAAIK,QAAQ,CAAC,IAAI,CAACd,WAAW,EAAE,IAAI,CAACI,MAAM,CAAC;MAC/C,IAAI,CAACA,MAAM,IAAIK,KAAK;MACpB,OAAOI,QAAQ;IACjB;EAAC;IAAAP,GAAA;IAAAC,KAAA,EAOD,SAAAQ,KAAKN,KAAa,EAAQ;MACxB,IAAI,CAACL,MAAM,IAAIK,KAAK;IACtB;EAAC;IAAAH,GAAA;IAAAC,KAAA,EAOD,SAAAS,OAAOP,KAAa,EAAQ;MAC1B,IAAI,CAACL,MAAM,IAAIK,KAAK;IACtB;EAAC;EAAA,OAAAV,YAAA;AAAA;AAAAkB,OAAA,CAAAf,OAAA,GAAAH,YAAA"}
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.zipBatchIterators = zipBatchIterators;
|
|
8
|
+
var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
|
|
9
|
+
var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
|
|
10
|
+
var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
|
|
11
|
+
function zipBatchIterators(_x, _x2) {
|
|
12
|
+
return _zipBatchIterators.apply(this, arguments);
|
|
13
|
+
}
|
|
14
|
+
function _zipBatchIterators() {
|
|
15
|
+
_zipBatchIterators = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(iterator1, iterator2) {
|
|
16
|
+
var batch1, batch2, iterator1Done, iterator2Done, _yield$_awaitAsyncGen, value, done, _yield$_awaitAsyncGen2, _value, _done, batch;
|
|
17
|
+
return _regenerator.default.wrap(function _callee$(_context) {
|
|
18
|
+
while (1) switch (_context.prev = _context.next) {
|
|
19
|
+
case 0:
|
|
20
|
+
batch1 = [];
|
|
21
|
+
batch2 = [];
|
|
22
|
+
iterator1Done = false;
|
|
23
|
+
iterator2Done = false;
|
|
24
|
+
case 4:
|
|
25
|
+
if (!(!iterator1Done && !iterator2Done)) {
|
|
26
|
+
_context.next = 27;
|
|
27
|
+
break;
|
|
28
|
+
}
|
|
29
|
+
if (!(batch1.length === 0 && !iterator1Done)) {
|
|
30
|
+
_context.next = 14;
|
|
31
|
+
break;
|
|
32
|
+
}
|
|
33
|
+
_context.next = 8;
|
|
34
|
+
return (0, _awaitAsyncGenerator2.default)(iterator1.next());
|
|
35
|
+
case 8:
|
|
36
|
+
_yield$_awaitAsyncGen = _context.sent;
|
|
37
|
+
value = _yield$_awaitAsyncGen.value;
|
|
38
|
+
done = _yield$_awaitAsyncGen.done;
|
|
39
|
+
if (done) {
|
|
40
|
+
iterator1Done = true;
|
|
41
|
+
} else {
|
|
42
|
+
batch1 = value;
|
|
43
|
+
}
|
|
44
|
+
_context.next = 21;
|
|
45
|
+
break;
|
|
46
|
+
case 14:
|
|
47
|
+
if (!(batch2.length === 0 && !iterator2Done)) {
|
|
48
|
+
_context.next = 21;
|
|
49
|
+
break;
|
|
50
|
+
}
|
|
51
|
+
_context.next = 17;
|
|
52
|
+
return (0, _awaitAsyncGenerator2.default)(iterator2.next());
|
|
53
|
+
case 17:
|
|
54
|
+
_yield$_awaitAsyncGen2 = _context.sent;
|
|
55
|
+
_value = _yield$_awaitAsyncGen2.value;
|
|
56
|
+
_done = _yield$_awaitAsyncGen2.done;
|
|
57
|
+
if (_done) {
|
|
58
|
+
iterator2Done = true;
|
|
59
|
+
} else {
|
|
60
|
+
batch2 = _value;
|
|
61
|
+
}
|
|
62
|
+
case 21:
|
|
63
|
+
batch = extractBatch(batch1, batch2);
|
|
64
|
+
if (!batch) {
|
|
65
|
+
_context.next = 25;
|
|
66
|
+
break;
|
|
67
|
+
}
|
|
68
|
+
_context.next = 25;
|
|
69
|
+
return batch;
|
|
70
|
+
case 25:
|
|
71
|
+
_context.next = 4;
|
|
72
|
+
break;
|
|
73
|
+
case 27:
|
|
74
|
+
case "end":
|
|
75
|
+
return _context.stop();
|
|
76
|
+
}
|
|
77
|
+
}, _callee);
|
|
78
|
+
}));
|
|
79
|
+
return _zipBatchIterators.apply(this, arguments);
|
|
80
|
+
}
|
|
81
|
+
function extractBatch(batch1, batch2) {
|
|
82
|
+
var batchLength = Math.min(batch1.length, batch2.length);
|
|
83
|
+
if (batchLength === 0) {
|
|
84
|
+
return null;
|
|
85
|
+
}
|
|
86
|
+
var batch = [batch1.slice(0, batchLength), batch2.slice(0, batchLength)];
|
|
87
|
+
batch1.splice(0, batchLength);
|
|
88
|
+
batch2.splice(0, batchLength);
|
|
89
|
+
return batch;
|
|
90
|
+
}
|
|
91
|
+
//# sourceMappingURL=zip-batch-iterators.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"zip-batch-iterators.js","names":["zipBatchIterators","_x","_x2","_zipBatchIterators","apply","arguments","_wrapAsyncGenerator2","default","_regenerator","mark","_callee","iterator1","iterator2","batch1","batch2","iterator1Done","iterator2Done","_yield$_awaitAsyncGen","value","done","_yield$_awaitAsyncGen2","_value","_done","batch","wrap","_callee$","_context","prev","next","length","_awaitAsyncGenerator2","sent","extractBatch","stop","batchLength","Math","min","slice","splice"],"sources":["../../../../src/lib/streaming/zip-batch-iterators.ts"],"sourcesContent":["/**\n * Zip two iterators together\n *\n * @param iterator1\n * @param iterator2\n */\nexport async function* zipBatchIterators(\n iterator1: AsyncIterator<any[]>,\n iterator2: AsyncIterator<any[]>\n): AsyncGenerator<number[][], void, unknown> {\n let batch1: number[] = [];\n let batch2: number[] = [];\n let iterator1Done: boolean = false;\n let iterator2Done: boolean = false;\n\n // TODO - one could let all iterators flow at full speed using `Promise.race`\n // however we might end up with a big temporary buffer\n while (!iterator1Done && !iterator2Done) {\n if (batch1.length === 0 && !iterator1Done) {\n const {value, done} = await iterator1.next();\n if (done) {\n iterator1Done = true;\n } else {\n batch1 = value;\n }\n } else if (batch2.length === 0 && !iterator2Done) {\n const {value, done} = await iterator2.next();\n if (done) {\n iterator2Done = true;\n } else {\n batch2 = value;\n }\n }\n\n const batch = extractBatch(batch1, batch2);\n if (batch) {\n yield batch;\n }\n }\n}\n\n/**\n * Extract batch of same length from two batches\n *\n * @param batch1\n * @param batch2\n * @return array | null\n */\nfunction extractBatch(batch1: number[], batch2: number[]): number[][] | null {\n const batchLength: number = Math.min(batch1.length, batch2.length);\n if (batchLength === 0) {\n return null;\n }\n\n // Non interleaved arrays\n const batch: number[][] = [batch1.slice(0, batchLength), 
batch2.slice(0, batchLength)];\n\n // Modify the 2 batches\n batch1.splice(0, batchLength);\n batch2.splice(0, batchLength);\n return batch;\n}\n"],"mappings":";;;;;;;;;;SAMuBA,iBAAiBA,CAAAC,EAAA,EAAAC,GAAA;EAAA,OAAAC,kBAAA,CAAAC,KAAA,OAAAC,SAAA;AAAA;AAAA,SAAAF,mBAAA;EAAAA,kBAAA,OAAAG,oBAAA,CAAAC,OAAA,EAAAC,YAAA,CAAAD,OAAA,CAAAE,IAAA,CAAjC,SAAAC,QACLC,SAA+B,EAC/BC,SAA+B;IAAA,IAAAC,MAAA,EAAAC,MAAA,EAAAC,aAAA,EAAAC,aAAA,EAAAC,qBAAA,EAAAC,KAAA,EAAAC,IAAA,EAAAC,sBAAA,EAAAC,MAAA,EAAAC,KAAA,EAAAC,KAAA;IAAA,OAAAf,YAAA,CAAAD,OAAA,CAAAiB,IAAA,UAAAC,SAAAC,QAAA;MAAA,kBAAAA,QAAA,CAAAC,IAAA,GAAAD,QAAA,CAAAE,IAAA;QAAA;UAE3Bf,MAAgB,GAAG,EAAE;UACrBC,MAAgB,GAAG,EAAE;UACrBC,aAAsB,GAAG,KAAK;UAC9BC,aAAsB,GAAG,KAAK;QAAA;UAAA,MAI3B,CAACD,aAAa,IAAI,CAACC,aAAa;YAAAU,QAAA,CAAAE,IAAA;YAAA;UAAA;UAAA,MACjCf,MAAM,CAACgB,MAAM,KAAK,CAAC,IAAI,CAACd,aAAa;YAAAW,QAAA,CAAAE,IAAA;YAAA;UAAA;UAAAF,QAAA,CAAAE,IAAA;UAAA,WAAAE,qBAAA,CAAAvB,OAAA,EACXI,SAAS,CAACiB,IAAI,CAAC,CAAC;QAAA;UAAAX,qBAAA,GAAAS,QAAA,CAAAK,IAAA;UAArCb,KAAK,GAAAD,qBAAA,CAALC,KAAK;UAAEC,IAAI,GAAAF,qBAAA,CAAJE,IAAI;UAClB,IAAIA,IAAI,EAAE;YACRJ,aAAa,GAAG,IAAI;UACtB,CAAC,MAAM;YACLF,MAAM,GAAGK,KAAK;UAChB;UAACQ,QAAA,CAAAE,IAAA;UAAA;QAAA;UAAA,MACQd,MAAM,CAACe,MAAM,KAAK,CAAC,IAAI,CAACb,aAAa;YAAAU,QAAA,CAAAE,IAAA;YAAA;UAAA;UAAAF,QAAA,CAAAE,IAAA;UAAA,WAAAE,qBAAA,CAAAvB,OAAA,EAClBK,SAAS,CAACgB,IAAI,CAAC,CAAC;QAAA;UAAAR,sBAAA,GAAAM,QAAA,CAAAK,IAAA;UAArCb,MAAK,GAAAE,sBAAA,CAALF,KAAK;UAAEC,KAAI,GAAAC,sBAAA,CAAJD,IAAI;UAClB,IAAIA,KAAI,EAAE;YACRH,aAAa,GAAG,IAAI;UACtB,CAAC,MAAM;YACLF,MAAM,GAAGI,MAAK;UAChB;QAAC;UAGGK,KAAK,GAAGS,YAAY,CAACnB,MAAM,EAAEC,MAAM,CAAC;UAAA,KACtCS,KAAK;YAAAG,QAAA,CAAAE,IAAA;YAAA;UAAA;UAAAF,QAAA,CAAAE,IAAA;UACP,OAAML,KAAK;QAAA;UAAAG,QAAA,CAAAE,IAAA;UAAA;QAAA;QAAA;UAAA,OAAAF,QAAA,CAAAO,IAAA;MAAA;IAAA,GAAAvB,OAAA;EAAA,CAGhB;EAAA,OAAAP,kBAAA,CAAAC,KAAA,OAAAC,SAAA;AAAA;AASD,SAAS2B,YAAYA,CAACnB,MAAgB,EAAEC,MAAgB,EAAqB;EAC3E,IAAMoB,WAAmB,GAAGC,IAAI,CAACC,GAAG,CAACvB,MAAM,CAACgB,MAAM,EAAEf,MAAM,CAACe,MAAM,CAAC;EAClE,IAAIK,WAAW,KAAK,CAAC,EA
AE;IACrB,OAAO,IAAI;EACb;EAGA,IAAMX,KAAiB,GAAG,CAACV,MAAM,CAACwB,KAAK,CAAC,CAAC,EAAEH,WAAW,CAAC,EAAEpB,MAAM,CAACuB,KAAK,CAAC,CAAC,EAAEH,WAAW,CAAC,CAAC;EAGtFrB,MAAM,CAACyB,MAAM,CAAC,CAAC,EAAEJ,WAAW,CAAC;EAC7BpB,MAAM,CAACwB,MAAM,CAAC,CAAC,EAAEJ,WAAW,CAAC;EAC7B,OAAOX,KAAK;AACd"}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports._typecheckShapefileLoader = exports.ShapefileLoader = void 0;
|
|
7
|
+
var _shpLoader = require("./shp-loader");
|
|
8
|
+
var _parseShapefile = require("./lib/parsers/parse-shapefile");
|
|
9
|
+
var VERSION = typeof "4.0.0-alpha.7" !== 'undefined' ? "4.0.0-alpha.7" : 'latest';
|
|
10
|
+
var ShapefileLoader = {
|
|
11
|
+
name: 'Shapefile',
|
|
12
|
+
id: 'shapefile',
|
|
13
|
+
module: 'shapefile',
|
|
14
|
+
version: VERSION,
|
|
15
|
+
category: 'geometry',
|
|
16
|
+
extensions: ['shp'],
|
|
17
|
+
mimeTypes: ['application/octet-stream'],
|
|
18
|
+
tests: [new Uint8Array(_shpLoader.SHP_MAGIC_NUMBER).buffer],
|
|
19
|
+
options: {
|
|
20
|
+
shapefile: {},
|
|
21
|
+
shp: {
|
|
22
|
+
_maxDimensions: 4
|
|
23
|
+
}
|
|
24
|
+
},
|
|
25
|
+
parse: _parseShapefile.parseShapefile,
|
|
26
|
+
parseInBatches: _parseShapefile.parseShapefileInBatches
|
|
27
|
+
};
|
|
28
|
+
exports.ShapefileLoader = ShapefileLoader;
|
|
29
|
+
var _typecheckShapefileLoader = ShapefileLoader;
|
|
30
|
+
exports._typecheckShapefileLoader = _typecheckShapefileLoader;
|
|
31
|
+
//# sourceMappingURL=shapefile-loader.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"shapefile-loader.js","names":["_shpLoader","require","_parseShapefile","VERSION","ShapefileLoader","name","id","module","version","category","extensions","mimeTypes","tests","Uint8Array","SHP_MAGIC_NUMBER","buffer","options","shapefile","shp","_maxDimensions","parse","parseShapefile","parseInBatches","parseShapefileInBatches","exports","_typecheckShapefileLoader"],"sources":["../../src/shapefile-loader.ts"],"sourcesContent":["import type {LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {SHP_MAGIC_NUMBER} from './shp-loader';\nimport {parseShapefile, parseShapefileInBatches} from './lib/parsers/parse-shapefile';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * Shapefile loader\n * @note Shapefile is multifile format and requires providing additional files\n */\nexport const ShapefileLoader = {\n name: 'Shapefile',\n id: 'shapefile',\n module: 'shapefile',\n version: VERSION,\n category: 'geometry',\n extensions: ['shp'],\n mimeTypes: ['application/octet-stream'],\n tests: [new Uint8Array(SHP_MAGIC_NUMBER).buffer],\n options: {\n shapefile: {},\n shp: {\n _maxDimensions: 4\n }\n },\n parse: parseShapefile,\n parseInBatches: parseShapefileInBatches\n};\n\nexport const _typecheckShapefileLoader: LoaderWithParser = 
ShapefileLoader;\n"],"mappings":";;;;;;AACA,IAAAA,UAAA,GAAAC,OAAA;AACA,IAAAC,eAAA,GAAAD,OAAA;AAIA,IAAME,OAAO,GAAG,sBAAkB,KAAK,WAAW,qBAAiB,QAAQ;AAMpE,IAAMC,eAAe,GAAG;EAC7BC,IAAI,EAAE,WAAW;EACjBC,EAAE,EAAE,WAAW;EACfC,MAAM,EAAE,WAAW;EACnBC,OAAO,EAAEL,OAAO;EAChBM,QAAQ,EAAE,UAAU;EACpBC,UAAU,EAAE,CAAC,KAAK,CAAC;EACnBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EACvCC,KAAK,EAAE,CAAC,IAAIC,UAAU,CAACC,2BAAgB,CAAC,CAACC,MAAM,CAAC;EAChDC,OAAO,EAAE;IACPC,SAAS,EAAE,CAAC,CAAC;IACbC,GAAG,EAAE;MACHC,cAAc,EAAE;IAClB;EACF,CAAC;EACDC,KAAK,EAAEC,8BAAc;EACrBC,cAAc,EAAEC;AAClB,CAAC;AAACC,OAAA,CAAApB,eAAA,GAAAA,eAAA;AAEK,IAAMqB,yBAA2C,GAAGrB,eAAe;AAACoB,OAAA,CAAAC,yBAAA,GAAAA,yBAAA"}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
|
|
4
|
+
Object.defineProperty(exports, "__esModule", {
|
|
5
|
+
value: true
|
|
6
|
+
});
|
|
7
|
+
exports.SHP_MAGIC_NUMBER = exports.SHPWorkerLoader = exports.SHPLoader = void 0;
|
|
8
|
+
var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
|
|
9
|
+
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
|
|
10
|
+
var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
|
|
11
|
+
var _parseShp = require("./lib/parsers/parse-shp");
|
|
12
|
+
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
|
|
13
|
+
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
|
|
14
|
+
var VERSION = typeof "4.0.0-alpha.7" !== 'undefined' ? "4.0.0-alpha.7" : 'latest';
|
|
15
|
+
var SHP_MAGIC_NUMBER = [0x00, 0x00, 0x27, 0x0a];
|
|
16
|
+
exports.SHP_MAGIC_NUMBER = SHP_MAGIC_NUMBER;
|
|
17
|
+
var SHPWorkerLoader = {
|
|
18
|
+
name: 'SHP',
|
|
19
|
+
id: 'shp',
|
|
20
|
+
module: 'shapefile',
|
|
21
|
+
version: VERSION,
|
|
22
|
+
worker: true,
|
|
23
|
+
category: 'geometry',
|
|
24
|
+
extensions: ['shp'],
|
|
25
|
+
mimeTypes: ['application/octet-stream'],
|
|
26
|
+
tests: [new Uint8Array(SHP_MAGIC_NUMBER).buffer],
|
|
27
|
+
options: {
|
|
28
|
+
shp: {
|
|
29
|
+
_maxDimensions: 4
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
};
|
|
33
|
+
exports.SHPWorkerLoader = SHPWorkerLoader;
|
|
34
|
+
var SHPLoader = _objectSpread(_objectSpread({}, SHPWorkerLoader), {}, {
|
|
35
|
+
parse: function () {
|
|
36
|
+
var _parse = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee(arrayBuffer, options) {
|
|
37
|
+
return _regenerator.default.wrap(function _callee$(_context) {
|
|
38
|
+
while (1) switch (_context.prev = _context.next) {
|
|
39
|
+
case 0:
|
|
40
|
+
return _context.abrupt("return", (0, _parseShp.parseSHP)(arrayBuffer, options));
|
|
41
|
+
case 1:
|
|
42
|
+
case "end":
|
|
43
|
+
return _context.stop();
|
|
44
|
+
}
|
|
45
|
+
}, _callee);
|
|
46
|
+
}));
|
|
47
|
+
function parse(_x, _x2) {
|
|
48
|
+
return _parse.apply(this, arguments);
|
|
49
|
+
}
|
|
50
|
+
return parse;
|
|
51
|
+
}(),
|
|
52
|
+
parseSync: _parseShp.parseSHP,
|
|
53
|
+
parseInBatches: _parseShp.parseSHPInBatches
|
|
54
|
+
});
|
|
55
|
+
exports.SHPLoader = SHPLoader;
|
|
56
|
+
//# sourceMappingURL=shp-loader.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"shp-loader.js","names":["_parseShp","require","ownKeys","object","enumerableOnly","keys","Object","getOwnPropertySymbols","symbols","filter","sym","getOwnPropertyDescriptor","enumerable","push","apply","_objectSpread","target","i","arguments","length","source","forEach","key","_defineProperty2","default","getOwnPropertyDescriptors","defineProperties","defineProperty","VERSION","SHP_MAGIC_NUMBER","exports","SHPWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","tests","Uint8Array","buffer","options","shp","_maxDimensions","SHPLoader","parse","_parse","_asyncToGenerator2","_regenerator","mark","_callee","arrayBuffer","wrap","_callee$","_context","prev","next","abrupt","parseSHP","stop","_x","_x2","parseSync","parseInBatches","parseSHPInBatches"],"sources":["../../src/shp-loader.ts"],"sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseSHP, parseSHPInBatches} from './lib/parsers/parse-shp';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport const SHP_MAGIC_NUMBER = [0x00, 0x00, 0x27, 0x0a];\n\n/**\n * SHP file loader\n */\nexport const SHPWorkerLoader: Loader = {\n name: 'SHP',\n id: 'shp',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'geometry',\n extensions: ['shp'],\n mimeTypes: ['application/octet-stream'],\n // ISSUE: This also identifies SHX files, which are identical to SHP for the first 100 bytes...\n tests: [new Uint8Array(SHP_MAGIC_NUMBER).buffer],\n options: {\n shp: {\n _maxDimensions: 4\n }\n }\n};\n\n/** SHP file loader */\nexport const SHPLoader: LoaderWithParser = {\n ...SHPWorkerLoader,\n parse: async (arrayBuffer, options?) 
=> parseSHP(arrayBuffer, options),\n parseSync: parseSHP,\n parseInBatches: parseSHPInBatches\n};\n"],"mappings":";;;;;;;;;;AACA,IAAAA,SAAA,GAAAC,OAAA;AAAoE,SAAAC,QAAAC,MAAA,EAAAC,cAAA,QAAAC,IAAA,GAAAC,MAAA,CAAAD,IAAA,CAAAF,MAAA,OAAAG,MAAA,CAAAC,qBAAA,QAAAC,OAAA,GAAAF,MAAA,CAAAC,qBAAA,CAAAJ,MAAA,GAAAC,cAAA,KAAAI,OAAA,GAAAA,OAAA,CAAAC,MAAA,WAAAC,GAAA,WAAAJ,MAAA,CAAAK,wBAAA,CAAAR,MAAA,EAAAO,GAAA,EAAAE,UAAA,OAAAP,IAAA,CAAAQ,IAAA,CAAAC,KAAA,CAAAT,IAAA,EAAAG,OAAA,YAAAH,IAAA;AAAA,SAAAU,cAAAC,MAAA,aAAAC,CAAA,MAAAA,CAAA,GAAAC,SAAA,CAAAC,MAAA,EAAAF,CAAA,UAAAG,MAAA,WAAAF,SAAA,CAAAD,CAAA,IAAAC,SAAA,CAAAD,CAAA,QAAAA,CAAA,OAAAf,OAAA,CAAAI,MAAA,CAAAc,MAAA,OAAAC,OAAA,WAAAC,GAAA,QAAAC,gBAAA,CAAAC,OAAA,EAAAR,MAAA,EAAAM,GAAA,EAAAF,MAAA,CAAAE,GAAA,SAAAhB,MAAA,CAAAmB,yBAAA,GAAAnB,MAAA,CAAAoB,gBAAA,CAAAV,MAAA,EAAAV,MAAA,CAAAmB,yBAAA,CAAAL,MAAA,KAAAlB,OAAA,CAAAI,MAAA,CAAAc,MAAA,GAAAC,OAAA,WAAAC,GAAA,IAAAhB,MAAA,CAAAqB,cAAA,CAAAX,MAAA,EAAAM,GAAA,EAAAhB,MAAA,CAAAK,wBAAA,CAAAS,MAAA,EAAAE,GAAA,iBAAAN,MAAA;AAIpE,IAAMY,OAAO,GAAG,sBAAkB,KAAK,WAAW,qBAAiB,QAAQ;AAEpE,IAAMC,gBAAgB,GAAG,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC;AAACC,OAAA,CAAAD,gBAAA,GAAAA,gBAAA;AAKlD,IAAME,eAAuB,GAAG;EACrCC,IAAI,EAAE,KAAK;EACXC,EAAE,EAAE,KAAK;EACTC,MAAM,EAAE,WAAW;EACnBC,OAAO,EAAEP,OAAO;EAChBQ,MAAM,EAAE,IAAI;EACZC,QAAQ,EAAE,UAAU;EACpBC,UAAU,EAAE,CAAC,KAAK,CAAC;EACnBC,SAAS,EAAE,CAAC,0BAA0B,CAAC;EAEvCC,KAAK,EAAE,CAAC,IAAIC,UAAU,CAACZ,gBAAgB,CAAC,CAACa,MAAM,CAAC;EAChDC,OAAO,EAAE;IACPC,GAAG,EAAE;MACHC,cAAc,EAAE;IAClB;EACF;AACF,CAAC;AAACf,OAAA,CAAAC,eAAA,GAAAA,eAAA;AAGK,IAAMe,SAA2B,GAAA/B,aAAA,CAAAA,aAAA,KACnCgB,eAAe;EAClBgB,KAAK;IAAA,IAAAC,MAAA,OAAAC,kBAAA,CAAAzB,OAAA,EAAA0B,YAAA,CAAA1B,OAAA,CAAA2B,IAAA,CAAE,SAAAC,QAAOC,WAAW,EAAEV,OAAQ;MAAA,OAAAO,YAAA,CAAA1B,OAAA,CAAA8B,IAAA,UAAAC,SAAAC,QAAA;QAAA,kBAAAA,QAAA,CAAAC,IAAA,GAAAD,QAAA,CAAAE,IAAA;UAAA;YAAA,OAAAF,QAAA,CAAAG,MAAA,WAAK,IAAAC,kBAAQ,EAACP,WAAW,EAAEV,OAAO,CAAC;UAAA;UAAA;YAAA,OAAAa,QAAA,CAAAK,IAAA;QAAA;MAAA,GAAAT,OAAA;IAAA;IAAA,SAAAL,MAAAe,EAAA,EAAAC,GAAA;MAAA,OA
AAf,MAAA,CAAAlC,KAAA,OAAAI,SAAA;IAAA;IAAA,OAAA6B,KAAA;EAAA;EACtEiB,SAAS,EAAEJ,kBAAQ;EACnBK,cAAc,EAAEC;AAAiB,EAClC;AAACpC,OAAA,CAAAgB,SAAA,GAAAA,SAAA"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dbf-worker.js","names":["_dbfLoader","require","_loaderUtils","createLoaderWorker","DBFLoader"],"sources":["../../../src/workers/dbf-worker.ts"],"sourcesContent":["import {DBFLoader} from '../dbf-loader';\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\n\ncreateLoaderWorker(DBFLoader);\n"],"mappings":";;AAAA,IAAAA,UAAA,GAAAC,OAAA;AACA,IAAAC,YAAA,GAAAD,OAAA;AAEA,IAAAE,+BAAkB,EAACC,oBAAS,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"shp-worker.js","names":["_shpLoader","require","_loaderUtils","createLoaderWorker","SHPLoader"],"sources":["../../../src/workers/shp-worker.ts"],"sourcesContent":["import {SHPLoader} from '../shp-loader';\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\n\ncreateLoaderWorker(SHPLoader);\n"],"mappings":";;AAAA,IAAAA,UAAA,GAAAC,OAAA;AACA,IAAAC,YAAA,GAAAD,OAAA;AAEA,IAAAE,+BAAkB,EAACC,oBAAS,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"bundle.js","names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"sources":["../../src/bundle.ts"],"sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"mappings":"AACA,MAAMA,aAAa,GAAGC,OAAO,CAAC,SAAS,CAAC;AACxCC,UAAU,CAACC,OAAO,GAAGD,UAAU,CAACC,OAAO,IAAI,CAAC,CAAC;AAC7CC,MAAM,CAACC,OAAO,GAAGC,MAAM,CAACC,MAAM,CAACL,UAAU,CAACC,OAAO,EAAEH,aAAa,CAAC"}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { parseDBF, parseDBFInBatches } from './lib/parsers/parse-dbf';
|
|
2
|
+
const VERSION = typeof "4.0.0-alpha.7" !== 'undefined' ? "4.0.0-alpha.7" : 'latest';
|
|
3
|
+
export const DBFWorkerLoader = {
|
|
4
|
+
name: 'DBF',
|
|
5
|
+
id: 'dbf',
|
|
6
|
+
module: 'shapefile',
|
|
7
|
+
version: VERSION,
|
|
8
|
+
worker: true,
|
|
9
|
+
category: 'table',
|
|
10
|
+
extensions: ['dbf'],
|
|
11
|
+
mimeTypes: ['application/x-dbf'],
|
|
12
|
+
options: {
|
|
13
|
+
dbf: {
|
|
14
|
+
encoding: 'latin1'
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
};
|
|
18
|
+
export const DBFLoader = {
|
|
19
|
+
...DBFWorkerLoader,
|
|
20
|
+
parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),
|
|
21
|
+
parseSync: parseDBF,
|
|
22
|
+
parseInBatches: parseDBFInBatches
|
|
23
|
+
};
|
|
24
|
+
//# sourceMappingURL=dbf-loader.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dbf-loader.js","names":["parseDBF","parseDBFInBatches","VERSION","DBFWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","options","dbf","encoding","DBFLoader","parse","arrayBuffer","parseSync","parseInBatches"],"sources":["../../src/dbf-loader.ts"],"sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseDBF, parseDBFInBatches} from './lib/parsers/parse-dbf';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles\n */\nexport const DBFWorkerLoader: Loader = {\n name: 'DBF',\n id: 'dbf',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['dbf'],\n mimeTypes: ['application/x-dbf'],\n options: {\n dbf: {\n encoding: 'latin1'\n }\n }\n};\n\n/** DBF file loader */\nexport const DBFLoader: LoaderWithParser = {\n ...DBFWorkerLoader,\n parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),\n parseSync: parseDBF,\n parseInBatches: parseDBFInBatches\n};\n"],"mappings":"AACA,SAAQA,QAAQ,EAAEC,iBAAiB,QAAO,yBAAyB;AAInE,MAAMC,OAAO,GAAG,sBAAkB,KAAK,WAAW,qBAAiB,QAAQ;AAK3E,OAAO,MAAMC,eAAuB,GAAG;EACrCC,IAAI,EAAE,KAAK;EACXC,EAAE,EAAE,KAAK;EACTC,MAAM,EAAE,WAAW;EACnBC,OAAO,EAAEL,OAAO;EAChBM,MAAM,EAAE,IAAI;EACZC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,KAAK,CAAC;EACnBC,SAAS,EAAE,CAAC,mBAAmB,CAAC;EAChCC,OAAO,EAAE;IACPC,GAAG,EAAE;MACHC,QAAQ,EAAE;IACZ;EACF;AACF,CAAC;AAGD,OAAO,MAAMC,SAA2B,GAAG;EACzC,GAAGZ,eAAe;EAClBa,KAAK,EAAE,MAAAA,CAAOC,WAAW,EAAEL,OAAO,KAAKZ,QAAQ,CAACiB,WAAW,EAAEL,OAAO,CAAC;EACrEM,SAAS,EAAElB,QAAQ;EACnBmB,cAAc,EAAElB;AAClB,CAAC"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","names":["ShapefileLoader","DBFLoader","DBFWorkerLoader","SHPLoader","SHPWorkerLoader"],"sources":["../../src/index.ts"],"sourcesContent":["export {ShapefileLoader} from './shapefile-loader';\nexport {DBFLoader, DBFWorkerLoader} from './dbf-loader';\nexport {SHPLoader, SHPWorkerLoader} from './shp-loader';\n"],"mappings":"AAAA,SAAQA,eAAe,QAAO,oBAAoB;AAClD,SAAQC,SAAS,EAAEC,eAAe,QAAO,cAAc;AACvD,SAAQC,SAAS,EAAEC,eAAe,QAAO,cAAc"}
|
|
@@ -0,0 +1,296 @@
|
|
|
1
|
+
import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
|
|
2
|
+
import BinaryChunkReader from '../streaming/binary-chunk-reader';
|
|
3
|
+
// Multi-byte integer fields in a DBF header are stored little-endian.
const LITTLE_ENDIAN = true;
// Fixed size in bytes of the main DBF file header (and of each field
// descriptor record that follows it).
const DBF_HEADER_SIZE = 32;
|
|
5
|
+
// Parser state machine states. Built as a bidirectional lookup
// (name -> ordinal and ordinal -> name), mirroring a transpiled TS enum.
const STATE = (() => {
  const lookup = {};
  const names = ['START', 'FIELD_DESCRIPTORS', 'FIELD_PROPERTIES', 'END', 'ERROR'];
  names.forEach((name, ordinal) => {
    lookup[name] = ordinal;
    lookup[ordinal] = name;
  });
  return lookup;
})();
|
|
13
|
+
/**
 * Incremental DBF parser. Feed it binary chunks via write(), then call
 * end(); parsed header/fields/rows accumulate on `this.result`.
 */
class DBFParser {
  /** @param {{encoding: string}} options - text encoding for character data */
  constructor(options) {
    this.binaryReader = new BinaryChunkReader();
    this.textDecoder = new TextDecoder(options.encoding);
    this.state = STATE.START;
    this.result = {
      data: []
    };
  }

  /** Append a chunk and advance the state machine as far as the data allows. */
  write(arrayBuffer) {
    this.binaryReader.write(arrayBuffer);
    this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
  }

  /** Signal end of input; flags an error if parsing did not reach END. */
  end() {
    this.binaryReader.end();
    this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
    const finished = this.state === STATE.END;
    if (!finished) {
      this.state = STATE.ERROR;
      this.result.error = 'DBF incomplete file';
    }
  }
}
|
|
36
|
+
/**
 * Parse a complete DBF file held in memory.
 *
 * @param arrayBuffer - the whole DBF file contents
 * @param options - may carry `dbf.encoding`, plus `tables.format` /
 *   `dbf.shape` selecting the output shape
 * @returns rows array (default / 'rows'), a {schema, rows} table ('table'),
 *   or a tagged {shape, schema, data} object ('object-row-table')
 */
export function parseDBF(arrayBuffer, options = {}) {
  const {encoding = 'latin1'} = options.dbf || {};

  const parser = new DBFParser({encoding});
  parser.write(arrayBuffer);
  parser.end();

  const {data, schema} = parser.result;

  // `tables.format` (generic table option) takes precedence over `dbf.shape`.
  const shape = options?.tables?.format || options?.dbf?.shape;

  if (shape === 'object-row-table') {
    return {
      shape: 'object-row-table',
      schema,
      data
    };
  }
  if (shape === 'table') {
    return {
      schema,
      rows: data
    };
  }
  // 'rows' and any unrecognized shape fall back to the raw row array.
  return data;
}
|
|
72
|
+
/**
 * Parse a DBF file incrementally from an async iterator of ArrayBuffers.
 * Yields the header object once (when available), then batches of rows.
 *
 * @param asyncIterator - async iterable of binary chunks
 * @param options - may carry `dbf.encoding`
 * @returns an async generator of header/row batches, or a rejected Promise
 *   if synchronous setup fails
 */
export function parseDBFInBatches(asyncIterator, options = {}) {
  try {
    // Note: the parser is created lazily inside the generator so that any
    // construction error surfaces on first iteration, not at call time.
    return (async function* generateBatches() {
      const {encoding = 'latin1'} = options.dbf || {};
      const parser = new DBFParser({encoding});
      let headerReturned = false;
      for await (const chunk of asyncIterator) {
        parser.write(chunk);
        // Emit the header exactly once, as soon as it has been parsed.
        if (!headerReturned && parser.result.dbfHeader) {
          headerReturned = true;
          yield parser.result.dbfHeader;
        }
        // Flush any rows completed by this chunk.
        if (parser.result.data.length > 0) {
          yield parser.result.data;
          parser.result.data = [];
        }
      }
      parser.end();
      // Flush rows completed by the final chunk.
      if (parser.result.data.length > 0) {
        yield parser.result.data;
      }
    })();
  } catch (e) {
    return Promise.reject(e);
  }
}
|
|
103
|
+
/**
 * Resumable DBF state machine. Advances as far as the buffered bytes allow;
 * returns the state to resume from when more data arrives. Parsed output is
 * accumulated on `result` (dbfHeader, dbfFields, schema, data, progress).
 *
 * Returning early (when getDataView yields nothing) leaves `state` unchanged
 * so the same step is retried on the next write().
 */
function parseState(state, result, binaryReader, textDecoder) {
  while (true) {
    try {
      switch (state) {
        case STATE.ERROR:
        case STATE.END:
          return state;
        case STATE.START:
          // Parse the fixed 32-byte main header.
          const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
          if (!dataView) {
            // Not enough bytes buffered yet; resume here later.
            return state;
          }
          result.dbfHeader = parseDBFHeader(dataView);
          result.progress = {
            bytesUsed: 0,
            rowsTotal: result.dbfHeader.nRecords,
            rows: 0
          };
          state = STATE.FIELD_DESCRIPTORS;
          break;
        case STATE.FIELD_DESCRIPTORS:
          // Field descriptors occupy headerLength minus the main header.
          const fieldDescriptorView = binaryReader.getDataView(result.dbfHeader.headerLength - DBF_HEADER_SIZE);
          if (!fieldDescriptorView) {
            return state;
          }
          result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
          result.schema = {
            fields: result.dbfFields.map(dbfField => makeField(dbfField)),
            metadata: {}
          };
          state = STATE.FIELD_PROPERTIES;
          // Skip one byte — presumably the 0x0D header terminator; the
          // descriptor view above already stops one byte short of it.
          binaryReader.skip(1);
          break;
        case STATE.FIELD_PROPERTIES:
          const {
            recordLength = 0,
            nRecords = 0
          } = (result === null || result === void 0 ? void 0 : result.dbfHeader) || {};
          // Read records until the declared count is reached or data runs out.
          while (result.data.length < nRecords) {
            // recordLength includes a 1-byte deletion flag, skipped below.
            const recordView = binaryReader.getDataView(recordLength - 1);
            if (!recordView) {
              return state;
            }
            binaryReader.skip(1);
            const row = parseRow(recordView, result.dbfFields, textDecoder);
            result.data.push(row);
            result.progress.rows = result.data.length;
          }
          state = STATE.END;
          break;
        default:
          state = STATE.ERROR;
          result.error = "illegal parser state ".concat(state);
          return state;
      }
    } catch (error) {
      // Any parse failure is terminal: record it and stop.
      state = STATE.ERROR;
      result.error = "DBF parsing failed: ".concat(error.message);
      return state;
    }
  }
}
|
|
165
|
+
/**
 * Decode the fixed 32-byte DBF main header.
 * @param headerView - DataView over the first 32 bytes of the file
 * @returns header fields: last-update date, record count and sizes
 */
function parseDBFHeader(headerView) {
  // Last-update year is stored as an offset from 1900.
  const year = headerView.getUint8(1) + 1900;
  const month = headerView.getUint8(2);
  const day = headerView.getUint8(3);
  const nRecords = headerView.getUint32(4, LITTLE_ENDIAN);
  const headerLength = headerView.getUint16(8, LITTLE_ENDIAN);
  const recordLength = headerView.getUint16(10, LITTLE_ENDIAN);
  const languageDriver = headerView.getUint8(29);
  return {year, month, day, nRecords, headerLength, recordLength, languageDriver};
}
|
|
176
|
+
/**
 * Decode the 32-byte field descriptor records that follow the main header.
 * @param view - DataView over the descriptor area (trailing terminator byte
 *   included, hence the `- 1` in the count)
 * @param textDecoder - decoder for the field-name bytes
 * @returns array of {name, dataType, fieldLength, decimal}
 */
function parseFieldDescriptors(view, textDecoder) {
  const DESCRIPTOR_SIZE = 32;
  const descriptorCount = (view.byteLength - 1) / DESCRIPTOR_SIZE;
  const descriptors = [];
  for (let index = 0; index < descriptorCount; index++) {
    const base = index * DESCRIPTOR_SIZE;
    // Field name: up to 11 bytes, NUL-padded.
    const nameBytes = new Uint8Array(view.buffer, view.byteOffset + base, 11);
    descriptors.push({
      name: textDecoder.decode(nameBytes).replace(/\u0000/g, ''),
      dataType: String.fromCharCode(view.getUint8(base + 11)),
      fieldLength: view.getUint8(base + 16),
      decimal: view.getUint8(base + 17)
    });
  }
  return descriptors;
}
|
|
192
|
+
/**
 * Decode one fixed-width DBF record into a row object keyed by field name.
 * @param view - DataView over the record bytes (deletion flag excluded)
 * @param fields - field descriptors giving each column's width and type
 * @param textDecoder - decoder for the raw field bytes
 */
function parseRow(view, fields, textDecoder) {
  const row = {};
  let cursor = 0;
  for (const field of fields) {
    const fieldBytes = new Uint8Array(view.buffer, view.byteOffset + cursor, field.fieldLength);
    row[field.name] = parseField(textDecoder.decode(fieldBytes), field.dataType);
    cursor += field.fieldLength;
  }
  return row;
}
|
|
202
|
+
/**
 * Convert a single field's text to a JS value based on its DBF type code.
 * B/F/N/O -> number, C -> string, D -> date (ms), L -> boolean.
 * @throws {Error} for unrecognized type codes
 */
function parseField(text, dataType) {
  switch (dataType) {
    // All four numeric type codes share the same handling.
    case 'B':
    case 'F':
    case 'N':
    case 'O':
      return parseNumber(text);
    case 'C':
      return parseCharacter(text);
    case 'D':
      return parseDate(text);
    case 'L':
      return parseBoolean(text);
    default:
      throw new Error('Unsupported data type');
  }
}
|
|
222
|
+
/**
 * Parse a DBF date field (YYYYMMDD) to a UTC timestamp in milliseconds.
 *
 * All three components are parsed explicitly with radix 10. The original
 * passed the year/day substrings to Date.UTC and relied on implicit
 * ToNumber coercion, under which ''/whitespace silently became 0 and could
 * fabricate a bogus epoch-era timestamp; now any non-numeric component
 * consistently yields NaN.
 *
 * @param {string} str - 8-character date string
 * @returns {number} milliseconds since epoch, or NaN if malformed
 */
function parseDate(str) {
  const year = parseInt(str.slice(0, 4), 10);
  const month = parseInt(str.slice(4, 6), 10) - 1; // Date.UTC months are 0-indexed
  const day = parseInt(str.slice(6, 8), 10);
  return Date.UTC(year, month, day);
}
|
|
225
|
+
/**
 * Parse a DBF logical field: n/f -> false, y/t -> true (case-insensitive),
 * anything else (e.g. '?' for uninitialized) -> null.
 */
function parseBoolean(value) {
  if (/^[nf]$/i.test(value)) {
    return false;
  }
  if (/^[yt]$/i.test(value)) {
    return true;
  }
  return null;
}
|
|
228
|
+
/**
 * Parse a DBF numeric field as a float; unparseable text becomes null.
 */
function parseNumber(text) {
  const value = Number.parseFloat(text);
  return Number.isNaN(value) ? null : value;
}
|
|
232
|
+
/**
 * Parse a DBF character field: strip the fixed-width padding; a field that
 * is all whitespace becomes null.
 */
function parseCharacter(text) {
  const trimmed = text.trim();
  return trimmed.length > 0 ? trimmed : null;
}
|
|
235
|
+
/**
 * Map a DBF field descriptor to a schema field.
 *
 * The original spelled out four byte-identical 'float64' branches
 * (B/F/N/O); they are collapsed into shared fallthrough cases here.
 *
 * @param {{name: string, dataType: string, fieldLength: number, decimal: number}} field
 *   DBF descriptor; fieldLength/decimal are accepted but (as before) not
 *   carried into the schema field.
 * @returns {{name: string, type: string, nullable: boolean, metadata: object}}
 * @throws {Error} for unrecognized type codes
 */
function makeField({name, dataType, fieldLength, decimal}) {
  switch (dataType) {
    // All four numeric type codes map to float64.
    case 'B':
    case 'F':
    case 'N':
    case 'O':
      return {name, type: 'float64', nullable: true, metadata: {}};
    case 'C':
      return {name, type: 'utf8', nullable: true, metadata: {}};
    case 'D':
      return {name, type: 'timestamp-millisecond', nullable: true, metadata: {}};
    case 'L':
      return {name, type: 'bool', nullable: true, metadata: {}};
    default:
      throw new Error('Unsupported data type');
  }
}
|
|
296
|
+
//# sourceMappingURL=parse-dbf.js.map
|