@loaders.gl/shapefile 4.0.0-alpha.9 → 4.0.0-beta.2
This diff shows the published contents of two package versions released to one of the supported public registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.
- package/dist/dbf-worker.js +12 -5
- package/dist/dist.min.js +112 -43
- package/dist/es5/dbf-loader.js +4 -2
- package/dist/es5/dbf-loader.js.map +1 -1
- package/dist/es5/index.js +21 -0
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/parsers/parse-dbf.js +2 -2
- package/dist/es5/lib/parsers/parse-dbf.js.map +1 -1
- package/dist/es5/lib/parsers/parse-shapefile.js +82 -78
- package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -1
- package/dist/es5/lib/parsers/parse-shp.js +2 -2
- package/dist/es5/lib/parsers/parse-shp.js.map +1 -1
- package/dist/es5/lib/streaming/binary-chunk-reader.js +2 -2
- package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -1
- package/dist/es5/lib/streaming/binary-reader.js +2 -2
- package/dist/es5/lib/streaming/binary-reader.js.map +1 -1
- package/dist/es5/lib/streaming/zip-batch-iterators.js +30 -26
- package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -1
- package/dist/es5/shapefile-loader.js +2 -4
- package/dist/es5/shapefile-loader.js.map +1 -1
- package/dist/es5/shp-loader.js +4 -2
- package/dist/es5/shp-loader.js.map +1 -1
- package/dist/esm/dbf-loader.js +4 -2
- package/dist/esm/dbf-loader.js.map +1 -1
- package/dist/esm/index.js +3 -0
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/parsers/parse-dbf.js +1 -1
- package/dist/esm/lib/parsers/parse-dbf.js.map +1 -1
- package/dist/esm/lib/parsers/parse-shapefile.js +31 -26
- package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -1
- package/dist/esm/lib/parsers/parse-shp.js +1 -1
- package/dist/esm/lib/parsers/parse-shp.js.map +1 -1
- package/dist/esm/lib/streaming/binary-chunk-reader.js +1 -1
- package/dist/esm/lib/streaming/binary-chunk-reader.js.map +1 -1
- package/dist/esm/lib/streaming/binary-reader.js +1 -1
- package/dist/esm/lib/streaming/binary-reader.js.map +1 -1
- package/dist/esm/lib/streaming/zip-batch-iterators.js +17 -11
- package/dist/esm/lib/streaming/zip-batch-iterators.js.map +1 -1
- package/dist/esm/shapefile-loader.js +1 -2
- package/dist/esm/shapefile-loader.js.map +1 -1
- package/dist/esm/shp-loader.js +2 -2
- package/dist/esm/shp-loader.js.map +1 -1
- package/dist/shp-worker.js +10 -5
- package/dist/src/bundle.d.ts.map +1 -0
- package/dist/src/dbf-loader.d.ts.map +1 -0
- package/dist/src/index.d.ts +7 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/lib/parsers/parse-dbf.d.ts.map +1 -0
- package/dist/{lib → src/lib}/parsers/parse-shapefile.d.ts +2 -2
- package/dist/src/lib/parsers/parse-shapefile.d.ts.map +1 -0
- package/dist/src/lib/parsers/parse-shp-geometry.d.ts.map +1 -0
- package/dist/src/lib/parsers/parse-shp-header.d.ts.map +1 -0
- package/dist/{lib → src/lib}/parsers/parse-shp.d.ts +1 -1
- package/dist/src/lib/parsers/parse-shp.d.ts.map +1 -0
- package/dist/src/lib/parsers/parse-shx.d.ts.map +1 -0
- package/dist/src/lib/parsers/types.d.ts.map +1 -0
- package/dist/{lib → src/lib}/streaming/binary-chunk-reader.d.ts +2 -3
- package/dist/src/lib/streaming/binary-chunk-reader.d.ts.map +1 -0
- package/dist/{lib → src/lib}/streaming/binary-reader.d.ts +1 -1
- package/dist/src/lib/streaming/binary-reader.d.ts.map +1 -0
- package/dist/src/lib/streaming/zip-batch-iterators.d.ts +11 -0
- package/dist/src/lib/streaming/zip-batch-iterators.d.ts.map +1 -0
- package/dist/src/shapefile-loader.d.ts +7 -0
- package/dist/src/shapefile-loader.d.ts.map +1 -0
- package/dist/src/shp-loader.d.ts.map +1 -0
- package/dist/{workers → src/workers}/dbf-worker.d.ts.map +1 -1
- package/dist/{workers → src/workers}/shp-worker.d.ts.map +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/package.json +5 -6
- package/src/dbf-loader.ts +3 -1
- package/src/index.ts +5 -0
- package/src/lib/parsers/parse-dbf.ts +1 -1
- package/src/lib/parsers/parse-shapefile.ts +58 -40
- package/src/lib/parsers/parse-shp.ts +2 -2
- package/src/lib/streaming/binary-chunk-reader.ts +4 -3
- package/src/lib/streaming/binary-reader.ts +3 -1
- package/src/lib/streaming/zip-batch-iterators.ts +28 -14
- package/src/shapefile-loader.ts +2 -4
- package/src/shp-loader.ts +4 -1
- package/dist/bundle.d.ts.map +0 -1
- package/dist/bundle.js +0 -5
- package/dist/dbf-loader.d.ts.map +0 -1
- package/dist/dbf-loader.js +0 -32
- package/dist/index.d.ts +0 -4
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js +0 -11
- package/dist/lib/parsers/parse-dbf.d.ts.map +0 -1
- package/dist/lib/parsers/parse-dbf.js +0 -344
- package/dist/lib/parsers/parse-shapefile.d.ts.map +0 -1
- package/dist/lib/parsers/parse-shapefile.js +0 -244
- package/dist/lib/parsers/parse-shp-geometry.d.ts.map +0 -1
- package/dist/lib/parsers/parse-shp-geometry.js +0 -287
- package/dist/lib/parsers/parse-shp-header.d.ts.map +0 -1
- package/dist/lib/parsers/parse-shp-header.js +0 -43
- package/dist/lib/parsers/parse-shp.d.ts.map +0 -1
- package/dist/lib/parsers/parse-shp.js +0 -178
- package/dist/lib/parsers/parse-shx.d.ts.map +0 -1
- package/dist/lib/parsers/parse-shx.js +0 -28
- package/dist/lib/parsers/types.d.ts.map +0 -1
- package/dist/lib/parsers/types.js +0 -2
- package/dist/lib/streaming/binary-chunk-reader.d.ts.map +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js +0 -161
- package/dist/lib/streaming/binary-reader.d.ts.map +0 -1
- package/dist/lib/streaming/binary-reader.js +0 -52
- package/dist/lib/streaming/zip-batch-iterators.d.ts +0 -8
- package/dist/lib/streaming/zip-batch-iterators.d.ts.map +0 -1
- package/dist/lib/streaming/zip-batch-iterators.js +0 -61
- package/dist/shapefile-loader.d.ts +0 -26
- package/dist/shapefile-loader.d.ts.map +0 -1
- package/dist/shapefile-loader.js +0 -31
- package/dist/shp-loader.d.ts.map +0 -1
- package/dist/shp-loader.js +0 -35
- package/dist/workers/dbf-worker.js +0 -5
- package/dist/workers/shp-worker.js +0 -5
- /package/dist/{bundle.d.ts → src/bundle.d.ts} +0 -0
- /package/dist/{dbf-loader.d.ts → src/dbf-loader.d.ts} +0 -0
- /package/dist/{lib → src/lib}/parsers/parse-dbf.d.ts +0 -0
- /package/dist/{lib → src/lib}/parsers/parse-shp-geometry.d.ts +0 -0
- /package/dist/{lib → src/lib}/parsers/parse-shp-header.d.ts +0 -0
- /package/dist/{lib → src/lib}/parsers/parse-shx.d.ts +0 -0
- /package/dist/{lib → src/lib}/parsers/types.d.ts +0 -0
- /package/dist/{shp-loader.d.ts → src/shp-loader.d.ts} +0 -0
- /package/dist/{workers → src/workers}/dbf-worker.d.ts +0 -0
- /package/dist/{workers → src/workers}/shp-worker.d.ts +0 -0
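
The file list above shows the main packaging change in this release: compiled type declarations and their new .d.ts.map files move under dist/src/ (mirroring the src/ tree), and the old top-level CommonJS build (dist/index.js, dist/lib/**, dist/workers/**) is deleted in favor of the dist/es5 and dist/esm builds. The exported loader surface is unchanged, so consumer code is unaffected. A minimal consumption sketch (assuming the standard load API from @loaders.gl/core, which is not part of this diff):

import {load} from '@loaders.gl/core';
import {ShapefileLoader} from '@loaders.gl/shapefile';

// Fetches countries.shp plus its .dbf/.shx/.cpg/.prj sidecar files and
// returns {header, data: GeoJSON features, ...} per parseShapefile below.
const result = await load('data/countries.shp', ShapefileLoader);
console.log(result.header, result.data.length);
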
package/dist/index.js
DELETED
@@ -1,11 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SHPWorkerLoader = exports.SHPLoader = exports.DBFWorkerLoader = exports.DBFLoader = exports.ShapefileLoader = void 0;
-var shapefile_loader_1 = require("./shapefile-loader");
-Object.defineProperty(exports, "ShapefileLoader", { enumerable: true, get: function () { return shapefile_loader_1.ShapefileLoader; } });
-var dbf_loader_1 = require("./dbf-loader");
-Object.defineProperty(exports, "DBFLoader", { enumerable: true, get: function () { return dbf_loader_1.DBFLoader; } });
-Object.defineProperty(exports, "DBFWorkerLoader", { enumerable: true, get: function () { return dbf_loader_1.DBFWorkerLoader; } });
-var shp_loader_1 = require("./shp-loader");
-Object.defineProperty(exports, "SHPLoader", { enumerable: true, get: function () { return shp_loader_1.SHPLoader; } });
-Object.defineProperty(exports, "SHPWorkerLoader", { enumerable: true, get: function () { return shp_loader_1.SHPWorkerLoader; } });
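
This deleted file was the entry point of the removed top-level CommonJS build. Each Object.defineProperty getter is just the compiled form of a named re-export; reconstructed as modern ESM (a sketch derived from the compiled exports above, not copied from the package source), the same entry point reads:

export {ShapefileLoader} from './shapefile-loader';
export {DBFLoader, DBFWorkerLoader} from './dbf-loader';
export {SHPLoader, SHPWorkerLoader} from './shp-loader';
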

package/dist/lib/parsers/parse-dbf.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"parse-dbf.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-dbf.ts"],"names":[],"mappings":"AAAA,OAAO,EAAQ,cAAc,EAAC,MAAM,oBAAoB,CAAC;AAEzD,OAAO,EACL,gBAAgB,EAEhB,cAAc,EACd,SAAS,EACT,aAAa,EAEd,MAAM,SAAS,CAAC;AAkDjB;;;;GAIG;AACH,wBAAgB,QAAQ,CACtB,WAAW,EAAE,WAAW,EACxB,OAAO,GAAE,gBAAqB,GAC7B,aAAa,GAAG,cAAc,GAAG,cAAc,CAwBjD;AACD;;;GAGG;AACH,wBAAuB,iBAAiB,CACtC,aAAa,EAAE,aAAa,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,EACjE,OAAO,GAAE,gBAAqB,GAC7B,aAAa,CAAC,SAAS,GAAG,aAAa,GAAG,cAAc,CAAC,CAqB3D"}

package/dist/lib/parsers/parse-dbf.js
DELETED
@@ -1,344 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseDBFInBatches = exports.parseDBF = void 0;
-const binary_chunk_reader_1 = __importDefault(require("../streaming/binary-chunk-reader"));
-const LITTLE_ENDIAN = true;
-const DBF_HEADER_SIZE = 32;
-var STATE;
-(function (STATE) {
-    STATE[STATE["START"] = 0] = "START";
-    STATE[STATE["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
-    STATE[STATE["FIELD_PROPERTIES"] = 2] = "FIELD_PROPERTIES";
-    STATE[STATE["END"] = 3] = "END";
-    STATE[STATE["ERROR"] = 4] = "ERROR";
-})(STATE || (STATE = {}));
-class DBFParser {
-    constructor(options) {
-        this.binaryReader = new binary_chunk_reader_1.default();
-        this.state = STATE.START;
-        this.result = {
-            data: []
-        };
-        this.textDecoder = new TextDecoder(options.encoding);
-    }
-    /**
-     * @param arrayBuffer
-     */
-    write(arrayBuffer) {
-        this.binaryReader.write(arrayBuffer);
-        this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
-        // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
-        // important events:
-        // - schema available
-        // - first rows available
-        // - all rows available
-    }
-    end() {
-        this.binaryReader.end();
-        this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
-        // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
-        if (this.state !== STATE.END) {
-            this.state = STATE.ERROR;
-            this.result.error = 'DBF incomplete file';
-        }
-    }
-}
-/**
- * @param arrayBuffer
- * @param options
- * @returns DBFTable or rows
- */
-function parseDBF(arrayBuffer, options = {}) {
-    const { encoding = 'latin1' } = options.dbf || {};
-    const dbfParser = new DBFParser({ encoding });
-    dbfParser.write(arrayBuffer);
-    dbfParser.end();
-    const { data, schema } = dbfParser.result;
-    const shape = options?.tables?.format || options?.dbf?.shape;
-    switch (shape) {
-        case 'object-row-table': {
-            const table = {
-                shape: 'object-row-table',
-                schema,
-                data
-            };
-            return table;
-        }
-        case 'table':
-            return { schema, rows: data };
-        case 'rows':
-        default:
-            return data;
-    }
-}
-exports.parseDBF = parseDBF;
-/**
- * @param asyncIterator
- * @param options
- */
-async function* parseDBFInBatches(asyncIterator, options = {}) {
-    const { encoding = 'latin1' } = options.dbf || {};
-    const parser = new DBFParser({ encoding });
-    let headerReturned = false;
-    for await (const arrayBuffer of asyncIterator) {
-        parser.write(arrayBuffer);
-        if (!headerReturned && parser.result.dbfHeader) {
-            headerReturned = true;
-            yield parser.result.dbfHeader;
-        }
-        if (parser.result.data.length > 0) {
-            yield parser.result.data;
-            parser.result.data = [];
-        }
-    }
-    parser.end();
-    if (parser.result.data.length > 0) {
-        yield parser.result.data;
-    }
-}
-exports.parseDBFInBatches = parseDBFInBatches;
-/**
- * https://www.dbase.com/Knowledgebase/INT/db7_file_fmt.htm
- * @param state
- * @param result
- * @param binaryReader
- * @param textDecoder
- * @returns
- */
-/* eslint-disable complexity, max-depth */
-function parseState(state, result, binaryReader, textDecoder) {
-    // eslint-disable-next-line no-constant-condition
-    while (true) {
-        try {
-            switch (state) {
-                case STATE.ERROR:
-                case STATE.END:
-                    return state;
-                case STATE.START:
-                    // Parse initial file header
-                    // DBF Header
-                    const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
-                    if (!dataView) {
-                        return state;
-                    }
-                    result.dbfHeader = parseDBFHeader(dataView);
-                    result.progress = {
-                        bytesUsed: 0,
-                        rowsTotal: result.dbfHeader.nRecords,
-                        rows: 0
-                    };
-                    state = STATE.FIELD_DESCRIPTORS;
-                    break;
-                case STATE.FIELD_DESCRIPTORS:
-                    // Parse DBF field descriptors (schema)
-                    const fieldDescriptorView = binaryReader.getDataView(
-                    // @ts-ignore
-                    result.dbfHeader.headerLength - DBF_HEADER_SIZE);
-                    if (!fieldDescriptorView) {
-                        return state;
-                    }
-                    result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
-                    result.schema = {
-                        fields: result.dbfFields.map((dbfField) => makeField(dbfField)),
-                        metadata: {}
-                    };
-                    state = STATE.FIELD_PROPERTIES;
-                    // TODO(kyle) Not exactly sure why start offset needs to be headerLength + 1?
-                    // parsedbf uses ((fields.length + 1) << 5) + 2;
-                    binaryReader.skip(1);
-                    break;
-                case STATE.FIELD_PROPERTIES:
-                    const { recordLength = 0, nRecords = 0 } = result?.dbfHeader || {};
-                    while (result.data.length < nRecords) {
-                        const recordView = binaryReader.getDataView(recordLength - 1);
-                        if (!recordView) {
-                            return state;
-                        }
-                        // Note: Avoid actually reading the last byte, which may not be present
-                        binaryReader.skip(1);
-                        // @ts-ignore
-                        const row = parseRow(recordView, result.dbfFields, textDecoder);
-                        result.data.push(row);
-                        // @ts-ignore
-                        result.progress.rows = result.data.length;
-                    }
-                    state = STATE.END;
-                    break;
-                default:
-                    state = STATE.ERROR;
-                    result.error = `illegal parser state ${state}`;
-                    return state;
-            }
-        }
-        catch (error) {
-            state = STATE.ERROR;
-            result.error = `DBF parsing failed: ${error.message}`;
-            return state;
-        }
-    }
-}
-/**
- * @param headerView
- */
-function parseDBFHeader(headerView) {
-    return {
-        // Last updated date
-        year: headerView.getUint8(1) + 1900,
-        month: headerView.getUint8(2),
-        day: headerView.getUint8(3),
-        // Number of records in data file
-        nRecords: headerView.getUint32(4, LITTLE_ENDIAN),
-        // Length of header in bytes
-        headerLength: headerView.getUint16(8, LITTLE_ENDIAN),
-        // Length of each record
-        recordLength: headerView.getUint16(10, LITTLE_ENDIAN),
-        // Not sure if this is usually set
-        languageDriver: headerView.getUint8(29)
-    };
-}
-/**
- * @param view
- */
-function parseFieldDescriptors(view, textDecoder) {
-    // NOTE: this might overestimate the number of fields if the "Database
-    // Container" container exists and is included in the headerLength
-    const nFields = (view.byteLength - 1) / 32;
-    const fields = [];
-    let offset = 0;
-    for (let i = 0; i < nFields; i++) {
-        const name = textDecoder
-            .decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11))
-            // eslint-disable-next-line no-control-regex
-            .replace(/\u0000/g, '');
-        fields.push({
-            name,
-            dataType: String.fromCharCode(view.getUint8(offset + 11)),
-            fieldLength: view.getUint8(offset + 16),
-            decimal: view.getUint8(offset + 17)
-        });
-        offset += 32;
-    }
-    return fields;
-}
-/*
- * @param {BinaryChunkReader} binaryReader
-function parseRows(binaryReader, fields, nRecords, recordLength, textDecoder) {
-    const rows = [];
-    for (let i = 0; i < nRecords; i++) {
-        const recordView = binaryReader.getDataView(recordLength - 1);
-        binaryReader.skip(1);
-        // @ts-ignore
-        rows.push(parseRow(recordView, fields, textDecoder));
-    }
-    return rows;
-}
-*/
-/**
- *
- * @param view
- * @param fields
- * @param textDecoder
- * @returns
- */
-function parseRow(view, fields, textDecoder) {
-    const out = {};
-    let offset = 0;
-    for (const field of fields) {
-        const text = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength));
-        out[field.name] = parseField(text, field.dataType);
-        offset += field.fieldLength;
-    }
-    return out;
-}
-/**
- * Should NaN be coerced to null?
- * @param text
- * @param dataType
- * @returns Field depends on a type of the data
- */
-function parseField(text, dataType) {
-    switch (dataType) {
-        case 'B':
-            return parseNumber(text);
-        case 'C':
-            return parseCharacter(text);
-        case 'F':
-            return parseNumber(text);
-        case 'N':
-            return parseNumber(text);
-        case 'O':
-            return parseNumber(text);
-        case 'D':
-            return parseDate(text);
-        case 'L':
-            return parseBoolean(text);
-        default:
-            throw new Error('Unsupported data type');
-    }
-}
-/**
- * Parse YYYYMMDD to date in milliseconds
- * @param str YYYYMMDD
- * @returns new Date as a number
- */
-function parseDate(str) {
-    return Date.UTC(str.slice(0, 4), parseInt(str.slice(4, 6), 10) - 1, str.slice(6, 8));
-}
-/**
- * Read boolean value
- * any of Y, y, T, t coerce to true
- * any of N, n, F, f coerce to false
- * otherwise null
- * @param value
- * @returns boolean | null
- */
-function parseBoolean(value) {
-    return /^[nf]$/i.test(value) ? false : /^[yt]$/i.test(value) ? true : null;
-}
-/**
- * Return null instead of NaN
- * @param text
- * @returns number | null
- */
-function parseNumber(text) {
-    const number = parseFloat(text);
-    return isNaN(number) ? null : number;
-}
-/**
- *
- * @param text
- * @returns string | null
- */
-function parseCharacter(text) {
-    return text.trim() || null;
-}
-/**
- * Create a standard Arrow-style `Field` from field descriptor.
- * TODO - use `fieldLength` and `decimal` to generate smaller types?
- * @param param0
- * @returns Field
- */
-// eslint-disable
-function makeField({ name, dataType, fieldLength, decimal }) {
-    switch (dataType) {
-        case 'B':
-            return { name, type: 'float64', nullable: true, metadata: {} };
-        case 'C':
-            return { name, type: 'utf8', nullable: true, metadata: {} };
-        case 'F':
-            return { name, type: 'float64', nullable: true, metadata: {} };
-        case 'N':
-            return { name, type: 'float64', nullable: true, metadata: {} };
-        case 'O':
-            return { name, type: 'float64', nullable: true, metadata: {} };
-        case 'D':
-            return { name, type: 'timestamp-millisecond', nullable: true, metadata: {} };
-        case 'L':
-            return { name, type: 'bool', nullable: true, metadata: {} };
-        default:
-            throw new Error('Unsupported data type');
-    }
-}
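
The deleted parser above drives a small state machine (START → FIELD_DESCRIPTORS → FIELD_PROPERTIES → END) over chunked input, and its per-field decoding rules are self-contained. A standalone sketch of the two less obvious rules, parseDate and parseBoolean, reimplemented here for illustration (with explicit Number() conversions where the original relies on Date.UTC coercing its string arguments):

// DBF 'D' fields are YYYYMMDD strings; months are zero-based in Date.UTC.
function parseDate(str: string): number {
  return Date.UTC(Number(str.slice(0, 4)), Number(str.slice(4, 6)) - 1, Number(str.slice(6, 8)));
}

// DBF 'L' fields: Y/y/T/t -> true, N/n/F/f -> false, anything else -> null.
function parseBoolean(value: string): boolean | null {
  return /^[nf]$/i.test(value) ? false : /^[yt]$/i.test(value) ? true : null;
}

console.log(parseDate('20200131')); // 1580428800000 (2020-01-31T00:00:00.000Z)
console.log(parseBoolean('T'));     // true
console.log(parseBoolean('?'));     // null
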

package/dist/lib/parsers/parse-shapefile.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"parse-shapefile.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-shapefile.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,aAAa,CAAC;AAC3C,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,oBAAoB,CAAC;AAClD,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,0BAA0B,CAAC;AAC5D,OAAO,KAAK,EAAC,sBAAsB,EAAC,MAAM,SAAS,CAAC;AAUpD,UAAU,eAAe;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,SAAS,CAAC;IAChB,MAAM,EAAE,SAAS,CAAC;IAClB,IAAI,EAAE,MAAM,EAAE,CAAC;CAChB;AACD;;GAEG;AAEH,wBAAuB,uBAAuB,CAC5C,aAAa,EAAE,aAAa,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,EACjE,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,aAAa,CAAC,eAAe,CAAC,CAmEhC;AAED;;;;;;;GAOG;AACH,wBAAsB,cAAc,CAClC,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,eAAe,CAAC,CAgC1B;AAwDD;;;;;GAKG;AAEH,wBAAsB,yBAAyB,CAC7C,OAAO,CAAC,EAAE,MAAM,EAChB,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC;IACT,GAAG,CAAC,EAAE,SAAS,CAAC;IAChB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;CACd,CAAC,CAkCD;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,MAAM,CAQ1E"}

package/dist/lib/parsers/parse-shapefile.js
DELETED
@@ -1,244 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.replaceExtension = exports.loadShapefileSidecarFiles = exports.parseShapefile = exports.parseShapefileInBatches = void 0;
-const gis_1 = require("@loaders.gl/gis");
-const proj4_1 = require("@math.gl/proj4");
-const parse_shx_1 = require("./parse-shx");
-const zip_batch_iterators_1 = require("../streaming/zip-batch-iterators");
-const shp_loader_1 = require("../../shp-loader");
-const dbf_loader_1 = require("../../dbf-loader");
-/**
- * Parsing of file in batches
- */
-// eslint-disable-next-line max-statements, complexity
-async function* parseShapefileInBatches(asyncIterator, options, context) {
-    const { reproject = false, _targetCrs = 'WGS84' } = options?.gis || {};
-    const { shx, cpg, prj } = await loadShapefileSidecarFiles(options, context);
-    // parse geometries
-    // @ts-ignore context must be defined
-    const shapeIterable = await context.parseInBatches(asyncIterator, shp_loader_1.SHPLoader, options);
-    // parse properties
-    let propertyIterable;
-    // @ts-ignore context must be defined
-    const dbfResponse = await context.fetch(replaceExtension(context?.url || '', 'dbf'));
-    if (dbfResponse.ok) {
-        // @ts-ignore context must be defined
-        propertyIterable = await context.parseInBatches(dbfResponse, dbf_loader_1.DBFLoader, {
-            ...options,
-            dbf: { encoding: cpg || 'latin1' }
-        });
-    }
-    // When `options.metadata` is `true`, there's an extra initial `metadata`
-    // object before the iterator starts. zipBatchIterators expects to receive
-    // batches of Array objects, and will fail with non-iterable batches, so it's
-    // important to skip over the first batch.
-    let shapeHeader = (await shapeIterable.next()).value;
-    if (shapeHeader && shapeHeader.batchType === 'metadata') {
-        shapeHeader = (await shapeIterable.next()).value;
-    }
-    let dbfHeader = {};
-    if (propertyIterable) {
-        dbfHeader = (await propertyIterable.next()).value;
-        if (dbfHeader && dbfHeader.batchType === 'metadata') {
-            dbfHeader = (await propertyIterable.next()).value;
-        }
-    }
-    let iterator;
-    if (propertyIterable) {
-        iterator = (0, zip_batch_iterators_1.zipBatchIterators)(shapeIterable, propertyIterable);
-    }
-    else {
-        iterator = shapeIterable;
-    }
-    for await (const item of iterator) {
-        let geometries;
-        let properties;
-        if (!propertyIterable) {
-            geometries = item;
-        }
-        else {
-            [geometries, properties] = item;
-        }
-        const geojsonGeometries = parseGeometries(geometries);
-        let features = joinProperties(geojsonGeometries, properties);
-        if (reproject) {
-            // @ts-ignore
-            features = reprojectFeatures(features, prj, _targetCrs);
-        }
-        yield {
-            encoding: cpg,
-            prj,
-            shx,
-            header: shapeHeader,
-            data: features
-        };
-    }
-}
-exports.parseShapefileInBatches = parseShapefileInBatches;
-/**
- * Parse shapefile
- *
- * @param arrayBuffer
- * @param options
- * @param context
- * @returns output of shapefile
- */
-async function parseShapefile(arrayBuffer, options, context) {
-    const { reproject = false, _targetCrs = 'WGS84' } = options?.gis || {};
-    const { shx, cpg, prj } = await loadShapefileSidecarFiles(options, context);
-    // parse geometries
-    // @ts-ignore context must be defined
-    const { header, geometries } = await context.parse(arrayBuffer, shp_loader_1.SHPLoader, options); // {shp: shx}
-    const geojsonGeometries = parseGeometries(geometries);
-    // parse properties
-    let properties = [];
-    // @ts-ignore context must be defined
-    const dbfResponse = await context.fetch(replaceExtension(context.url, 'dbf'));
-    if (dbfResponse.ok) {
-        // @ts-ignore context must be defined
-        properties = await context.parse(dbfResponse, dbf_loader_1.DBFLoader, { dbf: { encoding: cpg || 'latin1' } });
-    }
-    let features = joinProperties(geojsonGeometries, properties);
-    if (reproject) {
-        features = reprojectFeatures(features, prj, _targetCrs);
-    }
-    return {
-        encoding: cpg,
-        prj,
-        shx,
-        header,
-        data: features
-    };
-}
-exports.parseShapefile = parseShapefile;
-/**
- * Parse geometries
- *
- * @param geometries
- * @returns geometries as an array
- */
-function parseGeometries(geometries) {
-    const geojsonGeometries = [];
-    for (const geom of geometries) {
-        geojsonGeometries.push((0, gis_1.binaryToGeometry)(geom));
-    }
-    return geojsonGeometries;
-}
-/**
- * Join properties and geometries into features
- *
- * @param geometries [description]
- * @param properties [description]
- * @return [description]
- */
-function joinProperties(geometries, properties) {
-    const features = [];
-    for (let i = 0; i < geometries.length; i++) {
-        const geometry = geometries[i];
-        const feature = {
-            type: 'Feature',
-            geometry,
-            // properties can be undefined if dbfResponse above was empty
-            properties: (properties && properties[i]) || {}
-        };
-        features.push(feature);
-    }
-    return features;
-}
-/**
- * Reproject GeoJSON features to output CRS
- *
- * @param features parsed GeoJSON features
- * @param sourceCrs source coordinate reference system
- * @param targetCrs target coordinate reference system
- * @return Reprojected Features
- */
-function reprojectFeatures(features, sourceCrs, targetCrs) {
-    if (!sourceCrs && !targetCrs) {
-        return features;
-    }
-    const projection = new proj4_1.Proj4Projection({ from: sourceCrs || 'WGS84', to: targetCrs || 'WGS84' });
-    return (0, gis_1.transformGeoJsonCoords)(features, (coord) => projection.project(coord));
-}
-/**
- *
- * @param options
- * @param context
- * @returns Promise
- */
-// eslint-disable-next-line max-statements
-async function loadShapefileSidecarFiles(options, context) {
-    // Attempt a parallel load of the small sidecar files
-    // @ts-ignore context must be defined
-    const { url, fetch } = context;
-    const shxPromise = fetch(replaceExtension(url, 'shx'));
-    const cpgPromise = fetch(replaceExtension(url, 'cpg'));
-    const prjPromise = fetch(replaceExtension(url, 'prj'));
-    await Promise.all([shxPromise, cpgPromise, prjPromise]);
-    let shx;
-    let cpg;
-    let prj;
-    const shxResponse = await shxPromise;
-    if (shxResponse.ok) {
-        const arrayBuffer = await shxResponse.arrayBuffer();
-        shx = (0, parse_shx_1.parseShx)(arrayBuffer);
-    }
-    const cpgResponse = await cpgPromise;
-    if (cpgResponse.ok) {
-        cpg = await cpgResponse.text();
-    }
-    const prjResponse = await prjPromise;
-    if (prjResponse.ok) {
-        prj = await prjResponse.text();
-    }
-    return {
-        shx,
-        cpg,
-        prj
-    };
-}
-exports.loadShapefileSidecarFiles = loadShapefileSidecarFiles;
-/**
- * Replace the extension at the end of a path.
- *
- * Matches the case of new extension with the case of the original file extension,
- * to increase the chance of finding files without firing off a request storm looking for various case combinations
- *
- * NOTE: Extensions can be both lower and uppercase
- * per spec, extensions should be lower case, but that doesn't mean they always are. See:
- * calvinmetcalf/shapefile-js#64, mapserver/mapserver#4712
- * https://trac.osgeo.org/mapserver/ticket/166
- */
-function replaceExtension(url, newExtension) {
-    const baseName = basename(url);
-    const extension = extname(url);
-    const isUpperCase = extension === extension.toUpperCase();
-    if (isUpperCase) {
-        newExtension = newExtension.toUpperCase();
-    }
-    return `${baseName}.${newExtension}`;
-}
-exports.replaceExtension = replaceExtension;
-// NOTE - this gives the entire path minus extension (i.e. NOT same as path.basename)
-/**
- * @param url
- * @returns string
- */
-function basename(url) {
-    const extIndex = url && url.lastIndexOf('.');
-    if (typeof extIndex === 'number') {
-        return extIndex >= 0 ? url.substr(0, extIndex) : '';
-    }
-    return extIndex;
-}
-/**
- * @param url
- * @returns string
- */
-function extname(url) {
-    const extIndex = url && url.lastIndexOf('.');
-    if (typeof extIndex === 'number') {
-        return extIndex >= 0 ? url.substr(extIndex + 1) : '';
-    }
-    return extIndex;
-}
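
A detail worth noting in the deleted file above: replaceExtension copies the case of the original extension onto the new one, so sidecar requests for ROADS.SHP go to ROADS.DBF rather than ROADS.dbf, avoiding a storm of case-permutation requests. A condensed standalone version for illustration (a hypothetical helper collapsing the basename/extname pair above, not the package's own export):

function replaceExtension(url: string, newExtension: string): string {
  const extIndex = url.lastIndexOf('.');
  const baseName = extIndex >= 0 ? url.slice(0, extIndex) : '';
  const extension = extIndex >= 0 ? url.slice(extIndex + 1) : '';
  // Match the case of the existing extension.
  const ext = extension === extension.toUpperCase() ? newExtension.toUpperCase() : newExtension;
  return `${baseName}.${ext}`;
}

console.log(replaceExtension('tiles/roads.shp', 'dbf')); // tiles/roads.dbf
console.log(replaceExtension('TILES/ROADS.SHP', 'dbf')); // TILES/ROADS.DBF
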

package/dist/lib/parsers/parse-shp-geometry.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"parse-shp-geometry.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-shp-geometry.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,cAAc,EAAqB,MAAM,oBAAoB,CAAC;AACtE,OAAO,EAAC,gBAAgB,EAAC,MAAM,SAAS,CAAC;AAIzC;;;;;GAKG;AAEH,wBAAgB,WAAW,CAAC,IAAI,EAAE,QAAQ,EAAE,OAAO,CAAC,EAAE,gBAAgB,GAAG,cAAc,GAAG,IAAI,CAoD7F"}