@loaders.gl/shapefile 4.0.0-alpha.5 → 4.0.0-alpha.7
This diff compares the published contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/bundle.js +2 -2
- package/dist/dbf-loader.js +29 -20
- package/dist/dbf-worker.js +73 -447
- package/dist/dist.min.js +130 -489
- package/dist/es5/bundle.js +6 -0
- package/dist/es5/bundle.js.map +1 -0
- package/dist/es5/dbf-loader.js +53 -0
- package/dist/es5/dbf-loader.js.map +1 -0
- package/dist/es5/index.js +39 -0
- package/dist/es5/index.js.map +1 -0
- package/dist/es5/lib/parsers/parse-dbf.js +394 -0
- package/dist/es5/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shapefile.js +373 -0
- package/dist/es5/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shp-geometry.js +220 -0
- package/dist/es5/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shp-header.js +35 -0
- package/dist/es5/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shp.js +227 -0
- package/dist/es5/lib/parsers/parse-shp.js.map +1 -0
- package/dist/es5/lib/parsers/parse-shx.js +26 -0
- package/dist/es5/lib/parsers/parse-shx.js.map +1 -0
- package/dist/es5/lib/parsers/types.js +2 -0
- package/dist/es5/lib/parsers/types.js.map +1 -0
- package/dist/es5/lib/streaming/binary-chunk-reader.js +178 -0
- package/dist/es5/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/es5/lib/streaming/binary-reader.js +48 -0
- package/dist/es5/lib/streaming/binary-reader.js.map +1 -0
- package/dist/es5/lib/streaming/zip-batch-iterators.js +91 -0
- package/dist/es5/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/es5/shapefile-loader.js +31 -0
- package/dist/es5/shapefile-loader.js.map +1 -0
- package/dist/es5/shp-loader.js +56 -0
- package/dist/es5/shp-loader.js.map +1 -0
- package/dist/es5/workers/dbf-worker.js +6 -0
- package/dist/es5/workers/dbf-worker.js.map +1 -0
- package/dist/es5/workers/shp-worker.js +6 -0
- package/dist/es5/workers/shp-worker.js.map +1 -0
- package/dist/esm/bundle.js +4 -0
- package/dist/esm/bundle.js.map +1 -0
- package/dist/esm/dbf-loader.js +24 -0
- package/dist/esm/dbf-loader.js.map +1 -0
- package/dist/esm/index.js +4 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/lib/parsers/parse-dbf.js +296 -0
- package/dist/esm/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shapefile.js +187 -0
- package/dist/esm/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shp-geometry.js +191 -0
- package/dist/esm/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shp-header.js +29 -0
- package/dist/esm/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shp.js +134 -0
- package/dist/esm/lib/parsers/parse-shp.js.map +1 -0
- package/dist/esm/lib/parsers/parse-shx.js +20 -0
- package/dist/esm/lib/parsers/parse-shx.js.map +1 -0
- package/dist/esm/lib/parsers/types.js +2 -0
- package/dist/esm/lib/parsers/types.js.map +1 -0
- package/dist/esm/lib/streaming/binary-chunk-reader.js +106 -0
- package/dist/esm/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/esm/lib/streaming/binary-reader.js +27 -0
- package/dist/esm/lib/streaming/binary-reader.js.map +1 -0
- package/dist/esm/lib/streaming/zip-batch-iterators.js +44 -0
- package/dist/esm/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/esm/shapefile-loader.js +23 -0
- package/dist/esm/shapefile-loader.js.map +1 -0
- package/dist/esm/shp-loader.js +26 -0
- package/dist/esm/shp-loader.js.map +1 -0
- package/dist/esm/workers/dbf-worker.js +4 -0
- package/dist/esm/workers/dbf-worker.js.map +1 -0
- package/dist/esm/workers/shp-worker.js +4 -0
- package/dist/esm/workers/shp-worker.js.map +1 -0
- package/dist/index.js +11 -4
- package/dist/lib/parsers/parse-dbf.d.ts +4 -18
- package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
- package/dist/lib/parsers/parse-dbf.js +309 -264
- package/dist/lib/parsers/parse-shapefile.d.ts +3 -8
- package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shapefile.js +227 -209
- package/dist/lib/parsers/parse-shp-geometry.d.ts +2 -3
- package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp-geometry.js +265 -212
- package/dist/lib/parsers/parse-shp-header.js +38 -27
- package/dist/lib/parsers/parse-shp.d.ts +3 -2
- package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp.js +160 -136
- package/dist/lib/parsers/parse-shx.js +25 -19
- package/dist/lib/parsers/types.d.ts +68 -0
- package/dist/lib/parsers/types.d.ts.map +1 -0
- package/dist/lib/parsers/types.js +2 -0
- package/dist/lib/streaming/binary-chunk-reader.d.ts +5 -3
- package/dist/lib/streaming/binary-chunk-reader.d.ts.map +1 -1
- package/dist/lib/streaming/binary-chunk-reader.js +152 -128
- package/dist/lib/streaming/binary-reader.js +50 -33
- package/dist/lib/streaming/zip-batch-iterators.js +57 -48
- package/dist/shapefile-loader.js +30 -22
- package/dist/shp-loader.js +32 -22
- package/dist/shp-worker.js +57 -19
- package/dist/workers/dbf-worker.js +5 -4
- package/dist/workers/shp-worker.js +5 -4
- package/package.json +7 -7
- package/src/lib/parsers/parse-dbf.ts +41 -67
- package/src/lib/parsers/parse-shapefile.ts +3 -6
- package/src/lib/parsers/parse-shp-geometry.ts +3 -2
- package/src/lib/parsers/parse-shp.ts +26 -12
- package/src/lib/parsers/types.ts +79 -0
- package/src/lib/streaming/binary-chunk-reader.ts +5 -1
- package/src/lib/streaming/zip-batch-iterators.ts +2 -2
- package/dist/bundle.js.map +0 -1
- package/dist/dbf-loader.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/lib/parsers/parse-shp.js.map +0 -1
- package/dist/lib/parsers/parse-shx.js.map +0 -1
- package/dist/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/lib/streaming/binary-reader.js.map +0 -1
- package/dist/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/shapefile-loader.js.map +0 -1
- package/dist/shp-loader.js.map +0 -1
- package/dist/workers/dbf-worker.js.map +0 -1
- package/dist/workers/shp-worker.js.map +0 -1
package/dist/lib/parsers/parse-dbf.js
@@ -1,299 +1,344 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseDBFInBatches = exports.parseDBF = void 0;
+const binary_chunk_reader_1 = __importDefault(require("../streaming/binary-chunk-reader"));
 const LITTLE_ENDIAN = true;
 const DBF_HEADER_SIZE = 32;
 var STATE;
 (function (STATE) {
+    STATE[STATE["START"] = 0] = "START";
+    STATE[STATE["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
+    STATE[STATE["FIELD_PROPERTIES"] = 2] = "FIELD_PROPERTIES";
+    STATE[STATE["END"] = 3] = "END";
+    STATE[STATE["ERROR"] = 4] = "ERROR";
 })(STATE || (STATE = {}));
 class DBFParser {
+    constructor(options) {
+        this.binaryReader = new binary_chunk_reader_1.default();
+        this.state = STATE.START;
+        this.result = {
+            data: []
+        };
+        this.textDecoder = new TextDecoder(options.encoding);
+    }
+    /**
+     * @param arrayBuffer
+     */
+    write(arrayBuffer) {
+        this.binaryReader.write(arrayBuffer);
+        this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
+        // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
+        // important events:
+        // - schema available
+        // - first rows available
+        // - all rows available
+    }
+    end() {
+        this.binaryReader.end();
+        this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
+        // this.result.progress.bytesUsed = this.binaryReader.bytesUsed();
+        if (this.state !== STATE.END) {
+            this.state = STATE.ERROR;
+            this.result.error = 'DBF incomplete file';
+        }
     }
-    }
 }
+/**
+ * @param arrayBuffer
+ * @param options
+ * @returns DBFTable or rows
+ */
+function parseDBF(arrayBuffer, options = {}) {
+    const { encoding = 'latin1' } = options.dbf || {};
+    const dbfParser = new DBFParser({ encoding });
+    dbfParser.write(arrayBuffer);
+    dbfParser.end();
+    const { data, schema } = dbfParser.result;
+    const shape = options?.tables?.format || options?.dbf?.shape;
+    switch (shape) {
+        case 'object-row-table': {
+            const table = {
+                shape: 'object-row-table',
+                schema,
+                data
+            };
+            return table;
+        }
+        case 'table':
+            return { schema, rows: data };
+        case 'rows':
+        default:
+            return data;
+    }
 }
+exports.parseDBF = parseDBF;
+/**
+ * @param asyncIterator
+ * @param options
+ */
+async function* parseDBFInBatches(asyncIterator, options = {}) {
+    const { encoding = 'latin1' } = options.dbf || {};
+    const parser = new DBFParser({ encoding });
+    let headerReturned = false;
+    for await (const arrayBuffer of asyncIterator) {
+        parser.write(arrayBuffer);
+        if (!headerReturned && parser.result.dbfHeader) {
+            headerReturned = true;
+            yield parser.result.dbfHeader;
+        }
+        if (parser.result.data.length > 0) {
+            yield parser.result.data;
+            parser.result.data = [];
+        }
     }
+    parser.end();
     if (parser.result.data.length > 0) {
-        parser.result.data = [];
+        yield parser.result.data;
     }
-    }
-    parser.end();
-    if (parser.result.data.length > 0) {
-        yield parser.result.data;
-    }
 }
+exports.parseDBFInBatches = parseDBFInBatches;
+/**
+ * https://www.dbase.com/Knowledgebase/INT/db7_file_fmt.htm
+ * @param state
+ * @param result
+ * @param binaryReader
+ * @param textDecoder
+ * @returns
+ */
+/* eslint-disable complexity, max-depth */
 function parseState(state, result, binaryReader, textDecoder) {
+    // eslint-disable-next-line no-constant-condition
+    while (true) {
+        try {
+            switch (state) {
+                case STATE.ERROR:
+                case STATE.END:
+                    return state;
+                case STATE.START:
+                    // Parse initial file header
+                    // DBF Header
+                    const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
+                    if (!dataView) {
+                        return state;
+                    }
+                    result.dbfHeader = parseDBFHeader(dataView);
+                    result.progress = {
+                        bytesUsed: 0,
+                        rowsTotal: result.dbfHeader.nRecords,
+                        rows: 0
+                    };
+                    state = STATE.FIELD_DESCRIPTORS;
+                    break;
+                case STATE.FIELD_DESCRIPTORS:
+                    // Parse DBF field descriptors (schema)
+                    const fieldDescriptorView = binaryReader.getDataView(
+                    // @ts-ignore
+                    result.dbfHeader.headerLength - DBF_HEADER_SIZE);
+                    if (!fieldDescriptorView) {
+                        return state;
+                    }
+                    result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
+                    result.schema = {
+                        fields: result.dbfFields.map((dbfField) => makeField(dbfField)),
+                        metadata: {}
+                    };
+                    state = STATE.FIELD_PROPERTIES;
+                    // TODO(kyle) Not exactly sure why start offset needs to be headerLength + 1?
+                    // parsedbf uses ((fields.length + 1) << 5) + 2;
+                    binaryReader.skip(1);
+                    break;
+                case STATE.FIELD_PROPERTIES:
+                    const { recordLength = 0, nRecords = 0 } = result?.dbfHeader || {};
+                    while (result.data.length < nRecords) {
+                        const recordView = binaryReader.getDataView(recordLength - 1);
+                        if (!recordView) {
+                            return state;
+                        }
+                        // Note: Avoid actually reading the last byte, which may not be present
+                        binaryReader.skip(1);
+                        // @ts-ignore
+                        const row = parseRow(recordView, result.dbfFields, textDecoder);
+                        result.data.push(row);
+                        // @ts-ignore
+                        result.progress.rows = result.data.length;
+                    }
+                    state = STATE.END;
+                    break;
+                default:
+                    state = STATE.ERROR;
+                    result.error = `illegal parser state ${state}`;
+                    return state;
             }
-            state = STATE.END;
-            break;
-            default:
-            state = STATE.ERROR;
-            result.error = "illegal parser state ".concat(state);
-            return state;
-        }
-    } catch (error) {
-        state = STATE.ERROR;
-        result.error = "DBF parsing failed: ".concat(error.message);
-        return state;
+        }
+        catch (error) {
+            state = STATE.ERROR;
+            result.error = `DBF parsing failed: ${error.message}`;
+            return state;
+        }
     }
-    }
 }
+/**
+ * @param headerView
+ */
 function parseDBFHeader(headerView) {
+    return {
+        // Last updated date
+        year: headerView.getUint8(1) + 1900,
+        month: headerView.getUint8(2),
+        day: headerView.getUint8(3),
+        // Number of records in data file
+        nRecords: headerView.getUint32(4, LITTLE_ENDIAN),
+        // Length of header in bytes
+        headerLength: headerView.getUint16(8, LITTLE_ENDIAN),
+        // Length of each record
+        recordLength: headerView.getUint16(10, LITTLE_ENDIAN),
+        // Not sure if this is usually set
+        languageDriver: headerView.getUint8(29)
+    };
 }
+/**
+ * @param view
+ */
 function parseFieldDescriptors(view, textDecoder) {
+    // NOTE: this might overestimate the number of fields if the "Database
+    // Container" container exists and is included in the headerLength
+    const nFields = (view.byteLength - 1) / 32;
+    const fields = [];
+    let offset = 0;
+    for (let i = 0; i < nFields; i++) {
+        const name = textDecoder
+            .decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11))
+            // eslint-disable-next-line no-control-regex
+            .replace(/\u0000/g, '');
+        fields.push({
+            name,
+            dataType: String.fromCharCode(view.getUint8(offset + 11)),
+            fieldLength: view.getUint8(offset + 16),
+            decimal: view.getUint8(offset + 17)
+        });
+        offset += 32;
+    }
+    return fields;
+}
+/*
+ * @param {BinaryChunkReader} binaryReader
+function parseRows(binaryReader, fields, nRecords, recordLength, textDecoder) {
+    const rows = [];
+    for (let i = 0; i < nRecords; i++) {
+        const recordView = binaryReader.getDataView(recordLength - 1);
+        binaryReader.skip(1);
+        // @ts-ignore
+        rows.push(parseRow(recordView, fields, textDecoder));
     }
-    return fields;
+    return rows;
 }
+*/
+/**
+ *
+ * @param view
+ * @param fields
+ * @param textDecoder
+ * @returns
+ */
 function parseRow(view, fields, textDecoder) {
-    return out;
+    const out = {};
+    let offset = 0;
+    for (const field of fields) {
+        const text = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength));
+        out[field.name] = parseField(text, field.dataType);
+        offset += field.fieldLength;
+    }
+    return out;
 }
+/**
+ * Should NaN be coerced to null?
+ * @param text
+ * @param dataType
+ * @returns Field depends on a type of the data
+ */
 function parseField(text, dataType) {
-        case 'L':
-            return parseBoolean(text);
-        default:
-            throw new Error('Unsupported data type');
-    }
+    switch (dataType) {
+        case 'B':
+            return parseNumber(text);
+        case 'C':
+            return parseCharacter(text);
+        case 'F':
+            return parseNumber(text);
+        case 'N':
+            return parseNumber(text);
+        case 'O':
+            return parseNumber(text);
+        case 'D':
+            return parseDate(text);
+        case 'L':
+            return parseBoolean(text);
+        default:
+            throw new Error('Unsupported data type');
+    }
 }
+/**
+ * Parse YYYYMMDD to date in milliseconds
+ * @param str YYYYMMDD
+ * @returns new Date as a number
+ */
 function parseDate(str) {
+    return Date.UTC(str.slice(0, 4), parseInt(str.slice(4, 6), 10) - 1, str.slice(6, 8));
 }
+/**
+ * Read boolean value
+ * any of Y, y, T, t coerce to true
+ * any of N, n, F, f coerce to false
+ * otherwise null
+ * @param value
+ * @returns boolean | null
+ */
 function parseBoolean(value) {
+    return /^[nf]$/i.test(value) ? false : /^[yt]$/i.test(value) ? true : null;
 }
+/**
+ * Return null instead of NaN
+ * @param text
+ * @returns number | null
+ */
 function parseNumber(text) {
+    const number = parseFloat(text);
+    return isNaN(number) ? null : number;
 }
+/**
+ *
+ * @param text
+ * @returns string | null
+ */
 function parseCharacter(text) {
+    return text.trim() || null;
 }
-        case 'L':
-            return new Field(name, new Bool(), true);
-        default:
-            throw new Error('Unsupported data type');
-    }
+/**
+ * Create a standard Arrow-style `Field` from field descriptor.
+ * TODO - use `fieldLength` and `decimal` to generate smaller types?
+ * @param param0
+ * @returns Field
+ */
+// eslint-disable
+function makeField({ name, dataType, fieldLength, decimal }) {
+    switch (dataType) {
+        case 'B':
+            return { name, type: 'float64', nullable: true, metadata: {} };
+        case 'C':
+            return { name, type: 'utf8', nullable: true, metadata: {} };
+        case 'F':
+            return { name, type: 'float64', nullable: true, metadata: {} };
+        case 'N':
+            return { name, type: 'float64', nullable: true, metadata: {} };
+        case 'O':
+            return { name, type: 'float64', nullable: true, metadata: {} };
+        case 'D':
+            return { name, type: 'timestamp-millisecond', nullable: true, metadata: {} };
+        case 'L':
+            return { name, type: 'bool', nullable: true, metadata: {} };
+        default:
+            throw new Error('Unsupported data type');
+    }
 }
-//# sourceMappingURL=parse-dbf.js.map
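For orientation, a minimal usage sketch of the two entry points rewritten above. The deep import path and the `chunks` iterable are illustrative assumptions; the function names, the option keys (`dbf.encoding`, `dbf.shape`, `tables.format`) and the yield order come from the hunk itself:

    // Sketch only: imports straight from the dist file diffed above; the
    // package index may or may not re-export these functions.
    import {parseDBF, parseDBFInBatches} from '@loaders.gl/shapefile/dist/lib/parsers/parse-dbf';

    function parseWhole(arrayBuffer: ArrayBuffer) {
      // Default shape ('rows'): a plain array of row objects
      const rows = parseDBF(arrayBuffer);
      // 'object-row-table' shape: {shape, schema, data}
      const table = parseDBF(arrayBuffer, {dbf: {shape: 'object-row-table'}});
      return {rows, table};
    }

    async function parseStreaming(chunks: AsyncIterable<ArrayBuffer>) {
      // First yield is the parsed DBF header, then arrays of rows as they
      // complete; a trailing partial batch is flushed after end().
      for await (const batch of parseDBFInBatches(chunks, {dbf: {encoding: 'utf-8'}})) {
        console.log(batch);
      }
    }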
package/dist/lib/parsers/parse-shapefile.d.ts
@@ -1,6 +1,7 @@
 import type { SHXOutput } from './parse-shx';
 import type { SHPHeader } from './parse-shp-header';
 import type { LoaderContext } from '@loaders.gl/loader-utils';
+import type { ShapefileLoaderOptions } from './types';
 interface ShapefileOutput {
     encoding?: string;
     prj?: string;
@@ -10,12 +11,8 @@ interface ShapefileOutput {
 }
 /**
  * Parsing of file in batches
- *
- * @param asyncIterator
- * @param options
- * @param context
  */
-export declare function parseShapefileInBatches(asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>, options?:
+export declare function parseShapefileInBatches(asyncIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>, options?: ShapefileLoaderOptions, context?: LoaderContext): AsyncIterable<ShapefileOutput>;
 /**
  * Parse shapefile
  *
@@ -24,9 +21,7 @@ export declare function parseShapefileInBatches(asyncIterator: AsyncIterable<Arr
  * @param context
  * @returns output of shapefile
  */
-export declare function parseShapefile(arrayBuffer: ArrayBuffer, options?:
-    [key: string]: any;
-}, context?: LoaderContext): Promise<ShapefileOutput>;
+export declare function parseShapefile(arrayBuffer: ArrayBuffer, options?: ShapefileLoaderOptions, context?: LoaderContext): Promise<ShapefileOutput>;
 /**
  *
  * @param options
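A corresponding call-site sketch for the tightened declarations above: `ShapefileLoaderOptions` replaces the former `{[key: string]: any}` bag, so misspelled option keys fail type-checking instead of being silently ignored. The import path and the `toChunks` helper are hypothetical:

    // Sketch only: module path and chunking helper are assumptions.
    import {parseShapefile, parseShapefileInBatches} from '@loaders.gl/shapefile/dist/lib/parsers/parse-shapefile';

    // Feed the batched variant fixed-size slices of a buffer
    async function* toChunks(buffer: ArrayBuffer, size = 64 * 1024): AsyncIterable<ArrayBuffer> {
      for (let offset = 0; offset < buffer.byteLength; offset += size) {
        yield buffer.slice(offset, Math.min(offset + size, buffer.byteLength));
      }
    }

    async function run(buffer: ArrayBuffer) {
      // Whole-file parse; the result is a ShapefileOutput as declared above
      const output = await parseShapefile(buffer);
      console.log(output.encoding, output.prj);
      // Batched parse over an async iterable of ArrayBuffers
      for await (const batch of parseShapefileInBatches(toChunks(buffer))) {
        console.log(batch);
      }
    }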
package/dist/lib/parsers/parse-shapefile.d.ts.map
@@ -1 +1 @@
-{"version":3,"file":"parse-shapefile.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-shapefile.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,aAAa,CAAC;AAC3C,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,oBAAoB,CAAC;AAClD,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,0BAA0B,CAAC;
+{"version":3,"file":"parse-shapefile.d.ts","sourceRoot":"","sources":["../../../src/lib/parsers/parse-shapefile.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,aAAa,CAAC;AAC3C,OAAO,KAAK,EAAC,SAAS,EAAC,MAAM,oBAAoB,CAAC;AAClD,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,0BAA0B,CAAC;AAC5D,OAAO,KAAK,EAAC,sBAAsB,EAAC,MAAM,SAAS,CAAC;AAUpD,UAAU,eAAe;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,SAAS,CAAC;IAChB,MAAM,EAAE,SAAS,CAAC;IAClB,IAAI,EAAE,MAAM,EAAE,CAAC;CAChB;AACD;;GAEG;AAEH,wBAAuB,uBAAuB,CAC5C,aAAa,EAAE,aAAa,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAC,WAAW,CAAC,EACjE,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,aAAa,CAAC,eAAe,CAAC,CAmEhC;AAED;;;;;;;GAOG;AACH,wBAAsB,cAAc,CAClC,WAAW,EAAE,WAAW,EACxB,OAAO,CAAC,EAAE,sBAAsB,EAChC,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC,eAAe,CAAC,CAgC1B;AAwDD;;;;;GAKG;AAEH,wBAAsB,yBAAyB,CAC7C,OAAO,CAAC,EAAE,MAAM,EAChB,OAAO,CAAC,EAAE,aAAa,GACtB,OAAO,CAAC;IACT,GAAG,CAAC,EAAE,SAAS,CAAC;IAChB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;CACd,CAAC,CAkCD;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,gBAAgB,CAAC,GAAG,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,MAAM,CAQ1E"}
|