@loaders.gl/shapefile 4.0.0-beta.2 → 4.0.0-beta.3
This diff shows the contents of publicly released package versions as published to the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/dist/dbf-loader.d.ts.map +1 -0
- package/dist/{esm/dbf-loader.js → dbf-loader.js} +2 -2
- package/dist/dbf-loader.js.map +1 -0
- package/dist/dbf-worker.js +82 -26
- package/dist/{dist.min.js → dist.dev.js} +1449 -1973
- package/dist/index.cjs +1107 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +7 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/parsers/parse-dbf.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-dbf.js +8 -9
- package/dist/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shapefile.js +5 -5
- package/dist/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/lib/parsers/parse-shp-geometry.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shp-geometry.js +1 -1
- package/dist/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/lib/parsers/parse-shp-header.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shp-header.js +2 -2
- package/dist/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/lib/parsers/parse-shp.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shp.js +11 -12
- package/dist/lib/parsers/parse-shp.js.map +1 -0
- package/dist/lib/parsers/parse-shx.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/parsers/parse-shx.js +1 -1
- package/dist/lib/parsers/parse-shx.js.map +1 -0
- package/dist/lib/parsers/types.d.ts.map +1 -0
- package/dist/lib/parsers/types.js.map +1 -0
- package/dist/lib/streaming/binary-chunk-reader.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/streaming/binary-chunk-reader.js +4 -5
- package/dist/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/lib/streaming/binary-reader.d.ts.map +1 -0
- package/dist/{esm/lib → lib}/streaming/binary-reader.js +2 -3
- package/dist/lib/streaming/binary-reader.js.map +1 -0
- package/dist/lib/streaming/zip-batch-iterators.d.ts.map +1 -0
- package/dist/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/shapefile-loader.d.ts.map +1 -0
- package/dist/{esm/shapefile-loader.js → shapefile-loader.js} +3 -3
- package/dist/shapefile-loader.js.map +1 -0
- package/dist/shp-loader.d.ts.map +1 -0
- package/dist/{esm/shp-loader.js → shp-loader.js} +2 -2
- package/dist/shp-loader.js.map +1 -0
- package/dist/shp-worker.js +64 -3
- package/dist/{src/workers → workers}/dbf-worker.d.ts.map +1 -1
- package/dist/{esm/workers → workers}/dbf-worker.js +1 -1
- package/dist/workers/dbf-worker.js.map +1 -0
- package/dist/{src/workers → workers}/shp-worker.d.ts.map +1 -1
- package/dist/{esm/workers → workers}/shp-worker.js +1 -1
- package/dist/workers/shp-worker.js.map +1 -0
- package/package.json +18 -10
- package/dist/es5/bundle.js +0 -6
- package/dist/es5/bundle.js.map +0 -1
- package/dist/es5/dbf-loader.js +0 -55
- package/dist/es5/dbf-loader.js.map +0 -1
- package/dist/es5/index.js +0 -60
- package/dist/es5/index.js.map +0 -1
- package/dist/es5/lib/parsers/parse-dbf.js +0 -394
- package/dist/es5/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shapefile.js +0 -377
- package/dist/es5/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shp-geometry.js +0 -220
- package/dist/es5/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shp-header.js +0 -35
- package/dist/es5/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shp.js +0 -227
- package/dist/es5/lib/parsers/parse-shp.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shx.js +0 -26
- package/dist/es5/lib/parsers/parse-shx.js.map +0 -1
- package/dist/es5/lib/parsers/types.js +0 -2
- package/dist/es5/lib/parsers/types.js.map +0 -1
- package/dist/es5/lib/streaming/binary-chunk-reader.js +0 -178
- package/dist/es5/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/es5/lib/streaming/binary-reader.js +0 -48
- package/dist/es5/lib/streaming/binary-reader.js.map +0 -1
- package/dist/es5/lib/streaming/zip-batch-iterators.js +0 -95
- package/dist/es5/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/es5/shapefile-loader.js +0 -29
- package/dist/es5/shapefile-loader.js.map +0 -1
- package/dist/es5/shp-loader.js +0 -58
- package/dist/es5/shp-loader.js.map +0 -1
- package/dist/es5/workers/dbf-worker.js +0 -6
- package/dist/es5/workers/dbf-worker.js.map +0 -1
- package/dist/es5/workers/shp-worker.js +0 -6
- package/dist/es5/workers/shp-worker.js.map +0 -1
- package/dist/esm/bundle.js +0 -4
- package/dist/esm/bundle.js.map +0 -1
- package/dist/esm/dbf-loader.js.map +0 -1
- package/dist/esm/index.js +0 -7
- package/dist/esm/index.js.map +0 -1
- package/dist/esm/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shp.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shx.js.map +0 -1
- package/dist/esm/lib/parsers/types.js.map +0 -1
- package/dist/esm/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/esm/lib/streaming/binary-reader.js.map +0 -1
- package/dist/esm/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/esm/shapefile-loader.js.map +0 -1
- package/dist/esm/shp-loader.js.map +0 -1
- package/dist/esm/workers/dbf-worker.js.map +0 -1
- package/dist/esm/workers/shp-worker.js.map +0 -1
- package/dist/src/bundle.d.ts +0 -2
- package/dist/src/bundle.d.ts.map +0 -1
- package/dist/src/dbf-loader.d.ts.map +0 -1
- package/dist/src/index.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-dbf.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shapefile.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shp-geometry.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shp-header.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shp.d.ts.map +0 -1
- package/dist/src/lib/parsers/parse-shx.d.ts.map +0 -1
- package/dist/src/lib/parsers/types.d.ts.map +0 -1
- package/dist/src/lib/streaming/binary-chunk-reader.d.ts.map +0 -1
- package/dist/src/lib/streaming/binary-reader.d.ts.map +0 -1
- package/dist/src/lib/streaming/zip-batch-iterators.d.ts.map +0 -1
- package/dist/src/shapefile-loader.d.ts.map +0 -1
- package/dist/src/shp-loader.d.ts.map +0 -1
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/src/bundle.ts +0 -4
- /package/dist/{src/dbf-loader.d.ts → dbf-loader.d.ts} +0 -0
- /package/dist/{src/index.d.ts → index.d.ts} +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-dbf.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shapefile.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shp-geometry.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shp-header.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shp.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/parse-shx.d.ts +0 -0
- /package/dist/{src/lib → lib}/parsers/types.d.ts +0 -0
- /package/dist/{esm/lib → lib}/parsers/types.js +0 -0
- /package/dist/{src/lib → lib}/streaming/binary-chunk-reader.d.ts +0 -0
- /package/dist/{src/lib → lib}/streaming/binary-reader.d.ts +0 -0
- /package/dist/{src/lib → lib}/streaming/zip-batch-iterators.d.ts +0 -0
- /package/dist/{esm/lib → lib}/streaming/zip-batch-iterators.js +0 -0
- /package/dist/{src/shapefile-loader.d.ts → shapefile-loader.d.ts} +0 -0
- /package/dist/{src/shp-loader.d.ts → shp-loader.d.ts} +0 -0
- /package/dist/{src/workers → workers}/dbf-worker.d.ts +0 -0
- /package/dist/{src/workers → workers}/shp-worker.d.ts +0 -0
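Taken together, the file list shows the main packaging change in this release: the parallel dist/es5, dist/esm and dist/src output trees are gone, replaced by a single flat dist/ tree of ES module .js files with .d.ts/.d.ts.map files alongside them, plus a new bundled dist/index.cjs entry point. Consumers normally resolve the package through its package.json entry points rather than deep dist/ paths, so typical usage should be unaffected. A rough usage sketch (not part of this diff, assuming the usual load API from @loaders.gl/core):

  import {load} from '@loaders.gl/core';
  import {ShapefileLoader, DBFLoader} from '@loaders.gl/shapefile';

  async function readData() {
    // ShapefileLoader also fetches the sidecar .shx/.dbf/.prj files next to the .shp
    const shapefile = await load('data/parcels.shp', ShapefileLoader);
    // DBFLoader parses a standalone .dbf attribute table
    const dbf = await load('data/parcels.dbf', DBFLoader);
    return {shapefile, dbf};
  }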
package/dist/dbf-loader.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"dbf-loader.d.ts","sourceRoot":"","sources":["../src/dbf-loader.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAC,MAAM,EAAE,gBAAgB,EAAC,MAAM,0BAA0B,CAAC;AAOvE;;GAEG;AACH,eAAO,MAAM,eAAe,EAAE,MAc7B,CAAC;AAEF,sBAAsB;AACtB,eAAO,MAAM,SAAS,EAAE,gBAOvB,CAAC"}
package/dist/{esm/dbf-loader.js → dbf-loader.js}
RENAMED
@@ -1,5 +1,5 @@
-import { parseDBF, parseDBFInBatches } from
-const VERSION = typeof
+import { parseDBF, parseDBFInBatches } from "./lib/parsers/parse-dbf.js";
+const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
 export const DBFWorkerLoader = {
   name: 'DBF',
   id: 'dbf',
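Two details in the dbf-loader.js hunk above stand out: import specifiers now carry an explicit ./lib/parsers/parse-dbf.js extension, as Node's ESM resolver requires, and the ESM output keeps the bare __VERSION__ identifier so it can be substituted at build time (the source comments attribute this to babel-plugin-version-inline, and the prebuilt workers below already have it inlined as "4.0.0-beta.3"). As a hypothetical illustration only, not the actual loaders.gl build script, an equivalent substitution with esbuild's define option could look like:

  import {build} from 'esbuild';

  await build({
    entryPoints: ['src/dbf-loader.ts'],
    outdir: 'dist',
    format: 'esm',
    // Replace the bare __VERSION__ global with a string literal at build time
    define: {__VERSION__: JSON.stringify('4.0.0-beta.3')}
  });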
package/dist/dbf-loader.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"dbf-loader.js","names":["parseDBF","parseDBFInBatches","VERSION","__VERSION__","DBFWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","options","dbf","encoding","DBFLoader","parse","arrayBuffer","parseSync","parseInBatches","arrayBufferIterator"],"sources":["../src/dbf-loader.ts"],"sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseDBF, parseDBFInBatches} from './lib/parsers/parse-dbf';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles\n */\nexport const DBFWorkerLoader: Loader = {\n  name: 'DBF',\n  id: 'dbf',\n  module: 'shapefile',\n  version: VERSION,\n  worker: true,\n  category: 'table',\n  extensions: ['dbf'],\n  mimeTypes: ['application/x-dbf'],\n  options: {\n    dbf: {\n      encoding: 'latin1'\n    }\n  }\n};\n\n/** DBF file loader */\nexport const DBFLoader: LoaderWithParser = {\n  ...DBFWorkerLoader,\n  parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),\n  parseSync: parseDBF,\n  parseInBatches(arrayBufferIterator: AsyncIterable<ArrayBuffer> | Iterable<ArrayBuffer>, options) {\n    return parseDBFInBatches(arrayBufferIterator, options);\n  }\n};\n"],"mappings":"SACQA,QAAQ,EAAEC,iBAAiB;AAInC,MAAMC,OAAO,GAAG,OAAOC,WAAW,KAAK,WAAW,GAAGA,WAAW,GAAG,QAAQ;AAK3E,OAAO,MAAMC,eAAuB,GAAG;EACrCC,IAAI,EAAE,KAAK;EACXC,EAAE,EAAE,KAAK;EACTC,MAAM,EAAE,WAAW;EACnBC,OAAO,EAAEN,OAAO;EAChBO,MAAM,EAAE,IAAI;EACZC,QAAQ,EAAE,OAAO;EACjBC,UAAU,EAAE,CAAC,KAAK,CAAC;EACnBC,SAAS,EAAE,CAAC,mBAAmB,CAAC;EAChCC,OAAO,EAAE;IACPC,GAAG,EAAE;MACHC,QAAQ,EAAE;IACZ;EACF;AACF,CAAC;AAGD,OAAO,MAAMC,SAA2B,GAAG;EACzC,GAAGZ,eAAe;EAClBa,KAAK,EAAE,MAAAA,CAAOC,WAAW,EAAEL,OAAO,KAAKb,QAAQ,CAACkB,WAAW,EAAEL,OAAO,CAAC;EACrEM,SAAS,EAAEnB,QAAQ;EACnBoB,cAAcA,CAACC,mBAAuE,EAAER,OAAO,EAAE;IAC/F,OAAOZ,iBAAiB,CAACoB,mBAAmB,EAAER,OAAO,CAAC;EACxD;AACF,CAAC"}
package/dist/dbf-worker.js
CHANGED
@@ -1,3 +1,4 @@
+"use strict";
 (() => {
   // src/lib/streaming/binary-chunk-reader.ts
   var BinaryChunkReader = class {
@@ -8,6 +9,9 @@
       this.ended = false;
       this.maxRewindBytes = maxRewindBytes;
     }
+    /**
+     * @param arrayBuffer
+     */
     write(arrayBuffer) {
       this.arrayBuffers.push(arrayBuffer);
     }
@@ -15,6 +19,12 @@
       this.arrayBuffers = [];
       this.ended = true;
     }
+    /**
+     * Has enough bytes available in array buffers
+     *
+     * @param bytes Number of bytes
+     * @return boolean
+     */
     hasAvailableBytes(bytes) {
       let bytesAvailable = -this.offset;
       for (const arrayBuffer of this.arrayBuffers) {
@@ -25,6 +35,12 @@
       }
       return false;
     }
+    /**
+     * Find offsets of byte ranges within this.arrayBuffers
+     *
+     * @param bytes Byte length to read
+     * @return Arrays with byte ranges pointing to this.arrayBuffers, Output type is nested array, e.g. [ [0, [1, 2]], ...]
+     */
     findBufferOffsets(bytes) {
       let offset = -this.offset;
       const selectedBuffers = [];
@@ -48,6 +64,12 @@
       }
       return null;
     }
+    /**
+     * Get the required number of bytes from the iterator
+     *
+     * @param bytes Number of bytes
+     * @return DataView with data
+     */
     getDataView(bytes) {
       const bufferOffsets = this.findBufferOffsets(bytes);
       if (!bufferOffsets && this.ended) {
@@ -69,12 +91,25 @@
       this.disposeBuffers();
       return view;
     }
+    /**
+     * Dispose of old array buffers
+     */
     disposeBuffers() {
       while (this.arrayBuffers.length > 0 && this.offset - this.maxRewindBytes >= this.arrayBuffers[0].byteLength) {
         this.offset -= this.arrayBuffers[0].byteLength;
         this.arrayBuffers.shift();
       }
     }
+    /**
+     * Copy multiple ArrayBuffers into one contiguous ArrayBuffer
+     *
+     * In contrast to concatenateArrayBuffers, this only copies the necessary
+     * portions of the source arrays, rather than first copying the entire arrays
+     * then taking a part of them.
+     *
+     * @param bufferOffsets List of internal array offsets
+     * @return New contiguous ArrayBuffer
+     */
     _combineArrayBuffers(bufferOffsets) {
       let byteLength = 0;
       for (const bufferOffset of bufferOffsets) {
@@ -91,9 +126,15 @@
       }
       return result.buffer;
     }
+    /**
+     * @param bytes
+     */
     skip(bytes) {
       this.offset += bytes;
     }
+    /**
+     * @param bytes
+     */
     rewind(bytes) {
       this.offset -= bytes;
     }
@@ -102,23 +143,18 @@
   // src/lib/parsers/parse-dbf.ts
   var LITTLE_ENDIAN = true;
   var DBF_HEADER_SIZE = 32;
-  var STATE;
-  (function(STATE2) {
-    STATE2[STATE2["START"] = 0] = "START";
-    STATE2[STATE2["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
-    STATE2[STATE2["FIELD_PROPERTIES"] = 2] = "FIELD_PROPERTIES";
-    STATE2[STATE2["END"] = 3] = "END";
-    STATE2[STATE2["ERROR"] = 4] = "ERROR";
-  })(STATE || (STATE = {}));
   var DBFParser = class {
     constructor(options) {
      this.binaryReader = new BinaryChunkReader();
-      this.state = 0
+      this.state = 0 /* START */;
       this.result = {
        data: []
      };
      this.textDecoder = new TextDecoder(options.encoding);
    }
+    /**
+     * @param arrayBuffer
+     */
     write(arrayBuffer) {
      this.binaryReader.write(arrayBuffer);
      this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
@@ -126,8 +162,8 @@
     end() {
       this.binaryReader.end();
       this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
-      if (this.state !== 3) {
-        this.state = 4
+      if (this.state !== 3 /* END */) {
+        this.state = 4 /* ERROR */;
         this.result.error = "DBF incomplete file";
       }
     }
@@ -179,10 +215,10 @@
     while (true) {
       try {
         switch (state) {
-          case 4
-          case 3
+          case 4 /* ERROR */:
+          case 3 /* END */:
             return state;
-          case 0
+          case 0 /* START */:
             const dataView = binaryReader.getDataView(DBF_HEADER_SIZE);
             if (!dataView) {
               return state;
@@ -193,10 +229,13 @@
               rowsTotal: result.dbfHeader.nRecords,
               rows: 0
             };
-            state = 1
+            state = 1 /* FIELD_DESCRIPTORS */;
             break;
-          case 1
-            const fieldDescriptorView = binaryReader.getDataView(
+          case 1 /* FIELD_DESCRIPTORS */:
+            const fieldDescriptorView = binaryReader.getDataView(
+              // @ts-ignore
+              result.dbfHeader.headerLength - DBF_HEADER_SIZE
+            );
             if (!fieldDescriptorView) {
               return state;
             }
@@ -205,10 +244,10 @@
               fields: result.dbfFields.map((dbfField) => makeField(dbfField)),
               metadata: {}
             };
-            state = 2
+            state = 2 /* FIELD_PROPERTIES */;
             binaryReader.skip(1);
             break;
-          case 2
+          case 2 /* FIELD_PROPERTIES */:
             const { recordLength = 0, nRecords = 0 } = result?.dbfHeader || {};
             while (result.data.length < nRecords) {
               const recordView = binaryReader.getDataView(recordLength - 1);
@@ -220,15 +259,15 @@
               result.data.push(row);
               result.progress.rows = result.data.length;
             }
-            state = 3
+            state = 3 /* END */;
             break;
           default:
-            state = 4
+            state = 4 /* ERROR */;
             result.error = `illegal parser state ${state}`;
             return state;
         }
       } catch (error) {
-        state = 4
+        state = 4 /* ERROR */;
         result.error = `DBF parsing failed: ${error.message}`;
         return state;
       }
@@ -236,12 +275,17 @@
   }
   function parseDBFHeader(headerView) {
     return {
+      // Last updated date
       year: headerView.getUint8(1) + 1900,
       month: headerView.getUint8(2),
       day: headerView.getUint8(3),
+      // Number of records in data file
       nRecords: headerView.getUint32(4, LITTLE_ENDIAN),
+      // Length of header in bytes
       headerLength: headerView.getUint16(8, LITTLE_ENDIAN),
+      // Length of each record
       recordLength: headerView.getUint16(10, LITTLE_ENDIAN),
+      // Not sure if this is usually set
       languageDriver: headerView.getUint8(29)
     };
   }
@@ -265,7 +309,9 @@
     const out = {};
     let offset = 0;
     for (const field of fields) {
-      const text = textDecoder.decode(
+      const text = textDecoder.decode(
+        new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength)
+      );
       out[field.name] = parseField(text, field.dataType);
       offset += field.fieldLength;
     }
@@ -326,7 +372,7 @@
   }
 
   // src/dbf-loader.ts
-  var VERSION = true ? "4.0.0-beta.
+  var VERSION = true ? "4.0.0-beta.3" : "latest";
   var DBFWorkerLoader = {
     name: "DBF",
     id: "dbf",
@@ -353,7 +399,7 @@
 
   // ../worker-utils/src/lib/worker-utils/get-transfer-list.ts
   function getTransferList(object, recursive = true, transfers) {
-    const transfersSet = transfers || new Set();
+    const transfersSet = transfers || /* @__PURE__ */ new Set();
     if (!object) {
     } else if (isTransferable(object)) {
       transfersSet.add(object);
@@ -396,11 +442,15 @@
     }
     return parentPort;
   }
-  var onMessageWrapperMap = new Map();
+  var onMessageWrapperMap = /* @__PURE__ */ new Map();
   var WorkerBody = class {
+    /** Check that we are actually in a worker thread */
     static inWorkerThread() {
       return typeof self !== "undefined" || Boolean(getParentPort());
     }
+    /*
+     * (type: WorkerMessageType, payload: WorkerMessagePayload) => any
+     */
     static set onmessage(onMessage) {
       function handleMessage(message) {
         const parentPort3 = getParentPort();
@@ -444,6 +494,11 @@
         globalThis.removeEventListener("message", onMessageWrapper);
       }
     }
+    /**
+     * Send a message from a worker to creating thread (main thread)
+     * @param type
+     * @param payload
+     */
     static postMessage(type, payload) {
       const data = { source: "loaders.gl", type, payload };
       const transferList = getTransferList(payload);
@@ -475,6 +530,7 @@
           loader,
           arrayBuffer: input,
           options,
+          // @ts-expect-error fetch missing
           context: {
             ...context,
             _parse: parseOnMainThread