@loaders.gl/shapefile 4.4.0-alpha.1 → 4.4.0-alpha.9
This diff shows the contents of publicly released versions of this package as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in that registry.
- package/dist/dbf-arrow-loader.d.ts +3 -3
- package/dist/dbf-arrow-loader.d.ts.map +1 -1
- package/dist/dbf-arrow-loader.js +2 -1
- package/dist/dbf-arrow-loader.js.map +1 -0
- package/dist/dbf-format.js +1 -0
- package/dist/dbf-format.js.map +1 -0
- package/dist/dbf-loader.d.ts +3 -2
- package/dist/dbf-loader.d.ts.map +1 -1
- package/dist/dbf-loader.js +2 -1
- package/dist/dbf-loader.js.map +1 -0
- package/dist/dbf-worker.js +227 -201
- package/dist/dist.dev.js +51 -22
- package/dist/dist.min.js +5 -5
- package/dist/index.cjs +29 -13
- package/dist/index.cjs.map +4 -4
- package/dist/index.js +1 -0
- package/dist/index.js.map +1 -0
- package/dist/lib/parsers/parse-dbf-to-arrow.d.ts +1 -1
- package/dist/lib/parsers/parse-dbf-to-arrow.d.ts.map +1 -1
- package/dist/lib/parsers/parse-dbf-to-arrow.js +3 -1
- package/dist/lib/parsers/parse-dbf-to-arrow.js.map +1 -0
- package/dist/lib/parsers/parse-dbf.d.ts +1 -1
- package/dist/lib/parsers/parse-dbf.d.ts.map +1 -1
- package/dist/lib/parsers/parse-dbf.js +3 -1
- package/dist/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/lib/parsers/parse-shapefile.d.ts +1 -1
- package/dist/lib/parsers/parse-shapefile.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shapefile.js +16 -4
- package/dist/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/lib/parsers/parse-shp-geometry.js +1 -0
- package/dist/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/lib/parsers/parse-shp-header.js +1 -0
- package/dist/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/lib/parsers/parse-shp.d.ts +1 -1
- package/dist/lib/parsers/parse-shp.d.ts.map +1 -1
- package/dist/lib/parsers/parse-shp.js +3 -1
- package/dist/lib/parsers/parse-shp.js.map +1 -0
- package/dist/lib/parsers/parse-shx.js +1 -0
- package/dist/lib/parsers/parse-shx.js.map +1 -0
- package/dist/lib/parsers/types.d.ts +9 -5
- package/dist/lib/parsers/types.d.ts.map +1 -1
- package/dist/lib/parsers/types.js +1 -0
- package/dist/lib/parsers/types.js.map +1 -0
- package/dist/lib/streaming/binary-chunk-reader.js +1 -0
- package/dist/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/lib/streaming/binary-reader.js +1 -0
- package/dist/lib/streaming/binary-reader.js.map +1 -0
- package/dist/lib/streaming/zip-batch-iterators.js +1 -0
- package/dist/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/shapefile-loader.d.ts +8 -2
- package/dist/shapefile-loader.d.ts.map +1 -1
- package/dist/shapefile-loader.js +2 -1
- package/dist/shapefile-loader.js.map +1 -0
- package/dist/shp-loader.d.ts +3 -3
- package/dist/shp-loader.d.ts.map +1 -1
- package/dist/shp-loader.js +2 -1
- package/dist/shp-loader.js.map +1 -0
- package/dist/shp-worker.js +227 -201
- package/dist/workers/dbf-worker.js +1 -0
- package/dist/workers/dbf-worker.js.map +1 -0
- package/dist/workers/shp-worker.js +1 -0
- package/dist/workers/shp-worker.js.map +1 -0
- package/package.json +12 -6
- package/src/dbf-arrow-loader.ts +8 -3
- package/src/dbf-loader.ts +9 -3
- package/src/lib/parsers/parse-dbf-to-arrow.ts +5 -2
- package/src/lib/parsers/parse-dbf.ts +5 -2
- package/src/lib/parsers/parse-shapefile.ts +23 -10
- package/src/lib/parsers/parse-shp.ts +5 -2
- package/src/lib/parsers/types.ts +11 -6
- package/src/shapefile-loader.ts +15 -8
- package/src/shp-loader.ts +6 -4
package/dist/dbf-arrow-loader.d.ts
CHANGED
@@ -1,7 +1,7 @@
-import type {
+import type { StrictLoaderOptions } from '@loaders.gl/loader-utils';
 import type { ArrowTable, ArrowTableBatch } from '@loaders.gl/schema';
 import { parseDBF } from "./lib/parsers/parse-dbf-to-arrow.js";
-export type DBFLoaderOptions =
+export type DBFLoaderOptions = StrictLoaderOptions & {
     dbf?: {
         encoding?: string;
         /** Override the URL to the worker bundle (by default loads from unpkg.com) */
@@ -32,7 +32,7 @@ export declare const DBFArrowWorkerLoader: {
 export declare const DBFArrowLoader: {
     readonly parse: (arrayBuffer: ArrayBuffer, options: DBFLoaderOptions | undefined) => Promise<ArrowTable>;
     readonly parseSync: typeof parseDBF;
-    readonly parseInBatches: (arrayBufferIterator: AsyncIterable<
+    readonly parseInBatches: (arrayBufferIterator: AsyncIterable<ArrayBufferLike | ArrayBufferView> | Iterable<ArrayBufferLike | ArrayBufferView>, options: DBFLoaderOptions | undefined) => AsyncIterable<ArrowTableBatch>;
     readonly dataType: ArrowTable;
     readonly batchType: ArrowTableBatch;
     readonly version: any;

package/dist/dbf-arrow-loader.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"dbf-arrow-loader.d.ts","sourceRoot":"","sources":["../src/dbf-arrow-loader.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAA2B,
+{"version":3,"file":"dbf-arrow-loader.d.ts","sourceRoot":"","sources":["../src/dbf-arrow-loader.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAA2B,mBAAmB,EAAC,MAAM,0BAA0B,CAAC;AAC5F,OAAO,KAAK,EAAC,UAAU,EAAE,eAAe,EAAC,MAAM,oBAAoB,CAAC;AACpE,OAAO,EAAC,QAAQ,EAAoB,4CAAyC;AAO7E,MAAM,MAAM,gBAAgB,GAAG,mBAAmB,GAAG;IACnD,GAAG,CAAC,EAAE;QACJ,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,8EAA8E;QAC9E,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,CAAC;CACH,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,oBAAoB;uBAEF,UAAU;wBACT,eAAe;;;;;;;;;;;;;;CAQ2B,CAAC;AAE3E,sBAAsB;AACtB,eAAO,MAAM,cAAc;;;mDAMnB,aAAa,CAAC,eAAe,GAAG,eAAe,CAAC,GAChD,QAAQ,CAAC,eAAe,GAAG,eAAe,CAAC;uBAnBpB,UAAU;wBACT,eAAe;;;;;;;;;;;;;;CAuBqC,CAAC"}

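The widened `parseInBatches` declaration above means batch parsing no longer requires plain `ArrayBuffer` chunks; typed-array views are now accepted and normalized internally. A minimal sketch of what a caller can pass under the new typings (the chunk source is hypothetical; only the `DBFArrowLoader.parseInBatches` call reflects the declaration above):

```ts
import {DBFArrowLoader} from '@loaders.gl/shapefile';

// Hypothetical chunk source: yields Uint8Array views rather than ArrayBuffers.
async function* readChunks(): AsyncIterable<Uint8Array> {
  const response = await fetch('data.dbf');
  yield new Uint8Array(await response.arrayBuffer());
}

async function demo() {
  // With the alpha.9 typings, ArrayBufferView chunks are accepted directly and
  // copied into standalone ArrayBuffers inside the loader.
  for await (const batch of DBFArrowLoader.parseInBatches(readChunks(), undefined)) {
    console.log(batch);
  }
}
```
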
package/dist/dbf-arrow-loader.js
CHANGED
@@ -5,7 +5,7 @@ import { parseDBF, parseDBFInBatches } from "./lib/parsers/parse-dbf-to-arrow.js
 import { DBFFormat } from "./dbf-format.js";
 // __VERSION__ is injected by babel-plugin-version-inline
 // @ts-ignore TS2304: Cannot find name '__VERSION__'.
-const VERSION = typeof "4.4.0-alpha.
+const VERSION = typeof "4.4.0-alpha.9" !== 'undefined' ? "4.4.0-alpha.9" : 'latest';
 /**
  * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles
  */
@@ -30,3 +30,4 @@ export const DBFArrowLoader = {
         return parseDBFInBatches(arrayBufferIterator, options);
     }
 };
+//# sourceMappingURL=dbf-arrow-loader.js.map

package/dist/dbf-arrow-loader.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"dbf-arrow-loader.js","sourceRoot":"","sources":["../src/dbf-arrow-loader.ts"],"names":[],"mappings":"AAAA,aAAa;AACb,+BAA+B;AAC/B,oCAAoC;AAIpC,OAAO,EAAC,QAAQ,EAAE,iBAAiB,EAAC,4CAAyC;AAC7E,OAAO,EAAC,SAAS,EAAC,wBAAqB;AAEvC,yDAAyD;AACzD,qDAAqD;AACrD,MAAM,OAAO,GAAG,sBAAkB,KAAK,WAAW,CAAC,CAAC,iBAAa,CAAC,CAAC,QAAQ,CAAC;AAU5E;;GAEG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG;IAClC,GAAG,SAAS;IACZ,QAAQ,EAAE,IAA6B;IACvC,SAAS,EAAE,IAAkC;IAC7C,OAAO,EAAE,OAAO;IAChB,MAAM,EAAE,IAAI;IACZ,OAAO,EAAE;QACP,GAAG,EAAE;YACH,QAAQ,EAAE,QAAQ;SACnB;KACF;CACuE,CAAC;AAE3E,sBAAsB;AACtB,MAAM,CAAC,MAAM,cAAc,GAAG;IAC5B,GAAG,oBAAoB;IACvB,KAAK,EAAE,KAAK,EAAE,WAAW,EAAE,OAAO,EAAE,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC;IACrE,SAAS,EAAE,QAAQ;IACnB,cAAc,CACZ,mBAE+C,EAC/C,OAAO;QAEP,OAAO,iBAAiB,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;IACzD,CAAC;CACiF,CAAC"}

package/dist/dbf-format.js
CHANGED
package/dist/dbf-format.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"dbf-format.js","sourceRoot":"","sources":["../src/dbf-format.ts"],"names":[],"mappings":"AAAA,aAAa;AACb,+BAA+B;AAC/B,oCAAoC;AAIpC,uCAAuC;AACvC,MAAM,CAAC,MAAM,SAAS,GAAG;IACvB,IAAI,EAAE,KAAK;IACX,EAAE,EAAE,KAAK;IACT,MAAM,EAAE,WAAW;IACnB,QAAQ,EAAE,OAAO;IACjB,UAAU,EAAE,CAAC,KAAK,CAAC;IACnB,SAAS,EAAE,CAAC,mBAAmB,CAAC;CACP,CAAC"}

package/dist/dbf-loader.d.ts
CHANGED
@@ -1,7 +1,8 @@
-import type { LoaderWithParser,
-export type DBFLoaderOptions =
+import type { LoaderWithParser, StrictLoaderOptions } from '@loaders.gl/loader-utils';
+export type DBFLoaderOptions = StrictLoaderOptions & {
     dbf?: {
         encoding?: string;
+        shape?: 'rows' | 'table' | 'object-row-table';
         /** Override the URL to the worker bundle (by default loads from unpkg.com) */
         workerUrl?: string;
     };

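For reference, a hedged sketch of how the new `shape` option on `DBFLoaderOptions` would be passed through `@loaders.gl/core`; the diff does not show the output type produced for each shape, so treat the values below as illustrative of the declaration only:

```ts
import {load} from '@loaders.gl/core';
import {DBFLoader} from '@loaders.gl/shapefile';

async function loadAttributes() {
  // 'object-row-table' is the shape the ShapefileLoader itself now requests for
  // the sidecar .dbf file (see the dist.dev.js hunks further down); 'rows' and
  // 'table' are the other values the declaration permits.
  const table = await load('data.dbf', DBFLoader, {
    dbf: {encoding: 'latin1', shape: 'object-row-table'}
  });
  return table;
}
```
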
package/dist/dbf-loader.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"dbf-loader.d.ts","sourceRoot":"","sources":["../src/dbf-loader.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAS,gBAAgB,EAAE,
+{"version":3,"file":"dbf-loader.d.ts","sourceRoot":"","sources":["../src/dbf-loader.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAS,gBAAgB,EAAE,mBAAmB,EAAC,MAAM,0BAA0B,CAAC;AAO5F,MAAM,MAAM,gBAAgB,GAAG,mBAAmB,GAAG;IACnD,GAAG,CAAC,EAAE;QACJ,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,KAAK,CAAC,EAAE,MAAM,GAAG,OAAO,GAAG,kBAAkB,CAAC;QAC9C,8EAA8E;QAC9E,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,CAAC;CACH,CAAC;AAEF;;GAEG;AACH,eAAO,MAAM,eAAe;;uBAER,OAAO;wBACN,KAAK;;;;;;;;;;;;;CAc6B,CAAC;AAExD,sBAAsB;AACtB,eAAO,MAAM,SAAS,EAAE,gBAYvB,CAAC"}

package/dist/dbf-loader.js
CHANGED
@@ -4,7 +4,7 @@
 import { parseDBF, parseDBFInBatches } from "./lib/parsers/parse-dbf.js";
 // __VERSION__ is injected by babel-plugin-version-inline
 // @ts-ignore TS2304: Cannot find name '__VERSION__'.
-const VERSION = typeof "4.4.0-alpha.
+const VERSION = typeof "4.4.0-alpha.9" !== 'undefined' ? "4.4.0-alpha.9" : 'latest';
 /**
  * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles
  */
@@ -34,3 +34,4 @@ export const DBFLoader = {
         return parseDBFInBatches(arrayBufferIterator, options);
     }
 };
+//# sourceMappingURL=dbf-loader.js.map

package/dist/dbf-loader.js.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"dbf-loader.js","sourceRoot":"","sources":["../src/dbf-loader.ts"],"names":[],"mappings":"AAAA,aAAa;AACb,+BAA+B;AAC/B,oCAAoC;AAGpC,OAAO,EAAC,QAAQ,EAAE,iBAAiB,EAAC,mCAAgC;AAEpE,yDAAyD;AACzD,qDAAqD;AACrD,MAAM,OAAO,GAAG,sBAAkB,KAAK,WAAW,CAAC,CAAC,iBAAa,CAAC,CAAC,QAAQ,CAAC;AAW5E;;GAEG;AACH,MAAM,CAAC,MAAM,eAAe,GAAG;IAC7B,IAAI,EAAE,KAAK;IACX,QAAQ,EAAE,IAAe;IACzB,SAAS,EAAE,IAAa;IAExB,EAAE,EAAE,KAAK;IACT,MAAM,EAAE,WAAW;IACnB,OAAO,EAAE,OAAO;IAChB,MAAM,EAAE,IAAI;IACZ,QAAQ,EAAE,OAAO;IACjB,UAAU,EAAE,CAAC,KAAK,CAAC;IACnB,SAAS,EAAE,CAAC,mBAAmB,CAAC;IAChC,OAAO,EAAE;QACP,GAAG,EAAE;YACH,QAAQ,EAAE,QAAQ;SACnB;KACF;CACoD,CAAC;AAExD,sBAAsB;AACtB,MAAM,CAAC,MAAM,SAAS,GAAqB;IACzC,GAAG,eAAe;IAClB,KAAK,EAAE,KAAK,EAAE,WAAW,EAAE,OAAO,EAAE,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC;IACrE,SAAS,EAAE,QAAQ;IACnB,cAAc,CACZ,mBAE+C,EAC/C,OAAO;QAEP,OAAO,iBAAiB,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;IACzD,CAAC;CACF,CAAC"}

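The `VERSION` lines in these compiled files come from `__VERSION__` being string-inlined at build time, which is why the alpha.9 literal appears twice in the same expression. A sketch of the pre-inlining source, inferred from the compiled output and the comment in the file ("__VERSION__ is injected by babel-plugin-version-inline"):

```ts
// Pre-inlining source (inferred): the guard keeps the module usable when the
// constant has not been injected, e.g. when running directly from src.
declare const __VERSION__: string;
const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';

// After inlining, __VERSION__ is replaced by the literal:
// const VERSION = typeof "4.4.0-alpha.9" !== 'undefined' ? "4.4.0-alpha.9" : 'latest';
```
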
package/dist/dbf-worker.js
CHANGED
@@ -1,5 +1,230 @@
 "use strict";
 (() => {
+  // ../worker-utils/src/lib/node/worker_threads-browser.ts
+  var parentPort = null;
+
+  // ../worker-utils/src/lib/worker-utils/get-transfer-list.ts
+  function getTransferList(object, recursive = true, transfers) {
+    const transfersSet = transfers || /* @__PURE__ */ new Set();
+    if (!object) {
+    } else if (isTransferable(object)) {
+      transfersSet.add(object);
+    } else if (isTransferable(object.buffer)) {
+      transfersSet.add(object.buffer);
+    } else if (ArrayBuffer.isView(object)) {
+    } else if (recursive && typeof object === "object") {
+      for (const key in object) {
+        getTransferList(object[key], recursive, transfersSet);
+      }
+    }
+    return transfers === void 0 ? Array.from(transfersSet) : [];
+  }
+  function isTransferable(object) {
+    if (!object) {
+      return false;
+    }
+    if (object instanceof ArrayBuffer) {
+      return true;
+    }
+    if (typeof MessagePort !== "undefined" && object instanceof MessagePort) {
+      return true;
+    }
+    if (typeof ImageBitmap !== "undefined" && object instanceof ImageBitmap) {
+      return true;
+    }
+    if (typeof OffscreenCanvas !== "undefined" && object instanceof OffscreenCanvas) {
+      return true;
+    }
+    return false;
+  }
+
+  // ../worker-utils/src/lib/worker-farm/worker-body.ts
+  async function getParentPort() {
+    return parentPort;
+  }
+  var onMessageWrapperMap = /* @__PURE__ */ new Map();
+  var WorkerBody = class {
+    /** Check that we are actually in a worker thread */
+    static async inWorkerThread() {
+      return typeof self !== "undefined" || Boolean(await getParentPort());
+    }
+    /*
+     * (type: WorkerMessageType, payload: WorkerMessagePayload) => any
+     */
+    static set onmessage(onMessage) {
+      async function handleMessage(message) {
+        const parentPort2 = await getParentPort();
+        const { type, payload } = parentPort2 ? message : message.data;
+        onMessage(type, payload);
+      }
+      getParentPort().then((parentPort2) => {
+        if (parentPort2) {
+          parentPort2.on("message", (message) => {
+            handleMessage(message);
+          });
+          parentPort2.on("exit", () => console.debug("Node worker closing"));
+        } else {
+          globalThis.onmessage = handleMessage;
+        }
+      });
+    }
+    static async addEventListener(onMessage) {
+      let onMessageWrapper = onMessageWrapperMap.get(onMessage);
+      if (!onMessageWrapper) {
+        onMessageWrapper = async (message) => {
+          if (!isKnownMessage(message)) {
+            return;
+          }
+          const parentPort3 = await getParentPort();
+          const { type, payload } = parentPort3 ? message : message.data;
+          onMessage(type, payload);
+        };
+      }
+      const parentPort2 = await getParentPort();
+      if (parentPort2) {
+        console.error("not implemented");
+      } else {
+        globalThis.addEventListener("message", onMessageWrapper);
+      }
+    }
+    static async removeEventListener(onMessage) {
+      const onMessageWrapper = onMessageWrapperMap.get(onMessage);
+      onMessageWrapperMap.delete(onMessage);
+      const parentPort2 = await getParentPort();
+      if (parentPort2) {
+        console.error("not implemented");
+      } else {
+        globalThis.removeEventListener("message", onMessageWrapper);
+      }
+    }
+    /**
+     * Send a message from a worker to creating thread (main thread)
+     * @param type
+     * @param payload
+     */
+    static async postMessage(type, payload) {
+      const data = { source: "loaders.gl", type, payload };
+      const transferList = getTransferList(payload);
+      const parentPort2 = await getParentPort();
+      if (parentPort2) {
+        parentPort2.postMessage(data, transferList);
+      } else {
+        globalThis.postMessage(data, transferList);
+      }
+    }
+  };
+  function isKnownMessage(message) {
+    const { type, data } = message;
+    return type === "message" && data && typeof data.source === "string" && data.source.startsWith("loaders.gl");
+  }
+
+  // ../loader-utils/src/lib/worker-loader-utils/create-loader-worker.ts
+  var requestId = 0;
+  async function createLoaderWorker(loader) {
+    if (!await WorkerBody.inWorkerThread()) {
+      return;
+    }
+    WorkerBody.onmessage = async (type, payload) => {
+      switch (type) {
+        case "process":
+          try {
+            const { input, options = {}, context = {} } = payload;
+            const result = await parseData({
+              loader,
+              arrayBuffer: input,
+              options,
+              // @ts-expect-error fetch missing
+              context: {
+                ...context,
+                _parse: parseOnMainThread
+              }
+            });
+            WorkerBody.postMessage("done", { result });
+          } catch (error) {
+            const message = error instanceof Error ? error.message : "";
+            WorkerBody.postMessage("error", { error: message });
+          }
+          break;
+        default:
+      }
+    };
+  }
+  function parseOnMainThread(arrayBuffer, loader, options, context) {
+    return new Promise((resolve, reject) => {
+      const id = requestId++;
+      const onMessage = (type, payload2) => {
+        if (payload2.id !== id) {
+          return;
+        }
+        switch (type) {
+          case "done":
+            WorkerBody.removeEventListener(onMessage);
+            resolve(payload2.result);
+            break;
+          case "error":
+            WorkerBody.removeEventListener(onMessage);
+            reject(payload2.error);
+            break;
+          default:
+        }
+      };
+      WorkerBody.addEventListener(onMessage);
+      const payload = { id, input: arrayBuffer, options };
+      WorkerBody.postMessage("process", payload);
+    });
+  }
+  async function parseData({
+    loader,
+    arrayBuffer,
+    options,
+    context
+  }) {
+    let data;
+    let parser;
+    if (loader.parseSync || loader.parse) {
+      data = arrayBuffer;
+      parser = loader.parseSync || loader.parse;
+    } else if (loader.parseTextSync) {
+      const textDecoder = new TextDecoder();
+      data = textDecoder.decode(arrayBuffer);
+      parser = loader.parseTextSync;
+    } else {
+      throw new Error(`Could not load data with ${loader.name} loader`);
+    }
+    options = {
+      ...options,
+      modules: loader && loader.options && loader.options.modules || {},
+      core: {
+        ...options.core,
+        worker: false
+      }
+    };
+    return await parser(data, { ...options }, context, loader);
+  }
+
+  // ../loader-utils/src/lib/iterators/async-iteration.ts
+  async function* toArrayBufferIterator(asyncIterator) {
+    for await (const chunk of asyncIterator) {
+      yield copyToArrayBuffer(chunk);
+    }
+  }
+  function copyToArrayBuffer(chunk) {
+    if (chunk instanceof ArrayBuffer) {
+      return chunk;
+    }
+    if (ArrayBuffer.isView(chunk)) {
+      const { buffer, byteOffset, byteLength } = chunk;
+      return copyFromBuffer(buffer, byteOffset, byteLength);
+    }
+    return copyFromBuffer(chunk);
+  }
+  function copyFromBuffer(buffer, byteOffset = 0, byteLength = buffer.byteLength - byteOffset) {
+    const view = new Uint8Array(buffer, byteOffset, byteLength);
+    const copy = new Uint8Array(view.length);
+    copy.set(view);
+    return copy.buffer;
+  }
+
   // src/lib/streaming/binary-chunk-reader.ts
   var BinaryChunkReader = class {
     offset;
@@ -200,7 +425,7 @@
     const { encoding = "latin1" } = options.dbf || {};
     const parser = new DBFParser({ encoding });
     let headerReturned = false;
-    for await (const arrayBuffer of asyncIterator) {
+    for await (const arrayBuffer of toArrayBufferIterator(asyncIterator)) {
       parser.write(arrayBuffer);
       if (!headerReturned && parser.result.dbfHeader) {
         headerReturned = true;
@@ -377,7 +602,7 @@
   }

   // src/dbf-loader.ts
-  var VERSION = true ? "4.4.0-alpha.
+  var VERSION = true ? "4.4.0-alpha.9" : "latest";
   var DBFWorkerLoader = {
     name: "DBF",
     dataType: null,
@@ -404,205 +629,6 @@
     }
   };

-  // ../worker-utils/src/lib/node/worker_threads-browser.ts
-  var parentPort = null;
-
-  // ../worker-utils/src/lib/worker-utils/get-transfer-list.ts
-  function getTransferList(object, recursive = true, transfers) {
-    const transfersSet = transfers || /* @__PURE__ */ new Set();
-    if (!object) {
-    } else if (isTransferable(object)) {
-      transfersSet.add(object);
-    } else if (isTransferable(object.buffer)) {
-      transfersSet.add(object.buffer);
-    } else if (ArrayBuffer.isView(object)) {
-    } else if (recursive && typeof object === "object") {
-      for (const key in object) {
-        getTransferList(object[key], recursive, transfersSet);
-      }
-    }
-    return transfers === void 0 ? Array.from(transfersSet) : [];
-  }
-  function isTransferable(object) {
-    if (!object) {
-      return false;
-    }
-    if (object instanceof ArrayBuffer) {
-      return true;
-    }
-    if (typeof MessagePort !== "undefined" && object instanceof MessagePort) {
-      return true;
-    }
-    if (typeof ImageBitmap !== "undefined" && object instanceof ImageBitmap) {
-      return true;
-    }
-    if (typeof OffscreenCanvas !== "undefined" && object instanceof OffscreenCanvas) {
-      return true;
-    }
-    return false;
-  }
-
-  // ../worker-utils/src/lib/worker-farm/worker-body.ts
-  async function getParentPort() {
-    return parentPort;
-  }
-  var onMessageWrapperMap = /* @__PURE__ */ new Map();
-  var WorkerBody = class {
-    /** Check that we are actually in a worker thread */
-    static async inWorkerThread() {
-      return typeof self !== "undefined" || Boolean(await getParentPort());
-    }
-    /*
-     * (type: WorkerMessageType, payload: WorkerMessagePayload) => any
-     */
-    static set onmessage(onMessage) {
-      async function handleMessage(message) {
-        const parentPort2 = await getParentPort();
-        const { type, payload } = parentPort2 ? message : message.data;
-        onMessage(type, payload);
-      }
-      getParentPort().then((parentPort2) => {
-        if (parentPort2) {
-          parentPort2.on("message", (message) => {
-            handleMessage(message);
-          });
-          parentPort2.on("exit", () => console.debug("Node worker closing"));
-        } else {
-          globalThis.onmessage = handleMessage;
-        }
-      });
-    }
-    static async addEventListener(onMessage) {
-      let onMessageWrapper = onMessageWrapperMap.get(onMessage);
-      if (!onMessageWrapper) {
-        onMessageWrapper = async (message) => {
-          if (!isKnownMessage(message)) {
-            return;
-          }
-          const parentPort3 = await getParentPort();
-          const { type, payload } = parentPort3 ? message : message.data;
-          onMessage(type, payload);
-        };
-      }
-      const parentPort2 = await getParentPort();
-      if (parentPort2) {
-        console.error("not implemented");
-      } else {
-        globalThis.addEventListener("message", onMessageWrapper);
-      }
-    }
-    static async removeEventListener(onMessage) {
-      const onMessageWrapper = onMessageWrapperMap.get(onMessage);
-      onMessageWrapperMap.delete(onMessage);
-      const parentPort2 = await getParentPort();
-      if (parentPort2) {
-        console.error("not implemented");
-      } else {
-        globalThis.removeEventListener("message", onMessageWrapper);
-      }
-    }
-    /**
-     * Send a message from a worker to creating thread (main thread)
-     * @param type
-     * @param payload
-     */
-    static async postMessage(type, payload) {
-      const data = { source: "loaders.gl", type, payload };
-      const transferList = getTransferList(payload);
-      const parentPort2 = await getParentPort();
-      if (parentPort2) {
-        parentPort2.postMessage(data, transferList);
-      } else {
-        globalThis.postMessage(data, transferList);
-      }
-    }
-  };
-  function isKnownMessage(message) {
-    const { type, data } = message;
-    return type === "message" && data && typeof data.source === "string" && data.source.startsWith("loaders.gl");
-  }
-
-  // ../loader-utils/src/lib/worker-loader-utils/create-loader-worker.ts
-  var requestId = 0;
-  async function createLoaderWorker(loader) {
-    if (!await WorkerBody.inWorkerThread()) {
-      return;
-    }
-    WorkerBody.onmessage = async (type, payload) => {
-      switch (type) {
-        case "process":
-          try {
-            const { input, options = {}, context = {} } = payload;
-            const result = await parseData({
-              loader,
-              arrayBuffer: input,
-              options,
-              // @ts-expect-error fetch missing
-              context: {
-                ...context,
-                _parse: parseOnMainThread
-              }
-            });
-            WorkerBody.postMessage("done", { result });
-          } catch (error) {
-            const message = error instanceof Error ? error.message : "";
-            WorkerBody.postMessage("error", { error: message });
-          }
-          break;
-        default:
-      }
-    };
-  }
-  function parseOnMainThread(arrayBuffer, loader, options, context) {
-    return new Promise((resolve, reject) => {
-      const id = requestId++;
-      const onMessage = (type, payload2) => {
-        if (payload2.id !== id) {
-          return;
-        }
-        switch (type) {
-          case "done":
-            WorkerBody.removeEventListener(onMessage);
-            resolve(payload2.result);
-            break;
-          case "error":
-            WorkerBody.removeEventListener(onMessage);
-            reject(payload2.error);
-            break;
-          default:
-        }
-      };
-      WorkerBody.addEventListener(onMessage);
-      const payload = { id, input: arrayBuffer, options };
-      WorkerBody.postMessage("process", payload);
-    });
-  }
-  async function parseData({
-    loader,
-    arrayBuffer,
-    options,
-    context
-  }) {
-    let data;
-    let parser;
-    if (loader.parseSync || loader.parse) {
-      data = arrayBuffer;
-      parser = loader.parseSync || loader.parse;
-    } else if (loader.parseTextSync) {
-      const textDecoder = new TextDecoder();
-      data = textDecoder.decode(arrayBuffer);
-      parser = loader.parseTextSync;
-    } else {
-      throw new Error(`Could not load data with ${loader.name} loader`);
-    }
-    options = {
-      ...options,
-      modules: loader && loader.options && loader.options.modules || {},
-      worker: false
-    };
-    return await parser(data, { ...options }, context, loader);
-  }
-
   // src/workers/dbf-worker.ts
   createLoaderWorker(DBFLoader);
 })();

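Beyond the module reordering, the functional change in this worker bundle is that `parseDBFInBatches` now pipes its input through `toArrayBufferIterator`, so chunks arriving as typed-array views are copied into standalone `ArrayBuffer`s before being handed to the streaming parser. A small standalone sketch of that normalization, using the same logic as the bundled `copyToArrayBuffer`/`copyFromBuffer` helpers above (variable names here are illustrative):

```ts
// A view over part of a larger buffer, e.g. a slice handed out by a stream reader.
const backing = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8]);
const view = backing.subarray(2, 6); // byteOffset 2, byteLength 4

// Copy only the viewed range into its own ArrayBuffer, as copyFromBuffer does.
const copy = new Uint8Array(view.length);
copy.set(view);
const normalized = copy.buffer;

// The streaming DBF parser can now assume byteOffset 0 and an exact byteLength.
console.log(normalized.byteLength); // 4
```
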
package/dist/dist.dev.js
CHANGED
@@ -61,6 +61,40 @@ var __exports__ = (() => {
   });
   __reExport(bundle_exports, __toESM(require_core(), 1));

+  // ../loader-utils/src/loader-types.ts
+  async function parseFromContext(data, loaders, options, context) {
+    return context._parse(data, loaders, options, context);
+  }
+  async function parseInBatchesFromContext(data, loader, options, context) {
+    if (!context._parseInBatches) {
+      throw new Error("parseInBatches");
+    }
+    return context._parseInBatches(data, loader, options, context);
+  }
+
+  // ../loader-utils/src/lib/iterators/async-iteration.ts
+  async function* toArrayBufferIterator(asyncIterator) {
+    for await (const chunk of asyncIterator) {
+      yield copyToArrayBuffer(chunk);
+    }
+  }
+  function copyToArrayBuffer(chunk) {
+    if (chunk instanceof ArrayBuffer) {
+      return chunk;
+    }
+    if (ArrayBuffer.isView(chunk)) {
+      const { buffer, byteOffset, byteLength } = chunk;
+      return copyFromBuffer(buffer, byteOffset, byteLength);
+    }
+    return copyFromBuffer(chunk);
+  }
+  function copyFromBuffer(buffer, byteOffset = 0, byteLength = buffer.byteLength - byteOffset) {
+    const view = new Uint8Array(buffer, byteOffset, byteLength);
+    const copy = new Uint8Array(view.length);
+    copy.set(view);
+    return copy.buffer;
+  }
+
   // src/lib/streaming/binary-chunk-reader.ts
   var BinaryChunkReader = class {
     offset;
@@ -463,7 +497,7 @@ var __exports__ = (() => {
   async function* parseSHPInBatches(asyncIterator, options) {
     const parser = new SHPParser(options);
     let headerReturned = false;
-    for await (const arrayBuffer of asyncIterator) {
+    for await (const arrayBuffer of toArrayBufferIterator(asyncIterator)) {
       parser.write(arrayBuffer);
       if (!headerReturned && parser.result.header) {
         headerReturned = true;
@@ -573,17 +607,6 @@ var __exports__ = (() => {
     parseInBatches: (arrayBufferIterator, options) => parseSHPInBatches(arrayBufferIterator, options)
   };

-  // ../loader-utils/src/loader-types.ts
-  async function parseFromContext(data, loaders, options, context) {
-    return context._parse(data, loaders, options, context);
-  }
-  async function parseInBatchesFromContext(data, loader, options, context) {
-    if (!context._parseInBatches) {
-      throw new Error("parseInBatches");
-    }
-    return context._parseInBatches(data, loader, options, context);
-  }
-
   // ../gis/src/lib/binary-geometry-api/transform-coordinates.ts
   function transformGeoJsonCoords(features, fn) {
     for (const feature of features) {
@@ -18097,7 +18120,7 @@ return true;`);
     const { encoding = "latin1" } = options.dbf || {};
     const parser = new DBFParser({ encoding });
     let headerReturned = false;
-    for await (const arrayBuffer of asyncIterator) {
+    for await (const arrayBuffer of toArrayBufferIterator(asyncIterator)) {
       parser.write(arrayBuffer);
      if (!headerReturned && parser.result.dbfHeader) {
         headerReturned = true;
@@ -18306,7 +18329,7 @@ return true;`);
     const { reproject = false, _targetCrs = "WGS84" } = options?.gis || {};
     const { shx, cpg, prj } = await loadShapefileSidecarFiles(options, context);
     const shapeIterable = await parseInBatchesFromContext(
-      asyncIterator,
+      toArrayBufferIterator(asyncIterator),
       SHPLoader,
       options,
       context
@@ -18320,7 +18343,10 @@ return true;`);
       DBFLoader,
       {
         ...options,
-        dbf: {
+        dbf: {
+          ...options?.dbf,
+          encoding: cpg || "latin1"
+        }
       },
       context
     );
@@ -18373,12 +18399,15 @@ return true;`);
     let propertyTable;
     const dbfResponse = await context?.fetch(replaceExtension(context?.url, "dbf"));
     if (dbfResponse?.ok) {
-
-
-
-
-
-
+      const dbfOptions = {
+        ...options,
+        dbf: {
+          ...options?.dbf,
+          shape: "object-row-table",
+          encoding: cpg || "latin1"
+        }
+      };
+      propertyTable = await parseFromContext(dbfResponse, DBFLoader, dbfOptions, context);
     }
     let features = joinProperties(geojsonGeometries, propertyTable?.data || []);
     if (reproject) {
@@ -18552,7 +18581,7 @@ return true;`);
     const { encoding = "latin1" } = options.dbf || {};
     const parser = new DBFParser2({ encoding });
     let headerReturned = false;
-    for await (const arrayBuffer of asyncIterator) {
+    for await (const arrayBuffer of toArrayBufferIterator(asyncIterator)) {
       parser.write(arrayBuffer);
       if (!headerReturned && parser.result.dbfHeader) {
         headerReturned = true;
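The last few hunks also change how the ShapefileLoader forwards DBF options: user-supplied `options.dbf` is now spread into the options passed to the DBFLoader rather than being replaced wholesale, with the encoding taken from the sidecar .cpg file when present. A hedged sketch of the resulting merge semantics (option names and the `cpg` fallback mirror the hunks above; the surrounding function is simplified):

```ts
type DBFOptions = {encoding?: string; shape?: 'rows' | 'table' | 'object-row-table'};

// Simplified version of the merge performed before delegating to the DBFLoader.
function buildDBFOptions(options: {dbf?: DBFOptions} | undefined, cpg: string | undefined) {
  return {
    ...options,
    dbf: {
      ...options?.dbf,
      shape: 'object-row-table' as const,
      encoding: cpg || 'latin1' // the .cpg sidecar wins; otherwise latin1
    }
  };
}

// Other user dbf options survive the merge; encoding and shape are pinned.
buildDBFOptions({dbf: {encoding: 'utf8'}}, 'UTF-8');
// => {dbf: {encoding: 'UTF-8', shape: 'object-row-table'}}
```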
|