@loaders.gl/arrow 4.0.4 → 4.1.0-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/arrow-worker.js +237 -2
- package/dist/dist.dev.js +1465 -630
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.d.ts +8 -4
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.d.ts.map +1 -1
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js +75 -54
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js.map +1 -1
- package/dist/geoarrow/convert-geoarrow-to-geojson-geometry.d.ts +13 -0
- package/dist/geoarrow/convert-geoarrow-to-geojson-geometry.d.ts.map +1 -0
- package/dist/geoarrow/{convert-geoarrow-to-geojson.js → convert-geoarrow-to-geojson-geometry.js} +34 -27
- package/dist/geoarrow/convert-geoarrow-to-geojson-geometry.js.map +1 -0
- package/dist/geoarrow-loader.d.ts.map +1 -1
- package/dist/geoarrow-loader.js +0 -1
- package/dist/geoarrow-loader.js.map +1 -1
- package/dist/index.cjs +429 -347
- package/dist/index.d.ts +5 -3
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +4 -3
- package/dist/index.js.map +1 -1
- package/dist/parsers/parse-arrow-sync.d.ts.map +1 -1
- package/dist/parsers/parse-arrow-sync.js +2 -0
- package/dist/parsers/parse-arrow-sync.js.map +1 -1
- package/dist/tables/convert-arrow-to-columnar-table.d.ts.map +1 -1
- package/dist/tables/convert-arrow-to-columnar-table.js +1 -0
- package/dist/tables/convert-arrow-to-columnar-table.js.map +1 -1
- package/dist/tables/convert-arrow-to-geojson-table.d.ts +1 -1
- package/dist/tables/convert-arrow-to-geojson-table.d.ts.map +1 -1
- package/dist/tables/convert-arrow-to-geojson-table.js +14 -8
- package/dist/tables/convert-arrow-to-geojson-table.js.map +1 -1
- package/dist/tables/convert-columnar-to-row-table.d.ts.map +1 -1
- package/dist/tables/convert-columnar-to-row-table.js +1 -0
- package/dist/tables/convert-columnar-to-row-table.js.map +1 -1
- package/dist/triangulate-on-worker.d.ts +40 -6
- package/dist/triangulate-on-worker.d.ts.map +1 -1
- package/dist/triangulate-on-worker.js +11 -1
- package/dist/triangulate-on-worker.js.map +1 -1
- package/dist/triangulation-worker.js +11703 -34
- package/dist/workers/hard-clone.d.ts +23 -0
- package/dist/workers/hard-clone.d.ts.map +1 -0
- package/dist/workers/hard-clone.js +57 -0
- package/dist/workers/hard-clone.js.map +1 -0
- package/dist/workers/triangulation-worker.js +37 -2
- package/dist/workers/triangulation-worker.js.map +1 -1
- package/package.json +16 -10
- package/src/geoarrow/convert-geoarrow-to-binary-geometry.ts +103 -61
- package/src/geoarrow/{convert-geoarrow-to-geojson.ts → convert-geoarrow-to-geojson-geometry.ts} +56 -46
- package/src/geoarrow-loader.ts +0 -4
- package/src/index.ts +9 -3
- package/src/parsers/parse-arrow-sync.ts +6 -1
- package/src/tables/convert-arrow-to-columnar-table.ts +1 -0
- package/src/tables/convert-arrow-to-geojson-table.ts +18 -7
- package/src/tables/convert-columnar-to-row-table.ts +1 -0
- package/src/triangulate-on-worker.ts +51 -8
- package/src/workers/hard-clone.ts +162 -0
- package/src/workers/triangulation-worker.ts +57 -3
- package/dist/geoarrow/convert-geoarrow-to-geojson.d.ts +0 -20
- package/dist/geoarrow/convert-geoarrow-to-geojson.d.ts.map +0 -1
- package/dist/geoarrow/convert-geoarrow-to-geojson.js.map +0 -1
package/dist/workers/hard-clone.d.ts
ADDED
@@ -0,0 +1,23 @@
+import * as arrow from 'apache-arrow';
+/**
+ * Clone an Arrow JS Data or Vector, detaching from an existing ArrayBuffer if
+ * it is shared with other.
+ *
+ * The purpose of this function is to enable transferring a `Data` instance,
+ * e.g. to a web worker, without neutering any other data.
+ *
+ * Any internal buffers that are a slice of a larger `ArrayBuffer` (i.e. where
+ * the typed array's `byteOffset` is not `0` and where its `byteLength` does not
+ * match its `array.buffer.byteLength`) are copied into new `ArrayBuffers`.
+ *
+ * If `force` is `true`, always clone internal buffers, even if not shared. If
+ * the default, `false`, any internal buffers that are **not** a slice of a
+ * larger `ArrayBuffer` will not be copied.
+ */
+export declare function hardClone<T extends arrow.DataType>(input: arrow.Data<T>, force?: boolean): arrow.Data<T>;
+export declare function hardClone<T extends arrow.DataType>(input: arrow.Vector<T>, force?: boolean): arrow.Vector<T>;
+/**
+ * Test whether an arrow.Data instance is a slice of a larger `ArrayBuffer`.
+ */
+export declare function isShared<T extends arrow.DataType>(data: arrow.Data<T> | arrow.Vector<T>): boolean;
+//# sourceMappingURL=hard-clone.d.ts.map

package/dist/workers/hard-clone.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"hard-clone.d.ts","sourceRoot":"","sources":["../../src/workers/hard-clone.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AActC;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,SAAS,CAAC,CAAC,SAAS,KAAK,CAAC,QAAQ,EAChD,KAAK,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EACpB,KAAK,CAAC,EAAE,OAAO,GACd,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACjB,wBAAgB,SAAS,CAAC,CAAC,SAAS,KAAK,CAAC,QAAQ,EAChD,KAAK,EAAE,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EACtB,KAAK,CAAC,EAAE,OAAO,GACd,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAuDnB;;GAEG;AACH,wBAAgB,QAAQ,CAAC,CAAC,SAAS,KAAK,CAAC,QAAQ,EAAE,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,OAAO,CAiCjG"}
package/dist/workers/hard-clone.js
ADDED
@@ -0,0 +1,57 @@
+import * as arrow from 'apache-arrow';
+export function hardClone(data) {
+  let force = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
+  if ('data' in data) {
+    return new arrow.Vector(data.data.map(data => hardClone(data, force)));
+  }
+  const clonedChildren = [];
+  for (const childData of data.children) {
+    clonedChildren.push(hardClone(childData, force));
+  }
+  let clonedDictionary;
+  if (data.dictionary !== undefined) {
+    clonedDictionary = hardClone(data.dictionary, force);
+  }
+  const clonedBuffers = {
+    [arrow.BufferType.OFFSET]: cloneBuffer(data.buffers[arrow.BufferType.OFFSET], force),
+    [arrow.BufferType.DATA]: cloneBuffer(data.buffers[arrow.BufferType.DATA], force),
+    [arrow.BufferType.VALIDITY]: cloneBuffer(data.buffers[arrow.BufferType.VALIDITY], force),
+    [arrow.BufferType.TYPE]: cloneBuffer(data.buffers[arrow.BufferType.TYPE], force)
+  };
+  return new arrow.Data(data.type, data.offset, data.length, data._nullCount, clonedBuffers, clonedChildren, clonedDictionary);
+}
+export function isShared(data) {
+  if ('data' in data) {
+    return data.data.some(data => isShared(data));
+  }
+  for (const childData of data.children) {
+    if (isShared(childData)) {
+      return true;
+    }
+  }
+  if (data.dictionary !== undefined) {
+    if (isShared(data.dictionary)) {
+      return true;
+    }
+  }
+  const bufferTypes = [arrow.BufferType.OFFSET, arrow.BufferType.DATA, arrow.BufferType.VALIDITY, arrow.BufferType.TYPE];
+  for (const bufferType of bufferTypes) {
+    if (data.buffers[bufferType] !== undefined && isTypedArraySliced(data.buffers[bufferType])) {
+      return true;
+    }
+  }
+  return false;
+}
+function isTypedArraySliced(arr) {
+  return !(arr.byteOffset === 0 && arr.byteLength === arr.buffer.byteLength);
+}
+function cloneBuffer(arr, force) {
+  if (arr === undefined) {
+    return arr;
+  }
+  if (!force && !isTypedArraySliced(arr)) {
+    return arr;
+  }
+  return arr.slice();
+}
+//# sourceMappingURL=hard-clone.js.map
package/dist/workers/hard-clone.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"hard-clone.js","names":["arrow","hardClone","data","force","arguments","length","undefined","Vector","map","clonedChildren","childData","children","push","clonedDictionary","dictionary","clonedBuffers","BufferType","OFFSET","cloneBuffer","buffers","DATA","VALIDITY","TYPE","Data","type","offset","_nullCount","isShared","some","bufferTypes","bufferType","isTypedArraySliced","arr","byteOffset","byteLength","buffer","slice"],"sources":["../../src/workers/hard-clone.ts"],"sourcesContent":["import * as arrow from 'apache-arrow';\nimport type {Buffers} from 'apache-arrow/data';\n\ntype TypedArray =\n | Uint8Array\n | Uint8ClampedArray\n | Uint16Array\n | Uint32Array\n | Int8Array\n | Int16Array\n | Int32Array\n | Float32Array\n | Float64Array;\n\n/**\n * Clone an Arrow JS Data or Vector, detaching from an existing ArrayBuffer if\n * it is shared with other.\n *\n * The purpose of this function is to enable transferring a `Data` instance,\n * e.g. to a web worker, without neutering any other data.\n *\n * Any internal buffers that are a slice of a larger `ArrayBuffer` (i.e. where\n * the typed array's `byteOffset` is not `0` and where its `byteLength` does not\n * match its `array.buffer.byteLength`) are copied into new `ArrayBuffers`.\n *\n * If `force` is `true`, always clone internal buffers, even if not shared. If\n * the default, `false`, any internal buffers that are **not** a slice of a\n * larger `ArrayBuffer` will not be copied.\n */\nexport function hardClone<T extends arrow.DataType>(\n input: arrow.Data<T>,\n force?: boolean\n): arrow.Data<T>;\nexport function hardClone<T extends arrow.DataType>(\n input: arrow.Vector<T>,\n force?: boolean\n): arrow.Vector<T>;\n\nexport function hardClone<T extends arrow.DataType>(\n data: arrow.Data<T> | arrow.Vector<T>,\n force: boolean = false\n): arrow.Data<T> | arrow.Vector<T> {\n // Check if `data` is an arrow.Vector\n if ('data' in data) {\n return new arrow.Vector(data.data.map((data) => hardClone(data, force)));\n }\n\n // Clone each of the children, recursively\n const clonedChildren: arrow.Data[] = [];\n for (const childData of data.children) {\n clonedChildren.push(hardClone(childData, force));\n }\n\n // Clone the dictionary if there is one\n let clonedDictionary: arrow.Vector | undefined;\n if (data.dictionary !== undefined) {\n clonedDictionary = hardClone(data.dictionary, force);\n }\n\n // Buffers can have up to four entries. Each of these can be `undefined` for\n // one or more array types.\n //\n // - OFFSET: value offsets for variable size list types\n // - DATA: the underlying data\n // - VALIDITY: the null buffer. This may be empty or undefined if all elements\n // are non-null/valid.\n // - TYPE: type ids for a union type.\n const clonedBuffers: Buffers<T> = {\n [arrow.BufferType.OFFSET]: cloneBuffer(data.buffers[arrow.BufferType.OFFSET], force),\n [arrow.BufferType.DATA]: cloneBuffer(data.buffers[arrow.BufferType.DATA], force),\n [arrow.BufferType.VALIDITY]: cloneBuffer(data.buffers[arrow.BufferType.VALIDITY], force),\n [arrow.BufferType.TYPE]: cloneBuffer(data.buffers[arrow.BufferType.TYPE], force)\n };\n\n // Note: the data.offset is passed on so that a sliced Data instance will not\n // be \"un-sliced\". However keep in mind that this means we're cloning the\n // _original backing buffer_, not only the portion of the Data that was\n // sliced.\n return new arrow.Data(\n data.type,\n data.offset,\n data.length,\n // @ts-expect-error _nullCount is protected. 
We're using it here to mimic\n // `Data.clone`\n data._nullCount,\n clonedBuffers,\n clonedChildren,\n clonedDictionary\n );\n}\n\n/**\n * Test whether an arrow.Data instance is a slice of a larger `ArrayBuffer`.\n */\nexport function isShared<T extends arrow.DataType>(data: arrow.Data<T> | arrow.Vector<T>): boolean {\n // Loop over arrow.Vector\n if ('data' in data) {\n return data.data.some((data) => isShared(data));\n }\n\n // Check child data\n for (const childData of data.children) {\n if (isShared(childData)) {\n return true;\n }\n }\n\n // Check dictionary\n if (data.dictionary !== undefined) {\n if (isShared(data.dictionary)) {\n return true;\n }\n }\n\n const bufferTypes = [\n arrow.BufferType.OFFSET,\n arrow.BufferType.DATA,\n arrow.BufferType.VALIDITY,\n arrow.BufferType.TYPE\n ];\n for (const bufferType of bufferTypes) {\n if (data.buffers[bufferType] !== undefined && isTypedArraySliced(data.buffers[bufferType])) {\n return true;\n }\n }\n\n return false;\n}\n\n/**\n * Returns true if the current typed array is a partial slice on a larger\n * ArrayBuffer\n */\nfunction isTypedArraySliced(arr: TypedArray): boolean {\n return !(arr.byteOffset === 0 && arr.byteLength === arr.buffer.byteLength);\n}\n\n/**\n * If a slice of a larger ArrayBuffer, clone to a fresh `ArrayBuffer`.\n *\n * If `force` is `true`, always clone the array, even if not shared.\n */\nfunction cloneBuffer<A extends TypedArray | undefined>(arr: A, force: boolean): A {\n // Not all buffer types are defined for every type of Arrow array. E.g.\n // `arrow.BufferType.TYPE` is only defined for the Union type.\n if (arr === undefined) {\n return arr;\n }\n\n // The current array is not a part of a larger ArrayBuffer, don't clone it\n if (!force && !isTypedArraySliced(arr)) {\n return arr;\n }\n\n // Note: TypedArray.slice() **copies** into a new ArrayBuffer\n\n // @ts-expect-error 'Uint8Array' is assignable to the constraint of type 'A',\n // but 'A' could be instantiated with a different subtype of constraint\n // 'TypedArray'\n // We know from arr.slice that it will always return the same\n return 
arr.slice();\n}\n"],"mappings":"AAAA,OAAO,KAAKA,KAAK,MAAM,cAAc;AAsCrC,OAAO,SAASC,SAASA,CACvBC,IAAqC,EAEJ;EAAA,IADjCC,KAAc,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,KAAK;EAGtB,IAAI,MAAM,IAAIF,IAAI,EAAE;IAClB,OAAO,IAAIF,KAAK,CAACO,MAAM,CAACL,IAAI,CAACA,IAAI,CAACM,GAAG,CAAEN,IAAI,IAAKD,SAAS,CAACC,IAAI,EAAEC,KAAK,CAAC,CAAC,CAAC;EAC1E;EAGA,MAAMM,cAA4B,GAAG,EAAE;EACvC,KAAK,MAAMC,SAAS,IAAIR,IAAI,CAACS,QAAQ,EAAE;IACrCF,cAAc,CAACG,IAAI,CAACX,SAAS,CAACS,SAAS,EAAEP,KAAK,CAAC,CAAC;EAClD;EAGA,IAAIU,gBAA0C;EAC9C,IAAIX,IAAI,CAACY,UAAU,KAAKR,SAAS,EAAE;IACjCO,gBAAgB,GAAGZ,SAAS,CAACC,IAAI,CAACY,UAAU,EAAEX,KAAK,CAAC;EACtD;EAUA,MAAMY,aAAyB,GAAG;IAChC,CAACf,KAAK,CAACgB,UAAU,CAACC,MAAM,GAAGC,WAAW,CAAChB,IAAI,CAACiB,OAAO,CAACnB,KAAK,CAACgB,UAAU,CAACC,MAAM,CAAC,EAAEd,KAAK,CAAC;IACpF,CAACH,KAAK,CAACgB,UAAU,CAACI,IAAI,GAAGF,WAAW,CAAChB,IAAI,CAACiB,OAAO,CAACnB,KAAK,CAACgB,UAAU,CAACI,IAAI,CAAC,EAAEjB,KAAK,CAAC;IAChF,CAACH,KAAK,CAACgB,UAAU,CAACK,QAAQ,GAAGH,WAAW,CAAChB,IAAI,CAACiB,OAAO,CAACnB,KAAK,CAACgB,UAAU,CAACK,QAAQ,CAAC,EAAElB,KAAK,CAAC;IACxF,CAACH,KAAK,CAACgB,UAAU,CAACM,IAAI,GAAGJ,WAAW,CAAChB,IAAI,CAACiB,OAAO,CAACnB,KAAK,CAACgB,UAAU,CAACM,IAAI,CAAC,EAAEnB,KAAK;EACjF,CAAC;EAMD,OAAO,IAAIH,KAAK,CAACuB,IAAI,CACnBrB,IAAI,CAACsB,IAAI,EACTtB,IAAI,CAACuB,MAAM,EACXvB,IAAI,CAACG,MAAM,EAGXH,IAAI,CAACwB,UAAU,EACfX,aAAa,EACbN,cAAc,EACdI,gBACF,CAAC;AACH;AAKA,OAAO,SAASc,QAAQA,CAA2BzB,IAAqC,EAAW;EAEjG,IAAI,MAAM,IAAIA,IAAI,EAAE;IAClB,OAAOA,IAAI,CAACA,IAAI,CAAC0B,IAAI,CAAE1B,IAAI,IAAKyB,QAAQ,CAACzB,IAAI,CAAC,CAAC;EACjD;EAGA,KAAK,MAAMQ,SAAS,IAAIR,IAAI,CAACS,QAAQ,EAAE;IACrC,IAAIgB,QAAQ,CAACjB,SAAS,CAAC,EAAE;MACvB,OAAO,IAAI;IACb;EACF;EAGA,IAAIR,IAAI,CAACY,UAAU,KAAKR,SAAS,EAAE;IACjC,IAAIqB,QAAQ,CAACzB,IAAI,CAACY,UAAU,CAAC,EAAE;MAC7B,OAAO,IAAI;IACb;EACF;EAEA,MAAMe,WAAW,GAAG,CAClB7B,KAAK,CAACgB,UAAU,CAACC,MAAM,EACvBjB,KAAK,CAACgB,UAAU,CAACI,IAAI,EACrBpB,KAAK,CAACgB,UAAU,CAACK,QAAQ,EACzBrB,KAAK,CAACgB,UAAU,CAACM,IAAI,CACtB;EACD,KAAK,MAAMQ,UAAU,IAAID,WAAW,EAAE;IACpC,IAAI3B,IAAI,CAACiB,OAAO,CAACW,UAAU,CAAC,KAAKxB,SAAS,IAAIyB,kBAAkB,CAAC7B,IAAI,CAACiB,OAAO,CAACW,UAAU,CAAC,CAAC,EAAE;MAC1F,OAAO,IAAI;IACb;EACF;EAEA,OAAO,KAAK;AACd;AAMA,SAASC,kBAAkBA,CAACC,GAAe,EAAW;EACpD,OAAO,EAAEA,GAAG,CAACC,UAAU,KAAK,CAAC,IAAID,GAAG,CAACE,UAAU,KAAKF,GAAG,CAACG,MAAM,CAACD,UAAU,CAAC;AAC5E;AAOA,SAAShB,WAAWA,CAAmCc,GAAM,EAAE7B,KAAc,EAAK;EAGhF,IAAI6B,GAAG,KAAK1B,SAAS,EAAE;IACrB,OAAO0B,GAAG;EACZ;EAGA,IAAI,CAAC7B,KAAK,IAAI,CAAC4B,kBAAkB,CAACC,GAAG,CAAC,EAAE;IACtC,OAAOA,GAAG;EACZ;EAQA,OAAOA,GAAG,CAACI,KAAK,CAAC,CAAC;AACpB"}
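For orientation, here is a minimal sketch of how `hardClone` and `isShared` are meant to be used together, based only on the JSDoc above: clone a chunk when its buffers are slices of a shared `ArrayBuffer`, then transfer those buffers to a worker. The deep import path and the `Worker` wiring are illustrative assumptions, not documented API:

```ts
import * as arrow from 'apache-arrow';
// Assumed import path; hard-clone may not be re-exported from the package index.
import {hardClone, isShared} from '@loaders.gl/arrow/dist/workers/hard-clone';

// Assumes `table` was parsed from a single Arrow IPC buffer, so each chunk's
// typed arrays are slices of one large shared ArrayBuffer.
function postChunkToWorker(worker: Worker, table: arrow.Table, columnName: string): void {
  const column = table.getChild(columnName);
  if (!column) {
    throw new Error(`no column named ${columnName}`);
  }
  const chunk = column.data[0];

  // Clone only when needed: transferring a shared buffer would neuter
  // every other column and chunk backed by the same ArrayBuffer.
  const safeChunk = isShared(chunk) ? hardClone(chunk) : chunk;

  // Collect the backing ArrayBuffers as the transfer list.
  const transferables: ArrayBuffer[] = [];
  for (const typedArray of Object.values(safeChunk.buffers)) {
    if (typedArray) {
      transferables.push(typedArray.buffer as ArrayBuffer);
    }
  }
  worker.postMessage({chunk: safeChunk}, transferables);
}
```

Structured cloning strips the `arrow.Data` prototype in transit, which is why the worker in `triangulation-worker.js` below rebuilds the chunk with `new arrow.Data(...)` and `arrow.makeVector(...)` before using it.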
package/dist/workers/triangulation-worker.js
CHANGED
@@ -1,5 +1,6 @@
+import * as arrow from 'apache-arrow';
 import { createWorker } from '@loaders.gl/worker-utils';
-import { getTriangleIndices } from "../geoarrow/convert-geoarrow-to-binary-geometry.js";
+import { getTriangleIndices, getBinaryGeometriesFromArrow } from "../geoarrow/convert-geoarrow-to-binary-geometry.js";
 createWorker(async function (data) {
   let options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
   const input = data;
@@ -9,6 +10,8 @@ createWorker(async function (data) {
       return input;
     case 'triangulate':
       return triangulateBatch(data);
+    case 'parse-geoarrow':
+      return parseGeoArrowBatch(data);
     default:
       throw new Error(`TriangulationWorker: Unsupported operation ${operation}. Expected 'triangulate'`);
   }
@@ -18,7 +21,39 @@ function triangulateBatch(data) {
   const triangleIndices = getTriangleIndices(data.polygonIndices, data.primitivePolygonIndices, data.flatCoordinateArray, data.nDim);
   return {
     ...data,
-    triangleIndices
+    ...(triangleIndices ? {
+      triangleIndices
+    } : {})
+  };
+}
+function parseGeoArrowBatch(data) {
+  let binaryDataFromGeoArrow = null;
+  const {
+    chunkData,
+    chunkIndex,
+    chunkOffset,
+    geometryEncoding,
+    calculateMeanCenters,
+    triangle
+  } = data;
+  const arrowData = new arrow.Data(chunkData.type, chunkData.offset, chunkData.length, chunkData.nullCount, chunkData.buffers, chunkData.children, chunkData.dictionary);
+  const geometryColumn = arrow.makeVector(arrowData);
+  if (geometryColumn) {
+    const options = {
+      calculateMeanCenters,
+      triangle,
+      chunkIndex: 0,
+      chunkOffset
+    };
+    binaryDataFromGeoArrow = getBinaryGeometriesFromArrow(geometryColumn, geometryEncoding, options);
+    return {
+      binaryDataFromGeoArrow,
+      chunkIndex
+    };
+  }
+  return {
+    binaryDataFromGeoArrow,
+    chunkIndex
   };
 }
 //# sourceMappingURL=triangulation-worker.js.map
package/dist/workers/triangulation-worker.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"triangulation-worker.js","names":["createWorker","getTriangleIndices","data","options","arguments","length","undefined","input","operation","triangulateBatch","Error","console","error","triangleIndices","polygonIndices","primitivePolygonIndices","flatCoordinateArray","nDim"],"sources":["../../src/workers/triangulation-worker.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport {createWorker} from '@loaders.gl/worker-utils';\nimport {getTriangleIndices} from '../geoarrow/convert-geoarrow-to-binary-geometry';\nimport type {\n TriangulationWorkerInput,\n TriangulateInput,\n TriangulateResult\n} from '../triangulate-on-worker';\n\ncreateWorker(async (data, options = {}) => {\n const input = data as TriangulationWorkerInput;\n const operation = input?.operation;\n switch (operation) {\n case 'test':\n return input;\n case 'triangulate':\n return triangulateBatch(data);\n default:\n throw new Error(\n `TriangulationWorker: Unsupported operation ${operation}. Expected 'triangulate'`\n );\n }\n});\n\nfunction triangulateBatch(data: TriangulateInput): TriangulateResult {\n // Parse any WKT/WKB geometries\n // Build binary geometries\n // Call earcut and triangulate\n console.error('TriangulationWorker: tessellating batch', data);\n const triangleIndices = getTriangleIndices(\n data.polygonIndices,\n data.primitivePolygonIndices,\n data.flatCoordinateArray,\n data.nDim\n );\n return {...data, triangleIndices};\n}\n"],"mappings":"AAGA,
+
{"version":3,"file":"triangulation-worker.js","names":["arrow","createWorker","getTriangleIndices","getBinaryGeometriesFromArrow","data","options","arguments","length","undefined","input","operation","triangulateBatch","parseGeoArrowBatch","Error","console","error","triangleIndices","polygonIndices","primitivePolygonIndices","flatCoordinateArray","nDim","binaryDataFromGeoArrow","chunkData","chunkIndex","chunkOffset","geometryEncoding","calculateMeanCenters","triangle","arrowData","Data","type","offset","nullCount","buffers","children","dictionary","geometryColumn","makeVector"],"sources":["../../src/workers/triangulation-worker.ts"],"sourcesContent":["// loaders.gl, MIT license\n// Copyright (c) vis.gl contributors\n\nimport * as arrow from 'apache-arrow';\nimport {createWorker} from '@loaders.gl/worker-utils';\nimport {\n getTriangleIndices,\n getBinaryGeometriesFromArrow,\n BinaryDataFromGeoArrow\n} from '../geoarrow/convert-geoarrow-to-binary-geometry';\nimport type {\n TriangulationWorkerInput,\n TriangulateInput,\n TriangulateResult,\n ParseGeoArrowInput,\n ParseGeoArrowResult\n} from '../triangulate-on-worker';\n\ncreateWorker(async (data, options = {}) => {\n const input = data as TriangulationWorkerInput;\n const operation = input?.operation;\n switch (operation) {\n case 'test':\n return input;\n case 'triangulate':\n return triangulateBatch(data);\n case 'parse-geoarrow':\n return parseGeoArrowBatch(data);\n default:\n throw new Error(\n `TriangulationWorker: Unsupported operation ${operation}. Expected 'triangulate'`\n );\n }\n});\n\nfunction triangulateBatch(data: TriangulateInput): TriangulateResult {\n // Parse any WKT/WKB geometries\n // Build binary geometries\n // Call earcut and triangulate\n console.error('TriangulationWorker: tessellating batch', data);\n const triangleIndices = getTriangleIndices(\n data.polygonIndices,\n data.primitivePolygonIndices,\n data.flatCoordinateArray,\n data.nDim\n );\n return {...data, ...(triangleIndices ? {triangleIndices} : {})};\n}\n\n/**\n * Reading the arrow file into memory is very fast. 
Parsing the geoarrow column is slow, and blocking the main thread.\n * To address this issue, we can move the parsing job from main thread to parallel web workers.\n * Each web worker will parse one chunk/batch of geoarrow column, and return binary geometries to main thread.\n * The app on the main thread will render the binary geometries and the parsing will not block the main thread.\n *\n * @param data\n * @returns\n */\nfunction parseGeoArrowBatch(data: ParseGeoArrowInput): ParseGeoArrowResult {\n let binaryDataFromGeoArrow: BinaryDataFromGeoArrow | null = null;\n const {chunkData, chunkIndex, chunkOffset, geometryEncoding, calculateMeanCenters, triangle} =\n data;\n // rebuild chunkData that is only for geoarrow column\n const arrowData = new arrow.Data(\n chunkData.type,\n chunkData.offset,\n chunkData.length,\n chunkData.nullCount,\n chunkData.buffers,\n chunkData.children,\n chunkData.dictionary\n );\n // rebuild geometry column with chunkData\n const geometryColumn = arrow.makeVector(arrowData);\n if (geometryColumn) {\n // NOTE: for a rebuild arrow.Vector, there is only one chunk, so chunkIndex is always 0\n const options = {calculateMeanCenters, triangle, chunkIndex: 0, chunkOffset};\n binaryDataFromGeoArrow = getBinaryGeometriesFromArrow(\n geometryColumn,\n geometryEncoding,\n options\n );\n // NOTE: here binaryGeometry will be copied to main thread\n return {\n binaryDataFromGeoArrow,\n chunkIndex\n };\n }\n return {\n binaryDataFromGeoArrow,\n chunkIndex\n };\n}\n"],"mappings":"AAGA,OAAO,KAAKA,KAAK,MAAM,cAAc;AACrC,SAAQC,YAAY,QAAO,0BAA0B;AAAC,SAEpDC,kBAAkB,EAClBC,4BAA4B;AAW9BF,YAAY,CAAC,gBAAOG,IAAI,EAAmB;EAAA,IAAjBC,OAAO,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC,CAAC;EACpC,MAAMG,KAAK,GAAGL,IAAgC;EAC9C,MAAMM,SAAS,GAAGD,KAAK,aAALA,KAAK,uBAALA,KAAK,CAAEC,SAAS;EAClC,QAAQA,SAAS;IACf,KAAK,MAAM;MACT,OAAOD,KAAK;IACd,KAAK,aAAa;MAChB,OAAOE,gBAAgB,CAACP,IAAI,CAAC;IAC/B,KAAK,gBAAgB;MACnB,OAAOQ,kBAAkB,CAACR,IAAI,CAAC;IACjC;MACE,MAAM,IAAIS,KAAK,CACZ,8CAA6CH,SAAU,0BAC1D,CAAC;EACL;AACF,CAAC,CAAC;AAEF,SAASC,gBAAgBA,CAACP,IAAsB,EAAqB;EAInEU,OAAO,CAACC,KAAK,CAAC,yCAAyC,EAAEX,IAAI,CAAC;EAC9D,MAAMY,eAAe,GAAGd,kBAAkB,CACxCE,IAAI,CAACa,cAAc,EACnBb,IAAI,CAACc,uBAAuB,EAC5Bd,IAAI,CAACe,mBAAmB,EACxBf,IAAI,CAACgB,IACP,CAAC;EACD,OAAO;IAAC,GAAGhB,IAAI;IAAE,IAAIY,eAAe,GAAG;MAACA;IAAe,CAAC,GAAG,CAAC,CAAC;EAAC,CAAC;AACjE;AAWA,SAASJ,kBAAkBA,CAACR,IAAwB,EAAuB;EACzE,IAAIiB,sBAAqD,GAAG,IAAI;EAChE,MAAM;IAACC,SAAS;IAAEC,UAAU;IAAEC,WAAW;IAAEC,gBAAgB;IAAEC,oBAAoB;IAAEC;EAAQ,CAAC,GAC1FvB,IAAI;EAEN,MAAMwB,SAAS,GAAG,IAAI5B,KAAK,CAAC6B,IAAI,CAC9BP,SAAS,CAACQ,IAAI,EACdR,SAAS,CAACS,MAAM,EAChBT,SAAS,CAACf,MAAM,EAChBe,SAAS,CAACU,SAAS,EACnBV,SAAS,CAACW,OAAO,EACjBX,SAAS,CAACY,QAAQ,EAClBZ,SAAS,CAACa,UACZ,CAAC;EAED,MAAMC,cAAc,GAAGpC,KAAK,CAACqC,UAAU,CAACT,SAAS,CAAC;EAClD,IAAIQ,cAAc,EAAE;IAElB,MAAM/B,OAAO,GAAG;MAACqB,oBAAoB;MAAEC,QAAQ;MAAEJ,UAAU,EAAE,CAAC;MAAEC;IAAW,CAAC;IAC5EH,sBAAsB,GAAGlB,4BAA4B,CACnDiC,cAAc,EACdX,gBAAgB,EAChBpB,OACF,CAAC;IAED,OAAO;MACLgB,sBAAsB;MACtBE;IACF,CAAC;EACH;EACA,OAAO;IACLF,sBAAsB;IACtBE;EACF,CAAC;AACH"}
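To see how the pieces above fit together, here is a hedged sketch of the main-thread side of a `parse-geoarrow` round trip. The payload field names (`chunkData`, `chunkIndex`, `chunkOffset`, `geometryEncoding`, `calculateMeanCenters`, `triangle`) come from the worker source above; the raw `Worker`/`postMessage` wiring is a simplification, since the real package routes these messages through `createWorker`'s protocol and the helper in `src/triangulate-on-worker.ts`:

```ts
import * as arrow from 'apache-arrow';

// Illustrative wiring only; see triangulate-on-worker.ts for the real plumbing.
const worker = new Worker('./dist/triangulation-worker.js');

function parseGeoArrowChunkOnWorker(
  geoColumn: arrow.Vector,
  chunkIndex: number,
  geometryEncoding: string
): Promise<unknown> {
  // Row offset of this chunk within the whole column. The worker only sees a
  // single rebuilt chunk (its local chunkIndex is always 0), so it needs this
  // offset to reconstruct globalFeatureIds.
  const chunkOffset = geoColumn.data
    .slice(0, chunkIndex)
    .reduce((rows, data) => rows + data.length, 0);

  return new Promise((resolve) => {
    worker.onmessage = (event) => resolve(event.data);
    worker.postMessage({
      operation: 'parse-geoarrow',
      chunkData: geoColumn.data[chunkIndex], // one arrow.Data chunk
      chunkIndex,
      chunkOffset,
      geometryEncoding, // e.g. 'geoarrow.polygon'
      calculateMeanCenters: true,
      triangle: true
    });
  });
}
```

Each chunk can be farmed out to a worker this way, so the expensive GeoArrow-to-binary conversion no longer blocks the main thread — the stated motivation in the source map's embedded comments.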
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@loaders.gl/arrow",
-  "version": "4.0.4",
+  "version": "4.1.0-alpha.2",
   "description": "Simple columnar table loader for the Apache Arrow format",
   "license": "MIT",
   "type": "module",
@@ -12,12 +12,17 @@
     "url": "https://github.com/visgl/loaders.gl"
   },
   "keywords": [
-    "webgl",
     "loader",
-    "
-    "
-    "
-    "
+    "parser",
+    "writer",
+    "encoder",
+    "geoarrow",
+    "apache-arrow",
+    "arrow",
+    "binary columnar",
+    "cloud native",
+    "webgl",
+    "webgpu"
   ],
   "types": "dist/index.d.ts",
   "main": "dist/index.cjs",
@@ -47,11 +52,12 @@
     "build-worker2": "esbuild src/workers/arrow-worker.ts --bundle --outfile=dist/arrow-worker.js --platform=browser --external:{stream}"
   },
   "dependencies": {
-    "@loaders.gl/gis": "4.0.4",
-    "@loaders.gl/loader-utils": "4.0.4",
-    "@loaders.gl/schema": "4.0.4",
+    "@loaders.gl/gis": "4.1.0-alpha.2",
+    "@loaders.gl/loader-utils": "4.1.0-alpha.2",
+    "@loaders.gl/schema": "4.1.0-alpha.2",
+    "@loaders.gl/wkt": "4.1.0-alpha.2",
     "@math.gl/polygon": "4.0.0",
     "apache-arrow": "^13.0.0"
   },
-  "gitHead": "
+  "gitHead": "a248382edd20e846c1ccb23c15d089fb9b368dbc"
 }
package/src/geoarrow/convert-geoarrow-to-binary-geometry.ts
CHANGED
@@ -8,6 +8,15 @@ import {GeoArrowEncoding} from '@loaders.gl/gis';
 import {updateBoundsFromGeoArrowSamples} from './get-arrow-bounds';
 import {TypedArray} from '@loaders.gl/loader-utils';
 
+/**
+ * Binary geometry type
+ */
+enum BinaryGeometryType {
+  points = 'points',
+  lines = 'lines',
+  polygons = 'polygons'
+}
+
 /**
  * Binary data from geoarrow column and can be used by e.g. deck.gl GeojsonLayer
  */
@@ -36,7 +45,7 @@ type BinaryGeometryContent = {
   geomOffset: Int32Array;
   /** Array of geometry indicies: the start index of each geometry */
   geometryIndicies: Uint16Array;
-  /** (Optional) indices of triangels returned from polygon
+  /** (Optional) indices of triangels returned from polygon triangulation (Polygon only) */
   triangles?: Uint32Array;
   /** (Optional) array of mean center of each geometry */
   meanCenters?: Float64Array;
@@ -45,19 +54,25 @@
 /**
  * binary geometry template, see deck.gl BinaryGeometry
  */
-export
-
-
-
-
-
-}
+export function getBinaryGeometryTemplate() {
+  return {
+    globalFeatureIds: {value: new Uint32Array(0), size: 1},
+    positions: {value: new Float32Array(0), size: 2},
+    properties: [],
+    numericProps: {},
+    featureIds: {value: new Uint32Array(0), size: 1}
+  };
+}
 
 export type BinaryGeometriesFromArrowOptions = {
   /** option to specify which chunk to get binary geometries from, for progressive rendering */
   chunkIndex?: number;
+  /** The offset (beginning index of rows) of input chunk. Used for reconstructing globalFeatureIds in web workers */
+  chunkOffset?: number;
   /** option to get mean centers from geometries, for polygon filtering */
-
+  calculateMeanCenters?: boolean;
+  /** option to compute the triangle indices by tesselating polygons */
+  triangulate?: boolean;
 };
 
 /**
@@ -79,14 +94,17 @@ export function getBinaryGeometriesFromArrow(
     line: geoEncoding === 'geoarrow.multilinestring' || geoEncoding === 'geoarrow.linestring'
   };
 
-  const chunks =
+  const chunks =
+    options?.chunkIndex !== undefined && options?.chunkIndex >= 0
+      ? [geoColumn.data[options?.chunkIndex]]
+      : geoColumn.data;
   let bounds: [number, number, number, number] = [Infinity, Infinity, -Infinity, -Infinity];
-  let globalFeatureIdOffset = 0;
+  let globalFeatureIdOffset = options?.chunkOffset || 0;
   const binaryGeometries: BinaryFeatures[] = [];
 
   chunks.forEach((chunk) => {
     const {featureIds, flatCoordinateArray, nDim, geomOffset, triangles} =
-      getBinaryGeometriesFromChunk(chunk, geoEncoding);
+      getBinaryGeometriesFromChunk(chunk, geoEncoding, options);
 
     const globalFeatureIds = new Uint32Array(featureIds.length);
     for (let i = 0; i < featureIds.length; i++) {
@@ -100,6 +118,7 @@ export function getBinaryGeometriesFromArrow(
         size: nDim
       },
       featureIds: {value: featureIds, size: 1},
+      // eslint-disable-next-line no-loop-func
       properties: [...Array(chunk.length).keys()].map((i) => ({
         index: i + globalFeatureIdOffset
       }))
@@ -112,18 +131,18 @@ export function getBinaryGeometriesFromArrow(
       shape: 'binary-feature-collection',
       points: {
         type: 'Point',
-        ...
+        ...getBinaryGeometryTemplate(),
         ...(featureTypes.point ? binaryContent : {})
       },
       lines: {
         type: 'LineString',
-        ...
+        ...getBinaryGeometryTemplate(),
         ...(featureTypes.line ? binaryContent : {}),
         pathIndices: {value: featureTypes.line ? geomOffset : new Uint16Array(0), size: 1}
       },
       polygons: {
         type: 'Polygon',
-        ...
+        ...getBinaryGeometryTemplate(),
         ...(featureTypes.polygon ? binaryContent : {}),
         polygonIndices: {
           // use geomOffset as polygonIndices same as primitivePolygonIndices since we are using earcut to get triangule indices
@@ -145,7 +164,7 @@ export function getBinaryGeometriesFromArrow(
     binaryGeometries,
     bounds,
     featureTypes,
-    ...(options?.
+    ...(options?.calculateMeanCenters
       ? {meanCenters: getMeanCentersFromBinaryGeometries(binaryGeometries)}
      : {})
  };
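Taken together, the new options give `getBinaryGeometriesFromArrow` this shape of call. A sketch under two assumptions: that the function is re-exported from the package index, and that `table` holds a `geoarrow.polygon` column named `geometry`:

```ts
import * as arrow from 'apache-arrow';
import {getBinaryGeometriesFromArrow} from '@loaders.gl/arrow'; // assumed re-export

declare const table: arrow.Table; // assumed input

const geoColumn = table.getChild('geometry') as arrow.Vector;
const {binaryGeometries, bounds, featureTypes, meanCenters} = getBinaryGeometriesFromArrow(
  geoColumn,
  'geoarrow.polygon',
  {
    chunkIndex: 0, // parse only the first chunk, for progressive rendering
    chunkOffset: 0, // row offset of that chunk, seeds globalFeatureIdOffset
    calculateMeanCenters: true, // adds meanCenters, used for polygon filtering
    triangulate: true // runs earcut so polygons carry triangle indices
  }
);
```

The resulting `binaryGeometries` are binary feature collections in the shape deck.gl's GeoJsonLayer accepts as binary data.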
@@ -159,13 +178,13 @@
 export function getMeanCentersFromBinaryGeometries(binaryGeometries: BinaryFeatures[]): number[][] {
   const globalMeanCenters: number[][] = [];
   binaryGeometries.forEach((binaryGeometry: BinaryFeatures) => {
-    let binaryGeometryType:
+    let binaryGeometryType: keyof typeof BinaryGeometryType | null = null;
     if (binaryGeometry.points && binaryGeometry.points.positions.value.length > 0) {
-      binaryGeometryType =
+      binaryGeometryType = BinaryGeometryType.points;
     } else if (binaryGeometry.lines && binaryGeometry.lines.positions.value.length > 0) {
-      binaryGeometryType =
+      binaryGeometryType = BinaryGeometryType.lines;
     } else if (binaryGeometry.polygons && binaryGeometry.polygons.positions.value.length > 0) {
-      binaryGeometryType =
+      binaryGeometryType = BinaryGeometryType.polygons;
     }
 
     const binaryContent = binaryGeometryType ? binaryGeometry[binaryGeometryType] : null;
@@ -173,7 +192,8 @@ export function getMeanCentersFromBinaryGeometries(binaryGeometries: BinaryFeatu
       const featureIds = binaryContent.featureIds.value;
       const flatCoordinateArray = binaryContent.positions.value;
       const nDim = binaryContent.positions.size;
-      const primitivePolygonIndices =
+      const primitivePolygonIndices =
+        binaryContent.type === 'Polygon' ? binaryContent.primitivePolygonIndices?.value : undefined;
 
       const meanCenters = getMeanCentersFromGeometry(
         featureIds,
@@ -201,30 +221,33 @@ function getMeanCentersFromGeometry(
   featureIds: TypedArray,
   flatCoordinateArray: TypedArray,
   nDim: number,
-  geometryType:
+  geometryType: keyof typeof BinaryGeometryType,
   primitivePolygonIndices?: TypedArray
 ) {
   const meanCenters: number[][] = [];
   const vertexCount = flatCoordinateArray.length;
   let vertexIndex = 0;
+  let coordIdx = 0;
+  let primitiveIdx = 0;
   while (vertexIndex < vertexCount) {
     const featureId = featureIds[vertexIndex / nDim];
     const center = [0, 0];
     let vertexCountInFeature = 0;
-    while (vertexIndex < vertexCount && featureIds[
+    while (vertexIndex < vertexCount && featureIds[coordIdx] === featureId) {
       if (
-        geometryType ===
-        primitivePolygonIndices
-        primitivePolygonIndices.indexOf(vertexIndex / nDim) >= 0
+        geometryType === BinaryGeometryType.polygons &&
+        primitivePolygonIndices?.[primitiveIdx] === coordIdx
       ) {
         // skip the first point since it is the same as the last point in each ring for polygons
         vertexIndex += nDim;
+        primitiveIdx++;
       } else {
         center[0] += flatCoordinateArray[vertexIndex];
         center[1] += flatCoordinateArray[vertexIndex + 1];
         vertexIndex += nDim;
         vertexCountInFeature++;
       }
+      coordIdx += 1;
     }
     center[0] /= vertexCountInFeature;
     center[1] /= vertexCountInFeature;
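A worked example of the loop above for the `polygons` case, with made-up values: one square feature whose single ring is closed, so its first vertex duplicates its last.

```ts
// Assumed inputs (nDim = 2): ring (0,0) (4,0) (4,4) (0,4) (0,0)
const featureIds = [0, 0, 0, 0, 0]; // one entry per coordinate pair
const flatCoordinateArray = [0, 0, 4, 0, 4, 4, 0, 4, 0, 0];
const primitivePolygonIndices = [0, 5]; // the ring starts at vertex 0

// The loop skips vertex 0 (a ring start, duplicated by the closing vertex)
// and averages the remaining four vertices:
//   center = ((4 + 4 + 0 + 0) / 4, (0 + 4 + 4 + 0) / 4) = (2, 2)
```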
@@ -237,11 +260,13 @@
  * get binary geometries from geoarrow column
  * @param chunk one chunk/batch of geoarrow column
  * @param geoEncoding geo encoding of the geoarrow column
+ * @param options options for getting binary geometries
  * @returns BinaryGeometryContent
  */
 function getBinaryGeometriesFromChunk(
   chunk: arrow.Data,
-  geoEncoding: GeoArrowEncoding
+  geoEncoding: GeoArrowEncoding,
+  options?: BinaryGeometriesFromArrowOptions
 ): BinaryGeometryContent {
   switch (geoEncoding) {
     case 'geoarrow.point':
@@ -252,7 +277,7 @@ function getBinaryGeometriesFromChunk(
       return getBinaryLinesFromChunk(chunk, geoEncoding);
     case 'geoarrow.polygon':
     case 'geoarrow.multipolygon':
-      return getBinaryPolygonsFromChunk(chunk, geoEncoding);
+      return getBinaryPolygonsFromChunk(chunk, geoEncoding, options);
     default:
       throw Error('invalid geoarrow encoding');
   }
@@ -264,54 +289,68 @@
  * @param primitivePolygonIndices Indices within positions of the start of each primitive Polygon/ring
  * @param flatCoordinateArray Array of x, y or x, y, z positions
  * @param nDim - number of dimensions per position
- * @returns
+ * @returns triangle indices or null if invalid polygon and earcut fails
  */
 export function getTriangleIndices(
   polygonIndices: Uint16Array,
   primitivePolygonIndices: Int32Array,
   flatCoordinateArray: Float64Array,
   nDim: number
-): Uint32Array {
-
-
-
-
-
-
-
-
-
-
-
-
-
+): Uint32Array | null {
+  try {
+    let primitiveIndex = 0;
+    const triangles: number[] = [];
+    // loop polygonIndices to get triangles
+    for (let i = 0; i < polygonIndices.length - 1; i++) {
+      const startIdx = polygonIndices[i];
+      const endIdx = polygonIndices[i + 1];
+      // get subarray of flatCoordinateArray
+      const slicedFlatCoords = flatCoordinateArray.subarray(startIdx * nDim, endIdx * nDim);
+      // get holeIndices for earcut
+      const holeIndices: number[] = [];
+      while (primitivePolygonIndices[primitiveIndex] < endIdx) {
+        if (primitivePolygonIndices[primitiveIndex] > startIdx) {
+          holeIndices.push(primitivePolygonIndices[primitiveIndex] - startIdx);
+        }
+        primitiveIndex++;
+      }
+      // TODO check if each ring is closed
+      const triangleIndices = earcut(
+        slicedFlatCoords,
+        holeIndices.length > 0 ? holeIndices : undefined,
+        nDim
+      );
+      if (triangleIndices.length === 0) {
+        throw Error('earcut failed e.g. invalid polygon');
+      }
+      for (let j = 0; j < triangleIndices.length; j++) {
+        triangles.push(triangleIndices[j] + startIdx);
       }
-    primitiveIndex++;
     }
-
-
-
-
-  );
-  for (let j = 0; j < triangleIndices.length; j++) {
-    triangles.push(triangleIndices[j] + startIdx);
+    // convert traingles to Uint32Array
+    const trianglesUint32 = new Uint32Array(triangles.length);
+    for (let i = 0; i < triangles.length; i++) {
+      trianglesUint32[i] = triangles[i];
     }
+    return trianglesUint32;
+  } catch (error) {
+    // if earcut fails, return null
+    return null;
   }
-  // convert traingles to Uint32Array
-  const trianglesUint32 = new Uint32Array(triangles.length);
-  for (let i = 0; i < triangles.length; i++) {
-    trianglesUint32[i] = triangles[i];
-  }
-  return trianglesUint32;
 }
 
 /**
  * get binary polygons from geoarrow polygon column
  * @param chunk one chunk of geoarrow polygon column
  * @param geoEncoding the geo encoding of the geoarrow polygon column
+ * @param options options for getting binary geometries
  * @returns BinaryGeometryContent
  */
-function getBinaryPolygonsFromChunk(
+function getBinaryPolygonsFromChunk(
+  chunk: arrow.Data,
+  geoEncoding: string,
+  options?: BinaryGeometriesFromArrowOptions
+): BinaryGeometryContent {
   const isMultiPolygon = geoEncoding === 'geoarrow.multipolygon';
 
   const polygonData = isMultiPolygon ? chunk.children[0] : chunk;
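To make the hole-index bookkeeping in `getTriangleIndices` concrete, here is a standalone sketch of the underlying `earcut` call (this package already depends on `@math.gl/polygon`, which exports `earcut`); the coordinates are invented:

```ts
import {earcut} from '@math.gl/polygon';

// Outer 10x10 square with a 4x4 square hole; flat [x, y] coordinates.
const flatCoords = [
  0, 0, 10, 0, 10, 10, 0, 10, // outer ring: vertices 0..3
  3, 3, 7, 3, 7, 7, 3, 7 // hole ring: vertices 4..7
];
// Hole indices are vertex indices where each hole ring starts — exactly what
// the `primitivePolygonIndices[primitiveIndex] - startIdx` adjustment produces.
const holeIndices = [4];
const triangleIndices = earcut(flatCoords, holeIndices, 2);

// earcut returns a flat list of vertex indices, three per triangle; an empty
// result signals a degenerate polygon, which getTriangleIndices turns into null.
console.log(triangleIndices.length / 3, 'triangles');
```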
@@ -341,14 +380,17 @@ function getBinaryPolygonsFromChunk(chunk: arrow.Data, geoEncoding: string): Bin
     }
   }
 
-  const triangles =
+  const triangles = options?.triangulate
+    ? getTriangleIndices(geometryIndicies, geomOffset, flatCoordinateArray, nDim)
+    : null;
+
   return {
     featureIds,
-    flatCoordinateArray,
     nDim,
+    flatCoordinateArray,
     geomOffset,
     geometryIndicies,
-    triangles
+    ...(options?.triangulate && triangles ? {triangles} : {})
   };
 }
 