@loaders.gl/arrow 4.0.2 → 4.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/arrow-loader.d.ts +6 -3
- package/dist/arrow-loader.d.ts.map +1 -1
- package/dist/arrow-loader.js +9 -1
- package/dist/arrow-loader.js.map +1 -1
- package/dist/arrow-worker.js +37 -537
- package/dist/arrow-writer.d.ts +2 -2
- package/dist/arrow-writer.d.ts.map +1 -1
- package/dist/arrow-writer.js.map +1 -1
- package/dist/dist.dev.js +1615 -246
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.d.ts +31 -1
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.d.ts.map +1 -1
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js +133 -23
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js.map +1 -1
- package/dist/geoarrow/convert-geoarrow-to-geojson.d.ts +2 -1
- package/dist/geoarrow/convert-geoarrow-to-geojson.d.ts.map +1 -1
- package/dist/geoarrow/convert-geoarrow-to-geojson.js +4 -0
- package/dist/geoarrow/convert-geoarrow-to-geojson.js.map +1 -1
- package/dist/geoarrow-loader.d.ts +19 -0
- package/dist/geoarrow-loader.d.ts.map +1 -0
- package/dist/geoarrow-loader.js +24 -0
- package/dist/geoarrow-loader.js.map +1 -0
- package/dist/geoarrow-writer.d.ts +9 -0
- package/dist/geoarrow-writer.d.ts.map +1 -0
- package/dist/geoarrow-writer.js +19 -0
- package/dist/geoarrow-writer.js.map +1 -0
- package/dist/index.cjs +508 -284
- package/dist/index.d.ts +9 -12
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +7 -13
- package/dist/index.js.map +1 -1
- package/dist/lib/arrow-table-batch.d.ts.map +1 -1
- package/dist/lib/arrow-table-batch.js.map +1 -1
- package/dist/lib/encode-arrow.d.ts.map +1 -1
- package/dist/lib/encode-arrow.js.map +1 -1
- package/dist/lib/encode-geoarrow.d.ts +15 -0
- package/dist/lib/encode-geoarrow.d.ts.map +1 -0
- package/dist/lib/encode-geoarrow.js +22 -0
- package/dist/lib/encode-geoarrow.js.map +1 -0
- package/dist/{lib → parsers}/parse-arrow-in-batches.d.ts +1 -1
- package/dist/parsers/parse-arrow-in-batches.d.ts.map +1 -0
- package/dist/parsers/parse-arrow-in-batches.js.map +1 -0
- package/dist/parsers/parse-arrow-sync.d.ts +6 -0
- package/dist/parsers/parse-arrow-sync.d.ts.map +1 -0
- package/dist/parsers/parse-arrow-sync.js +26 -0
- package/dist/parsers/parse-arrow-sync.js.map +1 -0
- package/dist/parsers/parse-geoarrow-in-batches.d.ts +6 -0
- package/dist/parsers/parse-geoarrow-in-batches.d.ts.map +1 -0
- package/dist/parsers/parse-geoarrow-in-batches.js +5 -0
- package/dist/parsers/parse-geoarrow-in-batches.js.map +1 -0
- package/dist/parsers/parse-geoarrow-sync.d.ts +6 -0
- package/dist/parsers/parse-geoarrow-sync.d.ts.map +1 -0
- package/dist/parsers/parse-geoarrow-sync.js +14 -0
- package/dist/parsers/parse-geoarrow-sync.js.map +1 -0
- package/dist/schema/convert-arrow-schema.d.ts.map +1 -1
- package/dist/schema/convert-arrow-schema.js +32 -5
- package/dist/schema/convert-arrow-schema.js.map +1 -1
- package/dist/tables/convert-arrow-to-columnar-table.d.ts +8 -0
- package/dist/tables/convert-arrow-to-columnar-table.d.ts.map +1 -0
- package/dist/tables/convert-arrow-to-columnar-table.js +14 -0
- package/dist/tables/convert-arrow-to-columnar-table.js.map +1 -0
- package/dist/tables/convert-arrow-to-geojson-table.d.ts +16 -0
- package/dist/tables/convert-arrow-to-geojson-table.d.ts.map +1 -0
- package/dist/tables/convert-arrow-to-geojson-table.js +33 -0
- package/dist/tables/convert-arrow-to-geojson-table.js.map +1 -0
- package/dist/tables/convert-columnar-to-row-table.d.ts +7 -0
- package/dist/tables/convert-columnar-to-row-table.d.ts.map +1 -0
- package/dist/tables/convert-columnar-to-row-table.js +18 -0
- package/dist/tables/convert-columnar-to-row-table.js.map +1 -0
- package/dist/triangulate-on-worker.d.ts +36 -0
- package/dist/triangulate-on-worker.d.ts.map +1 -0
- package/dist/triangulate-on-worker.js +14 -0
- package/dist/triangulate-on-worker.js.map +1 -0
- package/dist/triangulation-worker.js +880 -0
- package/dist/workers/arrow-worker.js +1 -1
- package/dist/workers/arrow-worker.js.map +1 -1
- package/dist/workers/triangulation-worker-node.d.ts +2 -0
- package/dist/workers/triangulation-worker-node.d.ts.map +1 -0
- package/dist/workers/triangulation-worker-node.js +2 -0
- package/dist/workers/triangulation-worker-node.js.map +1 -0
- package/dist/workers/triangulation-worker.d.ts +2 -0
- package/dist/workers/triangulation-worker.d.ts.map +1 -0
- package/dist/workers/triangulation-worker.js +24 -0
- package/dist/workers/triangulation-worker.js.map +1 -0
- package/package.json +9 -7
- package/src/arrow-loader.ts +25 -3
- package/src/arrow-writer.ts +2 -2
- package/src/geoarrow/convert-geoarrow-to-binary-geometry.ts +221 -30
- package/src/geoarrow/convert-geoarrow-to-geojson.ts +6 -2
- package/src/geoarrow-loader.ts +55 -0
- package/src/geoarrow-writer.ts +41 -0
- package/src/index.ts +30 -36
- package/src/lib/arrow-table-batch.ts +3 -0
- package/src/lib/encode-arrow.ts +3 -0
- package/src/lib/encode-geoarrow.ts +45 -0
- package/src/{lib → parsers}/parse-arrow-in-batches.ts +4 -2
- package/src/parsers/parse-arrow-sync.ts +38 -0
- package/src/parsers/parse-geoarrow-in-batches.ts +15 -0
- package/src/parsers/parse-geoarrow-sync.ts +22 -0
- package/src/schema/convert-arrow-schema.ts +32 -7
- package/src/tables/convert-arrow-to-columnar-table.ts +29 -0
- package/src/tables/convert-arrow-to-geojson-table.ts +54 -0
- package/src/tables/convert-columnar-to-row-table.ts +29 -0
- package/src/triangulate-on-worker.ts +47 -0
- package/src/workers/arrow-worker.ts +1 -1
- package/src/workers/triangulation-worker-node.ts +4 -0
- package/src/workers/triangulation-worker.ts +39 -0
- package/dist/lib/parse-arrow-in-batches.d.ts.map +0 -1
- package/dist/lib/parse-arrow-in-batches.js.map +0 -1
- package/dist/lib/parse-arrow-sync.d.ts +0 -5
- package/dist/lib/parse-arrow-sync.d.ts.map +0 -1
- package/dist/lib/parse-arrow-sync.js +0 -21
- package/dist/lib/parse-arrow-sync.js.map +0 -1
- package/dist/tables/convert-arrow-to-table.d.ts +0 -21
- package/dist/tables/convert-arrow-to-table.d.ts.map +0 -1
- package/dist/tables/convert-arrow-to-table.js +0 -37
- package/dist/tables/convert-arrow-to-table.js.map +0 -1
- package/src/lib/parse-arrow-sync.ts +0 -35
- package/src/tables/convert-arrow-to-table.ts +0 -68
- /package/dist/{lib → parsers}/parse-arrow-in-batches.js +0 -0
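The headline additions in this version range, as the file list above shows, are a GeoArrow loader/writer pair, new parser and table-conversion modules, and a triangulation worker. Below is a minimal sketch of how the new GeoArrowLoader might be consumed; the load() entry point from @loaders.gl/core is an assumption (it is not part of this diff), while the {arrow: {shape: 'geojson-table'}} option mirrors the parseGeoArrowSync switch visible in the index.cjs diff further down.

  import {load} from '@loaders.gl/core';
  import {GeoArrowLoader} from '@loaders.gl/arrow';

  // Hypothetical usage: 'data.arrow' is a placeholder path.
  // The shape option routes through parseGeoArrowSync() -> convertArrowToGeoJSONTable()
  // in the bundled index.cjs shown below, yielding a GeoJSON-style feature table.
  const table = await load('data.arrow', GeoArrowLoader, {arrow: {shape: 'geojson-table'}});
  console.log(table.shape, table.features.length);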
package/dist/index.cjs
CHANGED
@@ -30,120 +30,38 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 // src/index.ts
 var src_exports = {};
 __export(src_exports, {
-ArrowLoader: () =>
-ArrowWorkerLoader: () =>
+ArrowLoader: () => ArrowLoader,
+ArrowWorkerLoader: () => ArrowWorkerLoader,
 ArrowWriter: () => ArrowWriter,
 BINARY_GEOMETRY_TEMPLATE: () => BINARY_GEOMETRY_TEMPLATE,
+GeoArrowLoader: () => GeoArrowLoader,
+GeoArrowWorkerLoader: () => GeoArrowWorkerLoader,
+TriangulationWorker: () => TriangulationWorker,
 VECTOR_TYPES: () => VECTOR_TYPES,
+convertArrowToGeoJSONTable: () => convertArrowToGeoJSONTable,
 deserializeArrowField: () => deserializeArrowField,
 deserializeArrowMetadata: () => deserializeArrowMetadata,
 deserializeArrowSchema: () => deserializeArrowSchema,
 deserializeArrowType: () => deserializeArrowType,
 getArrowType: () => getArrowType,
 getBinaryGeometriesFromArrow: () => getBinaryGeometriesFromArrow,
-
+getMeanCentersFromBinaryGeometries: () => getMeanCentersFromBinaryGeometries,
+getTriangleIndices: () => getTriangleIndices,
+parseGeometryFromArrow: () => parseGeometryFromArrow2,
 serializeArrowField: () => serializeArrowField,
 serializeArrowMetadata: () => serializeArrowMetadata,
-serializeArrowSchema: () =>
+serializeArrowSchema: () => serializeArrowSchema2,
 serializeArrowType: () => serializeArrowType,
+triangulateOnWorker: () => triangulateOnWorker,
 updateBoundsFromGeoArrowSamples: () => updateBoundsFromGeoArrowSamples
 });
 module.exports = __toCommonJS(src_exports);
 var import_schema3 = require("@loaders.gl/schema");
 
-// src/arrow-
-var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
-var ArrowLoader = {
-name: "Apache Arrow",
-id: "arrow",
-module: "arrow",
-version: VERSION,
-// worker: true,
-category: "table",
-extensions: ["arrow", "feather"],
-mimeTypes: [
-"application/vnd.apache.arrow.file",
-"application/vnd.apache.arrow.stream",
-"application/octet-stream"
-],
-binary: true,
-tests: ["ARROW"],
-options: {
-arrow: {
-shape: "columnar-table"
-}
-}
-};
-
-// src/lib/parse-arrow-sync.ts
+// src/lib/arrow-table-batch.ts
 var import_schema = require("@loaders.gl/schema");
 var arrow = __toESM(require("apache-arrow"), 1);
-
-// src/tables/convert-arrow-to-table.ts
-function convertApacheArrowToArrowTable(arrowTable) {
-return {
-shape: "arrow-table",
-data: arrowTable
-};
-}
-function convertArrowToColumnarTable(table) {
-const arrowTable = table.data;
-const columnarTable = {};
-for (const field of arrowTable.schema.fields) {
-const arrowColumn = arrowTable.getChild(field.name);
-const values = arrowColumn == null ? void 0 : arrowColumn.toArray();
-columnarTable[field.name] = values;
-}
-return {
-shape: "columnar-table",
-data: columnarTable
-};
-}
-
-// src/lib/parse-arrow-sync.ts
-function parseArrowSync(arrayBuffer, options) {
-var _a;
-const apacheArrowTable = arrow.tableFromIPC([new Uint8Array(arrayBuffer)]);
-const arrowTable = convertApacheArrowToArrowTable(apacheArrowTable);
-const shape = ((_a = options == null ? void 0 : options.arrow) == null ? void 0 : _a.shape) || "arrow-table";
-switch (shape) {
-case "arrow-table":
-return arrowTable;
-case "columnar-table":
-return convertArrowToColumnarTable(arrowTable);
-case "object-row-table":
-const columnarTable = convertArrowToColumnarTable(arrowTable);
-return (0, import_schema.convertTable)(columnarTable, "object-row-table");
-default:
-throw new Error(shape);
-}
-}
-
-// src/lib/parse-arrow-in-batches.ts
-var arrow2 = __toESM(require("apache-arrow"), 1);
-function parseArrowInBatches(asyncIterator) {
-async function* makeArrowAsyncIterator() {
-const readers = arrow2.RecordBatchReader.readAll(asyncIterator);
-for await (const reader of readers) {
-for await (const recordBatch of reader) {
-const arrowTabledBatch = {
-shape: "arrow-table",
-batchType: "data",
-data: new arrow2.Table([recordBatch]),
-length: recordBatch.data.length
-};
-yield arrowTabledBatch;
-}
-break;
-}
-}
-return makeArrowAsyncIterator();
-}
-
-// src/lib/arrow-table-batch.ts
-var import_schema2 = require("@loaders.gl/schema");
-var arrow3 = __toESM(require("apache-arrow"), 1);
-var ArrowTableBatchAggregator = class extends import_schema2.ColumnarTableBatchAggregator {
+var ArrowTableBatchAggregator = class extends import_schema.ColumnarTableBatchAggregator {
 constructor(schema, options) {
 super(schema, options);
 this.arrowSchema = null;
@@ -153,17 +71,17 @@ var ArrowTableBatchAggregator = class extends import_schema2.ColumnarTableBatchA
 if (batch) {
 this.arrowSchema = this.arrowSchema || getArrowSchema(batch.schema);
 const arrowVectors = getArrowVectors(this.arrowSchema, batch.data);
-const recordBatch = new
+const recordBatch = new arrow.RecordBatch(
 this.arrowSchema,
-
-type: new
+arrow.makeData({
+type: new arrow.Struct(this.arrowSchema.fields),
 children: arrowVectors.map(({ data }) => data[0])
 })
 );
 return {
 shape: "arrow-table",
 batchType: "data",
-data: new
+data: new arrow.Table([recordBatch]),
 length: batch.length
 };
 }
@@ -176,21 +94,21 @@ function getArrowSchema(schema) {
 const field = schema[key];
 if (field.type === Float32Array) {
 const metadata = /* @__PURE__ */ new Map();
-const arrowField = new
+const arrowField = new arrow.Field(field.name, new arrow.Float32(), field.nullable, metadata);
 arrowFields.push(arrowField);
 }
 }
 if (arrowFields.length === 0) {
 throw new Error("No arrow convertible fields");
 }
-return new
+return new arrow.Schema(arrowFields);
 }
 function getArrowVectors(arrowSchema, data) {
 const arrowVectors = [];
 for (const field of arrowSchema.fields) {
 const vector = data[field.name];
 if (vector instanceof Float32Array) {
-const arrowVector =
+const arrowVector = arrow.makeVector(vector);
 arrowVectors.push(arrowVector);
 }
 }
@@ -201,40 +119,245 @@ function getArrowVectors(arrowSchema, data) {
 }
 
 // src/schema/arrow-type-utils.ts
-var
+var arrow2 = __toESM(require("apache-arrow"), 1);
 function getArrowType(array) {
 switch (array.constructor) {
 case Int8Array:
-return new
+return new arrow2.Int8();
 case Uint8Array:
-return new
+return new arrow2.Uint8();
 case Int16Array:
-return new
+return new arrow2.Int16();
 case Uint16Array:
-return new
+return new arrow2.Uint16();
 case Int32Array:
-return new
+return new arrow2.Int32();
 case Uint32Array:
-return new
+return new arrow2.Uint32();
 case Float32Array:
-return new
+return new arrow2.Float32();
 case Float64Array:
-return new
+return new arrow2.Float64();
 default:
 throw new Error("array type not supported");
 }
 }
 
-// src/
+// src/types.ts
+var VECTOR_TYPES = /* @__PURE__ */ ((VECTOR_TYPES2) => {
+VECTOR_TYPES2[VECTOR_TYPES2["FLOAT"] = 0] = "FLOAT";
+VECTOR_TYPES2[VECTOR_TYPES2["DATE"] = 1] = "DATE";
+return VECTOR_TYPES2;
+})(VECTOR_TYPES || {});
+
+// src/parsers/parse-arrow-sync.ts
+var import_schema2 = require("@loaders.gl/schema");
+var arrow3 = __toESM(require("apache-arrow"), 1);
+
+// src/tables/convert-arrow-to-columnar-table.ts
+function convertArrowToColumnarTable(table) {
+const arrowTable = table.data;
+const columnarTable = {};
+for (const field of arrowTable.schema.fields) {
+const arrowColumn = arrowTable.getChild(field.name);
+const values = arrowColumn == null ? void 0 : arrowColumn.toArray();
+columnarTable[field.name] = values;
+}
+return {
+shape: "columnar-table",
+data: columnarTable
+};
+}
+
+// src/parsers/parse-arrow-sync.ts
+function parseArrowSync(arrayBuffer, options) {
+const apacheArrowTable = arrow3.tableFromIPC([new Uint8Array(arrayBuffer)]);
+const arrowTable = { shape: "arrow-table", data: apacheArrowTable };
+const shape = (options == null ? void 0 : options.shape) || "arrow-table";
+switch (shape) {
+case "arrow-table":
+return arrowTable;
+case "columnar-table":
+return convertArrowToColumnarTable(arrowTable);
+case "object-row-table":
+let columnarTable = convertArrowToColumnarTable(arrowTable);
+return (0, import_schema2.convertTable)(columnarTable, "object-row-table");
+case "array-row-table":
+columnarTable = convertArrowToColumnarTable(arrowTable);
+return (0, import_schema2.convertTable)(columnarTable, "array-row-table");
+default:
+throw new Error(shape);
+}
+}
+
+// src/parsers/parse-arrow-in-batches.ts
+var arrow4 = __toESM(require("apache-arrow"), 1);
+function parseArrowInBatches(asyncIterator) {
+async function* makeArrowAsyncIterator() {
+const readers = arrow4.RecordBatchReader.readAll(asyncIterator);
+for await (const reader of readers) {
+for await (const recordBatch of reader) {
+const arrowTabledBatch = {
+shape: "arrow-table",
+batchType: "data",
+data: new arrow4.Table([recordBatch]),
+length: recordBatch.data.length
+};
+yield arrowTabledBatch;
+}
+break;
+}
+}
+return makeArrowAsyncIterator();
+}
+
+// src/arrow-loader.ts
+var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
+var ArrowWorkerLoader = {
+name: "Apache Arrow",
+id: "arrow",
+module: "arrow",
+version: VERSION,
+// worker: true,
+category: "table",
+extensions: ["arrow", "feather"],
+mimeTypes: [
+"application/vnd.apache.arrow.file",
+"application/vnd.apache.arrow.stream",
+"application/octet-stream"
+],
+binary: true,
+tests: ["ARROW"],
+options: {
+arrow: {
+shape: "columnar-table"
+}
+}
+};
+var ArrowLoader = {
+...ArrowWorkerLoader,
+parse: async (arraybuffer, options) => parseArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
+parseSync: (arraybuffer, options) => parseArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
+parseInBatches: parseArrowInBatches
+};
+
+// src/lib/encode-arrow.ts
 var arrow5 = __toESM(require("apache-arrow"), 1);
-function
+function encodeArrowSync(data) {
+const vectors = {};
+for (const arrayData of data) {
+const arrayVector = createVector(arrayData.array, arrayData.type);
+vectors[arrayData.name] = arrayVector;
+}
+const table = new arrow5.Table(vectors);
+const arrowBuffer = arrow5.tableToIPC(table);
+return arrowBuffer;
+}
+function createVector(array, type) {
+switch (type) {
+case 1 /* DATE */:
+return arrow5.vectorFromArray(array);
+case 0 /* FLOAT */:
+default:
+return arrow5.vectorFromArray(array);
+}
+}
+
+// src/arrow-writer.ts
+var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
+var ArrowWriter = {
+name: "Apache Arrow",
+id: "arrow",
+module: "arrow",
+version: VERSION2,
+extensions: ["arrow", "feather"],
+mimeTypes: [
+"application/vnd.apache.arrow.file",
+"application/vnd.apache.arrow.stream",
+"application/octet-stream"
+],
+binary: true,
+options: {},
+encode: async function encodeArrow(data, options) {
+return encodeArrowSync(data);
+},
+encodeSync(data, options) {
+return encodeArrowSync(data);
+}
+};
+
+// src/tables/convert-arrow-to-geojson-table.ts
+var import_arrow = require("@loaders.gl/arrow");
+var import_gis = require("@loaders.gl/gis");
+function convertArrowToGeoJSONTable(table) {
+var _a;
+const arrowTable = table.data;
+const schema = (0, import_arrow.serializeArrowSchema)(arrowTable.schema);
+const geometryColumns = (0, import_gis.getGeometryColumnsFromSchema)(schema);
+const encoding = geometryColumns.geometry.encoding;
+const features = [];
+for (let row = 0; row < arrowTable.numRows; row++) {
+const arrowGeometry = (_a = arrowTable.getChild("geometry")) == null ? void 0 : _a.get(row);
+const arrowGeometryObject = { encoding, data: arrowGeometry };
+const feature = (0, import_arrow.parseGeometryFromArrow)(arrowGeometryObject);
+if (feature) {
+features.push(feature);
+}
+}
+return {
+shape: "geojson-table",
+type: "FeatureCollection",
+features
+};
+}
+
+// src/parsers/parse-geoarrow-sync.ts
+function parseGeoArrowSync(arrayBuffer, options) {
+const table = parseArrowSync(arrayBuffer, { shape: "arrow-table" });
+switch (options == null ? void 0 : options.shape) {
+case "geojson-table":
+return convertArrowToGeoJSONTable(table);
+default:
+return table;
+}
+}
+
+// src/parsers/parse-geoarrow-in-batches.ts
+function parseGeoArrowInBatches(asyncIterator) {
+return parseArrowInBatches(asyncIterator);
+}
+
+// src/geoarrow-loader.ts
+var GeoArrowWorkerLoader = {
+...ArrowWorkerLoader,
+options: {
+arrow: {
+shape: "arrow-table"
+}
+}
+};
+var GeoArrowLoader = {
+...ArrowWorkerLoader,
+options: {
+arrow: {
+shape: "arrow-table"
+}
+},
+parse: async (arraybuffer, options) => parseGeoArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
+parseSync: (arraybuffer, options) => parseGeoArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
+parseInBatches: parseGeoArrowInBatches
+};
+
+// src/schema/convert-arrow-schema.ts
+var arrow6 = __toESM(require("apache-arrow"), 1);
+function serializeArrowSchema2(arrowSchema) {
 return {
 fields: arrowSchema.fields.map((arrowField) => serializeArrowField(arrowField)),
 metadata: serializeArrowMetadata(arrowSchema.metadata)
 };
 }
 function deserializeArrowSchema(schema) {
-return new
+return new arrow6.Schema(
 schema.fields.map((field) => deserializeArrowField(field)),
 deserializeArrowMetadata(schema.metadata)
 );
@@ -254,7 +377,7 @@ function serializeArrowField(field) {
 };
 }
 function deserializeArrowField(field) {
-return new
+return new arrow6.Field(
 field.name,
 deserializeArrowType(field.type),
 field.nullable,
@@ -263,260 +386,238 @@ function deserializeArrowField(field) {
 }
 function serializeArrowType(arrowType) {
 switch (arrowType.constructor) {
-case
+case arrow6.Null:
 return "null";
-case
+case arrow6.Binary:
 return "binary";
-case
+case arrow6.Bool:
 return "bool";
-case
+case arrow6.Int:
 const intType = arrowType;
 return `${intType.isSigned ? "u" : ""}int${intType.bitWidth}`;
-case
+case arrow6.Int8:
 return "int8";
-case
+case arrow6.Int16:
 return "int16";
-case
+case arrow6.Int32:
 return "int32";
-case
+case arrow6.Int64:
 return "int64";
-case
+case arrow6.Uint8:
 return "uint8";
-case
+case arrow6.Uint16:
 return "uint16";
-case
+case arrow6.Uint32:
 return "uint32";
-case
+case arrow6.Uint64:
 return "uint64";
-case
+case arrow6.Float:
 const precision = arrowType.precision;
 switch (precision) {
-case
+case arrow6.Precision.HALF:
 return "float16";
-case
+case arrow6.Precision.SINGLE:
 return "float32";
-case
+case arrow6.Precision.DOUBLE:
 return "float64";
 default:
 return "float16";
 }
-case
+case arrow6.Float16:
 return "float16";
-case
+case arrow6.Float32:
 return "float32";
-case
+case arrow6.Float64:
 return "float64";
-case
+case arrow6.Utf8:
 return "utf8";
-case
+case arrow6.Decimal:
+const decimal = arrowType;
+return {
+type: "decimal",
+bitWidth: decimal.bitWidth,
+precision: decimal.precision,
+scale: decimal.scale
+};
+case arrow6.Date_:
 const dateUnit = arrowType.unit;
-return dateUnit ===
-case
+return dateUnit === arrow6.DateUnit.DAY ? "date-day" : "date-millisecond";
+case arrow6.DateDay:
 return "date-day";
-case
+case arrow6.DateMillisecond:
 return "date-millisecond";
-case
+case arrow6.Time:
 const timeUnit = arrowType.unit;
 switch (timeUnit) {
-case
+case arrow6.TimeUnit.SECOND:
 return "time-second";
-case
+case arrow6.TimeUnit.MILLISECOND:
 return "time-millisecond";
-case
+case arrow6.TimeUnit.MICROSECOND:
 return "time-microsecond";
-case
+case arrow6.TimeUnit.NANOSECOND:
 return "time-nanosecond";
 default:
 return "time-second";
 }
-case
+case arrow6.TimeMillisecond:
 return "time-millisecond";
-case
+case arrow6.TimeSecond:
 return "time-second";
-case
+case arrow6.TimeMicrosecond:
 return "time-microsecond";
-case
+case arrow6.TimeNanosecond:
 return "time-nanosecond";
-case
+case arrow6.Timestamp:
 const timeStampUnit = arrowType.unit;
 switch (timeStampUnit) {
-case
+case arrow6.TimeUnit.SECOND:
 return "timestamp-second";
-case
+case arrow6.TimeUnit.MILLISECOND:
 return "timestamp-millisecond";
-case
+case arrow6.TimeUnit.MICROSECOND:
 return "timestamp-microsecond";
-case
+case arrow6.TimeUnit.NANOSECOND:
 return "timestamp-nanosecond";
 default:
 return "timestamp-second";
 }
-case
+case arrow6.TimestampSecond:
 return "timestamp-second";
-case
+case arrow6.TimestampMillisecond:
 return "timestamp-millisecond";
-case
+case arrow6.TimestampMicrosecond:
 return "timestamp-microsecond";
-case
+case arrow6.TimestampNanosecond:
 return "timestamp-nanosecond";
-case
+case arrow6.Interval:
 const intervalUnit = arrowType.unit;
 switch (intervalUnit) {
-case
+case arrow6.IntervalUnit.DAY_TIME:
 return "interval-daytime";
-case
+case arrow6.IntervalUnit.YEAR_MONTH:
 return "interval-yearmonth";
 default:
 return "interval-daytime";
 }
-case
+case arrow6.IntervalDayTime:
 return "interval-daytime";
-case
+case arrow6.IntervalYearMonth:
 return "interval-yearmonth";
-case
+case arrow6.Map_:
+const mapType = arrowType;
+return {
+type: "map",
+keysSorted: mapType.keysSorted,
+children: mapType.children.map((arrowField) => serializeArrowField(arrowField))
+};
+case arrow6.List:
 const listType = arrowType;
 const listField = listType.valueField;
 return {
 type: "list",
 children: [serializeArrowField(listField)]
 };
-case
+case arrow6.FixedSizeList:
+const fixedSizeList = arrowType;
 return {
 type: "fixed-size-list",
-listSize:
-children: [serializeArrowField(
+listSize: fixedSizeList.listSize,
+children: [serializeArrowField(fixedSizeList.children[0])]
+};
+case arrow6.Struct:
+const structType = arrowType;
+return {
+type: "struct",
+children: structType.children.map((arrowField) => serializeArrowField(arrowField))
 };
 default:
-throw new Error(
+throw new Error(`arrow type not supported: ${arrowType.constructor.name}`);
 }
 }
 function deserializeArrowType(dataType) {
 if (typeof dataType === "object") {
 switch (dataType.type) {
+case "decimal":
+return new arrow6.Decimal(dataType.precision, dataType.scale, dataType.bitWidth);
+case "map":
+let children = dataType.children.map((arrowField) => deserializeArrowField(arrowField));
+return new arrow6.Map_(children, dataType.keysSorted);
 case "list":
 const field = deserializeArrowField(dataType.children[0]);
-return new
+return new arrow6.List(field);
 case "fixed-size-list":
 const child = deserializeArrowField(dataType.children[0]);
-return new
+return new arrow6.FixedSizeList(dataType.listSize, child);
 case "struct":
-
-return new
+children = dataType.children.map((arrowField) => deserializeArrowField(arrowField));
+return new arrow6.Struct(children);
 default:
 throw new Error("array type not supported");
 }
 }
 switch (dataType) {
 case "null":
-return new
+return new arrow6.Null();
 case "binary":
-return new
+return new arrow6.Binary();
 case "bool":
-return new
+return new arrow6.Bool();
 case "int8":
-return new
+return new arrow6.Int8();
 case "int16":
-return new
+return new arrow6.Int16();
 case "int32":
-return new
+return new arrow6.Int32();
 case "int64":
-return new
+return new arrow6.Int64();
 case "uint8":
-return new
+return new arrow6.Uint8();
 case "uint16":
-return new
+return new arrow6.Uint16();
 case "uint32":
-return new
+return new arrow6.Uint32();
 case "uint64":
-return new
+return new arrow6.Uint64();
 case "float16":
-return new
+return new arrow6.Float16();
 case "float32":
-return new
+return new arrow6.Float32();
 case "float64":
-return new
+return new arrow6.Float64();
 case "utf8":
-return new
+return new arrow6.Utf8();
 case "date-day":
-return new
+return new arrow6.DateDay();
 case "date-millisecond":
-return new
+return new arrow6.DateMillisecond();
 case "time-second":
-return new
+return new arrow6.TimeSecond();
 case "time-millisecond":
-return new
+return new arrow6.TimeMillisecond();
 case "time-microsecond":
-return new
+return new arrow6.TimeMicrosecond();
 case "time-nanosecond":
-return new
+return new arrow6.TimeNanosecond();
 case "timestamp-second":
-return new
+return new arrow6.TimestampSecond();
 case "timestamp-millisecond":
-return new
+return new arrow6.TimestampMillisecond();
 case "timestamp-microsecond":
-return new
+return new arrow6.TimestampMicrosecond();
 case "timestamp-nanosecond":
-return new
+return new arrow6.TimestampNanosecond();
 case "interval-daytime":
-return new
+return new arrow6.IntervalDayTime();
 case "interval-yearmonth":
-return new
+return new arrow6.IntervalYearMonth();
 default:
 throw new Error("array type not supported");
 }
 }
 
-// src/
-var
-VECTOR_TYPES2[VECTOR_TYPES2["FLOAT"] = 0] = "FLOAT";
-VECTOR_TYPES2[VECTOR_TYPES2["DATE"] = 1] = "DATE";
-return VECTOR_TYPES2;
-})(VECTOR_TYPES || {});
-
-// src/lib/encode-arrow.ts
-var arrow6 = __toESM(require("apache-arrow"), 1);
-function encodeArrowSync(data) {
-const vectors = {};
-for (const arrayData of data) {
-const arrayVector = createVector(arrayData.array, arrayData.type);
-vectors[arrayData.name] = arrayVector;
-}
-const table = new arrow6.Table(vectors);
-const arrowBuffer = arrow6.tableToIPC(table);
-return arrowBuffer;
-}
-function createVector(array, type) {
-switch (type) {
-case 1 /* DATE */:
-return arrow6.vectorFromArray(array);
-case 0 /* FLOAT */:
-default:
-return arrow6.vectorFromArray(array);
-}
-}
-
-// src/arrow-writer.ts
-var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
-var ArrowWriter = {
-name: "Apache Arrow",
-id: "arrow",
-module: "arrow",
-version: VERSION2,
-extensions: ["arrow", "feather"],
-mimeTypes: [
-"application/vnd.apache.arrow.file",
-"application/vnd.apache.arrow.stream",
-"application/octet-stream"
-],
-binary: true,
-options: {},
-encode: async function encodeArrow(data, options) {
-return encodeArrowSync(data);
-},
-encodeSync(data, options) {
-return encodeArrowSync(data);
-}
-};
+// src/geoarrow/convert-geoarrow-to-binary-geometry.ts
+var import_polygon = require("@math.gl/polygon");
 
 // src/geoarrow/get-arrow-bounds.ts
 function updateBoundsFromGeoArrowSamples(flatCoords, nDim, bounds, sampleSize = 100) {
@@ -550,24 +651,20 @@ var BINARY_GEOMETRY_TEMPLATE = {
 numericProps: {},
 featureIds: { value: new Uint32Array(0), size: 1 }
 };
-function getBinaryGeometriesFromArrow(geoColumn, geoEncoding) {
+function getBinaryGeometriesFromArrow(geoColumn, geoEncoding, options) {
 const featureTypes = {
 polygon: geoEncoding === "geoarrow.multipolygon" || geoEncoding === "geoarrow.polygon",
 point: geoEncoding === "geoarrow.multipoint" || geoEncoding === "geoarrow.point",
 line: geoEncoding === "geoarrow.multilinestring" || geoEncoding === "geoarrow.linestring"
 };
-const chunks = geoColumn.data;
+const chunks = (options == null ? void 0 : options.chunkIndex) ? [geoColumn.data[options == null ? void 0 : options.chunkIndex]] : geoColumn.data;
 let bounds = [Infinity, Infinity, -Infinity, -Infinity];
 let globalFeatureIdOffset = 0;
 const binaryGeometries = [];
 chunks.forEach((chunk) => {
-const { featureIds, flatCoordinateArray, nDim, geomOffset } = getBinaryGeometriesFromChunk(
-
-
-);
-const numOfVertices = flatCoordinateArray.length / nDim;
-const globalFeatureIds = new Uint32Array(numOfVertices);
-for (let i = 0; i < numOfVertices; i++) {
+const { featureIds, flatCoordinateArray, nDim, geomOffset, triangles } = getBinaryGeometriesFromChunk(chunk, geoEncoding);
+const globalFeatureIds = new Uint32Array(featureIds.length);
+for (let i = 0; i < featureIds.length; i++) {
 globalFeatureIds[i] = featureIds[i] + globalFeatureIdOffset;
 }
 const binaryContent = {
@@ -600,20 +697,81 @@ function getBinaryGeometriesFromArrow(geoColumn, geoEncoding) {
 ...BINARY_GEOMETRY_TEMPLATE,
 ...featureTypes.polygon ? binaryContent : {},
 polygonIndices: {
-//
-// even when there is no hole in any polygon
+// use geomOffset as polygonIndices same as primitivePolygonIndices since we are using earcut to get triangule indices
 value: featureTypes.polygon ? geomOffset : new Uint16Array(0),
 size: 1
 },
 primitivePolygonIndices: {
 value: featureTypes.polygon ? geomOffset : new Uint16Array(0),
 size: 1
-}
+},
+...triangles ? { triangles: { value: triangles, size: 1 } } : {}
 }
 });
 bounds = updateBoundsFromGeoArrowSamples(flatCoordinateArray, nDim, bounds);
 });
-return {
+return {
+binaryGeometries,
+bounds,
+featureTypes,
+...(options == null ? void 0 : options.meanCenter) ? { meanCenters: getMeanCentersFromBinaryGeometries(binaryGeometries) } : {}
+};
+}
+function getMeanCentersFromBinaryGeometries(binaryGeometries) {
+const globalMeanCenters = [];
+binaryGeometries.forEach((binaryGeometry) => {
+var _a;
+let binaryGeometryType = null;
+if (binaryGeometry.points && binaryGeometry.points.positions.value.length > 0) {
+binaryGeometryType = "points";
+} else if (binaryGeometry.lines && binaryGeometry.lines.positions.value.length > 0) {
+binaryGeometryType = "lines";
+} else if (binaryGeometry.polygons && binaryGeometry.polygons.positions.value.length > 0) {
+binaryGeometryType = "polygons";
+}
+const binaryContent = binaryGeometryType ? binaryGeometry[binaryGeometryType] : null;
+if (binaryContent && binaryGeometryType !== null) {
+const featureIds = binaryContent.featureIds.value;
+const flatCoordinateArray = binaryContent.positions.value;
+const nDim = binaryContent.positions.size;
+const primitivePolygonIndices = (_a = binaryContent.primitivePolygonIndices) == null ? void 0 : _a.value;
+const meanCenters = getMeanCentersFromGeometry(
+featureIds,
+flatCoordinateArray,
+nDim,
+binaryGeometryType,
+primitivePolygonIndices
+);
+meanCenters.forEach((center) => {
+globalMeanCenters.push(center);
+});
+}
+});
+return globalMeanCenters;
+}
+function getMeanCentersFromGeometry(featureIds, flatCoordinateArray, nDim, geometryType, primitivePolygonIndices) {
+const meanCenters = [];
+const vertexCount = flatCoordinateArray.length;
+let vertexIndex = 0;
+while (vertexIndex < vertexCount) {
+const featureId = featureIds[vertexIndex / nDim];
+const center = [0, 0];
+let vertexCountInFeature = 0;
+while (vertexIndex < vertexCount && featureIds[vertexIndex / nDim] === featureId) {
+if (geometryType === "polygons" && primitivePolygonIndices && primitivePolygonIndices.indexOf(vertexIndex / nDim) >= 0) {
+vertexIndex += nDim;
+} else {
+center[0] += flatCoordinateArray[vertexIndex];
+center[1] += flatCoordinateArray[vertexIndex + 1];
+vertexIndex += nDim;
+vertexCountInFeature++;
+}
+}
+center[0] /= vertexCountInFeature;
+center[1] /= vertexCountInFeature;
+meanCenters.push(center);
+}
+return meanCenters;
 }
 function getBinaryGeometriesFromChunk(chunk, geoEncoding) {
 switch (geoEncoding) {
@@ -630,35 +788,67 @@ function getBinaryGeometriesFromChunk(chunk, geoEncoding) {
 throw Error("invalid geoarrow encoding");
 }
 }
+function getTriangleIndices(polygonIndices, primitivePolygonIndices, flatCoordinateArray, nDim) {
+let primitiveIndex = 0;
+const triangles = [];
+for (let i = 0; i < polygonIndices.length - 1; i++) {
+const startIdx = polygonIndices[i];
+const endIdx = polygonIndices[i + 1];
+const slicedFlatCoords = flatCoordinateArray.subarray(startIdx * nDim, endIdx * nDim);
+const holeIndices = [];
+while (primitivePolygonIndices[primitiveIndex] < endIdx) {
+if (primitivePolygonIndices[primitiveIndex] > startIdx) {
+holeIndices.push(primitivePolygonIndices[primitiveIndex] - startIdx);
+}
+primitiveIndex++;
+}
+const triangleIndices = (0, import_polygon.earcut)(
+slicedFlatCoords,
+holeIndices.length > 0 ? holeIndices : void 0,
+nDim
+);
+for (let j = 0; j < triangleIndices.length; j++) {
+triangles.push(triangleIndices[j] + startIdx);
+}
+}
+const trianglesUint32 = new Uint32Array(triangles.length);
+for (let i = 0; i < triangles.length; i++) {
+trianglesUint32[i] = triangles[i];
+}
+return trianglesUint32;
+}
 function getBinaryPolygonsFromChunk(chunk, geoEncoding) {
 const isMultiPolygon = geoEncoding === "geoarrow.multipolygon";
 const polygonData = isMultiPolygon ? chunk.children[0] : chunk;
+const polygonOffset = polygonData.valueOffsets;
+const partData = isMultiPolygon ? chunk.valueOffsets.map((i) => polygonOffset.at(i) || i) : chunk.valueOffsets;
 const ringData = polygonData.children[0];
 const pointData = ringData.children[0];
 const coordData = pointData.children[0];
 const nDim = pointData.stride;
 const geomOffset = ringData.valueOffsets;
 const flatCoordinateArray = coordData.values;
-const geometryIndicies = new Uint16Array(
-for (let i = 0; i <
-geometryIndicies[i] = geomOffset[
+const geometryIndicies = new Uint16Array(polygonOffset.length);
+for (let i = 0; i < polygonOffset.length; i++) {
+geometryIndicies[i] = geomOffset[polygonOffset[i]];
 }
-geometryIndicies[chunk.length] = flatCoordinateArray.length / nDim;
 const numOfVertices = flatCoordinateArray.length / nDim;
 const featureIds = new Uint32Array(numOfVertices);
-for (let i = 0; i <
-const startIdx = geomOffset[
-const endIdx = geomOffset[
+for (let i = 0; i < partData.length - 1; i++) {
+const startIdx = geomOffset[partData[i]];
+const endIdx = geomOffset[partData[i + 1]];
 for (let j = startIdx; j < endIdx; j++) {
 featureIds[j] = i;
 }
 }
+const triangles = getTriangleIndices(geometryIndicies, geomOffset, flatCoordinateArray, nDim);
 return {
 featureIds,
 flatCoordinateArray,
 nDim,
 geomOffset,
-geometryIndicies
+geometryIndicies,
+triangles
 };
 }
 function getBinaryLinesFromChunk(chunk, geoEncoding) {
@@ -672,11 +862,22 @@ function getBinaryLinesFromChunk(chunk, geoEncoding) {
 const geometryIndicies = new Uint16Array(0);
 const numOfVertices = flatCoordinateArray.length / nDim;
 const featureIds = new Uint32Array(numOfVertices);
-
-const
-
-
-
+if (isMultiLineString) {
+const partData = chunk.valueOffsets;
+for (let i = 0; i < partData.length - 1; i++) {
+const startIdx = geomOffset[partData[i]];
+const endIdx = geomOffset[partData[i + 1]];
+for (let j = startIdx; j < endIdx; j++) {
+featureIds[j] = i;
+}
+}
+} else {
+for (let i = 0; i < chunk.length; i++) {
+const startIdx = geomOffset[i];
+const endIdx = geomOffset[i + 1];
+for (let j = startIdx; j < endIdx; j++) {
+featureIds[j] = i;
+}
 }
 }
 return {
@@ -697,8 +898,19 @@ function getBinaryPointsFromChunk(chunk, geoEncoding) {
 const geomOffset = new Int32Array(0);
 const numOfVertices = flatCoordinateArray.length / nDim;
 const featureIds = new Uint32Array(numOfVertices);
-
-
+if (isMultiPoint) {
+const partData = chunk.valueOffsets;
+for (let i = 0; i < partData.length - 1; i++) {
+const startIdx = partData[i];
+const endIdx = partData[i + 1];
+for (let j = startIdx; j < endIdx; j++) {
+featureIds[j] = i;
+}
+}
+} else {
+for (let i = 0; i < chunk.length; i++) {
+featureIds[i] = i;
+}
 }
 return {
 featureIds,
@@ -710,7 +922,7 @@ function getBinaryPointsFromChunk(chunk, geoEncoding) {
 }
 
 // src/geoarrow/convert-geoarrow-to-geojson.ts
-function
+function parseGeometryFromArrow2(rawData) {
 var _a;
 const encoding = (_a = rawData.encoding) == null ? void 0 : _a.toLowerCase();
 const data = rawData.data;
@@ -737,6 +949,10 @@ function parseGeometryFromArrow(rawData) {
 case "geoarrow.linestring":
 geometry = arrowLineStringToFeature(data);
 break;
+case "geoarrow.wkb":
+throw Error(`GeoArrow encoding not supported ${encoding}`);
+case "geoarrow.wkt":
+throw Error(`GeoArrow encoding not supported ${encoding}`);
 default: {
 throw Error(`GeoArrow encoding not supported ${encoding}`);
 }
@@ -847,11 +1063,19 @@ function arrowLineStringToFeature(arrowLineString) {
 return geometry;
 }
 
+// src/triangulate-on-worker.ts
+var import_worker_utils = require("@loaders.gl/worker-utils");
+var VERSION3 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
+var TriangulationWorker = {
+id: "triangulation",
+name: "Triangulate",
+module: "arrow",
+version: VERSION3,
+options: {}
+};
+function triangulateOnWorker(data, options = {}) {
+return (0, import_worker_utils.processOnWorker)(TriangulationWorker, data, options);
+}
+
 // src/index.ts
 import_schema3.TableBatchBuilder.ArrowBatch = ArrowTableBatchAggregator;
-var ArrowLoader2 = {
-...ArrowLoader,
-parse: async (arraybuffer, options) => parseArrowSync(arraybuffer, options),
-parseSync: parseArrowSync,
-parseInBatches: parseArrowInBatches
-};
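The convert-geoarrow-to-binary-geometry changes in the diff above give getBinaryGeometriesFromArrow an optional third options argument (chunkIndex, meanCenter) and extend its return value with earcut-derived triangle indices and, when requested, per-feature mean centers. A minimal sketch of the new call shape, assuming an Arrow IPC buffer with a 'geometry' column encoded as 'geoarrow.polygon' (both assumptions, not taken from this diff):

  import * as arrow from 'apache-arrow';
  import {getBinaryGeometriesFromArrow} from '@loaders.gl/arrow';

  // `ipcBuffer` is a placeholder for GeoArrow IPC bytes obtained elsewhere.
  declare const ipcBuffer: ArrayBuffer;
  const table = arrow.tableFromIPC(new Uint8Array(ipcBuffer));
  const geoColumn = table.getChild('geometry')!;
  // meanCenter: true adds the meanCenters array; chunkIndex restricts work to one chunk.
  const {binaryGeometries, bounds, featureTypes, meanCenters} = getBinaryGeometriesFromArrow(
    geoColumn,
    'geoarrow.polygon',
    {meanCenter: true}
  );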