@loaders.gl/arrow 4.0.4 → 4.1.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dist/arrow-worker.js +237 -2
  2. package/dist/dist.dev.js +1465 -630
  3. package/dist/geoarrow/convert-geoarrow-to-binary-geometry.d.ts +8 -4
  4. package/dist/geoarrow/convert-geoarrow-to-binary-geometry.d.ts.map +1 -1
  5. package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js +75 -54
  6. package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js.map +1 -1
  7. package/dist/geoarrow/convert-geoarrow-to-geojson-geometry.d.ts +13 -0
  8. package/dist/geoarrow/convert-geoarrow-to-geojson-geometry.d.ts.map +1 -0
  9. package/dist/geoarrow/{convert-geoarrow-to-geojson.js → convert-geoarrow-to-geojson-geometry.js} +34 -27
  10. package/dist/geoarrow/convert-geoarrow-to-geojson-geometry.js.map +1 -0
  11. package/dist/geoarrow-loader.d.ts.map +1 -1
  12. package/dist/geoarrow-loader.js +0 -1
  13. package/dist/geoarrow-loader.js.map +1 -1
  14. package/dist/index.cjs +429 -347
  15. package/dist/index.d.ts +5 -3
  16. package/dist/index.d.ts.map +1 -1
  17. package/dist/index.js +4 -3
  18. package/dist/index.js.map +1 -1
  19. package/dist/parsers/parse-arrow-sync.d.ts.map +1 -1
  20. package/dist/parsers/parse-arrow-sync.js +2 -0
  21. package/dist/parsers/parse-arrow-sync.js.map +1 -1
  22. package/dist/tables/convert-arrow-to-columnar-table.d.ts.map +1 -1
  23. package/dist/tables/convert-arrow-to-columnar-table.js +1 -0
  24. package/dist/tables/convert-arrow-to-columnar-table.js.map +1 -1
  25. package/dist/tables/convert-arrow-to-geojson-table.d.ts +1 -1
  26. package/dist/tables/convert-arrow-to-geojson-table.d.ts.map +1 -1
  27. package/dist/tables/convert-arrow-to-geojson-table.js +14 -8
  28. package/dist/tables/convert-arrow-to-geojson-table.js.map +1 -1
  29. package/dist/tables/convert-columnar-to-row-table.d.ts.map +1 -1
  30. package/dist/tables/convert-columnar-to-row-table.js +1 -0
  31. package/dist/tables/convert-columnar-to-row-table.js.map +1 -1
  32. package/dist/triangulate-on-worker.d.ts +40 -6
  33. package/dist/triangulate-on-worker.d.ts.map +1 -1
  34. package/dist/triangulate-on-worker.js +11 -1
  35. package/dist/triangulate-on-worker.js.map +1 -1
  36. package/dist/triangulation-worker.js +11703 -34
  37. package/dist/workers/hard-clone.d.ts +23 -0
  38. package/dist/workers/hard-clone.d.ts.map +1 -0
  39. package/dist/workers/hard-clone.js +57 -0
  40. package/dist/workers/hard-clone.js.map +1 -0
  41. package/dist/workers/triangulation-worker.js +37 -2
  42. package/dist/workers/triangulation-worker.js.map +1 -1
  43. package/package.json +16 -10
  44. package/src/geoarrow/convert-geoarrow-to-binary-geometry.ts +103 -61
  45. package/src/geoarrow/{convert-geoarrow-to-geojson.ts → convert-geoarrow-to-geojson-geometry.ts} +56 -46
  46. package/src/geoarrow-loader.ts +0 -4
  47. package/src/index.ts +9 -3
  48. package/src/parsers/parse-arrow-sync.ts +6 -1
  49. package/src/tables/convert-arrow-to-columnar-table.ts +1 -0
  50. package/src/tables/convert-arrow-to-geojson-table.ts +18 -7
  51. package/src/tables/convert-columnar-to-row-table.ts +1 -0
  52. package/src/triangulate-on-worker.ts +51 -8
  53. package/src/workers/hard-clone.ts +162 -0
  54. package/src/workers/triangulation-worker.ts +57 -3
  55. package/dist/geoarrow/convert-geoarrow-to-geojson.d.ts +0 -20
  56. package/dist/geoarrow/convert-geoarrow-to-geojson.d.ts.map +0 -1
  57. package/dist/geoarrow/convert-geoarrow-to-geojson.js.map +0 -1
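The public API changes visible in the export hunks below: the shared BINARY_GEOMETRY_TEMPLATE constant is gone in favor of a getBinaryGeometryTemplate() factory, hardClone and parseGeoArrowOnWorker are new exports, and parseGeometryFromArrow changes signature. A rough TypeScript sketch of the renamed and new entry points (illustrative only; inferred from this diff, not from release notes):

import {getBinaryGeometryTemplate, parseGeometryFromArrow} from '@loaders.gl/arrow';

// One value from a GeoArrow geometry column, e.g. geometryColumn.get(row) (hypothetical input).
declare const cellValue: any;

// 4.0.x exposed a single mutable BINARY_GEOMETRY_TEMPLATE object; each call now returns a fresh template.
const template = getBinaryGeometryTemplate();

// parseGeometryFromArrow now takes the raw Arrow cell value plus the GeoArrow encoding string and
// returns a bare GeoJSON geometry; it previously took {encoding, data} and wrapped the result in a
// Feature with empty properties.
const geometry = parseGeometryFromArrow(cellValue, 'geoarrow.point');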
package/dist/index.cjs CHANGED
@@ -33,7 +33,6 @@ __export(src_exports, {
  ArrowLoader: () => ArrowLoader,
  ArrowWorkerLoader: () => ArrowWorkerLoader,
  ArrowWriter: () => ArrowWriter,
- BINARY_GEOMETRY_TEMPLATE: () => BINARY_GEOMETRY_TEMPLATE,
  GeoArrowLoader: () => GeoArrowLoader,
  GeoArrowWorkerLoader: () => GeoArrowWorkerLoader,
  TriangulationWorker: () => TriangulationWorker,
@@ -45,12 +44,15 @@ __export(src_exports, {
  deserializeArrowType: () => deserializeArrowType,
  getArrowType: () => getArrowType,
  getBinaryGeometriesFromArrow: () => getBinaryGeometriesFromArrow,
+ getBinaryGeometryTemplate: () => getBinaryGeometryTemplate,
  getMeanCentersFromBinaryGeometries: () => getMeanCentersFromBinaryGeometries,
  getTriangleIndices: () => getTriangleIndices,
+ hardClone: () => hardClone,
+ parseGeoArrowOnWorker: () => parseGeoArrowOnWorker,
  parseGeometryFromArrow: () => parseGeometryFromArrow2,
  serializeArrowField: () => serializeArrowField,
  serializeArrowMetadata: () => serializeArrowMetadata,
- serializeArrowSchema: () => serializeArrowSchema2,
+ serializeArrowSchema: () => serializeArrowSchema,
  serializeArrowType: () => serializeArrowType,
  triangulateOnWorker: () => triangulateOnWorker,
  updateBoundsFromGeoArrowSamples: () => updateBoundsFromGeoArrowSamples
@@ -152,7 +154,7 @@ var VECTOR_TYPES = /* @__PURE__ */ ((VECTOR_TYPES2) => {

  // src/parsers/parse-arrow-sync.ts
  var import_schema2 = require("@loaders.gl/schema");
- var arrow3 = __toESM(require("apache-arrow"), 1);
+ var arrow4 = __toESM(require("apache-arrow"), 1);

  // src/tables/convert-arrow-to-columnar-table.ts
  function convertArrowToColumnarTable(table) {
@@ -165,199 +167,21 @@ function convertArrowToColumnarTable(table) {
  }
  return {
  shape: "columnar-table",
+ schema: table.schema,
  data: columnarTable
  };
  }

- // src/parsers/parse-arrow-sync.ts
- function parseArrowSync(arrayBuffer, options) {
- const apacheArrowTable = arrow3.tableFromIPC([new Uint8Array(arrayBuffer)]);
- const arrowTable = { shape: "arrow-table", data: apacheArrowTable };
- const shape = (options == null ? void 0 : options.shape) || "arrow-table";
- switch (shape) {
- case "arrow-table":
- return arrowTable;
- case "columnar-table":
- return convertArrowToColumnarTable(arrowTable);
- case "object-row-table":
- let columnarTable = convertArrowToColumnarTable(arrowTable);
- return (0, import_schema2.convertTable)(columnarTable, "object-row-table");
- case "array-row-table":
- columnarTable = convertArrowToColumnarTable(arrowTable);
- return (0, import_schema2.convertTable)(columnarTable, "array-row-table");
- default:
- throw new Error(shape);
- }
- }
-
- // src/parsers/parse-arrow-in-batches.ts
- var arrow4 = __toESM(require("apache-arrow"), 1);
- function parseArrowInBatches(asyncIterator) {
- async function* makeArrowAsyncIterator() {
- const readers = arrow4.RecordBatchReader.readAll(asyncIterator);
- for await (const reader of readers) {
- for await (const recordBatch of reader) {
- const arrowTabledBatch = {
- shape: "arrow-table",
- batchType: "data",
- data: new arrow4.Table([recordBatch]),
- length: recordBatch.data.length
- };
- yield arrowTabledBatch;
- }
- break;
- }
- }
- return makeArrowAsyncIterator();
- }
-
- // src/arrow-loader.ts
- var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
- var ArrowWorkerLoader = {
- name: "Apache Arrow",
- id: "arrow",
- module: "arrow",
- version: VERSION,
- // worker: true,
- category: "table",
- extensions: ["arrow", "feather"],
- mimeTypes: [
- "application/vnd.apache.arrow.file",
- "application/vnd.apache.arrow.stream",
- "application/octet-stream"
- ],
- binary: true,
- tests: ["ARROW"],
- options: {
- arrow: {
- shape: "columnar-table"
- }
- }
- };
- var ArrowLoader = {
- ...ArrowWorkerLoader,
- parse: async (arraybuffer, options) => parseArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
- parseSync: (arraybuffer, options) => parseArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
- parseInBatches: parseArrowInBatches
- };
-
- // src/lib/encode-arrow.ts
- var arrow5 = __toESM(require("apache-arrow"), 1);
- function encodeArrowSync(data) {
- const vectors = {};
- for (const arrayData of data) {
- const arrayVector = createVector(arrayData.array, arrayData.type);
- vectors[arrayData.name] = arrayVector;
- }
- const table = new arrow5.Table(vectors);
- const arrowBuffer = arrow5.tableToIPC(table);
- return arrowBuffer;
- }
- function createVector(array, type) {
- switch (type) {
- case 1 /* DATE */:
- return arrow5.vectorFromArray(array);
- case 0 /* FLOAT */:
- default:
- return arrow5.vectorFromArray(array);
- }
- }
-
- // src/arrow-writer.ts
- var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
- var ArrowWriter = {
- name: "Apache Arrow",
- id: "arrow",
- module: "arrow",
- version: VERSION2,
- extensions: ["arrow", "feather"],
- mimeTypes: [
- "application/vnd.apache.arrow.file",
- "application/vnd.apache.arrow.stream",
- "application/octet-stream"
- ],
- binary: true,
- options: {},
- encode: async function encodeArrow(data, options) {
- return encodeArrowSync(data);
- },
- encodeSync(data, options) {
- return encodeArrowSync(data);
- }
- };
-
- // src/tables/convert-arrow-to-geojson-table.ts
- var import_arrow = require("@loaders.gl/arrow");
- var import_gis = require("@loaders.gl/gis");
- function convertArrowToGeoJSONTable(table) {
- var _a;
- const arrowTable = table.data;
- const schema = (0, import_arrow.serializeArrowSchema)(arrowTable.schema);
- const geometryColumns = (0, import_gis.getGeometryColumnsFromSchema)(schema);
- const encoding = geometryColumns.geometry.encoding;
- const features = [];
- for (let row = 0; row < arrowTable.numRows; row++) {
- const arrowGeometry = (_a = arrowTable.getChild("geometry")) == null ? void 0 : _a.get(row);
- const arrowGeometryObject = { encoding, data: arrowGeometry };
- const feature = (0, import_arrow.parseGeometryFromArrow)(arrowGeometryObject);
- if (feature) {
- features.push(feature);
- }
- }
- return {
- shape: "geojson-table",
- type: "FeatureCollection",
- features
- };
- }
-
- // src/parsers/parse-geoarrow-sync.ts
- function parseGeoArrowSync(arrayBuffer, options) {
- const table = parseArrowSync(arrayBuffer, { shape: "arrow-table" });
- switch (options == null ? void 0 : options.shape) {
- case "geojson-table":
- return convertArrowToGeoJSONTable(table);
- default:
- return table;
- }
- }
-
- // src/parsers/parse-geoarrow-in-batches.ts
- function parseGeoArrowInBatches(asyncIterator) {
- return parseArrowInBatches(asyncIterator);
- }
-
- // src/geoarrow-loader.ts
- var GeoArrowWorkerLoader = {
- ...ArrowWorkerLoader,
- options: {
- arrow: {
- shape: "arrow-table"
- }
- }
- };
- var GeoArrowLoader = {
- ...ArrowWorkerLoader,
- options: {
- arrow: {
- shape: "arrow-table"
- }
- },
- parse: async (arraybuffer, options) => parseGeoArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
- parseSync: (arraybuffer, options) => parseGeoArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
- parseInBatches: parseGeoArrowInBatches
- };
-
  // src/schema/convert-arrow-schema.ts
- var arrow6 = __toESM(require("apache-arrow"), 1);
- function serializeArrowSchema2(arrowSchema) {
+ var arrow3 = __toESM(require("apache-arrow"), 1);
+ function serializeArrowSchema(arrowSchema) {
  return {
  fields: arrowSchema.fields.map((arrowField) => serializeArrowField(arrowField)),
  metadata: serializeArrowMetadata(arrowSchema.metadata)
  };
  }
  function deserializeArrowSchema(schema) {
- return new arrow6.Schema(
+ return new arrow3.Schema(
  schema.fields.map((field) => deserializeArrowField(field)),
  deserializeArrowMetadata(schema.metadata)
  );
@@ -377,7 +201,7 @@ function serializeArrowField(field) {
  };
  }
  function deserializeArrowField(field) {
- return new arrow6.Field(
+ return new arrow3.Field(
  field.name,
  deserializeArrowType(field.type),
  field.nullable,
@@ -386,52 +210,52 @@ function deserializeArrowField(field) {
  }
  function serializeArrowType(arrowType) {
  switch (arrowType.constructor) {
- case arrow6.Null:
+ case arrow3.Null:
  return "null";
- case arrow6.Binary:
+ case arrow3.Binary:
  return "binary";
- case arrow6.Bool:
+ case arrow3.Bool:
  return "bool";
- case arrow6.Int:
+ case arrow3.Int:
  const intType = arrowType;
  return `${intType.isSigned ? "u" : ""}int${intType.bitWidth}`;
- case arrow6.Int8:
+ case arrow3.Int8:
  return "int8";
- case arrow6.Int16:
+ case arrow3.Int16:
  return "int16";
- case arrow6.Int32:
+ case arrow3.Int32:
  return "int32";
- case arrow6.Int64:
+ case arrow3.Int64:
  return "int64";
- case arrow6.Uint8:
+ case arrow3.Uint8:
  return "uint8";
- case arrow6.Uint16:
+ case arrow3.Uint16:
  return "uint16";
- case arrow6.Uint32:
+ case arrow3.Uint32:
  return "uint32";
- case arrow6.Uint64:
+ case arrow3.Uint64:
  return "uint64";
- case arrow6.Float:
+ case arrow3.Float:
  const precision = arrowType.precision;
  switch (precision) {
- case arrow6.Precision.HALF:
+ case arrow3.Precision.HALF:
  return "float16";
- case arrow6.Precision.SINGLE:
+ case arrow3.Precision.SINGLE:
  return "float32";
- case arrow6.Precision.DOUBLE:
+ case arrow3.Precision.DOUBLE:
  return "float64";
  default:
  return "float16";
  }
- case arrow6.Float16:
+ case arrow3.Float16:
  return "float16";
- case arrow6.Float32:
+ case arrow3.Float32:
  return "float32";
- case arrow6.Float64:
+ case arrow3.Float64:
  return "float64";
- case arrow6.Utf8:
+ case arrow3.Utf8:
  return "utf8";
- case arrow6.Decimal:
+ case arrow3.Decimal:
  const decimal = arrowType;
  return {
  type: "decimal",
@@ -439,93 +263,93 @@ function serializeArrowType(arrowType) {
  precision: decimal.precision,
  scale: decimal.scale
  };
- case arrow6.Date_:
+ case arrow3.Date_:
  const dateUnit = arrowType.unit;
- return dateUnit === arrow6.DateUnit.DAY ? "date-day" : "date-millisecond";
- case arrow6.DateDay:
+ return dateUnit === arrow3.DateUnit.DAY ? "date-day" : "date-millisecond";
+ case arrow3.DateDay:
  return "date-day";
- case arrow6.DateMillisecond:
+ case arrow3.DateMillisecond:
  return "date-millisecond";
- case arrow6.Time:
+ case arrow3.Time:
  const timeUnit = arrowType.unit;
  switch (timeUnit) {
- case arrow6.TimeUnit.SECOND:
+ case arrow3.TimeUnit.SECOND:
  return "time-second";
- case arrow6.TimeUnit.MILLISECOND:
+ case arrow3.TimeUnit.MILLISECOND:
  return "time-millisecond";
- case arrow6.TimeUnit.MICROSECOND:
+ case arrow3.TimeUnit.MICROSECOND:
  return "time-microsecond";
- case arrow6.TimeUnit.NANOSECOND:
+ case arrow3.TimeUnit.NANOSECOND:
  return "time-nanosecond";
  default:
  return "time-second";
  }
- case arrow6.TimeMillisecond:
+ case arrow3.TimeMillisecond:
  return "time-millisecond";
- case arrow6.TimeSecond:
+ case arrow3.TimeSecond:
  return "time-second";
- case arrow6.TimeMicrosecond:
+ case arrow3.TimeMicrosecond:
  return "time-microsecond";
- case arrow6.TimeNanosecond:
+ case arrow3.TimeNanosecond:
  return "time-nanosecond";
- case arrow6.Timestamp:
+ case arrow3.Timestamp:
  const timeStampUnit = arrowType.unit;
  switch (timeStampUnit) {
- case arrow6.TimeUnit.SECOND:
+ case arrow3.TimeUnit.SECOND:
  return "timestamp-second";
- case arrow6.TimeUnit.MILLISECOND:
+ case arrow3.TimeUnit.MILLISECOND:
  return "timestamp-millisecond";
- case arrow6.TimeUnit.MICROSECOND:
+ case arrow3.TimeUnit.MICROSECOND:
  return "timestamp-microsecond";
- case arrow6.TimeUnit.NANOSECOND:
+ case arrow3.TimeUnit.NANOSECOND:
  return "timestamp-nanosecond";
  default:
  return "timestamp-second";
  }
- case arrow6.TimestampSecond:
+ case arrow3.TimestampSecond:
  return "timestamp-second";
- case arrow6.TimestampMillisecond:
+ case arrow3.TimestampMillisecond:
  return "timestamp-millisecond";
- case arrow6.TimestampMicrosecond:
+ case arrow3.TimestampMicrosecond:
  return "timestamp-microsecond";
- case arrow6.TimestampNanosecond:
+ case arrow3.TimestampNanosecond:
  return "timestamp-nanosecond";
- case arrow6.Interval:
+ case arrow3.Interval:
  const intervalUnit = arrowType.unit;
  switch (intervalUnit) {
- case arrow6.IntervalUnit.DAY_TIME:
+ case arrow3.IntervalUnit.DAY_TIME:
  return "interval-daytime";
- case arrow6.IntervalUnit.YEAR_MONTH:
+ case arrow3.IntervalUnit.YEAR_MONTH:
  return "interval-yearmonth";
  default:
  return "interval-daytime";
  }
- case arrow6.IntervalDayTime:
+ case arrow3.IntervalDayTime:
  return "interval-daytime";
- case arrow6.IntervalYearMonth:
+ case arrow3.IntervalYearMonth:
  return "interval-yearmonth";
- case arrow6.Map_:
+ case arrow3.Map_:
  const mapType = arrowType;
  return {
  type: "map",
  keysSorted: mapType.keysSorted,
  children: mapType.children.map((arrowField) => serializeArrowField(arrowField))
  };
- case arrow6.List:
+ case arrow3.List:
  const listType = arrowType;
  const listField = listType.valueField;
  return {
  type: "list",
  children: [serializeArrowField(listField)]
  };
- case arrow6.FixedSizeList:
+ case arrow3.FixedSizeList:
  const fixedSizeList = arrowType;
  return {
  type: "fixed-size-list",
  listSize: fixedSizeList.listSize,
  children: [serializeArrowField(fixedSizeList.children[0])]
  };
- case arrow6.Struct:
+ case arrow3.Struct:
  const structType = arrowType;
  return {
  type: "struct",
@@ -539,83 +363,270 @@ function deserializeArrowType(dataType) {
  if (typeof dataType === "object") {
  switch (dataType.type) {
  case "decimal":
- return new arrow6.Decimal(dataType.precision, dataType.scale, dataType.bitWidth);
+ return new arrow3.Decimal(dataType.precision, dataType.scale, dataType.bitWidth);
  case "map":
  let children = dataType.children.map((arrowField) => deserializeArrowField(arrowField));
- return new arrow6.Map_(children, dataType.keysSorted);
+ return new arrow3.Map_(children, dataType.keysSorted);
  case "list":
  const field = deserializeArrowField(dataType.children[0]);
- return new arrow6.List(field);
+ return new arrow3.List(field);
  case "fixed-size-list":
  const child = deserializeArrowField(dataType.children[0]);
- return new arrow6.FixedSizeList(dataType.listSize, child);
+ return new arrow3.FixedSizeList(dataType.listSize, child);
  case "struct":
  children = dataType.children.map((arrowField) => deserializeArrowField(arrowField));
- return new arrow6.Struct(children);
+ return new arrow3.Struct(children);
  default:
  throw new Error("array type not supported");
  }
  }
  switch (dataType) {
  case "null":
- return new arrow6.Null();
+ return new arrow3.Null();
  case "binary":
- return new arrow6.Binary();
+ return new arrow3.Binary();
  case "bool":
- return new arrow6.Bool();
+ return new arrow3.Bool();
  case "int8":
- return new arrow6.Int8();
+ return new arrow3.Int8();
  case "int16":
- return new arrow6.Int16();
+ return new arrow3.Int16();
  case "int32":
- return new arrow6.Int32();
+ return new arrow3.Int32();
  case "int64":
- return new arrow6.Int64();
+ return new arrow3.Int64();
  case "uint8":
- return new arrow6.Uint8();
+ return new arrow3.Uint8();
  case "uint16":
- return new arrow6.Uint16();
+ return new arrow3.Uint16();
  case "uint32":
- return new arrow6.Uint32();
+ return new arrow3.Uint32();
  case "uint64":
- return new arrow6.Uint64();
+ return new arrow3.Uint64();
  case "float16":
- return new arrow6.Float16();
+ return new arrow3.Float16();
  case "float32":
- return new arrow6.Float32();
+ return new arrow3.Float32();
  case "float64":
- return new arrow6.Float64();
+ return new arrow3.Float64();
  case "utf8":
- return new arrow6.Utf8();
+ return new arrow3.Utf8();
  case "date-day":
- return new arrow6.DateDay();
+ return new arrow3.DateDay();
  case "date-millisecond":
- return new arrow6.DateMillisecond();
+ return new arrow3.DateMillisecond();
  case "time-second":
- return new arrow6.TimeSecond();
+ return new arrow3.TimeSecond();
  case "time-millisecond":
- return new arrow6.TimeMillisecond();
+ return new arrow3.TimeMillisecond();
  case "time-microsecond":
- return new arrow6.TimeMicrosecond();
+ return new arrow3.TimeMicrosecond();
  case "time-nanosecond":
- return new arrow6.TimeNanosecond();
+ return new arrow3.TimeNanosecond();
  case "timestamp-second":
- return new arrow6.TimestampSecond();
+ return new arrow3.TimestampSecond();
  case "timestamp-millisecond":
- return new arrow6.TimestampMillisecond();
+ return new arrow3.TimestampMillisecond();
  case "timestamp-microsecond":
- return new arrow6.TimestampMicrosecond();
+ return new arrow3.TimestampMicrosecond();
  case "timestamp-nanosecond":
- return new arrow6.TimestampNanosecond();
+ return new arrow3.TimestampNanosecond();
  case "interval-daytime":
- return new arrow6.IntervalDayTime();
+ return new arrow3.IntervalDayTime();
  case "interval-yearmonth":
- return new arrow6.IntervalYearMonth();
+ return new arrow3.IntervalYearMonth();
  default:
  throw new Error("array type not supported");
  }
  }

+ // src/parsers/parse-arrow-sync.ts
+ function parseArrowSync(arrayBuffer, options) {
+ const apacheArrowTable = arrow4.tableFromIPC([new Uint8Array(arrayBuffer)]);
+ const arrowTable = {
+ shape: "arrow-table",
+ schema: serializeArrowSchema(apacheArrowTable.schema),
+ data: apacheArrowTable
+ };
+ const shape = (options == null ? void 0 : options.shape) || "arrow-table";
+ switch (shape) {
+ case "arrow-table":
+ return arrowTable;
+ case "columnar-table":
+ return convertArrowToColumnarTable(arrowTable);
+ case "object-row-table":
+ let columnarTable = convertArrowToColumnarTable(arrowTable);
+ return (0, import_schema2.convertTable)(columnarTable, "object-row-table");
+ case "array-row-table":
+ columnarTable = convertArrowToColumnarTable(arrowTable);
+ return (0, import_schema2.convertTable)(columnarTable, "array-row-table");
+ default:
+ throw new Error(shape);
+ }
+ }
+
+ // src/parsers/parse-arrow-in-batches.ts
+ var arrow5 = __toESM(require("apache-arrow"), 1);
+ function parseArrowInBatches(asyncIterator) {
+ async function* makeArrowAsyncIterator() {
+ const readers = arrow5.RecordBatchReader.readAll(asyncIterator);
+ for await (const reader of readers) {
+ for await (const recordBatch of reader) {
+ const arrowTabledBatch = {
+ shape: "arrow-table",
+ batchType: "data",
+ data: new arrow5.Table([recordBatch]),
+ length: recordBatch.data.length
+ };
+ yield arrowTabledBatch;
+ }
+ break;
+ }
+ }
+ return makeArrowAsyncIterator();
+ }
+
+ // src/arrow-loader.ts
+ var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
+ var ArrowWorkerLoader = {
+ name: "Apache Arrow",
+ id: "arrow",
+ module: "arrow",
+ version: VERSION,
+ // worker: true,
+ category: "table",
+ extensions: ["arrow", "feather"],
+ mimeTypes: [
+ "application/vnd.apache.arrow.file",
+ "application/vnd.apache.arrow.stream",
+ "application/octet-stream"
+ ],
+ binary: true,
+ tests: ["ARROW"],
+ options: {
+ arrow: {
+ shape: "columnar-table"
+ }
+ }
+ };
+ var ArrowLoader = {
+ ...ArrowWorkerLoader,
+ parse: async (arraybuffer, options) => parseArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
+ parseSync: (arraybuffer, options) => parseArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
+ parseInBatches: parseArrowInBatches
+ };
+
+ // src/lib/encode-arrow.ts
+ var arrow6 = __toESM(require("apache-arrow"), 1);
+ function encodeArrowSync(data) {
+ const vectors = {};
+ for (const arrayData of data) {
+ const arrayVector = createVector(arrayData.array, arrayData.type);
+ vectors[arrayData.name] = arrayVector;
+ }
+ const table = new arrow6.Table(vectors);
+ const arrowBuffer = arrow6.tableToIPC(table);
+ return arrowBuffer;
+ }
+ function createVector(array, type) {
+ switch (type) {
+ case 1 /* DATE */:
+ return arrow6.vectorFromArray(array);
+ case 0 /* FLOAT */:
+ default:
+ return arrow6.vectorFromArray(array);
+ }
+ }
+
+ // src/arrow-writer.ts
+ var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
+ var ArrowWriter = {
+ name: "Apache Arrow",
+ id: "arrow",
+ module: "arrow",
+ version: VERSION2,
+ extensions: ["arrow", "feather"],
+ mimeTypes: [
+ "application/vnd.apache.arrow.file",
+ "application/vnd.apache.arrow.stream",
+ "application/octet-stream"
+ ],
+ binary: true,
+ options: {},
+ encode: async function encodeArrow(data, options) {
+ return encodeArrowSync(data);
+ },
+ encodeSync(data, options) {
+ return encodeArrowSync(data);
+ }
+ };
+
+ // src/tables/convert-arrow-to-geojson-table.ts
+ var import_arrow = require("@loaders.gl/arrow");
+ var import_gis = require("@loaders.gl/gis");
+ function convertArrowToGeoJSONTable(table) {
+ var _a;
+ const arrowTable = table.data;
+ const schema = (0, import_arrow.serializeArrowSchema)(arrowTable.schema);
+ const geometryColumns = (0, import_gis.getGeometryColumnsFromSchema)(schema);
+ const encoding = geometryColumns.geometry.encoding;
+ const features = [];
+ const propertyColumnNames = arrowTable.schema.fields.map((field) => field.name).filter((name) => !(name in geometryColumns));
+ const propertiesTable = arrowTable.select(propertyColumnNames);
+ const arrowGeometryColumn = arrowTable.getChild("geometry");
+ for (let row = 0; row < arrowTable.numRows; row++) {
+ const arrowGeometry = arrowGeometryColumn == null ? void 0 : arrowGeometryColumn.get(row);
+ const feature = (0, import_arrow.parseGeometryFromArrow)(arrowGeometry, encoding);
+ if (feature) {
+ const properties = ((_a = propertiesTable.get(row)) == null ? void 0 : _a.toJSON()) || {};
+ features.push({ type: "Feature", geometry: feature, properties });
+ }
+ }
+ return {
+ shape: "geojson-table",
+ type: "FeatureCollection",
+ schema: table.schema,
+ features
+ };
+ }
+
+ // src/parsers/parse-geoarrow-sync.ts
+ function parseGeoArrowSync(arrayBuffer, options) {
+ const table = parseArrowSync(arrayBuffer, { shape: "arrow-table" });
+ switch (options == null ? void 0 : options.shape) {
+ case "geojson-table":
+ return convertArrowToGeoJSONTable(table);
+ default:
+ return table;
+ }
+ }
+
+ // src/parsers/parse-geoarrow-in-batches.ts
+ function parseGeoArrowInBatches(asyncIterator) {
+ return parseArrowInBatches(asyncIterator);
+ }
+
+ // src/geoarrow-loader.ts
+ var GeoArrowWorkerLoader = {
+ ...ArrowWorkerLoader,
+ options: {
+ arrow: {
+ shape: "arrow-table"
+ }
+ }
+ };
+ var GeoArrowLoader = {
+ ...ArrowWorkerLoader,
+ options: {
+ arrow: {
+ shape: "arrow-table"
+ }
+ },
+ parse: async (arraybuffer, options) => parseGeoArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
+ parseSync: (arraybuffer, options) => parseGeoArrowSync(arraybuffer, options == null ? void 0 : options.arrow),
+ parseInBatches: parseGeoArrowInBatches
+ };
+
  // src/geoarrow/convert-geoarrow-to-binary-geometry.ts
  var import_polygon = require("@math.gl/polygon");

@@ -644,25 +655,27 @@ function updateBoundsFromGeoArrowSamples(flatCoords, nDim, bounds, sampleSize =
  }

  // src/geoarrow/convert-geoarrow-to-binary-geometry.ts
- var BINARY_GEOMETRY_TEMPLATE = {
- globalFeatureIds: { value: new Uint32Array(0), size: 1 },
- positions: { value: new Float32Array(0), size: 2 },
- properties: [],
- numericProps: {},
- featureIds: { value: new Uint32Array(0), size: 1 }
- };
+ function getBinaryGeometryTemplate() {
+ return {
+ globalFeatureIds: { value: new Uint32Array(0), size: 1 },
+ positions: { value: new Float32Array(0), size: 2 },
+ properties: [],
+ numericProps: {},
+ featureIds: { value: new Uint32Array(0), size: 1 }
+ };
+ }
  function getBinaryGeometriesFromArrow(geoColumn, geoEncoding, options) {
  const featureTypes = {
  polygon: geoEncoding === "geoarrow.multipolygon" || geoEncoding === "geoarrow.polygon",
  point: geoEncoding === "geoarrow.multipoint" || geoEncoding === "geoarrow.point",
  line: geoEncoding === "geoarrow.multilinestring" || geoEncoding === "geoarrow.linestring"
  };
- const chunks = (options == null ? void 0 : options.chunkIndex) ? [geoColumn.data[options == null ? void 0 : options.chunkIndex]] : geoColumn.data;
+ const chunks = (options == null ? void 0 : options.chunkIndex) !== void 0 && (options == null ? void 0 : options.chunkIndex) >= 0 ? [geoColumn.data[options == null ? void 0 : options.chunkIndex]] : geoColumn.data;
  let bounds = [Infinity, Infinity, -Infinity, -Infinity];
- let globalFeatureIdOffset = 0;
+ let globalFeatureIdOffset = (options == null ? void 0 : options.chunkOffset) || 0;
  const binaryGeometries = [];
  chunks.forEach((chunk) => {
- const { featureIds, flatCoordinateArray, nDim, geomOffset, triangles } = getBinaryGeometriesFromChunk(chunk, geoEncoding);
+ const { featureIds, flatCoordinateArray, nDim, geomOffset, triangles } = getBinaryGeometriesFromChunk(chunk, geoEncoding, options);
  const globalFeatureIds = new Uint32Array(featureIds.length);
  for (let i = 0; i < featureIds.length; i++) {
  globalFeatureIds[i] = featureIds[i] + globalFeatureIdOffset;
@@ -674,6 +687,7 @@ function getBinaryGeometriesFromArrow(geoColumn, geoEncoding, options) {
  size: nDim
  },
  featureIds: { value: featureIds, size: 1 },
+ // eslint-disable-next-line no-loop-func
  properties: [...Array(chunk.length).keys()].map((i) => ({
  index: i + globalFeatureIdOffset
  }))
@@ -683,18 +697,18 @@ function getBinaryGeometriesFromArrow(geoColumn, geoEncoding, options) {
  shape: "binary-feature-collection",
  points: {
  type: "Point",
- ...BINARY_GEOMETRY_TEMPLATE,
+ ...getBinaryGeometryTemplate(),
  ...featureTypes.point ? binaryContent : {}
  },
  lines: {
  type: "LineString",
- ...BINARY_GEOMETRY_TEMPLATE,
+ ...getBinaryGeometryTemplate(),
  ...featureTypes.line ? binaryContent : {},
  pathIndices: { value: featureTypes.line ? geomOffset : new Uint16Array(0), size: 1 }
  },
  polygons: {
  type: "Polygon",
- ...BINARY_GEOMETRY_TEMPLATE,
+ ...getBinaryGeometryTemplate(),
  ...featureTypes.polygon ? binaryContent : {},
  polygonIndices: {
  // use geomOffset as polygonIndices same as primitivePolygonIndices since we are using earcut to get triangule indices
@@ -714,7 +728,7 @@ function getBinaryGeometriesFromArrow(geoColumn, geoEncoding, options) {
  binaryGeometries,
  bounds,
  featureTypes,
- ...(options == null ? void 0 : options.meanCenter) ? { meanCenters: getMeanCentersFromBinaryGeometries(binaryGeometries) } : {}
+ ...(options == null ? void 0 : options.calculateMeanCenters) ? { meanCenters: getMeanCentersFromBinaryGeometries(binaryGeometries) } : {}
  };
  }
  function getMeanCentersFromBinaryGeometries(binaryGeometries) {
@@ -723,18 +737,18 @@ function getMeanCentersFromBinaryGeometries(binaryGeometries) {
  var _a;
  let binaryGeometryType = null;
  if (binaryGeometry.points && binaryGeometry.points.positions.value.length > 0) {
- binaryGeometryType = "points";
+ binaryGeometryType = "points" /* points */;
  } else if (binaryGeometry.lines && binaryGeometry.lines.positions.value.length > 0) {
- binaryGeometryType = "lines";
+ binaryGeometryType = "lines" /* lines */;
  } else if (binaryGeometry.polygons && binaryGeometry.polygons.positions.value.length > 0) {
- binaryGeometryType = "polygons";
+ binaryGeometryType = "polygons" /* polygons */;
  }
  const binaryContent = binaryGeometryType ? binaryGeometry[binaryGeometryType] : null;
  if (binaryContent && binaryGeometryType !== null) {
  const featureIds = binaryContent.featureIds.value;
  const flatCoordinateArray = binaryContent.positions.value;
  const nDim = binaryContent.positions.size;
- const primitivePolygonIndices = (_a = binaryContent.primitivePolygonIndices) == null ? void 0 : _a.value;
+ const primitivePolygonIndices = binaryContent.type === "Polygon" ? (_a = binaryContent.primitivePolygonIndices) == null ? void 0 : _a.value : void 0;
  const meanCenters = getMeanCentersFromGeometry(
  featureIds,
  flatCoordinateArray,
@@ -753,19 +767,23 @@ function getMeanCentersFromGeometry(featureIds, flatCoordinateArray, nDim, geome
  const meanCenters = [];
  const vertexCount = flatCoordinateArray.length;
  let vertexIndex = 0;
+ let coordIdx = 0;
+ let primitiveIdx = 0;
  while (vertexIndex < vertexCount) {
  const featureId = featureIds[vertexIndex / nDim];
  const center = [0, 0];
  let vertexCountInFeature = 0;
- while (vertexIndex < vertexCount && featureIds[vertexIndex / nDim] === featureId) {
- if (geometryType === "polygons" && primitivePolygonIndices && primitivePolygonIndices.indexOf(vertexIndex / nDim) >= 0) {
+ while (vertexIndex < vertexCount && featureIds[coordIdx] === featureId) {
+ if (geometryType === "polygons" /* polygons */ && (primitivePolygonIndices == null ? void 0 : primitivePolygonIndices[primitiveIdx]) === coordIdx) {
  vertexIndex += nDim;
+ primitiveIdx++;
  } else {
  center[0] += flatCoordinateArray[vertexIndex];
  center[1] += flatCoordinateArray[vertexIndex + 1];
  vertexIndex += nDim;
  vertexCountInFeature++;
  }
+ coordIdx += 1;
  }
  center[0] /= vertexCountInFeature;
  center[1] /= vertexCountInFeature;
@@ -773,7 +791,7 @@ function getMeanCentersFromGeometry(featureIds, flatCoordinateArray, nDim, geome
  }
  return meanCenters;
  }
- function getBinaryGeometriesFromChunk(chunk, geoEncoding) {
+ function getBinaryGeometriesFromChunk(chunk, geoEncoding, options) {
  switch (geoEncoding) {
  case "geoarrow.point":
  case "geoarrow.multipoint":
@@ -783,41 +801,48 @@ function getBinaryGeometriesFromChunk(chunk, geoEncoding) {
  return getBinaryLinesFromChunk(chunk, geoEncoding);
  case "geoarrow.polygon":
  case "geoarrow.multipolygon":
- return getBinaryPolygonsFromChunk(chunk, geoEncoding);
+ return getBinaryPolygonsFromChunk(chunk, geoEncoding, options);
  default:
  throw Error("invalid geoarrow encoding");
  }
  }
  function getTriangleIndices(polygonIndices, primitivePolygonIndices, flatCoordinateArray, nDim) {
- let primitiveIndex = 0;
- const triangles = [];
- for (let i = 0; i < polygonIndices.length - 1; i++) {
- const startIdx = polygonIndices[i];
- const endIdx = polygonIndices[i + 1];
- const slicedFlatCoords = flatCoordinateArray.subarray(startIdx * nDim, endIdx * nDim);
- const holeIndices = [];
- while (primitivePolygonIndices[primitiveIndex] < endIdx) {
- if (primitivePolygonIndices[primitiveIndex] > startIdx) {
- holeIndices.push(primitivePolygonIndices[primitiveIndex] - startIdx);
+ try {
+ let primitiveIndex = 0;
+ const triangles = [];
+ for (let i = 0; i < polygonIndices.length - 1; i++) {
+ const startIdx = polygonIndices[i];
+ const endIdx = polygonIndices[i + 1];
+ const slicedFlatCoords = flatCoordinateArray.subarray(startIdx * nDim, endIdx * nDim);
+ const holeIndices = [];
+ while (primitivePolygonIndices[primitiveIndex] < endIdx) {
+ if (primitivePolygonIndices[primitiveIndex] > startIdx) {
+ holeIndices.push(primitivePolygonIndices[primitiveIndex] - startIdx);
+ }
+ primitiveIndex++;
+ }
+ const triangleIndices = (0, import_polygon.earcut)(
+ slicedFlatCoords,
+ holeIndices.length > 0 ? holeIndices : void 0,
+ nDim
+ );
+ if (triangleIndices.length === 0) {
+ throw Error("earcut failed e.g. invalid polygon");
+ }
+ for (let j = 0; j < triangleIndices.length; j++) {
+ triangles.push(triangleIndices[j] + startIdx);
  }
- primitiveIndex++;
  }
- const triangleIndices = (0, import_polygon.earcut)(
- slicedFlatCoords,
- holeIndices.length > 0 ? holeIndices : void 0,
- nDim
- );
- for (let j = 0; j < triangleIndices.length; j++) {
- triangles.push(triangleIndices[j] + startIdx);
+ const trianglesUint32 = new Uint32Array(triangles.length);
+ for (let i = 0; i < triangles.length; i++) {
+ trianglesUint32[i] = triangles[i];
  }
+ return trianglesUint32;
+ } catch (error) {
+ return null;
  }
- const trianglesUint32 = new Uint32Array(triangles.length);
- for (let i = 0; i < triangles.length; i++) {
- trianglesUint32[i] = triangles[i];
- }
- return trianglesUint32;
  }
- function getBinaryPolygonsFromChunk(chunk, geoEncoding) {
+ function getBinaryPolygonsFromChunk(chunk, geoEncoding, options) {
  const isMultiPolygon = geoEncoding === "geoarrow.multipolygon";
  const polygonData = isMultiPolygon ? chunk.children[0] : chunk;
  const polygonOffset = polygonData.valueOffsets;
@@ -841,14 +866,14 @@ function getBinaryPolygonsFromChunk(chunk, geoEncoding) {
  featureIds[j] = i;
  }
  }
- const triangles = getTriangleIndices(geometryIndicies, geomOffset, flatCoordinateArray, nDim);
+ const triangles = (options == null ? void 0 : options.triangulate) ? getTriangleIndices(geometryIndicies, geomOffset, flatCoordinateArray, nDim) : null;
  return {
  featureIds,
- flatCoordinateArray,
  nDim,
+ flatCoordinateArray,
  geomOffset,
  geometryIndicies,
- triangles
+ ...(options == null ? void 0 : options.triangulate) && triangles ? { triangles } : {}
  };
  }
  function getBinaryLinesFromChunk(chunk, geoEncoding) {
@@ -921,47 +946,60 @@ function getBinaryPointsFromChunk(chunk, geoEncoding) {
  };
  }

- // src/geoarrow/convert-geoarrow-to-geojson.ts
- function parseGeometryFromArrow2(rawData) {
- var _a;
- const encoding = (_a = rawData.encoding) == null ? void 0 : _a.toLowerCase();
- const data = rawData.data;
- if (!encoding || !data) {
+ // src/geoarrow/convert-geoarrow-to-geojson-geometry.ts
+ var import_gis2 = require("@loaders.gl/gis");
+ var import_wkt = require("@loaders.gl/wkt");
+ function parseGeometryFromArrow2(arrowCellValue, encoding) {
+ encoding = encoding == null ? void 0 : encoding.toLowerCase();
+ if (!encoding || !arrowCellValue) {
  return null;
  }
  let geometry;
  switch (encoding) {
  case "geoarrow.multipolygon":
- geometry = arrowMultiPolygonToFeature(data);
+ geometry = arrowMultiPolygonToFeature(arrowCellValue);
  break;
  case "geoarrow.polygon":
- geometry = arrowPolygonToFeature(data);
+ geometry = arrowPolygonToFeature(arrowCellValue);
  break;
  case "geoarrow.multipoint":
- geometry = arrowMultiPointToFeature(data);
+ geometry = arrowMultiPointToFeature(arrowCellValue);
  break;
  case "geoarrow.point":
- geometry = arrowPointToFeature(data);
+ geometry = arrowPointToFeature(arrowCellValue);
  break;
  case "geoarrow.multilinestring":
- geometry = arrowMultiLineStringToFeature(data);
+ geometry = arrowMultiLineStringToFeature(arrowCellValue);
  break;
  case "geoarrow.linestring":
- geometry = arrowLineStringToFeature(data);
+ geometry = arrowLineStringToFeature(arrowCellValue);
  break;
  case "geoarrow.wkb":
- throw Error(`GeoArrow encoding not supported ${encoding}`);
+ geometry = arrowWKBToFeature(arrowCellValue);
+ break;
  case "geoarrow.wkt":
- throw Error(`GeoArrow encoding not supported ${encoding}`);
+ geometry = arrowWKTToFeature(arrowCellValue);
+ break;
  default: {
  throw Error(`GeoArrow encoding not supported ${encoding}`);
  }
  }
- return {
- type: "Feature",
- geometry,
- properties: {}
- };
+ return geometry;
+ }
+ function arrowWKBToFeature(arrowCellValue) {
+ var _a, _b;
+ const arrayBuffer = arrowCellValue.buffer.slice(
+ arrowCellValue.byteOffset,
+ arrowCellValue.byteOffset + arrowCellValue.byteLength
+ );
+ const binaryGeometry = (_b = (_a = import_wkt.WKBLoader).parseSync) == null ? void 0 : _b.call(_a, arrayBuffer);
+ const geometry = (0, import_gis2.binaryToGeometry)(binaryGeometry);
+ return geometry;
+ }
+ function arrowWKTToFeature(arrowCellValue) {
+ var _a, _b;
+ const string = arrowCellValue;
+ return (_b = (_a = import_wkt.WKTLoader).parseTextSync) == null ? void 0 : _b.call(_a, string);
  }
  function arrowMultiPolygonToFeature(arrowMultiPolygon) {
  const multiPolygon = [];
@@ -1013,19 +1051,17 @@ function arrowMultiPointToFeature(arrowMultiPoint) {
  multiPoint.push(coord);
  }
  }
- const geometry = {
+ return {
  type: "MultiPoint",
  coordinates: multiPoint
  };
- return geometry;
  }
  function arrowPointToFeature(arrowPoint) {
  const point = Array.from(arrowPoint);
- const geometry = {
+ return {
  type: "Point",
  coordinates: point
  };
- return geometry;
  }
  function arrowMultiLineStringToFeature(arrowMultiLineString) {
  const multiLineString = [];
@@ -1041,11 +1077,10 @@ function arrowMultiLineStringToFeature(arrowMultiLineString) {
  }
  multiLineString.push(lineString);
  }
- const geometry = {
+ return {
  type: "MultiLineString",
  coordinates: multiLineString
  };
- return geometry;
  }
  function arrowLineStringToFeature(arrowLineString) {
  const lineString = [];
@@ -1056,11 +1091,55 @@ function arrowLineStringToFeature(arrowLineString) {
  lineString.push(coords);
  }
  }
- const geometry = {
+ return {
  type: "LineString",
  coordinates: lineString
  };
- return geometry;
+ }
+
+ // src/workers/hard-clone.ts
+ var arrow7 = __toESM(require("apache-arrow"), 1);
+ function hardClone(data, force = false) {
+ if ("data" in data) {
+ return new arrow7.Vector(data.data.map((data2) => hardClone(data2, force)));
+ }
+ const clonedChildren = [];
+ for (const childData of data.children) {
+ clonedChildren.push(hardClone(childData, force));
+ }
+ let clonedDictionary;
+ if (data.dictionary !== void 0) {
+ clonedDictionary = hardClone(data.dictionary, force);
+ }
+ const clonedBuffers = {
+ [arrow7.BufferType.OFFSET]: cloneBuffer(data.buffers[arrow7.BufferType.OFFSET], force),
+ [arrow7.BufferType.DATA]: cloneBuffer(data.buffers[arrow7.BufferType.DATA], force),
+ [arrow7.BufferType.VALIDITY]: cloneBuffer(data.buffers[arrow7.BufferType.VALIDITY], force),
+ [arrow7.BufferType.TYPE]: cloneBuffer(data.buffers[arrow7.BufferType.TYPE], force)
+ };
+ return new arrow7.Data(
+ data.type,
+ data.offset,
+ data.length,
+ // @ts-expect-error _nullCount is protected. We're using it here to mimic
+ // `Data.clone`
+ data._nullCount,
+ clonedBuffers,
+ clonedChildren,
+ clonedDictionary
+ );
+ }
+ function isTypedArraySliced(arr) {
+ return !(arr.byteOffset === 0 && arr.byteLength === arr.buffer.byteLength);
+ }
+ function cloneBuffer(arr, force) {
+ if (arr === void 0) {
+ return arr;
+ }
+ if (!force && !isTypedArraySliced(arr)) {
+ return arr;
+ }
+ return arr.slice();
  }

  // src/triangulate-on-worker.ts
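hardClone, added above from src/workers/hard-clone.ts, deep-copies an Arrow Vector or Data so its typed-array buffers no longer alias the original IPC ArrayBuffer; buffers are only copied when they are sliced views, unless force is true. A minimal usage sketch, assuming a table already parsed with apache-arrow (this usage is inferred from the implementation above, not from package documentation):

import * as arrow from 'apache-arrow';
import {hardClone} from '@loaders.gl/arrow';

declare const table: arrow.Table;

// Clone the geometry column; pass force=true to copy every buffer unconditionally,
// e.g. before transferring the underlying ArrayBuffers to a worker.
const geometryColumn = table.getChild('geometry');
const detachedColumn = geometryColumn ? hardClone(geometryColumn, true) : null;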
@@ -1074,7 +1153,10 @@ var TriangulationWorker = {
  options: {}
  };
  function triangulateOnWorker(data, options = {}) {
- return (0, import_worker_utils.processOnWorker)(TriangulationWorker, data, options);
+ return (0, import_worker_utils.processOnWorker)(TriangulationWorker, { ...data, operation: "triangulate" }, options);
+ }
+ function parseGeoArrowOnWorker(data, options = {}) {
+ return (0, import_worker_utils.processOnWorker)(TriangulationWorker, { ...data, operation: "parse-geoarrow" }, options);
  }

  // src/index.ts
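Taken together, the GeoArrow changes in this file mean that the 'geojson-table' path now returns real Features (geometry plus properties copied from the non-geometry columns, and a schema attached to the table), and that both worker entry points tag their payload with an operation field ("triangulate" or "parse-geoarrow"). A minimal consumption sketch; it assumes the standard parse() entry point from @loaders.gl/core, which is not part of this diff:

import {parse} from '@loaders.gl/core';
import {GeoArrowLoader} from '@loaders.gl/arrow';

declare const geoArrowIpcBuffer: ArrayBuffer; // e.g. a fetched .arrow file

// Each returned feature is now {type: 'Feature', geometry, properties};
// in 4.0.x the properties object was always empty.
const table = await parse(geoArrowIpcBuffer, GeoArrowLoader, {
  arrow: {shape: 'geojson-table'}
});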