@loaders.gl/arrow 4.2.0-alpha.4 → 4.2.0-alpha.5

This diff shows the contents of publicly available package versions as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
Files changed (89)
  1. package/dist/arrow-loader.d.ts +1 -1
  2. package/dist/arrow-loader.d.ts.map +1 -1
  3. package/dist/arrow-loader.js +29 -18
  4. package/dist/arrow-writer.d.ts +1 -1
  5. package/dist/arrow-writer.d.ts.map +1 -1
  6. package/dist/arrow-writer.js +22 -15
  7. package/dist/dist.dev.js +370 -375
  8. package/dist/dist.min.js +23 -0
  9. package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js +345 -288
  10. package/dist/geoarrow/convert-geoarrow-to-geojson-geometry.js +151 -124
  11. package/dist/geoarrow/get-arrow-bounds.js +32 -22
  12. package/dist/geoarrow-loader.d.ts +1 -1
  13. package/dist/geoarrow-loader.d.ts.map +1 -1
  14. package/dist/geoarrow-loader.js +22 -15
  15. package/dist/geoarrow-writer.js +24 -15
  16. package/dist/index.cjs +54 -71
  17. package/dist/index.cjs.map +7 -0
  18. package/dist/index.d.ts +16 -16
  19. package/dist/index.d.ts.map +1 -1
  20. package/dist/index.js +8 -1
  21. package/dist/lib/arrow-table-batch.d.ts +1 -1
  22. package/dist/lib/arrow-table-batch.d.ts.map +1 -1
  23. package/dist/lib/arrow-table-batch.js +54 -51
  24. package/dist/lib/arrow-table.js +3 -1
  25. package/dist/lib/encode-arrow.d.ts +1 -1
  26. package/dist/lib/encode-arrow.d.ts.map +1 -1
  27. package/dist/lib/encode-arrow.js +31 -16
  28. package/dist/lib/encode-geoarrow.d.ts +1 -1
  29. package/dist/lib/encode-geoarrow.d.ts.map +1 -1
  30. package/dist/lib/encode-geoarrow.js +31 -16
  31. package/dist/parsers/parse-arrow-in-batches.d.ts +2 -2
  32. package/dist/parsers/parse-arrow-in-batches.d.ts.map +1 -1
  33. package/dist/parsers/parse-arrow-in-batches.js +49 -21
  34. package/dist/parsers/parse-arrow-sync.d.ts +1 -1
  35. package/dist/parsers/parse-arrow-sync.d.ts.map +1 -1
  36. package/dist/parsers/parse-arrow-sync.js +26 -22
  37. package/dist/parsers/parse-geoarrow-in-batches.d.ts +1 -1
  38. package/dist/parsers/parse-geoarrow-in-batches.d.ts.map +1 -1
  39. package/dist/parsers/parse-geoarrow-in-batches.js +7 -2
  40. package/dist/parsers/parse-geoarrow-sync.d.ts +1 -1
  41. package/dist/parsers/parse-geoarrow-sync.d.ts.map +1 -1
  42. package/dist/parsers/parse-geoarrow-sync.js +12 -10
  43. package/dist/schema/arrow-type-utils.js +36 -9
  44. package/dist/schema/convert-arrow-schema.js +253 -240
  45. package/dist/tables/convert-arrow-to-columnar-table.d.ts +1 -1
  46. package/dist/tables/convert-arrow-to-columnar-table.d.ts.map +1 -1
  47. package/dist/tables/convert-arrow-to-columnar-table.js +22 -13
  48. package/dist/tables/convert-arrow-to-geojson-table.d.ts +1 -1
  49. package/dist/tables/convert-arrow-to-geojson-table.d.ts.map +1 -1
  50. package/dist/tables/convert-arrow-to-geojson-table.js +47 -32
  51. package/dist/tables/convert-columnar-to-row-table.js +22 -16
  52. package/dist/tables/convert-table-to-arrow.js +58 -1
  53. package/dist/triangulate-on-worker.js +23 -18
  54. package/dist/types.js +8 -6
  55. package/dist/workers/arrow-worker.js +3 -1
  56. package/dist/workers/hard-clone.js +93 -49
  57. package/dist/workers/triangulation-worker-node.d.ts +1 -1
  58. package/dist/workers/triangulation-worker-node.d.ts.map +1 -1
  59. package/dist/workers/triangulation-worker-node.js +3 -1
  60. package/dist/workers/triangulation-worker.js +48 -48
  61. package/package.json +12 -8
  62. package/dist/arrow-loader.js.map +0 -1
  63. package/dist/arrow-writer.js.map +0 -1
  64. package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js.map +0 -1
  65. package/dist/geoarrow/convert-geoarrow-to-geojson-geometry.js.map +0 -1
  66. package/dist/geoarrow/get-arrow-bounds.js.map +0 -1
  67. package/dist/geoarrow-loader.js.map +0 -1
  68. package/dist/geoarrow-writer.js.map +0 -1
  69. package/dist/index.js.map +0 -1
  70. package/dist/lib/arrow-table-batch.js.map +0 -1
  71. package/dist/lib/arrow-table.js.map +0 -1
  72. package/dist/lib/encode-arrow.js.map +0 -1
  73. package/dist/lib/encode-geoarrow.js.map +0 -1
  74. package/dist/parsers/parse-arrow-in-batches.js.map +0 -1
  75. package/dist/parsers/parse-arrow-sync.js.map +0 -1
  76. package/dist/parsers/parse-geoarrow-in-batches.js.map +0 -1
  77. package/dist/parsers/parse-geoarrow-sync.js.map +0 -1
  78. package/dist/schema/arrow-type-utils.js.map +0 -1
  79. package/dist/schema/convert-arrow-schema.js.map +0 -1
  80. package/dist/tables/convert-arrow-to-columnar-table.js.map +0 -1
  81. package/dist/tables/convert-arrow-to-geojson-table.js.map +0 -1
  82. package/dist/tables/convert-columnar-to-row-table.js.map +0 -1
  83. package/dist/tables/convert-table-to-arrow.js.map +0 -1
  84. package/dist/triangulate-on-worker.js.map +0 -1
  85. package/dist/types.js.map +0 -1
  86. package/dist/workers/arrow-worker.js.map +0 -1
  87. package/dist/workers/hard-clone.js.map +0 -1
  88. package/dist/workers/triangulation-worker-node.js.map +0 -1
  89. package/dist/workers/triangulation-worker.js.map +0 -1
package/dist/dist.dev.js CHANGED
@@ -2,14 +2,19 @@
  if (typeof exports === 'object' && typeof module === 'object')
  module.exports = factory();
  else if (typeof define === 'function' && define.amd) define([], factory);
- else if (typeof exports === 'object') exports['loader'] = factory();
- else root['loader'] = factory();})(globalThis, function () {
+ else if (typeof exports === 'object') exports['loaders'] = factory();
+ else root['loaders'] = factory();})(globalThis, function () {
  "use strict";
  var __exports__ = (() => {
+ var __create = Object.create;
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __commonJS = (cb, mod) => function __require() {
+ return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
+ };
  var __export = (target, all) => {
  for (var name in all)
  __defProp(target, name, { get: all[name], enumerable: true });
@@ -22,11 +27,27 @@ var __exports__ = (() => {
  }
  return to;
  };
+ var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+ mod
+ ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

- // src/index.ts
- var src_exports = {};
- __export(src_exports, {
+ // external-global-plugin:@loaders.gl/core
+ var require_core = __commonJS({
+ "external-global-plugin:@loaders.gl/core"(exports, module) {
+ module.exports = globalThis.loaders;
+ }
+ });
+
+ // bundle.ts
+ var bundle_exports = {};
+ __export(bundle_exports, {
  ArrowLoader: () => ArrowLoader,
  ArrowWorkerLoader: () => ArrowWorkerLoader,
  ArrowWriter: () => ArrowWriter,
@@ -54,15 +75,16 @@ var __exports__ = (() => {
  triangulateOnWorker: () => triangulateOnWorker,
  updateBoundsFromGeoArrowSamples: () => updateBoundsFromGeoArrowSamples
  });
+ __reExport(bundle_exports, __toESM(require_core(), 1));

  // ../schema/src/lib/table/batches/base-table-batch-aggregator.ts
  var DEFAULT_ROW_COUNT = 100;
  var BaseTableBatchAggregator = class {
- length = 0;
- rows = null;
- cursor = 0;
- _headers = [];
  constructor(schema, options) {
+ this.length = 0;
+ this.rows = null;
+ this.cursor = 0;
+ this._headers = [];
  this.options = options;
  this.schema = schema;
  if (!Array.isArray(schema)) {
@@ -158,12 +180,12 @@ var __exports__ = (() => {
  // ../schema/src/lib/table/batches/row-table-batch-aggregator.ts
  var DEFAULT_ROW_COUNT2 = 100;
  var RowTableBatchAggregator = class {
- length = 0;
- objectRows = null;
- arrayRows = null;
- cursor = 0;
- _headers = null;
  constructor(schema, options) {
+ this.length = 0;
+ this.objectRows = null;
+ this.arrayRows = null;
+ this.cursor = 0;
+ this._headers = null;
  this.options = options;
  this.schema = schema;
  if (schema) {
@@ -223,6 +245,7 @@ var __exports__ = (() => {
  batchType: "data",
  data: rows,
  length: this.length,
+ // @ts-expect-error we should infer a schema
  schema: this.schema,
  cursor: this.cursor
  };
@@ -232,10 +255,10 @@ var __exports__ = (() => {
  // ../schema/src/lib/table/batches/columnar-table-batch-aggregator.ts
  var DEFAULT_ROW_COUNT3 = 100;
  var ColumnarTableBatchAggregator = class {
- length = 0;
- allocated = 0;
- columns = {};
  constructor(schema, options) {
+ this.length = 0;
+ this.allocated = 0;
+ this.columns = {};
  this.schema = schema;
  this._reallocateColumns();
  }
@@ -276,6 +299,7 @@ var __exports__ = (() => {
  };
  return batch;
  }
+ // HELPERS
  _reallocateColumns() {
  if (this.length < this.allocated) {
  return;
@@ -315,20 +339,17 @@ var __exports__ = (() => {
  };
  var ERR_MESSAGE = "TableBatchBuilder";
  var TableBatchBuilder = class {
- aggregator = null;
- batchCount = 0;
- bytesUsed = 0;
- isChunkComplete = false;
- lastBatchEmittedMs = Date.now();
- totalLength = 0;
- totalBytes = 0;
- rowBytes = 0;
  constructor(schema, options) {
+ this.aggregator = null;
+ this.batchCount = 0;
+ this.bytesUsed = 0;
+ this.isChunkComplete = false;
+ this.lastBatchEmittedMs = Date.now();
+ this.totalLength = 0;
+ this.totalBytes = 0;
+ this.rowBytes = 0;
  this.schema = schema;
- this.options = {
- ...DEFAULT_OPTIONS,
- ...options
- };
+ this.options = { ...DEFAULT_OPTIONS, ...options };
  }
  limitReached() {
  if (Boolean(this.options?.limit) && this.totalLength >= this.options.limit) {
@@ -339,6 +360,7 @@ var __exports__ = (() => {
  }
  return false;
  }
+ /** @deprecated Use addArrayRow or addObjectRow */
  addRow(row) {
  if (this.limitReached()) {
  return;
@@ -352,6 +374,7 @@ var __exports__ = (() => {
  this.addObjectRow(row);
  }
  }
+ /** Add one row to the batch */
  addArrayRow(row) {
  if (!this.aggregator) {
  const TableBatchType = this._getTableBatchType();
@@ -359,6 +382,7 @@ var __exports__ = (() => {
  }
  this.aggregator.addArrayRow(row);
  }
+ /** Add one row to the batch */
  addObjectRow(row) {
  if (!this.aggregator) {
  const TableBatchType = this._getTableBatchType();
@@ -366,6 +390,7 @@ var __exports__ = (() => {
  }
  this.aggregator.addObjectRow(row);
  }
+ /** Mark an incoming raw memory chunk has completed */
  chunkComplete(chunk) {
  if (chunk instanceof ArrayBuffer) {
  this.bytesUsed += chunk.byteLength;
@@ -381,6 +406,7 @@ var __exports__ = (() => {
  getFinalBatch(options) {
  return this._getBatch(options);
  }
+ // INTERNAL
  _estimateRowMB(row) {
  return Array.isArray(row) ? row.length * 8 : Object.keys(row).length * 8;
  }
@@ -402,6 +428,9 @@ var __exports__ = (() => {
  this.lastBatchEmittedMs = Date.now();
  return true;
  }
+ /**
+ * bytesUsed can be set via chunkComplete or via getBatch*
+ */
  _getBatch(options) {
  if (!this.aggregator) {
  return null;
@@ -469,7 +498,9 @@ var __exports__ = (() => {
  return column[rowIndex];
  case "arrow-table":
  const arrowTable = table.data;
- const arrowColumnIndex = arrowTable.schema.fields.findIndex((field) => field.name === columnName);
+ const arrowColumnIndex = arrowTable.schema.fields.findIndex(
+ (field) => field.name === columnName
+ );
  return arrowTable.getChildAt(arrowColumnIndex)?.get(rowIndex);
  default:
  throw new Error("todo");
@@ -602,22 +633,13 @@ var __exports__ = (() => {
  function getDataTypeFromArray(array) {
  let type = getDataTypeFromTypedArray(array);
  if (type !== "null") {
- return {
- type,
- nullable: false
- };
+ return { type, nullable: false };
  }
  if (array.length > 0) {
  type = getDataTypeFromValue(array[0]);
- return {
- type,
- nullable: true
- };
+ return { type, nullable: true };
  }
- return {
- type: "null",
- nullable: true
- };
+ return { type: "null", nullable: true };
  }
  function getDataTypeFromTypedArray(array) {
  switch (array.constructor) {
@@ -687,10 +709,7 @@ var __exports__ = (() => {
  const field = deduceFieldFromColumn(column, columnName);
  fields.push(field);
  }
- return {
- fields,
- metadata: {}
- };
+ return { fields, metadata: {} };
  }
  function deduceSchemaFromRows(rowTable) {
  if (!rowTable.length) {
@@ -701,10 +720,7 @@ var __exports__ = (() => {
  for (const [columnName, value] of Object.entries(row0)) {
  fields.push(deduceFieldFromValue(value, columnName));
  }
- return {
- fields,
- metadata: {}
- };
+ return { fields, metadata: {} };
  }
  function deduceFieldFromColumn(column, name) {
  if (ArrayBuffer.isView(column)) {
@@ -713,6 +729,7 @@ var __exports__ = (() => {
  name,
  type: type.type || "null",
  nullable: type.nullable
+ // metadata: {}
  };
  }
  if (Array.isArray(column) && column.length > 0) {
@@ -722,6 +739,7 @@ var __exports__ = (() => {
  name,
  type,
  nullable: true
+ // metadata: {},
  };
  }
  throw new Error("empty table");
@@ -732,6 +750,7 @@ var __exports__ = (() => {
  name,
  type,
  nullable: true
+ // metadata: {}
  };
  }

@@ -767,10 +786,7 @@ var __exports__ = (() => {
  const schema = table.schema || deduceTableSchema(table);
  const fields = table.schema?.fields || [];
  if (table.shape === "columnar-table") {
- return {
- ...table,
- schema
- };
+ return { ...table, schema };
  }
  const length = getTableLength(table);
  const columns = {};
@@ -12678,12 +12694,13 @@ return true;`);
  if (batch) {
  this.arrowSchema = this.arrowSchema || getArrowSchema(batch.schema);
  const arrowVectors = getArrowVectors(this.arrowSchema, batch.data);
- const recordBatch = new RecordBatch2(this.arrowSchema, makeData({
- type: new Struct(this.arrowSchema.fields),
- children: arrowVectors.map(({
- data
- }) => data[0])
- }));
+ const recordBatch = new RecordBatch2(
+ this.arrowSchema,
+ makeData({
+ type: new Struct(this.arrowSchema.fields),
+ children: arrowVectors.map(({ data }) => data[0])
+ })
+ );
  return {
  shape: "arrow-table",
  batchType: "data",
@@ -12749,11 +12766,11 @@ return true;`);
  }

  // src/types.ts
- var VECTOR_TYPES = function(VECTOR_TYPES2) {
+ var VECTOR_TYPES = /* @__PURE__ */ ((VECTOR_TYPES2) => {
  VECTOR_TYPES2[VECTOR_TYPES2["FLOAT"] = 0] = "FLOAT";
  VECTOR_TYPES2[VECTOR_TYPES2["DATE"] = 1] = "DATE";
  return VECTOR_TYPES2;
- }({});
+ })(VECTOR_TYPES || {});

  // src/tables/convert-arrow-to-columnar-table.ts
  function convertArrowToColumnarTable(table) {
@@ -12779,7 +12796,10 @@ return true;`);
  };
  }
  function deserializeArrowSchema(schema) {
- return new Schema2(schema.fields.map((field) => deserializeArrowField(field)), deserializeArrowMetadata(schema.metadata));
+ return new Schema2(
+ schema.fields.map((field) => deserializeArrowField(field)),
+ deserializeArrowMetadata(schema.metadata)
+ );
  }
  function serializeArrowMetadata(arrowMetadata) {
  return Object.fromEntries(arrowMetadata);
@@ -12796,7 +12816,12 @@ return true;`);
  };
  }
  function deserializeArrowField(field) {
- return new Field2(field.name, deserializeArrowType(field.type), field.nullable, deserializeArrowMetadata(field.metadata));
+ return new Field2(
+ field.name,
+ deserializeArrowType(field.type),
+ field.nullable,
+ deserializeArrowMetadata(field.metadata)
+ );
  }
  function serializeArrowType(arrowType) {
  switch (arrowType.constructor) {
@@ -13079,15 +13104,20 @@ return true;`);
  }

  // src/arrow-loader.ts
- var VERSION = true ? "4.2.0-alpha.4" : "latest";
+ var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
  var ArrowWorkerLoader = {
  name: "Apache Arrow",
  id: "arrow",
  module: "arrow",
  version: VERSION,
+ // worker: true,
  category: "table",
  extensions: ["arrow", "feather"],
- mimeTypes: ["application/vnd.apache.arrow.file", "application/vnd.apache.arrow.stream", "application/octet-stream"],
+ mimeTypes: [
+ "application/vnd.apache.arrow.file",
+ "application/vnd.apache.arrow.stream",
+ "application/octet-stream"
+ ],
  binary: true,
  tests: ["ARROW"],
  options: {
@@ -13116,23 +13146,27 @@ return true;`);
  }
  function createVector(array, type) {
  switch (type) {
- case VECTOR_TYPES.DATE:
+ case 1 /* DATE */:
  return vectorFromArray(array);
- case VECTOR_TYPES.FLOAT:
+ case 0 /* FLOAT */:
  default:
  return vectorFromArray(array);
  }
  }

  // src/arrow-writer.ts
- var VERSION2 = true ? "4.2.0-alpha.4" : "latest";
+ var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
  var ArrowWriter = {
  name: "Apache Arrow",
  id: "arrow",
  module: "arrow",
  version: VERSION2,
  extensions: ["arrow", "feather"],
- mimeTypes: ["application/vnd.apache.arrow.file", "application/vnd.apache.arrow.stream", "application/octet-stream"],
+ mimeTypes: [
+ "application/vnd.apache.arrow.file",
+ "application/vnd.apache.arrow.stream",
+ "application/octet-stream"
+ ],
  binary: true,
  options: {},
  encode: async function encodeArrow(data, options) {
@@ -13144,7 +13178,16 @@ return true;`);
  };

  // ../gis/src/lib/geo/geoarrow-metadata.ts
- var GEOARROW_ENCODINGS = ["geoarrow.multipolygon", "geoarrow.polygon", "geoarrow.multilinestring", "geoarrow.linestring", "geoarrow.multipoint", "geoarrow.point", "geoarrow.wkb", "geoarrow.wkt"];
+ var GEOARROW_ENCODINGS = [
+ "geoarrow.multipolygon",
+ "geoarrow.polygon",
+ "geoarrow.multilinestring",
+ "geoarrow.linestring",
+ "geoarrow.multipoint",
+ "geoarrow.point",
+ "geoarrow.wkb",
+ "geoarrow.wkt"
+ ];
  var GEOARROW_COLUMN_METADATA_ENCODING = "ARROW:extension:name";
  var GEOARROW_COLUMN_METADATA_METADATA = "ARROW:extension:metadata";
  function getGeometryColumnsFromSchema(schema) {
@@ -13760,11 +13803,11 @@ return true;`);
  }
  }
  function polygonToGeoJson(data, startIndex = -Infinity, endIndex = Infinity) {
- const {
- positions
- } = data;
+ const { positions } = data;
  const polygonIndices = data.polygonIndices.value.filter((x) => x >= startIndex && x <= endIndex);
- const primitivePolygonIndices = data.primitivePolygonIndices.value.filter((x) => x >= startIndex && x <= endIndex);
+ const primitivePolygonIndices = data.primitivePolygonIndices.value.filter(
+ (x) => x >= startIndex && x <= endIndex
+ );
  const multi = polygonIndices.length > 2;
  if (!multi) {
  const coordinates2 = [];
@@ -13774,62 +13817,44 @@ return true;`);
  const ringCoordinates = ringToGeoJson(positions, startRingIndex, endRingIndex);
  coordinates2.push(ringCoordinates);
  }
- return {
- type: "Polygon",
- coordinates: coordinates2
- };
+ return { type: "Polygon", coordinates: coordinates2 };
  }
  const coordinates = [];
  for (let i = 0; i < polygonIndices.length - 1; i++) {
  const startPolygonIndex = polygonIndices[i];
  const endPolygonIndex = polygonIndices[i + 1];
- const polygonCoordinates = polygonToGeoJson(data, startPolygonIndex, endPolygonIndex).coordinates;
+ const polygonCoordinates = polygonToGeoJson(
+ data,
+ startPolygonIndex,
+ endPolygonIndex
+ ).coordinates;
  coordinates.push(polygonCoordinates);
  }
- return {
- type: "MultiPolygon",
- coordinates
- };
+ return { type: "MultiPolygon", coordinates };
  }
  function lineStringToGeoJson(data, startIndex = -Infinity, endIndex = Infinity) {
- const {
- positions
- } = data;
+ const { positions } = data;
  const pathIndices = data.pathIndices.value.filter((x) => x >= startIndex && x <= endIndex);
  const multi = pathIndices.length > 2;
  if (!multi) {
  const coordinates2 = ringToGeoJson(positions, pathIndices[0], pathIndices[1]);
- return {
- type: "LineString",
- coordinates: coordinates2
- };
+ return { type: "LineString", coordinates: coordinates2 };
  }
  const coordinates = [];
  for (let i = 0; i < pathIndices.length - 1; i++) {
  const ringCoordinates = ringToGeoJson(positions, pathIndices[i], pathIndices[i + 1]);
  coordinates.push(ringCoordinates);
  }
- return {
- type: "MultiLineString",
- coordinates
- };
+ return { type: "MultiLineString", coordinates };
  }
  function pointToGeoJson(data, startIndex, endIndex) {
- const {
- positions
- } = data;
+ const { positions } = data;
  const coordinates = ringToGeoJson(positions, startIndex, endIndex);
  const multi = coordinates.length > 1;
  if (multi) {
- return {
- type: "MultiPoint",
- coordinates
- };
+ return { type: "MultiPoint", coordinates };
  }
- return {
- type: "Point",
- coordinates: coordinates[0]
- };
+ return { type: "Point", coordinates: coordinates[0] };
  }
  function ringToGeoJson(positions, startIndex, endIndex) {
  startIndex = startIndex || 0;
@@ -13860,11 +13885,7 @@ return true;`);
  const feature = parseGeometryFromArrow(arrowGeometry, encoding);
  if (feature) {
  const properties = propertiesTable.get(row)?.toJSON() || {};
- features.push({
- type: "Feature",
- geometry: feature,
- properties
- });
+ features.push({ type: "Feature", geometry: feature, properties });
  }
  }
  return {
@@ -13877,9 +13898,7 @@ return true;`);
  };

  // src/parsers/parse-geoarrow-sync.ts
  function parseGeoArrowSync(arrayBuffer, options) {
- const table = parseArrowSync(arrayBuffer, {
- shape: "arrow-table"
- });
+ const table = parseArrowSync(arrayBuffer, { shape: "arrow-table" });
  switch (options?.shape) {
  case "geojson-table":
  return convertArrowToGeoJSONTable(table);
@@ -13939,28 +13958,13 @@ return true;`);
  }

  // src/geoarrow/convert-geoarrow-to-binary-geometry.ts
- var BinaryGeometryType = function(BinaryGeometryType2) {
- BinaryGeometryType2["points"] = "points";
- BinaryGeometryType2["lines"] = "lines";
- BinaryGeometryType2["polygons"] = "polygons";
- return BinaryGeometryType2;
- }(BinaryGeometryType || {});
  function getBinaryGeometryTemplate() {
  return {
- globalFeatureIds: {
- value: new Uint32Array(0),
- size: 1
- },
- positions: {
- value: new Float32Array(0),
- size: 2
- },
+ globalFeatureIds: { value: new Uint32Array(0), size: 1 },
+ positions: { value: new Float32Array(0), size: 2 },
  properties: [],
  numericProps: {},
- featureIds: {
- value: new Uint32Array(0),
- size: 1
- }
+ featureIds: { value: new Uint32Array(0), size: 1 }
  };
  }
  function getBinaryGeometriesFromArrow(geoColumn, geoEncoding, options) {
@@ -13974,30 +13978,19 @@ return true;`);
  let globalFeatureIdOffset = options?.chunkOffset || 0;
  const binaryGeometries = [];
  chunks.forEach((chunk) => {
- const {
- featureIds,
- flatCoordinateArray,
- nDim,
- geomOffset,
- triangles
- } = getBinaryGeometriesFromChunk(chunk, geoEncoding, options);
+ const { featureIds, flatCoordinateArray, nDim, geomOffset, triangles } = getBinaryGeometriesFromChunk(chunk, geoEncoding, options);
  const globalFeatureIds = new Uint32Array(featureIds.length);
  for (let i = 0; i < featureIds.length; i++) {
  globalFeatureIds[i] = featureIds[i] + globalFeatureIdOffset;
  }
  const binaryContent = {
- globalFeatureIds: {
- value: globalFeatureIds,
- size: 1
- },
+ globalFeatureIds: { value: globalFeatureIds, size: 1 },
  positions: {
  value: flatCoordinateArray,
  size: nDim
  },
- featureIds: {
- value: featureIds,
- size: 1
- },
+ featureIds: { value: featureIds, size: 1 },
+ // eslint-disable-next-line no-loop-func
  properties: [...Array(chunk.length).keys()].map((i) => ({
  index: i + globalFeatureIdOffset
  }))
@@ -14014,16 +14007,14 @@ return true;`);
  type: "LineString",
  ...getBinaryGeometryTemplate(),
  ...featureTypes.line ? binaryContent : {},
- pathIndices: {
- value: featureTypes.line ? geomOffset : new Uint16Array(0),
- size: 1
- }
+ pathIndices: { value: featureTypes.line ? geomOffset : new Uint16Array(0), size: 1 }
  },
  polygons: {
  type: "Polygon",
  ...getBinaryGeometryTemplate(),
  ...featureTypes.polygon ? binaryContent : {},
  polygonIndices: {
+ // use geomOffset as polygonIndices same as primitivePolygonIndices since we are using earcut to get triangule indices
  value: featureTypes.polygon ? geomOffset : new Uint16Array(0),
  size: 1
  },
@@ -14031,12 +14022,7 @@ return true;`);
  value: featureTypes.polygon ? geomOffset : new Uint16Array(0),
  size: 1
  },
- ...triangles ? {
- triangles: {
- value: triangles,
- size: 1
- }
- } : {}
+ ...triangles ? { triangles: { value: triangles, size: 1 } } : {}
  }
  });
  bounds = updateBoundsFromGeoArrowSamples(flatCoordinateArray, nDim, bounds);
@@ -14045,9 +14031,7 @@ return true;`);
  binaryGeometries,
  bounds,
  featureTypes,
- ...options?.calculateMeanCenters ? {
- meanCenters: getMeanCentersFromBinaryGeometries(binaryGeometries)
- } : {}
+ ...options?.calculateMeanCenters ? { meanCenters: getMeanCentersFromBinaryGeometries(binaryGeometries) } : {}
  };
  }
  function getMeanCentersFromBinaryGeometries(binaryGeometries) {
@@ -14055,11 +14039,11 @@ return true;`);
  binaryGeometries.forEach((binaryGeometry) => {
  let binaryGeometryType = null;
  if (binaryGeometry.points && binaryGeometry.points.positions.value.length > 0) {
- binaryGeometryType = BinaryGeometryType.points;
+ binaryGeometryType = "points" /* points */;
  } else if (binaryGeometry.lines && binaryGeometry.lines.positions.value.length > 0) {
- binaryGeometryType = BinaryGeometryType.lines;
+ binaryGeometryType = "lines" /* lines */;
  } else if (binaryGeometry.polygons && binaryGeometry.polygons.positions.value.length > 0) {
- binaryGeometryType = BinaryGeometryType.polygons;
+ binaryGeometryType = "polygons" /* polygons */;
  }
  const binaryContent = binaryGeometryType ? binaryGeometry[binaryGeometryType] : null;
  if (binaryContent && binaryGeometryType !== null) {
@@ -14067,7 +14051,13 @@ return true;`);
  const flatCoordinateArray = binaryContent.positions.value;
  const nDim = binaryContent.positions.size;
  const primitivePolygonIndices = binaryContent.type === "Polygon" ? binaryContent.primitivePolygonIndices?.value : void 0;
- const meanCenters = getMeanCentersFromGeometry(featureIds, flatCoordinateArray, nDim, binaryGeometryType, primitivePolygonIndices);
+ const meanCenters = getMeanCentersFromGeometry(
+ featureIds,
+ flatCoordinateArray,
+ nDim,
+ binaryGeometryType,
+ primitivePolygonIndices
+ );
  meanCenters.forEach((center) => {
  globalMeanCenters.push(center);
  });
@@ -14086,7 +14076,7 @@ return true;`);
  const center = [0, 0];
  let vertexCountInFeature = 0;
  while (vertexIndex < vertexCount && featureIds[coordIdx] === featureId) {
- if (geometryType === BinaryGeometryType.polygons && primitivePolygonIndices?.[primitiveIdx] === coordIdx) {
+ if (geometryType === "polygons" /* polygons */ && primitivePolygonIndices?.[primitiveIdx] === coordIdx) {
  vertexIndex += nDim;
  primitiveIdx++;
  } else {
@@ -14133,7 +14123,11 @@ return true;`);
  }
  primitiveIndex++;
  }
- const triangleIndices = earcut(slicedFlatCoords, holeIndices.length > 0 ? holeIndices : void 0, nDim);
+ const triangleIndices = earcut(
+ slicedFlatCoords,
+ holeIndices.length > 0 ? holeIndices : void 0,
+ nDim
+ );
  if (triangleIndices.length === 0) {
  throw Error("earcut failed e.g. invalid polygon");
  }
@@ -14181,9 +14175,7 @@ return true;`);
  flatCoordinateArray,
  geomOffset,
  geometryIndicies,
- ...options?.triangulate && triangles ? {
- triangles
- } : {}
+ ...options?.triangulate && triangles ? { triangles } : {}
  };
  }
  function getBinaryLinesFromChunk(chunk, geoEncoding) {
@@ -14257,12 +14249,21 @@ return true;`);
  }

  // ../wkt/src/lib/utils/version.ts
- var VERSION3 = true ? "4.2.0-alpha.4" : "latest";
+ var VERSION3 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";

  // ../wkt/src/lib/parse-wkt.ts
  var numberRegexp = /[-+]?([0-9]*\.[0-9]+|[0-9]+)([eE][-+]?[0-9]+)?/;
  var tuples = new RegExp("^" + numberRegexp.source + "(\\s" + numberRegexp.source + "){1,}");
- var WKT_MAGIC_STRINGS = ["POINT(", "LINESTRING(", "POLYGON(", "MULTIPOINT(", "MULTILINESTRING(", "MULTIPOLYGON(", "GEOMETRYCOLLECTION("];
+ var WKT_MAGIC_STRINGS = [
+ "POINT(",
+ "LINESTRING(",
+ "POLYGON(",
+ "MULTIPOINT(",
+ "MULTILINESTRING(",
+ "MULTIPOLYGON(",
+ "GEOMETRYCOLLECTION("
+ // We only support this "geojson" subset of the OGC simple features standard
+ ];
  function isWKT(input) {
  return WKT_MAGIC_STRINGS.some((magicString) => input.startsWith(magicString));
  }
@@ -14273,11 +14274,7 @@ return true;`);
  const parts = input.split(";");
  let _ = parts.pop();
  const srid = (parts.shift() || "").split("=").pop();
- const state = {
- parts,
- _,
- i: 0
- };
+ const state = { parts, _, i: 0 };
  const geometry = parseGeometry(state);
  return options?.wkt?.crs ? addCRS(geometry, srid) : geometry;
  }
@@ -14364,7 +14361,9 @@ return true;`);
  }
  white(state);
  return {
+ // @ts-ignore
  type: "MultiLineString",
+ // @ts-expect-error
  coordinates: c
  };
  }
@@ -14378,7 +14377,9 @@ return true;`);
  return null;
  }
  return {
+ // @ts-ignore
  type: "Polygon",
+ // @ts-expect-error
  coordinates: c
  };
  }
@@ -14393,6 +14394,7 @@ return true;`);
  }
  return {
  type: "MultiPolygon",
+ // @ts-expect-error
  coordinates: c
  };
  }
@@ -14521,16 +14523,6 @@ return true;`);
  var EWKB_FLAG_M = 1073741824;
  var EWKB_FLAG_SRID = 536870912;
  var MAX_SRID = 1e4;
- var WKBGeometryType = function(WKBGeometryType2) {
- WKBGeometryType2[WKBGeometryType2["Point"] = 1] = "Point";
- WKBGeometryType2[WKBGeometryType2["LineString"] = 2] = "LineString";
- WKBGeometryType2[WKBGeometryType2["Polygon"] = 3] = "Polygon";
- WKBGeometryType2[WKBGeometryType2["MultiPoint"] = 4] = "MultiPoint";
- WKBGeometryType2[WKBGeometryType2["MultiLineString"] = 5] = "MultiLineString";
- WKBGeometryType2[WKBGeometryType2["MultiPolygon"] = 6] = "MultiPolygon";
- WKBGeometryType2[WKBGeometryType2["GeometryCollection"] = 7] = "GeometryCollection";
- return WKBGeometryType2;
- }({});
  function isWKB(arrayBuffer) {
  const dataView = new DataView(arrayBuffer);
  let byteOffset = 0;
@@ -14641,31 +14633,27 @@ return true;`);
  function parseWKBToBinary(arrayBuffer, options) {
  const dataView = new DataView(arrayBuffer);
  const wkbHeader = parseWKBHeader(dataView);
- const {
- geometryType,
- dimensions,
- littleEndian
- } = wkbHeader;
+ const { geometryType, dimensions, littleEndian } = wkbHeader;
  const offset = wkbHeader.byteOffset;
  switch (geometryType) {
- case WKBGeometryType.Point:
+ case 1 /* Point */:
  const point = parsePoint2(dataView, offset, dimensions, littleEndian);
  return point.geometry;
- case WKBGeometryType.LineString:
+ case 2 /* LineString */:
  const line = parseLineString2(dataView, offset, dimensions, littleEndian);
  return line.geometry;
- case WKBGeometryType.Polygon:
+ case 3 /* Polygon */:
  const polygon = parsePolygon2(dataView, offset, dimensions, littleEndian);
  return polygon.geometry;
- case WKBGeometryType.MultiPoint:
+ case 4 /* MultiPoint */:
  const multiPoint = parseMultiPoint2(dataView, offset, dimensions, littleEndian);
  multiPoint.type = "Point";
  return multiPoint;
- case WKBGeometryType.MultiLineString:
+ case 5 /* MultiLineString */:
  const multiLine = parseMultiLineString2(dataView, offset, dimensions, littleEndian);
  multiLine.type = "LineString";
  return multiLine;
- case WKBGeometryType.MultiPolygon:
+ case 6 /* MultiPolygon */:
  const multiPolygon = parseMultiPolygon2(dataView, offset, dimensions, littleEndian);
  multiPolygon.type = "Polygon";
  return multiPolygon;
@@ -14680,13 +14668,7 @@ return true;`);
  offset += 8;
  }
  return {
- geometry: {
- type: "Point",
- positions: {
- value: positions,
- size: dimension
- }
- },
+ geometry: { type: "Point", positions: { value: positions, size: dimension } },
  offset
  };
  }
@@ -14705,14 +14687,8 @@ return true;`);
  return {
  geometry: {
  type: "LineString",
- positions: {
- value: positions,
- size: dimension
- },
- pathIndices: {
- value: new Uint16Array(pathIndices),
- size: 1
- }
+ positions: { value: positions, size: dimension },
+ pathIndices: { value: new Uint16Array(pathIndices), size: 1 }
  },
  offset
  };
@@ -14724,9 +14700,7 @@ return true;`);
  const rings = [];
  for (let i = 0; i < nRings; i++) {
  const parsed = parseLineString2(dataView, offset, dimension, littleEndian);
- const {
- positions
- } = parsed.geometry;
+ const { positions } = parsed.geometry;
  offset = parsed.offset;
  rings.push(positions.value);
  }
@@ -14740,18 +14714,12 @@ return true;`);
  return {
  geometry: {
  type: "Polygon",
- positions: {
- value: concatenatedPositions,
- size: dimension
- },
+ positions: { value: concatenatedPositions, size: dimension },
  polygonIndices: {
  value: new Uint16Array(polygonIndices),
  size: 1
  },
- primitivePolygonIndices: {
- value: new Uint16Array(primitivePolygonIndices),
- size: 1
- }
+ primitivePolygonIndices: { value: new Uint16Array(primitivePolygonIndices), size: 1 }
  },
  offset
  };
@@ -14812,10 +14780,7 @@ return true;`);
  const concatenatedPositions = new Float64Array(concatTypedArrays(positions).buffer);
  return {
  type: "Point",
- positions: {
- value: concatenatedPositions,
- size: dimension
- }
+ positions: { value: concatenatedPositions, size: dimension }
  };
  }
  function concatenateBinaryLineGeometries(binaryLineGeometries, dimension) {
@@ -14825,24 +14790,15 @@ return true;`);
  pathIndices.unshift(0);
  return {
  type: "LineString",
- positions: {
- value: concatenatedPositions,
- size: dimension
- },
- pathIndices: {
- value: new Uint16Array(pathIndices),
- size: 1
- }
+ positions: { value: concatenatedPositions, size: dimension },
+ pathIndices: { value: new Uint16Array(pathIndices), size: 1 }
  };
  }
  function concatenateBinaryPolygonGeometries(binaryPolygonGeometries, dimension) {
  const polygons = [];
  const primitivePolygons = [];
  for (const binaryPolygon of binaryPolygonGeometries) {
- const {
- positions,
- primitivePolygonIndices: primitivePolygonIndices2
- } = binaryPolygon;
+ const { positions, primitivePolygonIndices: primitivePolygonIndices2 } = binaryPolygon;
  polygons.push(positions.value);
  primitivePolygons.push(primitivePolygonIndices2.value);
  }
@@ -14851,22 +14807,15 @@ return true;`);
  polygonIndices.unshift(0);
  const primitivePolygonIndices = [0];
  for (const primitivePolygon of primitivePolygons) {
- primitivePolygonIndices.push(...primitivePolygon.filter((x) => x > 0).map((x) => x + primitivePolygonIndices[primitivePolygonIndices.length - 1]));
+ primitivePolygonIndices.push(
+ ...primitivePolygon.filter((x) => x > 0).map((x) => x + primitivePolygonIndices[primitivePolygonIndices.length - 1])
+ );
  }
  return {
  type: "Polygon",
- positions: {
- value: concatenatedPositions,
- size: dimension
- },
- polygonIndices: {
- value: new Uint16Array(polygonIndices),
- size: 1
- },
- primitivePolygonIndices: {
- value: new Uint16Array(primitivePolygonIndices),
- size: 1
- }
+ positions: { value: concatenatedPositions, size: dimension },
+ polygonIndices: { value: new Uint16Array(polygonIndices), size: 1 },
+ primitivePolygonIndices: { value: new Uint16Array(primitivePolygonIndices), size: 1 }
  };
  }
  function concatTypedArrays(arrays) {
@@ -14896,10 +14845,12 @@ return true;`);
  category: "geometry",
  extensions: ["wkb"],
  mimeTypes: [],
+ // TODO can we define static, serializable tests, eg. some binary strings?
  tests: [isWKB],
  options: {
  wkb: {
  shape: "binary-geometry"
+ // 'geojson-geometry'
  }
  }
  };
@@ -14948,7 +14899,10 @@ return true;`);
  return geometry;
  }
  function arrowWKBToFeature(arrowCellValue) {
- const arrayBuffer = arrowCellValue.buffer.slice(arrowCellValue.byteOffset, arrowCellValue.byteOffset + arrowCellValue.byteLength);
+ const arrayBuffer = arrowCellValue.buffer.slice(
+ arrowCellValue.byteOffset,
+ arrowCellValue.byteOffset + arrowCellValue.byteLength
+ );
  const binaryGeometry = WKBLoader.parseSync?.(arrayBuffer);
  const geometry = binaryToGeometry(binaryGeometry);
  return geometry;
@@ -15072,7 +15026,17 @@ return true;`);
  [BufferType.VALIDITY]: cloneBuffer(data.buffers[BufferType.VALIDITY], force),
  [BufferType.TYPE]: cloneBuffer(data.buffers[BufferType.TYPE], force)
  };
- return new Data(data.type, data.offset, data.length, data._nullCount, clonedBuffers, clonedChildren, clonedDictionary);
+ return new Data(
+ data.type,
+ data.offset,
+ data.length,
+ // @ts-expect-error _nullCount is protected. We're using it here to mimic
+ // `Data.clone`
+ data._nullCount,
+ clonedBuffers,
+ clonedChildren,
+ clonedDictionary
+ );
  }
  function isTypedArraySliced(arr) {
  return !(arr.byteOffset === 0 && arr.byteLength === arr.buffer.byteLength);
@@ -15092,11 +15056,13 @@ return true;`);
  function getVersion() {
  if (!globalThis._loadersgl_?.version) {
  globalThis._loadersgl_ = globalThis._loadersgl_ || {};
- if (false) {
- console.warn("loaders.gl: The __VERSION__ variable is not injected using babel plugin. Latest unstable workers would be fetched from the CDN.");
+ if (typeof __VERSION__ === "undefined") {
+ console.warn(
+ "loaders.gl: The __VERSION__ variable is not injected using babel plugin. Latest unstable workers would be fetched from the CDN."
+ );
  globalThis._loadersgl_.version = NPM_TAG;
  } else {
- globalThis._loadersgl_.version = "4.2.0-alpha.4";
+ globalThis._loadersgl_.version = __VERSION__;
  }
  }
  return globalThis._loadersgl_.version;
@@ -15121,19 +15087,22 @@ return true;`);
  var window_ = globals.window || globals.self || globals.global || {};
  var global_ = globals.global || globals.self || globals.window || {};
  var document_ = globals.document || {};
- var isBrowser = typeof process !== "object" || String(process) !== "[object process]" || process.browser;
+ var isBrowser = (
+ // @ts-ignore process.browser
+ typeof process !== "object" || String(process) !== "[object process]" || process.browser
+ );
  var isMobile = typeof window !== "undefined" && typeof window.orientation !== "undefined";
  var matches = typeof process !== "undefined" && process.version && /v([0-9]*)/.exec(process.version);
  var nodeVersion = matches && parseFloat(matches[1]) || 0;

  // ../worker-utils/src/lib/worker-farm/worker-job.ts
  var WorkerJob = class {
- isRunning = true;
- _resolve = () => {
- };
- _reject = () => {
- };
  constructor(jobName, workerThread) {
+ this.isRunning = true;
+ this._resolve = () => {
+ };
+ this._reject = () => {
+ };
  this.name = jobName;
  this.workerThread = workerThread;
  this.result = new Promise((resolve, reject) => {
@@ -15141,18 +15110,29 @@ return true;`);
  this._reject = reject;
  });
  }
+ /**
+ * Send a message to the job's worker thread
+ * @param data any data structure, ideally consisting mostly of transferrable objects
+ */
  postMessage(type, payload) {
  this.workerThread.postMessage({
  source: "loaders.gl",
+ // Lets worker ignore unrelated messages
  type,
  payload
  });
  }
+ /**
+ * Call to resolve the `result` Promise with the supplied value
+ */
  done(value) {
  assert(this.isRunning);
  this.isRunning = false;
  this._resolve(value);
  }
+ /**
+ * Call to reject the `result` Promise with the supplied error
+ */
  error(error) {
  assert(this.isRunning);
  this.isRunning = false;
@@ -15192,9 +15172,7 @@ return true;`);
  return getLoadableWorkerURLFromSource(workerSource);
  }
  function getLoadableWorkerURLFromSource(workerSource) {
- const blob = new Blob([workerSource], {
- type: "application/javascript"
- });
+ const blob = new Blob([workerSource], { type: "application/javascript" });
  return URL.createObjectURL(blob);
  }
  function buildScriptSource(workerUrl) {
@@ -15261,17 +15239,10 @@ return true;`);
  var NOOP = () => {
  };
  var WorkerThread = class {
- terminated = false;
- _loadableURL = "";
- static isSupported() {
- return typeof Worker !== "undefined" && isBrowser || typeof NodeWorker !== "undefined" && !isBrowser;
- }
  constructor(props) {
- const {
- name,
- source,
- url
- } = props;
+ this.terminated = false;
+ this._loadableURL = "";
+ const { name, source, url } = props;
  assert(source || url);
  this.name = name;
  this.source = source;
@@ -15280,6 +15251,14 @@ return true;`);
  this.onError = (error) => console.log(error);
  this.worker = isBrowser ? this._createBrowserWorker() : this._createNodeWorker();
  }
+ /** Checks if workers are supported on this platform */
+ static isSupported() {
+ return typeof Worker !== "undefined" && isBrowser || typeof NodeWorker !== "undefined" && !isBrowser;
+ }
+ /**
+ * Terminate this worker thread
+ * @note Can free up significant memory
+ */
  destroy() {
  this.onMessage = NOOP;
  this.onError = NOOP;
@@ -15289,10 +15268,20 @@ return true;`);
  get isRunning() {
  return Boolean(this.onMessage);
  }
+ /**
+ * Send a message to this worker thread
+ * @param data any data structure, ideally consisting mostly of transferrable objects
+ * @param transferList If not supplied, calculated automatically by traversing data
+ */
  postMessage(data, transferList) {
  transferList = transferList || getTransferList(data);
  this.worker.postMessage(data, transferList);
  }
+ // PRIVATE
+ /**
+ * Generate a standard Error from an ErrorEvent
+ * @param event
+ */
  _getErrorFromErrorEvent(event) {
  let message = "Failed to load ";
  message += `worker ${this.name} from ${this.url}. `;
@@ -15304,14 +15293,12 @@ return true;`);
  }
  return new Error(message);
  }
+ /**
+ * Creates a worker thread on the browser
+ */
  _createBrowserWorker() {
- this._loadableURL = getLoadableWorkerURL({
- source: this.source,
- url: this.url
- });
- const worker = new Worker(this._loadableURL, {
- name: this.name
- });
+ this._loadableURL = getLoadableWorkerURL({ source: this.source, url: this.url });
+ const worker = new Worker(this._loadableURL, { name: this.name });
  worker.onmessage = (event) => {
  if (!event.data) {
  this.onError(new Error("No data received"));
@@ -15326,18 +15313,18 @@ return true;`);
  worker.onmessageerror = (event) => console.error(event);
  return worker;
  }
+ /**
+ * Creates a worker thread in node.js
+ * @todo https://nodejs.org/api/async_hooks.html#async-resource-worker-pool
+ */
  _createNodeWorker() {
  let worker;
  if (this.url) {
  const absolute = this.url.includes(":/") || this.url.startsWith("/");
  const url = absolute ? this.url : `./${this.url}`;
- worker = new NodeWorker(url, {
- eval: false
- });
+ worker = new NodeWorker(url, { eval: false });
  } else if (this.source) {
- worker = new NodeWorker(this.source, {
- eval: true
- });
+ worker = new NodeWorker(this.source, { eval: true });
  } else {
  throw new Error("no worker");
  }
@@ -15355,34 +15342,40 @@ return true;`);

  // ../worker-utils/src/lib/worker-farm/worker-pool.ts
  var WorkerPool = class {
- name = "unnamed";
- maxConcurrency = 1;
- maxMobileConcurrency = 1;
- onDebug = () => {
- };
- reuseWorkers = true;
- props = {};
- jobQueue = [];
- idleQueue = [];
- count = 0;
- isDestroyed = false;
- static isSupported() {
- return WorkerThread.isSupported();
- }
+ /**
+ * @param processor - worker function
+ * @param maxConcurrency - max count of workers
+ */
  constructor(props) {
+ this.name = "unnamed";
+ this.maxConcurrency = 1;
+ this.maxMobileConcurrency = 1;
+ this.onDebug = () => {
+ };
+ this.reuseWorkers = true;
+ this.props = {};
+ this.jobQueue = [];
+ this.idleQueue = [];
+ this.count = 0;
+ this.isDestroyed = false;
  this.source = props.source;
  this.url = props.url;
  this.setProps(props);
  }
+ /** Checks if workers are supported on this platform */
+ static isSupported() {
+ return WorkerThread.isSupported();
+ }
+ /**
+ * Terminates all workers in the pool
+ * @note Can free up significant memory
+ */
  destroy() {
  this.idleQueue.forEach((worker) => worker.destroy());
  this.isDestroyed = true;
  }
  setProps(props) {
- this.props = {
- ...this.props,
- ...props
- };
+ this.props = { ...this.props, ...props };
  if (props.name !== void 0) {
  this.name = props.name;
  }
@@ -15401,17 +15394,17 @@ return true;`);
  }
  async startJob(name, onMessage2 = (job, type, data) => job.done(data), onError = (job, error) => job.error(error)) {
  const startPromise = new Promise((onStart) => {
- this.jobQueue.push({
- name,
- onMessage: onMessage2,
- onError,
- onStart
- });
+ this.jobQueue.push({ name, onMessage: onMessage2, onError, onStart });
  return this;
  });
  this._startQueuedJob();
  return await startPromise;
  }
+ // PRIVATE
+ /**
+ * Starts first queued job if worker is available or can be created
+ * Called when job is started and whenever a worker returns to the idleQueue
+ */
  async _startQueuedJob() {
  if (!this.jobQueue.length) {
  return;
@@ -15441,8 +15434,23 @@ return true;`);
  }
  }
  }
+ /**
+ * Returns a worker to the idle queue
+ * Destroys the worker if
+ * - pool is destroyed
+ * - if this pool doesn't reuse workers
+ * - if maxConcurrency has been lowered
+ * @param worker
+ */
  returnWorkerToQueue(worker) {
- const shouldDestroyWorker = !isBrowser || this.isDestroyed || !this.reuseWorkers || this.count > this._getMaxConcurrency();
+ const shouldDestroyWorker = (
+ // Workers on Node.js prevent the process from exiting.
+ // Until we figure out how to close them before exit, we always destroy them
+ !isBrowser || // If the pool is destroyed, there is no reason to keep the worker around
+ this.isDestroyed || // If the app has disabled worker reuse, any completed workers should be destroyed
+ !this.reuseWorkers || // If concurrency has been lowered, this worker might be surplus to requirements
+ this.count > this._getMaxConcurrency()
+ );
  if (shouldDestroyWorker) {
  worker.destroy();
  this.count--;
@@ -15453,6 +15461,9 @@ return true;`);
  this._startQueuedJob();
  }
  }
+ /**
+ * Returns idle worker or creates new worker if maxConcurrency has not been reached
+ */
  _getAvailableWorker() {
  if (this.idleQueue.length > 0) {
  return this.idleQueue.shift() || null;
@@ -15460,11 +15471,7 @@ return true;`);
  if (this.count < this._getMaxConcurrency()) {
  this.count++;
  const name = `${this.name.toLowerCase()} (#${this.count} of ${this.maxConcurrency})`;
- return new WorkerThread({
- name,
- source: this.source,
- url: this.url
- });
+ return new WorkerThread({ name, source: this.source, url: this.url });
  }
  return null;
  }
@@ -15482,43 +15489,54 @@ return true;`);
  }
  };
  var WorkerFarm = class {
- workerPools = /* @__PURE__ */ new Map();
+ /** get global instance with WorkerFarm.getWorkerFarm() */
+ constructor(props) {
+ this.workerPools = /* @__PURE__ */ new Map();
+ this.props = { ...DEFAULT_PROPS };
+ this.setProps(props);
+ this.workerPools = /* @__PURE__ */ new Map();
+ }
+ /** Checks if workers are supported on this platform */
  static isSupported() {
  return WorkerThread.isSupported();
  }
+ /** Get the singleton instance of the global worker farm */
  static getWorkerFarm(props = {}) {
  WorkerFarm._workerFarm = WorkerFarm._workerFarm || new WorkerFarm({});
  WorkerFarm._workerFarm.setProps(props);
  return WorkerFarm._workerFarm;
  }
- constructor(props) {
- this.props = {
- ...DEFAULT_PROPS
- };
- this.setProps(props);
- this.workerPools = /* @__PURE__ */ new Map();
- }
+ /**
+ * Terminate all workers in the farm
+ * @note Can free up significant memory
+ */
  destroy() {
  for (const workerPool of this.workerPools.values()) {
  workerPool.destroy();
  }
  this.workerPools = /* @__PURE__ */ new Map();
  }
+ /**
+ * Set props used when initializing worker pools
+ * @param props
+ */
  setProps(props) {
- this.props = {
- ...this.props,
- ...props
- };
+ this.props = { ...this.props, ...props };
  for (const workerPool of this.workerPools.values()) {
  workerPool.setProps(this._getWorkerPoolProps());
  }
  }
+ /**
+ * Returns a worker pool for the specified worker
+ * @param options - only used first time for a specific worker name
+ * @param options.name - the name of the worker - used to identify worker pool
+ * @param options.url -
+ * @param options.source -
+ * @example
+ * const job = WorkerFarm.getWorkerFarm().getWorkerPool({name, url}).startJob(...);
+ */
  getWorkerPool(options) {
- const {
- name,
- source,
- url
- } = options;
+ const { name, source, url } = options;
  let workerPool = this.workerPools.get(name);
  if (!workerPool) {
  workerPool = new WorkerPool({
@@ -15576,24 +15594,20 @@ return true;`);
  async function processOnWorker(worker, data, options = {}, context = {}) {
  const name = getWorkerName(worker);
  const workerFarm = WorkerFarm.getWorkerFarm(options);
- const {
- source
- } = options;
- const workerPoolProps = {
- name,
- source
- };
+ const { source } = options;
+ const workerPoolProps = { name, source };
  if (!source) {
  workerPoolProps.url = getWorkerURL(worker, options);
  }
  const workerPool = workerFarm.getWorkerPool(workerPoolProps);
  const jobName = options.jobName || worker.name;
- const job = await workerPool.startJob(jobName, onMessage.bind(null, context));
+ const job = await workerPool.startJob(
+ jobName,
+ // eslint-disable-next-line
+ onMessage.bind(null, context)
+ );
  const transferableOptions = getTransferListForWriter(options);
- job.postMessage("process", {
- input: data,
- options: transferableOptions
- });
+ job.postMessage("process", { input: data, options: transferableOptions });
  const result = await job.result;
  return result.result;
  }
@@ -15606,30 +15620,17 @@ return true;`);
  job.error(new Error(payload.error));
  break;
  case "process":
- const {
- id,
- input,
- options
- } = payload;
+ const { id, input, options } = payload;
  try {
  if (!context.process) {
- job.postMessage("error", {
- id,
- error: "Worker not set up to process on main thread"
- });
+ job.postMessage("error", { id, error: "Worker not set up to process on main thread" });
  return;
  }
  const result = await context.process(input, options);
- job.postMessage("done", {
- id,
- result
- });
+ job.postMessage("done", { id, result });
  } catch (error) {
  const message = error instanceof Error ? error.message : "unknown error";
- job.postMessage("error", {
- id,
- error: message
- });
+ job.postMessage("error", { id, error: message });
  }
  break;
  default:
@@ -15638,7 +15639,7 @@ return true;`);
  }

  // src/triangulate-on-worker.ts
- var VERSION5 = true ? "4.2.0-alpha.4" : "latest";
+ var VERSION5 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
  var TriangulationWorker = {
  id: "triangulation",
  name: "Triangulate",
@@ -15647,21 +15648,15 @@ return true;`);
  options: {}
  };
  function triangulateOnWorker(data, options = {}) {
- return processOnWorker(TriangulationWorker, {
- ...data,
- operation: "triangulate"
- }, options);
+ return processOnWorker(TriangulationWorker, { ...data, operation: "triangulate" }, options);
  }
  function parseGeoArrowOnWorker(data, options = {}) {
- return processOnWorker(TriangulationWorker, {
- ...data,
- operation: "parse-geoarrow"
- }, options);
+ return processOnWorker(TriangulationWorker, { ...data, operation: "parse-geoarrow" }, options);
  }

  // src/index.ts
  TableBatchBuilder.ArrowBatch = ArrowTableBatchAggregator;
- return __toCommonJS(src_exports);
+ return __toCommonJS(bundle_exports);
  })();
  return __exports__;
  });