@loaders.gl/arrow 4.0.0 → 4.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/arrow-loader.d.ts.map +1 -1
- package/dist/arrow-loader.js.map +1 -1
- package/dist/arrow-worker.js +34 -26
- package/dist/arrow-writer.d.ts.map +1 -1
- package/dist/arrow-writer.js +6 -3
- package/dist/arrow-writer.js.map +1 -1
- package/dist/dist.dev.js +658 -6
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.d.ts +40 -0
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.d.ts.map +1 -0
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js +189 -0
- package/dist/geoarrow/convert-geoarrow-to-binary-geometry.js.map +1 -0
- package/dist/geoarrow/convert-geoarrow-to-geojson.d.ts +19 -0
- package/dist/geoarrow/convert-geoarrow-to-geojson.d.ts.map +1 -0
- package/dist/geoarrow/convert-geoarrow-to-geojson.js +138 -0
- package/dist/geoarrow/convert-geoarrow-to-geojson.js.map +1 -0
- package/dist/geoarrow/get-arrow-bounds.d.ts +11 -0
- package/dist/geoarrow/get-arrow-bounds.d.ts.map +1 -0
- package/dist/geoarrow/get-arrow-bounds.js +24 -0
- package/dist/geoarrow/get-arrow-bounds.js.map +1 -0
- package/dist/index.cjs +627 -32
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -1
- package/dist/index.js.map +1 -1
- package/dist/lib/arrow-table-batch.d.ts +2 -2
- package/dist/lib/arrow-table-batch.d.ts.map +1 -1
- package/dist/lib/arrow-table-batch.js +7 -7
- package/dist/lib/arrow-table-batch.js.map +1 -1
- package/dist/lib/arrow-table.d.ts +3 -3
- package/dist/lib/arrow-table.d.ts.map +1 -1
- package/dist/lib/arrow-table.js.map +1 -1
- package/dist/lib/encode-arrow.js +5 -5
- package/dist/lib/encode-arrow.js.map +1 -1
- package/dist/lib/parse-arrow-in-batches.js +3 -3
- package/dist/lib/parse-arrow-in-batches.js.map +1 -1
- package/dist/lib/parse-arrow-sync.d.ts.map +1 -1
- package/dist/lib/parse-arrow-sync.js +3 -3
- package/dist/lib/parse-arrow-sync.js.map +1 -1
- package/dist/schema/arrow-type-utils.d.ts +3 -2
- package/dist/schema/arrow-type-utils.d.ts.map +1 -1
- package/dist/schema/arrow-type-utils.js +9 -9
- package/dist/schema/arrow-type-utils.js.map +1 -1
- package/dist/schema/convert-arrow-schema.d.ts +19 -0
- package/dist/schema/convert-arrow-schema.d.ts.map +1 -0
- package/dist/schema/convert-arrow-schema.js +232 -0
- package/dist/schema/convert-arrow-schema.js.map +1 -0
- package/dist/{lib/convert-table.d.ts → tables/convert-arrow-to-table.d.ts} +4 -4
- package/dist/tables/convert-arrow-to-table.d.ts.map +1 -0
- package/dist/{lib/convert-table.js → tables/convert-arrow-to-table.js} +1 -1
- package/dist/tables/convert-arrow-to-table.js.map +1 -0
- package/dist/{schema → tables}/convert-table-to-arrow.d.ts.map +1 -1
- package/dist/{schema → tables}/convert-table-to-arrow.js.map +1 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/workers/arrow-worker.js.map +1 -1
- package/package.json +5 -4
- package/src/arrow-loader.ts +1 -0
- package/src/arrow-writer.ts +7 -3
- package/src/geoarrow/convert-geoarrow-to-binary-geometry.ts +260 -0
- package/src/geoarrow/convert-geoarrow-to-geojson.ts +192 -0
- package/src/geoarrow/get-arrow-bounds.ts +40 -0
- package/src/index.ts +33 -4
- package/src/lib/arrow-table-batch.ts +13 -23
- package/src/lib/arrow-table.ts +3 -3
- package/src/lib/encode-arrow.ts +8 -8
- package/src/lib/parse-arrow-in-batches.ts +4 -4
- package/src/lib/parse-arrow-sync.ts +6 -3
- package/src/schema/arrow-type-utils.ts +11 -29
- package/src/schema/convert-arrow-schema.ts +265 -0
- package/src/{lib/convert-table.ts → tables/convert-arrow-to-table.ts} +4 -3
- package/src/types.ts +3 -0
- package/src/workers/arrow-worker.ts +3 -0
- package/dist/lib/convert-table.d.ts.map +0 -1
- package/dist/lib/convert-table.js.map +0 -1
- package/dist/schema/convert-schema-arrow.d.ts +0 -13
- package/dist/schema/convert-schema-arrow.d.ts.map +0 -1
- package/dist/schema/convert-schema-arrow.js +0 -158
- package/dist/schema/convert-schema-arrow.js.map +0 -1
- package/src/schema/convert-schema-arrow.ts +0 -233
- /package/dist/{schema → tables}/convert-table-to-arrow.d.ts +0 -0
- /package/dist/{schema → tables}/convert-table-to-arrow.js +0 -0
- /package/src/{schema → tables}/convert-table-to-arrow.ts +0 -0
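
The main additions in 4.0.1/4.0.2 are the new geoarrow module (GeoArrow to GeoJSON and binary-geometry conversion, plus sampled bounds) and the schema serialization helpers, all re-exported from the package index (see the index.cjs diff below). The following is a hypothetical usage sketch of the new geoarrow exports, not taken from the package documentation; the "geometry" column name and the hard-coded "geoarrow.point" encoding are assumptions for illustration.

// Sketch only: assumes an Arrow table parsed elsewhere with a GeoArrow point
// column named "geometry". Not from the @loaders.gl/arrow docs.
import * as arrow from 'apache-arrow';
import {getBinaryGeometriesFromArrow, parseGeometryFromArrow} from '@loaders.gl/arrow';

function convertGeometryColumn(table: arrow.Table) {
  const geoColumn = table.getChild('geometry');
  if (!geoColumn) {
    return null;
  }
  // Whole column -> flat binary geometry attributes plus sampled [minX, minY, maxX, maxY] bounds.
  const {binaryGeometries, bounds, featureTypes} = getBinaryGeometriesFromArrow(
    geoColumn,
    'geoarrow.point'
  );
  // Single cell -> a GeoJSON Feature.
  const feature = parseGeometryFromArrow({encoding: 'geoarrow.point', data: geoColumn.get(0)});
  return {binaryGeometries, bounds, featureTypes, feature};
}
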
package/dist/index.cjs
CHANGED
@@ -1,7 +1,9 @@
 "use strict";
+var __create = Object.create;
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
   for (var name in all)
@@ -15,6 +17,14 @@ var __copyProps = (to, from, except, desc) => {
   }
   return to;
 };
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
 // src/index.ts
@@ -23,8 +33,20 @@ __export(src_exports, {
   ArrowLoader: () => ArrowLoader2,
   ArrowWorkerLoader: () => ArrowLoader,
   ArrowWriter: () => ArrowWriter,
+  BINARY_GEOMETRY_TEMPLATE: () => BINARY_GEOMETRY_TEMPLATE,
   VECTOR_TYPES: () => VECTOR_TYPES,
-
+  deserializeArrowField: () => deserializeArrowField,
+  deserializeArrowMetadata: () => deserializeArrowMetadata,
+  deserializeArrowSchema: () => deserializeArrowSchema,
+  deserializeArrowType: () => deserializeArrowType,
+  getArrowType: () => getArrowType,
+  getBinaryGeometriesFromArrow: () => getBinaryGeometriesFromArrow,
+  parseGeometryFromArrow: () => parseGeometryFromArrow,
+  serializeArrowField: () => serializeArrowField,
+  serializeArrowMetadata: () => serializeArrowMetadata,
+  serializeArrowSchema: () => serializeArrowSchema,
+  serializeArrowType: () => serializeArrowType,
+  updateBoundsFromGeoArrowSamples: () => updateBoundsFromGeoArrowSamples
 });
 module.exports = __toCommonJS(src_exports);
 var import_schema3 = require("@loaders.gl/schema");
@@ -55,9 +77,9 @@ var ArrowLoader = {
 
 // src/lib/parse-arrow-sync.ts
 var import_schema = require("@loaders.gl/schema");
-var
+var arrow = __toESM(require("apache-arrow"), 1);
 
-// src/
+// src/tables/convert-arrow-to-table.ts
 function convertApacheArrowToArrowTable(arrowTable) {
   return {
     shape: "arrow-table",
@@ -81,7 +103,7 @@ function convertArrowToColumnarTable(table) {
 // src/lib/parse-arrow-sync.ts
 function parseArrowSync(arrayBuffer, options) {
   var _a;
-  const apacheArrowTable =
+  const apacheArrowTable = arrow.tableFromIPC([new Uint8Array(arrayBuffer)]);
   const arrowTable = convertApacheArrowToArrowTable(apacheArrowTable);
   const shape = ((_a = options == null ? void 0 : options.arrow) == null ? void 0 : _a.shape) || "arrow-table";
   switch (shape) {
@@ -98,16 +120,16 @@ function parseArrowSync(arrayBuffer, options) {
 }
 
 // src/lib/parse-arrow-in-batches.ts
-var
+var arrow2 = __toESM(require("apache-arrow"), 1);
 function parseArrowInBatches(asyncIterator) {
   async function* makeArrowAsyncIterator() {
-    const readers =
+    const readers = arrow2.RecordBatchReader.readAll(asyncIterator);
     for await (const reader of readers) {
       for await (const recordBatch of reader) {
         const arrowTabledBatch = {
           shape: "arrow-table",
           batchType: "data",
-          data: new
+          data: new arrow2.Table([recordBatch]),
           length: recordBatch.data.length
         };
         yield arrowTabledBatch;
@@ -120,7 +142,7 @@ function parseArrowInBatches(asyncIterator) {
 
 // src/lib/arrow-table-batch.ts
 var import_schema2 = require("@loaders.gl/schema");
-var
+var arrow3 = __toESM(require("apache-arrow"), 1);
 var ArrowTableBatchAggregator = class extends import_schema2.ColumnarTableBatchAggregator {
   constructor(schema, options) {
     super(schema, options);
@@ -131,17 +153,17 @@ var ArrowTableBatchAggregator = class extends import_schema2.ColumnarTableBatchA
     if (batch) {
       this.arrowSchema = this.arrowSchema || getArrowSchema(batch.schema);
       const arrowVectors = getArrowVectors(this.arrowSchema, batch.data);
-      const recordBatch = new
+      const recordBatch = new arrow3.RecordBatch(
         this.arrowSchema,
-
-          type: new
+        arrow3.makeData({
+          type: new arrow3.Struct(this.arrowSchema.fields),
           children: arrowVectors.map(({ data }) => data[0])
         })
       );
       return {
         shape: "arrow-table",
         batchType: "data",
-        data: new
+        data: new arrow3.Table([recordBatch]),
         length: batch.length
       };
     }
@@ -154,21 +176,21 @@ function getArrowSchema(schema) {
     const field = schema[key];
     if (field.type === Float32Array) {
       const metadata = /* @__PURE__ */ new Map();
-      const arrowField = new
+      const arrowField = new arrow3.Field(field.name, new arrow3.Float32(), field.nullable, metadata);
       arrowFields.push(arrowField);
     }
   }
   if (arrowFields.length === 0) {
     throw new Error("No arrow convertible fields");
   }
-  return new
+  return new arrow3.Schema(arrowFields);
 }
 function getArrowVectors(arrowSchema, data) {
   const arrowVectors = [];
   for (const field of arrowSchema.fields) {
     const vector = data[field.name];
     if (vector instanceof Float32Array) {
-      const arrowVector =
+      const arrowVector = arrow3.makeVector(vector);
       arrowVectors.push(arrowVector);
     }
   }
@@ -179,25 +201,266 @@ function getArrowVectors(arrowSchema, data) {
 }
 
 // src/schema/arrow-type-utils.ts
-var
+var arrow4 = __toESM(require("apache-arrow"), 1);
 function getArrowType(array) {
   switch (array.constructor) {
     case Int8Array:
-      return new
+      return new arrow4.Int8();
     case Uint8Array:
-      return new
+      return new arrow4.Uint8();
    case Int16Array:
-      return new
+      return new arrow4.Int16();
    case Uint16Array:
-      return new
+      return new arrow4.Uint16();
    case Int32Array:
-      return new
+      return new arrow4.Int32();
    case Uint32Array:
-      return new
+      return new arrow4.Uint32();
    case Float32Array:
-      return new
+      return new arrow4.Float32();
    case Float64Array:
-      return new
+      return new arrow4.Float64();
+    default:
+      throw new Error("array type not supported");
+  }
+}
+
+// src/schema/convert-arrow-schema.ts
+var arrow5 = __toESM(require("apache-arrow"), 1);
+function serializeArrowSchema(arrowSchema) {
+  return {
+    fields: arrowSchema.fields.map((arrowField) => serializeArrowField(arrowField)),
+    metadata: serializeArrowMetadata(arrowSchema.metadata)
+  };
+}
+function deserializeArrowSchema(schema) {
+  return new arrow5.Schema(
+    schema.fields.map((field) => deserializeArrowField(field)),
+    deserializeArrowMetadata(schema.metadata)
+  );
+}
+function serializeArrowMetadata(arrowMetadata) {
+  return Object.fromEntries(arrowMetadata);
+}
+function deserializeArrowMetadata(metadata) {
+  return metadata ? new Map(Object.entries(metadata)) : /* @__PURE__ */ new Map();
+}
+function serializeArrowField(field) {
+  return {
+    name: field.name,
+    type: serializeArrowType(field.type),
+    nullable: field.nullable,
+    metadata: serializeArrowMetadata(field.metadata)
+  };
+}
+function deserializeArrowField(field) {
+  return new arrow5.Field(
+    field.name,
+    deserializeArrowType(field.type),
+    field.nullable,
+    deserializeArrowMetadata(field.metadata)
+  );
+}
+function serializeArrowType(arrowType) {
+  switch (arrowType.constructor) {
+    case arrow5.Null:
+      return "null";
+    case arrow5.Binary:
+      return "binary";
+    case arrow5.Bool:
+      return "bool";
+    case arrow5.Int:
+      const intType = arrowType;
+      return `${intType.isSigned ? "u" : ""}int${intType.bitWidth}`;
+    case arrow5.Int8:
+      return "int8";
+    case arrow5.Int16:
+      return "int16";
+    case arrow5.Int32:
+      return "int32";
+    case arrow5.Int64:
+      return "int64";
+    case arrow5.Uint8:
+      return "uint8";
+    case arrow5.Uint16:
+      return "uint16";
+    case arrow5.Uint32:
+      return "uint32";
+    case arrow5.Uint64:
+      return "uint64";
+    case arrow5.Float:
+      const precision = arrowType.precision;
+      switch (precision) {
+        case arrow5.Precision.HALF:
+          return "float16";
+        case arrow5.Precision.SINGLE:
+          return "float32";
+        case arrow5.Precision.DOUBLE:
+          return "float64";
+        default:
+          return "float16";
+      }
+    case arrow5.Float16:
+      return "float16";
+    case arrow5.Float32:
+      return "float32";
+    case arrow5.Float64:
+      return "float64";
+    case arrow5.Utf8:
+      return "utf8";
+    case Date:
+      const dateUnit = arrowType.unit;
+      return dateUnit === arrow5.DateUnit.DAY ? "date-day" : "date-millisecond";
+    case arrow5.DateDay:
+      return "date-day";
+    case arrow5.DateMillisecond:
+      return "date-millisecond";
+    case arrow5.Time:
+      const timeUnit = arrowType.unit;
+      switch (timeUnit) {
+        case arrow5.TimeUnit.SECOND:
+          return "time-second";
+        case arrow5.TimeUnit.MILLISECOND:
+          return "time-millisecond";
+        case arrow5.TimeUnit.MICROSECOND:
+          return "time-microsecond";
+        case arrow5.TimeUnit.NANOSECOND:
+          return "time-nanosecond";
+        default:
+          return "time-second";
+      }
+    case arrow5.TimeMillisecond:
+      return "time-millisecond";
+    case arrow5.TimeSecond:
+      return "time-second";
+    case arrow5.TimeMicrosecond:
+      return "time-microsecond";
+    case arrow5.TimeNanosecond:
+      return "time-nanosecond";
+    case arrow5.Timestamp:
+      const timeStampUnit = arrowType.unit;
+      switch (timeStampUnit) {
+        case arrow5.TimeUnit.SECOND:
+          return "timestamp-second";
+        case arrow5.TimeUnit.MILLISECOND:
+          return "timestamp-millisecond";
+        case arrow5.TimeUnit.MICROSECOND:
+          return "timestamp-microsecond";
+        case arrow5.TimeUnit.NANOSECOND:
+          return "timestamp-nanosecond";
+        default:
+          return "timestamp-second";
+      }
+    case arrow5.TimestampSecond:
+      return "timestamp-second";
+    case arrow5.TimestampMillisecond:
+      return "timestamp-millisecond";
+    case arrow5.TimestampMicrosecond:
+      return "timestamp-microsecond";
+    case arrow5.TimestampNanosecond:
+      return "timestamp-nanosecond";
+    case arrow5.Interval:
+      const intervalUnit = arrowType.unit;
+      switch (intervalUnit) {
+        case arrow5.IntervalUnit.DAY_TIME:
+          return "interval-daytime";
+        case arrow5.IntervalUnit.YEAR_MONTH:
+          return "interval-yearmonth";
+        default:
+          return "interval-daytime";
+      }
+    case arrow5.IntervalDayTime:
+      return "interval-daytime";
+    case arrow5.IntervalYearMonth:
+      return "interval-yearmonth";
+    case arrow5.List:
+      const listType = arrowType;
+      const listField = listType.valueField;
+      return {
+        type: "list",
+        children: [serializeArrowField(listField)]
+      };
+    case arrow5.FixedSizeList:
+      return {
+        type: "fixed-size-list",
+        listSize: arrowType.listSize,
+        children: [serializeArrowField(arrowType.children[0])]
+      };
+    default:
+      throw new Error("array type not supported");
+  }
+}
+function deserializeArrowType(dataType) {
+  if (typeof dataType === "object") {
+    switch (dataType.type) {
+      case "list":
+        const field = deserializeArrowField(dataType.children[0]);
+        return new arrow5.List(field);
+      case "fixed-size-list":
+        const child = deserializeArrowField(dataType.children[0]);
+        return new arrow5.FixedSizeList(dataType.listSize, child);
+      case "struct":
+        const children = dataType.children.map((arrowField) => deserializeArrowField(arrowField));
+        return new arrow5.Struct(children);
+      default:
+        throw new Error("array type not supported");
    }
+  }
+  switch (dataType) {
+    case "null":
+      return new arrow5.Null();
+    case "binary":
+      return new arrow5.Binary();
+    case "bool":
+      return new arrow5.Bool();
+    case "int8":
+      return new arrow5.Int8();
+    case "int16":
+      return new arrow5.Int16();
+    case "int32":
+      return new arrow5.Int32();
+    case "int64":
+      return new arrow5.Int64();
+    case "uint8":
+      return new arrow5.Uint8();
+    case "uint16":
+      return new arrow5.Uint16();
+    case "uint32":
+      return new arrow5.Uint32();
+    case "uint64":
+      return new arrow5.Uint64();
+    case "float16":
+      return new arrow5.Float16();
+    case "float32":
+      return new arrow5.Float32();
+    case "float64":
+      return new arrow5.Float64();
+    case "utf8":
+      return new arrow5.Utf8();
+    case "date-day":
+      return new arrow5.DateDay();
+    case "date-millisecond":
+      return new arrow5.DateMillisecond();
+    case "time-second":
+      return new arrow5.TimeSecond();
+    case "time-millisecond":
+      return new arrow5.TimeMillisecond();
+    case "time-microsecond":
+      return new arrow5.TimeMicrosecond();
+    case "time-nanosecond":
+      return new arrow5.TimeNanosecond();
+    case "timestamp-second":
+      return new arrow5.TimestampSecond();
+    case "timestamp-millisecond":
+      return new arrow5.TimestampMillisecond();
+    case "timestamp-microsecond":
+      return new arrow5.TimestampMicrosecond();
+    case "timestamp-nanosecond":
+      return new arrow5.TimestampNanosecond();
+    case "interval-daytime":
+      return new arrow5.IntervalDayTime();
+    case "interval-yearmonth":
+      return new arrow5.IntervalYearMonth();
     default:
       throw new Error("array type not supported");
   }
@@ -211,24 +474,24 @@ var VECTOR_TYPES = /* @__PURE__ */ ((VECTOR_TYPES2) => {
 })(VECTOR_TYPES || {});
 
 // src/lib/encode-arrow.ts
-var
+var arrow6 = __toESM(require("apache-arrow"), 1);
 function encodeArrowSync(data) {
   const vectors = {};
   for (const arrayData of data) {
     const arrayVector = createVector(arrayData.array, arrayData.type);
     vectors[arrayData.name] = arrayVector;
   }
-  const table = new
-  const arrowBuffer =
+  const table = new arrow6.Table(vectors);
+  const arrowBuffer = arrow6.tableToIPC(table);
   return arrowBuffer;
 }
 function createVector(array, type) {
   switch (type) {
     case 1 /* DATE */:
-      return
+      return arrow6.vectorFromArray(array);
    case 0 /* FLOAT */:
    default:
-      return
+      return arrow6.vectorFromArray(array);
  }
 }
 
@@ -245,12 +508,344 @@ var ArrowWriter = {
     "application/vnd.apache.arrow.stream",
     "application/octet-stream"
   ],
-
+  binary: true,
+  options: {},
+  encode: async function encodeArrow(data, options) {
     return encodeArrowSync(data);
   },
-
-
+  encodeSync(data, options) {
+    return encodeArrowSync(data);
+  }
+};
+
+// src/geoarrow/get-arrow-bounds.ts
+function updateBoundsFromGeoArrowSamples(flatCoords, nDim, bounds, sampleSize = 100) {
+  const numberOfFeatures = flatCoords.length / nDim;
+  const sampleStep = Math.max(Math.floor(numberOfFeatures / sampleSize), 1);
+  const newBounds = [...bounds];
+  for (let i = 0; i < numberOfFeatures; i += sampleStep) {
+    const lng = flatCoords[i * nDim];
+    const lat = flatCoords[i * nDim + 1];
+    if (lng < newBounds[0]) {
+      newBounds[0] = lng;
+    }
+    if (lat < newBounds[1]) {
+      newBounds[1] = lat;
+    }
+    if (lng > newBounds[2]) {
+      newBounds[2] = lng;
+    }
+    if (lat > newBounds[3]) {
+      newBounds[3] = lat;
+    }
+  }
+  return newBounds;
+}
+
+// src/geoarrow/convert-geoarrow-to-binary-geometry.ts
+var BINARY_GEOMETRY_TEMPLATE = {
+  globalFeatureIds: { value: new Uint32Array(0), size: 1 },
+  positions: { value: new Float32Array(0), size: 2 },
+  properties: [],
+  numericProps: {},
+  featureIds: { value: new Uint32Array(0), size: 1 }
 };
+function getBinaryGeometriesFromArrow(geoColumn, geoEncoding) {
+  const featureTypes = {
+    polygon: geoEncoding === "geoarrow.multipolygon" || geoEncoding === "geoarrow.polygon",
+    point: geoEncoding === "geoarrow.multipoint" || geoEncoding === "geoarrow.point",
+    line: geoEncoding === "geoarrow.multilinestring" || geoEncoding === "geoarrow.linestring"
+  };
+  const chunks = geoColumn.data;
+  let bounds = [Infinity, Infinity, -Infinity, -Infinity];
+  let globalFeatureIdOffset = 0;
+  const binaryGeometries = [];
+  chunks.forEach((chunk) => {
+    const { featureIds, flatCoordinateArray, nDim, geomOffset } = getBinaryGeometriesFromChunk(
+      chunk,
+      geoEncoding
+    );
+    const numOfVertices = flatCoordinateArray.length / nDim;
+    const globalFeatureIds = new Uint32Array(numOfVertices);
+    for (let i = 0; i < numOfVertices; i++) {
+      globalFeatureIds[i] = featureIds[i] + globalFeatureIdOffset;
+    }
+    const binaryContent = {
+      globalFeatureIds: { value: globalFeatureIds, size: 1 },
+      positions: {
+        value: flatCoordinateArray,
+        size: nDim
+      },
+      featureIds: { value: featureIds, size: 1 },
+      properties: [...Array(chunk.length).keys()].map((i) => ({
+        index: i + globalFeatureIdOffset
+      }))
+    };
+    globalFeatureIdOffset += chunk.length;
+    binaryGeometries.push({
+      shape: "binary-feature-collection",
+      points: {
+        type: "Point",
+        ...BINARY_GEOMETRY_TEMPLATE,
+        ...featureTypes.point ? binaryContent : {}
+      },
+      lines: {
+        type: "LineString",
+        ...BINARY_GEOMETRY_TEMPLATE,
+        ...featureTypes.line ? binaryContent : {},
+        pathIndices: { value: featureTypes.line ? geomOffset : new Uint16Array(0), size: 1 }
+      },
+      polygons: {
+        type: "Polygon",
+        ...BINARY_GEOMETRY_TEMPLATE,
+        ...featureTypes.polygon ? binaryContent : {},
+        polygonIndices: {
+          // TODO why deck.gl's tessellatePolygon performance is not good when using geometryIndicies
+          // even when there is no hole in any polygon
+          value: featureTypes.polygon ? geomOffset : new Uint16Array(0),
+          size: 1
+        },
+        primitivePolygonIndices: {
+          value: featureTypes.polygon ? geomOffset : new Uint16Array(0),
+          size: 1
+        }
+      }
+    });
+    bounds = updateBoundsFromGeoArrowSamples(flatCoordinateArray, nDim, bounds);
+  });
+  return { binaryGeometries, bounds, featureTypes };
+}
+function getBinaryGeometriesFromChunk(chunk, geoEncoding) {
+  switch (geoEncoding) {
+    case "geoarrow.point":
+    case "geoarrow.multipoint":
+      return getBinaryPointsFromChunk(chunk, geoEncoding);
+    case "geoarrow.linestring":
+    case "geoarrow.multilinestring":
+      return getBinaryLinesFromChunk(chunk, geoEncoding);
+    case "geoarrow.polygon":
+    case "geoarrow.multipolygon":
+      return getBinaryPolygonsFromChunk(chunk, geoEncoding);
+    default:
+      throw Error("invalid geoarrow encoding");
+  }
+}
+function getBinaryPolygonsFromChunk(chunk, geoEncoding) {
+  const isMultiPolygon = geoEncoding === "geoarrow.multipolygon";
+  const polygonData = isMultiPolygon ? chunk.children[0] : chunk;
+  const ringData = polygonData.children[0];
+  const pointData = ringData.children[0];
+  const coordData = pointData.children[0];
+  const nDim = pointData.stride;
+  const geomOffset = ringData.valueOffsets;
+  const flatCoordinateArray = coordData.values;
+  const geometryIndicies = new Uint16Array(chunk.length + 1);
+  for (let i = 0; i < chunk.length; i++) {
+    geometryIndicies[i] = geomOffset[chunk.valueOffsets[i]];
+  }
+  geometryIndicies[chunk.length] = flatCoordinateArray.length / nDim;
+  const numOfVertices = flatCoordinateArray.length / nDim;
+  const featureIds = new Uint32Array(numOfVertices);
+  for (let i = 0; i < chunk.length - 1; i++) {
+    const startIdx = geomOffset[chunk.valueOffsets[i]];
+    const endIdx = geomOffset[chunk.valueOffsets[i + 1]];
+    for (let j = startIdx; j < endIdx; j++) {
+      featureIds[j] = i;
+    }
+  }
+  return {
+    featureIds,
+    flatCoordinateArray,
+    nDim,
+    geomOffset,
+    geometryIndicies
+  };
+}
+function getBinaryLinesFromChunk(chunk, geoEncoding) {
+  const isMultiLineString = geoEncoding === "geoarrow.multilinestring";
+  const lineData = isMultiLineString ? chunk.children[0] : chunk;
+  const pointData = lineData.children[0];
+  const coordData = pointData.children[0];
+  const nDim = pointData.stride;
+  const geomOffset = lineData.valueOffsets;
+  const flatCoordinateArray = coordData.values;
+  const geometryIndicies = new Uint16Array(0);
+  const numOfVertices = flatCoordinateArray.length / nDim;
+  const featureIds = new Uint32Array(numOfVertices);
+  for (let i = 0; i < chunk.length; i++) {
+    const startIdx = geomOffset[i];
+    const endIdx = geomOffset[i + 1];
+    for (let j = startIdx; j < endIdx; j++) {
+      featureIds[j] = i;
+    }
+  }
+  return {
+    featureIds,
+    flatCoordinateArray,
+    nDim,
+    geomOffset,
+    geometryIndicies
+  };
+}
+function getBinaryPointsFromChunk(chunk, geoEncoding) {
+  const isMultiPoint = geoEncoding === "geoarrow.multipoint";
+  const pointData = isMultiPoint ? chunk.children[0] : chunk;
+  const coordData = pointData.children[0];
+  const nDim = pointData.stride;
+  const flatCoordinateArray = coordData.values;
+  const geometryIndicies = new Uint16Array(0);
+  const geomOffset = new Int32Array(0);
+  const numOfVertices = flatCoordinateArray.length / nDim;
+  const featureIds = new Uint32Array(numOfVertices);
+  for (let i = 0; i < chunk.length; i++) {
+    featureIds[i] = i;
+  }
+  return {
+    featureIds,
+    flatCoordinateArray,
+    nDim,
+    geomOffset,
+    geometryIndicies
+  };
+}
+
+// src/geoarrow/convert-geoarrow-to-geojson.ts
+function parseGeometryFromArrow(rawData) {
+  var _a;
+  const encoding = (_a = rawData.encoding) == null ? void 0 : _a.toLowerCase();
+  const data = rawData.data;
+  if (!encoding || !data) {
+    return null;
+  }
+  let geometry;
+  switch (encoding) {
+    case "geoarrow.multipolygon":
+      geometry = arrowMultiPolygonToFeature(data);
+      break;
+    case "geoarrow.polygon":
+      geometry = arrowPolygonToFeature(data);
+      break;
+    case "geoarrow.multipoint":
+      geometry = arrowMultiPointToFeature(data);
+      break;
+    case "geoarrow.point":
+      geometry = arrowPointToFeature(data);
+      break;
+    case "geoarrow.multilinestring":
+      geometry = arrowMultiLineStringToFeature(data);
+      break;
+    case "geoarrow.linestring":
+      geometry = arrowLineStringToFeature(data);
+      break;
+    default: {
+      throw Error(`GeoArrow encoding not supported ${encoding}`);
+    }
+  }
+  return {
+    type: "Feature",
+    geometry,
+    properties: {}
+  };
+}
+function arrowMultiPolygonToFeature(arrowMultiPolygon) {
+  const multiPolygon = [];
+  for (let m = 0; m < arrowMultiPolygon.length; m++) {
+    const arrowPolygon = arrowMultiPolygon.get(m);
+    const polygon = [];
+    for (let i = 0; arrowPolygon && i < (arrowPolygon == null ? void 0 : arrowPolygon.length); i++) {
+      const arrowRing = arrowPolygon == null ? void 0 : arrowPolygon.get(i);
+      const ring = [];
+      for (let j = 0; arrowRing && j < arrowRing.length; j++) {
+        const arrowCoord = arrowRing.get(j);
+        const coord = Array.from(arrowCoord);
+        ring.push(coord);
+      }
+      polygon.push(ring);
+    }
+    multiPolygon.push(polygon);
+  }
+  const geometry = {
+    type: "MultiPolygon",
+    coordinates: multiPolygon
+  };
+  return geometry;
+}
+function arrowPolygonToFeature(arrowPolygon) {
+  const polygon = [];
+  for (let i = 0; arrowPolygon && i < arrowPolygon.length; i++) {
+    const arrowRing = arrowPolygon.get(i);
+    const ring = [];
+    for (let j = 0; arrowRing && j < arrowRing.length; j++) {
+      const arrowCoord = arrowRing.get(j);
+      const coords = Array.from(arrowCoord);
+      ring.push(coords);
+    }
+    polygon.push(ring);
+  }
+  const geometry = {
+    type: "Polygon",
+    coordinates: polygon
+  };
+  return geometry;
+}
+function arrowMultiPointToFeature(arrowMultiPoint) {
+  const multiPoint = [];
+  for (let i = 0; arrowMultiPoint && i < arrowMultiPoint.length; i++) {
+    const arrowPoint = arrowMultiPoint.get(i);
+    if (arrowPoint) {
+      const coord = Array.from(arrowPoint);
+      multiPoint.push(coord);
+    }
+  }
+  const geometry = {
+    type: "MultiPoint",
+    coordinates: multiPoint
+  };
+  return geometry;
+}
+function arrowPointToFeature(arrowPoint) {
+  const point = Array.from(arrowPoint);
+  const geometry = {
+    type: "Point",
+    coordinates: point
+  };
+  return geometry;
+}
+function arrowMultiLineStringToFeature(arrowMultiLineString) {
+  const multiLineString = [];
+  for (let i = 0; arrowMultiLineString && i < arrowMultiLineString.length; i++) {
+    const arrowLineString = arrowMultiLineString.get(i);
+    const lineString = [];
+    for (let j = 0; arrowLineString && j < arrowLineString.length; j++) {
+      const arrowCoord = arrowLineString.get(j);
+      if (arrowCoord) {
+        const coords = Array.from(arrowCoord);
+        lineString.push(coords);
+      }
+    }
+    multiLineString.push(lineString);
+  }
+  const geometry = {
+    type: "MultiLineString",
+    coordinates: multiLineString
+  };
+  return geometry;
+}
+function arrowLineStringToFeature(arrowLineString) {
+  const lineString = [];
+  for (let i = 0; arrowLineString && i < arrowLineString.length; i++) {
+    const arrowCoord = arrowLineString.get(i);
+    if (arrowCoord) {
+      const coords = Array.from(arrowCoord);
+      lineString.push(coords);
+    }
+  }
+  const geometry = {
+    type: "LineString",
+    coordinates: lineString
+  };
+  return geometry;
+}
 
 // src/index.ts
 import_schema3.TableBatchBuilder.ArrowBatch = ArrowTableBatchAggregator;