@loaders.gl/parquet 3.4.11 → 3.4.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +16 -16
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/parquet-loader.js +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/esm/parquet-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/parquet-worker.js +2 -2
- package/package.json +9 -7
- package/dist/bundle.js +0 -5
- package/dist/constants.js +0 -18
- package/dist/index.js +0 -56
- package/dist/lib/arrow/convert-columns-to-row-group.js +0 -1
- package/dist/lib/arrow/convert-row-group-to-columns.js +0 -12
- package/dist/lib/arrow/convert-schema-from-parquet.js +0 -82
- package/dist/lib/arrow/convert-schema-to-parquet.js +0 -72
- package/dist/lib/geo/decode-geo-metadata.js +0 -73
- package/dist/lib/geo/geoparquet-schema.js +0 -69
- package/dist/lib/parsers/parse-parquet-to-columns.js +0 -40
- package/dist/lib/parsers/parse-parquet-to-rows.js +0 -40
- package/dist/lib/wasm/encode-parquet-wasm.js +0 -30
- package/dist/lib/wasm/load-wasm/index.js +0 -5
- package/dist/lib/wasm/load-wasm/load-wasm-browser.js +0 -38
- package/dist/lib/wasm/load-wasm/load-wasm-node.js +0 -31
- package/dist/lib/wasm/parse-parquet-wasm.js +0 -27
- package/dist/parquet-loader.js +0 -29
- package/dist/parquet-wasm-loader.js +0 -27
- package/dist/parquet-wasm-writer.js +0 -23
- package/dist/parquet-writer.js +0 -21
- package/dist/parquetjs/codecs/declare.js +0 -2
- package/dist/parquetjs/codecs/dictionary.js +0 -14
- package/dist/parquetjs/codecs/index.js +0 -55
- package/dist/parquetjs/codecs/plain.js +0 -211
- package/dist/parquetjs/codecs/rle.js +0 -145
- package/dist/parquetjs/compression.js +0 -183
- package/dist/parquetjs/encoder/parquet-encoder.js +0 -484
- package/dist/parquetjs/parquet-thrift/BoundaryOrder.js +0 -15
- package/dist/parquetjs/parquet-thrift/BsonType.js +0 -62
- package/dist/parquetjs/parquet-thrift/ColumnChunk.js +0 -211
- package/dist/parquetjs/parquet-thrift/ColumnIndex.js +0 -217
- package/dist/parquetjs/parquet-thrift/ColumnMetaData.js +0 -402
- package/dist/parquetjs/parquet-thrift/ColumnOrder.js +0 -108
- package/dist/parquetjs/parquet-thrift/CompressionCodec.js +0 -20
- package/dist/parquetjs/parquet-thrift/ConvertedType.js +0 -34
- package/dist/parquetjs/parquet-thrift/DataPageHeader.js +0 -170
- package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js +0 -230
- package/dist/parquetjs/parquet-thrift/DateType.js +0 -62
- package/dist/parquetjs/parquet-thrift/DecimalType.js +0 -109
- package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js +0 -126
- package/dist/parquetjs/parquet-thrift/Encoding.js +0 -20
- package/dist/parquetjs/parquet-thrift/EnumType.js +0 -62
- package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js +0 -15
- package/dist/parquetjs/parquet-thrift/FileMetaData.js +0 -260
- package/dist/parquetjs/parquet-thrift/IndexPageHeader.js +0 -62
- package/dist/parquetjs/parquet-thrift/IntType.js +0 -109
- package/dist/parquetjs/parquet-thrift/JsonType.js +0 -62
- package/dist/parquetjs/parquet-thrift/KeyValue.js +0 -106
- package/dist/parquetjs/parquet-thrift/ListType.js +0 -62
- package/dist/parquetjs/parquet-thrift/LogicalType.js +0 -384
- package/dist/parquetjs/parquet-thrift/MapType.js +0 -62
- package/dist/parquetjs/parquet-thrift/MicroSeconds.js +0 -62
- package/dist/parquetjs/parquet-thrift/MilliSeconds.js +0 -62
- package/dist/parquetjs/parquet-thrift/NullType.js +0 -62
- package/dist/parquetjs/parquet-thrift/OffsetIndex.js +0 -101
- package/dist/parquetjs/parquet-thrift/PageEncodingStats.js +0 -131
- package/dist/parquetjs/parquet-thrift/PageHeader.js +0 -220
- package/dist/parquetjs/parquet-thrift/PageLocation.js +0 -145
- package/dist/parquetjs/parquet-thrift/PageType.js +0 -16
- package/dist/parquetjs/parquet-thrift/RowGroup.js +0 -186
- package/dist/parquetjs/parquet-thrift/SchemaElement.js +0 -243
- package/dist/parquetjs/parquet-thrift/SortingColumn.js +0 -131
- package/dist/parquetjs/parquet-thrift/Statistics.js +0 -180
- package/dist/parquetjs/parquet-thrift/StringType.js +0 -62
- package/dist/parquetjs/parquet-thrift/TimeType.js +0 -110
- package/dist/parquetjs/parquet-thrift/TimeUnit.js +0 -131
- package/dist/parquetjs/parquet-thrift/TimestampType.js +0 -110
- package/dist/parquetjs/parquet-thrift/Type.js +0 -20
- package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -62
- package/dist/parquetjs/parquet-thrift/UUIDType.js +0 -62
- package/dist/parquetjs/parquet-thrift/index.js +0 -65
- package/dist/parquetjs/parser/decoders.js +0 -318
- package/dist/parquetjs/parser/parquet-reader.js +0 -200
- package/dist/parquetjs/schema/declare.js +0 -12
- package/dist/parquetjs/schema/schema.js +0 -162
- package/dist/parquetjs/schema/shred.js +0 -347
- package/dist/parquetjs/schema/types.js +0 -416
- package/dist/parquetjs/utils/file-utils.js +0 -43
- package/dist/parquetjs/utils/read-utils.js +0 -109
- package/dist/workers/parquet-worker.js +0 -5
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@loaders.gl/parquet",
-  "version": "3.4.11",
+  "version": "3.4.12",
   "description": "Framework-independent loader for Apache Parquet files",
   "license": "MIT",
   "publishConfig": {
@@ -38,13 +38,15 @@
     "tls": false,
     "stream": false,
     "fs": false,
-    "./src/lib/wasm/load-wasm/load-wasm-node.ts": "./src/lib/wasm/load-wasm/load-wasm-browser.ts"
+    "./src/lib/wasm/load-wasm/load-wasm-node.ts": "./src/lib/wasm/load-wasm/load-wasm-browser.ts",
+    "./dist/es5/lib/wasm/load-wasm/load-wasm-node.js": "./dist/es5/lib/wasm/load-wasm/load-wasm-browser.js",
+    "./dist/esm/lib/wasm/load-wasm/load-wasm-node.js": "./dist/esm/lib/wasm/load-wasm/load-wasm-browser.js"
   },
   "dependencies": {
-    "@loaders.gl/bson": "3.4.11",
-    "@loaders.gl/compression": "3.4.11",
-    "@loaders.gl/loader-utils": "3.4.11",
-    "@loaders.gl/schema": "3.4.11",
+    "@loaders.gl/bson": "3.4.12",
+    "@loaders.gl/compression": "3.4.12",
+    "@loaders.gl/loader-utils": "3.4.12",
+    "@loaders.gl/schema": "3.4.12",
     "async-mutex": "^0.2.2",
     "brotli": "^1.3.2",
     "int53": "^0.2.4",
@@ -67,5 +69,5 @@
     "@types/varint": "^5.0.0",
     "apache-arrow": "^4.0.0"
   },
-  "gitHead": "…"
+  "gitHead": "85254762ae3082f608a863291deef09f2b40c6cf"
 }
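Note: besides the version bumps, the substantive change in this release is the pair of new `browser` field aliases covering the compiled `dist/es5` and `dist/esm` outputs. The published dist modules (see `parse-parquet-wasm.js` at the end of this diff) require `load-wasm-node.js` directly, so aliasing only the `.ts` sources did not help bundlers that resolve from `dist`. A hedged usage sketch follows — the loader name and the `parquet.wasmUrl` option come straight from this diff, while the data file and WASM URL are placeholders:

// Sketch: parsing Parquet via the WASM loader in a browser bundle.
// With the new aliases, a browser-field-aware bundler substitutes
// load-wasm-browser.js, which instantiates parquet-wasm from wasmUrl.
import {parse} from '@loaders.gl/core';
import {ParquetWasmLoader} from '@loaders.gl/parquet';

const table = await parse(fetch('data.parquet'), ParquetWasmLoader, {
  parquet: {wasmUrl: '/static/arrow1_bg.wasm'} // placeholder path to your parquet-wasm binary
});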
package/dist/bundle.js
DELETED
package/dist/constants.js
DELETED
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PARQUET_RDLVL_ENCODING = exports.PARQUET_RDLVL_TYPE = exports.PARQUET_VERSION = exports.PARQUET_MAGIC_ENCRYPTED = exports.PARQUET_MAGIC = void 0;
-// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
-/**
- * Parquet File Magic String
- */
-exports.PARQUET_MAGIC = 'PAR1';
-exports.PARQUET_MAGIC_ENCRYPTED = 'PARE';
-/**
- * Parquet File Format Version
- */
-exports.PARQUET_VERSION = 1;
-/**
- * Internal type used for repetition/definition levels
- */
-exports.PARQUET_RDLVL_TYPE = 'INT32';
-exports.PARQUET_RDLVL_ENCODING = 'RLE';
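As a self-contained sketch (not part of the package), this is how the magic bytes defined above can be checked on a candidate buffer — a Parquet file begins and ends with the 4 ASCII bytes 'PAR1', with 'PARE' marking an encrypted footer:

// Check both copies of the PARQUET_MAGIC bytes on an ArrayBuffer.
function hasParquetMagic(arrayBuffer) {
  const bytes = new Uint8Array(arrayBuffer);
  const ascii = (offset) => String.fromCharCode(...bytes.subarray(offset, offset + 4));
  return bytes.length >= 8 && ascii(0) === 'PAR1' && ascii(bytes.length - 4) === 'PAR1';
}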
package/dist/index.js
DELETED
@@ -1,56 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.unpackGeoMetadata = exports.setGeoMetadata = exports.getGeoMetadata = exports.geoJSONSchema = exports._typecheckParquetLoader = exports.convertParquetToArrowSchema = exports.convertSchemaFromParquet = exports.ParquetEncoder = exports.ParquetReader = exports.ParquetSchema = exports.preloadCompressions = exports.ParquetWasmWriter = exports._ParquetWriter = exports.ParquetWasmLoader = exports.ParquetColumnarLoader = exports.ParquetLoader = exports.ParquetWasmWorkerLoader = exports.ParquetWorkerLoader = void 0;
-// ParquetLoader
-const parquet_wasm_loader_1 = require("./parquet-wasm-loader");
-Object.defineProperty(exports, "ParquetWasmWorkerLoader", { enumerable: true, get: function () { return parquet_wasm_loader_1.ParquetWasmLoader; } });
-const parquet_loader_1 = require("./parquet-loader");
-Object.defineProperty(exports, "ParquetWorkerLoader", { enumerable: true, get: function () { return parquet_loader_1.ParquetLoader; } });
-const parse_parquet_to_rows_1 = require("./lib/parsers/parse-parquet-to-rows");
-const parse_parquet_to_columns_1 = require("./lib/parsers/parse-parquet-to-columns");
-const parse_parquet_wasm_1 = require("./lib/wasm/parse-parquet-wasm");
-/** ParquetJS table loader */
-exports.ParquetLoader = {
-    ...parquet_loader_1.ParquetLoader,
-    parse: parse_parquet_to_rows_1.parseParquet,
-    parseFileInBatches: parse_parquet_to_rows_1.parseParquetFileInBatches
-};
-/** ParquetJS table loader */
-exports.ParquetColumnarLoader = {
-    ...parquet_loader_1.ParquetLoader,
-    parse: parse_parquet_to_columns_1.parseParquetInColumns,
-    parseFileInBatches: parse_parquet_to_columns_1.parseParquetFileInColumnarBatches
-};
-exports.ParquetWasmLoader = {
-    ...parquet_wasm_loader_1.ParquetWasmLoader,
-    parse: parse_parquet_wasm_1.parseParquet
-};
-// ParquetWriter
-var parquet_writer_1 = require("./parquet-writer");
-Object.defineProperty(exports, "_ParquetWriter", { enumerable: true, get: function () { return parquet_writer_1.ParquetWriter; } });
-var parquet_wasm_writer_1 = require("./parquet-wasm-writer");
-Object.defineProperty(exports, "ParquetWasmWriter", { enumerable: true, get: function () { return parquet_wasm_writer_1.ParquetWasmWriter; } });
-// EXPERIMENTAL - expose the internal parquetjs API
-var compression_1 = require("./parquetjs/compression");
-Object.defineProperty(exports, "preloadCompressions", { enumerable: true, get: function () { return compression_1.preloadCompressions; } });
-var schema_1 = require("./parquetjs/schema/schema");
-Object.defineProperty(exports, "ParquetSchema", { enumerable: true, get: function () { return schema_1.ParquetSchema; } });
-var parquet_reader_1 = require("./parquetjs/parser/parquet-reader");
-Object.defineProperty(exports, "ParquetReader", { enumerable: true, get: function () { return parquet_reader_1.ParquetReader; } });
-var parquet_encoder_1 = require("./parquetjs/encoder/parquet-encoder");
-Object.defineProperty(exports, "ParquetEncoder", { enumerable: true, get: function () { return parquet_encoder_1.ParquetEncoder; } });
-var convert_schema_from_parquet_1 = require("./lib/arrow/convert-schema-from-parquet");
-Object.defineProperty(exports, "convertSchemaFromParquet", { enumerable: true, get: function () { return convert_schema_from_parquet_1.convertSchemaFromParquet; } });
-Object.defineProperty(exports, "convertParquetToArrowSchema", { enumerable: true, get: function () { return convert_schema_from_parquet_1.convertSchemaFromParquet; } });
-// TESTS
-exports._typecheckParquetLoader = exports.ParquetLoader;
-// Geo Metadata
-var geoparquet_schema_1 = require("./lib/geo/geoparquet-schema");
-Object.defineProperty(exports, "geoJSONSchema", { enumerable: true, get: function () { return __importDefault(geoparquet_schema_1).default; } });
-var decode_geo_metadata_1 = require("./lib/geo/decode-geo-metadata");
-Object.defineProperty(exports, "getGeoMetadata", { enumerable: true, get: function () { return decode_geo_metadata_1.getGeoMetadata; } });
-Object.defineProperty(exports, "setGeoMetadata", { enumerable: true, get: function () { return decode_geo_metadata_1.setGeoMetadata; } });
-Object.defineProperty(exports, "unpackGeoMetadata", { enumerable: true, get: function () { return decode_geo_metadata_1.unpackGeoMetadata; } });
package/dist/lib/arrow/convert-columns-to-row-group.js
DELETED
@@ -1 +0,0 @@
-"use strict";
package/dist/lib/arrow/convert-row-group-to-columns.js
DELETED
@@ -1,12 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.convertParquetRowGroupToColumns = void 0;
-function convertParquetRowGroupToColumns(schema, rowGroup) {
-    const columns = {};
-    for (const [columnName, data] of Object.entries(rowGroup.columnData)) {
-        columns[columnName] = columns[columnName] || data.values;
-    }
-    return columns;
-}
-exports.convertParquetRowGroupToColumns = convertParquetRowGroupToColumns;
package/dist/lib/arrow/convert-schema-from-parquet.js
DELETED
@@ -1,82 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.convertSchemaFromParquet = exports.PARQUET_TYPE_MAPPING = void 0;
-const schema_1 = require("@loaders.gl/schema");
-exports.PARQUET_TYPE_MAPPING = {
-    BOOLEAN: schema_1.Bool,
-    INT32: schema_1.Int32,
-    INT64: schema_1.Float64,
-    INT96: schema_1.Float64,
-    FLOAT: schema_1.Float32,
-    DOUBLE: schema_1.Float64,
-    BYTE_ARRAY: schema_1.Binary,
-    FIXED_LEN_BYTE_ARRAY: schema_1.Binary,
-    UTF8: schema_1.Utf8,
-    DATE: schema_1.Int32,
-    TIME_MILLIS: schema_1.Int64,
-    TIME_MICROS: schema_1.Int64,
-    TIMESTAMP_MILLIS: schema_1.Int64,
-    TIMESTAMP_MICROS: schema_1.Int64,
-    UINT_8: schema_1.Int32,
-    UINT_16: schema_1.Uint16,
-    UINT_32: schema_1.Uint32,
-    UINT_64: schema_1.Uint64,
-    INT_8: schema_1.Int8,
-    INT_16: schema_1.Int16,
-    INT_32: schema_1.Int32,
-    INT_64: schema_1.Int64,
-    JSON: schema_1.Binary,
-    BSON: schema_1.Binary,
-    // TODO check interval type
-    INTERVAL: schema_1.Binary,
-    DECIMAL_INT32: schema_1.Float32,
-    DECIMAL_INT64: schema_1.Float64,
-    DECIMAL_BYTE_ARRAY: schema_1.Float64,
-    DECIMAL_FIXED_LEN_BYTE_ARRAY: schema_1.Float64
-};
-function convertSchemaFromParquet(parquetSchema, parquetMetadata) {
-    const fields = getFields(parquetSchema.schema);
-    const metadata = parquetMetadata && getSchemaMetadata(parquetMetadata);
-    return new schema_1.Schema(fields, metadata);
-}
-exports.convertSchemaFromParquet = convertSchemaFromParquet;
-function getFields(schema) {
-    const fields = [];
-    for (const name in schema) {
-        const field = schema[name];
-        if (field.fields) {
-            const childFields = getFields(field.fields);
-            const nestedField = new schema_1.Field(name, new schema_1.Struct(childFields), field.optional);
-            fields.push(nestedField);
-        }
-        else {
-            const FieldType = exports.PARQUET_TYPE_MAPPING[field.type];
-            const metadata = getFieldMetadata(field);
-            const arrowField = new schema_1.Field(name, new FieldType(), field.optional, metadata);
-            fields.push(arrowField);
-        }
-    }
-    return fields;
-}
-function getFieldMetadata(field) {
-    const metadata = new Map();
-    for (const key in field) {
-        if (key !== 'name') {
-            let value = field[key] || '';
-            value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];
-            metadata.set(key, value);
-        }
-    }
-    return metadata;
-}
-function getSchemaMetadata(parquetMetadata) {
-    const metadata = new Map();
-    const keyValueList = parquetMetadata.key_value_metadata || [];
-    for (const { key, value } of keyValueList) {
-        if (typeof value === 'string') {
-            metadata.set(key, value);
-        }
-    }
-    return metadata;
-}
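One caveat worth flagging in the mapping above: INT64, INT96, and the larger DECIMAL variants map to Float64, and JavaScript doubles are exact only up to 2^53 − 1, so very large 64-bit integer values can silently lose precision. A self-contained demonstration:

// Doubles near 2^60 are spaced 256 apart, so adjacent INT64 values collapse.
const a = 2n ** 60n + 1n;
const b = 2n ** 60n + 2n;
console.log(Number(a) === Number(b)); // true — both round to 2 ** 60
console.log(Number.MAX_SAFE_INTEGER); // 9007199254740991, i.e. 2 ** 53 - 1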
package/dist/lib/arrow/convert-schema-to-parquet.js
DELETED
@@ -1,72 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.convertToParquetSchema = exports.PARQUET_TYPE_MAPPING = void 0;
-const schema_1 = require("@loaders.gl/schema");
-exports.PARQUET_TYPE_MAPPING = {
-    BOOLEAN: schema_1.Bool,
-    INT32: schema_1.Int32,
-    INT64: schema_1.Float64,
-    INT96: schema_1.Float64,
-    FLOAT: schema_1.Float32,
-    DOUBLE: schema_1.Float64,
-    BYTE_ARRAY: schema_1.Binary,
-    FIXED_LEN_BYTE_ARRAY: schema_1.Binary,
-    UTF8: schema_1.Utf8,
-    DATE: schema_1.Int32,
-    TIME_MILLIS: schema_1.Int64,
-    TIME_MICROS: schema_1.Int64,
-    TIMESTAMP_MILLIS: schema_1.Int64,
-    TIMESTAMP_MICROS: schema_1.Int64,
-    UINT_8: schema_1.Int32,
-    UINT_16: schema_1.Uint16,
-    UINT_32: schema_1.Uint32,
-    UINT_64: schema_1.Uint64,
-    INT_8: schema_1.Int8,
-    INT_16: schema_1.Int16,
-    INT_32: schema_1.Int32,
-    INT_64: schema_1.Int64,
-    JSON: schema_1.Binary,
-    BSON: schema_1.Binary,
-    // TODO check interval type
-    INTERVAL: schema_1.Binary,
-    DECIMAL_INT32: schema_1.Float32,
-    DECIMAL_INT64: schema_1.Float64,
-    DECIMAL_BYTE_ARRAY: schema_1.Float64,
-    DECIMAL_FIXED_LEN_BYTE_ARRAY: schema_1.Float64
-};
-function convertToParquetSchema(schema) {
-    const fields = []; // getFields(schema.fields);
-    // TODO add metadata if needed.
-    return new schema_1.Schema(fields);
-}
-exports.convertToParquetSchema = convertToParquetSchema;
-// function getFields(schema: Field[]): Definition[] {
-//   const fields: Field[] = [];
-//   for (const name in schema) {
-//     const field = schema[name];
-//     // @ts-ignore
-//     const children = field.children as DataType[];
-//     if (children) {
-//       const childField = getFields(field.fields);
-//       const nestedField = new Field(name, new Struct(childField), field.optional);
-//       fields.push(nestedField);
-//     } else {
-//       const FieldType = PARQUET_TYPE_MAPPING[field.type];
-//       const metadata = getFieldMetadata(field);
-//       const arrowField = new Field(name, new FieldType(), field.optional, metadata);
-//       fields.push(arrowField);
-//     }
-//   }
-//   return fields;
-// }
-// function getFieldMetadata(field: ParquetField): Map<string, string> {
-//   const metadata = new Map();
-//   for (const key in field) {
-//     if (key !== 'name') {
-//       const value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];
-//       metadata.set(key, value);
-//     }
-//   }
-//   return metadata;
-// }
package/dist/lib/geo/decode-geo-metadata.js
DELETED
@@ -1,73 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.unpackGeoMetadata = exports.setGeoMetadata = exports.getGeoMetadata = void 0;
-/**
- * Reads the GeoMetadata object from the metadata
- * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata */
-function getGeoMetadata(schema) {
-    const stringifiedGeoMetadata = schema.metadata.get('geo');
-    if (!stringifiedGeoMetadata) {
-        return null;
-    }
-    try {
-        const geoMetadata = JSON.parse(stringifiedGeoMetadata);
-        return geoMetadata;
-    }
-    catch {
-        return null;
-    }
-}
-exports.getGeoMetadata = getGeoMetadata;
-/**
- * Stores a geoarrow / geoparquet geo metadata object in the schema
- * @note geoarrow / geoparquet geo metadata is a single stringified JSON field
- */
-function setGeoMetadata(schema, geoMetadata) {
-    const stringifiedGeoMetadata = JSON.stringify(geoMetadata);
-    schema.metadata.set('geo', stringifiedGeoMetadata);
-}
-exports.setGeoMetadata = setGeoMetadata;
-/**
- * Unpacks geo metadata into separate metadata fields (parses the long JSON string)
- * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata
- */
-function unpackGeoMetadata(schema) {
-    const geoMetadata = getGeoMetadata(schema);
-    if (!geoMetadata) {
-        return;
-    }
-    // Store Parquet Schema Level Metadata
-    const { version, primary_column, columns } = geoMetadata;
-    if (version) {
-        schema.metadata.set('geo.version', version);
-    }
-    if (primary_column) {
-        schema.metadata.set('geo.primary_column', primary_column);
-    }
-    // store column names as comma separated list
-    schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));
-    for (const [columnName, columnMetadata] of Object.entries(columns || {})) {
-        const field = schema.fields.find((field) => field.name === columnName);
-        if (field) {
-            if (field.name === primary_column) {
-                field.metadata.set('geo.primary_field', 'true');
-            }
-            unpackGeoFieldMetadata(field, columnMetadata);
-        }
-    }
-}
-exports.unpackGeoMetadata = unpackGeoMetadata;
-function unpackGeoFieldMetadata(field, columnMetadata) {
-    for (const [key, value] of Object.entries(columnMetadata || {})) {
-        switch (key) {
-            case 'geometry_type':
-                field.metadata.set(`geo.${key}`, value.join(','));
-                break;
-            case 'bbox':
-            case 'crs':
-            case 'edges':
-            default:
-                field.metadata.set(`geo.${key}`, typeof value === 'string' ? value : JSON.stringify(value));
-        }
-    }
-}
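These helpers remain published in the `es5`/`esm` builds and are re-exported from the package index (whose deleted dist copy appears earlier in this diff). A hedged usage sketch — the file name is a placeholder, and the parsed result is the columnar batch produced by `parse-parquet-to-columns.js` below:

import {parse} from '@loaders.gl/core';
import {ParquetColumnarLoader, getGeoMetadata} from '@loaders.gl/parquet';

// Parse a GeoParquet file to a columnar batch, then read its 'geo' metadata.
const batch = await parse(fetch('example.geoparquet'), ParquetColumnarLoader);
const geo = getGeoMetadata(batch.schema); // null when no 'geo' key is present
if (geo) {
  console.log(geo.version, geo.primary_column, Object.keys(geo.columns || {}));
}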
package/dist/lib/geo/geoparquet-schema.js
DELETED
@@ -1,69 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-/* eslint-disable camelcase */
-/**
- * Geoparquet JSON schema for geo metadata
- * @see https://github.com/geoarrow/geoarrow/blob/main/metadata.md
- * @see https://github.com/opengeospatial/geoparquet/blob/main/format-specs/geoparquet.md
- */
-exports.default = {
-    $schema: 'http://json-schema.org/draft-07/schema#',
-    title: 'GeoParquet',
-    description: 'Parquet metadata included in the geo field.',
-    type: 'object',
-    required: ['version', 'primary_column', 'columns'],
-    properties: {
-        version: { type: 'string', const: '1.0.0-beta.1' },
-        primary_column: { type: 'string', minLength: 1 },
-        columns: {
-            type: 'object',
-            minProperties: 1,
-            patternProperties: {
-                '.+': {
-                    type: 'object',
-                    required: ['encoding', 'geometry_types'],
-                    properties: {
-                        encoding: { type: 'string', const: 'WKB' },
-                        geometry_types: {
-                            type: 'array',
-                            uniqueItems: true,
-                            items: {
-                                type: 'string',
-                                pattern: '^(GeometryCollection|(Multi)?(Point|LineString|Polygon))( Z)?$'
-                            }
-                        },
-                        crs: {
-                            oneOf: [
-                                {
-                                    $ref: 'https://proj.org/schemas/v0.5/projjson.schema.json'
-                                },
-                                { type: 'null' }
-                            ]
-                        },
-                        edges: { type: 'string', enum: ['planar', 'spherical'] },
-                        orientation: { type: 'string', const: 'counterclockwise' },
-                        bbox: {
-                            type: 'array',
-                            items: { type: 'number' },
-                            oneOf: [
-                                {
-                                    description: '2D bbox consisting of (xmin, ymin, xmax, ymax)',
-                                    minItems: 4,
-                                    maxItems: 4
-                                },
-                                {
-                                    description: '3D bbox consisting of (xmin, ymin, zmin, xmax, ymax, zmax)',
-                                    minItems: 6,
-                                    maxItems: 6
-                                }
-                            ]
-                        },
-                        epoch: { type: 'number' }
-                    }
-                }
-            },
-            additionalProperties: false
-        }
-    }
-};
package/dist/lib/parsers/parse-parquet-to-columns.js
DELETED
@@ -1,40 +0,0 @@
-"use strict";
-// loaders.gl, MIT license
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseParquetFileInColumnarBatches = exports.parseParquetInColumns = void 0;
-const loader_utils_1 = require("@loaders.gl/loader-utils");
-const parquet_reader_1 = require("../../parquetjs/parser/parquet-reader");
-const convert_schema_from_parquet_1 = require("../arrow/convert-schema-from-parquet");
-const convert_row_group_to_columns_1 = require("../arrow/convert-row-group-to-columns");
-const decode_geo_metadata_1 = require("../geo/decode-geo-metadata");
-async function parseParquetInColumns(arrayBuffer, options) {
-    const blob = new Blob([arrayBuffer]);
-    for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {
-        return batch;
-    }
-    return null;
-}
-exports.parseParquetInColumns = parseParquetInColumns;
-async function* parseParquetFileInColumnarBatches(blob, options) {
-    const file = (0, loader_utils_1.makeReadableFile)(blob);
-    const reader = new parquet_reader_1.ParquetReader(file);
-    const parquetSchema = await reader.getSchema();
-    const parquetMetadata = await reader.getFileMetadata();
-    const schema = (0, convert_schema_from_parquet_1.convertSchemaFromParquet)(parquetSchema, parquetMetadata);
-    (0, decode_geo_metadata_1.unpackGeoMetadata)(schema);
-    const rowGroups = reader.rowGroupIterator(options?.parquet);
-    for await (const rowGroup of rowGroups) {
-        yield convertRowGroupToTableBatch(schema, rowGroup);
-    }
-}
-exports.parseParquetFileInColumnarBatches = parseParquetFileInColumnarBatches;
-function convertRowGroupToTableBatch(schema, rowGroup) {
-    const data = (0, convert_row_group_to_columns_1.convertParquetRowGroupToColumns)(schema, rowGroup);
-    return {
-        shape: 'columnar-table',
-        batchType: 'data',
-        schema,
-        data,
-        length: rowGroup.rowCount
-    };
-}
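For batch-wise reading, the module's own async generator can be driven directly, yielding one columnar batch per Parquet row group. The deep-import path below targets the surviving `dist/esm` copy and is an assumption, not a documented entry point:

// Stream columnar batches from a Blob of Parquet bytes.
import {parseParquetFileInColumnarBatches} from '@loaders.gl/parquet/dist/esm/lib/parsers/parse-parquet-to-columns.js';

const blob = new Blob([arrayBuffer]); // arrayBuffer: your Parquet file bytes
for await (const batch of parseParquetFileInColumnarBatches(blob, {})) {
  console.log(batch.shape, batch.length); // 'columnar-table', rows in the group
}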
package/dist/lib/parsers/parse-parquet-to-rows.js
DELETED
@@ -1,40 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseParquetFileInBatches = exports.parseParquet = void 0;
-// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';
-// import {ColumnarTableBatch} from '@loaders.gl/schema';
-const loader_utils_1 = require("@loaders.gl/loader-utils");
-const parquet_reader_1 = require("../../parquetjs/parser/parquet-reader");
-async function parseParquet(arrayBuffer, options) {
-    const blob = new Blob([arrayBuffer]);
-    for await (const batch of parseParquetFileInBatches(blob, options)) {
-        return batch;
-    }
-    return null;
-}
-exports.parseParquet = parseParquet;
-async function* parseParquetFileInBatches(blob, options) {
-    const file = (0, loader_utils_1.makeReadableFile)(blob);
-    const reader = new parquet_reader_1.ParquetReader(file);
-    const rowBatches = reader.rowBatchIterator(options?.parquet);
-    for await (const rows of rowBatches) {
-        yield rows;
-    }
-}
-exports.parseParquetFileInBatches = parseParquetFileInBatches;
-// export async function* parseParquetFileInColumnarBatches(blob: Blob, options?: {columnList?: string[][]}): AsyncIterable<ColumnarTableBatch> {
-//   const rowGroupReader = new ParquetRowGroupReader({data: blob, columnList: options?.columnList});
-//   try {
-//     for await (const rowGroup of rowGroupReader) {
-//       yield convertRowGroupToTableBatch(rowGroup);
-//     }
-//   } finally {
-//     await rowGroupReader.close();
-//   }
-// }
-// function convertRowGroupToTableBatch(rowGroup): ColumnarTableBatch {
-//   // @ts-expect-error
-//   return {
-//     data: rowGroup
-//   };
-// }
package/dist/lib/wasm/encode-parquet-wasm.js
DELETED
@@ -1,30 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.tableToIPC = exports.encode = void 0;
-const apache_arrow_1 = require("apache-arrow");
-const load_wasm_1 = require("./load-wasm");
-/**
- * Encode Arrow Table to Parquet buffer
- */
-async function encode(table, options) {
-    const wasmUrl = options?.parquet?.wasmUrl;
-    const wasm = await (0, load_wasm_1.loadWasm)(wasmUrl);
-    const arrowIPCBytes = tableToIPC(table);
-    // TODO: provide options for how to write table.
-    const writerProperties = new wasm.WriterPropertiesBuilder().build();
-    const parquetBytes = wasm.writeParquet(arrowIPCBytes, writerProperties);
-    return parquetBytes.buffer.slice(parquetBytes.byteOffset, parquetBytes.byteLength + parquetBytes.byteOffset);
-}
-exports.encode = encode;
-/**
- * Serialize a {@link Table} to the IPC format. This function is a convenience
- * wrapper for {@link RecordBatchStreamWriter} and {@link RecordBatchFileWriter}.
- * Opposite of {@link tableFromIPC}.
- *
- * @param table The Table to serialize.
- * @param type Whether to serialize the Table as a file or a stream.
- */
-function tableToIPC(table) {
-    return apache_arrow_1.RecordBatchStreamWriter.writeAll(table).toUint8Array(true);
-}
-exports.tableToIPC = tableToIPC;
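The final `buffer.slice(...)` in `encode()` above is the standard pattern for copying out exactly the bytes a Uint8Array views: the view may sit at a non-zero offset inside a larger ArrayBuffer, so returning `.buffer` alone could leak unrelated bytes. A self-contained illustration:

const backing = new ArrayBuffer(16);
const view = new Uint8Array(backing, 4, 8); // 8-byte window at offset 4
// view.buffer alone is all 16 bytes; offset-aware slicing copies just the 8 viewed.
const exact = view.buffer.slice(view.byteOffset, view.byteOffset + view.byteLength);
console.log(exact.byteLength); // 8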
package/dist/lib/wasm/load-wasm/index.js
DELETED
@@ -1,5 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.loadWasm = void 0;
-var load_wasm_node_1 = require("./load-wasm-node");
-Object.defineProperty(exports, "loadWasm", { enumerable: true, get: function () { return load_wasm_node_1.loadWasm; } });
package/dist/lib/wasm/load-wasm/load-wasm-browser.js
DELETED
@@ -1,38 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.loadWasm = void 0;
-const wasmEsm = __importStar(require("parquet-wasm/esm2/arrow1"));
-let cached = null;
-async function loadWasm(wasmUrl) {
-    if (cached !== null) {
-        return cached;
-    }
-    // For ESM bundles, need to await the default export, which loads the WASM
-    await wasmEsm.default(wasmUrl);
-    cached = wasmEsm;
-    return wasmEsm;
-}
-exports.loadWasm = loadWasm;
package/dist/lib/wasm/load-wasm/load-wasm-node.js
DELETED
@@ -1,31 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.loadWasm = void 0;
-const wasmNode = __importStar(require("parquet-wasm/node/arrow1"));
-async function loadWasm(wasmUrl) {
-    return wasmNode;
-}
-exports.loadWasm = loadWasm;
package/dist/lib/wasm/parse-parquet-wasm.js
DELETED
@@ -1,27 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseParquet = void 0;
-const apache_arrow_1 = require("apache-arrow");
-const load_wasm_node_1 = require("./load-wasm/load-wasm-node");
-async function parseParquet(arrayBuffer, options) {
-    const wasmUrl = options?.parquet?.wasmUrl;
-    const wasm = await (0, load_wasm_node_1.loadWasm)(wasmUrl);
-    const arr = new Uint8Array(arrayBuffer);
-    const arrowIPCUint8Arr = wasm.readParquet(arr);
-    const arrowIPCBuffer = arrowIPCUint8Arr.buffer.slice(arrowIPCUint8Arr.byteOffset, arrowIPCUint8Arr.byteLength + arrowIPCUint8Arr.byteOffset);
-    const arrowTable = tableFromIPC(arrowIPCBuffer);
-    return arrowTable;
-}
-exports.parseParquet = parseParquet;
-/**
- * Deserialize the IPC format into a {@link Table}. This function is a
- * convenience wrapper for {@link RecordBatchReader}. Opposite of {@link tableToIPC}.
- */
-function tableFromIPC(input) {
-    const reader = apache_arrow_1.RecordBatchStreamReader.from(input);
-    const recordBatches = [];
-    for (const recordBatch of reader) {
-        recordBatches.push(recordBatch);
-    }
-    return new apache_arrow_1.Table(recordBatches);
-}
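Note the direct `require("./load-wasm/load-wasm-node")` above — this is the dist-level import that the new `browser` field aliases in package.json redirect for browser builds. Taken together with `encode-parquet-wasm.js`, this module gives a WASM-backed round trip between Apache Arrow tables and Parquet buffers. A hedged end-to-end sketch using the public writer/loader pair re-exported by the package index (`arrowTable` is assumed to be an existing apache-arrow Table):

import {encode, parse} from '@loaders.gl/core';
import {ParquetWasmWriter, ParquetWasmLoader} from '@loaders.gl/parquet';

// Arrow Table -> Parquet bytes (wasm.writeParquet) -> Arrow Table (wasm.readParquet)
const parquetBuffer = await encode(arrowTable, ParquetWasmWriter);
const roundTripped = await parse(parquetBuffer, ParquetWasmLoader);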