@loaders.gl/parquet 3.4.0-alpha.2 → 3.4.0-alpha.3
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/dist/dist.min.js +21 -28
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/constants.js +0 -2
- package/dist/es5/constants.js.map +1 -1
- package/dist/es5/index.js +0 -3
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js +0 -1
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -1
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js +1 -1
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -1
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js +0 -2
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -1
- package/dist/es5/lib/geo/decode-geo-metadata.js +0 -5
- package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
- package/dist/es5/lib/geo/geoparquet-schema.js.map +1 -1
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js +115 -119
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js +105 -109
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -1
- package/dist/es5/lib/wasm/encode-parquet-wasm.js +14 -16
- package/dist/es5/lib/wasm/encode-parquet-wasm.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/index.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js +16 -18
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js +6 -8
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
- package/dist/es5/lib/wasm/parse-parquet-wasm.js +16 -18
- package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/es5/parquet-loader.js +1 -2
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -2
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
- package/dist/es5/parquetjs/codecs/index.js +0 -1
- package/dist/es5/parquetjs/codecs/index.js.map +1 -1
- package/dist/es5/parquetjs/codecs/plain.js +0 -3
- package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
- package/dist/es5/parquetjs/codecs/rle.js +0 -4
- package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +48 -58
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/parquet-encoder.js +301 -345
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageType.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/PageType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Type.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/Type.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/es5/parquetjs/parser/decoders.js +244 -261
- package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
- package/dist/es5/parquetjs/parser/parquet-reader.js +396 -428
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js +1 -3
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/schema.js +2 -12
- package/dist/es5/parquetjs/schema/schema.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +2 -14
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js +6 -11
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +0 -1
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/es5/parquetjs/utils/read-utils.js +0 -7
- package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/es5/workers/parquet-worker.js.map +1 -1
- package/dist/esm/bundle.js +0 -1
- package/dist/esm/bundle.js.map +1 -1
- package/dist/esm/constants.js +0 -3
- package/dist/esm/constants.js.map +1 -1
- package/dist/esm/index.js +0 -8
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/arrow/convert-row-group-to-columns.js +0 -2
- package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -1
- package/dist/esm/lib/arrow/convert-schema-from-parquet.js +0 -2
- package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -1
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js +0 -3
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -1
- package/dist/esm/lib/geo/decode-geo-metadata.js +0 -6
- package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -1
- package/dist/esm/lib/geo/geoparquet-schema.js +0 -2
- package/dist/esm/lib/geo/geoparquet-schema.js.map +1 -1
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js +0 -2
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -1
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js +0 -1
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -1
- package/dist/esm/lib/wasm/encode-parquet-wasm.js +0 -1
- package/dist/esm/lib/wasm/encode-parquet-wasm.js.map +1 -1
- package/dist/esm/lib/wasm/load-wasm/load-wasm-browser.js +0 -1
- package/dist/esm/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
- package/dist/esm/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
- package/dist/esm/lib/wasm/parse-parquet-wasm.js +0 -3
- package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/esm/parquet-loader.js +1 -3
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -3
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -2
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -2
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/codecs/dictionary.js.map +1 -1
- package/dist/esm/parquetjs/codecs/index.js +0 -2
- package/dist/esm/parquetjs/codecs/index.js.map +1 -1
- package/dist/esm/parquetjs/codecs/plain.js +0 -4
- package/dist/esm/parquetjs/codecs/plain.js.map +1 -1
- package/dist/esm/parquetjs/codecs/rle.js +0 -6
- package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
- package/dist/esm/parquetjs/compression.js +0 -9
- package/dist/esm/parquetjs/compression.js.map +1 -1
- package/dist/esm/parquetjs/encoder/parquet-encoder.js +0 -38
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/BsonType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/BsonType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DateType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/DateType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/Encoding.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/Encoding.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/EnumType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/EnumType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/IntType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/JsonType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/JsonType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ListType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/ListType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/MapType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MapType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/NullType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/NullType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageType.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/PageType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/Statistics.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/StringType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/StringType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TimeType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/Type.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/Type.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/UUIDType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/index.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/esm/parquetjs/parser/decoders.js +1 -18
- package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
- package/dist/esm/parquetjs/parser/parquet-reader.js +0 -13
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js +0 -2
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/schema.js +0 -10
- package/dist/esm/parquetjs/schema/schema.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +1 -15
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/parquetjs/schema/types.js +6 -10
- package/dist/esm/parquetjs/schema/types.js.map +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js +0 -1
- package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/esm/parquetjs/utils/read-utils.js +0 -8
- package/dist/esm/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/parquet-worker.js +25 -32
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquetjs/schema/types.js +4 -6
- package/package.json +6 -7
- package/src/parquetjs/schema/types.ts +4 -3
package/dist/es5/constants.js
CHANGED
@@ -7,10 +7,8 @@ exports.PARQUET_VERSION = exports.PARQUET_RDLVL_TYPE = exports.PARQUET_RDLVL_ENC
 var PARQUET_MAGIC = 'PAR1';
 exports.PARQUET_MAGIC = PARQUET_MAGIC;
 var PARQUET_MAGIC_ENCRYPTED = 'PARE';
-
 exports.PARQUET_MAGIC_ENCRYPTED = PARQUET_MAGIC_ENCRYPTED;
 var PARQUET_VERSION = 1;
-
 exports.PARQUET_VERSION = PARQUET_VERSION;
 var PARQUET_RDLVL_TYPE = 'INT32';
 exports.PARQUET_RDLVL_TYPE = PARQUET_RDLVL_TYPE;
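For context, `PARQUET_MAGIC` ('PAR1') is the four-byte marker a Parquet file both starts and ends with; the hunk above only drops blank lines around the export assignments. A minimal, hypothetical sketch of how a constant like this can be used to sniff a buffer (illustrative only, not this package's own detection code):

```ts
const PARQUET_MAGIC = 'PAR1';

/** True if the buffer starts and ends with the Parquet magic bytes. */
function isParquetBuffer(arrayBuffer: ArrayBuffer): boolean {
  const bytes = new Uint8Array(arrayBuffer);
  if (bytes.length < 8) {
    return false;
  }
  // Decode 4 bytes at the given offset as ASCII and compare against the magic string.
  const ascii = (offset: number) => String.fromCharCode(...bytes.slice(offset, offset + 4));
  return ascii(0) === PARQUET_MAGIC && ascii(bytes.length - 4) === PARQUET_MAGIC;
}
```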
package/dist/es5/constants.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"constants.js","names":["PARQUET_MAGIC","PARQUET_MAGIC_ENCRYPTED","PARQUET_VERSION","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING"],"sources":["../../src/constants.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/**\n * Parquet File Magic String\n */\nexport const PARQUET_MAGIC = 'PAR1';\nexport const PARQUET_MAGIC_ENCRYPTED = 'PARE';\n\n/**\n * Parquet File Format Version\n */\nexport const PARQUET_VERSION = 1;\n\n/**\n * Internal type used for repetition/definition levels\n */\nexport const PARQUET_RDLVL_TYPE = 'INT32';\nexport const PARQUET_RDLVL_ENCODING = 'RLE';\n"],"mappings":";;;;;;AAIO,IAAMA,aAAa,GAAG,MAAM;
+
{"version":3,"file":"constants.js","names":["PARQUET_MAGIC","exports","PARQUET_MAGIC_ENCRYPTED","PARQUET_VERSION","PARQUET_RDLVL_TYPE","PARQUET_RDLVL_ENCODING"],"sources":["../../src/constants.ts"],"sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n/**\n * Parquet File Magic String\n */\nexport const PARQUET_MAGIC = 'PAR1';\nexport const PARQUET_MAGIC_ENCRYPTED = 'PARE';\n\n/**\n * Parquet File Format Version\n */\nexport const PARQUET_VERSION = 1;\n\n/**\n * Internal type used for repetition/definition levels\n */\nexport const PARQUET_RDLVL_TYPE = 'INT32';\nexport const PARQUET_RDLVL_ENCODING = 'RLE';\n"],"mappings":";;;;;;AAIO,IAAMA,aAAa,GAAG,MAAM;AAACC,OAAA,CAAAD,aAAA,GAAAA,aAAA;AAC7B,IAAME,uBAAuB,GAAG,MAAM;AAACD,OAAA,CAAAC,uBAAA,GAAAA,uBAAA;AAKvC,IAAMC,eAAe,GAAG,CAAC;AAACF,OAAA,CAAAE,eAAA,GAAAA,eAAA;AAK1B,IAAMC,kBAAkB,GAAG,OAAO;AAACH,OAAA,CAAAG,kBAAA,GAAAA,kBAAA;AACnC,IAAMC,sBAAsB,GAAG,KAAK;AAACJ,OAAA,CAAAI,sBAAA,GAAAA,sBAAA"}
package/dist/es5/index.js
CHANGED
@@ -113,7 +113,6 @@ var ParquetLoader = _objectSpread(_objectSpread({}, _parquetLoader.ParquetLoader
   parse: _parseParquetToRows.parseParquet,
   parseFileInBatches: _parseParquetToRows.parseParquetFileInBatches
 });
-
 exports.ParquetLoader = ParquetLoader;
 var ParquetColumnarLoader = _objectSpread(_objectSpread({}, _parquetLoader.ParquetLoader), {}, {
   parse: _parseParquetToColumns.parseParquetInColumns,
@@ -123,9 +122,7 @@ exports.ParquetColumnarLoader = ParquetColumnarLoader;
 var ParquetWasmLoader = _objectSpread(_objectSpread({}, _parquetWasmLoader.ParquetWasmLoader), {}, {
   parse: _parseParquetWasm.parseParquet
 });
-
 exports.ParquetWasmLoader = ParquetWasmLoader;
 var _typecheckParquetLoader = ParquetLoader;
-
 exports._typecheckParquetLoader = _typecheckParquetLoader;
 //# sourceMappingURL=index.js.map
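The hunks above remove only blank lines; the public exports are unchanged, and the `index.ts` source embedded in the source map below shows how `ParquetLoader`, `ParquetColumnarLoader` and `ParquetWasmLoader` are composed from the worker loaders plus their parsers. A hedged usage sketch, assuming the usual `load()` entry point from `@loaders.gl/core` and a `data.parquet` URL of your own:

```ts
import {load} from '@loaders.gl/core';
import {ParquetLoader, ParquetColumnarLoader} from '@loaders.gl/parquet';

// Row-based parsing: ParquetLoader wires parse/parseFileInBatches to the parquetjs row parser.
const rows = await load('data.parquet', ParquetLoader);

// Columnar parsing: same worker loader, but the parse-parquet-to-columns parsers.
const columnarTable = await load('data.parquet', ParquetColumnarLoader);
```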
package/dist/es5/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","names":["ParquetLoader","ParquetWorkerLoader","parse","parseParquet","parseFileInBatches","parseParquetFileInBatches","ParquetColumnarLoader","parseParquetInColumns","parseParquetFileInColumnarBatches","ParquetWasmLoader","ParquetWasmWorkerLoader","parseParquetWasm","_typecheckParquetLoader"],"sources":["../../src/index.ts"],"sourcesContent":["import type {LoaderWithParser} from '@loaders.gl/loader-utils';\n\n// ParquetLoader\n\nimport {ParquetWasmLoader as ParquetWasmWorkerLoader} from './parquet-wasm-loader';\nimport {ParquetLoader as ParquetWorkerLoader} from './parquet-loader';\nimport {parseParquet, parseParquetFileInBatches} from './lib/parsers/parse-parquet-to-rows';\nimport {\n parseParquetInColumns,\n parseParquetFileInColumnarBatches\n} from './lib/parsers/parse-parquet-to-columns';\nimport {parseParquet as parseParquetWasm} from './lib/wasm/parse-parquet-wasm';\n\nexport {ParquetWorkerLoader, ParquetWasmWorkerLoader};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquet,\n parseFileInBatches: parseParquetFileInBatches\n};\n\n/** ParquetJS table loader */\nexport const ParquetColumnarLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquetInColumns,\n parseFileInBatches: parseParquetFileInColumnarBatches\n};\n\nexport const ParquetWasmLoader = {\n ...ParquetWasmWorkerLoader,\n parse: parseParquetWasm\n};\n\n// ParquetWriter\n\nexport {ParquetWriter as _ParquetWriter} from './parquet-writer';\nexport {ParquetWasmWriter} from './parquet-wasm-writer';\n\n// EXPERIMENTAL - expose the internal parquetjs API\n\nexport {preloadCompressions} from './parquetjs/compression';\n\nexport {ParquetSchema} from './parquetjs/schema/schema';\nexport {ParquetReader} from './parquetjs/parser/parquet-reader';\nexport {ParquetEncoder} from './parquetjs/encoder/parquet-encoder';\n\nexport {\n convertSchemaFromParquet,\n convertSchemaFromParquet as convertParquetToArrowSchema\n} from './lib/arrow/convert-schema-from-parquet';\n\n// TESTS\nexport const _typecheckParquetLoader: LoaderWithParser = ParquetLoader;\n\n// Geo Metadata\nexport {default as geoJSONSchema} from './lib/geo/geoparquet-schema';\n\nexport type {GeoMetadata} from './lib/geo/decode-geo-metadata';\nexport {getGeoMetadata, setGeoMetadata, unpackGeoMetadata} from './lib/geo/decode-geo-metadata';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA;AACA;AACA;AACA;AAIA;AAyBA;AACA;AAIA;AAEA;AACA;AACA;AAEA;AASA;AAGA;AAAgG;AAAA;AA3CzF,
+
{"version":3,"file":"index.js","names":["_parquetWasmLoader","require","_parquetLoader","_parseParquetToRows","_parseParquetToColumns","_parseParquetWasm","_parquetWriter","_parquetWasmWriter","_compression","_schema","_parquetReader","_parquetEncoder","_convertSchemaFromParquet","_geoparquetSchema","_interopRequireDefault","_decodeGeoMetadata","ownKeys","object","enumerableOnly","keys","Object","getOwnPropertySymbols","symbols","filter","sym","getOwnPropertyDescriptor","enumerable","push","apply","_objectSpread","target","i","arguments","length","source","forEach","key","_defineProperty2","default","getOwnPropertyDescriptors","defineProperties","defineProperty","ParquetLoader","ParquetWorkerLoader","parse","parseParquet","parseFileInBatches","parseParquetFileInBatches","exports","ParquetColumnarLoader","parseParquetInColumns","parseParquetFileInColumnarBatches","ParquetWasmLoader","ParquetWasmWorkerLoader","parseParquetWasm","_typecheckParquetLoader"],"sources":["../../src/index.ts"],"sourcesContent":["import type {LoaderWithParser} from '@loaders.gl/loader-utils';\n\n// ParquetLoader\n\nimport {ParquetWasmLoader as ParquetWasmWorkerLoader} from './parquet-wasm-loader';\nimport {ParquetLoader as ParquetWorkerLoader} from './parquet-loader';\nimport {parseParquet, parseParquetFileInBatches} from './lib/parsers/parse-parquet-to-rows';\nimport {\n parseParquetInColumns,\n parseParquetFileInColumnarBatches\n} from './lib/parsers/parse-parquet-to-columns';\nimport {parseParquet as parseParquetWasm} from './lib/wasm/parse-parquet-wasm';\n\nexport {ParquetWorkerLoader, ParquetWasmWorkerLoader};\n\n/** ParquetJS table loader */\nexport const ParquetLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquet,\n parseFileInBatches: parseParquetFileInBatches\n};\n\n/** ParquetJS table loader */\nexport const ParquetColumnarLoader = {\n ...ParquetWorkerLoader,\n parse: parseParquetInColumns,\n parseFileInBatches: parseParquetFileInColumnarBatches\n};\n\nexport const ParquetWasmLoader = {\n ...ParquetWasmWorkerLoader,\n parse: parseParquetWasm\n};\n\n// ParquetWriter\n\nexport {ParquetWriter as _ParquetWriter} from './parquet-writer';\nexport {ParquetWasmWriter} from './parquet-wasm-writer';\n\n// EXPERIMENTAL - expose the internal parquetjs API\n\nexport {preloadCompressions} from './parquetjs/compression';\n\nexport {ParquetSchema} from './parquetjs/schema/schema';\nexport {ParquetReader} from './parquetjs/parser/parquet-reader';\nexport {ParquetEncoder} from './parquetjs/encoder/parquet-encoder';\n\nexport {\n convertSchemaFromParquet,\n convertSchemaFromParquet as convertParquetToArrowSchema\n} from './lib/arrow/convert-schema-from-parquet';\n\n// TESTS\nexport const _typecheckParquetLoader: LoaderWithParser = ParquetLoader;\n\n// Geo Metadata\nexport {default as geoJSONSchema} from './lib/geo/geoparquet-schema';\n\nexport type {GeoMetadata} from './lib/geo/decode-geo-metadata';\nexport {getGeoMetadata, setGeoMetadata, unpackGeoMetadata} from 
'./lib/geo/decode-geo-metadata';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA,IAAAA,kBAAA,GAAAC,OAAA;AACA,IAAAC,cAAA,GAAAD,OAAA;AACA,IAAAE,mBAAA,GAAAF,OAAA;AACA,IAAAG,sBAAA,GAAAH,OAAA;AAIA,IAAAI,iBAAA,GAAAJ,OAAA;AAyBA,IAAAK,cAAA,GAAAL,OAAA;AACA,IAAAM,kBAAA,GAAAN,OAAA;AAIA,IAAAO,YAAA,GAAAP,OAAA;AAEA,IAAAQ,OAAA,GAAAR,OAAA;AACA,IAAAS,cAAA,GAAAT,OAAA;AACA,IAAAU,eAAA,GAAAV,OAAA;AAEA,IAAAW,yBAAA,GAAAX,OAAA;AASA,IAAAY,iBAAA,GAAAC,sBAAA,CAAAb,OAAA;AAGA,IAAAc,kBAAA,GAAAd,OAAA;AAAgG,SAAAe,QAAAC,MAAA,EAAAC,cAAA,QAAAC,IAAA,GAAAC,MAAA,CAAAD,IAAA,CAAAF,MAAA,OAAAG,MAAA,CAAAC,qBAAA,QAAAC,OAAA,GAAAF,MAAA,CAAAC,qBAAA,CAAAJ,MAAA,GAAAC,cAAA,KAAAI,OAAA,GAAAA,OAAA,CAAAC,MAAA,WAAAC,GAAA,WAAAJ,MAAA,CAAAK,wBAAA,CAAAR,MAAA,EAAAO,GAAA,EAAAE,UAAA,OAAAP,IAAA,CAAAQ,IAAA,CAAAC,KAAA,CAAAT,IAAA,EAAAG,OAAA,YAAAH,IAAA;AAAA,SAAAU,cAAAC,MAAA,aAAAC,CAAA,MAAAA,CAAA,GAAAC,SAAA,CAAAC,MAAA,EAAAF,CAAA,UAAAG,MAAA,WAAAF,SAAA,CAAAD,CAAA,IAAAC,SAAA,CAAAD,CAAA,QAAAA,CAAA,OAAAf,OAAA,CAAAI,MAAA,CAAAc,MAAA,OAAAC,OAAA,WAAAC,GAAA,QAAAC,gBAAA,CAAAC,OAAA,EAAAR,MAAA,EAAAM,GAAA,EAAAF,MAAA,CAAAE,GAAA,SAAAhB,MAAA,CAAAmB,yBAAA,GAAAnB,MAAA,CAAAoB,gBAAA,CAAAV,MAAA,EAAAV,MAAA,CAAAmB,yBAAA,CAAAL,MAAA,KAAAlB,OAAA,CAAAI,MAAA,CAAAc,MAAA,GAAAC,OAAA,WAAAC,GAAA,IAAAhB,MAAA,CAAAqB,cAAA,CAAAX,MAAA,EAAAM,GAAA,EAAAhB,MAAA,CAAAK,wBAAA,CAAAS,MAAA,EAAAE,GAAA,iBAAAN,MAAA;AA3CzF,IAAMY,aAAa,GAAAb,aAAA,CAAAA,aAAA,KACrBc,4BAAmB;EACtBC,KAAK,EAAEC,gCAAY;EACnBC,kBAAkB,EAAEC;AAAyB,EAC9C;AAACC,OAAA,CAAAN,aAAA,GAAAA,aAAA;AAGK,IAAMO,qBAAqB,GAAApB,aAAA,CAAAA,aAAA,KAC7Bc,4BAAmB;EACtBC,KAAK,EAAEM,4CAAqB;EAC5BJ,kBAAkB,EAAEK;AAAiC,EACtD;AAACH,OAAA,CAAAC,qBAAA,GAAAA,qBAAA;AAEK,IAAMG,iBAAiB,GAAAvB,aAAA,CAAAA,aAAA,KACzBwB,oCAAuB;EAC1BT,KAAK,EAAEU;AAAgB,EACxB;AAACN,OAAA,CAAAI,iBAAA,GAAAA,iBAAA;AAqBK,IAAMG,uBAAyC,GAAGb,aAAa;AAACM,OAAA,CAAAO,uBAAA,GAAAA,uBAAA"}
package/dist/es5/lib/arrow/convert-row-group-to-columns.js
CHANGED

@@ -6,7 +6,6 @@ Object.defineProperty(exports, "__esModule", {
 });
 exports.convertParquetRowGroupToColumns = convertParquetRowGroupToColumns;
 var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
-
 function convertParquetRowGroupToColumns(schema, rowGroup) {
   var columns = {};
   for (var _i = 0, _Object$entries = Object.entries(rowGroup.columnData); _i < _Object$entries.length; _i++) {
package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"convert-row-group-to-columns.js","names":["convertParquetRowGroupToColumns","schema","rowGroup","columns","Object","entries","columnData","columnName","data","values"],"sources":["../../../../src/lib/arrow/convert-row-group-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport {Schema} from '@loaders.gl/schema';\nimport {ParquetBuffer} from '@loaders.gl/parquet/parquetjs/schema/declare';\n\nexport function convertParquetRowGroupToColumns(\n schema: Schema,\n rowGroup: ParquetBuffer\n): Record<string, any[]> {\n const columns: Record<string, any[]> = {};\n for (const [columnName, data] of Object.entries(rowGroup.columnData)) {\n columns[columnName] = columns[columnName] || data.values;\n }\n return columns;\n}\n"],"mappings":"
+
{"version":3,"file":"convert-row-group-to-columns.js","names":["convertParquetRowGroupToColumns","schema","rowGroup","columns","_i","_Object$entries","Object","entries","columnData","length","_Object$entries$_i","_slicedToArray2","default","columnName","data","values"],"sources":["../../../../src/lib/arrow/convert-row-group-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport {Schema} from '@loaders.gl/schema';\nimport {ParquetBuffer} from '@loaders.gl/parquet/parquetjs/schema/declare';\n\nexport function convertParquetRowGroupToColumns(\n schema: Schema,\n rowGroup: ParquetBuffer\n): Record<string, any[]> {\n const columns: Record<string, any[]> = {};\n for (const [columnName, data] of Object.entries(rowGroup.columnData)) {\n columns[columnName] = columns[columnName] || data.values;\n }\n return columns;\n}\n"],"mappings":";;;;;;;;AAKO,SAASA,+BAA+BA,CAC7CC,MAAc,EACdC,QAAuB,EACA;EACvB,IAAMC,OAA8B,GAAG,CAAC,CAAC;EACzC,SAAAC,EAAA,MAAAC,eAAA,GAAiCC,MAAM,CAACC,OAAO,CAACL,QAAQ,CAACM,UAAU,CAAC,EAAAJ,EAAA,GAAAC,eAAA,CAAAI,MAAA,EAAAL,EAAA,IAAE;IAAjE,IAAAM,kBAAA,OAAAC,eAAA,CAAAC,OAAA,EAAAP,eAAA,CAAAD,EAAA;MAAOS,UAAU,GAAAH,kBAAA;MAAEI,IAAI,GAAAJ,kBAAA;IAC1BP,OAAO,CAACU,UAAU,CAAC,GAAGV,OAAO,CAACU,UAAU,CAAC,IAAIC,IAAI,CAACC,MAAM;EAC1D;EACA,OAAOZ,OAAO;AAChB"}
package/dist/es5/lib/arrow/convert-schema-from-parquet.js
CHANGED

@@ -8,7 +8,7 @@ exports.convertSchemaFromParquet = convertSchemaFromParquet;
 var _schema = require("@loaders.gl/schema");
 function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
 function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++)
+function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
 var PARQUET_TYPE_MAPPING = {
   BOOLEAN: _schema.Bool,
   INT32: _schema.Int32,
package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"convert-schema-from-parquet.js","names":["PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertSchemaFromParquet","parquetSchema","parquetMetadata","fields","getFields","schema","metadata","getSchemaMetadata","Schema","
+
{"version":3,"file":"convert-schema-from-parquet.js","names":["_schema","require","_createForOfIteratorHelper","o","allowArrayLike","it","Symbol","iterator","Array","isArray","_unsupportedIterableToArray","length","i","F","s","n","done","value","e","_e","f","TypeError","normalCompletion","didErr","err","call","step","next","_e2","return","minLen","_arrayLikeToArray","Object","prototype","toString","slice","constructor","name","from","test","arr","len","arr2","PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","exports","convertSchemaFromParquet","parquetSchema","parquetMetadata","fields","getFields","schema","metadata","getSchemaMetadata","Schema","field","childFields","nestedField","Field","Struct","optional","push","FieldType","type","getFieldMetadata","arrowField","Map","key","stringify","set","keyValueList","key_value_metadata","_iterator","_step","_step$value"],"sources":["../../../../src/lib/arrow/convert-schema-from-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\nimport type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {FieldDefinition, ParquetField, ParquetType} from '../../parquetjs/schema/declare';\nimport {FileMetaData} from '@loaders.gl/parquet/parquetjs/parquet-thrift';\n\nimport {\n Schema,\n Struct,\n Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interval type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertSchemaFromParquet(\n parquetSchema: ParquetSchema,\n parquetMetadata?: FileMetaData\n): Schema {\n const fields = getFields(parquetSchema.schema);\n const metadata = parquetMetadata && getSchemaMetadata(parquetMetadata);\n return new Schema(fields, metadata);\n}\n\nfunction getFields(schema: FieldDefinition): Field[] {\n const fields: Field[] = [];\n\n for (const name in schema) {\n const field = schema[name];\n\n if (field.fields) {\n const childFields = getFields(field.fields);\n const nestedField = new Field(name, new Struct(childFields), field.optional);\n fields.push(nestedField);\n } else {\n const FieldType = PARQUET_TYPE_MAPPING[field.type];\n const metadata = getFieldMetadata(field);\n const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n fields.push(arrowField);\n }\n }\n\n return fields;\n}\n\nfunction getFieldMetadata(field: ParquetField): Map<string, string> {\n const metadata = new Map();\n\n for 
(const key in field) {\n if (key !== 'name') {\n let value = field[key] || '';\n value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n\nfunction getSchemaMetadata(parquetMetadata: FileMetaData): Map<string, string> {\n const metadata = new Map();\n\n const keyValueList = parquetMetadata.key_value_metadata || [];\n for (const {key, value} of keyValueList) {\n if (typeof value === 'string') {\n metadata.set(key, value);\n }\n }\n\n return metadata;\n}\n"],"mappings":";;;;;;;AAMA,IAAAA,OAAA,GAAAC,OAAA;AAiB4B,SAAAC,2BAAAC,CAAA,EAAAC,cAAA,QAAAC,EAAA,UAAAC,MAAA,oBAAAH,CAAA,CAAAG,MAAA,CAAAC,QAAA,KAAAJ,CAAA,qBAAAE,EAAA,QAAAG,KAAA,CAAAC,OAAA,CAAAN,CAAA,MAAAE,EAAA,GAAAK,2BAAA,CAAAP,CAAA,MAAAC,cAAA,IAAAD,CAAA,WAAAA,CAAA,CAAAQ,MAAA,qBAAAN,EAAA,EAAAF,CAAA,GAAAE,EAAA,MAAAO,CAAA,UAAAC,CAAA,YAAAA,EAAA,eAAAC,CAAA,EAAAD,CAAA,EAAAE,CAAA,WAAAA,EAAA,QAAAH,CAAA,IAAAT,CAAA,CAAAQ,MAAA,WAAAK,IAAA,mBAAAA,IAAA,SAAAC,KAAA,EAAAd,CAAA,CAAAS,CAAA,UAAAM,CAAA,WAAAA,EAAAC,EAAA,UAAAA,EAAA,KAAAC,CAAA,EAAAP,CAAA,gBAAAQ,SAAA,iJAAAC,gBAAA,SAAAC,MAAA,UAAAC,GAAA,WAAAV,CAAA,WAAAA,EAAA,IAAAT,EAAA,GAAAA,EAAA,CAAAoB,IAAA,CAAAtB,CAAA,MAAAY,CAAA,WAAAA,EAAA,QAAAW,IAAA,GAAArB,EAAA,CAAAsB,IAAA,IAAAL,gBAAA,GAAAI,IAAA,CAAAV,IAAA,SAAAU,IAAA,KAAAR,CAAA,WAAAA,EAAAU,GAAA,IAAAL,MAAA,SAAAC,GAAA,GAAAI,GAAA,KAAAR,CAAA,WAAAA,EAAA,eAAAE,gBAAA,IAAAjB,EAAA,CAAAwB,MAAA,UAAAxB,EAAA,CAAAwB,MAAA,oBAAAN,MAAA,QAAAC,GAAA;AAAA,SAAAd,4BAAAP,CAAA,EAAA2B,MAAA,SAAA3B,CAAA,qBAAAA,CAAA,sBAAA4B,iBAAA,CAAA5B,CAAA,EAAA2B,MAAA,OAAAf,CAAA,GAAAiB,MAAA,CAAAC,SAAA,CAAAC,QAAA,CAAAT,IAAA,CAAAtB,CAAA,EAAAgC,KAAA,aAAApB,CAAA,iBAAAZ,CAAA,CAAAiC,WAAA,EAAArB,CAAA,GAAAZ,CAAA,CAAAiC,WAAA,CAAAC,IAAA,MAAAtB,CAAA,cAAAA,CAAA,mBAAAP,KAAA,CAAA8B,IAAA,CAAAnC,CAAA,OAAAY,CAAA,+DAAAwB,IAAA,CAAAxB,CAAA,UAAAgB,iBAAA,CAAA5B,CAAA,EAAA2B,MAAA;AAAA,SAAAC,kBAAAS,GAAA,EAAAC,GAAA,QAAAA,GAAA,YAAAA,GAAA,GAAAD,GAAA,CAAA7B,MAAA,EAAA8B,GAAA,GAAAD,GAAA,CAAA7B,MAAA,WAAAC,CAAA,MAAA8B,IAAA,OAAAlC,KAAA,CAAAiC,GAAA,GAAA7B,CAAA,GAAA6B,GAAA,EAAA7B,CAAA,IAAA8B,IAAA,CAAA9B,CAAA,IAAA4B,GAAA,CAAA5B,CAAA,UAAA8B,IAAA;AAErB,IAAMC,oBAA8D,GAAG;EAC5EC,OAAO,EAAEC,YAAI;EACbC,KAAK,EAAEC,aAAK;EACZC,KAAK,EAAEC,eAAO;EACdC,KAAK,EAAED,eAAO;EACdE,KAAK,EAAEC,eAAO;EACdC,MAAM,EAAEJ,eAAO;EACfK,UAAU,EAAEC,cAAM;EAClBC,oBAAoB,EAAED,cAAM;EAC5BE,IAAI,EAAEC,YAAI;EACVC,IAAI,EAAEZ,aAAK;EACXa,WAAW,EAAEC,aAAK;EAClBC,WAAW,EAAED,aAAK;EAClBE,gBAAgB,EAAEF,aAAK;EACvBG,gBAAgB,EAAEH,aAAK;EACvBI,MAAM,EAAElB,aAAK;EACbmB,OAAO,EAAEC,cAAM;EACfC,OAAO,EAAEC,cAAM;EACfC,OAAO,EAAEC,cAAM;EACfC,KAAK,EAAEC,YAAI;EACXC,MAAM,EAAEC,aAAK;EACbC,MAAM,EAAE7B,aAAK;EACb8B,MAAM,EAAEhB,aAAK;EACbiB,IAAI,EAAEvB,cAAM;EACZwB,IAAI,EAAExB,cAAM;EAEZyB,QAAQ,EAAEzB,cAAM;EAChB0B,aAAa,EAAE7B,eAAO;EACtB8B,aAAa,EAAEjC,eAAO;EACtBkC,kBAAkB,EAAElC,eAAO;EAC3BmC,4BAA4B,EAAEnC;AAChC,CAAC;AAACoC,OAAA,CAAA1C,oBAAA,GAAAA,oBAAA;AAEK,SAAS2C,wBAAwBA,CACtCC,aAA4B,EAC5BC,eAA8B,EACtB;EACR,IAAMC,MAAM,GAAGC,SAAS,CAACH,aAAa,CAACI,MAAM,CAAC;EAC9C,IAAMC,QAAQ,GAAGJ,eAAe,IAAIK,iBAAiB,CAACL,eAAe,CAAC;EACtE,OAAO,IAAIM,cAAM,CAACL,MAAM,EAAEG,QAAQ,CAAC;AACrC;AAEA,SAASF,SAASA,CAACC,MAAuB,EAAW;EACnD,IAAMF,MAAe,GAAG,EAAE;EAE1B,KAAK,IAAMpD,IAAI,IAAIsD,MAAM,EAAE;IACzB,IAAMI,KAAK,GAAGJ,MAAM,CAACtD,IAAI,CAAC;IAE1B,IAAI0D,KAAK,CAACN,MAAM,EAAE;MAChB,IAAMO,WAAW,GAAGN,SAAS,CAACK,KAAK,CAACN,MAAM,CAAC;MAC3C,IAAMQ,WAAW,GAAG,IAAIC,aAAK,CAAC7D,IAAI,EAAE,IAAI8D,cAAM,CAACH,WAAW,CAAC,EAAED,KAAK,CAACK,QAAQ,CAAC;MAC5EX,MAAM,CAACY,IAAI,CAACJ,WAAW,CAAC;IAC1B,CAAC,MAAM;MACL,IAAMK,SAAS,GAAG3D,oBAAoB,CAACoD,KAAK,CAACQ,IAAI,CAAC;MAClD,IAAMX,QAAQ,GAAGY,gBAA
gB,CAACT,KAAK,CAAC;MACxC,IAAMU,UAAU,GAAG,IAAIP,aAAK,CAAC7D,IAAI,EAAE,IAAIiE,SAAS,EAAE,EAAEP,KAAK,CAACK,QAAQ,EAAER,QAAQ,CAAC;MAC7EH,MAAM,CAACY,IAAI,CAACI,UAAU,CAAC;IACzB;EACF;EAEA,OAAOhB,MAAM;AACf;AAEA,SAASe,gBAAgBA,CAACT,KAAmB,EAAuB;EAClE,IAAMH,QAAQ,GAAG,IAAIc,GAAG,EAAE;EAE1B,KAAK,IAAMC,GAAG,IAAIZ,KAAK,EAAE;IACvB,IAAIY,GAAG,KAAK,MAAM,EAAE;MAClB,IAAI1F,KAAK,GAAG8E,KAAK,CAACY,GAAG,CAAC,IAAI,EAAE;MAC5B1F,KAAK,GAAG,OAAO8E,KAAK,CAACY,GAAG,CAAC,KAAK,QAAQ,GAAG7B,IAAI,CAAC8B,SAAS,CAACb,KAAK,CAACY,GAAG,CAAC,CAAC,GAAGZ,KAAK,CAACY,GAAG,CAAC;MAChFf,QAAQ,CAACiB,GAAG,CAACF,GAAG,EAAE1F,KAAK,CAAC;IAC1B;EACF;EAEA,OAAO2E,QAAQ;AACjB;AAEA,SAASC,iBAAiBA,CAACL,eAA6B,EAAuB;EAC7E,IAAMI,QAAQ,GAAG,IAAIc,GAAG,EAAE;EAE1B,IAAMI,YAAY,GAAGtB,eAAe,CAACuB,kBAAkB,IAAI,EAAE;EAAC,IAAAC,SAAA,GAAA9G,0BAAA,CACnC4G,YAAY;IAAAG,KAAA;EAAA;IAAvC,KAAAD,SAAA,CAAAlG,CAAA,MAAAmG,KAAA,GAAAD,SAAA,CAAAjG,CAAA,IAAAC,IAAA,GAAyC;MAAA,IAAAkG,WAAA,GAAAD,KAAA,CAAAhG,KAAA;QAA7B0F,GAAG,GAAAO,WAAA,CAAHP,GAAG;QAAE1F,KAAK,GAAAiG,WAAA,CAALjG,KAAK;MACpB,IAAI,OAAOA,KAAK,KAAK,QAAQ,EAAE;QAC7B2E,QAAQ,CAACiB,GAAG,CAACF,GAAG,EAAE1F,KAAK,CAAC;MAC1B;IACF;EAAC,SAAAO,GAAA;IAAAwF,SAAA,CAAA9F,CAAA,CAAAM,GAAA;EAAA;IAAAwF,SAAA,CAAA5F,CAAA;EAAA;EAED,OAAOwE,QAAQ;AACjB"}
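The `sourcesContent` above embeds the full `convert-schema-from-parquet.ts`: Parquet types are translated to `@loaders.gl/schema` datatypes via the `PARQUET_TYPE_MAPPING` lookup and wrapped into Arrow-style `Field`/`Schema` objects. A condensed sketch of that lookup pattern, abridged to a handful of types (not the full table):

```ts
import {Schema, Field, Bool, Int32, Float64, Utf8, DataType} from '@loaders.gl/schema';

// Abridged version of the PARQUET_TYPE_MAPPING table shown in the embedded source.
const TYPE_MAPPING: Record<string, typeof DataType> = {
  BOOLEAN: Bool,
  INT32: Int32,
  DOUBLE: Float64,
  UTF8: Utf8
};

// Hypothetical helper mirroring the getFields() logic in the embedded source.
function fieldFromParquet(name: string, parquetType: string, optional?: boolean): Field {
  const FieldType = TYPE_MAPPING[parquetType];
  return new Field(name, new FieldType(), optional);
}

const schema = new Schema([fieldFromParquet('count', 'INT32', true)]);
```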
package/dist/es5/lib/arrow/convert-schema-to-parquet.js
CHANGED

@@ -6,7 +6,6 @@ Object.defineProperty(exports, "__esModule", {
 exports.PARQUET_TYPE_MAPPING = void 0;
 exports.convertToParquetSchema = convertToParquetSchema;
 var _schema = require("@loaders.gl/schema");
-
 var PARQUET_TYPE_MAPPING = {
   BOOLEAN: _schema.Bool,
   INT32: _schema.Int32,
@@ -41,7 +40,6 @@ var PARQUET_TYPE_MAPPING = {
 exports.PARQUET_TYPE_MAPPING = PARQUET_TYPE_MAPPING;
 function convertToParquetSchema(schema) {
   var fields = [];
-
   return new _schema.Schema(fields);
 }
 //# sourceMappingURL=convert-schema-to-parquet.js.map
package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"convert-schema-to-parquet.js","names":["PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","convertToParquetSchema","schema","fields","Schema"],"sources":["../../../../src/lib/arrow/convert-schema-to-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {\n // FieldDefinition, ParquetField,\n ParquetType\n} from '../../parquetjs/schema/declare';\n\nimport {\n Schema,\n // Struct,\n // Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interval type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertToParquetSchema(schema: Schema): Schema {\n const fields = []; // getFields(schema.fields);\n\n // TODO add metadata if needed.\n return new Schema(fields);\n}\n\n// function getFields(schema: Field[]): Definition[] {\n// const fields: Field[] = [];\n\n// for (const name in schema) {\n// const field = schema[name];\n\n// // @ts-ignore\n// const children = field.children as DataType[];\n// if (children) {\n// const childField = getFields(field.fields);\n// const nestedField = new Field(name, new Struct(childField), field.optional);\n// fields.push(nestedField);\n// } else {\n// const FieldType = PARQUET_TYPE_MAPPING[field.type];\n// const metadata = getFieldMetadata(field);\n// const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n// fields.push(arrowField);\n// }\n// }\n\n// return fields;\n// }\n\n// function getFieldMetadata(field: ParquetField): Map<string, string> {\n// const metadata = new Map();\n\n// for (const key in field) {\n// if (key !== 'name') {\n// const value = typeof field[key] !== 'string' ? JSON.stringify(field[key]) : field[key];\n// metadata.set(key, value);\n// }\n// }\n\n// return metadata;\n// }\n"],"mappings":";;;;;;;AAQA
+
{"version":3,"file":"convert-schema-to-parquet.js","names":["_schema","require","PARQUET_TYPE_MAPPING","BOOLEAN","Bool","INT32","Int32","INT64","Float64","INT96","FLOAT","Float32","DOUBLE","BYTE_ARRAY","Binary","FIXED_LEN_BYTE_ARRAY","UTF8","Utf8","DATE","TIME_MILLIS","Int64","TIME_MICROS","TIMESTAMP_MILLIS","TIMESTAMP_MICROS","UINT_8","UINT_16","Uint16","UINT_32","Uint32","UINT_64","Uint64","INT_8","Int8","INT_16","Int16","INT_32","INT_64","JSON","BSON","INTERVAL","DECIMAL_INT32","DECIMAL_INT64","DECIMAL_BYTE_ARRAY","DECIMAL_FIXED_LEN_BYTE_ARRAY","exports","convertToParquetSchema","schema","fields","Schema"],"sources":["../../../../src/lib/arrow/convert-schema-to-parquet.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {ParquetSchema} from '../../parquetjs/schema/schema';\nimport type {\n // FieldDefinition, ParquetField,\n ParquetType\n} from '../../parquetjs/schema/declare';\n\nimport {\n Schema,\n // Struct,\n // Field,\n DataType,\n Bool,\n Float64,\n Int32,\n Float32,\n Binary,\n Utf8,\n Int64,\n Uint16,\n Uint32,\n Uint64,\n Int8,\n Int16\n} from '@loaders.gl/schema';\n\nexport const PARQUET_TYPE_MAPPING: {[type in ParquetType]: typeof DataType} = {\n BOOLEAN: Bool,\n INT32: Int32,\n INT64: Float64,\n INT96: Float64,\n FLOAT: Float32,\n DOUBLE: Float64,\n BYTE_ARRAY: Binary,\n FIXED_LEN_BYTE_ARRAY: Binary,\n UTF8: Utf8,\n DATE: Int32,\n TIME_MILLIS: Int64,\n TIME_MICROS: Int64,\n TIMESTAMP_MILLIS: Int64,\n TIMESTAMP_MICROS: Int64,\n UINT_8: Int32,\n UINT_16: Uint16,\n UINT_32: Uint32,\n UINT_64: Uint64,\n INT_8: Int8,\n INT_16: Int16,\n INT_32: Int32,\n INT_64: Int64,\n JSON: Binary,\n BSON: Binary,\n // TODO check interval type\n INTERVAL: Binary,\n DECIMAL_INT32: Float32,\n DECIMAL_INT64: Float64,\n DECIMAL_BYTE_ARRAY: Float64,\n DECIMAL_FIXED_LEN_BYTE_ARRAY: Float64\n};\n\nexport function convertToParquetSchema(schema: Schema): Schema {\n const fields = []; // getFields(schema.fields);\n\n // TODO add metadata if needed.\n return new Schema(fields);\n}\n\n// function getFields(schema: Field[]): Definition[] {\n// const fields: Field[] = [];\n\n// for (const name in schema) {\n// const field = schema[name];\n\n// // @ts-ignore\n// const children = field.children as DataType[];\n// if (children) {\n// const childField = getFields(field.fields);\n// const nestedField = new Field(name, new Struct(childField), field.optional);\n// fields.push(nestedField);\n// } else {\n// const FieldType = PARQUET_TYPE_MAPPING[field.type];\n// const metadata = getFieldMetadata(field);\n// const arrowField = new Field(name, new FieldType(), field.optional, metadata);\n// fields.push(arrowField);\n// }\n// }\n\n// return fields;\n// }\n\n// function getFieldMetadata(field: ParquetField): Map<string, string> {\n// const metadata = new Map();\n\n// for (const key in field) {\n// if (key !== 'name') {\n// const value = typeof field[key] !== 'string' ? 
JSON.stringify(field[key]) : field[key];\n// metadata.set(key, value);\n// }\n// }\n\n// return metadata;\n// }\n"],"mappings":";;;;;;;AAQA,IAAAA,OAAA,GAAAC,OAAA;AAmBO,IAAMC,oBAA8D,GAAG;EAC5EC,OAAO,EAAEC,YAAI;EACbC,KAAK,EAAEC,aAAK;EACZC,KAAK,EAAEC,eAAO;EACdC,KAAK,EAAED,eAAO;EACdE,KAAK,EAAEC,eAAO;EACdC,MAAM,EAAEJ,eAAO;EACfK,UAAU,EAAEC,cAAM;EAClBC,oBAAoB,EAAED,cAAM;EAC5BE,IAAI,EAAEC,YAAI;EACVC,IAAI,EAAEZ,aAAK;EACXa,WAAW,EAAEC,aAAK;EAClBC,WAAW,EAAED,aAAK;EAClBE,gBAAgB,EAAEF,aAAK;EACvBG,gBAAgB,EAAEH,aAAK;EACvBI,MAAM,EAAElB,aAAK;EACbmB,OAAO,EAAEC,cAAM;EACfC,OAAO,EAAEC,cAAM;EACfC,OAAO,EAAEC,cAAM;EACfC,KAAK,EAAEC,YAAI;EACXC,MAAM,EAAEC,aAAK;EACbC,MAAM,EAAE7B,aAAK;EACb8B,MAAM,EAAEhB,aAAK;EACbiB,IAAI,EAAEvB,cAAM;EACZwB,IAAI,EAAExB,cAAM;EAEZyB,QAAQ,EAAEzB,cAAM;EAChB0B,aAAa,EAAE7B,eAAO;EACtB8B,aAAa,EAAEjC,eAAO;EACtBkC,kBAAkB,EAAElC,eAAO;EAC3BmC,4BAA4B,EAAEnC;AAChC,CAAC;AAACoC,OAAA,CAAA1C,oBAAA,GAAAA,oBAAA;AAEK,SAAS2C,sBAAsBA,CAACC,MAAc,EAAU;EAC7D,IAAMC,MAAM,GAAG,EAAE;EAGjB,OAAO,IAAIC,cAAM,CAACD,MAAM,CAAC;AAC3B"}
package/dist/es5/lib/geo/decode-geo-metadata.js
CHANGED

@@ -8,7 +8,6 @@ exports.getGeoMetadata = getGeoMetadata;
 exports.setGeoMetadata = setGeoMetadata;
 exports.unpackGeoMetadata = unpackGeoMetadata;
 var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
-
 function getGeoMetadata(schema) {
   var stringifiedGeoMetadata = schema.metadata.get('geo');
   if (!stringifiedGeoMetadata) {
@@ -21,18 +20,15 @@
     return null;
   }
 }
-
 function setGeoMetadata(schema, geoMetadata) {
   var stringifiedGeoMetadata = JSON.stringify(geoMetadata);
   schema.metadata.set('geo', stringifiedGeoMetadata);
 }
-
 function unpackGeoMetadata(schema) {
   var geoMetadata = getGeoMetadata(schema);
   if (!geoMetadata) {
     return;
   }
-
   var version = geoMetadata.version,
     primary_column = geoMetadata.primary_column,
     columns = geoMetadata.columns;
@@ -42,7 +38,6 @@
   if (primary_column) {
     schema.metadata.set('geo.primary_column', primary_column);
   }
-
   schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));
   var _loop = function _loop() {
     var _Object$entries$_i = (0, _slicedToArray2.default)(_Object$entries[_i], 2),
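Once more, only blank lines are removed; the `decode-geo-metadata.ts` source embedded in the map below documents the behavior: `getGeoMetadata` parses the stringified `geo` metadata key, and `unpackGeoMetadata` flattens it into `geo.version`, `geo.primary_column`, `geo.columns` and per-field `geo.*` entries. A hedged usage sketch, assuming `schema` is a `Schema` produced by this package's parsers for a GeoParquet file:

```ts
import {getGeoMetadata, unpackGeoMetadata} from '@loaders.gl/parquet';
import type {Schema} from '@loaders.gl/schema';

function logGeoColumns(schema: Schema): void {
  const geoMetadata = getGeoMetadata(schema); // parsed 'geo' JSON, or null if absent
  if (!geoMetadata) {
    return;
  }
  unpackGeoMetadata(schema); // copies values into flat 'geo.*' metadata keys
  console.log(schema.metadata.get('geo.primary_column'));
  console.log(schema.metadata.get('geo.columns'));
}
```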
package/dist/es5/lib/geo/decode-geo-metadata.js.map
CHANGED

@@ -1 +1 @@
-
{"version":3,"file":"decode-geo-metadata.js","names":["getGeoMetadata","schema","stringifiedGeoMetadata","metadata","get","geoMetadata","JSON","parse","setGeoMetadata","stringify","set","unpackGeoMetadata","version","primary_column","columns","Object","keys","join","columnName","columnMetadata","field","fields","find","name","unpackGeoFieldMetadata","entries","key","value"],"sources":["../../../../src/lib/geo/decode-geo-metadata.ts"],"sourcesContent":["// loaders.gl, MIT license\nimport {Schema, Field} from '@loaders.gl/schema';\n\n/* eslint-disable camelcase */\n\n/** A geoarrow / geoparquet geo metadata object (stored in stringified form in the top level metadata 'geo' key) */\nexport type GeoMetadata = {\n version?: string;\n primary_column?: string;\n columns: Record<string, GeoColumnMetadata>;\n [key: string]: unknown;\n};\n\n/** A geoarrow / geoparquet geo metadata for one geometry column */\nexport type GeoColumnMetadata = {\n bounding_box?:\n | [number, number, number, number]\n | [number, number, number, number, number, number];\n crs?: string;\n geometry_type?: string[];\n edges?: string;\n [key: string]: unknown;\n};\n\n/**\n * Reads the GeoMetadata object from the metadata\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata */\nexport function getGeoMetadata(schema: Schema): GeoMetadata | null {\n const stringifiedGeoMetadata = schema.metadata.get('geo');\n if (!stringifiedGeoMetadata) {\n return null;\n }\n\n try {\n const geoMetadata = JSON.parse(stringifiedGeoMetadata) as GeoMetadata;\n return geoMetadata;\n } catch {\n return null;\n }\n}\n\n/**\n * Stores a geoarrow / geoparquet geo metadata object in the schema\n * @note geoarrow / geoparquet geo metadata is a single stringified JSON field\n */\nexport function setGeoMetadata(schema: Schema, geoMetadata: GeoMetadata): void {\n const stringifiedGeoMetadata = JSON.stringify(geoMetadata);\n schema.metadata.set('geo', stringifiedGeoMetadata);\n}\n\n/**\n * Unpacks geo metadata into separate metadata fields (parses the long JSON string)\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata\n */\nexport function unpackGeoMetadata(schema: Schema): void {\n const geoMetadata = getGeoMetadata(schema);\n if (!geoMetadata) {\n return;\n }\n\n // Store Parquet Schema Level Metadata\n\n const {version, primary_column, columns} = geoMetadata;\n if (version) {\n schema.metadata.set('geo.version', version);\n }\n\n if (primary_column) {\n schema.metadata.set('geo.primary_column', primary_column);\n }\n\n // store column names as comma separated list\n schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));\n\n for (const [columnName, columnMetadata] of Object.entries(columns || {})) {\n const field = schema.fields.find((field) => field.name === columnName);\n if (field) {\n if (field.name === primary_column) {\n field.metadata.set('geo.primary_field', 'true');\n }\n unpackGeoFieldMetadata(field, columnMetadata);\n }\n }\n}\n\nfunction unpackGeoFieldMetadata(field: Field, columnMetadata): void {\n for (const [key, value] of Object.entries(columnMetadata || {})) {\n switch (key) {\n case 'geometry_type':\n field.metadata.set(`geo.${key}`, (value as string[]).join(','));\n break;\n case 'bbox':\n case 'crs':\n case 'edges':\n default:\n field.metadata.set(`geo.${key}`, typeof value === 'string' ? value : JSON.stringify(value));\n }\n }\n}\n"],"mappings":"
+
{"version":3,"file":"decode-geo-metadata.js","names":["getGeoMetadata","schema","stringifiedGeoMetadata","metadata","get","geoMetadata","JSON","parse","_unused","setGeoMetadata","stringify","set","unpackGeoMetadata","version","primary_column","columns","Object","keys","join","_loop","_Object$entries$_i","_slicedToArray2","default","_Object$entries","_i","columnName","columnMetadata","field","fields","find","name","unpackGeoFieldMetadata","entries","length","_i2","_Object$entries2","_Object$entries2$_i","key","value","concat"],"sources":["../../../../src/lib/geo/decode-geo-metadata.ts"],"sourcesContent":["// loaders.gl, MIT license\nimport {Schema, Field} from '@loaders.gl/schema';\n\n/* eslint-disable camelcase */\n\n/** A geoarrow / geoparquet geo metadata object (stored in stringified form in the top level metadata 'geo' key) */\nexport type GeoMetadata = {\n version?: string;\n primary_column?: string;\n columns: Record<string, GeoColumnMetadata>;\n [key: string]: unknown;\n};\n\n/** A geoarrow / geoparquet geo metadata for one geometry column */\nexport type GeoColumnMetadata = {\n bounding_box?:\n | [number, number, number, number]\n | [number, number, number, number, number, number];\n crs?: string;\n geometry_type?: string[];\n edges?: string;\n [key: string]: unknown;\n};\n\n/**\n * Reads the GeoMetadata object from the metadata\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata */\nexport function getGeoMetadata(schema: Schema): GeoMetadata | null {\n const stringifiedGeoMetadata = schema.metadata.get('geo');\n if (!stringifiedGeoMetadata) {\n return null;\n }\n\n try {\n const geoMetadata = JSON.parse(stringifiedGeoMetadata) as GeoMetadata;\n return geoMetadata;\n } catch {\n return null;\n }\n}\n\n/**\n * Stores a geoarrow / geoparquet geo metadata object in the schema\n * @note geoarrow / geoparquet geo metadata is a single stringified JSON field\n */\nexport function setGeoMetadata(schema: Schema, geoMetadata: GeoMetadata): void {\n const stringifiedGeoMetadata = JSON.stringify(geoMetadata);\n schema.metadata.set('geo', stringifiedGeoMetadata);\n}\n\n/**\n * Unpacks geo metadata into separate metadata fields (parses the long JSON string)\n * @note geoarrow / parquet schema is stringified into a single key-value pair in the parquet metadata\n */\nexport function unpackGeoMetadata(schema: Schema): void {\n const geoMetadata = getGeoMetadata(schema);\n if (!geoMetadata) {\n return;\n }\n\n // Store Parquet Schema Level Metadata\n\n const {version, primary_column, columns} = geoMetadata;\n if (version) {\n schema.metadata.set('geo.version', version);\n }\n\n if (primary_column) {\n schema.metadata.set('geo.primary_column', primary_column);\n }\n\n // store column names as comma separated list\n schema.metadata.set('geo.columns', Object.keys(columns || {}).join(''));\n\n for (const [columnName, columnMetadata] of Object.entries(columns || {})) {\n const field = schema.fields.find((field) => field.name === columnName);\n if (field) {\n if (field.name === primary_column) {\n field.metadata.set('geo.primary_field', 'true');\n }\n unpackGeoFieldMetadata(field, columnMetadata);\n }\n }\n}\n\nfunction unpackGeoFieldMetadata(field: Field, columnMetadata): void {\n for (const [key, value] of Object.entries(columnMetadata || {})) {\n switch (key) {\n case 'geometry_type':\n field.metadata.set(`geo.${key}`, (value as string[]).join(','));\n break;\n case 'bbox':\n case 'crs':\n case 'edges':\n default:\n field.metadata.set(`geo.${key}`, 
typeof value === 'string' ? value : JSON.stringify(value));\n }\n }\n}\n"],"mappings":";;;;;;;;;;AA2BO,SAASA,cAAcA,CAACC,MAAc,EAAsB;EACjE,IAAMC,sBAAsB,GAAGD,MAAM,CAACE,QAAQ,CAACC,GAAG,CAAC,KAAK,CAAC;EACzD,IAAI,CAACF,sBAAsB,EAAE;IAC3B,OAAO,IAAI;EACb;EAEA,IAAI;IACF,IAAMG,WAAW,GAAGC,IAAI,CAACC,KAAK,CAACL,sBAAsB,CAAgB;IACrE,OAAOG,WAAW;EACpB,CAAC,CAAC,OAAAG,OAAA,EAAM;IACN,OAAO,IAAI;EACb;AACF;AAMO,SAASC,cAAcA,CAACR,MAAc,EAAEI,WAAwB,EAAQ;EAC7E,IAAMH,sBAAsB,GAAGI,IAAI,CAACI,SAAS,CAACL,WAAW,CAAC;EAC1DJ,MAAM,CAACE,QAAQ,CAACQ,GAAG,CAAC,KAAK,EAAET,sBAAsB,CAAC;AACpD;AAMO,SAASU,iBAAiBA,CAACX,MAAc,EAAQ;EACtD,IAAMI,WAAW,GAAGL,cAAc,CAACC,MAAM,CAAC;EAC1C,IAAI,CAACI,WAAW,EAAE;IAChB;EACF;EAIA,IAAOQ,OAAO,GAA6BR,WAAW,CAA/CQ,OAAO;IAAEC,cAAc,GAAaT,WAAW,CAAtCS,cAAc;IAAEC,OAAO,GAAIV,WAAW,CAAtBU,OAAO;EACvC,IAAIF,OAAO,EAAE;IACXZ,MAAM,CAACE,QAAQ,CAACQ,GAAG,CAAC,aAAa,EAAEE,OAAO,CAAC;EAC7C;EAEA,IAAIC,cAAc,EAAE;IAClBb,MAAM,CAACE,QAAQ,CAACQ,GAAG,CAAC,oBAAoB,EAAEG,cAAc,CAAC;EAC3D;EAGAb,MAAM,CAACE,QAAQ,CAACQ,GAAG,CAAC,aAAa,EAAEK,MAAM,CAACC,IAAI,CAACF,OAAO,IAAI,CAAC,CAAC,CAAC,CAACG,IAAI,CAAC,EAAE,CAAC,CAAC;EAAC,IAAAC,KAAA,YAAAA,MAAA,EAEE;IAArE,IAAAC,kBAAA,OAAAC,eAAA,CAAAC,OAAA,EAAAC,eAAA,CAAAC,EAAA;MAAOC,UAAU,GAAAL,kBAAA;MAAEM,cAAc,GAAAN,kBAAA;IACpC,IAAMO,KAAK,GAAG1B,MAAM,CAAC2B,MAAM,CAACC,IAAI,CAAC,UAACF,KAAK;MAAA,OAAKA,KAAK,CAACG,IAAI,KAAKL,UAAU;IAAA,EAAC;IACtE,IAAIE,KAAK,EAAE;MACT,IAAIA,KAAK,CAACG,IAAI,KAAKhB,cAAc,EAAE;QACjCa,KAAK,CAACxB,QAAQ,CAACQ,GAAG,CAAC,mBAAmB,EAAE,MAAM,CAAC;MACjD;MACAoB,sBAAsB,CAACJ,KAAK,EAAED,cAAc,CAAC;IAC/C;EACF,CAAC;EARD,SAAAF,EAAA,MAAAD,eAAA,GAA2CP,MAAM,CAACgB,OAAO,CAACjB,OAAO,IAAI,CAAC,CAAC,CAAC,EAAAS,EAAA,GAAAD,eAAA,CAAAU,MAAA,EAAAT,EAAA;IAAAL,KAAA;EAAA;AAS1E;AAEA,SAASY,sBAAsBA,CAACJ,KAAY,EAAED,cAAc,EAAQ;EAClE,SAAAQ,GAAA,MAAAC,gBAAA,GAA2BnB,MAAM,CAACgB,OAAO,CAACN,cAAc,IAAI,CAAC,CAAC,CAAC,EAAAQ,GAAA,GAAAC,gBAAA,CAAAF,MAAA,EAAAC,GAAA,IAAE;IAA5D,IAAAE,mBAAA,OAAAf,eAAA,CAAAC,OAAA,EAAAa,gBAAA,CAAAD,GAAA;MAAOG,IAAG,GAAAD,mBAAA;MAAEE,KAAK,GAAAF,mBAAA;IACpB,QAAQC,IAAG;MACT,KAAK,eAAe;QAClBV,KAAK,CAACxB,QAAQ,CAACQ,GAAG,QAAA4B,MAAA,CAAQF,IAAG,GAAKC,KAAK,CAAcpB,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/D;MACF,KAAK,MAAM;MACX,KAAK,KAAK;MACV,KAAK,OAAO;MACZ;QACES,KAAK,CAACxB,QAAQ,CAACQ,GAAG,QAAA4B,MAAA,CAAQF,IAAG,GAAI,OAAOC,KAAK,KAAK,QAAQ,GAAGA,KAAK,GAAGhC,IAAI,CAACI,SAAS,CAAC4B,KAAK,CAAC,CAAC;IAAC;EAElG;AACF"}
@@ -1 +1 @@
-
{"version":3,"file":"geoparquet-schema.js","names":["$schema","title","description","type","required","properties","version","const","primary_column","minLength","columns","minProperties","patternProperties","encoding","geometry_types","uniqueItems","items","pattern","crs","oneOf","$ref","edges","enum","orientation","bbox","minItems","maxItems","epoch","additionalProperties"],"sources":["../../../../src/lib/geo/geoparquet-schema.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n/* eslint-disable camelcase */\n\n/**\n * Geoparquet JSON schema for geo metadata\n * @see https://github.com/geoarrow/geoarrow/blob/main/metadata.md\n * @see https://github.com/opengeospatial/geoparquet/blob/main/format-specs/geoparquet.md\n */\nexport default {\n $schema: 'http://json-schema.org/draft-07/schema#',\n title: 'GeoParquet',\n description: 'Parquet metadata included in the geo field.',\n type: 'object',\n required: ['version', 'primary_column', 'columns'],\n properties: {\n version: {type: 'string', const: '1.0.0-beta.1'},\n primary_column: {type: 'string', minLength: 1},\n columns: {\n type: 'object',\n minProperties: 1,\n patternProperties: {\n '.+': {\n type: 'object',\n required: ['encoding', 'geometry_types'],\n properties: {\n encoding: {type: 'string', const: 'WKB'},\n geometry_types: {\n type: 'array',\n uniqueItems: true,\n items: {\n type: 'string',\n pattern: '^(GeometryCollection|(Multi)?(Point|LineString|Polygon))( Z)?$'\n }\n },\n crs: {\n oneOf: [\n {\n $ref: 'https://proj.org/schemas/v0.5/projjson.schema.json'\n },\n {type: 'null'}\n ]\n },\n edges: {type: 'string', enum: ['planar', 'spherical']},\n orientation: {type: 'string', const: 'counterclockwise'},\n bbox: {\n type: 'array',\n items: {type: 'number'},\n oneOf: [\n {\n description: '2D bbox consisting of (xmin, ymin, xmax, ymax)',\n minItems: 4,\n maxItems: 4\n },\n {\n description: '3D bbox consisting of (xmin, ymin, zmin, xmax, ymax, zmax)',\n minItems: 6,\n maxItems: 6\n }\n ]\n },\n epoch: {type: 'number'}\n }\n }\n },\n additionalProperties: false\n }\n }\n};\n"],"mappings":";;;;;;eASe;EACbA,OAAO,EAAE,yCAAyC;EAClDC,KAAK,EAAE,YAAY;EACnBC,WAAW,EAAE,6CAA6C;EAC1DC,IAAI,EAAE,QAAQ;EACdC,QAAQ,EAAE,CAAC,SAAS,EAAE,gBAAgB,EAAE,SAAS,CAAC;EAClDC,UAAU,EAAE;IACVC,OAAO,EAAE;MAACH,IAAI,EAAE,QAAQ;MAAEI,KAAK,EAAE;IAAc,CAAC;IAChDC,cAAc,EAAE;MAACL,IAAI,EAAE,QAAQ;MAAEM,SAAS,EAAE;IAAC,CAAC;IAC9CC,OAAO,EAAE;MACPP,IAAI,EAAE,QAAQ;MACdQ,aAAa,EAAE,CAAC;MAChBC,iBAAiB,EAAE;QACjB,IAAI,EAAE;UACJT,IAAI,EAAE,QAAQ;UACdC,QAAQ,EAAE,CAAC,UAAU,EAAE,gBAAgB,CAAC;UACxCC,UAAU,EAAE;YACVQ,QAAQ,EAAE;cAACV,IAAI,EAAE,QAAQ;cAAEI,KAAK,EAAE;YAAK,CAAC;YACxCO,cAAc,EAAE;cACdX,IAAI,EAAE,OAAO;cACbY,WAAW,EAAE,IAAI;cACjBC,KAAK,EAAE;gBACLb,IAAI,EAAE,QAAQ;gBACdc,OAAO,EAAE;cACX;YACF,CAAC;YACDC,GAAG,EAAE;cACHC,KAAK,EAAE,CACL;gBACEC,IAAI,EAAE;cACR,CAAC,EACD;gBAACjB,IAAI,EAAE;cAAM,CAAC;YAElB,CAAC;YACDkB,KAAK,EAAE;cAAClB,IAAI,EAAE,QAAQ;cAAEmB,IAAI,EAAE,CAAC,QAAQ,EAAE,WAAW;YAAC,CAAC;YACtDC,WAAW,EAAE;cAACpB,IAAI,EAAE,QAAQ;cAAEI,KAAK,EAAE;YAAkB,CAAC;YACxDiB,IAAI,EAAE;cACJrB,IAAI,EAAE,OAAO;cACba,KAAK,EAAE;gBAACb,IAAI,EAAE;cAAQ,CAAC;cACvBgB,KAAK,EAAE,CACL;gBACEjB,WAAW,EAAE,gDAAgD;gBAC7DuB,QAAQ,EAAE,CAAC;gBACXC,QAAQ,EAAE;cACZ,CAAC,EACD;gBACExB,WAAW,EAAE,4DAA4D;gBACzEuB,QAAQ,EAAE,CAAC;gBACXC,QAAQ,EAAE;cACZ,CAAC;YAEL,CAAC;YACDC,KAAK,EAAE;cAACxB,IAAI,EAAE;YAAQ;UACxB;QACF;MACF,CAAC;MACDyB,oBAAoB,EAAE;IACxB;EACF;AACF,CAAC;
+
{"version":3,"file":"geoparquet-schema.js","names":["$schema","title","description","type","required","properties","version","const","primary_column","minLength","columns","minProperties","patternProperties","encoding","geometry_types","uniqueItems","items","pattern","crs","oneOf","$ref","edges","enum","orientation","bbox","minItems","maxItems","epoch","additionalProperties","exports","default","_default"],"sources":["../../../../src/lib/geo/geoparquet-schema.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n/* eslint-disable camelcase */\n\n/**\n * Geoparquet JSON schema for geo metadata\n * @see https://github.com/geoarrow/geoarrow/blob/main/metadata.md\n * @see https://github.com/opengeospatial/geoparquet/blob/main/format-specs/geoparquet.md\n */\nexport default {\n $schema: 'http://json-schema.org/draft-07/schema#',\n title: 'GeoParquet',\n description: 'Parquet metadata included in the geo field.',\n type: 'object',\n required: ['version', 'primary_column', 'columns'],\n properties: {\n version: {type: 'string', const: '1.0.0-beta.1'},\n primary_column: {type: 'string', minLength: 1},\n columns: {\n type: 'object',\n minProperties: 1,\n patternProperties: {\n '.+': {\n type: 'object',\n required: ['encoding', 'geometry_types'],\n properties: {\n encoding: {type: 'string', const: 'WKB'},\n geometry_types: {\n type: 'array',\n uniqueItems: true,\n items: {\n type: 'string',\n pattern: '^(GeometryCollection|(Multi)?(Point|LineString|Polygon))( Z)?$'\n }\n },\n crs: {\n oneOf: [\n {\n $ref: 'https://proj.org/schemas/v0.5/projjson.schema.json'\n },\n {type: 'null'}\n ]\n },\n edges: {type: 'string', enum: ['planar', 'spherical']},\n orientation: {type: 'string', const: 'counterclockwise'},\n bbox: {\n type: 'array',\n items: {type: 'number'},\n oneOf: [\n {\n description: '2D bbox consisting of (xmin, ymin, xmax, ymax)',\n minItems: 4,\n maxItems: 4\n },\n {\n description: '3D bbox consisting of (xmin, ymin, zmin, xmax, ymax, zmax)',\n minItems: 6,\n maxItems: 6\n }\n ]\n },\n epoch: {type: 'number'}\n }\n }\n },\n additionalProperties: false\n }\n }\n};\n"],"mappings":";;;;;;eASe;EACbA,OAAO,EAAE,yCAAyC;EAClDC,KAAK,EAAE,YAAY;EACnBC,WAAW,EAAE,6CAA6C;EAC1DC,IAAI,EAAE,QAAQ;EACdC,QAAQ,EAAE,CAAC,SAAS,EAAE,gBAAgB,EAAE,SAAS,CAAC;EAClDC,UAAU,EAAE;IACVC,OAAO,EAAE;MAACH,IAAI,EAAE,QAAQ;MAAEI,KAAK,EAAE;IAAc,CAAC;IAChDC,cAAc,EAAE;MAACL,IAAI,EAAE,QAAQ;MAAEM,SAAS,EAAE;IAAC,CAAC;IAC9CC,OAAO,EAAE;MACPP,IAAI,EAAE,QAAQ;MACdQ,aAAa,EAAE,CAAC;MAChBC,iBAAiB,EAAE;QACjB,IAAI,EAAE;UACJT,IAAI,EAAE,QAAQ;UACdC,QAAQ,EAAE,CAAC,UAAU,EAAE,gBAAgB,CAAC;UACxCC,UAAU,EAAE;YACVQ,QAAQ,EAAE;cAACV,IAAI,EAAE,QAAQ;cAAEI,KAAK,EAAE;YAAK,CAAC;YACxCO,cAAc,EAAE;cACdX,IAAI,EAAE,OAAO;cACbY,WAAW,EAAE,IAAI;cACjBC,KAAK,EAAE;gBACLb,IAAI,EAAE,QAAQ;gBACdc,OAAO,EAAE;cACX;YACF,CAAC;YACDC,GAAG,EAAE;cACHC,KAAK,EAAE,CACL;gBACEC,IAAI,EAAE;cACR,CAAC,EACD;gBAACjB,IAAI,EAAE;cAAM,CAAC;YAElB,CAAC;YACDkB,KAAK,EAAE;cAAClB,IAAI,EAAE,QAAQ;cAAEmB,IAAI,EAAE,CAAC,QAAQ,EAAE,WAAW;YAAC,CAAC;YACtDC,WAAW,EAAE;cAACpB,IAAI,EAAE,QAAQ;cAAEI,KAAK,EAAE;YAAkB,CAAC;YACxDiB,IAAI,EAAE;cACJrB,IAAI,EAAE,OAAO;cACba,KAAK,EAAE;gBAACb,IAAI,EAAE;cAAQ,CAAC;cACvBgB,KAAK,EAAE,CACL;gBACEjB,WAAW,EAAE,gDAAgD;gBAC7DuB,QAAQ,EAAE,CAAC;gBACXC,QAAQ,EAAE;cACZ,CAAC,EACD;gBACExB,WAAW,EAAE,4DAA4D;gBACzEuB,QAAQ,EAAE,CAAC;gBACXC,QAAQ,EAAE;cACZ,CAAC;YAEL,CAAC;YACDC,KAAK,EAAE;cAACxB,IAAI,EAAE;YAAQ;UACxB;QACF;MACF,CAAC;MACDyB,oBAAoB,EAAE;IACxB;EACF;AACF,CAAC;AAAAC,OAAA,CAAAC,OAAA,GAAAC,QAAA"}
@@ -24,62 +24,60 @@ function _parseParquetInColumns() {
  _parseParquetInColumns = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee2(arrayBuffer, options) {
  var blob, _iteratorAbruptCompletion, _didIteratorError, _iteratorError, _iterator, _step, batch;
  return _regenerator.default.wrap(function _callee2$(_context2) {
- while (1) {
- … (content of removed lines 28-39 not shown)
- _context2.next = 13;
- break;
- }
- batch = _step.value;
- return _context2.abrupt("return", batch);
- case 10:
- _iteratorAbruptCompletion = false;
- _context2.next = 5;
+ while (1) switch (_context2.prev = _context2.next) {
+ case 0:
+ blob = new Blob([arrayBuffer]);
+ _iteratorAbruptCompletion = false;
+ _didIteratorError = false;
+ _context2.prev = 3;
+ _iterator = _asyncIterator(parseParquetFileInColumnarBatches(blob, options));
+ case 5:
+ _context2.next = 7;
+ return _iterator.next();
+ case 7:
+ if (!(_iteratorAbruptCompletion = !(_step = _context2.sent).done)) {
+ _context2.next = 13;
  break;
- … (content of removed lines 49-63 not shown)
+ }
+ batch = _step.value;
+ return _context2.abrupt("return", batch);
+ case 10:
+ _iteratorAbruptCompletion = false;
+ _context2.next = 5;
+ break;
+ case 13:
+ _context2.next = 19;
+ break;
+ case 15:
+ _context2.prev = 15;
+ _context2.t0 = _context2["catch"](3);
+ _didIteratorError = true;
+ _iteratorError = _context2.t0;
+ case 19:
+ _context2.prev = 19;
+ _context2.prev = 20;
+ if (!(_iteratorAbruptCompletion && _iterator.return != null)) {
  _context2.next = 24;
- … (content of removed lines 65-82 not shown)
+ break;
+ }
+ _context2.next = 24;
+ return _iterator.return();
+ case 24:
+ _context2.prev = 24;
+ if (!_didIteratorError) {
+ _context2.next = 27;
+ break;
+ }
+ throw _iteratorError;
+ case 27:
+ return _context2.finish(24);
+ case 28:
+ return _context2.finish(19);
+ case 29:
+ return _context2.abrupt("return", null);
+ case 30:
+ case "end":
+ return _context2.stop();
  }
  }, _callee2, null, [[3, 15, 19, 29], [20,, 24, 28]]);
  }));
@@ -92,73 +90,71 @@ function _parseParquetFileInColumnarBatches() {
  _parseParquetFileInColumnarBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(blob, options) {
  var file, reader, parquetSchema, parquetMetadata, schema, rowGroups, _iteratorAbruptCompletion2, _didIteratorError2, _iteratorError2, _iterator2, _step2, rowGroup;
  return _regenerator.default.wrap(function _callee$(_context) {
- while (1) {
- … (content of removed lines 96-119 not shown)
- _context.next = 24;
- break;
- }
- rowGroup = _step2.value;
- _context.next = 21;
- return convertRowGroupToTableBatch(schema, rowGroup);
- case 21:
- _iteratorAbruptCompletion2 = false;
- _context.next = 15;
+ while (1) switch (_context.prev = _context.next) {
+ case 0:
+ file = (0, _loaderUtils.makeReadableFile)(blob);
+ reader = new _parquetReader.ParquetReader(file);
+ _context.next = 4;
+ return (0, _awaitAsyncGenerator2.default)(reader.getSchema());
+ case 4:
+ parquetSchema = _context.sent;
+ _context.next = 7;
+ return (0, _awaitAsyncGenerator2.default)(reader.getFileMetadata());
+ case 7:
+ parquetMetadata = _context.sent;
+ schema = (0, _convertSchemaFromParquet.convertSchemaFromParquet)(parquetSchema, parquetMetadata);
+ (0, _decodeGeoMetadata.unpackGeoMetadata)(schema);
+ rowGroups = reader.rowGroupIterator(options === null || options === void 0 ? void 0 : options.parquet);
+ _iteratorAbruptCompletion2 = false;
+ _didIteratorError2 = false;
+ _context.prev = 13;
+ _iterator2 = _asyncIterator(rowGroups);
+ case 15:
+ _context.next = 17;
+ return (0, _awaitAsyncGenerator2.default)(_iterator2.next());
+ case 17:
+ if (!(_iteratorAbruptCompletion2 = !(_step2 = _context.sent).done)) {
+ _context.next = 24;
  break;
- … (content of removed lines 130-144 not shown)
+ }
+ rowGroup = _step2.value;
+ _context.next = 21;
+ return convertRowGroupToTableBatch(schema, rowGroup);
+ case 21:
+ _iteratorAbruptCompletion2 = false;
+ _context.next = 15;
+ break;
+ case 24:
+ _context.next = 30;
+ break;
+ case 26:
+ _context.prev = 26;
+ _context.t0 = _context["catch"](13);
+ _didIteratorError2 = true;
+ _iteratorError2 = _context.t0;
+ case 30:
+ _context.prev = 30;
+ _context.prev = 31;
+ if (!(_iteratorAbruptCompletion2 && _iterator2.return != null)) {
  _context.next = 35;
- … (content of removed lines 146-161 not shown)
+ break;
+ }
+ _context.next = 35;
+ return (0, _awaitAsyncGenerator2.default)(_iterator2.return());
+ case 35:
+ _context.prev = 35;
+ if (!_didIteratorError2) {
+ _context.next = 38;
+ break;
+ }
+ throw _iteratorError2;
+ case 38:
+ return _context.finish(35);
+ case 39:
+ return _context.finish(30);
+ case 40:
+ case "end":
+ return _context.stop();
  }
  }, _callee, null, [[13, 26, 30, 40], [31,, 35, 39]]);
  }));
@@ -1 +1 @@
-{"version":3,"file":"parse-parquet-to-columns.js","names":["parseParquetInColumns","arrayBuffer","options","blob","Blob","parseParquetFileInColumnarBatches","
+
{"version":3,"file":"parse-parquet-to-columns.js","names":["_loaderUtils","require","_parquetReader","_convertSchemaFromParquet","_convertRowGroupToColumns","_decodeGeoMetadata","_asyncIterator","iterable","method","async","sync","retry","Symbol","asyncIterator","iterator","call","AsyncFromSyncIterator","TypeError","s","AsyncFromSyncIteratorContinuation","r","Object","Promise","reject","done","resolve","value","then","n","next","prototype","apply","arguments","return","_return","ret","throw","_throw","thr","parseParquetInColumns","_x3","_x4","_parseParquetInColumns","_asyncToGenerator2","default","_regenerator","mark","_callee2","arrayBuffer","options","blob","_iteratorAbruptCompletion","_didIteratorError","_iteratorError","_iterator","_step","batch","wrap","_callee2$","_context2","prev","Blob","parseParquetFileInColumnarBatches","sent","abrupt","t0","finish","stop","_x","_x2","_parseParquetFileInColumnarBatches","_wrapAsyncGenerator2","_callee","file","reader","parquetSchema","parquetMetadata","schema","rowGroups","_iteratorAbruptCompletion2","_didIteratorError2","_iteratorError2","_iterator2","_step2","rowGroup","_callee$","_context","makeReadableFile","ParquetReader","_awaitAsyncGenerator2","getSchema","getFileMetadata","convertSchemaFromParquet","unpackGeoMetadata","rowGroupIterator","parquet","convertRowGroupToTableBatch","data","convertParquetRowGroupToColumns","shape","batchType","length","rowCount"],"sources":["../../../../src/lib/parsers/parse-parquet-to-columns.ts"],"sourcesContent":["// loaders.gl, MIT license\n\n// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils';\nimport {ColumnarTableBatch, Schema} from '@loaders.gl/schema';\nimport {makeReadableFile} from '@loaders.gl/loader-utils';\nimport type {ParquetLoaderOptions} from '../../parquet-loader';\nimport {ParquetReader} from '../../parquetjs/parser/parquet-reader';\nimport {ParquetBuffer} from '../../parquetjs/schema/declare';\nimport {convertSchemaFromParquet} from '../arrow/convert-schema-from-parquet';\nimport {convertParquetRowGroupToColumns} from '../arrow/convert-row-group-to-columns';\nimport {unpackGeoMetadata} from '../geo/decode-geo-metadata';\n\nexport async function parseParquetInColumns(\n arrayBuffer: ArrayBuffer,\n options?: ParquetLoaderOptions\n) {\n const blob = new Blob([arrayBuffer]);\n for await (const batch of parseParquetFileInColumnarBatches(blob, options)) {\n return batch;\n }\n return null;\n}\n\nexport async function* parseParquetFileInColumnarBatches(\n blob: Blob,\n options?: ParquetLoaderOptions\n): AsyncIterable<ColumnarTableBatch> {\n const file = makeReadableFile(blob);\n const reader = new ParquetReader(file);\n const parquetSchema = await reader.getSchema();\n const parquetMetadata = await reader.getFileMetadata();\n const schema = convertSchemaFromParquet(parquetSchema, parquetMetadata);\n unpackGeoMetadata(schema);\n const rowGroups = reader.rowGroupIterator(options?.parquet);\n for await (const rowGroup of rowGroups) {\n yield convertRowGroupToTableBatch(schema, rowGroup);\n }\n}\n\nfunction convertRowGroupToTableBatch(schema: Schema, rowGroup: ParquetBuffer): ColumnarTableBatch {\n const data = convertParquetRowGroupToColumns(schema, rowGroup);\n return {\n shape: 'columnar-table',\n batchType: 'data',\n schema,\n data,\n length: rowGroup.rowCount\n 
};\n}\n"],"mappings":";;;;;;;;;;;;AAIA,IAAAA,YAAA,GAAAC,OAAA;AAEA,IAAAC,cAAA,GAAAD,OAAA;AAEA,IAAAE,yBAAA,GAAAF,OAAA;AACA,IAAAG,yBAAA,GAAAH,OAAA;AACA,IAAAI,kBAAA,GAAAJ,OAAA;AAA6D,SAAAK,eAAAC,QAAA,QAAAC,MAAA,EAAAC,KAAA,EAAAC,IAAA,EAAAC,KAAA,iCAAAC,MAAA,KAAAH,KAAA,GAAAG,MAAA,CAAAC,aAAA,EAAAH,IAAA,GAAAE,MAAA,CAAAE,QAAA,GAAAH,KAAA,WAAAF,KAAA,aAAAD,MAAA,GAAAD,QAAA,CAAAE,KAAA,WAAAD,MAAA,CAAAO,IAAA,CAAAR,QAAA,OAAAG,IAAA,aAAAF,MAAA,GAAAD,QAAA,CAAAG,IAAA,eAAAM,qBAAA,CAAAR,MAAA,CAAAO,IAAA,CAAAR,QAAA,IAAAE,KAAA,sBAAAC,IAAA,6BAAAO,SAAA;AAAA,SAAAD,sBAAAE,CAAA,aAAAC,kCAAAC,CAAA,QAAAC,MAAA,CAAAD,CAAA,MAAAA,CAAA,SAAAE,OAAA,CAAAC,MAAA,KAAAN,SAAA,CAAAG,CAAA,+BAAAI,IAAA,GAAAJ,CAAA,CAAAI,IAAA,SAAAF,OAAA,CAAAG,OAAA,CAAAL,CAAA,CAAAM,KAAA,EAAAC,IAAA,WAAAD,KAAA,aAAAA,KAAA,EAAAA,KAAA,EAAAF,IAAA,EAAAA,IAAA,iBAAAR,qBAAA,YAAAA,sBAAAE,CAAA,SAAAA,CAAA,GAAAA,CAAA,OAAAU,CAAA,GAAAV,CAAA,CAAAW,IAAA,KAAAb,qBAAA,CAAAc,SAAA,KAAAZ,CAAA,QAAAU,CAAA,QAAAC,IAAA,WAAAA,KAAA,WAAAV,iCAAA,MAAAS,CAAA,CAAAG,KAAA,MAAAb,CAAA,EAAAc,SAAA,OAAAC,MAAA,WAAAC,QAAAR,KAAA,QAAAS,GAAA,QAAAjB,CAAA,CAAAe,MAAA,oBAAAE,GAAA,GAAAb,OAAA,CAAAG,OAAA,GAAAC,KAAA,EAAAA,KAAA,EAAAF,IAAA,UAAAL,iCAAA,CAAAgB,GAAA,CAAAJ,KAAA,MAAAb,CAAA,EAAAc,SAAA,OAAAI,KAAA,WAAAC,OAAAX,KAAA,QAAAY,GAAA,QAAApB,CAAA,CAAAe,MAAA,oBAAAK,GAAA,GAAAhB,OAAA,CAAAC,MAAA,CAAAG,KAAA,IAAAP,iCAAA,CAAAmB,GAAA,CAAAP,KAAA,MAAAb,CAAA,EAAAc,SAAA,aAAAhB,qBAAA,CAAAE,CAAA;AAAA,SAEvCqB,qBAAqBA,CAAAC,GAAA,EAAAC,GAAA;EAAA,OAAAC,sBAAA,CAAAX,KAAA,OAAAC,SAAA;AAAA;AAAA,SAAAU,uBAAA;EAAAA,sBAAA,OAAAC,kBAAA,CAAAC,OAAA,EAAAC,YAAA,CAAAD,OAAA,CAAAE,IAAA,CAApC,SAAAC,SACLC,WAAwB,EACxBC,OAA8B;IAAA,IAAAC,IAAA,EAAAC,yBAAA,EAAAC,iBAAA,EAAAC,cAAA,EAAAC,SAAA,EAAAC,KAAA,EAAAC,KAAA;IAAA,OAAAX,YAAA,CAAAD,OAAA,CAAAa,IAAA,UAAAC,UAAAC,SAAA;MAAA,kBAAAA,SAAA,CAAAC,IAAA,GAAAD,SAAA,CAAA9B,IAAA;QAAA;UAExBqB,IAAI,GAAG,IAAIW,IAAI,CAAC,CAACb,WAAW,CAAC,CAAC;UAAAG,yBAAA;UAAAC,iBAAA;UAAAO,SAAA,CAAAC,IAAA;UAAAN,SAAA,GAAAhD,cAAA,CACVwD,iCAAiC,CAACZ,IAAI,EAAED,OAAO,CAAC;QAAA;UAAAU,SAAA,CAAA9B,IAAA;UAAA,OAAAyB,SAAA,CAAAzB,IAAA;QAAA;UAAA,MAAAsB,yBAAA,KAAAI,KAAA,GAAAI,SAAA,CAAAI,IAAA,EAAAvC,IAAA;YAAAmC,SAAA,CAAA9B,IAAA;YAAA;UAAA;UAAzD2B,KAAK,GAAAD,KAAA,CAAA7B,KAAA;UAAA,OAAAiC,SAAA,CAAAK,MAAA,WACbR,KAAK;QAAA;UAAAL,yBAAA;UAAAQ,SAAA,CAAA9B,IAAA;UAAA;QAAA;UAAA8B,SAAA,CAAA9B,IAAA;UAAA;QAAA;UAAA8B,SAAA,CAAAC,IAAA;UAAAD,SAAA,CAAAM,EAAA,GAAAN,SAAA;UAAAP,iBAAA;UAAAC,cAAA,GAAAM,SAAA,CAAAM,EAAA;QAAA;UAAAN,SAAA,CAAAC,IAAA;UAAAD,SAAA,CAAAC,IAAA;UAAA,MAAAT,yBAAA,IAAAG,SAAA,CAAArB,MAAA;YAAA0B,SAAA,CAAA9B,IAAA;YAAA;UAAA;UAAA8B,SAAA,CAAA9B,IAAA;UAAA,OAAAyB,SAAA,CAAArB,MAAA;QAAA;UAAA0B,SAAA,CAAAC,IAAA;UAAA,KAAAR,iBAAA;YAAAO,SAAA,CAAA9B,IAAA;YAAA;UAAA;UAAA,MAAAwB,cAAA;QAAA;UAAA,OAAAM,SAAA,CAAAO,MAAA;QAAA;UAAA,OAAAP,SAAA,CAAAO,MAAA;QAAA;UAAA,OAAAP,SAAA,CAAAK,MAAA,WAEP,IAAI;QAAA;QAAA;UAAA,OAAAL,SAAA,CAAAQ,IAAA;MAAA;IAAA,GAAApB,QAAA;EAAA,CACZ;EAAA,OAAAL,sBAAA,CAAAX,KAAA,OAAAC,SAAA;AAAA;AAAA,SAEsB8B,iCAAiCA,CAAAM,EAAA,EAAAC,GAAA;EAAA,OAAAC,kCAAA,CAAAvC,KAAA,OAAAC,SAAA;AAAA;AAAA,SAAAsC,mCAAA;EAAAA,kCAAA,OAAAC,oBAAA,CAAA3B,OAAA,EAAAC,YAAA,CAAAD,OAAA,CAAAE,IAAA,CAAjD,SAAA0B,QACLtB,IAAU,EACVD,OAA8B;IAAA,IAAAwB,IAAA,EAAAC,MAAA,EAAAC,aAAA,EAAAC,eAAA,EAAAC,MAAA,EAAAC,SAAA,EAAAC,0BAAA,EAAAC,kBAAA,EAAAC,eAAA,EAAAC,UAAA,EAAAC,MAAA,EAAAC,QAAA;IAAA,OAAAvC,YAAA,CAAAD,OAAA,CAAAa,IAAA,UAAA4B,SAAAC,QAAA;MAAA,kBAAAA,QAAA,CAAA1B,IAAA,GAAA0B,QAAA,CAAAzD,IAAA;QAAA;UAExB4C,IAAI,GAAG,IAAAc,6BAAgB,EAACrC,IAAI,CAAC;UAC7BwB,MAAM,GAAG,IAAIc,4BAAa,CAACf,IAAI,CAAC;UAAAa,QAAA,CAAAzD,IAAA;UAAA,WAAA4D,qBAAA,CAAA7C,OAAA,EACV8B,MAAM,CAACgB,SAAS,EAAE;QAAA;UAAxCf,aAAa,GAAAW,QAAA,CAAAvB,IAAA;UAAAuB,QAAA,CAAAzD,
IAAA;UAAA,WAAA4D,qBAAA,CAAA7C,OAAA,EACW8B,MAAM,CAACiB,eAAe,EAAE;QAAA;UAAhDf,eAAe,GAAAU,QAAA,CAAAvB,IAAA;UACfc,MAAM,GAAG,IAAAe,kDAAwB,EAACjB,aAAa,EAAEC,eAAe,CAAC;UACvE,IAAAiB,oCAAiB,EAAChB,MAAM,CAAC;UACnBC,SAAS,GAAGJ,MAAM,CAACoB,gBAAgB,CAAC7C,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAE8C,OAAO,CAAC;UAAAhB,0BAAA;UAAAC,kBAAA;UAAAM,QAAA,CAAA1B,IAAA;UAAAsB,UAAA,GAAA5E,cAAA,CAC9BwE,SAAS;QAAA;UAAAQ,QAAA,CAAAzD,IAAA;UAAA,WAAA4D,qBAAA,CAAA7C,OAAA,EAAAsC,UAAA,CAAArD,IAAA;QAAA;UAAA,MAAAkD,0BAAA,KAAAI,MAAA,GAAAG,QAAA,CAAAvB,IAAA,EAAAvC,IAAA;YAAA8D,QAAA,CAAAzD,IAAA;YAAA;UAAA;UAArBuD,QAAQ,GAAAD,MAAA,CAAAzD,KAAA;UAAA4D,QAAA,CAAAzD,IAAA;UACvB,OAAMmE,2BAA2B,CAACnB,MAAM,EAAEO,QAAQ,CAAC;QAAA;UAAAL,0BAAA;UAAAO,QAAA,CAAAzD,IAAA;UAAA;QAAA;UAAAyD,QAAA,CAAAzD,IAAA;UAAA;QAAA;UAAAyD,QAAA,CAAA1B,IAAA;UAAA0B,QAAA,CAAArB,EAAA,GAAAqB,QAAA;UAAAN,kBAAA;UAAAC,eAAA,GAAAK,QAAA,CAAArB,EAAA;QAAA;UAAAqB,QAAA,CAAA1B,IAAA;UAAA0B,QAAA,CAAA1B,IAAA;UAAA,MAAAmB,0BAAA,IAAAG,UAAA,CAAAjD,MAAA;YAAAqD,QAAA,CAAAzD,IAAA;YAAA;UAAA;UAAAyD,QAAA,CAAAzD,IAAA;UAAA,WAAA4D,qBAAA,CAAA7C,OAAA,EAAAsC,UAAA,CAAAjD,MAAA;QAAA;UAAAqD,QAAA,CAAA1B,IAAA;UAAA,KAAAoB,kBAAA;YAAAM,QAAA,CAAAzD,IAAA;YAAA;UAAA;UAAA,MAAAoD,eAAA;QAAA;UAAA,OAAAK,QAAA,CAAApB,MAAA;QAAA;UAAA,OAAAoB,QAAA,CAAApB,MAAA;QAAA;QAAA;UAAA,OAAAoB,QAAA,CAAAnB,IAAA;MAAA;IAAA,GAAAK,OAAA;EAAA,CAEtD;EAAA,OAAAF,kCAAA,CAAAvC,KAAA,OAAAC,SAAA;AAAA;AAED,SAASgE,2BAA2BA,CAACnB,MAAc,EAAEO,QAAuB,EAAsB;EAChG,IAAMa,IAAI,GAAG,IAAAC,yDAA+B,EAACrB,MAAM,EAAEO,QAAQ,CAAC;EAC9D,OAAO;IACLe,KAAK,EAAE,gBAAgB;IACvBC,SAAS,EAAE,MAAM;IACjBvB,MAAM,EAANA,MAAM;IACNoB,IAAI,EAAJA,IAAI;IACJI,MAAM,EAAEjB,QAAQ,CAACkB;EACnB,CAAC;AACH"}
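The sourcesContent in this map shows the columnar parser itself: parseParquetInColumns(arrayBuffer, options?) resolves to the first columnar batch (or null for an empty file), while parseParquetFileInColumnarBatches(blob, options?) async-iterates one ColumnarTableBatch per row group, unpacking geo metadata onto the converted schema along the way. A rough consumer sketch follows; the deep import path is an assumption rather than something taken from this diff.

// Sketch only - the import path is assumed; the functions and their signatures
// come from the sourcesContent shown in the map above.
import {
  parseParquetInColumns,
  parseParquetFileInColumnarBatches
} from '@loaders.gl/parquet/src/lib/parsers/parse-parquet-to-columns';

async function readFirstRowGroup(arrayBuffer: ArrayBuffer) {
  // Resolves to the first columnar batch, or null if the file has no row groups.
  return await parseParquetInColumns(arrayBuffer);
}

async function readAllRowGroups(blob: Blob) {
  // One batch per Parquet row group:
  // {shape: 'columnar-table', batchType: 'data', schema, data, length: rowGroup.rowCount}
  for await (const batch of parseParquetFileInColumnarBatches(blob)) {
    console.log(batch.shape, batch.length);
  }
}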