@loaders.gl/parquet 3.4.0-alpha.1 → 3.4.0-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +22 -29
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/constants.js +0 -2
- package/dist/es5/constants.js.map +1 -1
- package/dist/es5/index.js +47 -9
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/arrow/convert-columns-to-row-group.js +2 -0
- package/dist/es5/lib/arrow/convert-columns-to-row-group.js.map +1 -0
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js +19 -0
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -0
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js +98 -0
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -0
- package/dist/es5/lib/{convert-schema.js → arrow/convert-schema-to-parquet.js} +4 -32
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -0
- package/dist/es5/lib/geo/decode-geo-metadata.js +77 -0
- package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -0
- package/dist/es5/lib/geo/geoparquet-schema.js +83 -0
- package/dist/es5/lib/geo/geoparquet-schema.js.map +1 -0
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js +173 -0
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -0
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js +150 -0
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -0
- package/dist/es5/lib/wasm/encode-parquet-wasm.js +14 -16
- package/dist/es5/lib/wasm/encode-parquet-wasm.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/index.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js +16 -18
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js +6 -8
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
- package/dist/es5/lib/wasm/parse-parquet-wasm.js +16 -18
- package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/es5/lib/wip/convert-schema-deep.java.disabled +910 -0
- package/dist/es5/lib/wip/convert-schema-deep.rs.disabled +976 -0
- package/dist/es5/parquet-loader.js +4 -3
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -2
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
- package/dist/es5/parquetjs/codecs/index.js +0 -1
- package/dist/es5/parquetjs/codecs/index.js.map +1 -1
- package/dist/es5/parquetjs/codecs/plain.js +0 -3
- package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
- package/dist/es5/parquetjs/codecs/rle.js +0 -4
- package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +58 -58
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/parquet-encoder.js +625 -0
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageType.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/PageType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Type.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/Type.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/es5/parquetjs/parser/decoders.js +244 -261
- package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
- package/dist/es5/parquetjs/parser/parquet-reader.js +555 -256
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/schema.js +2 -12
- package/dist/es5/parquetjs/schema/schema.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +40 -46
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js +6 -11
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +2 -4
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/es5/parquetjs/utils/read-utils.js +0 -7
- package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/es5/workers/parquet-worker.js.map +1 -1
- package/dist/esm/bundle.js +0 -1
- package/dist/esm/bundle.js.map +1 -1
- package/dist/esm/constants.js +0 -3
- package/dist/esm/constants.js.map +1 -1
- package/dist/esm/index.js +11 -9
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/arrow/convert-columns-to-row-group.js +2 -0
- package/dist/esm/lib/arrow/convert-columns-to-row-group.js.map +1 -0
- package/dist/esm/lib/arrow/convert-row-group-to-columns.js +8 -0
- package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -0
- package/dist/esm/lib/{convert-schema.js → arrow/convert-schema-from-parquet.js} +30 -16
- package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -0
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js +37 -0
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -0
- package/dist/esm/lib/geo/decode-geo-metadata.js +58 -0
- package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -0
- package/dist/esm/lib/geo/geoparquet-schema.js +76 -0
- package/dist/esm/lib/geo/geoparquet-schema.js.map +1 -0
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js +35 -0
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -0
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js +18 -0
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -0
- package/dist/esm/lib/wasm/encode-parquet-wasm.js +0 -1
- package/dist/esm/lib/wasm/encode-parquet-wasm.js.map +1 -1
- package/dist/esm/lib/wasm/load-wasm/load-wasm-browser.js +0 -1
- package/dist/esm/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
- package/dist/esm/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
- package/dist/esm/lib/wasm/parse-parquet-wasm.js +0 -3
- package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/esm/lib/wip/convert-schema-deep.java.disabled +910 -0
- package/dist/esm/lib/wip/convert-schema-deep.rs.disabled +976 -0
- package/dist/esm/parquet-loader.js +4 -4
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -3
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -2
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -2
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/codecs/dictionary.js.map +1 -1
- package/dist/esm/parquetjs/codecs/index.js +0 -2
- package/dist/esm/parquetjs/codecs/index.js.map +1 -1
- package/dist/esm/parquetjs/codecs/plain.js +0 -4
- package/dist/esm/parquetjs/codecs/plain.js.map +1 -1
- package/dist/esm/parquetjs/codecs/rle.js +0 -6
- package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
- package/dist/esm/parquetjs/compression.js +10 -10
- package/dist/esm/parquetjs/compression.js.map +1 -1
- package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +6 -74
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/BsonType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/BsonType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DateType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/DateType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/Encoding.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/Encoding.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/EnumType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/EnumType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/IntType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/JsonType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/JsonType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ListType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/ListType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/MapType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MapType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/NullType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/NullType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageType.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/PageType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/Statistics.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/StringType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/StringType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TimeType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/Type.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/Type.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/UUIDType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/index.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/esm/parquetjs/parser/decoders.js +1 -18
- package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
- package/dist/esm/parquetjs/parser/parquet-reader.js +153 -80
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js +0 -1
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/schema.js +0 -10
- package/dist/esm/parquetjs/schema/schema.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +42 -48
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/parquetjs/schema/types.js +6 -10
- package/dist/esm/parquetjs/schema/types.js.map +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js +1 -2
- package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/esm/parquetjs/utils/read-utils.js +0 -8
- package/dist/esm/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/index.d.ts +24 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +26 -9
- package/dist/lib/arrow/convert-columns-to-row-group.d.ts +1 -0
- package/dist/lib/arrow/convert-columns-to-row-group.d.ts.map +1 -0
- package/dist/lib/arrow/convert-columns-to-row-group.js +1 -0
- package/dist/lib/arrow/convert-row-group-to-columns.d.ts +4 -0
- package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -0
- package/dist/lib/arrow/convert-row-group-to-columns.js +12 -0
- package/dist/lib/arrow/convert-schema-from-parquet.d.ts +9 -0
- package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -0
- package/dist/lib/{convert-schema.js → arrow/convert-schema-from-parquet.js} +30 -18
- package/dist/lib/arrow/convert-schema-to-parquet.d.ts +7 -0
- package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -0
- package/dist/lib/arrow/convert-schema-to-parquet.js +72 -0
- package/dist/lib/geo/decode-geo-metadata.d.ts +31 -0
- package/dist/lib/geo/decode-geo-metadata.d.ts.map +1 -0
- package/dist/lib/geo/decode-geo-metadata.js +73 -0
- package/dist/lib/geo/geoparquet-schema.d.ts +80 -0
- package/dist/lib/geo/geoparquet-schema.d.ts.map +1 -0
- package/dist/lib/geo/geoparquet-schema.js +69 -0
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts +5 -0
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -0
- package/dist/lib/parsers/parse-parquet-to-columns.js +40 -0
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts +4 -0
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -0
- package/dist/lib/parsers/parse-parquet-to-rows.js +40 -0
- package/dist/parquet-loader.d.ts +2 -0
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +3 -1
- package/dist/parquet-worker.js +25 -32
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquetjs/compression.d.ts.map +1 -1
- package/dist/parquetjs/compression.js +16 -5
- package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
- package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
- package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +39 -37
- package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.js +168 -102
- package/dist/parquetjs/schema/declare.d.ts +14 -7
- package/dist/parquetjs/schema/declare.d.ts.map +1 -1
- package/dist/parquetjs/schema/declare.js +2 -0
- package/dist/parquetjs/schema/shred.d.ts +115 -0
- package/dist/parquetjs/schema/shred.d.ts.map +1 -1
- package/dist/parquetjs/schema/shred.js +161 -43
- package/dist/parquetjs/schema/types.d.ts +2 -2
- package/dist/parquetjs/schema/types.d.ts.map +1 -1
- package/dist/parquetjs/schema/types.js +4 -6
- package/dist/parquetjs/utils/file-utils.d.ts +3 -4
- package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.js +2 -5
- package/package.json +8 -7
- package/src/index.ts +24 -4
- package/src/lib/arrow/convert-columns-to-row-group.ts +0 -0
- package/src/lib/arrow/convert-row-group-to-columns.ts +15 -0
- package/src/lib/{convert-schema.ts → arrow/convert-schema-from-parquet.ts} +41 -22
- package/src/lib/arrow/convert-schema-to-parquet.ts +102 -0
- package/src/lib/geo/decode-geo-metadata.ts +99 -0
- package/src/lib/geo/geoparquet-schema.ts +69 -0
- package/src/lib/parsers/parse-parquet-to-columns.ts +49 -0
- package/src/lib/parsers/parse-parquet-to-rows.ts +40 -0
- package/src/lib/wip/convert-schema-deep.java.disabled +910 -0
- package/src/lib/wip/convert-schema-deep.rs.disabled +976 -0
- package/src/parquet-loader.ts +5 -1
- package/src/parquetjs/compression.ts +14 -1
- package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
- package/src/parquetjs/parser/parquet-reader.ts +239 -122
- package/src/parquetjs/schema/declare.ts +17 -9
- package/src/parquetjs/schema/shred.ts +157 -28
- package/src/parquetjs/schema/types.ts +25 -30
- package/src/parquetjs/utils/file-utils.ts +3 -4
- package/dist/es5/lib/convert-schema.js.map +0 -1
- package/dist/es5/lib/parse-parquet.js +0 -130
- package/dist/es5/lib/parse-parquet.js.map +0 -1
- package/dist/es5/lib/read-array-buffer.js +0 -43
- package/dist/es5/lib/read-array-buffer.js.map +0 -1
- package/dist/es5/parquetjs/encoder/writer.js +0 -757
- package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
- package/dist/es5/parquetjs/file.js +0 -94
- package/dist/es5/parquetjs/file.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
- package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
- package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/esm/lib/convert-schema.js.map +0 -1
- package/dist/esm/lib/parse-parquet.js +0 -25
- package/dist/esm/lib/parse-parquet.js.map +0 -1
- package/dist/esm/lib/read-array-buffer.js +0 -10
- package/dist/esm/lib/read-array-buffer.js.map +0 -1
- package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
- package/dist/esm/parquetjs/file.js +0 -81
- package/dist/esm/parquetjs/file.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
- package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
- package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/lib/convert-schema.d.ts +0 -8
- package/dist/lib/convert-schema.d.ts.map +0 -1
- package/dist/lib/parse-parquet.d.ts +0 -4
- package/dist/lib/parse-parquet.d.ts.map +0 -1
- package/dist/lib/parse-parquet.js +0 -28
- package/dist/lib/read-array-buffer.d.ts +0 -19
- package/dist/lib/read-array-buffer.d.ts.map +0 -1
- package/dist/lib/read-array-buffer.js +0 -29
- package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
- package/dist/parquetjs/file.d.ts +0 -10
- package/dist/parquetjs/file.d.ts.map +0 -1
- package/dist/parquetjs/file.js +0 -99
- package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
- package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-cursor.js +0 -74
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
- package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
- package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
- package/dist/parquetjs/utils/buffer-utils.js +0 -22
- package/src/lib/parse-parquet.ts +0 -27
- package/src/lib/read-array-buffer.ts +0 -31
- package/src/parquetjs/file.ts +0 -90
- package/src/parquetjs/parser/parquet-cursor.ts +0 -94
- package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
- package/src/parquetjs/utils/buffer-utils.ts +0 -18
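Beyond the rename of the parquetjs writer to parquet-encoder, the listing shows the parser being split into row-oriented and columnar paths (lib/parsers/), new Arrow schema conversion helpers (lib/arrow/) and GeoParquet metadata decoding (lib/geo/). For orientation, a minimal sketch of loading a file through the exported loader, assuming the standard loaders.gl parse API; the URL and result handling are illustrative and not taken from this diff:

```ts
import {parse} from '@loaders.gl/core';
import {ParquetLoader} from '@loaders.gl/parquet';

async function loadParquet(url: string): Promise<unknown> {
  // The row-based parser path (parse-parquet-to-rows above) backs the default loader;
  // the exact table shape returned by the new columnar path is not shown in this diff.
  const data = await parse(fetch(url), ParquetLoader);
  return data;
}
```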

package/dist/parquetjs/compression.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"compression.d.ts","sourceRoot":"","sources":["../../src/parquetjs/compression.ts"],"names":[],"mappings":";AAIA,OAAO,EACL,WAAW,EAQZ,MAAM,yBAAyB,CAAC;AAEjC,OAAO,EAAC,kBAAkB,EAAC,MAAM,kBAAkB,CAAC;
+
{"version":3,"file":"compression.d.ts","sourceRoot":"","sources":["../../src/parquetjs/compression.ts"],"names":[],"mappings":";AAIA,OAAO,EACL,WAAW,EAQZ,MAAM,yBAAyB,CAAC;AAEjC,OAAO,EAAC,kBAAkB,EAAC,MAAM,kBAAkB,CAAC;AAuCpD,eAAO,MAAM,2BAA2B,EAAE,MAAM,CAAC,kBAAkB,EAAE,WAAW,CAU/E,CAAC;AAEF;;;GAGG;AACH,wBAAsB,mBAAmB,CAAC,OAAO,CAAC,EAAE;IAAC,OAAO,EAAE;QAAC,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAC,CAAA;CAAC,mBAGlF;AAED;;GAEG;AACH,wBAAsB,OAAO,CAAC,MAAM,EAAE,kBAAkB,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAQxF;AAED;;GAEG;AACH,wBAAsB,UAAU,CAC9B,MAAM,EAAE,kBAAkB,EAC1B,KAAK,EAAE,MAAM,EACb,IAAI,EAAE,MAAM,GACX,OAAO,CAAC,MAAM,CAAC,CAQjB;AAKD,wBAAgB,OAAO,CAAC,MAAM,EAAE,kBAAkB,EAAE,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,MAAM,CAMvF"}

package/dist/parquetjs/compression.js
@@ -8,7 +8,18 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.inflate = exports.decompress = exports.deflate = exports.preloadCompressions = exports.PARQUET_COMPRESSION_METHODS = void 0;
 const compression_1 = require("@loaders.gl/compression");
-
+/** We can't use loaders-util buffer handling since we are dependent on buffers even in the browser */
+function toBuffer(arrayBuffer) {
+    return Buffer.from(arrayBuffer);
+}
+function toArrayBuffer(buffer) {
+    // TODO - per docs we should just be able to call buffer.buffer, but there are issues
+    if (Buffer.isBuffer(buffer)) {
+        const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
+        return typedArray.slice().buffer;
+    }
+    return buffer;
+}
 // TODO switch to worker compression to avoid bundling...
 // import brotli from 'brotli'; - brotli has problems with decompress in browsers
 // import brotliDecompress from 'brotli/decompress';
@@ -57,9 +68,9 @@ async function deflate(method, value) {
     if (!compression) {
        throw new Error(`parquet: invalid compression method: ${method}`);
     }
-    const inputArrayBuffer =
+    const inputArrayBuffer = toArrayBuffer(value);
     const compressedArrayBuffer = await compression.compress(inputArrayBuffer);
-    return
+    return toBuffer(compressedArrayBuffer);
 }
 exports.deflate = deflate;
 /**
@@ -70,9 +81,9 @@ async function decompress(method, value, size) {
     if (!compression) {
        throw new Error(`parquet: invalid compression method: ${method}`);
     }
-    const inputArrayBuffer =
+    const inputArrayBuffer = toArrayBuffer(value);
     const compressedArrayBuffer = await compression.decompress(inputArrayBuffer, size);
-    return
+    return toBuffer(compressedArrayBuffer);
 }
 exports.decompress = decompress;
 /*
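The change above routes deflate/decompress through two small helpers that convert between Node Buffers and ArrayBuffers before calling into @loaders.gl/compression. The same round trip in TypeScript form (a sketch with types added; Node's Buffer is assumed to be available, as the comment in the compiled source notes):

```ts
function toBuffer(arrayBuffer: ArrayBuffer): Buffer {
  return Buffer.from(arrayBuffer);
}

function toArrayBuffer(buffer: Buffer | ArrayBuffer): ArrayBuffer {
  if (Buffer.isBuffer(buffer)) {
    // A Buffer is often a view into a larger pooled ArrayBuffer, so copy out
    // exactly the bytes it covers rather than returning buffer.buffer directly.
    const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
    return typedArray.slice().buffer;
  }
  return buffer;
}
```

The result coming back from compression.compress()/decompress() is converted back to a Buffer so the rest of the parquetjs code can keep operating on Buffers.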

package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts}
@@ -1,10 +1,10 @@
 /// <reference types="node" />
 /// <reference types="node" />
-import {
+import { stream } from '@loaders.gl/loader-utils';
 import { ParquetBuffer } from '../schema/declare';
 import { ParquetSchema } from '../schema/schema';
 import { RowGroup } from '../parquet-thrift';
-export interface
+export interface ParquetEncoderOptions {
     baseOffset?: number;
     rowGroupSize?: number;
     pageSize?: number;
@@ -17,21 +17,21 @@ export interface ParquetWriterOptions {
     start?: number;
 }
 /**
- * Write a parquet file to an output stream. The
+ * Write a parquet file to an output stream. The ParquetEncoder will perform
  * buffering/batching for performance, so close() must be called after all rows
  * are written.
  */
-export declare class
+export declare class ParquetEncoder<T> {
     /**
      * Convenience method to create a new buffered parquet writer that writes to
      * the specified file
      */
-    static openFile<T>(schema: ParquetSchema, path: string, opts?:
+    static openFile<T>(schema: ParquetSchema, path: string, opts?: ParquetEncoderOptions): Promise<ParquetEncoder<T>>;
     /**
      * Convenience method to create a new buffered parquet writer that writes to
      * the specified stream
      */
-    static openStream<T>(schema: ParquetSchema, outputStream: Writable, opts?:
+    static openStream<T>(schema: ParquetSchema, outputStream: stream.Writable, opts?: ParquetEncoderOptions): Promise<ParquetEncoder<T>>;
     schema: ParquetSchema;
     envelopeWriter: ParquetEnvelopeWriter;
     rowBuffer: ParquetBuffer;
@@ -41,7 +41,7 @@ export declare class ParquetWriter<T> {
     /**
      * Create a new buffered parquet writer for a given envelope writer
      */
-    constructor(schema: ParquetSchema, envelopeWriter: ParquetEnvelopeWriter, opts:
+    constructor(schema: ParquetSchema, envelopeWriter: ParquetEnvelopeWriter, opts: ParquetEncoderOptions);
     writeHeader(): Promise<void>;
     /**
      * Append a single row to the parquet file. Rows are buffered in memory until
@@ -82,7 +82,7 @@ export declare class ParquetEnvelopeWriter {
     /**
      * Create a new parquet envelope writer that writes to the specified stream
      */
-    static openStream(schema: ParquetSchema, outputStream: Writable, opts:
+    static openStream(schema: ParquetSchema, outputStream: stream.Writable, opts: ParquetEncoderOptions): Promise<ParquetEnvelopeWriter>;
     schema: ParquetSchema;
     write: (buf: Buffer) => Promise<void>;
     close: () => Promise<void>;
@@ -91,7 +91,7 @@ export declare class ParquetEnvelopeWriter {
     rowGroups: RowGroup[];
     pageSize: number;
     useDataPageV2: boolean;
-    constructor(schema: ParquetSchema, writeFn: (buf: Buffer) => Promise<void>, closeFn: () => Promise<void>, fileOffset: number, opts:
+    constructor(schema: ParquetSchema, writeFn: (buf: Buffer) => Promise<void>, closeFn: () => Promise<void>, fileOffset: number, opts: ParquetEncoderOptions);
     writeSection(buf: Buffer): Promise<void>;
     /**
      * Encode the parquet file header
@@ -112,13 +112,4 @@ export declare class ParquetEnvelopeWriter {
      */
     setPageSize(cnt: number): void;
 }
-/**
- * Create a parquet transform stream
- */
-export declare class ParquetTransformer<T> extends Transform {
-    writer: ParquetWriter<T>;
-    constructor(schema: ParquetSchema, opts?: ParquetWriterOptions);
-    _transform(row: any, encoding: string, callback: (val?: any) => void): Promise<void>;
-    _flush(callback: (val?: any) => void): Promise<void>;
-}
-//# sourceMappingURL=writer.d.ts.map
+//# sourceMappingURL=parquet-encoder.d.ts.map
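The declarations above rename ParquetWriter/ParquetWriterOptions to ParquetEncoder/ParquetEncoderOptions and take the Writable type from @loaders.gl/loader-utils instead of importing stream directly. A minimal usage sketch against this API; the import paths, schema fields and row values are illustrative assumptions, not taken from this diff:

```ts
import {createWriteStream} from 'fs';
// Hypothetical import paths -- the public entry points for these classes are not shown in this diff.
import {ParquetSchema} from './parquetjs/schema/schema';
import {ParquetEncoder} from './parquetjs/encoder/parquet-encoder';

type Row = {name: string; count: number};

async function writeParquet(path: string): Promise<void> {
  // parquets-style field definitions (assumed; only the class names appear above).
  const schema = new ParquetSchema({
    name: {type: 'UTF8'},
    count: {type: 'INT64'}
  });

  // openStream accepts any stream.Writable per the declaration above.
  const encoder = await ParquetEncoder.openStream<Row>(schema, createWriteStream(path));

  await encoder.appendRow({name: 'alpha', count: 1});
  await encoder.appendRow({name: 'beta', count: 2});

  // close() flushes buffered row groups and writes the footer.
  await encoder.close();
}
```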

package/dist/parquetjs/encoder/parquet-encoder.d.ts.map
@@ -0,0 +1 @@
+
{"version":3,"file":"parquet-encoder.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/encoder/parquet-encoder.ts"],"names":[],"mappings":";;AAEA,OAAO,EAAC,MAAM,EAAC,MAAM,0BAA0B,CAAC;AAGhD,OAAO,EACL,aAAa,EAKd,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAE/C,OAAO,EAaL,QAAQ,EAGT,MAAM,mBAAmB,CAAC;AA2B3B,MAAM,WAAW,qBAAqB;IACpC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IAGxB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED;;;;GAIG;AAEH,qBAAa,cAAc,CAAC,CAAC;IAC3B;;;OAGG;WACU,QAAQ,CAAC,CAAC,EACrB,MAAM,EAAE,aAAa,EACrB,IAAI,EAAE,MAAM,EACZ,IAAI,CAAC,EAAE,qBAAqB,GAC3B,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC;IAK7B;;;OAGG;WACU,UAAU,CAAC,CAAC,EACvB,MAAM,EAAE,aAAa,EACrB,YAAY,EAAE,MAAM,CAAC,QAAQ,EAC7B,IAAI,GAAE,qBAA0B,GAC/B,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC;IAKtB,MAAM,EAAE,aAAa,CAAC;IACtB,cAAc,EAAE,qBAAqB,CAAC;IACtC,SAAS,EAAE,aAAa,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;IACrB,MAAM,EAAE,OAAO,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE5C;;OAEG;gBAED,MAAM,EAAE,aAAa,EACrB,cAAc,EAAE,qBAAqB,EACrC,IAAI,EAAE,qBAAqB;IAcvB,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC;IAUlC;;;OAGG;IACG,SAAS,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAWzC;;;;;OAKG;IACG,KAAK,CAAC,QAAQ,CAAC,EAAE,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAqBjD;;OAEG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,IAAI;IAK7C;;;;;OAKG;IACH,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;IAIlC;;;OAGG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;CAG/B;AAED;;;;;GAKG;AACH,qBAAa,qBAAqB;IAChC;;OAEG;WACU,UAAU,CACrB,MAAM,EAAE,aAAa,EACrB,YAAY,EAAE,MAAM,CAAC,QAAQ,EAC7B,IAAI,EAAE,qBAAqB,GAC1B,OAAO,CAAC,qBAAqB,CAAC;IAM1B,MAAM,EAAE,aAAa,CAAC;IACtB,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,QAAQ,EAAE,CAAC;IACtB,QAAQ,EAAE,MAAM,CAAC;IACjB,aAAa,EAAE,OAAO,CAAC;gBAG5B,MAAM,EAAE,aAAa,EACrB,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,EACvC,OAAO,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,EAC5B,UAAU,EAAE,MAAM,EAClB,IAAI,EAAE,qBAAqB;IAY7B,YAAY,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAKxC;;OAEG;IACH,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC;IAI5B;;;OAGG;IACG,aAAa,CAAC,OAAO,EAAE,aAAa,GAAG,OAAO,CAAC,IAAI,CAAC;IAY1D;;OAEG;IACH,WAAW,CAAC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAWhE;;;OAGG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;CAG/B"}

package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js}
@@ -26,10 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
-/* eslint-disable camelcase */
-const stream_1 = require("stream");
+exports.ParquetEnvelopeWriter = exports.ParquetEncoder = void 0;
 const codecs_1 = require("../codecs");
 const Compression = __importStar(require("../compression"));
 const Shred = __importStar(require("../schema/shred"));
@@ -56,31 +53,27 @@ const PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;
 const PARQUET_RDLVL_TYPE = 'INT32';
 const PARQUET_RDLVL_ENCODING = 'RLE';
 /**
- * Write a parquet file to an output stream. The
+ * Write a parquet file to an output stream. The ParquetEncoder will perform
  * buffering/batching for performance, so close() must be called after all rows
  * are written.
  */
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
-class
+class ParquetEncoder {
     /**
      * Convenience method to create a new buffered parquet writer that writes to
      * the specified file
      */
     static async openFile(schema, path, opts) {
         const outputStream = await (0, file_utils_1.osopen)(path, opts);
-        return
+        return ParquetEncoder.openStream(schema, outputStream, opts);
     }
     /**
      * Convenience method to create a new buffered parquet writer that writes to
      * the specified stream
      */
-    static async openStream(schema, outputStream, opts) {
-        if (!opts) {
-            // tslint:disable-next-line:no-parameter-reassignment
-            opts = {};
-        }
+    static async openStream(schema, outputStream, opts = {}) {
         const envelopeWriter = await ParquetEnvelopeWriter.openStream(schema, outputStream, opts);
-        return new
+        return new ParquetEncoder(schema, envelopeWriter, opts);
     }
     /**
      * Create a new buffered parquet writer for a given envelope writer
@@ -166,7 +159,7 @@ class ParquetWriter {
         this.envelopeWriter.setPageSize(cnt);
     }
 }
-exports.
+exports.ParquetEncoder = ParquetEncoder;
 /**
  * Create a parquet file from a schema and a number of row groups. This class
  * performs direct, unbuffered writes to the underlying output stream and is
@@ -237,31 +230,40 @@ class ParquetEnvelopeWriter {
 exports.ParquetEnvelopeWriter = ParquetEnvelopeWriter;
 /**
  * Create a parquet transform stream
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
-
-
-
-
-
-
-
-
+export class ParquetTransformer<T> extends stream.Transform {
+  public writer: ParquetEncoder<T>;
+
+  constructor(schema: ParquetSchema, opts: ParquetEncoderOptions = {}) {
+    super({objectMode: true});
+
+    const writeProxy = (function (t: ParquetTransformer<any>) {
+      return async function (b: any): Promise<void> {
+        t.push(b);
+      };
+    })(this);
+
+    this.writer = new ParquetEncoder(
+      schema,
+      new ParquetEnvelopeWriter(schema, writeProxy, async () => {}, 0, opts),
+      opts
+    );
+  }
+
+  // tslint:disable-next-line:function-name
+  _transform(row: any, encoding: string, callback: (val?: any) => void): Promise<void> {
+    if (row) {
+      return this.writer.appendRow(row).then(callback);
     }
+    callback();
+    return Promise.resolve();
+  }
+
+  // tslint:disable-next-line:function-name
+  async _flush(callback: (val?: any) => void) {
+    await this.writer.close(callback);
+  }
 }
-
+ */
 /**
  * Encode a consecutive array of data using one of the parquet encodings
  */

package/dist/parquetjs/parser/parquet-reader.d.ts
@@ -1,68 +1,58 @@
-
-import { ParquetEnvelopeReader } from './parquet-envelope-reader';
-import { FileMetaData } from '../parquet-thrift';
+import type { ReadableFile } from '@loaders.gl/loader-utils';
 import { ParquetSchema } from '../schema/schema';
-import {
+import { ColumnChunk, FileMetaData, RowGroup } from '../parquet-thrift';
+import { ParquetBuffer, ParquetData, ParquetOptions } from '../schema/declare';
+export type ParquetReaderProps = {
+    defaultDictionarySize?: number;
+};
+/** Properties for initializing a ParquetRowGroupReader */
+export type ParquetIterationProps = {
+    /** Filter allowing some columns to be dropped */
+    columnList?: string[] | string[][];
+};
 /**
- *
- *
- *
- *
- * avoid leaking file descriptors.
+ * The parquet envelope reader allows direct, unbuffered access to the individual
+ * sections of the parquet file, namely the header, footer and the row groups.
+ * This class is intended for advanced/internal users; if you just want to retrieve
+ * rows from a parquet file use the ParquetReader instead
  */
-export declare class ParquetReader
+export declare class ParquetReader {
+    props: Required<ParquetReaderProps>;
+    file: ReadableFile;
+    metadata: Promise<FileMetaData> | null;
+    constructor(file: ReadableFile, props?: ParquetReaderProps);
+    close(): void;
+    /** Yield one row at a time */
+    rowIterator(props?: ParquetIterationProps): AsyncGenerator<import("../schema/declare").ParquetRecord, void, unknown>;
+    /** Yield one batch of rows at a time */
+    rowBatchIterator(props?: ParquetIterationProps): AsyncGenerator<import("../schema/declare").ParquetRecord[], void, unknown>;
+    /** Iterate over the raw row groups */
+    rowGroupIterator(props?: ParquetIterationProps): AsyncGenerator<ParquetBuffer, void, unknown>;
+    getRowCount(): Promise<number>;
+    getSchema(): Promise<ParquetSchema>;
     /**
-     *
-
-    static openBlob<T>(blob: Blob): Promise<ParquetReader<T>>;
-    /**
-     * return a new parquet reader initialized with a read function
-     */
-    static openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>>;
-    static openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>>;
-    metadata: FileMetaData;
-    envelopeReader: ParquetEnvelopeReader;
-    schema: ParquetSchema;
-    /**
-     * Create a new parquet reader from the file metadata and an envelope reader.
-     * It is not recommended to call this constructor directly except for advanced
-     * and internal use cases. Consider using one of the open{File,Buffer} methods
-     * instead
-     */
-    constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader);
-    /**
-     * Close this parquet reader. You MUST call this method once you're finished
-     * reading rows
-     */
-    close(): Promise<void>;
-    /**
-     * Return a cursor to the file. You may open more than one cursor and use
-     * them concurrently. All cursors become invalid once close() is called on
-     * the reader object.
-     *
-     * The required_columns parameter controls which columns are actually read
-     * from disk. An empty array or no value implies all columns. A list of column
-     * names means that only those columns should be loaded from disk.
-     */
-    getCursor(): ParquetCursor<T>;
-    getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;
-    getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;
-    /**
-     * Return the number of rows in this file. Note that the number of rows is
-     * not neccessarily equal to the number of rows in each column.
-     */
-    getRowCount(): number;
-    /**
-     * Returns the ParquetSchema for this file
+     * Returns the user (key/value) metadata for this file
+     * In parquet this is not stored on the schema like it is in arrow
      */
-
+    getSchemaMetadata(): Promise<Record<string, string>>;
+    getFileMetadata(): Promise<FileMetaData>;
+    /** Metadata is stored in the footer */
+    readHeader(): Promise<void>;
+    /** Metadata is stored in the footer */
+    readFooter(): Promise<FileMetaData>;
+    /** Data is stored in row groups (similar to Apache Arrow record batches) */
+    readRowGroup(schema: ParquetSchema, rowGroup: RowGroup, columnList: string[][]): Promise<ParquetBuffer>;
     /**
-     *
+     * Each row group contains column chunks for all the columns.
      */
-
+    readColumnChunk(schema: ParquetSchema, colChunk: ColumnChunk): Promise<ParquetData>;
     /**
-     *
+     * Getting dictionary for allows to flatten values by indices.
+     * @param dictionaryPageOffset
+     * @param options
+     * @param pagesOffset
+     * @returns
      */
-
+    getDictionary(dictionaryPageOffset: number, options: ParquetOptions, pagesOffset: number): Promise<string[]>;
 }
 //# sourceMappingURL=parquet-reader.d.ts.map
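The reader is rebuilt around a loaders.gl ReadableFile and async iterators in place of the old openBlob/openBuffer plus cursor API. A minimal sketch of the new surface; obtaining the ReadableFile and the column name are left abstract or hypothetical here, since no concrete implementation appears in this diff:

```ts
import type {ReadableFile} from '@loaders.gl/loader-utils';
// Hypothetical import path -- the public entry point for ParquetReader is not shown in this diff.
import {ParquetReader} from './parquetjs/parser/parquet-reader';

// Any ReadableFile implementation works (file system, Blob, HTTP range reads, ...).
declare const file: ReadableFile;

async function dumpRows(): Promise<void> {
  const reader = new ParquetReader(file);

  console.log('rows in file:', await reader.getRowCount());

  // Row-by-row iteration; columnList drops every column not listed ('name' is a placeholder).
  for await (const row of reader.rowIterator({columnList: ['name']})) {
    console.log(row);
  }

  // rowBatchIterator()/rowGroupIterator() yield whole batches / raw row groups instead.
  reader.close();
}
```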
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"parquet-reader.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-reader.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"parquet-reader.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-reader.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAC,YAAY,EAAC,MAAM,0BAA0B,CAAC;AAE3D,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAK/C,OAAO,EAAC,WAAW,EAAoB,YAAY,EAAE,QAAQ,EAAO,MAAM,mBAAmB,CAAC;AAC9F,OAAO,EACL,aAAa,EAEb,WAAW,EAEX,cAAc,EACf,MAAM,mBAAmB,CAAC;AAI3B,MAAM,MAAM,kBAAkB,GAAG;IAC/B,qBAAqB,CAAC,EAAE,MAAM,CAAC;CAChC,CAAC;AAEF,0DAA0D;AAC1D,MAAM,MAAM,qBAAqB,GAAG;IAClC,iDAAiD;IACjD,UAAU,CAAC,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,EAAE,CAAC;CACpC,CAAC;AAMF;;;;;GAKG;AACH,qBAAa,aAAa;IACxB,KAAK,EAAE,QAAQ,CAAC,kBAAkB,CAAC,CAAC;IACpC,IAAI,EAAE,YAAY,CAAC;IACnB,QAAQ,EAAE,OAAO,CAAC,YAAY,CAAC,GAAG,IAAI,CAAQ;gBAElC,IAAI,EAAE,YAAY,EAAE,KAAK,CAAC,EAAE,kBAAkB;IAK1D,KAAK,IAAI,IAAI;IAOb,8BAA8B;IACvB,WAAW,CAAC,KAAK,CAAC,EAAE,qBAAqB;IAShD,wCAAwC;IACjC,gBAAgB,CAAC,KAAK,CAAC,EAAE,qBAAqB;IAOrD,sCAAsC;IAC/B,gBAAgB,CAAC,KAAK,CAAC,EAAE,qBAAqB;IAqB/C,WAAW,IAAI,OAAO,CAAC,MAAM,CAAC;IAK9B,SAAS,IAAI,OAAO,CAAC,aAAa,CAAC;IAQzC;;;OAGG;IACG,iBAAiB,IAAI,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IASpD,eAAe,IAAI,OAAO,CAAC,YAAY,CAAC;IAU9C,uCAAuC;IACjC,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAajC,uCAAuC;IACjC,UAAU,IAAI,OAAO,CAAC,YAAY,CAAC;IAsBzC,4EAA4E;IACtE,YAAY,CAChB,MAAM,EAAE,aAAa,EACrB,QAAQ,EAAE,QAAQ,EAClB,UAAU,EAAE,MAAM,EAAE,EAAE,GACrB,OAAO,CAAC,aAAa,CAAC;IAgBzB;;OAEG;IACG,eAAe,CAAC,MAAM,EAAE,aAAa,EAAE,QAAQ,EAAE,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;IAoDzF;;;;;;OAMG;IACG,aAAa,CACjB,oBAAoB,EAAE,MAAM,EAC5B,OAAO,EAAE,cAAc,EACvB,WAAW,EAAE,MAAM,GAClB,OAAO,CAAC,MAAM,EAAE,CAAC;CAuBrB"}
|