@loaders.gl/parquet 3.4.0-alpha.1 → 3.4.0-alpha.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +22 -29
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/constants.js +0 -2
- package/dist/es5/constants.js.map +1 -1
- package/dist/es5/index.js +47 -9
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/arrow/convert-columns-to-row-group.js +2 -0
- package/dist/es5/lib/arrow/convert-columns-to-row-group.js.map +1 -0
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js +19 -0
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -0
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js +98 -0
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -0
- package/dist/es5/lib/{convert-schema.js → arrow/convert-schema-to-parquet.js} +4 -32
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -0
- package/dist/es5/lib/geo/decode-geo-metadata.js +77 -0
- package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -0
- package/dist/es5/lib/geo/geoparquet-schema.js +83 -0
- package/dist/es5/lib/geo/geoparquet-schema.js.map +1 -0
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js +173 -0
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -0
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js +150 -0
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -0
- package/dist/es5/lib/wasm/encode-parquet-wasm.js +14 -16
- package/dist/es5/lib/wasm/encode-parquet-wasm.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/index.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js +16 -18
- package/dist/es5/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js +6 -8
- package/dist/es5/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
- package/dist/es5/lib/wasm/parse-parquet-wasm.js +16 -18
- package/dist/es5/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/es5/lib/wip/convert-schema-deep.java.disabled +910 -0
- package/dist/es5/lib/wip/convert-schema-deep.rs.disabled +976 -0
- package/dist/es5/parquet-loader.js +4 -3
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -2
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/codecs/dictionary.js.map +1 -1
- package/dist/es5/parquetjs/codecs/index.js +0 -1
- package/dist/es5/parquetjs/codecs/index.js.map +1 -1
- package/dist/es5/parquetjs/codecs/plain.js +0 -3
- package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
- package/dist/es5/parquetjs/codecs/rle.js +0 -4
- package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
- package/dist/es5/parquetjs/compression.js +58 -58
- package/dist/es5/parquetjs/compression.js.map +1 -1
- package/dist/es5/parquetjs/encoder/parquet-encoder.js +625 -0
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/PageType.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/PageType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/Type.js +4 -4
- package/dist/es5/parquetjs/parquet-thrift/Type.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/es5/parquetjs/parser/decoders.js +244 -261
- package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
- package/dist/es5/parquetjs/parser/parquet-reader.js +555 -256
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/schema/schema.js +2 -12
- package/dist/es5/parquetjs/schema/schema.js.map +1 -1
- package/dist/es5/parquetjs/schema/shred.js +40 -46
- package/dist/es5/parquetjs/schema/shred.js.map +1 -1
- package/dist/es5/parquetjs/schema/types.js +6 -11
- package/dist/es5/parquetjs/schema/types.js.map +1 -1
- package/dist/es5/parquetjs/utils/file-utils.js +2 -4
- package/dist/es5/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/es5/parquetjs/utils/read-utils.js +0 -7
- package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/es5/workers/parquet-worker.js.map +1 -1
- package/dist/esm/bundle.js +0 -1
- package/dist/esm/bundle.js.map +1 -1
- package/dist/esm/constants.js +0 -3
- package/dist/esm/constants.js.map +1 -1
- package/dist/esm/index.js +11 -9
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/arrow/convert-columns-to-row-group.js +2 -0
- package/dist/esm/lib/arrow/convert-columns-to-row-group.js.map +1 -0
- package/dist/esm/lib/arrow/convert-row-group-to-columns.js +8 -0
- package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -0
- package/dist/esm/lib/{convert-schema.js → arrow/convert-schema-from-parquet.js} +30 -16
- package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -0
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js +37 -0
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -0
- package/dist/esm/lib/geo/decode-geo-metadata.js +58 -0
- package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -0
- package/dist/esm/lib/geo/geoparquet-schema.js +76 -0
- package/dist/esm/lib/geo/geoparquet-schema.js.map +1 -0
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js +35 -0
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -0
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js +18 -0
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -0
- package/dist/esm/lib/wasm/encode-parquet-wasm.js +0 -1
- package/dist/esm/lib/wasm/encode-parquet-wasm.js.map +1 -1
- package/dist/esm/lib/wasm/load-wasm/load-wasm-browser.js +0 -1
- package/dist/esm/lib/wasm/load-wasm/load-wasm-browser.js.map +1 -1
- package/dist/esm/lib/wasm/load-wasm/load-wasm-node.js.map +1 -1
- package/dist/esm/lib/wasm/parse-parquet-wasm.js +0 -3
- package/dist/esm/lib/wasm/parse-parquet-wasm.js.map +1 -1
- package/dist/esm/lib/wip/convert-schema-deep.java.disabled +910 -0
- package/dist/esm/lib/wip/convert-schema-deep.rs.disabled +976 -0
- package/dist/esm/parquet-loader.js +4 -4
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -3
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -2
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -2
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/codecs/dictionary.js.map +1 -1
- package/dist/esm/parquetjs/codecs/index.js +0 -2
- package/dist/esm/parquetjs/codecs/index.js.map +1 -1
- package/dist/esm/parquetjs/codecs/plain.js +0 -4
- package/dist/esm/parquetjs/codecs/plain.js.map +1 -1
- package/dist/esm/parquetjs/codecs/rle.js +0 -6
- package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
- package/dist/esm/parquetjs/compression.js +10 -10
- package/dist/esm/parquetjs/compression.js.map +1 -1
- package/dist/esm/parquetjs/encoder/{writer.js → parquet-encoder.js} +6 -74
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -0
- package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/BsonType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/BsonType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DateType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/DateType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DecimalType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/Encoding.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/Encoding.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/EnumType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/EnumType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/IntType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/JsonType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/JsonType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/KeyValue.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/ListType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/ListType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/LogicalType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/MapType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MapType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/NullType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/NullType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageHeader.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageLocation.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/PageType.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/PageType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/RowGroup.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/Statistics.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/StringType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/StringType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TimeType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TimestampType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/Type.js +3 -4
- package/dist/esm/parquetjs/parquet-thrift/Type.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/UUIDType.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/UUIDType.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/index.js +0 -1
- package/dist/esm/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/esm/parquetjs/parser/decoders.js +1 -18
- package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
- package/dist/esm/parquetjs/parser/parquet-reader.js +153 -80
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js +0 -1
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/schema/schema.js +0 -10
- package/dist/esm/parquetjs/schema/schema.js.map +1 -1
- package/dist/esm/parquetjs/schema/shred.js +42 -48
- package/dist/esm/parquetjs/schema/shred.js.map +1 -1
- package/dist/esm/parquetjs/schema/types.js +6 -10
- package/dist/esm/parquetjs/schema/types.js.map +1 -1
- package/dist/esm/parquetjs/utils/file-utils.js +1 -2
- package/dist/esm/parquetjs/utils/file-utils.js.map +1 -1
- package/dist/esm/parquetjs/utils/read-utils.js +0 -8
- package/dist/esm/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/index.d.ts +24 -4
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +26 -9
- package/dist/lib/arrow/convert-columns-to-row-group.d.ts +1 -0
- package/dist/lib/arrow/convert-columns-to-row-group.d.ts.map +1 -0
- package/dist/lib/arrow/convert-columns-to-row-group.js +1 -0
- package/dist/lib/arrow/convert-row-group-to-columns.d.ts +4 -0
- package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -0
- package/dist/lib/arrow/convert-row-group-to-columns.js +12 -0
- package/dist/lib/arrow/convert-schema-from-parquet.d.ts +9 -0
- package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -0
- package/dist/lib/{convert-schema.js → arrow/convert-schema-from-parquet.js} +30 -18
- package/dist/lib/arrow/convert-schema-to-parquet.d.ts +7 -0
- package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -0
- package/dist/lib/arrow/convert-schema-to-parquet.js +72 -0
- package/dist/lib/geo/decode-geo-metadata.d.ts +31 -0
- package/dist/lib/geo/decode-geo-metadata.d.ts.map +1 -0
- package/dist/lib/geo/decode-geo-metadata.js +73 -0
- package/dist/lib/geo/geoparquet-schema.d.ts +80 -0
- package/dist/lib/geo/geoparquet-schema.d.ts.map +1 -0
- package/dist/lib/geo/geoparquet-schema.js +69 -0
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts +5 -0
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -0
- package/dist/lib/parsers/parse-parquet-to-columns.js +40 -0
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts +4 -0
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -0
- package/dist/lib/parsers/parse-parquet-to-rows.js +40 -0
- package/dist/parquet-loader.d.ts +2 -0
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +3 -1
- package/dist/parquet-worker.js +25 -32
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquetjs/compression.d.ts.map +1 -1
- package/dist/parquetjs/compression.js +16 -5
- package/dist/parquetjs/encoder/{writer.d.ts → parquet-encoder.d.ts} +10 -19
- package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
- package/dist/parquetjs/encoder/{writer.js → parquet-encoder.js} +39 -37
- package/dist/parquetjs/parser/parquet-reader.d.ts +47 -57
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +1 -1
- package/dist/parquetjs/parser/parquet-reader.js +168 -102
- package/dist/parquetjs/schema/declare.d.ts +14 -7
- package/dist/parquetjs/schema/declare.d.ts.map +1 -1
- package/dist/parquetjs/schema/declare.js +2 -0
- package/dist/parquetjs/schema/shred.d.ts +115 -0
- package/dist/parquetjs/schema/shred.d.ts.map +1 -1
- package/dist/parquetjs/schema/shred.js +161 -43
- package/dist/parquetjs/schema/types.d.ts +2 -2
- package/dist/parquetjs/schema/types.d.ts.map +1 -1
- package/dist/parquetjs/schema/types.js +4 -6
- package/dist/parquetjs/utils/file-utils.d.ts +3 -4
- package/dist/parquetjs/utils/file-utils.d.ts.map +1 -1
- package/dist/parquetjs/utils/file-utils.js +2 -5
- package/package.json +8 -7
- package/src/index.ts +24 -4
- package/src/lib/arrow/convert-columns-to-row-group.ts +0 -0
- package/src/lib/arrow/convert-row-group-to-columns.ts +15 -0
- package/src/lib/{convert-schema.ts → arrow/convert-schema-from-parquet.ts} +41 -22
- package/src/lib/arrow/convert-schema-to-parquet.ts +102 -0
- package/src/lib/geo/decode-geo-metadata.ts +99 -0
- package/src/lib/geo/geoparquet-schema.ts +69 -0
- package/src/lib/parsers/parse-parquet-to-columns.ts +49 -0
- package/src/lib/parsers/parse-parquet-to-rows.ts +40 -0
- package/src/lib/wip/convert-schema-deep.java.disabled +910 -0
- package/src/lib/wip/convert-schema-deep.rs.disabled +976 -0
- package/src/parquet-loader.ts +5 -1
- package/src/parquetjs/compression.ts +14 -1
- package/src/parquetjs/encoder/{writer.ts → parquet-encoder.ts} +22 -28
- package/src/parquetjs/parser/parquet-reader.ts +239 -122
- package/src/parquetjs/schema/declare.ts +17 -9
- package/src/parquetjs/schema/shred.ts +157 -28
- package/src/parquetjs/schema/types.ts +25 -30
- package/src/parquetjs/utils/file-utils.ts +3 -4
- package/dist/es5/lib/convert-schema.js.map +0 -1
- package/dist/es5/lib/parse-parquet.js +0 -130
- package/dist/es5/lib/parse-parquet.js.map +0 -1
- package/dist/es5/lib/read-array-buffer.js +0 -43
- package/dist/es5/lib/read-array-buffer.js.map +0 -1
- package/dist/es5/parquetjs/encoder/writer.js +0 -757
- package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
- package/dist/es5/parquetjs/file.js +0 -94
- package/dist/es5/parquetjs/file.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -183
- package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -327
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/es5/parquetjs/utils/buffer-utils.js +0 -19
- package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/esm/lib/convert-schema.js.map +0 -1
- package/dist/esm/lib/parse-parquet.js +0 -25
- package/dist/esm/lib/parse-parquet.js.map +0 -1
- package/dist/esm/lib/read-array-buffer.js +0 -10
- package/dist/esm/lib/read-array-buffer.js.map +0 -1
- package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
- package/dist/esm/parquetjs/file.js +0 -81
- package/dist/esm/parquetjs/file.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -78
- package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -129
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/esm/parquetjs/utils/buffer-utils.js +0 -13
- package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/lib/convert-schema.d.ts +0 -8
- package/dist/lib/convert-schema.d.ts.map +0 -1
- package/dist/lib/parse-parquet.d.ts +0 -4
- package/dist/lib/parse-parquet.d.ts.map +0 -1
- package/dist/lib/parse-parquet.js +0 -28
- package/dist/lib/read-array-buffer.d.ts +0 -19
- package/dist/lib/read-array-buffer.d.ts.map +0 -1
- package/dist/lib/read-array-buffer.js +0 -29
- package/dist/parquetjs/encoder/writer.d.ts.map +0 -1
- package/dist/parquetjs/file.d.ts +0 -10
- package/dist/parquetjs/file.d.ts.map +0 -1
- package/dist/parquetjs/file.js +0 -99
- package/dist/parquetjs/parser/parquet-cursor.d.ts +0 -36
- package/dist/parquetjs/parser/parquet-cursor.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-cursor.js +0 -74
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts +0 -40
- package/dist/parquetjs/parser/parquet-envelope-reader.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-envelope-reader.js +0 -136
- package/dist/parquetjs/utils/buffer-utils.d.ts +0 -10
- package/dist/parquetjs/utils/buffer-utils.d.ts.map +0 -1
- package/dist/parquetjs/utils/buffer-utils.js +0 -22
- package/src/lib/parse-parquet.ts +0 -27
- package/src/lib/read-array-buffer.ts +0 -31
- package/src/parquetjs/file.ts +0 -90
- package/src/parquetjs/parser/parquet-cursor.ts +0 -94
- package/src/parquetjs/parser/parquet-envelope-reader.ts +0 -199
- package/src/parquetjs/utils/buffer-utils.ts +0 -18
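The summary above captures the shape of this refactor: the single lib/parse-parquet entry point is replaced by dedicated row- and column-oriented parsers under lib/parsers, schema conversion moves under lib/arrow and gains an Arrow-to-Parquet direction, GeoParquet metadata handling appears under lib/geo, and the parquetjs writer is renamed to parquet-encoder. Outward consumption keeps the same shape; as a rough usage sketch following the standard loaders.gl pattern (not taken from this diff; the option shown is illustrative):

import {load} from '@loaders.gl/core';
import {ParquetLoader} from '@loaders.gl/parquet';

// Parse a Parquet file into rows on the main thread
const data = await load('example.parquet', ParquetLoader, {worker: false});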
package/dist/esm/parquetjs/parser/parquet-cursor.js
DELETED

@@ -1,78 +0,0 @@
-import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
-let _Symbol$asyncIterator;
-
-import { materializeRecords } from '../schema/shred';
-
-_Symbol$asyncIterator = Symbol.asyncIterator;
-export class ParquetCursor {
-  constructor(metadata, envelopeReader, schema, columnList) {
-    _defineProperty(this, "metadata", void 0);
-    _defineProperty(this, "envelopeReader", void 0);
-    _defineProperty(this, "schema", void 0);
-    _defineProperty(this, "columnList", void 0);
-    _defineProperty(this, "rowGroup", []);
-    _defineProperty(this, "rowGroupIndex", void 0);
-    this.metadata = metadata;
-    this.envelopeReader = envelopeReader;
-    this.schema = schema;
-    this.columnList = columnList;
-    this.rowGroupIndex = 0;
-  }
-
-  async next() {
-    if (this.rowGroup.length === 0) {
-      if (this.rowGroupIndex >= this.metadata.row_groups.length) {
-        return null;
-      }
-      const rowBuffer = await this.envelopeReader.readRowGroup(this.schema, this.metadata.row_groups[this.rowGroupIndex], this.columnList);
-      this.rowGroup = materializeRecords(this.schema, rowBuffer);
-      this.rowGroupIndex++;
-    }
-    return this.rowGroup.shift();
-  }
-
-  rewind() {
-    this.rowGroup = [];
-    this.rowGroupIndex = 0;
-  }
-
-  [_Symbol$asyncIterator]() {
-    let done = false;
-    return {
-      next: async () => {
-        if (done) {
-          return {
-            done,
-            value: null
-          };
-        }
-        const value = await this.next();
-        if (value === null) {
-          return {
-            done: true,
-            value
-          };
-        }
-        return {
-          done: false,
-          value
-        };
-      },
-      return: async () => {
-        done = true;
-        return {
-          done,
-          value: null
-        };
-      },
-      throw: async () => {
-        done = true;
-        return {
-          done: true,
-          value: null
-        };
-      }
-    };
-  }
-}
-//# sourceMappingURL=parquet-cursor.js.map
package/dist/esm/parquetjs/parser/parquet-cursor.js.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"parquet-cursor.js","names":[...],"sources":["../../../../src/parquetjs/parser/parquet-cursor.ts"],"sourcesContent":[...],"mappings":"..."}
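The deleted cursor implemented the async-iteration protocol by hand, materializing one row group at a time inside next(). For context, a minimal consumption sketch of this removed API (reader setup follows the pre-refactor ParquetReader seen in the deleted parse-parquet.js further below):

const reader = await ParquetReader.openBlob(blob);
const cursor = reader.getCursor();
// ParquetCursor is AsyncIterable, so for await drains the file row by row,
// ending when next() returns null at the last row group
for await (const row of cursor) {
  console.log(row);
}
await reader.close();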
package/dist/esm/parquetjs/parser/parquet-envelope-reader.js
DELETED

@@ -1,129 +0,0 @@
-import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
-
-import { PARQUET_MAGIC, PARQUET_MAGIC_ENCRYPTED } from '../../constants';
-import { CompressionCodec, Type } from '../parquet-thrift';
-import { decodeFileMetadata, getThriftEnum, fieldIndexOf } from '../utils/read-utils';
-import { decodeDataPages, decodePage } from './decoders';
-const DEFAULT_DICTIONARY_SIZE = 1e6;
-
-export class ParquetEnvelopeReader {
-
-  static async openBuffer(buffer) {
-    const readFn = (position, length) => Promise.resolve(buffer.slice(position, position + length));
-    const closeFn = () => Promise.resolve();
-    return new ParquetEnvelopeReader(readFn, closeFn, buffer.length);
-  }
-  constructor(read, close, fileSize, options) {
-    _defineProperty(this, "read", void 0);
-    _defineProperty(this, "close", void 0);
-    _defineProperty(this, "fileSize", void 0);
-    _defineProperty(this, "defaultDictionarySize", void 0);
-    this.read = read;
-    this.close = close;
-    this.fileSize = fileSize;
-    this.defaultDictionarySize = (options === null || options === void 0 ? void 0 : options.defaultDictionarySize) || DEFAULT_DICTIONARY_SIZE;
-  }
-  async readHeader() {
-    const buffer = await this.read(0, PARQUET_MAGIC.length);
-    const magic = buffer.toString();
-    switch (magic) {
-      case PARQUET_MAGIC:
-        break;
-      case PARQUET_MAGIC_ENCRYPTED:
-        throw new Error('Encrypted parquet file not supported');
-      default:
-        throw new Error("Invalid parquet file (magic=".concat(magic, ")"));
-    }
-  }
-  async readRowGroup(schema, rowGroup, columnList) {
-    const buffer = {
-      rowCount: Number(rowGroup.num_rows),
-      columnData: {}
-    };
-    for (const colChunk of rowGroup.columns) {
-      const colMetadata = colChunk.meta_data;
-      const colKey = colMetadata === null || colMetadata === void 0 ? void 0 : colMetadata.path_in_schema;
-      if (columnList.length > 0 && fieldIndexOf(columnList, colKey) < 0) {
-        continue;
-      }
-
-      buffer.columnData[colKey.join()] = await this.readColumnChunk(schema, colChunk);
-    }
-    return buffer;
-  }
-
-  async readColumnChunk(schema, colChunk) {
-    var _colChunk$meta_data, _colChunk$meta_data2, _colChunk$meta_data3, _colChunk$meta_data4, _colChunk$meta_data5, _colChunk$meta_data7, _colChunk$meta_data8, _options$dictionary;
-    if (colChunk.file_path !== undefined && colChunk.file_path !== null) {
-      throw new Error('external references are not supported');
-    }
-    const field = schema.findField((_colChunk$meta_data = colChunk.meta_data) === null || _colChunk$meta_data === void 0 ? void 0 : _colChunk$meta_data.path_in_schema);
-    const type = getThriftEnum(Type, (_colChunk$meta_data2 = colChunk.meta_data) === null || _colChunk$meta_data2 === void 0 ? void 0 : _colChunk$meta_data2.type);
-    if (type !== field.primitiveType) {
-      throw new Error("chunk type not matching schema: ".concat(type));
-    }
-    const compression = getThriftEnum(CompressionCodec, (_colChunk$meta_data3 = colChunk.meta_data) === null || _colChunk$meta_data3 === void 0 ? void 0 : _colChunk$meta_data3.codec);
-    const pagesOffset = Number((_colChunk$meta_data4 = colChunk.meta_data) === null || _colChunk$meta_data4 === void 0 ? void 0 : _colChunk$meta_data4.data_page_offset);
-    let pagesSize = Number((_colChunk$meta_data5 = colChunk.meta_data) === null || _colChunk$meta_data5 === void 0 ? void 0 : _colChunk$meta_data5.total_compressed_size);
-    if (!colChunk.file_path) {
-      var _colChunk$meta_data6;
-      pagesSize = Math.min(this.fileSize - pagesOffset, Number((_colChunk$meta_data6 = colChunk.meta_data) === null || _colChunk$meta_data6 === void 0 ? void 0 : _colChunk$meta_data6.total_compressed_size));
-    }
-    const options = {
-      type,
-      rLevelMax: field.rLevelMax,
-      dLevelMax: field.dLevelMax,
-      compression,
-      column: field,
-      numValues: (_colChunk$meta_data7 = colChunk.meta_data) === null || _colChunk$meta_data7 === void 0 ? void 0 : _colChunk$meta_data7.num_values,
-      dictionary: []
-    };
-    let dictionary;
-    const dictionaryPageOffset = colChunk === null || colChunk === void 0 ? void 0 : (_colChunk$meta_data8 = colChunk.meta_data) === null || _colChunk$meta_data8 === void 0 ? void 0 : _colChunk$meta_data8.dictionary_page_offset;
-    if (dictionaryPageOffset) {
-      const dictionaryOffset = Number(dictionaryPageOffset);
-      dictionary = await this.getDictionary(dictionaryOffset, options, pagesOffset);
-    }
-    dictionary = (_options$dictionary = options.dictionary) !== null && _options$dictionary !== void 0 && _options$dictionary.length ? options.dictionary : dictionary;
-    const pagesBuf = await this.read(pagesOffset, pagesSize);
-    return await decodeDataPages(pagesBuf, {
-      ...options,
-      dictionary
-    });
-  }
-
-  async getDictionary(dictionaryPageOffset, options, pagesOffset) {
-    if (dictionaryPageOffset === 0) {
-
-      return [];
-    }
-    const dictionarySize = Math.min(this.fileSize - dictionaryPageOffset, this.defaultDictionarySize);
-    const pagesBuf = await this.read(dictionaryPageOffset, dictionarySize);
-    const cursor = {
-      buffer: pagesBuf,
-      offset: 0,
-      size: pagesBuf.length
-    };
-    const decodedPage = await decodePage(cursor, options);
-    return decodedPage.dictionary;
-  }
-  async readFooter() {
-    const trailerLen = PARQUET_MAGIC.length + 4;
-    const trailerBuf = await this.read(this.fileSize - trailerLen, trailerLen);
-    const magic = trailerBuf.slice(4).toString();
-    if (magic !== PARQUET_MAGIC) {
-      throw new Error("Not a valid parquet file (magic=\"".concat(magic, ")"));
-    }
-    const metadataSize = trailerBuf.readUInt32LE(0);
-    const metadataOffset = this.fileSize - metadataSize - trailerLen;
-    if (metadataOffset < PARQUET_MAGIC.length) {
-      throw new Error("Invalid metadata size ".concat(metadataOffset));
-    }
-    const metadataBuf = await this.read(metadataOffset, metadataSize);
-    const {
-      metadata
-    } = decodeFileMetadata(metadataBuf);
-    return metadata;
-  }
-}
-//# sourceMappingURL=parquet-envelope-reader.js.map
package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"parquet-envelope-reader.js","names":[...],"sources":["../../../../src/parquetjs/parser/parquet-envelope-reader.ts"],"sourcesContent":[...],"mappings":"..."}
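The readFooter method in the deleted reader above encodes the standard Parquet trailer layout: the file ends with a 4-byte little-endian metadata length followed by the 4-byte "PAR1" magic, so the Thrift-encoded FileMetaData block sits at fileSize - metadataSize - 8. A standalone sketch of that arithmetic (locateFooter is a hypothetical helper name; trailerBuf is assumed to hold the last 8 bytes of the file):

const PARQUET_MAGIC = 'PAR1';
const trailerLen = PARQUET_MAGIC.length + 4; // 4-byte length + 4-byte magic

function locateFooter(fileSize, trailerBuf) {
  if (trailerBuf.slice(4).toString() !== PARQUET_MAGIC) {
    throw new Error('Not a valid parquet file');
  }
  const metadataSize = trailerBuf.readUInt32LE(0);
  // The metadata block immediately precedes the 8-byte trailer
  return {metadataOffset: fileSize - metadataSize - trailerLen, metadataSize};
}

The deleted reader did the same work inline and then handed the bytes to decodeFileMetadata.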
package/dist/esm/parquetjs/utils/buffer-utils.js
DELETED

@@ -1,13 +0,0 @@
-
-export function toArrayBuffer(buffer) {
-  if (Buffer.isBuffer(buffer)) {
-    const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
-    return typedArray.slice().buffer;
-  }
-  return buffer;
-}
-
-export function toBuffer(arrayBuffer) {
-  return Buffer.from(arrayBuffer);
-}
-//# sourceMappingURL=buffer-utils.js.map
package/dist/esm/parquetjs/utils/buffer-utils.js.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"buffer-utils.js","names":[...],"sources":["../../../../src/parquetjs/utils/buffer-utils.ts"],"sourcesContent":[...],"mappings":"..."}
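The copy through Uint8Array in the removed toArrayBuffer is deliberate: a Node Buffer can be a view into a larger pooled allocation, so returning buffer.buffer directly could expose unrelated bytes. A quick round-trip sketch:

const buf = Buffer.from('PAR1'); // may live inside Node's shared Buffer pool
const ab = toArrayBuffer(buf);   // copies exactly the 4 viewed bytes
const back = toBuffer(ab);       // Buffer.from wraps the ArrayBuffer as a Buffer
console.log(back.toString());    // 'PAR1'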
package/dist/lib/convert-schema.d.ts
DELETED

@@ -1,8 +0,0 @@
-import type { ParquetSchema } from '../parquetjs/schema/schema';
-import type { ParquetType } from '../parquetjs/schema/declare';
-import { Schema, DataType } from '@loaders.gl/schema';
-export declare const PARQUET_TYPE_MAPPING: {
-    [type in ParquetType]: typeof DataType;
-};
-export declare function convertParquetToArrowSchema(parquetSchema: ParquetSchema): Schema;
-//# sourceMappingURL=convert-schema.d.ts.map
package/dist/lib/convert-schema.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"convert-schema.d.ts","sourceRoot":"","sources":["../../src/lib/convert-schema.ts"],"names":[],"mappings":"..."}
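This deleted declaration is the old one-way conversion (Parquet schema to a loaders.gl Schema); per the summary list it is superseded by convert-schema-from-parquet and convert-schema-to-parquet under lib/arrow. A hedged sketch of how the removed function was called (the schema-definition shape follows the parquets fork this package is based on; field names are illustrative):

import {ParquetSchema} from '../parquetjs/schema/schema';

const parquetSchema = new ParquetSchema({
  name: {type: 'UTF8'},
  quantity: {type: 'INT64'}
});
const arrowSchema = convertParquetToArrowSchema(parquetSchema);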
package/dist/lib/parse-parquet.d.ts
DELETED

@@ -1,4 +0,0 @@
-import type { ParquetLoaderOptions } from '../parquet-loader';
-export declare function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions): Promise<any[][] | null>;
-export declare function parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions): AsyncGenerator<any[][], void, unknown>;
-//# sourceMappingURL=parse-parquet.d.ts.map
package/dist/lib/parse-parquet.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"parse-parquet.d.ts","sourceRoot":"","sources":["../../src/lib/parse-parquet.ts"],"names":[],"mappings":"..."}
package/dist/lib/parse-parquet.js
DELETED

@@ -1,28 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseParquetFileInBatches = exports.parseParquet = void 0;
-const parquet_reader_1 = require("../parquetjs/parser/parquet-reader");
-async function parseParquet(arrayBuffer, options) {
-    const blob = new Blob([arrayBuffer]);
-    for await (const batch of parseParquetFileInBatches(blob, options)) {
-        return batch;
-    }
-    return null;
-}
-exports.parseParquet = parseParquet;
-async function* parseParquetFileInBatches(blob, options) {
-    const reader = await parquet_reader_1.ParquetReader.openBlob(blob);
-    const rows = [];
-    try {
-        const cursor = reader.getCursor();
-        let record;
-        while ((record = await cursor.next())) {
-            rows.push(record);
-        }
-    }
-    finally {
-        await reader.close();
-    }
-    yield rows;
-}
-exports.parseParquetFileInBatches = parseParquetFileInBatches;
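Note the design of the removed generator: despite its name, parseParquetFileInBatches drains the entire cursor into one array and yields a single batch, which is why parseParquet can simply return the first batch it sees. A minimal calling sketch against this removed API:

// Equivalent results: one batch containing every row in the file
const rows = await parseParquet(arrayBuffer);

for await (const batch of parseParquetFileInBatches(blob)) {
  console.log(batch.length); // total row count of the file
}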
package/dist/lib/read-array-buffer.d.ts
DELETED

@@ -1,19 +0,0 @@
-export declare function readArrayBuffer(file: Blob | ArrayBuffer | any, start: number, length: number): Promise<ArrayBuffer>;
-/**
- * Read a slice of a Blob or File, without loading the entire file into memory
- * The trick when reading File objects is to read successive "slices" of the File
- * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields
- * Actually reading from file happens in `readAsArrayBuffer`
- * @param blob to read
-export async function readBlob(blob: Blob): Promise<ArrayBuffer> {
-  return await new Promise((resolve, reject) => {
-    const fileReader = new FileReader();
-    fileReader.onload = (event: ProgressEvent<FileReader>) =>
-      resolve(event?.target?.result as ArrayBuffer);
-    // TODO - reject with a proper Error
-    fileReader.onerror = (error: ProgressEvent<FileReader>) => reject(error);
-    fileReader.readAsArrayBuffer(blob);
-  });
-}
-*/
-//# sourceMappingURL=read-array-buffer.d.ts.map
package/dist/lib/read-array-buffer.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"read-array-buffer.d.ts","sourceRoot":"","sources":["../../src/lib/read-array-buffer.ts"],"names":[],"mappings":"..."}
package/dist/lib/read-array-buffer.js
DELETED

@@ -1,29 +0,0 @@
-"use strict";
-// Random-Access read
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.readArrayBuffer = void 0;
-async function readArrayBuffer(file, start, length) {
-    if (file instanceof Blob) {
-        const slice = file.slice(start, start + length);
-        return await slice.arrayBuffer();
-    }
-    return await file.read(start, start + length);
-}
-exports.readArrayBuffer = readArrayBuffer;
-/**
- * Read a slice of a Blob or File, without loading the entire file into memory
- * The trick when reading File objects is to read successive "slices" of the File
- * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields
- * Actually reading from file happens in `readAsArrayBuffer`
- * @param blob to read
-export async function readBlob(blob: Blob): Promise<ArrayBuffer> {
-  return await new Promise((resolve, reject) => {
-    const fileReader = new FileReader();
-    fileReader.onload = (event: ProgressEvent<FileReader>) =>
-      resolve(event?.target?.result as ArrayBuffer);
-    // TODO - reject with a proper Error
-    fileReader.onerror = (error: ProgressEvent<FileReader>) => reject(error);
-    fileReader.readAsArrayBuffer(blob);
-  });
-}
-*/
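The removed readArrayBuffer relies on the File API behavior spelled out in its comment block: Blob.slice only records new start/end offsets, and bytes are actually read when arrayBuffer() is awaited, so large files can be read piecewise. A usage sketch fetching just a Parquet trailer (input is hypothetical):

const file = new Blob([someLargeParquetBytes]); // hypothetical input bytes
// Reads only the final 8 bytes (metadata length + 'PAR1' magic)
const trailer = await readArrayBuffer(file, file.size - 8, 8);
console.log(new Uint8Array(trailer).byteLength); // 8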
package/dist/parquetjs/encoder/writer.d.ts.map
DELETED

@@ -1 +0,0 @@
-{"version":3,"file":"writer.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/encoder/writer.ts"],"names":[],"mappings":"..."}
package/dist/parquetjs/file.d.ts
DELETED
@@ -1,10 +0,0 @@
-/// <reference types="node" />
-import fs from 'fs';
-export declare function fopen(filePath: any): Promise<unknown>;
-export declare function fstat(filePath: any): Promise<fs.Stats>;
-export declare function fread(fd: any, position: any, length: any): Promise<unknown>;
-export declare function fclose(fd: any): Promise<unknown>;
-export declare function oswrite(os: any, buf: any): Promise<void>;
-export declare function osclose(os: any): Promise<void>;
-export declare function osopen(path: any, opts: any): Promise<unknown>;
-//# sourceMappingURL=file.d.ts.map
package/dist/parquetjs/file.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../src/parquetjs/file.ts"],"names":[],"mappings":";AACA,OAAO,EAAE,MAAM,IAAI,CAAC;AAEpB,wBAAgB,KAAK,CAAC,QAAQ,KAAA,oBAU7B;AAED,wBAAgB,KAAK,CAAC,QAAQ,KAAA,qBAU7B;AAED,wBAAgB,KAAK,CAAC,EAAE,KAAA,EAAE,QAAQ,KAAA,EAAE,MAAM,KAAA,oBAYzC;AAED,wBAAgB,MAAM,CAAC,EAAE,KAAA,oBAUxB;AAED,wBAAgB,OAAO,CAAC,EAAE,KAAA,EAAE,GAAG,KAAA,GAAG,OAAO,CAAC,IAAI,CAAC,CAU9C;AAED,wBAAgB,OAAO,CAAC,EAAE,KAAA,GAAG,OAAO,CAAC,IAAI,CAAC,CAUzC;AAED,wBAAgB,MAAM,CAAC,IAAI,KAAA,EAAE,IAAI,KAAA,oBAYhC"}
package/dist/parquetjs/file.js
DELETED
@@ -1,99 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.osopen = exports.osclose = exports.oswrite = exports.fclose = exports.fread = exports.fstat = exports.fopen = void 0;
-// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
-const fs_1 = __importDefault(require("fs"));
-function fopen(filePath) {
-    return new Promise((resolve, reject) => {
-        fs_1.default.open(filePath, 'r', (err, fd) => {
-            if (err) {
-                reject(err);
-            }
-            else {
-                resolve(fd);
-            }
-        });
-    });
-}
-exports.fopen = fopen;
-function fstat(filePath) {
-    return new Promise((resolve, reject) => {
-        fs_1.default.stat(filePath, (err, stat) => {
-            if (err) {
-                reject(err);
-            }
-            else {
-                resolve(stat);
-            }
-        });
-    });
-}
-exports.fstat = fstat;
-function fread(fd, position, length) {
-    const buffer = Buffer.alloc(length);
-    return new Promise((resolve, reject) => {
-        fs_1.default.read(fd, buffer, 0, length, position, (err, bytesRead, buf) => {
-            if (err || bytesRead !== length) {
-                reject(err || Error('read failed'));
-            }
-            else {
-                resolve(buf);
-            }
-        });
-    });
-}
-exports.fread = fread;
-function fclose(fd) {
-    return new Promise((resolve, reject) => {
-        fs_1.default.close(fd, (err) => {
-            if (err) {
-                reject(err);
-            }
-            else {
-                resolve(err);
-            }
-        });
-    });
-}
-exports.fclose = fclose;
-function oswrite(os, buf) {
-    return new Promise((resolve, reject) => {
-        os.write(buf, (err) => {
-            if (err) {
-                reject(err);
-            }
-            else {
-                resolve();
-            }
-        });
-    });
-}
-exports.oswrite = oswrite;
-function osclose(os) {
-    return new Promise((resolve, reject) => {
-        os.close((err) => {
-            if (err) {
-                reject(err);
-            }
-            else {
-                resolve();
-            }
-        });
-    });
-}
-exports.osclose = osclose;
-function osopen(path, opts) {
-    return new Promise((resolve, reject) => {
-        const outputStream = fs_1.default.createWriteStream(path, opts);
-        outputStream.on('open', function (fd) {
-            resolve(outputStream);
-        });
-        outputStream.on('error', function (err) {
-            reject(err);
-        });
-    });
-}
-exports.osopen = osopen;
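Note: the deleted file.js hand-promisifies Node's callback-style fs API. For comparison, a minimal sketch of the read-side helpers expressed with the built-in fs/promises API (illustrative only, not part of this package):

// Minimal sketch, not from this package: the same helpers on fs/promises.
import {open, stat} from 'fs/promises';
import type {FileHandle} from 'fs/promises';
import type {Stats} from 'fs';

export function fopen(filePath: string): Promise<FileHandle> {
  // open() resolves with a FileHandle instead of a raw numeric fd.
  return open(filePath, 'r');
}

export function fstat(filePath: string): Promise<Stats> {
  return stat(filePath);
}

export async function fread(fd: FileHandle, position: number, length: number): Promise<Buffer> {
  const buffer = Buffer.alloc(length);
  // FileHandle.read() reports bytesRead, so the short-read check from the
  // deleted fread carries over directly.
  const {bytesRead} = await fd.read(buffer, 0, length, position);
  if (bytesRead !== length) {
    throw new Error('read failed');
  }
  return buffer;
}

export function fclose(fd: FileHandle): Promise<void> {
  return fd.close();
}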
package/dist/parquetjs/parser/parquet-cursor.d.ts
DELETED
@@ -1,36 +0,0 @@
-import { FileMetaData } from '../parquet-thrift';
-import { ParquetEnvelopeReader } from './parquet-envelope-reader';
-import { ParquetSchema } from '../schema/schema';
-import { ParquetRecord } from '../schema/declare';
-/**
- * A parquet cursor is used to retrieve rows from a parquet file in order
- */
-export declare class ParquetCursor<T> implements AsyncIterable<T> {
-    metadata: FileMetaData;
-    envelopeReader: ParquetEnvelopeReader;
-    schema: ParquetSchema;
-    columnList: string[][];
-    rowGroup: ParquetRecord[];
-    rowGroupIndex: number;
-    /**
-     * Create a new parquet reader from the file metadata and an envelope reader.
-     * It is usually not recommended to call this constructor directly except for
-     * advanced and internal use cases. Consider using getCursor() on the
-     * ParquetReader instead
-     */
-    constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader, schema: ParquetSchema, columnList: string[][]);
-    /**
-     * Retrieve the next row from the cursor. Returns a row or NULL if the end
-     * of the file was reached
-     */
-    next<T = any>(): Promise<T>;
-    /**
-     * Rewind the cursor the the beginning of the file
-     */
-    rewind(): void;
-    /**
-     * Implement AsyncIterable
-     */
-    [Symbol.asyncIterator](): AsyncIterator<T>;
-}
-//# sourceMappingURL=parquet-cursor.d.ts.map
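Note: since ParquetCursor implements AsyncIterable, rows can be consumed with for await...of. A minimal usage sketch (illustrative; the reader/getCursor wiring mentioned in the deleted class comment is assumed, not shown in this diff):

// Minimal usage sketch, not from this package; names are illustrative.
async function printRows(cursor: AsyncIterable<Record<string, unknown>>): Promise<void> {
  // Each iteration pulls the next materialized row until the cursor is exhausted.
  for await (const row of cursor) {
    console.log(row);
  }
}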
package/dist/parquetjs/parser/parquet-cursor.d.ts.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"parquet-cursor.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parser/parquet-cursor.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,YAAY,EAAC,MAAM,mBAAmB,CAAC;AAC/C,OAAO,EAAC,qBAAqB,EAAC,MAAM,2BAA2B,CAAC;AAChE,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAC/C,OAAO,EAAC,aAAa,EAAC,MAAM,mBAAmB,CAAC;AAGhD;;GAEG;AACH,qBAAa,aAAa,CAAC,CAAC,CAAE,YAAW,aAAa,CAAC,CAAC,CAAC;IAChD,QAAQ,EAAE,YAAY,CAAC;IACvB,cAAc,EAAE,qBAAqB,CAAC;IACtC,MAAM,EAAE,aAAa,CAAC;IACtB,UAAU,EAAE,MAAM,EAAE,EAAE,CAAC;IACvB,QAAQ,EAAE,aAAa,EAAE,CAAM;IAC/B,aAAa,EAAE,MAAM,CAAC;IAE7B;;;;;OAKG;gBAED,QAAQ,EAAE,YAAY,EACtB,cAAc,EAAE,qBAAqB,EACrC,MAAM,EAAE,aAAa,EACrB,UAAU,EAAE,MAAM,EAAE,EAAE;IASxB;;;OAGG;IACG,IAAI,CAAC,CAAC,GAAG,GAAG,KAAK,OAAO,CAAC,CAAC,CAAC;IAiBjC;;OAEG;IACH,MAAM,IAAI,IAAI;IAKd;;OAEG;IAEH,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,CAAC,CAAC;CAuB3C"}
package/dist/parquetjs/parser/parquet-cursor.js
DELETED
@@ -1,74 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ParquetCursor = void 0;
-const shred_1 = require("../schema/shred");
-/**
- * A parquet cursor is used to retrieve rows from a parquet file in order
- */
-class ParquetCursor {
-    /**
-     * Create a new parquet reader from the file metadata and an envelope reader.
-     * It is usually not recommended to call this constructor directly except for
-     * advanced and internal use cases. Consider using getCursor() on the
-     * ParquetReader instead
-     */
-    constructor(metadata, envelopeReader, schema, columnList) {
-        this.rowGroup = [];
-        this.metadata = metadata;
-        this.envelopeReader = envelopeReader;
-        this.schema = schema;
-        this.columnList = columnList;
-        this.rowGroupIndex = 0;
-    }
-    /**
-     * Retrieve the next row from the cursor. Returns a row or NULL if the end
-     * of the file was reached
-     */
-    async next() {
-        if (this.rowGroup.length === 0) {
-            if (this.rowGroupIndex >= this.metadata.row_groups.length) {
-                // @ts-ignore
-                return null;
-            }
-            const rowBuffer = await this.envelopeReader.readRowGroup(this.schema, this.metadata.row_groups[this.rowGroupIndex], this.columnList);
-            this.rowGroup = (0, shred_1.materializeRecords)(this.schema, rowBuffer);
-            this.rowGroupIndex++;
-        }
-        return this.rowGroup.shift();
-    }
-    /**
-     * Rewind the cursor the the beginning of the file
-     */
-    rewind() {
-        this.rowGroup = [];
-        this.rowGroupIndex = 0;
-    }
-    /**
-     * Implement AsyncIterable
-     */
-    // tslint:disable-next-line:function-name
-    [Symbol.asyncIterator]() {
-        let done = false;
-        return {
-            next: async () => {
-                if (done) {
-                    return { done, value: null };
-                }
-                const value = await this.next();
-                if (value === null) {
-                    return { done: true, value };
-                }
-                return { done: false, value };
-            },
-            return: async () => {
-                done = true;
-                return { done, value: null };
-            },
-            throw: async () => {
-                done = true;
-                return { done: true, value: null };
-            }
-        };
-    }
-}
-exports.ParquetCursor = ParquetCursor;
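Note: the deleted implementation builds the AsyncIterator object by hand, mapping the null returned by next() at end-of-file to done: true. A sketch of the same protocol expressed as an async generator (illustrative only, not the package's code):

// Illustrative sketch: the same AsyncIterable behavior via an async generator,
// which handles the done/return/throw bookkeeping automatically.
async function* iterate<T>(cursor: {next(): Promise<T | null>}): AsyncGenerator<T> {
  // Pull rows until next() signals end-of-file by resolving null.
  let row = await cursor.next();
  while (row !== null) {
    yield row;
    row = await cursor.next();
  }
}

An async generator returns done: true automatically when the function body ends and closes cleanly on an early for await exit, which is exactly what the explicit return/throw handlers in the deleted code emulate.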