@loaders.gl/parquet 3.1.3 → 4.0.0-alpha.5
This diff compares the published contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/bundle.js +2 -2
- package/dist/bundle.js.map +1 -0
- package/dist/constants.js +6 -18
- package/dist/constants.js.map +1 -0
- package/dist/dist.min.js +17 -8
- package/dist/dist.min.js.map +3 -3
- package/dist/index.js +14 -29
- package/dist/index.js.map +1 -0
- package/dist/lib/convert-schema.js +63 -62
- package/dist/lib/convert-schema.js.map +1 -0
- package/dist/lib/parse-parquet.js +25 -25
- package/dist/lib/parse-parquet.js.map +1 -0
- package/dist/lib/read-array-buffer.js +8 -28
- package/dist/lib/read-array-buffer.js.map +1 -0
- package/dist/parquet-loader.js +19 -24
- package/dist/parquet-loader.js.map +1 -0
- package/dist/parquet-worker.js +18 -9
- package/dist/parquet-worker.js.map +3 -3
- package/dist/parquet-writer.js +14 -17
- package/dist/parquet-writer.js.map +1 -0
- package/dist/{es5/parquetjs → parquetjs}/LICENSE +0 -0
- package/dist/parquetjs/codecs/declare.js +2 -2
- package/dist/{es5/parquetjs → parquetjs}/codecs/declare.js.map +0 -0
- package/dist/parquetjs/codecs/dictionary.js +10 -12
- package/dist/parquetjs/codecs/dictionary.js.map +1 -0
- package/dist/parquetjs/codecs/index.js +22 -50
- package/dist/parquetjs/codecs/index.js.map +1 -0
- package/dist/parquetjs/codecs/plain.js +232 -173
- package/dist/parquetjs/codecs/plain.js.map +1 -0
- package/dist/parquetjs/codecs/rle.js +140 -134
- package/dist/parquetjs/codecs/rle.js.map +1 -0
- package/dist/parquetjs/compression.js +48 -154
- package/dist/parquetjs/compression.js.map +1 -0
- package/dist/parquetjs/encoder/writer.js +383 -440
- package/dist/parquetjs/encoder/writer.js.map +1 -0
- package/dist/parquetjs/file.js +66 -85
- package/dist/parquetjs/file.js.map +1 -0
- package/dist/{es5/parquetjs → parquetjs}/modules.d.ts +0 -0
- package/dist/parquetjs/parquet-thrift/BoundaryOrder.js +7 -14
- package/dist/parquetjs/parquet-thrift/BoundaryOrder.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/BsonType.js +37 -56
- package/dist/parquetjs/parquet-thrift/BsonType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnChunk.js +215 -205
- package/dist/parquetjs/parquet-thrift/ColumnChunk.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnIndex.js +212 -207
- package/dist/parquetjs/parquet-thrift/ColumnIndex.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnMetaData.js +422 -391
- package/dist/parquetjs/parquet-thrift/ColumnMetaData.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/ColumnOrder.js +90 -99
- package/dist/parquetjs/parquet-thrift/ColumnOrder.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/CompressionCodec.js +12 -19
- package/dist/parquetjs/parquet-thrift/CompressionCodec.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/ConvertedType.js +26 -33
- package/dist/parquetjs/parquet-thrift/ConvertedType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeader.js +162 -162
- package/dist/parquetjs/parquet-thrift/DataPageHeader.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js +234 -224
- package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/DateType.js +37 -56
- package/dist/parquetjs/parquet-thrift/DateType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/DecimalType.js +91 -101
- package/dist/parquetjs/parquet-thrift/DecimalType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js +113 -118
- package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/Encoding.js +12 -19
- package/dist/parquetjs/parquet-thrift/Encoding.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/EnumType.js +37 -56
- package/dist/parquetjs/parquet-thrift/EnumType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js +7 -14
- package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/FileMetaData.js +264 -250
- package/dist/parquetjs/parquet-thrift/FileMetaData.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/IndexPageHeader.js +37 -56
- package/dist/parquetjs/parquet-thrift/IndexPageHeader.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/IntType.js +91 -101
- package/dist/parquetjs/parquet-thrift/IntType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/JsonType.js +37 -56
- package/dist/parquetjs/parquet-thrift/JsonType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/KeyValue.js +89 -98
- package/dist/parquetjs/parquet-thrift/KeyValue.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/ListType.js +37 -56
- package/dist/parquetjs/parquet-thrift/ListType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/LogicalType.js +450 -363
- package/dist/parquetjs/parquet-thrift/LogicalType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/MapType.js +37 -56
- package/dist/parquetjs/parquet-thrift/MapType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/MicroSeconds.js +37 -56
- package/dist/parquetjs/parquet-thrift/MicroSeconds.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/MilliSeconds.js +37 -56
- package/dist/parquetjs/parquet-thrift/MilliSeconds.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/NullType.js +37 -56
- package/dist/parquetjs/parquet-thrift/NullType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/OffsetIndex.js +80 -92
- package/dist/parquetjs/parquet-thrift/OffsetIndex.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageEncodingStats.js +115 -123
- package/dist/parquetjs/parquet-thrift/PageEncodingStats.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageHeader.js +231 -214
- package/dist/parquetjs/parquet-thrift/PageHeader.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageLocation.js +124 -137
- package/dist/parquetjs/parquet-thrift/PageLocation.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/PageType.js +8 -15
- package/dist/parquetjs/parquet-thrift/PageType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/RowGroup.js +172 -176
- package/dist/parquetjs/parquet-thrift/RowGroup.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/SchemaElement.js +268 -237
- package/dist/parquetjs/parquet-thrift/SchemaElement.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/SortingColumn.js +115 -123
- package/dist/parquetjs/parquet-thrift/SortingColumn.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/Statistics.js +179 -172
- package/dist/parquetjs/parquet-thrift/Statistics.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/StringType.js +37 -56
- package/dist/parquetjs/parquet-thrift/StringType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/TimeType.js +92 -102
- package/dist/parquetjs/parquet-thrift/TimeType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/TimeUnit.js +120 -121
- package/dist/parquetjs/parquet-thrift/TimeUnit.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/TimestampType.js +92 -102
- package/dist/parquetjs/parquet-thrift/TimestampType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/Type.js +12 -19
- package/dist/parquetjs/parquet-thrift/Type.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js +37 -56
- package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/UUIDType.js +37 -56
- package/dist/parquetjs/parquet-thrift/UUIDType.js.map +1 -0
- package/dist/parquetjs/parquet-thrift/index.js +44 -61
- package/dist/parquetjs/parquet-thrift/index.js.map +1 -0
- package/dist/parquetjs/parser/decoders.js +283 -301
- package/dist/{es5/parquetjs → parquetjs}/parser/decoders.js.map +1 -1
- package/dist/parquetjs/parser/parquet-cursor.js +85 -69
- package/dist/parquetjs/parser/parquet-cursor.js.map +1 -0
- package/dist/parquetjs/parser/parquet-envelope-reader.js +146 -127
- package/dist/parquetjs/parser/parquet-envelope-reader.js.map +1 -0
- package/dist/parquetjs/parser/parquet-reader.js +113 -127
- package/dist/parquetjs/parser/parquet-reader.js.map +1 -0
- package/dist/parquetjs/schema/declare.js +12 -9
- package/dist/parquetjs/schema/declare.js.map +1 -0
- package/dist/parquetjs/schema/schema.js +162 -148
- package/dist/{es5/parquetjs → parquetjs}/schema/schema.js.map +1 -1
- package/dist/parquetjs/schema/shred.js +151 -214
- package/dist/parquetjs/schema/shred.js.map +1 -0
- package/dist/parquetjs/schema/types.js +415 -357
- package/dist/parquetjs/schema/types.js.map +1 -0
- package/dist/parquetjs/utils/buffer-utils.js +10 -20
- package/dist/parquetjs/utils/buffer-utils.js.map +1 -0
- package/dist/parquetjs/utils/file-utils.js +28 -40
- package/dist/parquetjs/utils/file-utils.js.map +1 -0
- package/dist/parquetjs/utils/read-utils.js +95 -99
- package/dist/parquetjs/utils/read-utils.js.map +1 -0
- package/dist/workers/parquet-worker.js +4 -5
- package/dist/workers/parquet-worker.js.map +1 -0
- package/package.json +8 -8
- package/dist/es5/bundle.js +0 -7
- package/dist/es5/bundle.js.map +0 -1
- package/dist/es5/constants.js +0 -17
- package/dist/es5/constants.js.map +0 -1
- package/dist/es5/index.js +0 -82
- package/dist/es5/index.js.map +0 -1
- package/dist/es5/lib/convert-schema.js +0 -82
- package/dist/es5/lib/convert-schema.js.map +0 -1
- package/dist/es5/lib/parse-parquet.js +0 -173
- package/dist/es5/lib/parse-parquet.js.map +0 -1
- package/dist/es5/lib/read-array-buffer.js +0 -53
- package/dist/es5/lib/read-array-buffer.js.map +0 -1
- package/dist/es5/parquet-loader.js +0 -30
- package/dist/es5/parquet-loader.js.map +0 -1
- package/dist/es5/parquet-writer.js +0 -25
- package/dist/es5/parquet-writer.js.map +0 -1
- package/dist/es5/parquetjs/codecs/declare.js +0 -2
- package/dist/es5/parquetjs/codecs/dictionary.js +0 -30
- package/dist/es5/parquetjs/codecs/dictionary.js.map +0 -1
- package/dist/es5/parquetjs/codecs/index.js +0 -56
- package/dist/es5/parquetjs/codecs/index.js.map +0 -1
- package/dist/es5/parquetjs/codecs/plain.js +0 -287
- package/dist/es5/parquetjs/codecs/plain.js.map +0 -1
- package/dist/es5/parquetjs/codecs/rle.js +0 -174
- package/dist/es5/parquetjs/codecs/rle.js.map +0 -1
- package/dist/es5/parquetjs/compression.js +0 -167
- package/dist/es5/parquetjs/compression.js.map +0 -1
- package/dist/es5/parquetjs/encoder/writer.js +0 -875
- package/dist/es5/parquetjs/encoder/writer.js.map +0 -1
- package/dist/es5/parquetjs/file.js +0 -103
- package/dist/es5/parquetjs/file.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js +0 -15
- package/dist/es5/parquetjs/parquet-thrift/BoundaryOrder.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/BsonType.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/BsonType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js +0 -241
- package/dist/es5/parquetjs/parquet-thrift/ColumnChunk.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js +0 -245
- package/dist/es5/parquetjs/parquet-thrift/ColumnIndex.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js +0 -449
- package/dist/es5/parquetjs/parquet-thrift/ColumnMetaData.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js +0 -124
- package/dist/es5/parquetjs/parquet-thrift/ColumnOrder.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js +0 -20
- package/dist/es5/parquetjs/parquet-thrift/CompressionCodec.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js +0 -34
- package/dist/es5/parquetjs/parquet-thrift/ConvertedType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js +0 -191
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeader.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js +0 -258
- package/dist/es5/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/DateType.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/DateType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/DecimalType.js +0 -122
- package/dist/es5/parquetjs/parquet-thrift/DecimalType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js +0 -143
- package/dist/es5/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js +0 -20
- package/dist/es5/parquetjs/parquet-thrift/Encoding.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/EnumType.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/EnumType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js +0 -15
- package/dist/es5/parquetjs/parquet-thrift/FieldRepetitionType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js +0 -298
- package/dist/es5/parquetjs/parquet-thrift/FileMetaData.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/IndexPageHeader.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/IntType.js +0 -122
- package/dist/es5/parquetjs/parquet-thrift/IntType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/JsonType.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/JsonType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/KeyValue.js +0 -120
- package/dist/es5/parquetjs/parquet-thrift/KeyValue.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/ListType.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/ListType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/LogicalType.js +0 -508
- package/dist/es5/parquetjs/parquet-thrift/LogicalType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/MapType.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/MapType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/MicroSeconds.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/MilliSeconds.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/NullType.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/NullType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js +0 -114
- package/dist/es5/parquetjs/parquet-thrift/OffsetIndex.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js +0 -145
- package/dist/es5/parquetjs/parquet-thrift/PageEncodingStats.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/PageHeader.js +0 -258
- package/dist/es5/parquetjs/parquet-thrift/PageHeader.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/PageLocation.js +0 -155
- package/dist/es5/parquetjs/parquet-thrift/PageLocation.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/PageType.js +0 -16
- package/dist/es5/parquetjs/parquet-thrift/PageType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/RowGroup.js +0 -206
- package/dist/es5/parquetjs/parquet-thrift/RowGroup.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js +0 -290
- package/dist/es5/parquetjs/parquet-thrift/SchemaElement.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js +0 -145
- package/dist/es5/parquetjs/parquet-thrift/SortingColumn.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/Statistics.js +0 -207
- package/dist/es5/parquetjs/parquet-thrift/Statistics.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/StringType.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/StringType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeType.js +0 -124
- package/dist/es5/parquetjs/parquet-thrift/TimeType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js +0 -156
- package/dist/es5/parquetjs/parquet-thrift/TimeUnit.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/TimestampType.js +0 -124
- package/dist/es5/parquetjs/parquet-thrift/TimestampType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/Type.js +0 -20
- package/dist/es5/parquetjs/parquet-thrift/Type.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/UUIDType.js +0 -67
- package/dist/es5/parquetjs/parquet-thrift/UUIDType.js.map +0 -1
- package/dist/es5/parquetjs/parquet-thrift/index.js +0 -565
- package/dist/es5/parquetjs/parquet-thrift/index.js.map +0 -1
- package/dist/es5/parquetjs/parser/decoders.js +0 -489
- package/dist/es5/parquetjs/parser/parquet-cursor.js +0 -215
- package/dist/es5/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js +0 -413
- package/dist/es5/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/es5/parquetjs/parser/parquet-reader.js +0 -364
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +0 -1
- package/dist/es5/parquetjs/schema/declare.js +0 -25
- package/dist/es5/parquetjs/schema/declare.js.map +0 -1
- package/dist/es5/parquetjs/schema/schema.js +0 -203
- package/dist/es5/parquetjs/schema/shred.js +0 -223
- package/dist/es5/parquetjs/schema/shred.js.map +0 -1
- package/dist/es5/parquetjs/schema/types.js +0 -492
- package/dist/es5/parquetjs/schema/types.js.map +0 -1
- package/dist/es5/parquetjs/utils/buffer-utils.js +0 -21
- package/dist/es5/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/es5/parquetjs/utils/file-utils.js +0 -55
- package/dist/es5/parquetjs/utils/file-utils.js.map +0 -1
- package/dist/es5/parquetjs/utils/read-utils.js +0 -159
- package/dist/es5/parquetjs/utils/read-utils.js.map +0 -1
- package/dist/es5/workers/parquet-worker.js +0 -8
- package/dist/es5/workers/parquet-worker.js.map +0 -1
- package/dist/esm/bundle.js +0 -5
- package/dist/esm/bundle.js.map +0 -1
- package/dist/esm/constants.js +0 -6
- package/dist/esm/constants.js.map +0 -1
- package/dist/esm/index.js +0 -15
- package/dist/esm/index.js.map +0 -1
- package/dist/esm/lib/convert-schema.js +0 -71
- package/dist/esm/lib/convert-schema.js.map +0 -1
- package/dist/esm/lib/parse-parquet.js +0 -28
- package/dist/esm/lib/parse-parquet.js.map +0 -1
- package/dist/esm/lib/read-array-buffer.js +0 -9
- package/dist/esm/lib/read-array-buffer.js.map +0 -1
- package/dist/esm/parquet-loader.js +0 -22
- package/dist/esm/parquet-loader.js.map +0 -1
- package/dist/esm/parquet-writer.js +0 -18
- package/dist/esm/parquet-writer.js.map +0 -1
- package/dist/esm/parquetjs/LICENSE +0 -20
- package/dist/esm/parquetjs/codecs/declare.js +0 -2
- package/dist/esm/parquetjs/codecs/declare.js.map +0 -1
- package/dist/esm/parquetjs/codecs/dictionary.js +0 -12
- package/dist/esm/parquetjs/codecs/dictionary.js.map +0 -1
- package/dist/esm/parquetjs/codecs/index.js +0 -23
- package/dist/esm/parquetjs/codecs/index.js.map +0 -1
- package/dist/esm/parquetjs/codecs/plain.js +0 -270
- package/dist/esm/parquetjs/codecs/plain.js.map +0 -1
- package/dist/esm/parquetjs/codecs/rle.js +0 -151
- package/dist/esm/parquetjs/codecs/rle.js.map +0 -1
- package/dist/esm/parquetjs/compression.js +0 -62
- package/dist/esm/parquetjs/compression.js.map +0 -1
- package/dist/esm/parquetjs/encoder/writer.js +0 -421
- package/dist/esm/parquetjs/encoder/writer.js.map +0 -1
- package/dist/esm/parquetjs/file.js +0 -80
- package/dist/esm/parquetjs/file.js.map +0 -1
- package/dist/esm/parquetjs/modules.d.ts +0 -21
- package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js +0 -8
- package/dist/esm/parquetjs/parquet-thrift/BoundaryOrder.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/BsonType.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/BsonType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js +0 -217
- package/dist/esm/parquetjs/parquet-thrift/ColumnChunk.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js +0 -218
- package/dist/esm/parquetjs/parquet-thrift/ColumnIndex.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js +0 -429
- package/dist/esm/parquetjs/parquet-thrift/ColumnMetaData.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js +0 -95
- package/dist/esm/parquetjs/parquet-thrift/ColumnOrder.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js +0 -13
- package/dist/esm/parquetjs/parquet-thrift/CompressionCodec.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js +0 -27
- package/dist/esm/parquetjs/parquet-thrift/ConvertedType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js +0 -166
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeader.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js +0 -236
- package/dist/esm/parquetjs/parquet-thrift/DataPageHeaderV2.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/DateType.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/DateType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/DecimalType.js +0 -95
- package/dist/esm/parquetjs/parquet-thrift/DecimalType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js +0 -117
- package/dist/esm/parquetjs/parquet-thrift/DictionaryPageHeader.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/Encoding.js +0 -13
- package/dist/esm/parquetjs/parquet-thrift/Encoding.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/EnumType.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/EnumType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js +0 -8
- package/dist/esm/parquetjs/parquet-thrift/FieldRepetitionType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js +0 -270
- package/dist/esm/parquetjs/parquet-thrift/FileMetaData.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/IndexPageHeader.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/IntType.js +0 -95
- package/dist/esm/parquetjs/parquet-thrift/IntType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/JsonType.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/JsonType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/KeyValue.js +0 -93
- package/dist/esm/parquetjs/parquet-thrift/KeyValue.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/ListType.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/ListType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/LogicalType.js +0 -467
- package/dist/esm/parquetjs/parquet-thrift/LogicalType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MapType.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/MapType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/MicroSeconds.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/MilliSeconds.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/NullType.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/NullType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js +0 -85
- package/dist/esm/parquetjs/parquet-thrift/OffsetIndex.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js +0 -119
- package/dist/esm/parquetjs/parquet-thrift/PageEncodingStats.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/PageHeader.js +0 -233
- package/dist/esm/parquetjs/parquet-thrift/PageHeader.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/PageLocation.js +0 -128
- package/dist/esm/parquetjs/parquet-thrift/PageLocation.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/PageType.js +0 -9
- package/dist/esm/parquetjs/parquet-thrift/PageType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/RowGroup.js +0 -178
- package/dist/esm/parquetjs/parquet-thrift/RowGroup.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js +0 -270
- package/dist/esm/parquetjs/parquet-thrift/SchemaElement.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js +0 -119
- package/dist/esm/parquetjs/parquet-thrift/SortingColumn.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/Statistics.js +0 -183
- package/dist/esm/parquetjs/parquet-thrift/Statistics.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/StringType.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/StringType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/TimeType.js +0 -96
- package/dist/esm/parquetjs/parquet-thrift/TimeType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js +0 -126
- package/dist/esm/parquetjs/parquet-thrift/TimeUnit.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/TimestampType.js +0 -96
- package/dist/esm/parquetjs/parquet-thrift/TimestampType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/Type.js +0 -13
- package/dist/esm/parquetjs/parquet-thrift/Type.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/TypeDefinedOrder.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/UUIDType.js +0 -39
- package/dist/esm/parquetjs/parquet-thrift/UUIDType.js.map +0 -1
- package/dist/esm/parquetjs/parquet-thrift/index.js +0 -44
- package/dist/esm/parquetjs/parquet-thrift/index.js.map +0 -1
- package/dist/esm/parquetjs/parser/decoders.js +0 -300
- package/dist/esm/parquetjs/parser/decoders.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-cursor.js +0 -90
- package/dist/esm/parquetjs/parser/parquet-cursor.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js +0 -155
- package/dist/esm/parquetjs/parser/parquet-envelope-reader.js.map +0 -1
- package/dist/esm/parquetjs/parser/parquet-reader.js +0 -120
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +0 -1
- package/dist/esm/parquetjs/schema/declare.js +0 -13
- package/dist/esm/parquetjs/schema/declare.js.map +0 -1
- package/dist/esm/parquetjs/schema/schema.js +0 -176
- package/dist/esm/parquetjs/schema/schema.js.map +0 -1
- package/dist/esm/parquetjs/schema/shred.js +0 -162
- package/dist/esm/parquetjs/schema/shred.js.map +0 -1
- package/dist/esm/parquetjs/schema/types.js +0 -476
- package/dist/esm/parquetjs/schema/types.js.map +0 -1
- package/dist/esm/parquetjs/utils/buffer-utils.js +0 -12
- package/dist/esm/parquetjs/utils/buffer-utils.js.map +0 -1
- package/dist/esm/parquetjs/utils/file-utils.js +0 -34
- package/dist/esm/parquetjs/utils/file-utils.js.map +0 -1
- package/dist/esm/parquetjs/utils/read-utils.js +0 -105
- package/dist/esm/parquetjs/utils/read-utils.js.map +0 -1
- package/dist/esm/workers/parquet-worker.js +0 -4
- package/dist/esm/workers/parquet-worker.js.map +0 -1
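The file listing above captures the main structural change in 4.0.0-alpha.5: the separate `dist/es5` and `dist/esm` output trees are deleted and a single `dist` tree takes their place (note the `dist/{es5/parquetjs → parquetjs}` renames). Imports through the package entry point are unaffected by the layout change. For orientation, a minimal usage sketch following the documented loaders.gl pattern (`ParquetLoader` from the package root, `load` from `@loaders.gl/core`); the diff itself does not document this API:

```js
import {load} from '@loaders.gl/core';
import {ParquetLoader} from '@loaders.gl/parquet';

// Resolves different files under dist/ in 3.1.3 vs 4.0.0-alpha.5,
// but the entry-point API is unchanged.
const table = await load('example.parquet', ParquetLoader);
```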
package/dist/esm/parquetjs/parser/parquet-reader.js
@@ -1,120 +0,0 @@
-import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
-
-let _Symbol$asyncIterator;
-
-import { ParquetEnvelopeReader } from './parquet-envelope-reader';
-import { ParquetSchema } from '../schema/schema';
-import { ParquetCursor } from './parquet-cursor';
-import { PARQUET_VERSION } from '../../constants';
-import { decodeSchema } from './decoders';
-_Symbol$asyncIterator = Symbol.asyncIterator;
-export class ParquetReader {
-  static async openBlob(blob) {
-    const readFn = async (start, length) => {
-      const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();
-      return Buffer.from(arrayBuffer);
-    };
-
-    const closeFn = async () => {};
-
-    const size = blob.size;
-    const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);
-
-    try {
-      await envelopeReader.readHeader();
-      const metadata = await envelopeReader.readFooter();
-      return new ParquetReader(metadata, envelopeReader);
-    } catch (err) {
-      await envelopeReader.close();
-      throw err;
-    }
-  }
-
-  static async openArrayBuffer(arrayBuffer) {
-    const readFn = async (start, length) => Buffer.from(arrayBuffer, start, length);
-
-    const closeFn = async () => {};
-
-    const size = arrayBuffer.byteLength;
-    const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);
-
-    try {
-      await envelopeReader.readHeader();
-      const metadata = await envelopeReader.readFooter();
-      return new ParquetReader(metadata, envelopeReader);
-    } catch (err) {
-      await envelopeReader.close();
-      throw err;
-    }
-  }
-
-  static async openBuffer(buffer) {
-    const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);
-
-    try {
-      await envelopeReader.readHeader();
-      const metadata = await envelopeReader.readFooter();
-      return new ParquetReader(metadata, envelopeReader);
-    } catch (err) {
-      await envelopeReader.close();
-      throw err;
-    }
-  }
-
-  constructor(metadata, envelopeReader) {
-    _defineProperty(this, "metadata", void 0);
-
-    _defineProperty(this, "envelopeReader", void 0);
-
-    _defineProperty(this, "schema", void 0);
-
-    if (metadata.version !== PARQUET_VERSION) {
-      throw new Error('invalid parquet version');
-    }
-
-    this.metadata = metadata;
-    this.envelopeReader = envelopeReader;
-    const root = this.metadata.schema[0];
-    const {
-      schema
-    } = decodeSchema(this.metadata.schema, 1, root.num_children);
-    this.schema = new ParquetSchema(schema);
-  }
-
-  async close() {
-    await this.envelopeReader.close();
-  }
-
-  getCursor(columnList) {
-    if (!columnList) {
-      columnList = [];
-    }
-
-    columnList = columnList.map(x => Array.isArray(x) ? x : [x]);
-    return new ParquetCursor(this.metadata, this.envelopeReader, this.schema, columnList);
-  }
-
-  getRowCount() {
-    return Number(this.metadata.num_rows);
-  }
-
-  getSchema() {
-    return this.schema;
-  }
-
-  getMetadata() {
-    const md = {};
-
-    for (const kv of this.metadata.key_value_metadata) {
-      md[kv.key] = kv.value;
-    }
-
-    return md;
-  }
-
-  [_Symbol$asyncIterator]() {
-    return this.getCursor()[Symbol.asyncIterator]();
-  }
-
-}
-//# sourceMappingURL=parquet-reader.js.map
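The deleted `parquet-reader.js` above is the compiled ESM build of the v3 reader (the TypeScript original is embedded in the source map below). Its API is visible in the diff: static `openBlob`/`openArrayBuffer`/`openBuffer` factories, `getCursor`/`getRowCount`/`getSchema`/`getMetadata`, and async iteration via `[Symbol.asyncIterator]`. A usage sketch against that v3 API; the deep `dist/esm` import path is illustrative and is exactly what this release removes:

```js
// Pre-4.0 deep import path, shown for illustration only.
import {ParquetReader} from '@loaders.gl/parquet/dist/esm/parquetjs/parser/parquet-reader';

async function readAllRows(arrayBuffer) {
  const reader = await ParquetReader.openArrayBuffer(arrayBuffer);
  try {
    const rows = [];
    // ParquetReader is AsyncIterable: [Symbol.asyncIterator] delegates
    // to getCursor(), which reads all columns when none are specified.
    for await (const row of reader) {
      rows.push(row);
    }
    return rows;
  } finally {
    // The source comments note close() must be called once reading is done.
    await reader.close();
  }
}
```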
package/dist/esm/parquetjs/parser/parquet-reader.js.map
@@ -1 +0,0 @@
{"version":3,"sources":["../../../../src/parquetjs/parser/parquet-reader.ts"],"names":["ParquetEnvelopeReader","ParquetSchema","ParquetCursor","PARQUET_VERSION","decodeSchema","Symbol","asyncIterator","ParquetReader","openBlob","blob","readFn","start","length","arrayBuffer","slice","Buffer","from","closeFn","size","envelopeReader","readHeader","metadata","readFooter","err","close","openArrayBuffer","byteLength","openBuffer","buffer","constructor","version","Error","root","schema","num_children","getCursor","columnList","map","x","Array","isArray","getRowCount","Number","num_rows","getSchema","getMetadata","md","kv","key_value_metadata","key","value"],"mappings":";;;;AACA,SAAQA,qBAAR,QAAoC,2BAApC;AAEA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,aAAR,QAA4B,kBAA5B;AACA,SAAQC,eAAR,QAA8B,iBAA9B;AACA,SAAQC,YAAR,QAA2B,YAA3B;wBAyJGC,MAAM,CAACC,a;AAhJV,OAAO,MAAMC,aAAN,CAAmD;AAInC,eAARC,QAAQ,CAAIC,IAAJ,EAA2C;AAC9D,UAAMC,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyC;AACtD,YAAMC,WAAW,GAAG,MAAMJ,IAAI,CAACK,KAAL,CAAWH,KAAX,EAAkBA,KAAK,GAAGC,MAA1B,EAAkCC,WAAlC,EAA1B;AACA,aAAOE,MAAM,CAACC,IAAP,CAAYH,WAAZ,CAAP;AACD,KAHD;;AAIA,UAAMI,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGT,IAAI,CAACS,IAAlB;AACA,UAAMC,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAK2B,eAAfE,eAAe,CAAIZ,WAAJ,EAAyD;AACnF,UAAMH,MAAM,GAAG,OAAOC,KAAP,EAAsBC,MAAtB,KAAyCG,MAAM,CAACC,IAAP,CAAYH,WAAZ,EAAyBF,KAAzB,EAAgCC,MAAhC,CAAxD;;AACA,UAAMK,OAAO,GAAG,YAAY,CAAE,CAA9B;;AACA,UAAMC,IAAI,GAAGL,WAAW,CAACa,UAAzB;AACA,UAAMP,cAAc,GAAG,IAAInB,qBAAJ,CAA0BU,MAA1B,EAAkCO,OAAlC,EAA2CC,IAA3C,CAAvB;;AACA,QAAI;AACF,YAAMC,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAkBc,QAAlB,EAA4BF,cAA5B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAEsB,eAAVI,UAAU,CAAIC,MAAJ,EAA+C;AACpE,UAAMT,cAAc,GAAG,MAAMnB,qBAAqB,CAAC2B,UAAtB,CAAiCC,MAAjC,CAA7B;;AACA,QAAI;AACF,YAAMT,cAAc,CAACC,UAAf,EAAN;AACA,YAAMC,QAAQ,GAAG,MAAMF,cAAc,CAACG,UAAf,EAAvB;AACA,aAAO,IAAIf,aAAJ,CAAqBc,QAArB,EAA+BF,cAA/B,CAAP;AACD,KAJD,CAIE,OAAOI,GAAP,EAAY;AACZ,YAAMJ,cAAc,CAACK,KAAf,EAAN;AACA,YAAMD,GAAN;AACD;AACF;;AAYDM,EAAAA,WAAW,CAACR,QAAD,EAAyBF,cAAzB,EAAgE;AAAA;;AAAA;;AAAA;;AACzE,QAAIE,QAAQ,CAACS,OAAT,KAAqB3B,eAAzB,EAA0C;AACxC,YAAM,IAAI4B,KAAJ,CAAU,yBAAV,CAAN;AACD;;AAED,SAAKV,QAAL,GAAgBA,QAAhB;AACA,SAAKF,cAAL,GAAsBA,cAAtB;AACA,UAAMa,IAAI,GAAG,KAAKX,QAAL,CAAcY,MAAd,CAAqB,CAArB,CAAb;AACA,UAAM;AAACA,MAAAA;AAAD,QAAW7B,YAAY,CAAC,KAAKiB,QAAL,CAAcY,MAAf,EAAuB,CAAvB,EAA0BD,IAAI,CAACE,YAA/B,CAA7B;AACA,SAAKD,MAAL,GAAc,IAAIhC,aAAJ,CAAkBgC,MAAlB,CAAd;AACD;;AAMU,QAALT,KAAK,GAAkB;AAC3B,UAAM,KAAKL,cAAL,CAAoBK,KAApB,EAAN;AAGD;;AAeDW,EAAAA,SAAS,CAACC,UAAD,EAAgE;AACvE,QAAI,CAACA,UAAL,EAAiB;AAEfA,MAAAA,UAAU,GAAG,EAAb;AACD;;AAGDA,IAAAA,UAAU,GAAGA,UAAU,CAACC,GAAX,CAAgBC,CAAD,IAAQC,KAAK,CAACC,OAAN,CAAcF,CAAd,IAAmBA,CAAnB,GAAuB,CAACA,CAAD,CAA9C,CAAb;AAEA,WAAO,IAAIpC,aAAJ,CACL,KAAKmB,QADA,EAEL,KAAKF,cAFA,EAGL,KAAKc,MAHA,EAILG,UAJK,CAAP;AAMD;;AAMDK,EAAAA,WAAW,GAAW;AACpB,WAAOC,MAAM,CAAC,KAAKrB,QAAL,CAAcsB,QAAf,CAAb;AACD;;AAKDC,EAAAA,SAAS,GAAkB;AACzB,WAAO,KAAKX,MAAZ;AACD;;AAKDY,EAAAA,WAAW,GAA2B;AACpC,UAAMC,EAA0B,GAAG,EAAnC;;AACA,SAAK,MAAMC,EAAX,IAAiB,KAAK1B,QAAL,CAAc2B,kBAA/B,EAAoD;AAClDF,MAAAA,EAAE,CAACC,EAAE,CAACE,GAAJ,CAAF,GAAaF,EAAE,CAACG,KAAhB;AACD;;AACD,WAAOJ,EAAP;AACD;;AAMD,4BAA2C;AACzC,WAAO,
KAAKX,SAAL,GAAiB9B,MAAM,CAACC,aAAxB,GAAP;AACD;;AAlJuD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport {ParquetEnvelopeReader} from './parquet-envelope-reader';\nimport {FileMetaData} from '../parquet-thrift';\nimport {ParquetSchema} from '../schema/schema';\nimport {ParquetCursor} from './parquet-cursor';\nimport {PARQUET_VERSION} from '../../constants';\nimport {decodeSchema} from './decoders';\n\n/**\n * A parquet reader allows retrieving the rows from a parquet file in order.\n * The basic usage is to create a reader and then retrieve a cursor/iterator\n * which allows you to consume row after row until all rows have been read. It is\n * important that you call close() after you are finished reading the file to\n * avoid leaking file descriptors.\n */\nexport class ParquetReader<T> implements AsyncIterable<T> {\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openBlob<T>(blob: Blob): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => {\n const arrayBuffer = await blob.slice(start, start + length).arrayBuffer();\n return Buffer.from(arrayBuffer);\n };\n const closeFn = async () => {};\n const size = blob.size;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n /**\n * return a new parquet reader initialized with a read function\n */\n static async openArrayBuffer<T>(arrayBuffer: ArrayBuffer): Promise<ParquetReader<T>> {\n const readFn = async (start: number, length: number) => Buffer.from(arrayBuffer, start, length);\n const closeFn = async () => {};\n const size = arrayBuffer.byteLength;\n const envelopeReader = new ParquetEnvelopeReader(readFn, closeFn, size);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n static async openBuffer<T>(buffer: Buffer): Promise<ParquetReader<T>> {\n const envelopeReader = await ParquetEnvelopeReader.openBuffer(buffer);\n try {\n await envelopeReader.readHeader();\n const metadata = await envelopeReader.readFooter();\n return new ParquetReader<T>(metadata, envelopeReader);\n } catch (err) {\n await envelopeReader.close();\n throw err;\n }\n }\n\n public metadata: FileMetaData;\n public envelopeReader: ParquetEnvelopeReader;\n public schema: ParquetSchema;\n\n /**\n * Create a new parquet reader from the file metadata and an envelope reader.\n * It is not recommended to call this constructor directly except for advanced\n * and internal use cases. Consider using one of the open{File,Buffer} methods\n * instead\n */\n constructor(metadata: FileMetaData, envelopeReader: ParquetEnvelopeReader) {\n if (metadata.version !== PARQUET_VERSION) {\n throw new Error('invalid parquet version');\n }\n\n this.metadata = metadata;\n this.envelopeReader = envelopeReader;\n const root = this.metadata.schema[0];\n const {schema} = decodeSchema(this.metadata.schema, 1, root.num_children!);\n this.schema = new ParquetSchema(schema);\n }\n\n /**\n * Close this parquet reader. 
You MUST call this method once you're finished\n * reading rows\n */\n async close(): Promise<void> {\n await this.envelopeReader.close();\n // this.envelopeReader = null;\n // this.metadata = null;\n }\n\n /**\n * Return a cursor to the file. You may open more than one cursor and use\n * them concurrently. All cursors become invalid once close() is called on\n * the reader object.\n *\n * The required_columns parameter controls which columns are actually read\n * from disk. An empty array or no value implies all columns. A list of column\n * names means that only those columns should be loaded from disk.\n */\n getCursor(): ParquetCursor<T>;\n // @ts-ignore\n getCursor<K extends keyof T>(columnList: (K | K[])[]): ParquetCursor<Pick<T, K>>;\n getCursor(columnList: (string | string[])[]): ParquetCursor<Partial<T>>;\n getCursor(columnList?: (string | string[])[]): ParquetCursor<Partial<T>> {\n if (!columnList) {\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = [];\n }\n\n // tslint:disable-next-line:no-parameter-reassignment\n columnList = columnList.map((x) => (Array.isArray(x) ? x : [x]));\n\n return new ParquetCursor<T>(\n this.metadata,\n this.envelopeReader,\n this.schema,\n columnList as string[][]\n );\n }\n\n /**\n * Return the number of rows in this file. Note that the number of rows is\n * not neccessarily equal to the number of rows in each column.\n */\n getRowCount(): number {\n return Number(this.metadata.num_rows);\n }\n\n /**\n * Returns the ParquetSchema for this file\n */\n getSchema(): ParquetSchema {\n return this.schema;\n }\n\n /**\n * Returns the user (key/value) metadata for this file\n */\n getMetadata(): Record<string, string> {\n const md: Record<string, string> = {};\n for (const kv of this.metadata.key_value_metadata!) {\n md[kv.key] = kv.value!;\n }\n return md;\n }\n\n /**\n * Implement AsyncIterable\n */\n // tslint:disable-next-line:function-name\n [Symbol.asyncIterator](): AsyncIterator<T> {\n return this.getCursor()[Symbol.asyncIterator]();\n }\n}\n"],"file":"parquet-reader.js"}
package/dist/esm/parquetjs/schema/declare.js
@@ -1,13 +0,0 @@
-import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
-export class ParquetBuffer {
-  constructor(rowCount = 0, columnData = {}) {
-    _defineProperty(this, "rowCount", void 0);
-
-    _defineProperty(this, "columnData", void 0);
-
-    this.rowCount = rowCount;
-    this.columnData = columnData;
-  }
-
-}
-//# sourceMappingURL=declare.js.map
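`ParquetBuffer` is the accumulator that record shredding writes into: a `rowCount` plus a `columnData` record keyed by column path, each entry holding Dremel-style `rlevels`/`dlevels` alongside `values` (see the `ParquetData` interface embedded in the source map below). A shape sketch with hypothetical data:

```js
// Pre-4.0 deep import path, illustrative only.
import {ParquetBuffer} from '@loaders.gl/parquet/dist/esm/parquetjs/schema/declare';

// One row with a single REQUIRED column 'name'; all levels are 0.
const buffer = new ParquetBuffer(1, {
  name: {dlevels: [0], rlevels: [0], values: ['alice'], count: 1, pageHeaders: []}
});
```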
package/dist/esm/parquetjs/schema/declare.js.map
@@ -1 +0,0 @@
{"version":3,"sources":["../../../../src/parquetjs/schema/declare.ts"],"names":["ParquetBuffer","constructor","rowCount","columnData"],"mappings":";AAmIA,OAAO,MAAMA,aAAN,CAAoB;AAGzBC,EAAAA,WAAW,CAACC,QAAgB,GAAG,CAApB,EAAuBC,UAAuC,GAAG,EAAjE,EAAqE;AAAA;;AAAA;;AAC9E,SAAKD,QAAL,GAAgBA,QAAhB;AACA,SAAKC,UAAL,GAAkBA,UAAlB;AACD;;AANwB","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\nimport Int64 from 'node-int64';\nimport type {PageHeader} from '../parquet-thrift';\n\nexport type ParquetCodec = 'PLAIN' | 'RLE' | 'PLAIN_DICTIONARY';\nexport type ParquetCompression =\n | 'UNCOMPRESSED'\n | 'GZIP'\n | 'SNAPPY'\n | 'LZO'\n | 'BROTLI'\n | 'LZ4'\n | 'LZ4_RAW'\n | 'ZSTD';\nexport type RepetitionType = 'REQUIRED' | 'OPTIONAL' | 'REPEATED';\nexport type ParquetType = PrimitiveType | OriginalType;\n\n/**\n * Physical type\n */\nexport type PrimitiveType =\n // Base Types\n | 'BOOLEAN' // 0\n | 'INT32' // 1\n | 'INT64' // 2\n | 'INT96' // 3\n | 'FLOAT' // 4\n | 'DOUBLE' // 5\n | 'BYTE_ARRAY' // 6,\n | 'FIXED_LEN_BYTE_ARRAY'; // 7\n\n/**\n * Logical type\n */\nexport type OriginalType =\n // Converted Types\n | 'UTF8' // 0\n // | 'MAP' // 1\n // | 'MAP_KEY_VALUE' // 2\n // | 'LIST' // 3\n // | 'ENUM' // 4\n // | 'DECIMAL' // 5\n | 'DECIMAL_INT32' // 5\n | 'DECIMAL_INT64' // 5\n | 'DECIMAL_BYTE_ARRAY' // 5\n | 'DECIMAL_FIXED_LEN_BYTE_ARRAY' // 5\n | 'DATE' // 6\n | 'TIME_MILLIS' // 7\n | 'TIME_MICROS' // 8\n | 'TIMESTAMP_MILLIS' // 9\n | 'TIMESTAMP_MICROS' // 10\n | 'UINT_8' // 11\n | 'UINT_16' // 12\n | 'UINT_32' // 13\n | 'UINT_64' // 14\n | 'INT_8' // 15\n | 'INT_16' // 16\n | 'INT_32' // 17\n | 'INT_64' // 18\n | 'JSON' // 19\n | 'BSON' // 20\n | 'INTERVAL'; // 21\n\nexport type ParquetDictionary = string[];\n\nexport interface SchemaDefinition {\n [string: string]: FieldDefinition;\n}\n\nexport interface FieldDefinition {\n type?: ParquetType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n optional?: boolean;\n repeated?: boolean;\n fields?: SchemaDefinition;\n}\n\nexport interface ParquetField {\n name: string;\n path: string[];\n key: string;\n primitiveType?: PrimitiveType;\n originalType?: OriginalType;\n repetitionType: RepetitionType;\n typeLength?: number;\n presision?: number;\n scale?: number;\n encoding?: ParquetCodec;\n compression?: ParquetCompression;\n rLevelMax: number;\n dLevelMax: number;\n isNested?: boolean;\n fieldCount?: number;\n fields?: Record<string, ParquetField>;\n}\n\nexport interface ParquetOptions {\n type: ParquetType;\n rLevelMax: number;\n dLevelMax: number;\n compression: ParquetCompression;\n column: ParquetField;\n numValues?: Int64;\n dictionary?: ParquetDictionary;\n}\n\nexport interface ParquetData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n pageHeaders: PageHeader[];\n}\n\nexport interface ParquetPageData {\n dlevels: number[];\n rlevels: number[];\n values: any[];\n count: number;\n dictionary?: ParquetDictionary;\n pageHeader: PageHeader;\n}\n\nexport interface ParquetRecord {\n [key: string]: any;\n}\n\nexport class ParquetBuffer {\n rowCount: number;\n columnData: Record<string, ParquetData>;\n constructor(rowCount: number = 0, columnData: Record<string, ParquetData> = {}) {\n this.rowCount = rowCount;\n this.columnData = columnData;\n }\n}\n"],"file":"declare.js"}
package/dist/esm/parquetjs/schema/schema.js
@@ -1,176 +0,0 @@
-import _defineProperty from "@babel/runtime/helpers/esm/defineProperty";
-import { PARQUET_CODECS } from '../codecs';
-import { PARQUET_COMPRESSION_METHODS } from '../compression';
-import { materializeRecords, shredBuffer, shredRecord } from './shred';
-import { PARQUET_LOGICAL_TYPES } from './types';
-export class ParquetSchema {
-  constructor(schema) {
-    _defineProperty(this, "schema", void 0);
-
-    _defineProperty(this, "fields", void 0);
-
-    _defineProperty(this, "fieldList", void 0);
-
-    this.schema = schema;
-    this.fields = buildFields(schema, 0, 0, []);
-    this.fieldList = listFields(this.fields);
-  }
-
-  findField(path) {
-    if (typeof path === 'string') {
-      path = path.split(',');
-    } else {
-      path = path.slice(0);
-    }
-
-    let n = this.fields;
-
-    for (; path.length > 1; path.shift()) {
-      n = n[path[0]].fields;
-    }
-
-    return n[path[0]];
-  }
-
-  findFieldBranch(path) {
-    if (typeof path === 'string') {
-      path = path.split(',');
-    }
-
-    const branch = [];
-    let n = this.fields;
-
-    for (; path.length > 0; path.shift()) {
-      branch.push(n[path[0]]);
-
-      if (path.length > 1) {
-        n = n[path[0]].fields;
-      }
-    }
-
-    return branch;
-  }
-
-  shredRecord(record, buffer) {
-    shredRecord(this, record, buffer);
-  }
-
-  materializeRecords(buffer) {
-    return materializeRecords(this, buffer);
-  }
-
-  compress(type) {
-    setCompress(this.schema, type);
-    setCompress(this.fields, type);
-    return this;
-  }
-
-  buffer() {
-    return shredBuffer(this);
-  }
-
-}
-
-function setCompress(schema, type) {
-  for (const name in schema) {
-    const node = schema[name];
-
-    if (node.fields) {
-      setCompress(node.fields, type);
-    } else {
-      node.compression = type;
-    }
-  }
-}
-
-function buildFields(schema, rLevelParentMax, dLevelParentMax, path) {
-  const fieldList = {};
-
-  for (const name in schema) {
-    const opts = schema[name];
-    const required = !opts.optional;
-    const repeated = Boolean(opts.repeated);
-    let rLevelMax = rLevelParentMax;
-    let dLevelMax = dLevelParentMax;
-    let repetitionType = 'REQUIRED';
-
-    if (!required) {
-      repetitionType = 'OPTIONAL';
-      dLevelMax++;
-    }
-
-    if (repeated) {
-      repetitionType = 'REPEATED';
-      rLevelMax++;
-      if (required) dLevelMax++;
-    }
-
-    if (opts.fields) {
-      const cpath = path.concat([name]);
-      fieldList[name] = {
-        name,
-        path: cpath,
-        key: cpath.join(),
-        repetitionType,
-        rLevelMax,
-        dLevelMax,
-        isNested: true,
-        fieldCount: Object.keys(opts.fields).length,
-        fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)
-      };
-      continue;
-    }
-
-    const typeDef = PARQUET_LOGICAL_TYPES[opts.type];
-
-    if (!typeDef) {
-      throw new Error("invalid parquet type: ".concat(opts.type));
-    }
-
-    opts.encoding = opts.encoding || 'PLAIN';
-
-    if (!(opts.encoding in PARQUET_CODECS)) {
-      throw new Error("unsupported parquet encoding: ".concat(opts.encoding));
-    }
-
-    opts.compression = opts.compression || 'UNCOMPRESSED';
-
-    if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {
-      throw new Error("unsupported compression method: ".concat(opts.compression));
-    }
-
-    const cpath = path.concat([name]);
-    fieldList[name] = {
-      name,
-      primitiveType: typeDef.primitiveType,
-      originalType: typeDef.originalType,
-      path: cpath,
-      key: cpath.join(),
-      repetitionType,
-      encoding: opts.encoding,
-      compression: opts.compression,
-      typeLength: opts.typeLength || typeDef.typeLength,
-      presision: opts.presision,
-      scale: opts.scale,
-      rLevelMax,
-      dLevelMax
-    };
-  }
-
-  return fieldList;
-}
-
-function listFields(fields) {
-  let list = [];
-
-  for (const k in fields) {
-    list.push(fields[k]);
-
-    if (fields[k].isNested) {
-      list = list.concat(listFields(fields[k].fields));
-    }
-  }
-
-  return list;
-}
-//# sourceMappingURL=schema.js.map
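The `buildFields` function above assigns Dremel repetition/definition levels: an OPTIONAL field raises `dLevelMax` by one, a REPEATED field raises `rLevelMax` (and also `dLevelMax` when the field is not optional), and nested fields inherit their parent's maxima. A small worked example of those rules (hypothetical schema, pre-4.0 deep import path):

```js
import {ParquetSchema} from '@loaders.gl/parquet/dist/esm/parquetjs/schema/schema';

const schema = new ParquetSchema({
  name: {type: 'UTF8'},                    // REQUIRED -> rLevelMax 0, dLevelMax 0
  tag: {type: 'UTF8', optional: true},     // OPTIONAL -> rLevelMax 0, dLevelMax 1
  scores: {type: 'DOUBLE', repeated: true} // REPEATED -> rLevelMax 1, dLevelMax 1
});

console.log(schema.fields.scores.rLevelMax, schema.fields.scores.dLevelMax); // 1 1
```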
package/dist/esm/parquetjs/schema/schema.js.map
@@ -1 +0,0 @@
{"version":3,"sources":["../../../../src/parquetjs/schema/schema.ts"],"names":["PARQUET_CODECS","PARQUET_COMPRESSION_METHODS","materializeRecords","shredBuffer","shredRecord","PARQUET_LOGICAL_TYPES","ParquetSchema","constructor","schema","fields","buildFields","fieldList","listFields","findField","path","split","slice","n","length","shift","findFieldBranch","branch","push","record","buffer","compress","type","setCompress","name","node","compression","rLevelParentMax","dLevelParentMax","opts","required","optional","repeated","Boolean","rLevelMax","dLevelMax","repetitionType","cpath","concat","key","join","isNested","fieldCount","Object","keys","typeDef","Error","encoding","primitiveType","originalType","typeLength","presision","scale","list","k"],"mappings":";AAEA,SAAQA,cAAR,QAA6B,WAA7B;AACA,SAAQC,2BAAR,QAA0C,gBAA1C;AAUA,SAAQC,kBAAR,EAA4BC,WAA5B,EAAyCC,WAAzC,QAA2D,SAA3D;AACA,SAAQC,qBAAR,QAAoC,SAApC;AAKA,OAAO,MAAMC,aAAN,CAAoB;AAQzBC,EAAAA,WAAW,CAACC,MAAD,EAA2B;AAAA;;AAAA;;AAAA;;AACpC,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKC,MAAL,GAAcC,WAAW,CAACF,MAAD,EAAS,CAAT,EAAY,CAAZ,EAAe,EAAf,CAAzB;AACA,SAAKG,SAAL,GAAiBC,UAAU,CAAC,KAAKH,MAAN,CAA3B;AACD;;AAKDI,EAAAA,SAAS,CAACC,IAAD,EAAwC;AAC/C,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD,KAHD,MAGO;AAELD,MAAAA,IAAI,GAAGA,IAAI,CAACE,KAAL,CAAW,CAAX,CAAP;AACD;;AAED,QAAIC,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCF,MAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;;AAED,WAAOQ,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAR;AACD;;AAKDM,EAAAA,eAAe,CAACN,IAAD,EAA0C;AACvD,QAAI,OAAOA,IAAP,KAAgB,QAApB,EAA8B;AAE5BA,MAAAA,IAAI,GAAGA,IAAI,CAACC,KAAL,CAAW,GAAX,CAAP;AACD;;AACD,UAAMM,MAAsB,GAAG,EAA/B;AACA,QAAIJ,CAAC,GAAG,KAAKR,MAAb;;AACA,WAAOK,IAAI,CAACI,MAAL,GAAc,CAArB,EAAwBJ,IAAI,CAACK,KAAL,EAAxB,EAAsC;AACpCE,MAAAA,MAAM,CAACC,IAAP,CAAYL,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAb;;AACA,UAAIA,IAAI,CAACI,MAAL,GAAc,CAAlB,EAAqB;AACnBD,QAAAA,CAAC,GAAGA,CAAC,CAACH,IAAI,CAAC,CAAD,CAAL,CAAD,CAAWL,MAAf;AACD;AACF;;AACD,WAAOY,MAAP;AACD;;AAEDjB,EAAAA,WAAW,CAACmB,MAAD,EAAwBC,MAAxB,EAAqD;AAC9DpB,IAAAA,WAAW,CAAC,IAAD,EAAOmB,MAAP,EAAeC,MAAf,CAAX;AACD;;AAEDtB,EAAAA,kBAAkB,CAACsB,MAAD,EAAyC;AACzD,WAAOtB,kBAAkB,CAAC,IAAD,EAAOsB,MAAP,CAAzB;AACD;;AAEDC,EAAAA,QAAQ,CAACC,IAAD,EAAiC;AACvCC,IAAAA,WAAW,CAAC,KAAKnB,MAAN,EAAckB,IAAd,CAAX;AACAC,IAAAA,WAAW,CAAC,KAAKlB,MAAN,EAAciB,IAAd,CAAX;AACA,WAAO,IAAP;AACD;;AAEDF,EAAAA,MAAM,GAAkB;AACtB,WAAOrB,WAAW,CAAC,IAAD,CAAlB;AACD;;AArEwB;;AAwE3B,SAASwB,WAAT,CAAqBnB,MAArB,EAAkCkB,IAAlC,EAA4D;AAC1D,OAAK,MAAME,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMqB,IAAI,GAAGrB,MAAM,CAACoB,IAAD,CAAnB;;AACA,QAAIC,IAAI,CAACpB,MAAT,EAAiB;AACfkB,MAAAA,WAAW,CAACE,IAAI,CAACpB,MAAN,EAAciB,IAAd,CAAX;AACD,KAFD,MAEO;AACLG,MAAAA,IAAI,CAACC,WAAL,GAAmBJ,IAAnB;AACD;AACF;AACF;;AAGD,SAAShB,WAAT,CACEF,MADF,EAEEuB,eAFF,EAGEC,eAHF,EAIElB,IAJF,EAKgC;AAC9B,QAAMH,SAAuC,GAAG,EAAhD;;AAEA,OAAK,MAAMiB,IAAX,IAAmBpB,MAAnB,EAA2B;AACzB,UAAMyB,IAAI,GAAGzB,MAAM,CAACoB,IAAD,CAAnB;AAGA,UAAMM,QAAQ,GAAG,CAACD,IAAI,CAACE,QAAvB;AACA,UAAMC,QAAQ,GAAGC,OAAO,CAACJ,IAAI,CAACG,QAAN,CAAxB;AACA,QAAIE,SAAS,GAAGP,eAAhB;AACA,QAAIQ,SAAS,GAAGP,eAAhB;AAEA,QAAIQ,cAA8B,GAAG,UAArC;;AACA,QAAI,CAACN,QAAL,EAAe;AACbM,MAAAA,cAAc,GAAG,UAAjB;AACAD,MAAAA,SAAS;AACV;;AACD,QAAIH,QAAJ,EAAc;AACZI,MAAAA,cAAc,GAAG,UAAjB;AACAF,MAAAA,SAAS;AACT,UAAIJ,QAAJ,EAAcK,SAAS;AACxB;;AAGD,QAAIN,IAAI,CAACxB,MAAT,EAAiB;AACf,YAAMgC,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,MAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,QAAAA,IADgB;AAEhBd,QAAAA,IAAI,EAAE2B,
KAFU;AAGhBE,QAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EAHW;AAIhBJ,QAAAA,cAJgB;AAKhBF,QAAAA,SALgB;AAMhBC,QAAAA,SANgB;AAOhBM,QAAAA,QAAQ,EAAE,IAPM;AAQhBC,QAAAA,UAAU,EAAEC,MAAM,CAACC,IAAP,CAAYf,IAAI,CAACxB,MAAjB,EAAyBS,MARrB;AAShBT,QAAAA,MAAM,EAAEC,WAAW,CAACuB,IAAI,CAACxB,MAAN,EAAc6B,SAAd,EAAyBC,SAAzB,EAAoCE,KAApC;AATH,OAAlB;AAWA;AACD;;AAED,UAAMQ,OAAY,GAAG5C,qBAAqB,CAAC4B,IAAI,CAACP,IAAN,CAA1C;;AACA,QAAI,CAACuB,OAAL,EAAc;AACZ,YAAM,IAAIC,KAAJ,iCAAmCjB,IAAI,CAACP,IAAxC,EAAN;AACD;;AAEDO,IAAAA,IAAI,CAACkB,QAAL,GAAgBlB,IAAI,CAACkB,QAAL,IAAiB,OAAjC;;AACA,QAAI,EAAElB,IAAI,CAACkB,QAAL,IAAiBnD,cAAnB,CAAJ,EAAwC;AACtC,YAAM,IAAIkD,KAAJ,yCAA2CjB,IAAI,CAACkB,QAAhD,EAAN;AACD;;AAEDlB,IAAAA,IAAI,CAACH,WAAL,GAAmBG,IAAI,CAACH,WAAL,IAAoB,cAAvC;;AACA,QAAI,EAAEG,IAAI,CAACH,WAAL,IAAoB7B,2BAAtB,CAAJ,EAAwD;AACtD,YAAM,IAAIiD,KAAJ,2CAA6CjB,IAAI,CAACH,WAAlD,EAAN;AACD;;AAGD,UAAMW,KAAK,GAAG3B,IAAI,CAAC4B,MAAL,CAAY,CAACd,IAAD,CAAZ,CAAd;AACAjB,IAAAA,SAAS,CAACiB,IAAD,CAAT,GAAkB;AAChBA,MAAAA,IADgB;AAEhBwB,MAAAA,aAAa,EAAEH,OAAO,CAACG,aAFP;AAGhBC,MAAAA,YAAY,EAAEJ,OAAO,CAACI,YAHN;AAIhBvC,MAAAA,IAAI,EAAE2B,KAJU;AAKhBE,MAAAA,GAAG,EAAEF,KAAK,CAACG,IAAN,EALW;AAMhBJ,MAAAA,cANgB;AAOhBW,MAAAA,QAAQ,EAAElB,IAAI,CAACkB,QAPC;AAQhBrB,MAAAA,WAAW,EAAEG,IAAI,CAACH,WARF;AAShBwB,MAAAA,UAAU,EAAErB,IAAI,CAACqB,UAAL,IAAmBL,OAAO,CAACK,UATvB;AAUhBC,MAAAA,SAAS,EAAEtB,IAAI,CAACsB,SAVA;AAWhBC,MAAAA,KAAK,EAAEvB,IAAI,CAACuB,KAXI;AAYhBlB,MAAAA,SAZgB;AAahBC,MAAAA;AAbgB,KAAlB;AAeD;;AACD,SAAO5B,SAAP;AACD;;AAED,SAASC,UAAT,CAAoBH,MAApB,EAA0E;AACxE,MAAIgD,IAAoB,GAAG,EAA3B;;AACA,OAAK,MAAMC,CAAX,IAAgBjD,MAAhB,EAAwB;AACtBgD,IAAAA,IAAI,CAACnC,IAAL,CAAUb,MAAM,CAACiD,CAAD,CAAhB;;AACA,QAAIjD,MAAM,CAACiD,CAAD,CAAN,CAAUb,QAAd,EAAwB;AACtBY,MAAAA,IAAI,GAAGA,IAAI,CAACf,MAAL,CAAY9B,UAAU,CAACH,MAAM,CAACiD,CAAD,CAAN,CAAUjD,MAAX,CAAtB,CAAP;AACD;AACF;;AACD,SAAOgD,IAAP;AACD","sourcesContent":["// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {PARQUET_CODECS} from '../codecs';\nimport {PARQUET_COMPRESSION_METHODS} from '../compression';\nimport {\n FieldDefinition,\n ParquetBuffer,\n ParquetCompression,\n ParquetField,\n ParquetRecord,\n RepetitionType,\n SchemaDefinition\n} from './declare';\nimport {materializeRecords, shredBuffer, shredRecord} from './shred';\nimport {PARQUET_LOGICAL_TYPES} from './types';\n\n/**\n * A parquet file schema\n */\nexport class ParquetSchema {\n public schema: Record<string, FieldDefinition>;\n public fields: Record<string, ParquetField>;\n public fieldList: ParquetField[];\n\n /**\n * Create a new schema from a JSON schema definition\n */\n constructor(schema: SchemaDefinition) {\n this.schema = schema;\n this.fields = buildFields(schema, 0, 0, []);\n this.fieldList = listFields(this.fields);\n }\n\n /**\n * Retrieve a field definition\n */\n findField(path: string | string[]): ParquetField {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n } else {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.slice(0); // clone array\n }\n\n let n = this.fields;\n for (; path.length > 1; path.shift()) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n\n return n[path[0]];\n }\n\n /**\n * Retrieve a field definition and all the field's ancestors\n */\n findFieldBranch(path: string | string[]): ParquetField[] {\n if (typeof path === 'string') {\n // tslint:disable-next-line:no-parameter-reassignment\n path = path.split(',');\n }\n const branch: ParquetField[] = [];\n let n = this.fields;\n for (; 
path.length > 0; path.shift()) {\n branch.push(n[path[0]]);\n if (path.length > 1) {\n n = n[path[0]].fields as Record<string, ParquetField>;\n }\n }\n return branch;\n }\n\n shredRecord(record: ParquetRecord, buffer: ParquetBuffer): void {\n shredRecord(this, record, buffer);\n }\n\n materializeRecords(buffer: ParquetBuffer): ParquetRecord[] {\n return materializeRecords(this, buffer);\n }\n\n compress(type: ParquetCompression): this {\n setCompress(this.schema, type);\n setCompress(this.fields, type);\n return this;\n }\n\n buffer(): ParquetBuffer {\n return shredBuffer(this);\n }\n}\n\nfunction setCompress(schema: any, type: ParquetCompression) {\n for (const name in schema) {\n const node = schema[name];\n if (node.fields) {\n setCompress(node.fields, type);\n } else {\n node.compression = type;\n }\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction buildFields(\n schema: SchemaDefinition,\n rLevelParentMax: number,\n dLevelParentMax: number,\n path: string[]\n): Record<string, ParquetField> {\n const fieldList: Record<string, ParquetField> = {};\n\n for (const name in schema) {\n const opts = schema[name];\n\n /* field repetition type */\n const required = !opts.optional;\n const repeated = Boolean(opts.repeated);\n let rLevelMax = rLevelParentMax;\n let dLevelMax = dLevelParentMax;\n\n let repetitionType: RepetitionType = 'REQUIRED';\n if (!required) {\n repetitionType = 'OPTIONAL';\n dLevelMax++;\n }\n if (repeated) {\n repetitionType = 'REPEATED';\n rLevelMax++;\n if (required) dLevelMax++;\n }\n\n /* nested field */\n if (opts.fields) {\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n rLevelMax,\n dLevelMax,\n isNested: true,\n fieldCount: Object.keys(opts.fields).length,\n fields: buildFields(opts.fields, rLevelMax, dLevelMax, cpath)\n };\n continue; // eslint-disable-line no-continue\n }\n\n const typeDef: any = PARQUET_LOGICAL_TYPES[opts.type!];\n if (!typeDef) {\n throw new Error(`invalid parquet type: ${opts.type}`);\n }\n\n opts.encoding = opts.encoding || 'PLAIN';\n if (!(opts.encoding in PARQUET_CODECS)) {\n throw new Error(`unsupported parquet encoding: ${opts.encoding}`);\n }\n\n opts.compression = opts.compression || 'UNCOMPRESSED';\n if (!(opts.compression in PARQUET_COMPRESSION_METHODS)) {\n throw new Error(`unsupported compression method: ${opts.compression}`);\n }\n\n /* add to schema */\n const cpath = path.concat([name]);\n fieldList[name] = {\n name,\n primitiveType: typeDef.primitiveType,\n originalType: typeDef.originalType,\n path: cpath,\n key: cpath.join(),\n repetitionType,\n encoding: opts.encoding,\n compression: opts.compression,\n typeLength: opts.typeLength || typeDef.typeLength,\n presision: opts.presision,\n scale: opts.scale,\n rLevelMax,\n dLevelMax\n };\n }\n return fieldList;\n}\n\nfunction listFields(fields: Record<string, ParquetField>): ParquetField[] {\n let list: ParquetField[] = [];\n for (const k in fields) {\n list.push(fields[k]);\n if (fields[k].isNested) {\n list = list.concat(listFields(fields[k].fields!));\n }\n }\n return list;\n}\n"],"file":"schema.js"}
@@ -1,162 +0,0 @@
-import { ParquetBuffer } from './declare';
-import * as Types from './types';
-export { ParquetBuffer };
-export function shredBuffer(schema) {
-  const columnData = {};
-
-  for (const field of schema.fieldList) {
-    columnData[field.key] = {
-      dlevels: [],
-      rlevels: [],
-      values: [],
-      pageHeaders: [],
-      count: 0
-    };
-  }
-
-  return {
-    rowCount: 0,
-    columnData
-  };
-}
-export function shredRecord(schema, record, buffer) {
-  const data = shredBuffer(schema).columnData;
-  shredRecordFields(schema.fields, record, data, 0, 0);
-
-  if (buffer.rowCount === 0) {
-    buffer.rowCount = 1;
-    buffer.columnData = data;
-    return;
-  }
-
-  buffer.rowCount += 1;
-
-  for (const field of schema.fieldList) {
-    Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);
-    Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);
-    Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);
-    buffer.columnData[field.key].count += data[field.key].count;
-  }
-}
-
-function shredRecordFields(fields, record, data, rLevel, dLevel) {
-  for (const name in fields) {
-    const field = fields[name];
-    let values = [];
-
-    if (record && field.name in record && record[field.name] !== undefined && record[field.name] !== null) {
-      if (record[field.name].constructor === Array) {
-        values = record[field.name];
-      } else {
-        values.push(record[field.name]);
-      }
-    }
-
-    if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {
-      throw new Error("missing required field: ".concat(field.name));
-    }
-
-    if (values.length > 1 && field.repetitionType !== 'REPEATED') {
-      throw new Error("too many values for field: ".concat(field.name));
-    }
-
-    if (values.length === 0) {
-      if (field.isNested) {
-        shredRecordFields(field.fields, null, data, rLevel, dLevel);
-      } else {
-        data[field.key].count += 1;
-        data[field.key].rlevels.push(rLevel);
-        data[field.key].dlevels.push(dLevel);
-      }
-
-      continue;
-    }
-
-    for (let i = 0; i < values.length; i++) {
-      const rlvl = i === 0 ? rLevel : field.rLevelMax;
-
-      if (field.isNested) {
-        shredRecordFields(field.fields, values[i], data, rlvl, field.dLevelMax);
-      } else {
-        data[field.key].count += 1;
-        data[field.key].rlevels.push(rlvl);
-        data[field.key].dlevels.push(field.dLevelMax);
-        data[field.key].values.push(Types.toPrimitive(field.originalType || field.primitiveType, values[i]));
-      }
-    }
-  }
-}
-
-export function materializeRecords(schema, buffer) {
-  const records = [];
-
-  for (let i = 0; i < buffer.rowCount; i++) records.push({});
-
-  for (const key in buffer.columnData) {
-    materializeColumn(schema, buffer, key, records);
-  }
-
-  return records;
-}
-
-function materializeColumn(schema, buffer, key, records) {
-  const data = buffer.columnData[key];
-  if (!data.count) return;
-  const field = schema.findField(key);
-  const branch = schema.findFieldBranch(key);
-  const rLevels = new Array(field.rLevelMax + 1).fill(0);
-  let vIndex = 0;
-
-  for (let i = 0; i < data.count; i++) {
-    const dLevel = data.dlevels[i];
-    const rLevel = data.rlevels[i];
-    rLevels[rLevel]++;
-    rLevels.fill(0, rLevel + 1);
-    let rIndex = 0;
-    let record = records[rLevels[rIndex++] - 1];
-
-    for (const step of branch) {
-      if (step === field) break;
-      if (dLevel < step.dLevelMax) break;
-
-      if (step.repetitionType === 'REPEATED') {
-        if (!(step.name in record)) {
-          record[step.name] = [];
-        }
-
-        const ix = rLevels[rIndex++];
-
-        while (record[step.name].length <= ix) {
-          record[step.name].push({});
-        }
-
-        record = record[step.name][ix];
-      } else {
-        record[step.name] = record[step.name] || {};
-        record = record[step.name];
-      }
-    }
-
-    if (dLevel === field.dLevelMax) {
-      const value = Types.fromPrimitive(field.originalType || field.primitiveType, data.values[vIndex], field);
-      vIndex++;
-
-      if (field.repetitionType === 'REPEATED') {
-        if (!(field.name in record)) {
-          record[field.name] = [];
-        }
-
-        const ix = rLevels[rIndex];
-
-        while (record[field.name].length <= ix) {
-          record[field.name].push(null);
-        }
-
-        record[field.name][ix] = value;
-      } else {
-        record[field.name] = value;
-      }
-    }
-  }
-}
-//# sourceMappingURL=shred.js.map
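The hunk above deletes the transpiled Dremel "shredding" module, apparently dist/es5/parquetjs/schema/shred.js (the file name and source path come from the map below), consistent with the dist/{es5/parquetjs → parquetjs} renames listed in this diff rather than a removal from the package. A small sketch of the shredding step follows, with internal module paths assumed from the source maps; it shows how each record becomes per-column <value, rlevel, dlevel> triples.

// Sketch only: internal module paths inferred from the source maps in this diff.
import {ParquetSchema} from './src/parquetjs/schema/schema';
import {shredBuffer, shredRecord} from './src/parquetjs/schema/shred';

const schema = new ParquetSchema({
  name: {type: 'UTF8'},
  tags: {type: 'UTF8', repeated: true}
});

// shredBuffer() allocates one empty {dlevels, rlevels, values} column per leaf field.
const buffer = shredBuffer(schema);

// Each call appends one record's level-encoded triples to every column.
shredRecord(schema, {name: 'a', tags: ['x', 'y']}, buffer);
shredRecord(schema, {name: 'b', tags: []}, buffer);

console.log(buffer.rowCount); // 2
// 'x' opens a new row (rlevel 0); 'y' repeats within the same field (rlevel 1);
// the empty list in row 2 is recorded as rlevel 0 / dlevel 0 with no value.
console.log(buffer.columnData.tags.rlevels); // [0, 1, 0]
console.log(buffer.columnData.tags.dlevels); // [1, 1, 0]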
@@ -1 +0,0 @@
-
{"version":3,"sources":["../../../../src/parquetjs/schema/shred.ts"],"names":["ParquetBuffer","Types","shredBuffer","schema","columnData","field","fieldList","key","dlevels","rlevels","values","pageHeaders","count","rowCount","shredRecord","record","buffer","data","shredRecordFields","fields","Array","prototype","push","apply","rLevel","dLevel","name","undefined","constructor","length","Boolean","repetitionType","Error","isNested","i","rlvl","rLevelMax","dLevelMax","toPrimitive","originalType","primitiveType","materializeRecords","records","materializeColumn","findField","branch","findFieldBranch","rLevels","fill","vIndex","rIndex","step","ix","value","fromPrimitive"],"mappings":"AAEA,SAAQA,aAAR,QAAsE,WAAtE;AAEA,OAAO,KAAKC,KAAZ,MAAuB,SAAvB;AAEA,SAAQD,aAAR;AAEA,OAAO,SAASE,WAAT,CAAqBC,MAArB,EAA2D;AAChE,QAAMC,UAAuC,GAAG,EAAhD;;AACA,OAAK,MAAMC,KAAX,IAAoBF,MAAM,CAACG,SAA3B,EAAsC;AACpCF,IAAAA,UAAU,CAACC,KAAK,CAACE,GAAP,CAAV,GAAwB;AACtBC,MAAAA,OAAO,EAAE,EADa;AAEtBC,MAAAA,OAAO,EAAE,EAFa;AAGtBC,MAAAA,MAAM,EAAE,EAHc;AAItBC,MAAAA,WAAW,EAAE,EAJS;AAKtBC,MAAAA,KAAK,EAAE;AALe,KAAxB;AAOD;;AACD,SAAO;AAACC,IAAAA,QAAQ,EAAE,CAAX;AAAcT,IAAAA;AAAd,GAAP;AACD;AAwBD,OAAO,SAASU,WAAT,CAAqBX,MAArB,EAA4CY,MAA5C,EAAyDC,MAAzD,EAAsF;AAE3F,QAAMC,IAAI,GAAGf,WAAW,CAACC,MAAD,CAAX,CAAoBC,UAAjC;AAEAc,EAAAA,iBAAiB,CAACf,MAAM,CAACgB,MAAR,EAAgBJ,MAAhB,EAAwBE,IAAxB,EAA8B,CAA9B,EAAiC,CAAjC,CAAjB;;AAGA,MAAID,MAAM,CAACH,QAAP,KAAoB,CAAxB,EAA2B;AACzBG,IAAAA,MAAM,CAACH,QAAP,GAAkB,CAAlB;AACAG,IAAAA,MAAM,CAACZ,UAAP,GAAoBa,IAApB;AACA;AACD;;AACDD,EAAAA,MAAM,CAACH,QAAP,IAAmB,CAAnB;;AACA,OAAK,MAAMR,KAAX,IAAoBF,MAAM,CAACG,SAA3B,EAAsC;AACpCc,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BE,OAAxD,EAAiEQ,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAjF;AACAW,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BC,OAAxD,EAAiES,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAjF;AACAY,IAAAA,KAAK,CAACC,SAAN,CAAgBC,IAAhB,CAAqBC,KAArB,CAA2BP,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BG,MAAxD,EAAgEO,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBG,MAAhF;AACAM,IAAAA,MAAM,CAACZ,UAAP,CAAkBC,KAAK,CAACE,GAAxB,EAA6BK,KAA7B,IAAsCK,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBK,KAAtD;AACD;AACF;;AAGD,SAASM,iBAAT,CACEC,MADF,EAEEJ,MAFF,EAGEE,IAHF,EAIEO,MAJF,EAKEC,MALF,EAME;AACA,OAAK,MAAMC,IAAX,IAAmBP,MAAnB,EAA2B;AACzB,UAAMd,KAAK,GAAGc,MAAM,CAACO,IAAD,CAApB;AAGA,QAAIhB,MAAa,GAAG,EAApB;;AACA,QACEK,MAAM,IACNV,KAAK,CAACqB,IAAN,IAAcX,MADd,IAEAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,KAAuBC,SAFvB,IAGAZ,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,KAAuB,IAJzB,EAKE;AACA,UAAIX,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmBE,WAAnB,KAAmCR,KAAvC,EAA8C;AAC5CV,QAAAA,MAAM,GAAGK,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAf;AACD,OAFD,MAEO;AACLhB,QAAAA,MAAM,CAACY,IAAP,CAAYP,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAlB;AACD;AACF;;AAED,QAAIhB,MAAM,CAACmB,MAAP,KAAkB,CAAlB,IAAuBC,OAAO,CAACf,MAAD,CAA9B,IAA0CV,KAAK,CAAC0B,cAAN,KAAyB,UAAvE,EAAmF;AACjF,YAAM,IAAIC,KAAJ,mCAAqC3B,KAAK,CAACqB,IAA3C,EAAN;AACD;;AACD,QAAIhB,MAAM,CAACmB,MAAP,GAAgB,CAAhB,IAAqBxB,KAAK,CAAC0B,cAAN,KAAyB,UAAlD,EAA8D;AAC5D,YAAM,IAAIC,KAAJ,sCAAwC3B,KAAK,CAACqB,IAA9C,EAAN;AACD;;AAGD,QAAIhB,MAAM,CAACmB,MAAP,KAAkB,CAAtB,EAAyB;AACvB,UAAIxB,KAAK,CAAC4B,QAAV,EAAoB;AAClBf,QAAAA,iBAAiB,CAACb,KAAK,CAACc,MAAP,EAAgB,IAAhB,EAAsBF,IAAtB,EAA4BO,MAA5B,EAAoCC,MAApC,CAAjB;AACD,OAFD,MAEO;AACLR,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBK,KAAhB,IAAyB,CAAzB;AACAK,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAhB,CAAwBa,IAAxB,CAA6BE,MAA7B;AACAP,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAhB,CAAwBc,IAAxB,CAA6BG,MA
A7B;AACD;;AACD;AACD;;AAGD,SAAK,IAAIS,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGxB,MAAM,CAACmB,MAA3B,EAAmCK,CAAC,EAApC,EAAwC;AACtC,YAAMC,IAAI,GAAGD,CAAC,KAAK,CAAN,GAAUV,MAAV,GAAmBnB,KAAK,CAAC+B,SAAtC;;AACA,UAAI/B,KAAK,CAAC4B,QAAV,EAAoB;AAClBf,QAAAA,iBAAiB,CAACb,KAAK,CAACc,MAAP,EAAgBT,MAAM,CAACwB,CAAD,CAAtB,EAA2BjB,IAA3B,EAAiCkB,IAAjC,EAAuC9B,KAAK,CAACgC,SAA7C,CAAjB;AACD,OAFD,MAEO;AACLpB,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBK,KAAhB,IAAyB,CAAzB;AACAK,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBE,OAAhB,CAAwBa,IAAxB,CAA6Ba,IAA7B;AACAlB,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBC,OAAhB,CAAwBc,IAAxB,CAA6BjB,KAAK,CAACgC,SAAnC;AACApB,QAAAA,IAAI,CAACZ,KAAK,CAACE,GAAP,CAAJ,CAAgBG,MAAhB,CAAuBY,IAAvB,CACErB,KAAK,CAACqC,WAAN,CAAmBjC,KAAK,CAACkC,YAAN,IAAsBlC,KAAK,CAACmC,aAA/C,EAAgE9B,MAAM,CAACwB,CAAD,CAAtE,CADF;AAGD;AACF;AACF;AACF;;AAqBD,OAAO,SAASO,kBAAT,CAA4BtC,MAA5B,EAAmDa,MAAnD,EAA2F;AAChG,QAAM0B,OAAwB,GAAG,EAAjC;;AACA,OAAK,IAAIR,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGlB,MAAM,CAACH,QAA3B,EAAqCqB,CAAC,EAAtC,EAA0CQ,OAAO,CAACpB,IAAR,CAAa,EAAb;;AAC1C,OAAK,MAAMf,GAAX,IAAkBS,MAAM,CAACZ,UAAzB,EAAqC;AACnCuC,IAAAA,iBAAiB,CAACxC,MAAD,EAASa,MAAT,EAAiBT,GAAjB,EAAsBmC,OAAtB,CAAjB;AACD;;AACD,SAAOA,OAAP;AACD;;AAGD,SAASC,iBAAT,CACExC,MADF,EAEEa,MAFF,EAGET,GAHF,EAIEmC,OAJF,EAKE;AACA,QAAMzB,IAAI,GAAGD,MAAM,CAACZ,UAAP,CAAkBG,GAAlB,CAAb;AACA,MAAI,CAACU,IAAI,CAACL,KAAV,EAAiB;AAEjB,QAAMP,KAAK,GAAGF,MAAM,CAACyC,SAAP,CAAiBrC,GAAjB,CAAd;AACA,QAAMsC,MAAM,GAAG1C,MAAM,CAAC2C,eAAP,CAAuBvC,GAAvB,CAAf;AAGA,QAAMwC,OAAiB,GAAG,IAAI3B,KAAJ,CAAUf,KAAK,CAAC+B,SAAN,GAAkB,CAA5B,EAA+BY,IAA/B,CAAoC,CAApC,CAA1B;AACA,MAAIC,MAAM,GAAG,CAAb;;AACA,OAAK,IAAIf,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGjB,IAAI,CAACL,KAAzB,EAAgCsB,CAAC,EAAjC,EAAqC;AACnC,UAAMT,MAAM,GAAGR,IAAI,CAACT,OAAL,CAAa0B,CAAb,CAAf;AACA,UAAMV,MAAM,GAAGP,IAAI,CAACR,OAAL,CAAayB,CAAb,CAAf;AACAa,IAAAA,OAAO,CAACvB,MAAD,CAAP;AACAuB,IAAAA,OAAO,CAACC,IAAR,CAAa,CAAb,EAAgBxB,MAAM,GAAG,CAAzB;AAEA,QAAI0B,MAAM,GAAG,CAAb;AACA,QAAInC,MAAM,GAAG2B,OAAO,CAACK,OAAO,CAACG,MAAM,EAAP,CAAP,GAAoB,CAArB,CAApB;;AAGA,SAAK,MAAMC,IAAX,IAAmBN,MAAnB,EAA2B;AACzB,UAAIM,IAAI,KAAK9C,KAAb,EAAoB;AACpB,UAAIoB,MAAM,GAAG0B,IAAI,CAACd,SAAlB,EAA6B;;AAC7B,UAAIc,IAAI,CAACpB,cAAL,KAAwB,UAA5B,EAAwC;AACtC,YAAI,EAAEoB,IAAI,CAACzB,IAAL,IAAaX,MAAf,CAAJ,EAA4B;AAE1BA,UAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,GAAoB,EAApB;AACD;;AACD,cAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAM,EAAP,CAAlB;;AACA,eAAOnC,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkBG,MAAlB,IAA4BuB,EAAnC,EAAuC;AAErCrC,UAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkBJ,IAAlB,CAAuB,EAAvB;AACD;;AACDP,QAAAA,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,CAAkB0B,EAAlB,CAAT;AACD,OAXD,MAWO;AACLrC,QAAAA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,GAAoBX,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAN,IAAqB,EAAzC;AACAX,QAAAA,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACzB,IAAN,CAAf;AACD;AACF;;AAGD,QAAID,MAAM,KAAKpB,KAAK,CAACgC,SAArB,EAAgC;AAC9B,YAAMgB,KAAK,GAAGpD,KAAK,CAACqD,aAAN,CAEZjD,KAAK,CAACkC,YAAN,IAAsBlC,KAAK,CAACmC,aAFhB,EAGZvB,IAAI,CAACP,MAAL,CAAYuC,MAAZ,CAHY,EAIZ5C,KAJY,CAAd;AAMA4C,MAAAA,MAAM;;AACN,UAAI5C,KAAK,CAAC0B,cAAN,KAAyB,UAA7B,EAAyC;AACvC,YAAI,EAAE1B,KAAK,CAACqB,IAAN,IAAcX,MAAhB,CAAJ,EAA6B;AAE3BA,UAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,GAAqB,EAArB;AACD;;AACD,cAAM0B,EAAE,GAAGL,OAAO,CAACG,MAAD,CAAlB;;AACA,eAAOnC,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmBG,MAAnB,IAA6BuB,EAApC,EAAwC;AAEtCrC,UAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmBJ,IAAnB,CAAwB,IAAxB;AACD;;AACDP,QAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,CAAmB0B,EAAnB,IAAyBC,KAAzB;AACD,OAXD,MAWO;AACLtC,QAAAA,MAAM,CAACV,KAAK,CAACqB,IAAP,CAAN,GAAqB2B,KAArB;AACD;AACF;AACF;AACF","sourcesContent":["// Forked from 
https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)\n\nimport {ParquetBuffer, ParquetData, ParquetField, ParquetRecord} from './declare';\nimport {ParquetSchema} from './schema';\nimport * as Types from './types';\n\nexport {ParquetBuffer};\n\nexport function shredBuffer(schema: ParquetSchema): ParquetBuffer {\n const columnData: Record<string, ParquetData> = {};\n for (const field of schema.fieldList) {\n columnData[field.key] = {\n dlevels: [],\n rlevels: [],\n values: [],\n pageHeaders: [],\n count: 0\n };\n }\n return {rowCount: 0, columnData};\n}\n\n/**\n * 'Shred' a record into a list of <value, repetition_level, definition_level>\n * tuples per column using the Google Dremel Algorithm..\n *\n * The buffer argument must point to an object into which the shredded record\n * will be returned. You may re-use the buffer for repeated calls to this function\n * to append to an existing buffer, as long as the schema is unchanged.\n *\n * The format in which the shredded records will be stored in the buffer is as\n * follows:\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function shredRecord(schema: ParquetSchema, record: any, buffer: ParquetBuffer): void {\n /* shred the record, this may raise an exception */\n const data = shredBuffer(schema).columnData;\n\n shredRecordFields(schema.fields, record, data, 0, 0);\n\n /* if no error during shredding, add the shredded record to the buffer */\n if (buffer.rowCount === 0) {\n buffer.rowCount = 1;\n buffer.columnData = data;\n return;\n }\n buffer.rowCount += 1;\n for (const field of schema.fieldList) {\n Array.prototype.push.apply(buffer.columnData[field.key].rlevels, data[field.key].rlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].dlevels, data[field.key].dlevels);\n Array.prototype.push.apply(buffer.columnData[field.key].values, data[field.key].values);\n buffer.columnData[field.key].count += data[field.key].count;\n }\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction shredRecordFields(\n fields: Record<string, ParquetField>,\n record: any,\n data: Record<string, ParquetData>,\n rLevel: number,\n dLevel: number\n) {\n for (const name in fields) {\n const field = fields[name];\n\n // fetch values\n let values: any[] = [];\n if (\n record &&\n field.name in record &&\n record[field.name] !== undefined &&\n record[field.name] !== null\n ) {\n if (record[field.name].constructor === Array) {\n values = record[field.name];\n } else {\n values.push(record[field.name]);\n }\n }\n // check values\n if (values.length === 0 && Boolean(record) && field.repetitionType === 'REQUIRED') {\n throw new Error(`missing required field: ${field.name}`);\n }\n if (values.length > 1 && field.repetitionType !== 'REPEATED') {\n throw new Error(`too many values for field: ${field.name}`);\n }\n\n // push null\n if (values.length === 0) {\n if (field.isNested) {\n shredRecordFields(field.fields!, null, data, rLevel, dLevel);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rLevel);\n data[field.key].dlevels.push(dLevel);\n }\n continue; // eslint-disable-line no-continue\n }\n\n // push values\n for (let i = 0; i < values.length; i++) {\n const rlvl = i === 0 ? 
rLevel : field.rLevelMax;\n if (field.isNested) {\n shredRecordFields(field.fields!, values[i], data, rlvl, field.dLevelMax);\n } else {\n data[field.key].count += 1;\n data[field.key].rlevels.push(rlvl);\n data[field.key].dlevels.push(field.dLevelMax);\n data[field.key].values.push(\n Types.toPrimitive((field.originalType || field.primitiveType)!, values[i])\n );\n }\n }\n }\n}\n\n/**\n * 'Materialize' a list of <value, repetition_level, definition_level>\n * tuples back to nested records (objects/arrays) using the Google Dremel\n * Algorithm..\n *\n * The buffer argument must point to an object with the following structure (i.e.\n * the same structure that is returned by shredRecords):\n *\n * buffer = {\n * columnData: [\n * 'my_col': {\n * dlevels: [d1, d2, .. dN],\n * rlevels: [r1, r2, .. rN],\n * values: [v1, v2, .. vN],\n * }, ...\n * ],\n * rowCount: X,\n * }\n */\nexport function materializeRecords(schema: ParquetSchema, buffer: ParquetBuffer): ParquetRecord[] {\n const records: ParquetRecord[] = [];\n for (let i = 0; i < buffer.rowCount; i++) records.push({});\n for (const key in buffer.columnData) {\n materializeColumn(schema, buffer, key, records);\n }\n return records;\n}\n\n// eslint-disable-next-line max-statements, complexity\nfunction materializeColumn(\n schema: ParquetSchema,\n buffer: ParquetBuffer,\n key: string,\n records: ParquetRecord[]\n) {\n const data = buffer.columnData[key];\n if (!data.count) return;\n\n const field = schema.findField(key);\n const branch = schema.findFieldBranch(key);\n\n // tslint:disable-next-line:prefer-array-literal\n const rLevels: number[] = new Array(field.rLevelMax + 1).fill(0);\n let vIndex = 0;\n for (let i = 0; i < data.count; i++) {\n const dLevel = data.dlevels[i];\n const rLevel = data.rlevels[i];\n rLevels[rLevel]++;\n rLevels.fill(0, rLevel + 1);\n\n let rIndex = 0;\n let record = records[rLevels[rIndex++] - 1];\n\n // Internal nodes\n for (const step of branch) {\n if (step === field) break;\n if (dLevel < step.dLevelMax) break;\n if (step.repetitionType === 'REPEATED') {\n if (!(step.name in record)) {\n // eslint-disable max-depth\n record[step.name] = [];\n }\n const ix = rLevels[rIndex++];\n while (record[step.name].length <= ix) {\n // eslint-disable max-depth\n record[step.name].push({});\n }\n record = record[step.name][ix];\n } else {\n record[step.name] = record[step.name] || {};\n record = record[step.name];\n }\n }\n\n // Leaf node\n if (dLevel === field.dLevelMax) {\n const value = Types.fromPrimitive(\n // @ts-ignore\n field.originalType || field.primitiveType,\n data.values[vIndex],\n field\n );\n vIndex++;\n if (field.repetitionType === 'REPEATED') {\n if (!(field.name in record)) {\n // eslint-disable max-depth\n record[field.name] = [];\n }\n const ix = rLevels[rIndex];\n while (record[field.name].length <= ix) {\n // eslint-disable max-depth\n record[field.name].push(null);\n }\n record[field.name][ix] = value;\n } else {\n record[field.name] = value;\n }\n }\n }\n}\n"],"file":"shred.js"}
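The single-line source map deleted above embeds the original shred.ts, whose materializeRecords() inverts the shredding: for each column it walks the repetition levels to find the insertion point in the nested record, then assigns a value whenever the definition level reaches the leaf's maximum. A round-trip sketch through a repeated group, under the same path assumptions as the previous examples:

// Sketch only: internal module path inferred from the source maps in this diff.
import {ParquetSchema} from './src/parquetjs/schema/schema';

const schema = new ParquetSchema({
  orders: {
    repeated: true,                         // rLevelMax 1, dLevelMax 1
    fields: {
      items: {type: 'UTF8', repeated: true} // rLevelMax 2, dLevelMax 2
    }
  }
});

const buffer = schema.buffer(); // same as shredBuffer(schema)
schema.shredRecord({orders: [{items: ['a', 'b']}, {items: ['c']}]}, buffer);

// Column 'orders,items' now holds rlevels [0, 2, 1] and dlevels [2, 2, 2]:
// 'a' opens the row, 'b' repeats at the items level, 'c' repeats at the orders level.
console.log(schema.materializeRecords(buffer));
// [{orders: [{items: ['a', 'b']}, {items: ['c']}]}]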