@loaders.gl/parquet 4.0.0-alpha.9 → 4.0.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +21 -32
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/buffer-polyfill/buffer-polyfill.browser.js +12 -0
- package/dist/es5/buffer-polyfill/buffer-polyfill.browser.js.map +1 -0
- package/dist/es5/buffer-polyfill/buffer-polyfill.node.js +16 -0
- package/dist/es5/buffer-polyfill/buffer-polyfill.node.js.map +1 -0
- package/dist/es5/buffer-polyfill/buffer.js +1665 -0
- package/dist/es5/buffer-polyfill/buffer.js.map +1 -0
- package/dist/es5/buffer-polyfill/index.js +27 -0
- package/dist/es5/buffer-polyfill/index.js.map +1 -0
- package/dist/es5/buffer-polyfill/install-buffer-polyfill.js +10 -0
- package/dist/es5/buffer-polyfill/install-buffer-polyfill.js.map +1 -0
- package/dist/es5/index.js +25 -28
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/geo/decode-geo-column.js +53 -0
- package/dist/es5/lib/geo/decode-geo-column.js.map +1 -0
- package/dist/es5/lib/geo/decode-geo-metadata.js +52 -16
- package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -1
- package/dist/es5/lib/geo/{geoparquet-schema.js → geoparquet-metadata-schema.js} +4 -4
- package/dist/es5/lib/geo/geoparquet-metadata-schema.js.map +1 -0
- package/dist/es5/lib/geo/geoparquet-metadata-schema.json +60 -0
- package/dist/es5/lib/parsers/get-parquet-schema.js +41 -0
- package/dist/es5/lib/parsers/get-parquet-schema.js.map +1 -0
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js +71 -72
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -1
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js +95 -62
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -1
- package/dist/es5/parquet-loader.js +14 -14
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +8 -11
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +6 -7
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +2 -3
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/es5/parquetjs/codecs/plain.js.map +1 -1
- package/dist/es5/parquetjs/codecs/rle.js.map +1 -1
- package/dist/es5/parquetjs/encoder/parquet-encoder.js.map +1 -1
- package/dist/es5/parquetjs/parquet-thrift/index.js +75 -0
- package/dist/es5/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/es5/parquetjs/parser/decoders.js +53 -36
- package/dist/es5/parquetjs/parser/decoders.js.map +1 -1
- package/dist/es5/parquetjs/parser/parquet-reader.js +15 -13
- package/dist/es5/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/es5/parquetjs/schema/declare.js.map +1 -1
- package/dist/es5/parquetjs/utils/read-utils.js +6 -7
- package/dist/es5/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/esm/buffer-polyfill/buffer-polyfill.browser.js +6 -0
- package/dist/esm/buffer-polyfill/buffer-polyfill.browser.js.map +1 -0
- package/dist/esm/buffer-polyfill/buffer-polyfill.node.js +10 -0
- package/dist/esm/buffer-polyfill/buffer-polyfill.node.js.map +1 -0
- package/dist/esm/buffer-polyfill/buffer.js +1489 -0
- package/dist/esm/buffer-polyfill/buffer.js.map +1 -0
- package/dist/esm/buffer-polyfill/index.js +4 -0
- package/dist/esm/buffer-polyfill/index.js.map +1 -0
- package/dist/esm/buffer-polyfill/install-buffer-polyfill.js +3 -0
- package/dist/esm/buffer-polyfill/install-buffer-polyfill.js.map +1 -0
- package/dist/esm/index.js +4 -10
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/geo/decode-geo-column.js +47 -0
- package/dist/esm/lib/geo/decode-geo-column.js.map +1 -0
- package/dist/esm/lib/geo/decode-geo-metadata.js +32 -5
- package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -1
- package/dist/esm/lib/geo/{geoparquet-schema.js → geoparquet-metadata-schema.js} +2 -2
- package/dist/esm/lib/geo/geoparquet-metadata-schema.js.map +1 -0
- package/dist/esm/lib/geo/geoparquet-metadata-schema.json +60 -0
- package/dist/esm/lib/parsers/get-parquet-schema.js +11 -0
- package/dist/esm/lib/parsers/get-parquet-schema.js.map +1 -0
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js +10 -11
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -1
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js +32 -7
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -1
- package/dist/esm/parquet-loader.js +12 -12
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +7 -9
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +6 -7
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +2 -3
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/esm/parquetjs/codecs/plain.js.map +1 -1
- package/dist/esm/parquetjs/codecs/rle.js.map +1 -1
- package/dist/esm/parquetjs/encoder/parquet-encoder.js.map +1 -1
- package/dist/esm/parquetjs/parquet-thrift/index.js +2 -0
- package/dist/esm/parquetjs/parquet-thrift/index.js.map +1 -1
- package/dist/esm/parquetjs/parser/decoders.js +48 -32
- package/dist/esm/parquetjs/parser/decoders.js.map +1 -1
- package/dist/esm/parquetjs/parser/parquet-reader.js +14 -12
- package/dist/esm/parquetjs/parser/parquet-reader.js.map +1 -1
- package/dist/esm/parquetjs/schema/declare.js.map +1 -1
- package/dist/esm/parquetjs/utils/read-utils.js +1 -1
- package/dist/esm/parquetjs/utils/read-utils.js.map +1 -1
- package/dist/parquet-worker.js +23 -34
- package/dist/parquet-worker.js.map +3 -3
- package/dist/src/buffer-polyfill/buffer-polyfill.browser.d.ts +4 -0
- package/dist/src/buffer-polyfill/buffer-polyfill.browser.d.ts.map +1 -0
- package/dist/src/buffer-polyfill/buffer-polyfill.node.d.ts +4 -0
- package/dist/src/buffer-polyfill/buffer-polyfill.node.d.ts.map +1 -0
- package/dist/src/buffer-polyfill/buffer.d.ts +222 -0
- package/dist/src/buffer-polyfill/buffer.d.ts.map +1 -0
- package/dist/src/buffer-polyfill/index.d.ts +4 -0
- package/dist/src/buffer-polyfill/index.d.ts.map +1 -0
- package/dist/src/buffer-polyfill/install-buffer-polyfill.d.ts +30 -0
- package/dist/src/buffer-polyfill/install-buffer-polyfill.d.ts.map +1 -0
- package/dist/src/bundle.d.ts.map +1 -0
- package/dist/src/constants.d.ts.map +1 -0
- package/dist/{index.d.ts → src/index.d.ts} +6 -10
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/lib/arrow/convert-columns-to-row-group.d.ts.map +1 -0
- package/dist/src/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -0
- package/dist/src/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -0
- package/dist/src/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -0
- package/dist/src/lib/geo/decode-geo-column.d.ts +4 -0
- package/dist/src/lib/geo/decode-geo-column.d.ts.map +1 -0
- package/dist/src/lib/geo/decode-geo-metadata.d.ts +44 -0
- package/dist/src/lib/geo/decode-geo-metadata.d.ts.map +1 -0
- package/dist/{lib/geo/geoparquet-schema.d.ts → src/lib/geo/geoparquet-metadata-schema.d.ts} +2 -3
- package/dist/src/lib/geo/geoparquet-metadata-schema.d.ts.map +1 -0
- package/dist/src/lib/parsers/get-parquet-schema.d.ts +4 -0
- package/dist/src/lib/parsers/get-parquet-schema.d.ts.map +1 -0
- package/dist/src/lib/parsers/parse-parquet-to-columns.d.ts +6 -0
- package/dist/src/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -0
- package/dist/src/lib/parsers/parse-parquet-to-rows.d.ts +6 -0
- package/dist/src/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -0
- package/dist/src/lib/wasm/encode-parquet-wasm.d.ts.map +1 -0
- package/dist/src/lib/wasm/load-wasm/index.d.ts.map +1 -0
- package/dist/src/lib/wasm/load-wasm/load-wasm-browser.d.ts.map +1 -0
- package/dist/src/lib/wasm/load-wasm/load-wasm-node.d.ts.map +1 -0
- package/dist/src/lib/wasm/parse-parquet-wasm.d.ts.map +1 -0
- package/dist/src/parquet-loader.d.ts +22 -0
- package/dist/src/parquet-loader.d.ts.map +1 -0
- package/dist/src/parquet-wasm-loader.d.ts +12 -0
- package/dist/src/parquet-wasm-loader.d.ts.map +1 -0
- package/dist/src/parquet-wasm-writer.d.ts +6 -0
- package/dist/src/parquet-wasm-writer.d.ts.map +1 -0
- package/dist/src/parquet-writer.d.ts.map +1 -0
- package/dist/src/parquetjs/codecs/declare.d.ts.map +1 -0
- package/dist/src/parquetjs/codecs/dictionary.d.ts.map +1 -0
- package/dist/src/parquetjs/codecs/index.d.ts.map +1 -0
- package/dist/src/parquetjs/codecs/plain.d.ts.map +1 -0
- package/dist/src/parquetjs/codecs/rle.d.ts.map +1 -0
- package/dist/src/parquetjs/compression.d.ts.map +1 -0
- package/dist/src/parquetjs/encoder/parquet-encoder.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/BoundaryOrder.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/BsonType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/ColumnChunk.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/ColumnIndex.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/ColumnMetaData.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/ColumnOrder.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/CompressionCodec.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/ConvertedType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/DataPageHeader.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/DateType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/DecimalType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/Encoding.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/EnumType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/FieldRepetitionType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/FileMetaData.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/IndexPageHeader.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/IntType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/JsonType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/KeyValue.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/ListType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/LogicalType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/MapType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/MicroSeconds.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/MilliSeconds.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/NullType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/OffsetIndex.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/PageEncodingStats.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/PageHeader.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/PageLocation.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/PageType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/RowGroup.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/SchemaElement.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/SortingColumn.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/Statistics.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/StringType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/TimeType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/TimeUnit.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/TimestampType.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/Type.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts.map +1 -0
- package/dist/src/parquetjs/parquet-thrift/UUIDType.d.ts.map +1 -0
- package/dist/{parquetjs → src/parquetjs}/parquet-thrift/index.d.ts +2 -0
- package/dist/src/parquetjs/parquet-thrift/index.d.ts.map +1 -0
- package/dist/{parquetjs → src/parquetjs}/parser/decoders.d.ts +4 -4
- package/dist/src/parquetjs/parser/decoders.d.ts.map +1 -0
- package/dist/{parquetjs → src/parquetjs}/parser/parquet-reader.d.ts +5 -3
- package/dist/src/parquetjs/parser/parquet-reader.d.ts.map +1 -0
- package/dist/{parquetjs → src/parquetjs}/schema/declare.d.ts +3 -1
- package/dist/src/parquetjs/schema/declare.d.ts.map +1 -0
- package/dist/src/parquetjs/schema/schema.d.ts.map +1 -0
- package/dist/src/parquetjs/schema/shred.d.ts.map +1 -0
- package/dist/src/parquetjs/schema/types.d.ts.map +1 -0
- package/dist/src/parquetjs/utils/file-utils.d.ts.map +1 -0
- package/dist/src/parquetjs/utils/read-utils.d.ts.map +1 -0
- package/dist/src/workers/parquet-worker.d.ts.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -0
- package/package.json +21 -9
- package/src/buffer-polyfill/buffer-polyfill.browser.ts +11 -0
- package/src/buffer-polyfill/buffer-polyfill.node.ts +15 -0
- package/src/buffer-polyfill/buffer.ts +2207 -0
- package/src/buffer-polyfill/index.ts +8 -0
- package/src/buffer-polyfill/install-buffer-polyfill.ts +3 -0
- package/src/index.ts +31 -17
- package/src/lib/geo/decode-geo-column.ts +54 -0
- package/src/lib/geo/decode-geo-metadata.ts +81 -15
- package/src/lib/geo/geoparquet-metadata-schema.json +60 -0
- package/src/lib/geo/{geoparquet-schema.ts → geoparquet-metadata-schema.ts} +1 -1
- package/src/lib/parsers/get-parquet-schema.ts +14 -0
- package/src/lib/parsers/parse-parquet-to-columns.ts +15 -13
- package/src/lib/parsers/parse-parquet-to-rows.ts +39 -11
- package/src/parquet-loader.ts +29 -16
- package/src/parquet-wasm-loader.ts +13 -13
- package/src/parquet-wasm-writer.ts +10 -8
- package/src/parquet-writer.ts +1 -3
- package/src/parquetjs/codecs/plain.ts +1 -0
- package/src/parquetjs/codecs/rle.ts +2 -0
- package/src/parquetjs/encoder/parquet-encoder.ts +1 -0
- package/src/parquetjs/parquet-thrift/index.ts +4 -0
- package/src/parquetjs/parser/decoders.ts +63 -41
- package/src/parquetjs/parser/parquet-reader.ts +19 -14
- package/src/parquetjs/schema/declare.ts +3 -1
- package/src/parquetjs/utils/read-utils.ts +2 -2
- package/dist/bundle.d.ts.map +0 -1
- package/dist/bundle.js +0 -5
- package/dist/constants.d.ts.map +0 -1
- package/dist/constants.js +0 -18
- package/dist/es5/lib/geo/geoparquet-schema.js.map +0 -1
- package/dist/esm/lib/geo/geoparquet-schema.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js +0 -58
- package/dist/lib/arrow/convert-columns-to-row-group.d.ts.map +0 -1
- package/dist/lib/arrow/convert-columns-to-row-group.js +0 -1
- package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +0 -1
- package/dist/lib/arrow/convert-row-group-to-columns.js +0 -12
- package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +0 -1
- package/dist/lib/arrow/convert-schema-from-parquet.js +0 -86
- package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +0 -1
- package/dist/lib/arrow/convert-schema-to-parquet.js +0 -71
- package/dist/lib/geo/decode-geo-metadata.d.ts +0 -31
- package/dist/lib/geo/decode-geo-metadata.d.ts.map +0 -1
- package/dist/lib/geo/decode-geo-metadata.js +0 -77
- package/dist/lib/geo/geoparquet-schema.d.ts.map +0 -1
- package/dist/lib/geo/geoparquet-schema.js +0 -69
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts +0 -5
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +0 -1
- package/dist/lib/parsers/parse-parquet-to-columns.js +0 -46
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts +0 -5
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +0 -1
- package/dist/lib/parsers/parse-parquet-to-rows.js +0 -37
- package/dist/lib/wasm/encode-parquet-wasm.d.ts.map +0 -1
- package/dist/lib/wasm/encode-parquet-wasm.js +0 -30
- package/dist/lib/wasm/load-wasm/index.d.ts.map +0 -1
- package/dist/lib/wasm/load-wasm/index.js +0 -5
- package/dist/lib/wasm/load-wasm/load-wasm-browser.d.ts.map +0 -1
- package/dist/lib/wasm/load-wasm/load-wasm-browser.js +0 -38
- package/dist/lib/wasm/load-wasm/load-wasm-node.d.ts.map +0 -1
- package/dist/lib/wasm/load-wasm/load-wasm-node.js +0 -31
- package/dist/lib/wasm/parse-parquet-wasm.d.ts.map +0 -1
- package/dist/lib/wasm/parse-parquet-wasm.js +0 -27
- package/dist/parquet-loader.d.ts +0 -14
- package/dist/parquet-loader.d.ts.map +0 -1
- package/dist/parquet-loader.js +0 -41
- package/dist/parquet-wasm-loader.d.ts +0 -23
- package/dist/parquet-wasm-loader.d.ts.map +0 -1
- package/dist/parquet-wasm-loader.js +0 -27
- package/dist/parquet-wasm-writer.d.ts +0 -3
- package/dist/parquet-wasm-writer.d.ts.map +0 -1
- package/dist/parquet-wasm-writer.js +0 -23
- package/dist/parquet-writer.d.ts.map +0 -1
- package/dist/parquet-writer.js +0 -22
- package/dist/parquetjs/codecs/declare.d.ts.map +0 -1
- package/dist/parquetjs/codecs/declare.js +0 -2
- package/dist/parquetjs/codecs/dictionary.d.ts.map +0 -1
- package/dist/parquetjs/codecs/dictionary.js +0 -14
- package/dist/parquetjs/codecs/index.d.ts.map +0 -1
- package/dist/parquetjs/codecs/index.js +0 -55
- package/dist/parquetjs/codecs/plain.d.ts.map +0 -1
- package/dist/parquetjs/codecs/plain.js +0 -211
- package/dist/parquetjs/codecs/rle.d.ts.map +0 -1
- package/dist/parquetjs/codecs/rle.js +0 -145
- package/dist/parquetjs/compression.d.ts.map +0 -1
- package/dist/parquetjs/compression.js +0 -183
- package/dist/parquetjs/encoder/parquet-encoder.d.ts.map +0 -1
- package/dist/parquetjs/encoder/parquet-encoder.js +0 -484
- package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/BoundaryOrder.js +0 -15
- package/dist/parquetjs/parquet-thrift/BsonType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/BsonType.js +0 -62
- package/dist/parquetjs/parquet-thrift/ColumnChunk.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/ColumnChunk.js +0 -211
- package/dist/parquetjs/parquet-thrift/ColumnIndex.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/ColumnIndex.js +0 -217
- package/dist/parquetjs/parquet-thrift/ColumnMetaData.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/ColumnMetaData.js +0 -402
- package/dist/parquetjs/parquet-thrift/ColumnOrder.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/ColumnOrder.js +0 -108
- package/dist/parquetjs/parquet-thrift/CompressionCodec.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/CompressionCodec.js +0 -20
- package/dist/parquetjs/parquet-thrift/ConvertedType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/ConvertedType.js +0 -34
- package/dist/parquetjs/parquet-thrift/DataPageHeader.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/DataPageHeader.js +0 -170
- package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/DataPageHeaderV2.js +0 -230
- package/dist/parquetjs/parquet-thrift/DateType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/DateType.js +0 -62
- package/dist/parquetjs/parquet-thrift/DecimalType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/DecimalType.js +0 -109
- package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/DictionaryPageHeader.js +0 -126
- package/dist/parquetjs/parquet-thrift/Encoding.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/Encoding.js +0 -20
- package/dist/parquetjs/parquet-thrift/EnumType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/EnumType.js +0 -62
- package/dist/parquetjs/parquet-thrift/FieldRepetitionType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/FieldRepetitionType.js +0 -15
- package/dist/parquetjs/parquet-thrift/FileMetaData.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/FileMetaData.js +0 -260
- package/dist/parquetjs/parquet-thrift/IndexPageHeader.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/IndexPageHeader.js +0 -62
- package/dist/parquetjs/parquet-thrift/IntType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/IntType.js +0 -109
- package/dist/parquetjs/parquet-thrift/JsonType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/JsonType.js +0 -62
- package/dist/parquetjs/parquet-thrift/KeyValue.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/KeyValue.js +0 -106
- package/dist/parquetjs/parquet-thrift/ListType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/ListType.js +0 -62
- package/dist/parquetjs/parquet-thrift/LogicalType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/LogicalType.js +0 -384
- package/dist/parquetjs/parquet-thrift/MapType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/MapType.js +0 -62
- package/dist/parquetjs/parquet-thrift/MicroSeconds.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/MicroSeconds.js +0 -62
- package/dist/parquetjs/parquet-thrift/MilliSeconds.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/MilliSeconds.js +0 -62
- package/dist/parquetjs/parquet-thrift/NullType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/NullType.js +0 -62
- package/dist/parquetjs/parquet-thrift/OffsetIndex.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/OffsetIndex.js +0 -101
- package/dist/parquetjs/parquet-thrift/PageEncodingStats.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/PageEncodingStats.js +0 -131
- package/dist/parquetjs/parquet-thrift/PageHeader.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/PageHeader.js +0 -220
- package/dist/parquetjs/parquet-thrift/PageLocation.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/PageLocation.js +0 -145
- package/dist/parquetjs/parquet-thrift/PageType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/PageType.js +0 -16
- package/dist/parquetjs/parquet-thrift/RowGroup.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/RowGroup.js +0 -186
- package/dist/parquetjs/parquet-thrift/SchemaElement.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/SchemaElement.js +0 -243
- package/dist/parquetjs/parquet-thrift/SortingColumn.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/SortingColumn.js +0 -131
- package/dist/parquetjs/parquet-thrift/Statistics.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/Statistics.js +0 -180
- package/dist/parquetjs/parquet-thrift/StringType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/StringType.js +0 -62
- package/dist/parquetjs/parquet-thrift/TimeType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/TimeType.js +0 -110
- package/dist/parquetjs/parquet-thrift/TimeUnit.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/TimeUnit.js +0 -131
- package/dist/parquetjs/parquet-thrift/TimestampType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/TimestampType.js +0 -110
- package/dist/parquetjs/parquet-thrift/Type.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/Type.js +0 -20
- package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/TypeDefinedOrder.js +0 -62
- package/dist/parquetjs/parquet-thrift/UUIDType.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/UUIDType.js +0 -62
- package/dist/parquetjs/parquet-thrift/index.d.ts.map +0 -1
- package/dist/parquetjs/parquet-thrift/index.js +0 -65
- package/dist/parquetjs/parser/decoders.d.ts.map +0 -1
- package/dist/parquetjs/parser/decoders.js +0 -318
- package/dist/parquetjs/parser/parquet-reader.d.ts.map +0 -1
- package/dist/parquetjs/parser/parquet-reader.js +0 -200
- package/dist/parquetjs/schema/declare.d.ts.map +0 -1
- package/dist/parquetjs/schema/declare.js +0 -12
- package/dist/parquetjs/schema/schema.d.ts.map +0 -1
- package/dist/parquetjs/schema/schema.js +0 -162
- package/dist/parquetjs/schema/shred.d.ts.map +0 -1
- package/dist/parquetjs/schema/shred.js +0 -355
- package/dist/parquetjs/schema/types.d.ts.map +0 -1
- package/dist/parquetjs/schema/types.js +0 -416
- package/dist/parquetjs/utils/file-utils.d.ts.map +0 -1
- package/dist/parquetjs/utils/file-utils.js +0 -43
- package/dist/parquetjs/utils/read-utils.d.ts.map +0 -1
- package/dist/parquetjs/utils/read-utils.js +0 -109
- package/dist/workers/parquet-worker.d.ts.map +0 -1
- package/dist/workers/parquet-worker.js +0 -5
- /package/dist/{bundle.d.ts → src/bundle.d.ts} +0 -0
- /package/dist/{constants.d.ts → src/constants.d.ts} +0 -0
- /package/dist/{lib → src/lib}/arrow/convert-columns-to-row-group.d.ts +0 -0
- /package/dist/{lib → src/lib}/arrow/convert-row-group-to-columns.d.ts +0 -0
- /package/dist/{lib → src/lib}/arrow/convert-schema-from-parquet.d.ts +0 -0
- /package/dist/{lib → src/lib}/arrow/convert-schema-to-parquet.d.ts +0 -0
- /package/dist/{lib → src/lib}/wasm/encode-parquet-wasm.d.ts +0 -0
- /package/dist/{lib → src/lib}/wasm/load-wasm/index.d.ts +0 -0
- /package/dist/{lib → src/lib}/wasm/load-wasm/load-wasm-browser.d.ts +0 -0
- /package/dist/{lib → src/lib}/wasm/load-wasm/load-wasm-node.d.ts +0 -0
- /package/dist/{lib → src/lib}/wasm/parse-parquet-wasm.d.ts +0 -0
- /package/dist/{parquet-writer.d.ts → src/parquet-writer.d.ts} +0 -0
- /package/dist/{parquetjs → src/parquetjs}/codecs/declare.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/codecs/dictionary.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/codecs/index.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/codecs/plain.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/codecs/rle.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/compression.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/encoder/parquet-encoder.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/BoundaryOrder.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/BsonType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/ColumnChunk.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/ColumnIndex.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/ColumnMetaData.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/ColumnOrder.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/CompressionCodec.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/ConvertedType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/DataPageHeader.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/DataPageHeaderV2.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/DateType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/DecimalType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/DictionaryPageHeader.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/Encoding.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/EnumType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/FieldRepetitionType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/FileMetaData.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/IndexPageHeader.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/IntType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/JsonType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/KeyValue.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/ListType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/LogicalType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/MapType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/MicroSeconds.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/MilliSeconds.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/NullType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/OffsetIndex.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/PageEncodingStats.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/PageHeader.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/PageLocation.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/PageType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/RowGroup.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/SchemaElement.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/SortingColumn.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/Statistics.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/StringType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/TimeType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/TimeUnit.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/TimestampType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/Type.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/TypeDefinedOrder.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/parquet-thrift/UUIDType.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/schema/schema.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/schema/shred.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/schema/types.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/utils/file-utils.d.ts +0 -0
- /package/dist/{parquetjs → src/parquetjs}/utils/read-utils.d.ts +0 -0
- /package/dist/{workers → src/workers}/parquet-worker.d.ts +0 -0
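
The listing above shows the compiled output moving from dist/ to dist/src/, a new Buffer polyfill for browser builds, and expanded GeoParquet metadata support (decode-geo-column, decode-geo-metadata, geoparquet-metadata-schema.json). For orientation only, a minimal consumption sketch assuming the standard loaders.gl `load` pattern; it is not taken from this diff and the data URL is a placeholder:

```ts
// Minimal sketch (assumption: the standard @loaders.gl/core `load` pattern).
// The URL is a placeholder; loader options are omitted.
import {load} from '@loaders.gl/core';
import {ParquetLoader} from '@loaders.gl/parquet';

const table = await load('https://example.com/data.parquet', ParquetLoader);
console.log(table);
```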

package/dist/parquetjs/compression.js
@@ -1,183 +0,0 @@
-"use strict";
-// Forked from https://github.com/kbajalc/parquets under MIT license (Copyright (c) 2017 ironSource Ltd.)
-/* eslint-disable camelcase */
-// Forked from https://github.com/ironSource/parquetjs under MIT license
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.inflate = exports.decompress = exports.deflate = exports.preloadCompressions = exports.PARQUET_COMPRESSION_METHODS = void 0;
-const compression_1 = require("@loaders.gl/compression");
-/** We can't use loaders-util buffer handling since we are dependent on buffers even in the browser */
-function toBuffer(arrayBuffer) {
-    return Buffer.from(arrayBuffer);
-}
-function toArrayBuffer(buffer) {
-    // TODO - per docs we should just be able to call buffer.buffer, but there are issues
-    if (Buffer.isBuffer(buffer)) {
-        const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
-        return typedArray.slice().buffer;
-    }
-    return buffer;
-}
-// TODO switch to worker compression to avoid bundling...
-// import brotli from 'brotli'; - brotli has problems with decompress in browsers
-// import brotliDecompress from 'brotli/decompress';
-const lz4js_1 = __importDefault(require("lz4js"));
-// import lzo from 'lzo';
-// import {ZstdCodec} from 'zstd-codec';
-// Inject large dependencies through Compression constructor options
-const modules = {
-    // brotli has problems with decompress in browsers
-    // brotli: {
-    //   decompress: brotliDecompress,
-    //   compress: () => {
-    //     throw new Error('brotli compress');
-    //   }
-    // },
-    lz4js: lz4js_1.default
-    // lzo
-    // 'zstd-codec': ZstdCodec
-};
-/**
- * See https://github.com/apache/parquet-format/blob/master/Compression.md
- */
-// @ts-expect-error
-exports.PARQUET_COMPRESSION_METHODS = {
-    UNCOMPRESSED: new compression_1.NoCompression(),
-    GZIP: new compression_1.GZipCompression(),
-    SNAPPY: new compression_1.SnappyCompression(),
-    BROTLI: new compression_1.BrotliCompression({ modules }),
-    // TODO: Understand difference between LZ4 and LZ4_RAW
-    LZ4: new compression_1.LZ4Compression({ modules }),
-    LZ4_RAW: new compression_1.LZ4Compression({ modules }),
-    //
-    // LZO: new LZOCompression({modules}),
-    ZSTD: new compression_1.ZstdCompression({ modules })
-};
-/**
- * Register compressions that have big external libraries
- * @param options.modules External library dependencies
- */
-async function preloadCompressions(options) {
-    const compressions = Object.values(exports.PARQUET_COMPRESSION_METHODS);
-    return await Promise.all(compressions.map((compression) => compression.preload()));
-}
-exports.preloadCompressions = preloadCompressions;
-/**
- * Deflate a value using compression method `method`
- */
-async function deflate(method, value) {
-    const compression = exports.PARQUET_COMPRESSION_METHODS[method];
-    if (!compression) {
-        throw new Error(`parquet: invalid compression method: ${method}`);
-    }
-    const inputArrayBuffer = toArrayBuffer(value);
-    const compressedArrayBuffer = await compression.compress(inputArrayBuffer);
-    return toBuffer(compressedArrayBuffer);
-}
-exports.deflate = deflate;
-/**
- * Inflate a value using compression method `method`
- */
-async function decompress(method, value, size) {
-    const compression = exports.PARQUET_COMPRESSION_METHODS[method];
-    if (!compression) {
-        throw new Error(`parquet: invalid compression method: ${method}`);
-    }
-    const inputArrayBuffer = toArrayBuffer(value);
-    const compressedArrayBuffer = await compression.decompress(inputArrayBuffer, size);
-    return toBuffer(compressedArrayBuffer);
-}
-exports.decompress = decompress;
-/*
- * Inflate a value using compression method `method`
- */
-function inflate(method, value, size) {
-    if (!(method in exports.PARQUET_COMPRESSION_METHODS)) {
-        throw new Error(`invalid compression method: ${method}`);
-    }
-    // @ts-ignore
-    return exports.PARQUET_COMPRESSION_METHODS[method].inflate(value, size);
-}
-exports.inflate = inflate;
-/*
-function deflate_identity(value: Buffer): Buffer {
-  return value;
-}
-
-function deflate_gzip(value: Buffer): Buffer {
-  return zlib.gzipSync(value);
-}
-
-function deflate_snappy(value: Buffer): Buffer {
-  return snappyjs.compress(value);
-}
-
-function deflate_lzo(value: Buffer): Buffer {
-  lzo = lzo || Util.load('lzo');
-  return lzo.compress(value);
-}
-
-function deflate_brotli(value: Buffer): Buffer {
-  brotli = brotli || Util.load('brotli');
-  const result = brotli.compress(value, {
-    mode: 0,
-    quality: 8,
-    lgwin: 22
-  });
-  return result ? Buffer.from(result) : Buffer.alloc(0);
-}
-
-function deflate_lz4(value: Buffer): Buffer {
-  lz4js = lz4js || Util.load('lz4js');
-  try {
-    // let result = Buffer.alloc(lz4js.encodeBound(value.length));
-    // const compressedSize = lz4.encodeBlock(value, result);
-    // // remove unnecessary bytes
-    // result = result.slice(0, compressedSize);
-    // return result;
-    return Buffer.from(lz4js.compress(value));
-  } catch (err) {
-    throw err;
-  }
-}
-function inflate_identity(value: Buffer): Buffer {
-  return value;
-}
-
-function inflate_gzip(value: Buffer): Buffer {
-  return zlib.gunzipSync(value);
-}
-
-function inflate_snappy(value: Buffer): Buffer {
-  return snappyjs.uncompress(value);
-}
-
-function inflate_lzo(value: Buffer, size: number): Buffer {
-  lzo = lzo || Util.load('lzo');
-  return lzo.decompress(value, size);
-}
-
-function inflate_lz4(value: Buffer, size: number): Buffer {
-  lz4js = lz4js || Util.load('lz4js');
-  try {
-    // let result = Buffer.alloc(size);
-    // const uncompressedSize = lz4js.decodeBlock(value, result);
-    // // remove unnecessary bytes
-    // result = result.slice(0, uncompressedSize);
-    // return result;
-    return Buffer.from(lz4js.decompress(value, size));
-  } catch (err) {
-    throw err;
-  }
-}
-
-function inflate_brotli(value: Buffer): Buffer {
-  brotli = brotli || Util.load('brotli');
-  if (!value.length) {
-    return Buffer.alloc(0);
-  }
-  return Buffer.from(brotli.decompress(value));
-}
-*/
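
The deleted dist/parquetjs/compression.js above maps Parquet codec names (SNAPPY, GZIP, BROTLI, LZ4, ZSTD, ...) onto @loaders.gl/compression implementations; the module itself survives, relocated under dist/src/parquetjs/ (see the rename entries in the listing). A rough sketch of how its helpers are called, based only on the deflate/decompress signatures visible in this hunk; the deep import path is an assumption and these helpers are internal to the package:

```ts
// Sketch based on the signatures in the deleted file: deflate(method, buffer)
// and decompress(method, buffer, size). The import path is an assumption.
import {deflate, decompress} from '@loaders.gl/parquet/parquetjs/compression';

const raw = Buffer.from('example payload');
const compressed = await deflate('SNAPPY', raw); // Buffer in, compressed Buffer out
const restored = await decompress('SNAPPY', compressed, raw.length); // size hint for codecs that need it
```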

package/dist/parquetjs/encoder/parquet-encoder.d.ts.map
@@ -1 +0,0 @@
{"version":3,"file":"parquet-encoder.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/encoder/parquet-encoder.ts"],"names":[],"mappings":";;AAEA,OAAO,EAAC,MAAM,EAAC,MAAM,0BAA0B,CAAC;AAGhD,OAAO,EACL,eAAe,EAKf,UAAU,EACX,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAC,aAAa,EAAC,MAAM,kBAAkB,CAAC;AAE/C,OAAO,EAaL,QAAQ,EAGT,MAAM,mBAAmB,CAAC;AA2B3B,MAAM,WAAW,qBAAqB;IACpC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,OAAO,CAAC;IAGxB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED;;;;GAIG;AAEH,qBAAa,cAAc,CAAC,CAAC;IAC3B;;;OAGG;WACU,QAAQ,CAAC,CAAC,EACrB,MAAM,EAAE,aAAa,EACrB,IAAI,EAAE,MAAM,EACZ,IAAI,CAAC,EAAE,qBAAqB,GAC3B,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC;IAK7B;;;OAGG;WACU,UAAU,CAAC,CAAC,EACvB,MAAM,EAAE,aAAa,EACrB,YAAY,EAAE,MAAM,CAAC,QAAQ,EAC7B,IAAI,GAAE,qBAA0B,GAC/B,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC;IAKtB,MAAM,EAAE,aAAa,CAAC;IACtB,cAAc,EAAE,qBAAqB,CAAC;IACtC,SAAS,EAAE,eAAe,CAAC;IAC3B,YAAY,EAAE,MAAM,CAAC;IACrB,MAAM,EAAE,OAAO,CAAC;IAChB,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE5C;;OAEG;gBAED,MAAM,EAAE,aAAa,EACrB,cAAc,EAAE,qBAAqB,EACrC,IAAI,EAAE,qBAAqB;IAcvB,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC;IAUlC;;;OAGG;IACG,SAAS,CAAC,CAAC,SAAS,UAAU,EAAE,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAW5D;;;;;OAKG;IACG,KAAK,CAAC,QAAQ,CAAC,EAAE,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAqBjD;;OAEG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,IAAI;IAK7C;;;;;OAKG;IACH,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;IAIlC;;;OAGG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;CAG/B;AAED;;;;;GAKG;AACH,qBAAa,qBAAqB;IAChC;;OAEG;WACU,UAAU,CACrB,MAAM,EAAE,aAAa,EACrB,YAAY,EAAE,MAAM,CAAC,QAAQ,EAC7B,IAAI,EAAE,qBAAqB,GAC1B,OAAO,CAAC,qBAAqB,CAAC;IAM1B,MAAM,EAAE,aAAa,CAAC;IACtB,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC3B,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,QAAQ,EAAE,CAAC;IACtB,QAAQ,EAAE,MAAM,CAAC;IACjB,aAAa,EAAE,OAAO,CAAC;gBAG5B,MAAM,EAAE,aAAa,EACrB,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,EACvC,OAAO,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,EAC5B,UAAU,EAAE,MAAM,EAClB,IAAI,EAAE,qBAAqB;IAY7B,YAAY,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAKxC;;OAEG;IACH,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC;IAI5B;;;OAGG;IACG,aAAa,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC;IAY5D;;OAEG;IACH,WAAW,CAAC,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;IAWhE;;;OAGG;IACH,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,IAAI;CAG/B"}

package/dist/parquetjs/encoder/parquet-encoder.js
@@ -1,484 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-        desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ParquetEnvelopeWriter = exports.ParquetEncoder = void 0;
-const codecs_1 = require("../codecs");
-const Compression = __importStar(require("../compression"));
-const Shred = __importStar(require("../schema/shred"));
-const parquet_thrift_1 = require("../parquet-thrift");
-const file_utils_1 = require("../utils/file-utils");
-const read_utils_1 = require("../utils/read-utils");
-const node_int64_1 = __importDefault(require("node-int64"));
-/**
- * Parquet File Magic String
- */
-const PARQUET_MAGIC = 'PAR1';
-/**
- * Parquet File Format Version
- */
-const PARQUET_VERSION = 1;
-/**
- * Default Page and Row Group sizes
- */
-const PARQUET_DEFAULT_PAGE_SIZE = 8192;
-const PARQUET_DEFAULT_ROW_GROUP_SIZE = 4096;
-/**
- * Repetition and Definition Level Encoding
- */
-const PARQUET_RDLVL_TYPE = 'INT32';
-const PARQUET_RDLVL_ENCODING = 'RLE';
-/**
- * Write a parquet file to an output stream. The ParquetEncoder will perform
- * buffering/batching for performance, so close() must be called after all rows
- * are written.
- */
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-class ParquetEncoder {
-    /**
-     * Convenience method to create a new buffered parquet writer that writes to
-     * the specified file
-     */
-    static async openFile(schema, path, opts) {
-        const outputStream = await (0, file_utils_1.osopen)(path, opts);
-        return ParquetEncoder.openStream(schema, outputStream, opts);
-    }
-    /**
-     * Convenience method to create a new buffered parquet writer that writes to
-     * the specified stream
-     */
-    static async openStream(schema, outputStream, opts = {}) {
-        const envelopeWriter = await ParquetEnvelopeWriter.openStream(schema, outputStream, opts);
-        return new ParquetEncoder(schema, envelopeWriter, opts);
-    }
-    /**
-     * Create a new buffered parquet writer for a given envelope writer
-     */
-    constructor(schema, envelopeWriter, opts) {
-        this.schema = schema;
-        this.envelopeWriter = envelopeWriter;
-        // @ts-ignore Row buffer typings...
-        this.rowBuffer = {};
-        this.rowGroupSize = opts.rowGroupSize || PARQUET_DEFAULT_ROW_GROUP_SIZE;
-        this.closed = false;
-        this.userMetadata = {};
-        // eslint-disable-next-line @typescript-eslint/no-floating-promises
-        this.writeHeader();
-    }
-    async writeHeader() {
-        // TODO - better not mess with promises in the constructor
-        try {
-            await this.envelopeWriter.writeHeader();
-        }
-        catch (err) {
-            await this.envelopeWriter.close();
-            throw err;
-        }
-    }
-    /**
-     * Append a single row to the parquet file. Rows are buffered in memory until
-     * rowGroupSize rows are in the buffer or close() is called
-     */
-    async appendRow(row) {
-        if (this.closed) {
-            throw new Error('writer was closed');
-        }
-        Shred.shredRecord(this.schema, row, this.rowBuffer);
-        if (this.rowBuffer.rowCount >= this.rowGroupSize) {
-            // @ts-ignore
-            this.rowBuffer = {};
-        }
-    }
-    /**
-     * Finish writing the parquet file and commit the footer to disk. This method
-     * MUST be called after you are finished adding rows. You must not call this
-     * method twice on the same object or add any rows after the close() method has
-     * been called
-     */
-    async close(callback) {
-        if (this.closed) {
-            throw new Error('writer was closed');
-        }
-        this.closed = true;
-        if (this.rowBuffer.rowCount > 0 || this.rowBuffer.rowCount >= this.rowGroupSize) {
-            // @ts-ignore
-            this.rowBuffer = {};
-        }
-        await this.envelopeWriter.writeFooter(this.userMetadata);
-        await this.envelopeWriter.close();
-        // this.envelopeWriter = null;
-        if (callback) {
-            callback();
-        }
-    }
-    /**
-     * Add key<>value metadata to the file
-     */
-    setMetadata(key, value) {
-        // TODO: value to be any, obj -> JSON
-        this.userMetadata[String(key)] = String(value);
-    }
-    /**
-     * Set the parquet row group size. This values controls the maximum number
-     * of rows that are buffered in memory at any given time as well as the number
-     * of rows that are co-located on disk. A higher value is generally better for
-     * read-time I/O performance at the tradeoff of write-time memory usage.
-     */
-    setRowGroupSize(cnt) {
-        this.rowGroupSize = cnt;
-    }
-    /**
-     * Set the parquet data page size. The data page size controls the maximum
-     * number of column values that are written to disk as a consecutive array
-     */
-    setPageSize(cnt) {
-        this.envelopeWriter.setPageSize(cnt);
-    }
-}
-exports.ParquetEncoder = ParquetEncoder;
-/**
- * Create a parquet file from a schema and a number of row groups. This class
- * performs direct, unbuffered writes to the underlying output stream and is
- * intendend for advanced and internal users; the writeXXX methods must be
- * called in the correct order to produce a valid file.
- */
-class ParquetEnvelopeWriter {
-    /**
-     * Create a new parquet envelope writer that writes to the specified stream
-     */
-    static async openStream(schema, outputStream, opts) {
-        const writeFn = file_utils_1.oswrite.bind(undefined, outputStream);
-        const closeFn = file_utils_1.osclose.bind(undefined, outputStream);
-        return new ParquetEnvelopeWriter(schema, writeFn, closeFn, 0, opts);
-    }
-    constructor(schema, writeFn, closeFn, fileOffset, opts) {
-        this.schema = schema;
-        this.write = writeFn;
-        this.close = closeFn;
-        this.offset = fileOffset;
-        this.rowCount = 0;
-        this.rowGroups = [];
-        this.pageSize = opts.pageSize || PARQUET_DEFAULT_PAGE_SIZE;
-        this.useDataPageV2 = 'useDataPageV2' in opts ? Boolean(opts.useDataPageV2) : false;
-    }
-    writeSection(buf) {
-        this.offset += buf.length;
-        return this.write(buf);
-    }
-    /**
-     * Encode the parquet file header
-     */
-    writeHeader() {
-        return this.writeSection(Buffer.from(PARQUET_MAGIC));
-    }
-    /**
-     * Encode a parquet row group. The records object should be created using the
-     * shredRecord method
-     */
-    async writeRowGroup(records) {
-        const rgroup = await encodeRowGroup(this.schema, records, {
-            baseOffset: this.offset,
-            pageSize: this.pageSize,
-            useDataPageV2: this.useDataPageV2
-        });
-        this.rowCount += records.rowCount;
-        this.rowGroups.push(rgroup.metadata);
-        return await this.writeSection(rgroup.body);
-    }
-    /**
-     * Write the parquet file footer
-     */
-    writeFooter(userMetadata) {
-        if (!userMetadata) {
-            // tslint:disable-next-line:no-parameter-reassignment
-            userMetadata = {};
-        }
-        return this.writeSection(encodeFooter(this.schema, this.rowCount, this.rowGroups, userMetadata));
-    }
-    /**
-     * Set the parquet data page size. The data page size controls the maximum
-     * number of column values that are written to disk as a consecutive array
-     */
-    setPageSize(cnt) {
-        this.pageSize = cnt;
-    }
-}
-exports.ParquetEnvelopeWriter = ParquetEnvelopeWriter;
-/**
- * Create a parquet transform stream
-export class ParquetTransformer<T> extends stream.Transform {
-  public writer: ParquetEncoder<T>;
-
-  constructor(schema: ParquetSchema, opts: ParquetEncoderOptions = {}) {
-    super({objectMode: true});
-
-    const writeProxy = (function (t: ParquetTransformer<any>) {
-      return async function (b: any): Promise<void> {
-        t.push(b);
-      };
-    })(this);
-
-    this.writer = new ParquetEncoder(
-      schema,
-      new ParquetEnvelopeWriter(schema, writeProxy, async () => {}, 0, opts),
-      opts
-    );
-  }
-
-  // tslint:disable-next-line:function-name
-  _transform(row: any, encoding: string, callback: (val?: any) => void): Promise<void> {
-    if (row) {
-      return this.writer.appendRow(row).then(callback);
-    }
-    callback();
-    return Promise.resolve();
-  }
-
-  // tslint:disable-next-line:function-name
-  async _flush(callback: (val?: any) => void) {
-    await this.writer.close(callback);
-  }
-}
-*/
-/**
- * Encode a consecutive array of data using one of the parquet encodings
- */
-function encodeValues(type, encoding, values, opts) {
-    if (!(encoding in codecs_1.PARQUET_CODECS)) {
-        throw new Error(`invalid encoding: ${encoding}`);
-    }
-    return codecs_1.PARQUET_CODECS[encoding].encodeValues(type, values, opts);
-}
-/**
- * Encode a parquet data page
- */
-async function encodeDataPage(column, data) {
-    /* encode repetition and definition levels */
-    let rLevelsBuf = Buffer.alloc(0);
-    if (column.rLevelMax > 0) {
-        rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {
-            bitWidth: (0, read_utils_1.getBitWidth)(column.rLevelMax)
-            // disableEnvelope: false
-        });
-    }
-    let dLevelsBuf = Buffer.alloc(0);
-    if (column.dLevelMax > 0) {
-        dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {
-            bitWidth: (0, read_utils_1.getBitWidth)(column.dLevelMax)
-            // disableEnvelope: false
-        });
-    }
-    /* encode values */
-    const valuesBuf = encodeValues(column.primitiveType, column.encoding, data.values, {
-        typeLength: column.typeLength,
-        bitWidth: column.typeLength
-    });
-    const dataBuf = Buffer.concat([rLevelsBuf, dLevelsBuf, valuesBuf]);
-    // compression = column.compression === 'UNCOMPRESSED' ? (compression || 'UNCOMPRESSED') : column.compression;
-    const compressedBuf = await Compression.deflate(column.compression, dataBuf);
-    /* build page header */
-    const header = new parquet_thrift_1.PageHeader({
-        type: parquet_thrift_1.PageType.DATA_PAGE,
-        data_page_header: new parquet_thrift_1.DataPageHeader({
-            num_values: data.count,
-            encoding: parquet_thrift_1.Encoding[column.encoding],
-            definition_level_encoding: parquet_thrift_1.Encoding[PARQUET_RDLVL_ENCODING],
-            repetition_level_encoding: parquet_thrift_1.Encoding[PARQUET_RDLVL_ENCODING] // [PARQUET_RDLVL_ENCODING]
-        }),
-        uncompressed_page_size: dataBuf.length,
-        compressed_page_size: compressedBuf.length
-    });
-    /* concat page header, repetition and definition levels and values */
-    const headerBuf = (0, read_utils_1.serializeThrift)(header);
-    const page = Buffer.concat([headerBuf, compressedBuf]);
-    return { header, headerSize: headerBuf.length, page };
-}
-/**
- * Encode a parquet data page (v2)
- */
-async function encodeDataPageV2(column, data, rowCount) {
-    /* encode values */
-    const valuesBuf = encodeValues(column.primitiveType, column.encoding, data.values, {
-        typeLength: column.typeLength,
-        bitWidth: column.typeLength
-    });
-    // compression = column.compression === 'UNCOMPRESSED' ? (compression || 'UNCOMPRESSED') : column.compression;
-    const compressedBuf = await Compression.deflate(column.compression, valuesBuf);
-    /* encode repetition and definition levels */
-    let rLevelsBuf = Buffer.alloc(0);
-    if (column.rLevelMax > 0) {
-        rLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.rlevels, {
-            bitWidth: (0, read_utils_1.getBitWidth)(column.rLevelMax),
-            disableEnvelope: true
-        });
-    }
-    let dLevelsBuf = Buffer.alloc(0);
-    if (column.dLevelMax > 0) {
-        dLevelsBuf = encodeValues(PARQUET_RDLVL_TYPE, PARQUET_RDLVL_ENCODING, data.dlevels, {
-            bitWidth: (0, read_utils_1.getBitWidth)(column.dLevelMax),
-            disableEnvelope: true
-        });
-    }
-    /* build page header */
-    const header = new parquet_thrift_1.PageHeader({
-        type: parquet_thrift_1.PageType.DATA_PAGE_V2,
-        data_page_header_v2: new parquet_thrift_1.DataPageHeaderV2({
-            num_values: data.count,
-            num_nulls: data.count - data.values.length,
-            num_rows: rowCount,
-            encoding: parquet_thrift_1.Encoding[column.encoding],
-            definition_levels_byte_length: dLevelsBuf.length,
-            repetition_levels_byte_length: rLevelsBuf.length,
-            is_compressed: column.compression !== 'UNCOMPRESSED'
-        }),
-        uncompressed_page_size: rLevelsBuf.length + dLevelsBuf.length + valuesBuf.length,
-        compressed_page_size: rLevelsBuf.length + dLevelsBuf.length + compressedBuf.length
-    });
-    /* concat page header, repetition and definition levels and values */
-    const headerBuf = (0, read_utils_1.serializeThrift)(header);
-    const page = Buffer.concat([headerBuf, rLevelsBuf, dLevelsBuf, compressedBuf]);
-    return { header, headerSize: headerBuf.length, page };
-}
-/**
- * Encode an array of values into a parquet column chunk
- */
-async function encodeColumnChunk(column, buffer, offset, opts) {
-    const data = buffer.columnData[column.path.join()];
-    const baseOffset = (opts.baseOffset || 0) + offset;
-    /* encode data page(s) */
-    // const pages: Buffer[] = [];
-    let pageBuf;
-    // tslint:disable-next-line:variable-name
-    let total_uncompressed_size = 0;
-    // tslint:disable-next-line:variable-name
-    let total_compressed_size = 0;
-    {
-        const result = opts.useDataPageV2
-            ? await encodeDataPageV2(column, data, buffer.rowCount)
-            : await encodeDataPage(column, data);
-        // pages.push(result.page);
-        pageBuf = result.page;
-        total_uncompressed_size += result.header.uncompressed_page_size + result.headerSize;
-        total_compressed_size += result.header.compressed_page_size + result.headerSize;
-    }
-    // const pagesBuf = Buffer.concat(pages);
-    // const compression = column.compression === 'UNCOMPRESSED' ? (opts.compression || 'UNCOMPRESSED') : column.compression;
-    /* prepare metadata header */
-    const metadata = new parquet_thrift_1.ColumnMetaData({
-        path_in_schema: column.path,
-        num_values: data.count,
-        data_page_offset: baseOffset,
-        encodings: [],
-        total_uncompressed_size,
-        total_compressed_size,
-        type: parquet_thrift_1.Type[column.primitiveType],
-        codec: parquet_thrift_1.CompressionCodec[column.compression]
-    });
-    /* list encodings */
-    metadata.encodings.push(parquet_thrift_1.Encoding[PARQUET_RDLVL_ENCODING]);
-    metadata.encodings.push(parquet_thrift_1.Encoding[column.encoding]);
-    /* concat metadata header and data pages */
-    const metadataOffset = baseOffset + pageBuf.length;
-    const body = Buffer.concat([pageBuf, (0, read_utils_1.serializeThrift)(metadata)]);
-    return { body, metadata, metadataOffset };
-}
-/**
- * Encode a list of column values into a parquet row group
- */
-async function encodeRowGroup(schema, data, opts) {
-    const metadata = new parquet_thrift_1.RowGroup({
-        num_rows: data.rowCount,
-        columns: [],
-        total_byte_size: 0
-    });
-    let body = Buffer.alloc(0);
-    for (const field of schema.fieldList) {
-        if (field.isNested) {
-            continue; // eslint-disable-line no-continue
-        }
-        const cchunkData = await encodeColumnChunk(field, data, body.length, opts);
-        const cchunk = new parquet_thrift_1.ColumnChunk({
-            file_offset: cchunkData.metadataOffset,
-            meta_data: cchunkData.metadata
-        });
-        metadata.columns.push(cchunk);
-        metadata.total_byte_size = new node_int64_1.default(Number(metadata.total_byte_size) + cchunkData.body.length);
-        body = Buffer.concat([body, cchunkData.body]);
-    }
-    return { body, metadata };
-}
-/**
- * Encode a parquet file metadata footer
- */
-function encodeFooter(schema, rowCount, rowGroups, userMetadata) {
-    const metadata = new parquet_thrift_1.FileMetaData({
-        version: PARQUET_VERSION,
-        created_by: 'parquets',
-        num_rows: rowCount,
-        row_groups: rowGroups,
-        schema: [],
-        key_value_metadata: []
-    });
-    for (const key in userMetadata) {
-        const kv = new parquet_thrift_1.KeyValue({
-            key,
-            value: userMetadata[key]
-        });
-        metadata.key_value_metadata?.push?.(kv);
-    }
-    {
-        const schemaRoot = new parquet_thrift_1.SchemaElement({
-            name: 'root',
-            num_children: Object.keys(schema.fields).length
-        });
-        metadata.schema.push(schemaRoot);
-    }
-    for (const field of schema.fieldList) {
-        const relt = parquet_thrift_1.FieldRepetitionType[field.repetitionType];
-        const schemaElem = new parquet_thrift_1.SchemaElement({
-            name: field.name,
-            repetition_type: relt
-        });
-        if (field.isNested) {
-            schemaElem.num_children = field.fieldCount;
-        }
-        else {
-            schemaElem.type = parquet_thrift_1.Type[field.primitiveType];
-        }
-        if (field.originalType) {
-            schemaElem.converted_type = parquet_thrift_1.ConvertedType[field.originalType];
-        }
-        schemaElem.type_length = field.typeLength;
-        metadata.schema.push(schemaElem);
-    }
-    const metadataEncoded = (0, read_utils_1.serializeThrift)(metadata);
-    const footerEncoded = Buffer.alloc(metadataEncoded.length + 8);
-    metadataEncoded.copy(footerEncoded);
-    footerEncoded.writeUInt32LE(metadataEncoded.length, metadataEncoded.length);
-    footerEncoded.write(PARQUET_MAGIC, metadataEncoded.length + 4);
-    return footerEncoded;
-}
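
The 484-line dist/parquetjs/encoder/parquet-encoder.js removed above is the buffered Parquet writer; like the other parquetjs modules it now ships under dist/src/. A rough usage sketch inferred only from the methods visible in this hunk (openFile, appendRow, setMetadata, close); the import paths and the ParquetSchema field syntax are assumptions, not documented API:

```ts
// Inferred from the ParquetEncoder API shown in the deleted dist file.
// Import paths and the schema field syntax are assumptions.
import {ParquetSchema} from '@loaders.gl/parquet/parquetjs/schema/schema';
import {ParquetEncoder} from '@loaders.gl/parquet/parquetjs/encoder/parquet-encoder';

const schema = new ParquetSchema({id: {type: 'INT32'}, name: {type: 'UTF8'}});
const encoder = await ParquetEncoder.openFile(schema, '/tmp/example.parquet', {rowGroupSize: 4096});
await encoder.appendRow({id: 1, name: 'alice'});
encoder.setMetadata('generated-by', 'usage sketch');
await encoder.close();
```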

package/dist/parquetjs/parquet-thrift/BoundaryOrder.d.ts.map
@@ -1 +0,0 @@
{"version":3,"file":"BoundaryOrder.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parquet-thrift/BoundaryOrder.ts"],"names":[],"mappings":"AAMA,oBAAY,aAAa;IACvB,SAAS,IAAI;IACb,SAAS,IAAI;IACb,UAAU,IAAI;CACf"}

package/dist/parquetjs/parquet-thrift/BoundaryOrder.js
@@ -1,15 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BoundaryOrder = void 0;
-/* tslint:disable */
-/* eslint-disable */
-/*
- * Autogenerated by @creditkarma/thrift-typescript v3.7.2
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-var BoundaryOrder;
-(function (BoundaryOrder) {
-    BoundaryOrder[BoundaryOrder["UNORDERED"] = 0] = "UNORDERED";
-    BoundaryOrder[BoundaryOrder["ASCENDING"] = 1] = "ASCENDING";
-    BoundaryOrder[BoundaryOrder["DESCENDING"] = 2] = "DESCENDING";
-})(BoundaryOrder = exports.BoundaryOrder || (exports.BoundaryOrder = {}));

package/dist/parquetjs/parquet-thrift/BsonType.d.ts.map
@@ -1 +0,0 @@
{"version":3,"file":"BsonType.d.ts","sourceRoot":"","sources":["../../../src/parquetjs/parquet-thrift/BsonType.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,MAAM,WAAW,aAAa;CAAG;AACjC,qBAAa,QAAQ;;IAEZ,KAAK,CAAC,MAAM,EAAE,MAAM,CAAC,SAAS,GAAG,IAAI;WAM9B,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,SAAS,GAAG,QAAQ;CAmBtD"}